diff --git a/logs_norope/diff_modes/mode_0_param_norope_seed_42/config.json b/logs_norope/diff_modes/mode_0_param_norope_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..bf8f7dd17eff73546c645f585116d4bb39e8f023 --- /dev/null +++ b/logs_norope/diff_modes/mode_0_param_norope_seed_42/config.json @@ -0,0 +1,22 @@ +{ + "cli_args": { + "unet": false, + "seed": 42, + "optimizer_mode": 0, + "model_parameterization": "norope" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 10485760, + "train_seq_len": 49152, + "val_seq_len": 262144, + "num_iterations": 10000, + "cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 125, + "save_checkpoint": false + }, + "run_uuid_for_log": "f77b067c-976e-4096-b123-f4fd1f3941ec", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_norope/diff_modes/mode_0_param_norope_seed_42/training_log_f77b067c-976e-4096-b123-f4fd1f3941ec.txt b/logs_norope/diff_modes/mode_0_param_norope_seed_42/training_log_f77b067c-976e-4096-b123-f4fd1f3941ec.txt new file mode 100644 index 0000000000000000000000000000000000000000..98c76a841263e14174b80ef7ef71ec46223292a4 --- /dev/null +++ b/logs_norope/diff_modes/mode_0_param_norope_seed_42/training_log_f77b067c-976e-4096-b123-f4fd1f3941ec.txt @@ -0,0 +1,2360 @@ +[2025-07-17 09:50:57] [Rank 0] PRINT: --- Script Start: Thu Jul 17 09:50:57 2025 --- +[2025-07-17 09:50:57] [Rank 0] PRINT: --- Script Start: Thu Jul 17 09:50:57 2025 --- +[2025-07-17 09:50:57] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=0, model_parameterization='norope') +[2025-07-17 09:50:57] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=0, model_parameterization='norope') +[2025-07-17 09:50:57] [Rank 0] PRINT: 
Hyperparameters: Hyperparameters() +[2025-07-17 09:50:57] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 09:50:57] [Rank 0] PRINT: Using fixed seed: 42 +[2025-07-17 09:50:57] [Rank 0] PRINT: Using fixed seed: 42 +[2025-07-17 09:50:58] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_0_param_norope_seed_42 +[2025-07-17 09:50:58] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_0_param_norope_seed_42 +[2025-07-17 09:50:58] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if 
torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + 
+@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: 
{logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if 
master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + 
attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 09:50:58] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + 
num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + 
run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 09:50:58] [Rank 0] PRINT: Constructing model... +[2025-07-17 09:50:58] [Rank 0] PRINT: Constructing model... +[2025-07-17 09:51:00] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 09:51:00] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 09:51:00] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 09:51:00] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 09:51:00] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 09:51:00] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 09:51:00] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 09:51:00] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 09:51:00] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 0 +[2025-07-17 09:51:00] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 0 +[2025-07-17 09:51:00] [Rank 0] PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices. +[2025-07-17 09:51:00] [Rank 0] PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices. +[2025-07-17 09:51:00] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-07-17 09:51:00] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-07-17 09:51:00] [Rank 0] PRINT: Muon optimizer is active with 68 parameters. +[2025-07-17 09:51:00] [Rank 0] PRINT: Muon optimizer is active with 68 parameters. +[2025-07-17 09:51:00] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 09:51:00] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 09:51:01] [Rank 0] PRINT: Model compilation complete. +[2025-07-17 09:51:01] [Rank 0] PRINT: Model compilation complete. +[2025-07-17 09:51:01] [Rank 0] PRINT: Starting warmup... 
+[2025-07-17 09:51:01] [Rank 0] PRINT: Starting warmup... +[2025-07-17 10:04:20] [Rank 0] PRINT: Warmup complete. +[2025-07-17 10:04:20] [Rank 0] PRINT: Warmup complete. +[2025-07-17 10:04:21] [Rank 0] PRINT: Starting training... +[2025-07-17 10:04:21] [Rank 0] PRINT: Starting training... +[2025-07-17 10:06:21] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 10:06:21] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 10:06:26] [Rank 0] step:21/10000 train_time:4875ms step_avg:232.14ms +[2025-07-17 10:06:26] [Rank 0] step:21/10000 train_time:4875ms step_avg:232.14ms +[2025-07-17 10:06:31] [Rank 0] step:41/10000 train_time:9439ms step_avg:230.21ms +[2025-07-17 10:06:31] [Rank 0] step:41/10000 train_time:9439ms step_avg:230.21ms +[2025-07-17 10:06:35] [Rank 0] step:61/10000 train_time:14008ms step_avg:229.63ms +[2025-07-17 10:06:35] [Rank 0] step:61/10000 train_time:14008ms step_avg:229.63ms +[2025-07-17 10:06:40] [Rank 0] step:81/10000 train_time:18575ms step_avg:229.32ms +[2025-07-17 10:06:40] [Rank 0] step:81/10000 train_time:18575ms step_avg:229.32ms +[2025-07-17 10:06:44] [Rank 0] step:101/10000 train_time:23140ms step_avg:229.11ms +[2025-07-17 10:06:44] [Rank 0] step:101/10000 train_time:23140ms step_avg:229.11ms +[2025-07-17 10:06:49] [Rank 0] step:121/10000 train_time:27706ms step_avg:228.98ms +[2025-07-17 10:06:49] [Rank 0] step:121/10000 train_time:27706ms step_avg:228.98ms +[2025-07-17 10:06:54] [Rank 0] PRINT: step:125/10000 val_loss:5.1155 train_time:28850ms step_avg:230.80ms +[2025-07-17 10:06:54] [Rank 0] PRINT: step:125/10000 val_loss:5.1155 train_time:28850ms step_avg:230.80ms +[2025-07-17 10:06:57] [Rank 0] step:141/10000 train_time:32268ms step_avg:228.85ms +[2025-07-17 10:06:57] [Rank 0] step:141/10000 train_time:32268ms step_avg:228.85ms +[2025-07-17 10:07:02] [Rank 0] step:161/10000 train_time:36834ms step_avg:228.78ms +[2025-07-17 10:07:02] [Rank 0] step:161/10000 
train_time:36834ms step_avg:228.78ms +[2025-07-17 10:07:07] [Rank 0] step:181/10000 train_time:41402ms step_avg:228.74ms +[2025-07-17 10:07:07] [Rank 0] step:181/10000 train_time:41402ms step_avg:228.74ms +[2025-07-17 10:07:11] [Rank 0] step:201/10000 train_time:45974ms step_avg:228.73ms +[2025-07-17 10:07:11] [Rank 0] step:201/10000 train_time:45974ms step_avg:228.73ms +[2025-07-17 10:07:16] [Rank 0] step:221/10000 train_time:50548ms step_avg:228.72ms +[2025-07-17 10:07:16] [Rank 0] step:221/10000 train_time:50548ms step_avg:228.72ms +[2025-07-17 10:07:20] [Rank 0] step:241/10000 train_time:55122ms step_avg:228.72ms +[2025-07-17 10:07:20] [Rank 0] step:241/10000 train_time:55122ms step_avg:228.72ms +[2025-07-17 10:07:27] [Rank 0] PRINT: step:250/10000 val_loss:4.6796 train_time:57410ms step_avg:229.64ms +[2025-07-17 10:07:27] [Rank 0] PRINT: step:250/10000 val_loss:4.6796 train_time:57410ms step_avg:229.64ms +[2025-07-17 10:07:29] [Rank 0] step:261/10000 train_time:59692ms step_avg:228.70ms +[2025-07-17 10:07:29] [Rank 0] step:261/10000 train_time:59692ms step_avg:228.70ms +[2025-07-17 10:07:34] [Rank 0] step:281/10000 train_time:64264ms step_avg:228.70ms +[2025-07-17 10:07:34] [Rank 0] step:281/10000 train_time:64264ms step_avg:228.70ms +[2025-07-17 10:07:38] [Rank 0] step:301/10000 train_time:68838ms step_avg:228.70ms +[2025-07-17 10:07:38] [Rank 0] step:301/10000 train_time:68838ms step_avg:228.70ms +[2025-07-17 10:07:43] [Rank 0] step:321/10000 train_time:73409ms step_avg:228.69ms +[2025-07-17 10:07:43] [Rank 0] step:321/10000 train_time:73409ms step_avg:228.69ms +[2025-07-17 10:07:48] [Rank 0] step:341/10000 train_time:77989ms step_avg:228.71ms +[2025-07-17 10:07:48] [Rank 0] step:341/10000 train_time:77989ms step_avg:228.71ms +[2025-07-17 10:07:52] [Rank 0] step:361/10000 train_time:82569ms step_avg:228.72ms +[2025-07-17 10:07:52] [Rank 0] step:361/10000 train_time:82569ms step_avg:228.72ms +[2025-07-17 10:08:00] [Rank 0] PRINT: step:375/10000 
val_loss:4.4891 train_time:86005ms step_avg:229.35ms +[2025-07-17 10:08:00] [Rank 0] PRINT: step:375/10000 val_loss:4.4891 train_time:86005ms step_avg:229.35ms +[2025-07-17 10:08:01] [Rank 0] step:381/10000 train_time:87151ms step_avg:228.74ms +[2025-07-17 10:08:01] [Rank 0] step:381/10000 train_time:87151ms step_avg:228.74ms +[2025-07-17 10:08:06] [Rank 0] step:401/10000 train_time:91726ms step_avg:228.74ms +[2025-07-17 10:08:06] [Rank 0] step:401/10000 train_time:91726ms step_avg:228.74ms +[2025-07-17 10:08:10] [Rank 0] step:421/10000 train_time:96299ms step_avg:228.74ms +[2025-07-17 10:08:10] [Rank 0] step:421/10000 train_time:96299ms step_avg:228.74ms +[2025-07-17 10:08:15] [Rank 0] step:441/10000 train_time:100974ms step_avg:228.97ms +[2025-07-17 10:08:15] [Rank 0] step:441/10000 train_time:100974ms step_avg:228.97ms +[2025-07-17 10:08:20] [Rank 0] step:461/10000 train_time:105550ms step_avg:228.96ms +[2025-07-17 10:08:20] [Rank 0] step:461/10000 train_time:105550ms step_avg:228.96ms +[2025-07-17 10:08:24] [Rank 0] step:481/10000 train_time:110127ms step_avg:228.96ms +[2025-07-17 10:08:24] [Rank 0] step:481/10000 train_time:110127ms step_avg:228.96ms +[2025-07-17 10:08:33] [Rank 0] PRINT: step:500/10000 val_loss:4.4120 train_time:114706ms step_avg:229.41ms +[2025-07-17 10:08:33] [Rank 0] PRINT: step:500/10000 val_loss:4.4120 train_time:114706ms step_avg:229.41ms +[2025-07-17 10:08:33] [Rank 0] step:501/10000 train_time:114727ms step_avg:229.00ms +[2025-07-17 10:08:33] [Rank 0] step:501/10000 train_time:114727ms step_avg:229.00ms +[2025-07-17 10:08:38] [Rank 0] step:521/10000 train_time:119813ms step_avg:229.97ms +[2025-07-17 10:08:38] [Rank 0] step:521/10000 train_time:119813ms step_avg:229.97ms +[2025-07-17 10:08:43] [Rank 0] step:541/10000 train_time:124389ms step_avg:229.92ms +[2025-07-17 10:08:43] [Rank 0] step:541/10000 train_time:124389ms step_avg:229.92ms +[2025-07-17 10:08:47] [Rank 0] step:561/10000 train_time:128965ms step_avg:229.88ms +[2025-07-17 
10:08:47] [Rank 0] step:561/10000 train_time:128965ms step_avg:229.88ms +[2025-07-17 10:08:52] [Rank 0] step:581/10000 train_time:133544ms step_avg:229.85ms +[2025-07-17 10:08:52] [Rank 0] step:581/10000 train_time:133544ms step_avg:229.85ms +[2025-07-17 10:08:57] [Rank 0] step:601/10000 train_time:138122ms step_avg:229.82ms +[2025-07-17 10:08:57] [Rank 0] step:601/10000 train_time:138122ms step_avg:229.82ms +[2025-07-17 10:09:01] [Rank 0] step:621/10000 train_time:142699ms step_avg:229.79ms +[2025-07-17 10:09:01] [Rank 0] step:621/10000 train_time:142699ms step_avg:229.79ms +[2025-07-17 10:09:07] [Rank 0] PRINT: step:625/10000 val_loss:4.4841 train_time:143846ms step_avg:230.15ms +[2025-07-17 10:09:07] [Rank 0] PRINT: step:625/10000 val_loss:4.4841 train_time:143846ms step_avg:230.15ms +[2025-07-17 10:09:10] [Rank 0] step:641/10000 train_time:147274ms step_avg:229.76ms +[2025-07-17 10:09:10] [Rank 0] step:641/10000 train_time:147274ms step_avg:229.76ms +[2025-07-17 10:09:15] [Rank 0] step:661/10000 train_time:151854ms step_avg:229.73ms +[2025-07-17 10:09:15] [Rank 0] step:661/10000 train_time:151854ms step_avg:229.73ms +[2025-07-17 10:09:19] [Rank 0] step:681/10000 train_time:156431ms step_avg:229.71ms +[2025-07-17 10:09:19] [Rank 0] step:681/10000 train_time:156431ms step_avg:229.71ms +[2025-07-17 10:09:24] [Rank 0] step:701/10000 train_time:161011ms step_avg:229.69ms +[2025-07-17 10:09:24] [Rank 0] step:701/10000 train_time:161011ms step_avg:229.69ms +[2025-07-17 10:09:29] [Rank 0] step:721/10000 train_time:165594ms step_avg:229.67ms +[2025-07-17 10:09:29] [Rank 0] step:721/10000 train_time:165594ms step_avg:229.67ms +[2025-07-17 10:09:33] [Rank 0] step:741/10000 train_time:170178ms step_avg:229.66ms +[2025-07-17 10:09:33] [Rank 0] step:741/10000 train_time:170178ms step_avg:229.66ms +[2025-07-17 10:09:40] [Rank 0] PRINT: step:750/10000 val_loss:4.4968 train_time:172487ms step_avg:229.98ms +[2025-07-17 10:09:40] [Rank 0] PRINT: step:750/10000 val_loss:4.4968 
train_time:172487ms step_avg:229.98ms +[2025-07-17 10:09:42] [Rank 0] step:761/10000 train_time:174794ms step_avg:229.69ms +[2025-07-17 10:09:42] [Rank 0] step:761/10000 train_time:174794ms step_avg:229.69ms +[2025-07-17 10:09:47] [Rank 0] step:781/10000 train_time:179411ms step_avg:229.72ms +[2025-07-17 10:09:47] [Rank 0] step:781/10000 train_time:179411ms step_avg:229.72ms +[2025-07-17 10:09:51] [Rank 0] step:801/10000 train_time:184029ms step_avg:229.75ms +[2025-07-17 10:09:51] [Rank 0] step:801/10000 train_time:184029ms step_avg:229.75ms +[2025-07-17 10:09:56] [Rank 0] step:821/10000 train_time:188647ms step_avg:229.78ms +[2025-07-17 10:09:56] [Rank 0] step:821/10000 train_time:188647ms step_avg:229.78ms +[2025-07-17 10:10:01] [Rank 0] step:841/10000 train_time:193268ms step_avg:229.81ms +[2025-07-17 10:10:01] [Rank 0] step:841/10000 train_time:193268ms step_avg:229.81ms +[2025-07-17 10:10:05] [Rank 0] step:861/10000 train_time:197890ms step_avg:229.84ms +[2025-07-17 10:10:05] [Rank 0] step:861/10000 train_time:197890ms step_avg:229.84ms +[2025-07-17 10:10:13] [Rank 0] PRINT: step:875/10000 val_loss:4.4538 train_time:201355ms step_avg:230.12ms +[2025-07-17 10:10:13] [Rank 0] PRINT: step:875/10000 val_loss:4.4538 train_time:201355ms step_avg:230.12ms +[2025-07-17 10:10:14] [Rank 0] step:881/10000 train_time:202510ms step_avg:229.86ms +[2025-07-17 10:10:14] [Rank 0] step:881/10000 train_time:202510ms step_avg:229.86ms +[2025-07-17 10:10:19] [Rank 0] step:901/10000 train_time:207128ms step_avg:229.89ms +[2025-07-17 10:10:19] [Rank 0] step:901/10000 train_time:207128ms step_avg:229.89ms +[2025-07-17 10:10:24] [Rank 0] step:921/10000 train_time:211748ms step_avg:229.91ms +[2025-07-17 10:10:24] [Rank 0] step:921/10000 train_time:211748ms step_avg:229.91ms +[2025-07-17 10:10:28] [Rank 0] step:941/10000 train_time:216371ms step_avg:229.94ms +[2025-07-17 10:10:28] [Rank 0] step:941/10000 train_time:216371ms step_avg:229.94ms +[2025-07-17 10:10:33] [Rank 0] 
step:961/10000 train_time:220993ms step_avg:229.96ms +[2025-07-17 10:10:33] [Rank 0] step:961/10000 train_time:220993ms step_avg:229.96ms +[2025-07-17 10:10:37] [Rank 0] step:981/10000 train_time:225613ms step_avg:229.98ms +[2025-07-17 10:10:37] [Rank 0] step:981/10000 train_time:225613ms step_avg:229.98ms +[2025-07-17 10:10:46] [Rank 0] PRINT: step:1000/10000 val_loss:4.5604 train_time:230236ms step_avg:230.24ms +[2025-07-17 10:10:46] [Rank 0] PRINT: step:1000/10000 val_loss:4.5604 train_time:230236ms step_avg:230.24ms +[2025-07-17 10:10:47] [Rank 0] step:1001/10000 train_time:230257ms step_avg:230.03ms +[2025-07-17 10:10:47] [Rank 0] step:1001/10000 train_time:230257ms step_avg:230.03ms +[2025-07-17 10:10:52] [Rank 0] step:1021/10000 train_time:235380ms step_avg:230.54ms +[2025-07-17 10:10:52] [Rank 0] step:1021/10000 train_time:235380ms step_avg:230.54ms +[2025-07-17 10:10:56] [Rank 0] step:1041/10000 train_time:240001ms step_avg:230.55ms +[2025-07-17 10:10:56] [Rank 0] step:1041/10000 train_time:240001ms step_avg:230.55ms +[2025-07-17 10:11:01] [Rank 0] step:1061/10000 train_time:244626ms step_avg:230.56ms +[2025-07-17 10:11:01] [Rank 0] step:1061/10000 train_time:244626ms step_avg:230.56ms +[2025-07-17 10:11:06] [Rank 0] step:1081/10000 train_time:249252ms step_avg:230.58ms +[2025-07-17 10:11:06] [Rank 0] step:1081/10000 train_time:249252ms step_avg:230.58ms +[2025-07-17 10:11:10] [Rank 0] step:1101/10000 train_time:253882ms step_avg:230.59ms +[2025-07-17 10:11:10] [Rank 0] step:1101/10000 train_time:253882ms step_avg:230.59ms +[2025-07-17 10:11:15] [Rank 0] step:1121/10000 train_time:258516ms step_avg:230.61ms +[2025-07-17 10:11:15] [Rank 0] step:1121/10000 train_time:258516ms step_avg:230.61ms +[2025-07-17 10:11:20] [Rank 0] PRINT: step:1125/10000 val_loss:4.5649 train_time:259674ms step_avg:230.82ms +[2025-07-17 10:11:20] [Rank 0] PRINT: step:1125/10000 val_loss:4.5649 train_time:259674ms step_avg:230.82ms +[2025-07-17 10:11:24] [Rank 0] step:1141/10000 
train_time:263147ms step_avg:230.63ms +[2025-07-17 10:11:24] [Rank 0] step:1141/10000 train_time:263147ms step_avg:230.63ms +[2025-07-17 10:11:29] [Rank 0] step:1161/10000 train_time:267782ms step_avg:230.65ms +[2025-07-17 10:11:29] [Rank 0] step:1161/10000 train_time:267782ms step_avg:230.65ms +[2025-07-17 10:11:33] [Rank 0] step:1181/10000 train_time:272411ms step_avg:230.66ms +[2025-07-17 10:11:33] [Rank 0] step:1181/10000 train_time:272411ms step_avg:230.66ms +[2025-07-17 10:11:38] [Rank 0] step:1201/10000 train_time:277047ms step_avg:230.68ms +[2025-07-17 10:11:38] [Rank 0] step:1201/10000 train_time:277047ms step_avg:230.68ms +[2025-07-17 10:11:42] [Rank 0] step:1221/10000 train_time:281678ms step_avg:230.69ms +[2025-07-17 10:11:42] [Rank 0] step:1221/10000 train_time:281678ms step_avg:230.69ms +[2025-07-17 10:11:47] [Rank 0] step:1241/10000 train_time:286310ms step_avg:230.71ms +[2025-07-17 10:11:47] [Rank 0] step:1241/10000 train_time:286310ms step_avg:230.71ms +[2025-07-17 10:11:54] [Rank 0] PRINT: step:1250/10000 val_loss:4.5724 train_time:288626ms step_avg:230.90ms +[2025-07-17 10:11:54] [Rank 0] PRINT: step:1250/10000 val_loss:4.5724 train_time:288626ms step_avg:230.90ms +[2025-07-17 10:11:56] [Rank 0] step:1261/10000 train_time:290939ms step_avg:230.72ms +[2025-07-17 10:11:56] [Rank 0] step:1261/10000 train_time:290939ms step_avg:230.72ms +[2025-07-17 10:12:01] [Rank 0] step:1281/10000 train_time:295570ms step_avg:230.73ms +[2025-07-17 10:12:01] [Rank 0] step:1281/10000 train_time:295570ms step_avg:230.73ms +[2025-07-17 10:12:05] [Rank 0] step:1301/10000 train_time:300201ms step_avg:230.75ms +[2025-07-17 10:12:05] [Rank 0] step:1301/10000 train_time:300201ms step_avg:230.75ms +[2025-07-17 10:12:10] [Rank 0] step:1321/10000 train_time:304833ms step_avg:230.76ms +[2025-07-17 10:12:10] [Rank 0] step:1321/10000 train_time:304833ms step_avg:230.76ms +[2025-07-17 10:12:15] [Rank 0] step:1341/10000 train_time:309470ms step_avg:230.78ms +[2025-07-17 10:12:15] 
[Rank 0] step:1341/10000 train_time:309470ms step_avg:230.78ms +[2025-07-17 10:12:19] [Rank 0] step:1361/10000 train_time:314106ms step_avg:230.79ms +[2025-07-17 10:12:19] [Rank 0] step:1361/10000 train_time:314106ms step_avg:230.79ms +[2025-07-17 10:12:27] [Rank 0] PRINT: step:1375/10000 val_loss:4.6556 train_time:317581ms step_avg:230.97ms +[2025-07-17 10:12:27] [Rank 0] PRINT: step:1375/10000 val_loss:4.6556 train_time:317581ms step_avg:230.97ms +[2025-07-17 10:12:28] [Rank 0] step:1381/10000 train_time:318742ms step_avg:230.81ms +[2025-07-17 10:12:28] [Rank 0] step:1381/10000 train_time:318742ms step_avg:230.81ms +[2025-07-17 10:12:33] [Rank 0] step:1401/10000 train_time:323375ms step_avg:230.82ms +[2025-07-17 10:12:33] [Rank 0] step:1401/10000 train_time:323375ms step_avg:230.82ms +[2025-07-17 10:12:38] [Rank 0] step:1421/10000 train_time:328011ms step_avg:230.83ms +[2025-07-17 10:12:38] [Rank 0] step:1421/10000 train_time:328011ms step_avg:230.83ms +[2025-07-17 10:12:42] [Rank 0] step:1441/10000 train_time:332643ms step_avg:230.84ms +[2025-07-17 10:12:42] [Rank 0] step:1441/10000 train_time:332643ms step_avg:230.84ms +[2025-07-17 10:12:47] [Rank 0] step:1461/10000 train_time:337281ms step_avg:230.86ms +[2025-07-17 10:12:47] [Rank 0] step:1461/10000 train_time:337281ms step_avg:230.86ms +[2025-07-17 10:12:52] [Rank 0] step:1481/10000 train_time:341916ms step_avg:230.87ms +[2025-07-17 10:12:52] [Rank 0] step:1481/10000 train_time:341916ms step_avg:230.87ms +[2025-07-17 10:13:01] [Rank 0] PRINT: step:1500/10000 val_loss:4.5906 train_time:346573ms step_avg:231.05ms +[2025-07-17 10:13:01] [Rank 0] PRINT: step:1500/10000 val_loss:4.5906 train_time:346573ms step_avg:231.05ms +[2025-07-17 10:13:01] [Rank 0] step:1501/10000 train_time:346595ms step_avg:230.91ms +[2025-07-17 10:13:01] [Rank 0] step:1501/10000 train_time:346595ms step_avg:230.91ms +[2025-07-17 10:13:05] [Rank 0] step:1521/10000 train_time:351237ms step_avg:230.93ms +[2025-07-17 10:13:05] [Rank 0] 
step:1521/10000 train_time:351237ms step_avg:230.93ms +[2025-07-17 10:13:11] [Rank 0] step:1541/10000 train_time:356433ms step_avg:231.30ms +[2025-07-17 10:13:11] [Rank 0] step:1541/10000 train_time:356433ms step_avg:231.30ms +[2025-07-17 10:13:15] [Rank 0] step:1561/10000 train_time:361088ms step_avg:231.32ms +[2025-07-17 10:13:15] [Rank 0] step:1561/10000 train_time:361088ms step_avg:231.32ms +[2025-07-17 10:13:20] [Rank 0] step:1581/10000 train_time:365750ms step_avg:231.34ms +[2025-07-17 10:13:20] [Rank 0] step:1581/10000 train_time:365750ms step_avg:231.34ms +[2025-07-17 10:13:25] [Rank 0] step:1601/10000 train_time:370408ms step_avg:231.36ms +[2025-07-17 10:13:25] [Rank 0] step:1601/10000 train_time:370408ms step_avg:231.36ms +[2025-07-17 10:13:29] [Rank 0] step:1621/10000 train_time:375069ms step_avg:231.38ms +[2025-07-17 10:13:29] [Rank 0] step:1621/10000 train_time:375069ms step_avg:231.38ms +[2025-07-17 10:13:35] [Rank 0] PRINT: step:1625/10000 val_loss:4.6165 train_time:376235ms step_avg:231.53ms +[2025-07-17 10:13:35] [Rank 0] PRINT: step:1625/10000 val_loss:4.6165 train_time:376235ms step_avg:231.53ms +[2025-07-17 10:13:38] [Rank 0] step:1641/10000 train_time:379728ms step_avg:231.40ms +[2025-07-17 10:13:38] [Rank 0] step:1641/10000 train_time:379728ms step_avg:231.40ms +[2025-07-17 10:13:43] [Rank 0] step:1661/10000 train_time:384391ms step_avg:231.42ms +[2025-07-17 10:13:43] [Rank 0] step:1661/10000 train_time:384391ms step_avg:231.42ms +[2025-07-17 10:13:48] [Rank 0] step:1681/10000 train_time:389055ms step_avg:231.44ms +[2025-07-17 10:13:48] [Rank 0] step:1681/10000 train_time:389055ms step_avg:231.44ms +[2025-07-17 10:13:52] [Rank 0] step:1701/10000 train_time:393722ms step_avg:231.46ms +[2025-07-17 10:13:52] [Rank 0] step:1701/10000 train_time:393722ms step_avg:231.46ms +[2025-07-17 10:13:57] [Rank 0] step:1721/10000 train_time:398390ms step_avg:231.49ms +[2025-07-17 10:13:57] [Rank 0] step:1721/10000 train_time:398390ms step_avg:231.49ms 
+[2025-07-17 10:14:02] [Rank 0] step:1741/10000 train_time:403056ms step_avg:231.51ms +[2025-07-17 10:14:02] [Rank 0] step:1741/10000 train_time:403056ms step_avg:231.51ms +[2025-07-17 10:14:08] [Rank 0] PRINT: step:1750/10000 val_loss:4.7128 train_time:405391ms step_avg:231.65ms +[2025-07-17 10:14:08] [Rank 0] PRINT: step:1750/10000 val_loss:4.7128 train_time:405391ms step_avg:231.65ms +[2025-07-17 10:14:11] [Rank 0] step:1761/10000 train_time:407721ms step_avg:231.53ms +[2025-07-17 10:14:11] [Rank 0] step:1761/10000 train_time:407721ms step_avg:231.53ms +[2025-07-17 10:14:16] [Rank 0] step:1781/10000 train_time:412387ms step_avg:231.55ms +[2025-07-17 10:14:16] [Rank 0] step:1781/10000 train_time:412387ms step_avg:231.55ms +[2025-07-17 10:14:20] [Rank 0] step:1801/10000 train_time:417052ms step_avg:231.57ms +[2025-07-17 10:14:20] [Rank 0] step:1801/10000 train_time:417052ms step_avg:231.57ms +[2025-07-17 10:14:25] [Rank 0] step:1821/10000 train_time:421719ms step_avg:231.59ms +[2025-07-17 10:14:25] [Rank 0] step:1821/10000 train_time:421719ms step_avg:231.59ms +[2025-07-17 10:14:30] [Rank 0] step:1841/10000 train_time:426389ms step_avg:231.61ms +[2025-07-17 10:14:30] [Rank 0] step:1841/10000 train_time:426389ms step_avg:231.61ms +[2025-07-17 10:14:34] [Rank 0] step:1861/10000 train_time:431061ms step_avg:231.63ms +[2025-07-17 10:14:34] [Rank 0] step:1861/10000 train_time:431061ms step_avg:231.63ms +[2025-07-17 10:14:42] [Rank 0] PRINT: step:1875/10000 val_loss:4.6753 train_time:434562ms step_avg:231.77ms +[2025-07-17 10:14:42] [Rank 0] PRINT: step:1875/10000 val_loss:4.6753 train_time:434562ms step_avg:231.77ms +[2025-07-17 10:14:43] [Rank 0] step:1881/10000 train_time:435727ms step_avg:231.65ms +[2025-07-17 10:14:43] [Rank 0] step:1881/10000 train_time:435727ms step_avg:231.65ms +[2025-07-17 10:14:48] [Rank 0] step:1901/10000 train_time:440397ms step_avg:231.67ms +[2025-07-17 10:14:48] [Rank 0] step:1901/10000 train_time:440397ms step_avg:231.67ms +[2025-07-17 
10:14:53] [Rank 0] step:1921/10000 train_time:445064ms step_avg:231.68ms +[2025-07-17 10:14:53] [Rank 0] step:1921/10000 train_time:445064ms step_avg:231.68ms +[2025-07-17 10:14:57] [Rank 0] step:1941/10000 train_time:449734ms step_avg:231.70ms +[2025-07-17 10:14:57] [Rank 0] step:1941/10000 train_time:449734ms step_avg:231.70ms +[2025-07-17 10:15:02] [Rank 0] step:1961/10000 train_time:454406ms step_avg:231.72ms +[2025-07-17 10:15:02] [Rank 0] step:1961/10000 train_time:454406ms step_avg:231.72ms +[2025-07-17 10:15:07] [Rank 0] step:1981/10000 train_time:459077ms step_avg:231.74ms +[2025-07-17 10:15:07] [Rank 0] step:1981/10000 train_time:459077ms step_avg:231.74ms +[2025-07-17 10:15:16] [Rank 0] PRINT: step:2000/10000 val_loss:4.7230 train_time:463739ms step_avg:231.87ms +[2025-07-17 10:15:16] [Rank 0] PRINT: step:2000/10000 val_loss:4.7230 train_time:463739ms step_avg:231.87ms +[2025-07-17 10:15:16] [Rank 0] step:2001/10000 train_time:463760ms step_avg:231.76ms +[2025-07-17 10:15:16] [Rank 0] step:2001/10000 train_time:463760ms step_avg:231.76ms +[2025-07-17 10:15:21] [Rank 0] step:2021/10000 train_time:468405ms step_avg:231.77ms +[2025-07-17 10:15:21] [Rank 0] step:2021/10000 train_time:468405ms step_avg:231.77ms +[2025-07-17 10:15:26] [Rank 0] step:2041/10000 train_time:473577ms step_avg:232.03ms +[2025-07-17 10:15:26] [Rank 0] step:2041/10000 train_time:473577ms step_avg:232.03ms +[2025-07-17 10:15:30] [Rank 0] step:2061/10000 train_time:478242ms step_avg:232.04ms +[2025-07-17 10:15:30] [Rank 0] step:2061/10000 train_time:478242ms step_avg:232.04ms +[2025-07-17 10:15:35] [Rank 0] step:2081/10000 train_time:482908ms step_avg:232.06ms +[2025-07-17 10:15:35] [Rank 0] step:2081/10000 train_time:482908ms step_avg:232.06ms +[2025-07-17 10:15:40] [Rank 0] step:2101/10000 train_time:487575ms step_avg:232.07ms +[2025-07-17 10:15:40] [Rank 0] step:2101/10000 train_time:487575ms step_avg:232.07ms +[2025-07-17 10:15:44] [Rank 0] step:2121/10000 train_time:492244ms 
step_avg:232.08ms +[2025-07-17 10:15:44] [Rank 0] step:2121/10000 train_time:492244ms step_avg:232.08ms +[2025-07-17 10:15:50] [Rank 0] PRINT: step:2125/10000 val_loss:4.7590 train_time:493414ms step_avg:232.19ms +[2025-07-17 10:15:50] [Rank 0] PRINT: step:2125/10000 val_loss:4.7590 train_time:493414ms step_avg:232.19ms +[2025-07-17 10:15:54] [Rank 0] step:2141/10000 train_time:496912ms step_avg:232.09ms +[2025-07-17 10:15:54] [Rank 0] step:2141/10000 train_time:496912ms step_avg:232.09ms +[2025-07-17 10:15:58] [Rank 0] step:2161/10000 train_time:501578ms step_avg:232.10ms +[2025-07-17 10:15:58] [Rank 0] step:2161/10000 train_time:501578ms step_avg:232.10ms +[2025-07-17 10:16:03] [Rank 0] step:2181/10000 train_time:506242ms step_avg:232.11ms +[2025-07-17 10:16:03] [Rank 0] step:2181/10000 train_time:506242ms step_avg:232.11ms +[2025-07-17 10:16:08] [Rank 0] step:2201/10000 train_time:510909ms step_avg:232.13ms +[2025-07-17 10:16:08] [Rank 0] step:2201/10000 train_time:510909ms step_avg:232.13ms +[2025-07-17 10:16:12] [Rank 0] step:2221/10000 train_time:515576ms step_avg:232.14ms +[2025-07-17 10:16:12] [Rank 0] step:2221/10000 train_time:515576ms step_avg:232.14ms +[2025-07-17 10:16:17] [Rank 0] step:2241/10000 train_time:520337ms step_avg:232.19ms +[2025-07-17 10:16:17] [Rank 0] step:2241/10000 train_time:520337ms step_avg:232.19ms +[2025-07-17 10:16:24] [Rank 0] PRINT: step:2250/10000 val_loss:4.0941 train_time:522725ms step_avg:232.32ms +[2025-07-17 10:16:24] [Rank 0] PRINT: step:2250/10000 val_loss:4.0941 train_time:522725ms step_avg:232.32ms +[2025-07-17 10:16:26] [Rank 0] step:2261/10000 train_time:525113ms step_avg:232.25ms +[2025-07-17 10:16:26] [Rank 0] step:2261/10000 train_time:525113ms step_avg:232.25ms +[2025-07-17 10:16:31] [Rank 0] step:2281/10000 train_time:529893ms step_avg:232.31ms +[2025-07-17 10:16:31] [Rank 0] step:2281/10000 train_time:529893ms step_avg:232.31ms +[2025-07-17 10:16:36] [Rank 0] step:2301/10000 train_time:534674ms 
step_avg:232.37ms +[2025-07-17 10:16:36] [Rank 0] step:2301/10000 train_time:534674ms step_avg:232.37ms +[2025-07-17 10:16:41] [Rank 0] step:2321/10000 train_time:539455ms step_avg:232.42ms +[2025-07-17 10:16:41] [Rank 0] step:2321/10000 train_time:539455ms step_avg:232.42ms +[2025-07-17 10:16:45] [Rank 0] step:2341/10000 train_time:544235ms step_avg:232.48ms +[2025-07-17 10:16:45] [Rank 0] step:2341/10000 train_time:544235ms step_avg:232.48ms +[2025-07-17 10:16:50] [Rank 0] step:2361/10000 train_time:549017ms step_avg:232.54ms +[2025-07-17 10:16:50] [Rank 0] step:2361/10000 train_time:549017ms step_avg:232.54ms +[2025-07-17 10:16:58] [Rank 0] PRINT: step:2375/10000 val_loss:4.1669 train_time:552601ms step_avg:232.67ms +[2025-07-17 10:16:58] [Rank 0] PRINT: step:2375/10000 val_loss:4.1669 train_time:552601ms step_avg:232.67ms +[2025-07-17 10:17:00] [Rank 0] step:2381/10000 train_time:553800ms step_avg:232.59ms +[2025-07-17 10:17:00] [Rank 0] step:2381/10000 train_time:553800ms step_avg:232.59ms +[2025-07-17 10:17:04] [Rank 0] step:2401/10000 train_time:558581ms step_avg:232.65ms +[2025-07-17 10:17:04] [Rank 0] step:2401/10000 train_time:558581ms step_avg:232.65ms +[2025-07-17 10:17:09] [Rank 0] step:2421/10000 train_time:563361ms step_avg:232.70ms +[2025-07-17 10:17:09] [Rank 0] step:2421/10000 train_time:563361ms step_avg:232.70ms +[2025-07-17 10:17:14] [Rank 0] step:2441/10000 train_time:568142ms step_avg:232.75ms +[2025-07-17 10:17:14] [Rank 0] step:2441/10000 train_time:568142ms step_avg:232.75ms +[2025-07-17 10:17:19] [Rank 0] step:2461/10000 train_time:572923ms step_avg:232.80ms +[2025-07-17 10:17:19] [Rank 0] step:2461/10000 train_time:572923ms step_avg:232.80ms +[2025-07-17 10:17:24] [Rank 0] step:2481/10000 train_time:577703ms step_avg:232.85ms +[2025-07-17 10:17:24] [Rank 0] step:2481/10000 train_time:577703ms step_avg:232.85ms +[2025-07-17 10:17:33] [Rank 0] PRINT: step:2500/10000 val_loss:4.3540 train_time:582483ms step_avg:232.99ms +[2025-07-17 
10:17:33] [Rank 0] PRINT: step:2500/10000 val_loss:4.3540 train_time:582483ms step_avg:232.99ms +[2025-07-17 10:17:33] [Rank 0] step:2501/10000 train_time:582505ms step_avg:232.91ms +[2025-07-17 10:17:33] [Rank 0] step:2501/10000 train_time:582505ms step_avg:232.91ms +[2025-07-17 10:17:38] [Rank 0] step:2521/10000 train_time:587263ms step_avg:232.95ms +[2025-07-17 10:17:38] [Rank 0] step:2521/10000 train_time:587263ms step_avg:232.95ms +[2025-07-17 10:17:43] [Rank 0] step:2541/10000 train_time:592047ms step_avg:233.00ms +[2025-07-17 10:17:43] [Rank 0] step:2541/10000 train_time:592047ms step_avg:233.00ms +[2025-07-17 10:17:47] [Rank 0] step:2561/10000 train_time:596930ms step_avg:233.08ms +[2025-07-17 10:17:47] [Rank 0] step:2561/10000 train_time:596930ms step_avg:233.08ms +[2025-07-17 10:17:52] [Rank 0] step:2581/10000 train_time:601716ms step_avg:233.13ms +[2025-07-17 10:17:52] [Rank 0] step:2581/10000 train_time:601716ms step_avg:233.13ms +[2025-07-17 10:17:57] [Rank 0] step:2601/10000 train_time:606497ms step_avg:233.18ms +[2025-07-17 10:17:57] [Rank 0] step:2601/10000 train_time:606497ms step_avg:233.18ms +[2025-07-17 10:18:02] [Rank 0] step:2621/10000 train_time:611280ms step_avg:233.22ms +[2025-07-17 10:18:02] [Rank 0] step:2621/10000 train_time:611280ms step_avg:233.22ms +[2025-07-17 10:18:07] [Rank 0] PRINT: step:2625/10000 val_loss:4.4480 train_time:612478ms step_avg:233.33ms +[2025-07-17 10:18:07] [Rank 0] PRINT: step:2625/10000 val_loss:4.4480 train_time:612478ms step_avg:233.33ms +[2025-07-17 10:18:11] [Rank 0] step:2641/10000 train_time:616063ms step_avg:233.27ms +[2025-07-17 10:18:11] [Rank 0] step:2641/10000 train_time:616063ms step_avg:233.27ms +[2025-07-17 10:18:16] [Rank 0] step:2661/10000 train_time:620849ms step_avg:233.31ms +[2025-07-17 10:18:16] [Rank 0] step:2661/10000 train_time:620849ms step_avg:233.31ms +[2025-07-17 10:18:21] [Rank 0] step:2681/10000 train_time:625635ms step_avg:233.36ms +[2025-07-17 10:18:21] [Rank 0] step:2681/10000 
train_time:625635ms step_avg:233.36ms +[2025-07-17 10:18:25] [Rank 0] step:2701/10000 train_time:630421ms step_avg:233.40ms +[2025-07-17 10:18:25] [Rank 0] step:2701/10000 train_time:630421ms step_avg:233.40ms +[2025-07-17 10:18:30] [Rank 0] step:2721/10000 train_time:635206ms step_avg:233.45ms +[2025-07-17 10:18:30] [Rank 0] step:2721/10000 train_time:635206ms step_avg:233.45ms +[2025-07-17 10:18:35] [Rank 0] step:2741/10000 train_time:639993ms step_avg:233.49ms +[2025-07-17 10:18:35] [Rank 0] step:2741/10000 train_time:639993ms step_avg:233.49ms +[2025-07-17 10:18:42] [Rank 0] PRINT: step:2750/10000 val_loss:4.2952 train_time:642387ms step_avg:233.60ms +[2025-07-17 10:18:42] [Rank 0] PRINT: step:2750/10000 val_loss:4.2952 train_time:642387ms step_avg:233.60ms +[2025-07-17 10:18:44] [Rank 0] step:2761/10000 train_time:644783ms step_avg:233.53ms +[2025-07-17 10:18:44] [Rank 0] step:2761/10000 train_time:644783ms step_avg:233.53ms +[2025-07-17 10:18:49] [Rank 0] step:2781/10000 train_time:649566ms step_avg:233.57ms +[2025-07-17 10:18:49] [Rank 0] step:2781/10000 train_time:649566ms step_avg:233.57ms +[2025-07-17 10:18:54] [Rank 0] step:2801/10000 train_time:654352ms step_avg:233.61ms +[2025-07-17 10:18:54] [Rank 0] step:2801/10000 train_time:654352ms step_avg:233.61ms +[2025-07-17 10:18:59] [Rank 0] step:2821/10000 train_time:659137ms step_avg:233.65ms +[2025-07-17 10:18:59] [Rank 0] step:2821/10000 train_time:659137ms step_avg:233.65ms +[2025-07-17 10:19:03] [Rank 0] step:2841/10000 train_time:663919ms step_avg:233.69ms +[2025-07-17 10:19:03] [Rank 0] step:2841/10000 train_time:663919ms step_avg:233.69ms +[2025-07-17 10:19:08] [Rank 0] step:2861/10000 train_time:668706ms step_avg:233.73ms +[2025-07-17 10:19:08] [Rank 0] step:2861/10000 train_time:668706ms step_avg:233.73ms +[2025-07-17 10:19:16] [Rank 0] PRINT: step:2875/10000 val_loss:4.3057 train_time:672294ms step_avg:233.84ms +[2025-07-17 10:19:16] [Rank 0] PRINT: step:2875/10000 val_loss:4.3057 
train_time:672294ms step_avg:233.84ms +[2025-07-17 10:19:18] [Rank 0] step:2881/10000 train_time:673490ms step_avg:233.77ms +[2025-07-17 10:19:18] [Rank 0] step:2881/10000 train_time:673490ms step_avg:233.77ms +[2025-07-17 10:19:22] [Rank 0] step:2901/10000 train_time:678278ms step_avg:233.81ms +[2025-07-17 10:19:22] [Rank 0] step:2901/10000 train_time:678278ms step_avg:233.81ms +[2025-07-17 10:19:27] [Rank 0] step:2921/10000 train_time:683065ms step_avg:233.85ms +[2025-07-17 10:19:27] [Rank 0] step:2921/10000 train_time:683065ms step_avg:233.85ms +[2025-07-17 10:19:32] [Rank 0] step:2941/10000 train_time:687850ms step_avg:233.88ms +[2025-07-17 10:19:32] [Rank 0] step:2941/10000 train_time:687850ms step_avg:233.88ms +[2025-07-17 10:19:37] [Rank 0] step:2961/10000 train_time:692634ms step_avg:233.92ms +[2025-07-17 10:19:37] [Rank 0] step:2961/10000 train_time:692634ms step_avg:233.92ms +[2025-07-17 10:19:41] [Rank 0] step:2981/10000 train_time:697433ms step_avg:233.96ms +[2025-07-17 10:19:41] [Rank 0] step:2981/10000 train_time:697433ms step_avg:233.96ms +[2025-07-17 10:19:51] [Rank 0] PRINT: step:3000/10000 val_loss:4.3888 train_time:702233ms step_avg:234.08ms +[2025-07-17 10:19:51] [Rank 0] PRINT: step:3000/10000 val_loss:4.3888 train_time:702233ms step_avg:234.08ms +[2025-07-17 10:19:51] [Rank 0] step:3001/10000 train_time:702255ms step_avg:234.01ms +[2025-07-17 10:19:51] [Rank 0] step:3001/10000 train_time:702255ms step_avg:234.01ms +[2025-07-17 10:19:56] [Rank 0] step:3021/10000 train_time:707039ms step_avg:234.04ms +[2025-07-17 10:19:56] [Rank 0] step:3021/10000 train_time:707039ms step_avg:234.04ms +[2025-07-17 10:20:00] [Rank 0] step:3041/10000 train_time:711847ms step_avg:234.08ms +[2025-07-17 10:20:00] [Rank 0] step:3041/10000 train_time:711847ms step_avg:234.08ms +[2025-07-17 10:20:06] [Rank 0] step:3061/10000 train_time:717154ms step_avg:234.29ms +[2025-07-17 10:20:06] [Rank 0] step:3061/10000 train_time:717154ms step_avg:234.29ms +[2025-07-17 10:20:11] 
[Rank 0] step:3081/10000 train_time:721957ms step_avg:234.33ms +[2025-07-17 10:20:11] [Rank 0] step:3081/10000 train_time:721957ms step_avg:234.33ms +[2025-07-17 10:20:15] [Rank 0] step:3101/10000 train_time:726759ms step_avg:234.36ms +[2025-07-17 10:20:15] [Rank 0] step:3101/10000 train_time:726759ms step_avg:234.36ms +[2025-07-17 10:20:20] [Rank 0] step:3121/10000 train_time:731559ms step_avg:234.40ms +[2025-07-17 10:20:20] [Rank 0] step:3121/10000 train_time:731559ms step_avg:234.40ms +[2025-07-17 10:20:26] [Rank 0] PRINT: step:3125/10000 val_loss:4.6514 train_time:732760ms step_avg:234.48ms +[2025-07-17 10:20:26] [Rank 0] PRINT: step:3125/10000 val_loss:4.6514 train_time:732760ms step_avg:234.48ms +[2025-07-17 10:20:30] [Rank 0] step:3141/10000 train_time:736363ms step_avg:234.44ms +[2025-07-17 10:20:30] [Rank 0] step:3141/10000 train_time:736363ms step_avg:234.44ms +[2025-07-17 10:20:34] [Rank 0] step:3161/10000 train_time:741170ms step_avg:234.47ms +[2025-07-17 10:20:34] [Rank 0] step:3161/10000 train_time:741170ms step_avg:234.47ms +[2025-07-17 10:20:39] [Rank 0] step:3181/10000 train_time:745976ms step_avg:234.51ms +[2025-07-17 10:20:39] [Rank 0] step:3181/10000 train_time:745976ms step_avg:234.51ms +[2025-07-17 10:20:44] [Rank 0] step:3201/10000 train_time:750783ms step_avg:234.55ms +[2025-07-17 10:20:44] [Rank 0] step:3201/10000 train_time:750783ms step_avg:234.55ms +[2025-07-17 10:20:49] [Rank 0] step:3221/10000 train_time:755591ms step_avg:234.58ms +[2025-07-17 10:20:49] [Rank 0] step:3221/10000 train_time:755591ms step_avg:234.58ms +[2025-07-17 10:20:54] [Rank 0] step:3241/10000 train_time:760394ms step_avg:234.62ms +[2025-07-17 10:20:54] [Rank 0] step:3241/10000 train_time:760394ms step_avg:234.62ms +[2025-07-17 10:21:00] [Rank 0] PRINT: step:3250/10000 val_loss:4.1543 train_time:762799ms step_avg:234.71ms +[2025-07-17 10:21:00] [Rank 0] PRINT: step:3250/10000 val_loss:4.1543 train_time:762799ms step_avg:234.71ms +[2025-07-17 10:21:03] [Rank 0] 
step:3261/10000 train_time:765193ms step_avg:234.65ms +[2025-07-17 10:21:03] [Rank 0] step:3261/10000 train_time:765193ms step_avg:234.65ms +[2025-07-17 10:21:08] [Rank 0] step:3281/10000 train_time:769992ms step_avg:234.68ms +[2025-07-17 10:21:08] [Rank 0] step:3281/10000 train_time:769992ms step_avg:234.68ms +[2025-07-17 10:21:13] [Rank 0] step:3301/10000 train_time:774791ms step_avg:234.71ms +[2025-07-17 10:21:13] [Rank 0] step:3301/10000 train_time:774791ms step_avg:234.71ms +[2025-07-17 10:21:17] [Rank 0] step:3321/10000 train_time:779590ms step_avg:234.75ms +[2025-07-17 10:21:17] [Rank 0] step:3321/10000 train_time:779590ms step_avg:234.75ms +[2025-07-17 10:21:22] [Rank 0] step:3341/10000 train_time:784393ms step_avg:234.78ms +[2025-07-17 10:21:22] [Rank 0] step:3341/10000 train_time:784393ms step_avg:234.78ms +[2025-07-17 10:21:27] [Rank 0] step:3361/10000 train_time:789191ms step_avg:234.81ms +[2025-07-17 10:21:27] [Rank 0] step:3361/10000 train_time:789191ms step_avg:234.81ms +[2025-07-17 10:21:35] [Rank 0] PRINT: step:3375/10000 val_loss:4.4635 train_time:792791ms step_avg:234.90ms +[2025-07-17 10:21:35] [Rank 0] PRINT: step:3375/10000 val_loss:4.4635 train_time:792791ms step_avg:234.90ms +[2025-07-17 10:21:36] [Rank 0] step:3381/10000 train_time:793987ms step_avg:234.84ms +[2025-07-17 10:21:36] [Rank 0] step:3381/10000 train_time:793987ms step_avg:234.84ms +[2025-07-17 10:21:41] [Rank 0] step:3401/10000 train_time:798786ms step_avg:234.87ms +[2025-07-17 10:21:41] [Rank 0] step:3401/10000 train_time:798786ms step_avg:234.87ms +[2025-07-17 10:21:46] [Rank 0] step:3421/10000 train_time:803583ms step_avg:234.90ms +[2025-07-17 10:21:46] [Rank 0] step:3421/10000 train_time:803583ms step_avg:234.90ms +[2025-07-17 10:21:51] [Rank 0] step:3441/10000 train_time:808380ms step_avg:234.93ms +[2025-07-17 10:21:51] [Rank 0] step:3441/10000 train_time:808380ms step_avg:234.93ms +[2025-07-17 10:21:56] [Rank 0] step:3461/10000 train_time:813178ms step_avg:234.95ms 
+[2025-07-17 10:21:56] [Rank 0] step:3461/10000 train_time:813178ms step_avg:234.95ms +[2025-07-17 10:22:00] [Rank 0] step:3481/10000 train_time:817980ms step_avg:234.98ms +[2025-07-17 10:22:00] [Rank 0] step:3481/10000 train_time:817980ms step_avg:234.98ms +[2025-07-17 10:22:10] [Rank 0] PRINT: step:3500/10000 val_loss:4.4623 train_time:822777ms step_avg:235.08ms +[2025-07-17 10:22:10] [Rank 0] PRINT: step:3500/10000 val_loss:4.4623 train_time:822777ms step_avg:235.08ms +[2025-07-17 10:22:10] [Rank 0] step:3501/10000 train_time:822799ms step_avg:235.02ms +[2025-07-17 10:22:10] [Rank 0] step:3501/10000 train_time:822799ms step_avg:235.02ms +[2025-07-17 10:22:15] [Rank 0] step:3521/10000 train_time:827568ms step_avg:235.04ms +[2025-07-17 10:22:15] [Rank 0] step:3521/10000 train_time:827568ms step_avg:235.04ms +[2025-07-17 10:22:19] [Rank 0] step:3541/10000 train_time:832366ms step_avg:235.07ms +[2025-07-17 10:22:19] [Rank 0] step:3541/10000 train_time:832366ms step_avg:235.07ms +[2025-07-17 10:22:25] [Rank 0] step:3561/10000 train_time:837683ms step_avg:235.24ms +[2025-07-17 10:22:25] [Rank 0] step:3561/10000 train_time:837683ms step_avg:235.24ms +[2025-07-17 10:22:29] [Rank 0] step:3581/10000 train_time:842477ms step_avg:235.26ms +[2025-07-17 10:22:29] [Rank 0] step:3581/10000 train_time:842477ms step_avg:235.26ms +[2025-07-17 10:22:34] [Rank 0] step:3601/10000 train_time:847272ms step_avg:235.29ms +[2025-07-17 10:22:34] [Rank 0] step:3601/10000 train_time:847272ms step_avg:235.29ms +[2025-07-17 10:22:39] [Rank 0] step:3621/10000 train_time:852064ms step_avg:235.31ms +[2025-07-17 10:22:39] [Rank 0] step:3621/10000 train_time:852064ms step_avg:235.31ms +[2025-07-17 10:22:45] [Rank 0] PRINT: step:3625/10000 val_loss:4.4252 train_time:853263ms step_avg:235.38ms +[2025-07-17 10:22:45] [Rank 0] PRINT: step:3625/10000 val_loss:4.4252 train_time:853263ms step_avg:235.38ms +[2025-07-17 10:22:48] [Rank 0] step:3641/10000 train_time:856858ms step_avg:235.34ms +[2025-07-17 
10:22:48] [Rank 0] step:3641/10000 train_time:856858ms step_avg:235.34ms +[2025-07-17 10:22:53] [Rank 0] step:3661/10000 train_time:861652ms step_avg:235.36ms +[2025-07-17 10:22:53] [Rank 0] step:3661/10000 train_time:861652ms step_avg:235.36ms +[2025-07-17 10:22:58] [Rank 0] step:3681/10000 train_time:866447ms step_avg:235.38ms +[2025-07-17 10:22:58] [Rank 0] step:3681/10000 train_time:866447ms step_avg:235.38ms +[2025-07-17 10:23:03] [Rank 0] step:3701/10000 train_time:871242ms step_avg:235.41ms +[2025-07-17 10:23:03] [Rank 0] step:3701/10000 train_time:871242ms step_avg:235.41ms +[2025-07-17 10:23:08] [Rank 0] step:3721/10000 train_time:876099ms step_avg:235.45ms +[2025-07-17 10:23:08] [Rank 0] step:3721/10000 train_time:876099ms step_avg:235.45ms +[2025-07-17 10:23:13] [Rank 0] step:3741/10000 train_time:880980ms step_avg:235.49ms +[2025-07-17 10:23:13] [Rank 0] step:3741/10000 train_time:880980ms step_avg:235.49ms +[2025-07-17 10:23:19] [Rank 0] PRINT: step:3750/10000 val_loss:4.4078 train_time:883419ms step_avg:235.58ms +[2025-07-17 10:23:19] [Rank 0] PRINT: step:3750/10000 val_loss:4.4078 train_time:883419ms step_avg:235.58ms +[2025-07-17 10:23:22] [Rank 0] step:3761/10000 train_time:885856ms step_avg:235.54ms +[2025-07-17 10:23:22] [Rank 0] step:3761/10000 train_time:885856ms step_avg:235.54ms +[2025-07-17 10:23:27] [Rank 0] step:3781/10000 train_time:890736ms step_avg:235.58ms +[2025-07-17 10:23:27] [Rank 0] step:3781/10000 train_time:890736ms step_avg:235.58ms +[2025-07-17 10:23:32] [Rank 0] step:3801/10000 train_time:895611ms step_avg:235.63ms +[2025-07-17 10:23:32] [Rank 0] step:3801/10000 train_time:895611ms step_avg:235.63ms +[2025-07-17 10:23:37] [Rank 0] step:3821/10000 train_time:900487ms step_avg:235.67ms +[2025-07-17 10:23:37] [Rank 0] step:3821/10000 train_time:900487ms step_avg:235.67ms +[2025-07-17 10:23:42] [Rank 0] step:3841/10000 train_time:905366ms step_avg:235.71ms +[2025-07-17 10:23:42] [Rank 0] step:3841/10000 train_time:905366ms 
step_avg:235.71ms +[2025-07-17 10:23:46] [Rank 0] step:3861/10000 train_time:910249ms step_avg:235.75ms +[2025-07-17 10:23:46] [Rank 0] step:3861/10000 train_time:910249ms step_avg:235.75ms +[2025-07-17 10:23:54] [Rank 0] PRINT: step:3875/10000 val_loss:4.4040 train_time:913911ms step_avg:235.85ms +[2025-07-17 10:23:54] [Rank 0] PRINT: step:3875/10000 val_loss:4.4040 train_time:913911ms step_avg:235.85ms +[2025-07-17 10:23:56] [Rank 0] step:3881/10000 train_time:915131ms step_avg:235.80ms +[2025-07-17 10:23:56] [Rank 0] step:3881/10000 train_time:915131ms step_avg:235.80ms +[2025-07-17 10:24:00] [Rank 0] step:3901/10000 train_time:920016ms step_avg:235.84ms +[2025-07-17 10:24:00] [Rank 0] step:3901/10000 train_time:920016ms step_avg:235.84ms +[2025-07-17 10:24:05] [Rank 0] step:3921/10000 train_time:924898ms step_avg:235.88ms +[2025-07-17 10:24:05] [Rank 0] step:3921/10000 train_time:924898ms step_avg:235.88ms +[2025-07-17 10:24:10] [Rank 0] step:3941/10000 train_time:929779ms step_avg:235.92ms +[2025-07-17 10:24:10] [Rank 0] step:3941/10000 train_time:929779ms step_avg:235.92ms +[2025-07-17 10:24:15] [Rank 0] step:3961/10000 train_time:934690ms step_avg:235.97ms +[2025-07-17 10:24:15] [Rank 0] step:3961/10000 train_time:934690ms step_avg:235.97ms +[2025-07-17 10:24:20] [Rank 0] step:3981/10000 train_time:939544ms step_avg:236.01ms +[2025-07-17 10:24:20] [Rank 0] step:3981/10000 train_time:939544ms step_avg:236.01ms +[2025-07-17 10:24:29] [Rank 0] PRINT: step:4000/10000 val_loss:4.5395 train_time:944416ms step_avg:236.10ms +[2025-07-17 10:24:29] [Rank 0] PRINT: step:4000/10000 val_loss:4.5395 train_time:944416ms step_avg:236.10ms +[2025-07-17 10:24:29] [Rank 0] step:4001/10000 train_time:944437ms step_avg:236.05ms +[2025-07-17 10:24:29] [Rank 0] step:4001/10000 train_time:944437ms step_avg:236.05ms +[2025-07-17 10:24:34] [Rank 0] step:4021/10000 train_time:949297ms step_avg:236.08ms +[2025-07-17 10:24:34] [Rank 0] step:4021/10000 train_time:949297ms 
step_avg:236.08ms +[2025-07-17 10:24:39] [Rank 0] step:4041/10000 train_time:954178ms step_avg:236.12ms +[2025-07-17 10:24:39] [Rank 0] step:4041/10000 train_time:954178ms step_avg:236.12ms +[2025-07-17 10:24:44] [Rank 0] step:4061/10000 train_time:959060ms step_avg:236.16ms +[2025-07-17 10:24:44] [Rank 0] step:4061/10000 train_time:959060ms step_avg:236.16ms +[2025-07-17 10:24:50] [Rank 0] step:4081/10000 train_time:964445ms step_avg:236.33ms +[2025-07-17 10:24:50] [Rank 0] step:4081/10000 train_time:964445ms step_avg:236.33ms +[2025-07-17 10:24:54] [Rank 0] step:4101/10000 train_time:969331ms step_avg:236.36ms +[2025-07-17 10:24:54] [Rank 0] step:4101/10000 train_time:969331ms step_avg:236.36ms +[2025-07-17 10:24:59] [Rank 0] step:4121/10000 train_time:974212ms step_avg:236.40ms +[2025-07-17 10:24:59] [Rank 0] step:4121/10000 train_time:974212ms step_avg:236.40ms +[2025-07-17 10:25:05] [Rank 0] PRINT: step:4125/10000 val_loss:4.4374 train_time:975435ms step_avg:236.47ms +[2025-07-17 10:25:05] [Rank 0] PRINT: step:4125/10000 val_loss:4.4374 train_time:975435ms step_avg:236.47ms +[2025-07-17 10:25:09] [Rank 0] step:4141/10000 train_time:979093ms step_avg:236.44ms +[2025-07-17 10:25:09] [Rank 0] step:4141/10000 train_time:979093ms step_avg:236.44ms +[2025-07-17 10:25:14] [Rank 0] step:4161/10000 train_time:983973ms step_avg:236.48ms +[2025-07-17 10:25:14] [Rank 0] step:4161/10000 train_time:983973ms step_avg:236.48ms +[2025-07-17 10:25:19] [Rank 0] step:4181/10000 train_time:988852ms step_avg:236.51ms +[2025-07-17 10:25:19] [Rank 0] step:4181/10000 train_time:988852ms step_avg:236.51ms +[2025-07-17 10:25:23] [Rank 0] step:4201/10000 train_time:993736ms step_avg:236.55ms +[2025-07-17 10:25:23] [Rank 0] step:4201/10000 train_time:993736ms step_avg:236.55ms +[2025-07-17 10:25:28] [Rank 0] step:4221/10000 train_time:998619ms step_avg:236.58ms +[2025-07-17 10:25:28] [Rank 0] step:4221/10000 train_time:998619ms step_avg:236.58ms +[2025-07-17 10:25:33] [Rank 0] 
step:4241/10000 train_time:1003504ms step_avg:236.62ms +[2025-07-17 10:25:33] [Rank 0] step:4241/10000 train_time:1003504ms step_avg:236.62ms +[2025-07-17 10:25:40] [Rank 0] PRINT: step:4250/10000 val_loss:4.4419 train_time:1005941ms step_avg:236.69ms +[2025-07-17 10:25:40] [Rank 0] PRINT: step:4250/10000 val_loss:4.4419 train_time:1005941ms step_avg:236.69ms +[2025-07-17 10:25:43] [Rank 0] step:4261/10000 train_time:1008382ms step_avg:236.65ms +[2025-07-17 10:25:43] [Rank 0] step:4261/10000 train_time:1008382ms step_avg:236.65ms +[2025-07-17 10:25:48] [Rank 0] step:4281/10000 train_time:1013264ms step_avg:236.69ms +[2025-07-17 10:25:48] [Rank 0] step:4281/10000 train_time:1013264ms step_avg:236.69ms +[2025-07-17 10:25:52] [Rank 0] step:4301/10000 train_time:1018148ms step_avg:236.72ms +[2025-07-17 10:25:52] [Rank 0] step:4301/10000 train_time:1018148ms step_avg:236.72ms +[2025-07-17 10:25:57] [Rank 0] step:4321/10000 train_time:1023040ms step_avg:236.76ms +[2025-07-17 10:25:57] [Rank 0] step:4321/10000 train_time:1023040ms step_avg:236.76ms +[2025-07-17 10:26:02] [Rank 0] step:4341/10000 train_time:1027924ms step_avg:236.79ms +[2025-07-17 10:26:02] [Rank 0] step:4341/10000 train_time:1027924ms step_avg:236.79ms +[2025-07-17 10:26:07] [Rank 0] step:4361/10000 train_time:1032809ms step_avg:236.83ms +[2025-07-17 10:26:07] [Rank 0] step:4361/10000 train_time:1032809ms step_avg:236.83ms +[2025-07-17 10:26:15] [Rank 0] PRINT: step:4375/10000 val_loss:4.5215 train_time:1036469ms step_avg:236.91ms +[2025-07-17 10:26:15] [Rank 0] PRINT: step:4375/10000 val_loss:4.5215 train_time:1036469ms step_avg:236.91ms +[2025-07-17 10:26:17] [Rank 0] step:4381/10000 train_time:1037690ms step_avg:236.86ms +[2025-07-17 10:26:17] [Rank 0] step:4381/10000 train_time:1037690ms step_avg:236.86ms +[2025-07-17 10:26:22] [Rank 0] step:4401/10000 train_time:1042571ms step_avg:236.89ms +[2025-07-17 10:26:22] [Rank 0] step:4401/10000 train_time:1042571ms step_avg:236.89ms +[2025-07-17 10:26:26] 
[Rank 0] step:4421/10000 train_time:1047455ms step_avg:236.93ms +[2025-07-17 10:26:26] [Rank 0] step:4421/10000 train_time:1047455ms step_avg:236.93ms +[2025-07-17 10:26:31] [Rank 0] step:4441/10000 train_time:1052337ms step_avg:236.96ms +[2025-07-17 10:26:31] [Rank 0] step:4441/10000 train_time:1052337ms step_avg:236.96ms +[2025-07-17 10:26:36] [Rank 0] step:4461/10000 train_time:1057235ms step_avg:236.99ms +[2025-07-17 10:26:36] [Rank 0] step:4461/10000 train_time:1057235ms step_avg:236.99ms +[2025-07-17 10:26:41] [Rank 0] step:4481/10000 train_time:1062132ms step_avg:237.03ms +[2025-07-17 10:26:41] [Rank 0] step:4481/10000 train_time:1062132ms step_avg:237.03ms +[2025-07-17 10:26:50] [Rank 0] PRINT: step:4500/10000 val_loss:4.5004 train_time:1067028ms step_avg:237.12ms +[2025-07-17 10:26:50] [Rank 0] PRINT: step:4500/10000 val_loss:4.5004 train_time:1067028ms step_avg:237.12ms +[2025-07-17 10:26:51] [Rank 0] step:4501/10000 train_time:1067051ms step_avg:237.07ms +[2025-07-17 10:26:51] [Rank 0] step:4501/10000 train_time:1067051ms step_avg:237.07ms +[2025-07-17 10:26:56] [Rank 0] step:4521/10000 train_time:1071926ms step_avg:237.10ms +[2025-07-17 10:26:56] [Rank 0] step:4521/10000 train_time:1071926ms step_avg:237.10ms +[2025-07-17 10:27:00] [Rank 0] step:4541/10000 train_time:1076819ms step_avg:237.13ms +[2025-07-17 10:27:00] [Rank 0] step:4541/10000 train_time:1076819ms step_avg:237.13ms +[2025-07-17 10:27:05] [Rank 0] step:4561/10000 train_time:1081711ms step_avg:237.17ms +[2025-07-17 10:27:05] [Rank 0] step:4561/10000 train_time:1081711ms step_avg:237.17ms +[2025-07-17 10:27:10] [Rank 0] step:4581/10000 train_time:1086704ms step_avg:237.22ms +[2025-07-17 10:27:10] [Rank 0] step:4581/10000 train_time:1086704ms step_avg:237.22ms +[2025-07-17 10:27:15] [Rank 0] step:4601/10000 train_time:1091602ms step_avg:237.25ms +[2025-07-17 10:27:15] [Rank 0] step:4601/10000 train_time:1091602ms step_avg:237.25ms +[2025-07-17 10:27:20] [Rank 0] step:4621/10000 
train_time:1096497ms step_avg:237.29ms +[2025-07-17 10:27:20] [Rank 0] step:4621/10000 train_time:1096497ms step_avg:237.29ms +[2025-07-17 10:27:26] [Rank 0] PRINT: step:4625/10000 val_loss:4.4321 train_time:1097725ms step_avg:237.35ms +[2025-07-17 10:27:26] [Rank 0] PRINT: step:4625/10000 val_loss:4.4321 train_time:1097725ms step_avg:237.35ms +[2025-07-17 10:27:30] [Rank 0] step:4641/10000 train_time:1101395ms step_avg:237.32ms +[2025-07-17 10:27:30] [Rank 0] step:4641/10000 train_time:1101395ms step_avg:237.32ms +[2025-07-17 10:27:35] [Rank 0] step:4661/10000 train_time:1106301ms step_avg:237.35ms +[2025-07-17 10:27:35] [Rank 0] step:4661/10000 train_time:1106301ms step_avg:237.35ms +[2025-07-17 10:27:39] [Rank 0] step:4681/10000 train_time:1111200ms step_avg:237.39ms +[2025-07-17 10:27:39] [Rank 0] step:4681/10000 train_time:1111200ms step_avg:237.39ms +[2025-07-17 10:27:44] [Rank 0] step:4701/10000 train_time:1116103ms step_avg:237.42ms +[2025-07-17 10:27:44] [Rank 0] step:4701/10000 train_time:1116103ms step_avg:237.42ms +[2025-07-17 10:27:49] [Rank 0] step:4721/10000 train_time:1121002ms step_avg:237.45ms +[2025-07-17 10:27:49] [Rank 0] step:4721/10000 train_time:1121002ms step_avg:237.45ms +[2025-07-17 10:27:54] [Rank 0] step:4741/10000 train_time:1125901ms step_avg:237.48ms +[2025-07-17 10:27:54] [Rank 0] step:4741/10000 train_time:1125901ms step_avg:237.48ms +[2025-07-17 10:28:01] [Rank 0] PRINT: step:4750/10000 val_loss:4.5470 train_time:1128356ms step_avg:237.55ms +[2025-07-17 10:28:01] [Rank 0] PRINT: step:4750/10000 val_loss:4.5470 train_time:1128356ms step_avg:237.55ms +[2025-07-17 10:28:04] [Rank 0] step:4761/10000 train_time:1130803ms step_avg:237.51ms +[2025-07-17 10:28:04] [Rank 0] step:4761/10000 train_time:1130803ms step_avg:237.51ms +[2025-07-17 10:28:09] [Rank 0] step:4781/10000 train_time:1135699ms step_avg:237.54ms +[2025-07-17 10:28:09] [Rank 0] step:4781/10000 train_time:1135699ms step_avg:237.54ms +[2025-07-17 10:28:14] [Rank 0] 
step:4801/10000 train_time:1140593ms step_avg:237.57ms +[2025-07-17 10:28:14] [Rank 0] step:4801/10000 train_time:1140593ms step_avg:237.57ms +[2025-07-17 10:28:18] [Rank 0] step:4821/10000 train_time:1145493ms step_avg:237.60ms +[2025-07-17 10:28:18] [Rank 0] step:4821/10000 train_time:1145493ms step_avg:237.60ms +[2025-07-17 10:28:23] [Rank 0] step:4841/10000 train_time:1150392ms step_avg:237.64ms +[2025-07-17 10:28:23] [Rank 0] step:4841/10000 train_time:1150392ms step_avg:237.64ms +[2025-07-17 10:28:28] [Rank 0] step:4861/10000 train_time:1155287ms step_avg:237.66ms +[2025-07-17 10:28:28] [Rank 0] step:4861/10000 train_time:1155287ms step_avg:237.66ms +[2025-07-17 10:28:36] [Rank 0] PRINT: step:4875/10000 val_loss:4.4063 train_time:1158961ms step_avg:237.74ms +[2025-07-17 10:28:36] [Rank 0] PRINT: step:4875/10000 val_loss:4.4063 train_time:1158961ms step_avg:237.74ms +[2025-07-17 10:28:38] [Rank 0] step:4881/10000 train_time:1160186ms step_avg:237.69ms +[2025-07-17 10:28:38] [Rank 0] step:4881/10000 train_time:1160186ms step_avg:237.69ms +[2025-07-17 10:28:43] [Rank 0] step:4901/10000 train_time:1165087ms step_avg:237.72ms +[2025-07-17 10:28:43] [Rank 0] step:4901/10000 train_time:1165087ms step_avg:237.72ms +[2025-07-17 10:28:48] [Rank 0] step:4921/10000 train_time:1169980ms step_avg:237.75ms +[2025-07-17 10:28:48] [Rank 0] step:4921/10000 train_time:1169980ms step_avg:237.75ms +[2025-07-17 10:28:53] [Rank 0] step:4941/10000 train_time:1174883ms step_avg:237.78ms +[2025-07-17 10:28:53] [Rank 0] step:4941/10000 train_time:1174883ms step_avg:237.78ms +[2025-07-17 10:28:57] [Rank 0] step:4961/10000 train_time:1179774ms step_avg:237.81ms +[2025-07-17 10:28:57] [Rank 0] step:4961/10000 train_time:1179774ms step_avg:237.81ms +[2025-07-17 10:29:02] [Rank 0] step:4981/10000 train_time:1184670ms step_avg:237.84ms +[2025-07-17 10:29:02] [Rank 0] step:4981/10000 train_time:1184670ms step_avg:237.84ms +[2025-07-17 10:29:12] [Rank 0] PRINT: step:5000/10000 val_loss:4.6000 
train_time:1189571ms step_avg:237.91ms +[2025-07-17 10:29:12] [Rank 0] PRINT: step:5000/10000 val_loss:4.6000 train_time:1189571ms step_avg:237.91ms +[2025-07-17 10:29:12] [Rank 0] step:5001/10000 train_time:1189593ms step_avg:237.87ms +[2025-07-17 10:29:12] [Rank 0] step:5001/10000 train_time:1189593ms step_avg:237.87ms +[2025-07-17 10:29:17] [Rank 0] step:5021/10000 train_time:1194471ms step_avg:237.89ms +[2025-07-17 10:29:17] [Rank 0] step:5021/10000 train_time:1194471ms step_avg:237.89ms +[2025-07-17 10:29:22] [Rank 0] step:5041/10000 train_time:1199370ms step_avg:237.92ms +[2025-07-17 10:29:22] [Rank 0] step:5041/10000 train_time:1199370ms step_avg:237.92ms +[2025-07-17 10:29:27] [Rank 0] step:5061/10000 train_time:1204264ms step_avg:237.95ms +[2025-07-17 10:29:27] [Rank 0] step:5061/10000 train_time:1204264ms step_avg:237.95ms +[2025-07-17 10:29:32] [Rank 0] step:5081/10000 train_time:1209698ms step_avg:238.08ms +[2025-07-17 10:29:32] [Rank 0] step:5081/10000 train_time:1209698ms step_avg:238.08ms +[2025-07-17 10:29:37] [Rank 0] step:5101/10000 train_time:1214574ms step_avg:238.11ms +[2025-07-17 10:29:37] [Rank 0] step:5101/10000 train_time:1214574ms step_avg:238.11ms +[2025-07-17 10:29:42] [Rank 0] step:5121/10000 train_time:1219467ms step_avg:238.13ms +[2025-07-17 10:29:42] [Rank 0] step:5121/10000 train_time:1219467ms step_avg:238.13ms +[2025-07-17 10:29:47] [Rank 0] PRINT: step:5125/10000 val_loss:4.4989 train_time:1220694ms step_avg:238.18ms +[2025-07-17 10:29:47] [Rank 0] PRINT: step:5125/10000 val_loss:4.4989 train_time:1220694ms step_avg:238.18ms +[2025-07-17 10:29:51] [Rank 0] step:5141/10000 train_time:1224364ms step_avg:238.16ms +[2025-07-17 10:29:51] [Rank 0] step:5141/10000 train_time:1224364ms step_avg:238.16ms +[2025-07-17 10:29:56] [Rank 0] step:5161/10000 train_time:1229263ms step_avg:238.18ms +[2025-07-17 10:29:56] [Rank 0] step:5161/10000 train_time:1229263ms step_avg:238.18ms +[2025-07-17 10:30:01] [Rank 0] step:5181/10000 
train_time:1234164ms step_avg:238.21ms +[2025-07-17 10:30:01] [Rank 0] step:5181/10000 train_time:1234164ms step_avg:238.21ms +[2025-07-17 10:30:06] [Rank 0] step:5201/10000 train_time:1239114ms step_avg:238.25ms +[2025-07-17 10:30:06] [Rank 0] step:5201/10000 train_time:1239114ms step_avg:238.25ms +[2025-07-17 10:30:11] [Rank 0] step:5221/10000 train_time:1244093ms step_avg:238.29ms +[2025-07-17 10:30:11] [Rank 0] step:5221/10000 train_time:1244093ms step_avg:238.29ms +[2025-07-17 10:30:16] [Rank 0] step:5241/10000 train_time:1249072ms step_avg:238.33ms +[2025-07-17 10:30:16] [Rank 0] step:5241/10000 train_time:1249072ms step_avg:238.33ms +[2025-07-17 10:30:22] [Rank 0] PRINT: step:5250/10000 val_loss:4.5377 train_time:1251555ms step_avg:238.39ms +[2025-07-17 10:30:22] [Rank 0] PRINT: step:5250/10000 val_loss:4.5377 train_time:1251555ms step_avg:238.39ms +[2025-07-17 10:30:25] [Rank 0] step:5261/10000 train_time:1254037ms step_avg:238.36ms +[2025-07-17 10:30:25] [Rank 0] step:5261/10000 train_time:1254037ms step_avg:238.36ms +[2025-07-17 10:30:30] [Rank 0] step:5281/10000 train_time:1259011ms step_avg:238.40ms +[2025-07-17 10:30:30] [Rank 0] step:5281/10000 train_time:1259011ms step_avg:238.40ms +[2025-07-17 10:30:35] [Rank 0] step:5301/10000 train_time:1263981ms step_avg:238.44ms +[2025-07-17 10:30:35] [Rank 0] step:5301/10000 train_time:1263981ms step_avg:238.44ms +[2025-07-17 10:30:40] [Rank 0] step:5321/10000 train_time:1268959ms step_avg:238.48ms +[2025-07-17 10:30:40] [Rank 0] step:5321/10000 train_time:1268959ms step_avg:238.48ms +[2025-07-17 10:30:45] [Rank 0] step:5341/10000 train_time:1273935ms step_avg:238.52ms +[2025-07-17 10:30:45] [Rank 0] step:5341/10000 train_time:1273935ms step_avg:238.52ms +[2025-07-17 10:30:50] [Rank 0] step:5361/10000 train_time:1278908ms step_avg:238.56ms +[2025-07-17 10:30:50] [Rank 0] step:5361/10000 train_time:1278908ms step_avg:238.56ms +[2025-07-17 10:30:58] [Rank 0] PRINT: step:5375/10000 val_loss:4.4655 
train_time:1282639ms step_avg:238.63ms +[2025-07-17 10:30:58] [Rank 0] PRINT: step:5375/10000 val_loss:4.4655 train_time:1282639ms step_avg:238.63ms +[2025-07-17 10:30:59] [Rank 0] step:5381/10000 train_time:1283883ms step_avg:238.60ms +[2025-07-17 10:30:59] [Rank 0] step:5381/10000 train_time:1283883ms step_avg:238.60ms +[2025-07-17 10:31:04] [Rank 0] step:5401/10000 train_time:1288856ms step_avg:238.63ms +[2025-07-17 10:31:04] [Rank 0] step:5401/10000 train_time:1288856ms step_avg:238.63ms +[2025-07-17 10:31:09] [Rank 0] step:5421/10000 train_time:1293836ms step_avg:238.67ms +[2025-07-17 10:31:09] [Rank 0] step:5421/10000 train_time:1293836ms step_avg:238.67ms +[2025-07-17 10:31:14] [Rank 0] step:5441/10000 train_time:1298806ms step_avg:238.71ms +[2025-07-17 10:31:14] [Rank 0] step:5441/10000 train_time:1298806ms step_avg:238.71ms +[2025-07-17 10:31:19] [Rank 0] step:5461/10000 train_time:1303781ms step_avg:238.74ms +[2025-07-17 10:31:19] [Rank 0] step:5461/10000 train_time:1303781ms step_avg:238.74ms +[2025-07-17 10:31:24] [Rank 0] step:5481/10000 train_time:1308759ms step_avg:238.78ms +[2025-07-17 10:31:24] [Rank 0] step:5481/10000 train_time:1308759ms step_avg:238.78ms +[2025-07-17 10:31:34] [Rank 0] PRINT: step:5500/10000 val_loss:4.4576 train_time:1313730ms step_avg:238.86ms +[2025-07-17 10:31:34] [Rank 0] PRINT: step:5500/10000 val_loss:4.4576 train_time:1313730ms step_avg:238.86ms +[2025-07-17 10:31:34] [Rank 0] step:5501/10000 train_time:1313751ms step_avg:238.82ms +[2025-07-17 10:31:34] [Rank 0] step:5501/10000 train_time:1313751ms step_avg:238.82ms +[2025-07-17 10:31:39] [Rank 0] step:5521/10000 train_time:1318695ms step_avg:238.85ms +[2025-07-17 10:31:39] [Rank 0] step:5521/10000 train_time:1318695ms step_avg:238.85ms +[2025-07-17 10:31:44] [Rank 0] step:5541/10000 train_time:1323667ms step_avg:238.89ms +[2025-07-17 10:31:44] [Rank 0] step:5541/10000 train_time:1323667ms step_avg:238.89ms +[2025-07-17 10:31:49] [Rank 0] step:5561/10000 
train_time:1328632ms step_avg:238.92ms +[2025-07-17 10:31:49] [Rank 0] step:5561/10000 train_time:1328632ms step_avg:238.92ms +[2025-07-17 10:31:54] [Rank 0] step:5581/10000 train_time:1333601ms step_avg:238.95ms +[2025-07-17 10:31:54] [Rank 0] step:5581/10000 train_time:1333601ms step_avg:238.95ms +[2025-07-17 10:31:59] [Rank 0] step:5601/10000 train_time:1339080ms step_avg:239.08ms +[2025-07-17 10:31:59] [Rank 0] step:5601/10000 train_time:1339080ms step_avg:239.08ms +[2025-07-17 10:32:04] [Rank 0] step:5621/10000 train_time:1344049ms step_avg:239.11ms +[2025-07-17 10:32:04] [Rank 0] step:5621/10000 train_time:1344049ms step_avg:239.11ms +[2025-07-17 10:32:10] [Rank 0] PRINT: step:5625/10000 val_loss:4.5463 train_time:1345291ms step_avg:239.16ms +[2025-07-17 10:32:10] [Rank 0] PRINT: step:5625/10000 val_loss:4.5463 train_time:1345291ms step_avg:239.16ms +[2025-07-17 10:32:14] [Rank 0] step:5641/10000 train_time:1349017ms step_avg:239.15ms +[2025-07-17 10:32:14] [Rank 0] step:5641/10000 train_time:1349017ms step_avg:239.15ms +[2025-07-17 10:32:19] [Rank 0] step:5661/10000 train_time:1353990ms step_avg:239.18ms +[2025-07-17 10:32:19] [Rank 0] step:5661/10000 train_time:1353990ms step_avg:239.18ms +[2025-07-17 10:32:24] [Rank 0] step:5681/10000 train_time:1358963ms step_avg:239.21ms +[2025-07-17 10:32:24] [Rank 0] step:5681/10000 train_time:1358963ms step_avg:239.21ms +[2025-07-17 10:32:29] [Rank 0] step:5701/10000 train_time:1363934ms step_avg:239.24ms +[2025-07-17 10:32:29] [Rank 0] step:5701/10000 train_time:1363934ms step_avg:239.24ms +[2025-07-17 10:32:34] [Rank 0] step:5721/10000 train_time:1368900ms step_avg:239.28ms +[2025-07-17 10:32:34] [Rank 0] step:5721/10000 train_time:1368900ms step_avg:239.28ms +[2025-07-17 10:32:39] [Rank 0] step:5741/10000 train_time:1373878ms step_avg:239.31ms +[2025-07-17 10:32:39] [Rank 0] step:5741/10000 train_time:1373878ms step_avg:239.31ms +[2025-07-17 10:32:46] [Rank 0] PRINT: step:5750/10000 val_loss:4.5571 
train_time:1376362ms step_avg:239.37ms +[2025-07-17 10:32:46] [Rank 0] PRINT: step:5750/10000 val_loss:4.5571 train_time:1376362ms step_avg:239.37ms +[2025-07-17 10:32:48] [Rank 0] step:5761/10000 train_time:1378846ms step_avg:239.34ms +[2025-07-17 10:32:48] [Rank 0] step:5761/10000 train_time:1378846ms step_avg:239.34ms +[2025-07-17 10:32:53] [Rank 0] step:5781/10000 train_time:1383816ms step_avg:239.37ms +[2025-07-17 10:32:53] [Rank 0] step:5781/10000 train_time:1383816ms step_avg:239.37ms +[2025-07-17 10:32:58] [Rank 0] step:5801/10000 train_time:1388780ms step_avg:239.40ms +[2025-07-17 10:32:58] [Rank 0] step:5801/10000 train_time:1388780ms step_avg:239.40ms +[2025-07-17 10:33:03] [Rank 0] step:5821/10000 train_time:1393750ms step_avg:239.43ms +[2025-07-17 10:33:03] [Rank 0] step:5821/10000 train_time:1393750ms step_avg:239.43ms +[2025-07-17 10:33:08] [Rank 0] step:5841/10000 train_time:1398719ms step_avg:239.47ms +[2025-07-17 10:33:08] [Rank 0] step:5841/10000 train_time:1398719ms step_avg:239.47ms +[2025-07-17 10:33:13] [Rank 0] step:5861/10000 train_time:1403686ms step_avg:239.50ms +[2025-07-17 10:33:13] [Rank 0] step:5861/10000 train_time:1403686ms step_avg:239.50ms +[2025-07-17 10:33:21] [Rank 0] PRINT: step:5875/10000 val_loss:4.5742 train_time:1407410ms step_avg:239.56ms +[2025-07-17 10:33:21] [Rank 0] PRINT: step:5875/10000 val_loss:4.5742 train_time:1407410ms step_avg:239.56ms +[2025-07-17 10:33:23] [Rank 0] step:5881/10000 train_time:1408653ms step_avg:239.53ms +[2025-07-17 10:33:23] [Rank 0] step:5881/10000 train_time:1408653ms step_avg:239.53ms +[2025-07-17 10:33:28] [Rank 0] step:5901/10000 train_time:1413625ms step_avg:239.56ms +[2025-07-17 10:33:28] [Rank 0] step:5901/10000 train_time:1413625ms step_avg:239.56ms +[2025-07-17 10:33:33] [Rank 0] step:5921/10000 train_time:1418599ms step_avg:239.59ms +[2025-07-17 10:33:33] [Rank 0] step:5921/10000 train_time:1418599ms step_avg:239.59ms +[2025-07-17 10:33:38] [Rank 0] step:5941/10000 
train_time:1423586ms step_avg:239.62ms +[2025-07-17 10:33:38] [Rank 0] step:5941/10000 train_time:1423586ms step_avg:239.62ms +[2025-07-17 10:33:43] [Rank 0] step:5961/10000 train_time:1428570ms step_avg:239.65ms +[2025-07-17 10:33:43] [Rank 0] step:5961/10000 train_time:1428570ms step_avg:239.65ms +[2025-07-17 10:33:48] [Rank 0] step:5981/10000 train_time:1433548ms step_avg:239.68ms +[2025-07-17 10:33:48] [Rank 0] step:5981/10000 train_time:1433548ms step_avg:239.68ms +[2025-07-17 10:33:57] [Rank 0] PRINT: step:6000/10000 val_loss:4.5157 train_time:1438534ms step_avg:239.76ms +[2025-07-17 10:33:57] [Rank 0] PRINT: step:6000/10000 val_loss:4.5157 train_time:1438534ms step_avg:239.76ms +[2025-07-17 10:33:57] [Rank 0] step:6001/10000 train_time:1438555ms step_avg:239.72ms +[2025-07-17 10:33:57] [Rank 0] step:6001/10000 train_time:1438555ms step_avg:239.72ms +[2025-07-17 10:34:02] [Rank 0] step:6021/10000 train_time:1443517ms step_avg:239.75ms +[2025-07-17 10:34:02] [Rank 0] step:6021/10000 train_time:1443517ms step_avg:239.75ms +[2025-07-17 10:34:07] [Rank 0] step:6041/10000 train_time:1448499ms step_avg:239.78ms +[2025-07-17 10:34:07] [Rank 0] step:6041/10000 train_time:1448499ms step_avg:239.78ms +[2025-07-17 10:34:12] [Rank 0] step:6061/10000 train_time:1453471ms step_avg:239.81ms +[2025-07-17 10:34:12] [Rank 0] step:6061/10000 train_time:1453471ms step_avg:239.81ms +[2025-07-17 10:34:17] [Rank 0] step:6081/10000 train_time:1458453ms step_avg:239.84ms +[2025-07-17 10:34:17] [Rank 0] step:6081/10000 train_time:1458453ms step_avg:239.84ms +[2025-07-17 10:34:22] [Rank 0] step:6101/10000 train_time:1463506ms step_avg:239.88ms +[2025-07-17 10:34:22] [Rank 0] step:6101/10000 train_time:1463506ms step_avg:239.88ms +[2025-07-17 10:34:27] [Rank 0] step:6121/10000 train_time:1468491ms step_avg:239.91ms +[2025-07-17 10:34:27] [Rank 0] step:6121/10000 train_time:1468491ms step_avg:239.91ms +[2025-07-17 10:34:33] [Rank 0] PRINT: step:6125/10000 val_loss:4.5479 
train_time:1469738ms step_avg:239.96ms +[2025-07-17 10:34:33] [Rank 0] PRINT: step:6125/10000 val_loss:4.5479 train_time:1469738ms step_avg:239.96ms +[2025-07-17 10:34:37] [Rank 0] step:6141/10000 train_time:1473475ms step_avg:239.94ms +[2025-07-17 10:34:37] [Rank 0] step:6141/10000 train_time:1473475ms step_avg:239.94ms +[2025-07-17 10:34:42] [Rank 0] step:6161/10000 train_time:1478452ms step_avg:239.97ms +[2025-07-17 10:34:42] [Rank 0] step:6161/10000 train_time:1478452ms step_avg:239.97ms +[2025-07-17 10:34:47] [Rank 0] step:6181/10000 train_time:1483440ms step_avg:240.00ms +[2025-07-17 10:34:47] [Rank 0] step:6181/10000 train_time:1483440ms step_avg:240.00ms +[2025-07-17 10:34:52] [Rank 0] step:6201/10000 train_time:1488433ms step_avg:240.03ms +[2025-07-17 10:34:52] [Rank 0] step:6201/10000 train_time:1488433ms step_avg:240.03ms +[2025-07-17 10:34:57] [Rank 0] step:6221/10000 train_time:1493417ms step_avg:240.06ms +[2025-07-17 10:34:57] [Rank 0] step:6221/10000 train_time:1493417ms step_avg:240.06ms +[2025-07-17 10:35:02] [Rank 0] step:6241/10000 train_time:1498404ms step_avg:240.09ms +[2025-07-17 10:35:02] [Rank 0] step:6241/10000 train_time:1498404ms step_avg:240.09ms +[2025-07-17 10:35:09] [Rank 0] PRINT: step:6250/10000 val_loss:4.4744 train_time:1500897ms step_avg:240.14ms +[2025-07-17 10:35:09] [Rank 0] PRINT: step:6250/10000 val_loss:4.4744 train_time:1500897ms step_avg:240.14ms +[2025-07-17 10:35:12] [Rank 0] step:6261/10000 train_time:1503387ms step_avg:240.12ms +[2025-07-17 10:35:12] [Rank 0] step:6261/10000 train_time:1503387ms step_avg:240.12ms +[2025-07-17 10:35:17] [Rank 0] step:6281/10000 train_time:1508381ms step_avg:240.15ms +[2025-07-17 10:35:17] [Rank 0] step:6281/10000 train_time:1508381ms step_avg:240.15ms +[2025-07-17 10:35:22] [Rank 0] step:6301/10000 train_time:1513361ms step_avg:240.18ms +[2025-07-17 10:35:22] [Rank 0] step:6301/10000 train_time:1513361ms step_avg:240.18ms +[2025-07-17 10:35:27] [Rank 0] step:6321/10000 
train_time:1518345ms step_avg:240.21ms +[2025-07-17 10:35:27] [Rank 0] step:6321/10000 train_time:1518345ms step_avg:240.21ms +[2025-07-17 10:35:32] [Rank 0] step:6341/10000 train_time:1523334ms step_avg:240.24ms +[2025-07-17 10:35:32] [Rank 0] step:6341/10000 train_time:1523334ms step_avg:240.24ms +[2025-07-17 10:35:37] [Rank 0] step:6361/10000 train_time:1528308ms step_avg:240.26ms +[2025-07-17 10:35:37] [Rank 0] step:6361/10000 train_time:1528308ms step_avg:240.26ms +[2025-07-17 10:35:45] [Rank 0] PRINT: step:6375/10000 val_loss:4.4706 train_time:1532037ms step_avg:240.32ms +[2025-07-17 10:35:45] [Rank 0] PRINT: step:6375/10000 val_loss:4.4706 train_time:1532037ms step_avg:240.32ms +[2025-07-17 10:35:46] [Rank 0] step:6381/10000 train_time:1533280ms step_avg:240.29ms +[2025-07-17 10:35:46] [Rank 0] step:6381/10000 train_time:1533280ms step_avg:240.29ms +[2025-07-17 10:35:51] [Rank 0] step:6401/10000 train_time:1538246ms step_avg:240.31ms +[2025-07-17 10:35:51] [Rank 0] step:6401/10000 train_time:1538246ms step_avg:240.31ms +[2025-07-17 10:35:56] [Rank 0] step:6421/10000 train_time:1543214ms step_avg:240.34ms +[2025-07-17 10:35:56] [Rank 0] step:6421/10000 train_time:1543214ms step_avg:240.34ms +[2025-07-17 10:36:01] [Rank 0] step:6441/10000 train_time:1548182ms step_avg:240.36ms +[2025-07-17 10:36:01] [Rank 0] step:6441/10000 train_time:1548182ms step_avg:240.36ms +[2025-07-17 10:36:06] [Rank 0] step:6461/10000 train_time:1553163ms step_avg:240.39ms +[2025-07-17 10:36:06] [Rank 0] step:6461/10000 train_time:1553163ms step_avg:240.39ms +[2025-07-17 10:36:11] [Rank 0] step:6481/10000 train_time:1558135ms step_avg:240.42ms +[2025-07-17 10:36:11] [Rank 0] step:6481/10000 train_time:1558135ms step_avg:240.42ms +[2025-07-17 10:36:20] [Rank 0] PRINT: step:6500/10000 val_loss:4.5109 train_time:1563101ms step_avg:240.48ms +[2025-07-17 10:36:20] [Rank 0] PRINT: step:6500/10000 val_loss:4.5109 train_time:1563101ms step_avg:240.48ms +[2025-07-17 10:36:21] [Rank 0] 
step:6501/10000 train_time:1563122ms step_avg:240.44ms +[2025-07-17 10:36:21] [Rank 0] step:6501/10000 train_time:1563122ms step_avg:240.44ms +[2025-07-17 10:36:26] [Rank 0] step:6521/10000 train_time:1568074ms step_avg:240.47ms +[2025-07-17 10:36:26] [Rank 0] step:6521/10000 train_time:1568074ms step_avg:240.47ms +[2025-07-17 10:36:31] [Rank 0] step:6541/10000 train_time:1573048ms step_avg:240.49ms +[2025-07-17 10:36:31] [Rank 0] step:6541/10000 train_time:1573048ms step_avg:240.49ms +[2025-07-17 10:36:36] [Rank 0] step:6561/10000 train_time:1578032ms step_avg:240.52ms +[2025-07-17 10:36:36] [Rank 0] step:6561/10000 train_time:1578032ms step_avg:240.52ms +[2025-07-17 10:36:41] [Rank 0] step:6581/10000 train_time:1583007ms step_avg:240.54ms +[2025-07-17 10:36:41] [Rank 0] step:6581/10000 train_time:1583007ms step_avg:240.54ms +[2025-07-17 10:36:46] [Rank 0] step:6601/10000 train_time:1587993ms step_avg:240.57ms +[2025-07-17 10:36:46] [Rank 0] step:6601/10000 train_time:1587993ms step_avg:240.57ms +[2025-07-17 10:36:51] [Rank 0] step:6621/10000 train_time:1593053ms step_avg:240.61ms +[2025-07-17 10:36:51] [Rank 0] step:6621/10000 train_time:1593053ms step_avg:240.61ms +[2025-07-17 10:36:56] [Rank 0] PRINT: step:6625/10000 val_loss:4.2790 train_time:1594299ms step_avg:240.65ms +[2025-07-17 10:36:56] [Rank 0] PRINT: step:6625/10000 val_loss:4.2790 train_time:1594299ms step_avg:240.65ms +[2025-07-17 10:37:00] [Rank 0] step:6641/10000 train_time:1598017ms step_avg:240.63ms +[2025-07-17 10:37:00] [Rank 0] step:6641/10000 train_time:1598017ms step_avg:240.63ms +[2025-07-17 10:37:05] [Rank 0] step:6661/10000 train_time:1602986ms step_avg:240.65ms +[2025-07-17 10:37:05] [Rank 0] step:6661/10000 train_time:1602986ms step_avg:240.65ms +[2025-07-17 10:37:10] [Rank 0] step:6681/10000 train_time:1608009ms step_avg:240.68ms +[2025-07-17 10:37:10] [Rank 0] step:6681/10000 train_time:1608009ms step_avg:240.68ms +[2025-07-17 10:37:15] [Rank 0] step:6701/10000 train_time:1613042ms 
step_avg:240.72ms +[2025-07-17 10:37:15] [Rank 0] step:6701/10000 train_time:1613042ms step_avg:240.72ms +[2025-07-17 10:37:20] [Rank 0] step:6721/10000 train_time:1618092ms step_avg:240.75ms +[2025-07-17 10:37:20] [Rank 0] step:6721/10000 train_time:1618092ms step_avg:240.75ms +[2025-07-17 10:37:25] [Rank 0] step:6741/10000 train_time:1623144ms step_avg:240.79ms +[2025-07-17 10:37:25] [Rank 0] step:6741/10000 train_time:1623144ms step_avg:240.79ms +[2025-07-17 10:37:32] [Rank 0] PRINT: step:6750/10000 val_loss:4.4478 train_time:1625656ms step_avg:240.84ms +[2025-07-17 10:37:32] [Rank 0] PRINT: step:6750/10000 val_loss:4.4478 train_time:1625656ms step_avg:240.84ms +[2025-07-17 10:37:35] [Rank 0] step:6761/10000 train_time:1628174ms step_avg:240.82ms +[2025-07-17 10:37:35] [Rank 0] step:6761/10000 train_time:1628174ms step_avg:240.82ms +[2025-07-17 10:37:40] [Rank 0] step:6781/10000 train_time:1633213ms step_avg:240.85ms +[2025-07-17 10:37:40] [Rank 0] step:6781/10000 train_time:1633213ms step_avg:240.85ms +[2025-07-17 10:37:45] [Rank 0] step:6801/10000 train_time:1638258ms step_avg:240.88ms +[2025-07-17 10:37:45] [Rank 0] step:6801/10000 train_time:1638258ms step_avg:240.88ms +[2025-07-17 10:37:50] [Rank 0] step:6821/10000 train_time:1643294ms step_avg:240.92ms +[2025-07-17 10:37:50] [Rank 0] step:6821/10000 train_time:1643294ms step_avg:240.92ms +[2025-07-17 10:37:55] [Rank 0] step:6841/10000 train_time:1648335ms step_avg:240.95ms +[2025-07-17 10:37:55] [Rank 0] step:6841/10000 train_time:1648335ms step_avg:240.95ms +[2025-07-17 10:38:00] [Rank 0] step:6861/10000 train_time:1653368ms step_avg:240.98ms +[2025-07-17 10:38:00] [Rank 0] step:6861/10000 train_time:1653368ms step_avg:240.98ms +[2025-07-17 10:38:09] [Rank 0] PRINT: step:6875/10000 val_loss:4.4316 train_time:1657140ms step_avg:241.04ms +[2025-07-17 10:38:09] [Rank 0] PRINT: step:6875/10000 val_loss:4.4316 train_time:1657140ms step_avg:241.04ms +[2025-07-17 10:38:10] [Rank 0] step:6881/10000 
train_time:1658400ms step_avg:241.01ms +[2025-07-17 10:38:10] [Rank 0] step:6881/10000 train_time:1658400ms step_avg:241.01ms +[2025-07-17 10:38:15] [Rank 0] step:6901/10000 train_time:1663433ms step_avg:241.04ms +[2025-07-17 10:38:15] [Rank 0] step:6901/10000 train_time:1663433ms step_avg:241.04ms +[2025-07-17 10:38:20] [Rank 0] step:6921/10000 train_time:1668476ms step_avg:241.07ms +[2025-07-17 10:38:20] [Rank 0] step:6921/10000 train_time:1668476ms step_avg:241.07ms +[2025-07-17 10:38:25] [Rank 0] step:6941/10000 train_time:1673526ms step_avg:241.11ms +[2025-07-17 10:38:25] [Rank 0] step:6941/10000 train_time:1673526ms step_avg:241.11ms +[2025-07-17 10:38:30] [Rank 0] step:6961/10000 train_time:1678573ms step_avg:241.14ms +[2025-07-17 10:38:30] [Rank 0] step:6961/10000 train_time:1678573ms step_avg:241.14ms +[2025-07-17 10:38:35] [Rank 0] step:6981/10000 train_time:1683622ms step_avg:241.17ms +[2025-07-17 10:38:35] [Rank 0] step:6981/10000 train_time:1683622ms step_avg:241.17ms +[2025-07-17 10:38:45] [Rank 0] PRINT: step:7000/10000 val_loss:4.4803 train_time:1688664ms step_avg:241.24ms +[2025-07-17 10:38:45] [Rank 0] PRINT: step:7000/10000 val_loss:4.4803 train_time:1688664ms step_avg:241.24ms +[2025-07-17 10:38:45] [Rank 0] step:7001/10000 train_time:1688685ms step_avg:241.21ms +[2025-07-17 10:38:45] [Rank 0] step:7001/10000 train_time:1688685ms step_avg:241.21ms +[2025-07-17 10:38:50] [Rank 0] step:7021/10000 train_time:1693707ms step_avg:241.23ms +[2025-07-17 10:38:50] [Rank 0] step:7021/10000 train_time:1693707ms step_avg:241.23ms +[2025-07-17 10:38:55] [Rank 0] step:7041/10000 train_time:1698755ms step_avg:241.27ms +[2025-07-17 10:38:55] [Rank 0] step:7041/10000 train_time:1698755ms step_avg:241.27ms +[2025-07-17 10:39:00] [Rank 0] step:7061/10000 train_time:1703787ms step_avg:241.30ms +[2025-07-17 10:39:00] [Rank 0] step:7061/10000 train_time:1703787ms step_avg:241.30ms +[2025-07-17 10:39:05] [Rank 0] step:7081/10000 train_time:1708830ms step_avg:241.33ms 
+[2025-07-17 10:39:05] [Rank 0] step:7081/10000 train_time:1708830ms step_avg:241.33ms +[2025-07-17 10:39:10] [Rank 0] step:7101/10000 train_time:1713862ms step_avg:241.36ms +[2025-07-17 10:39:10] [Rank 0] step:7101/10000 train_time:1713862ms step_avg:241.36ms +[2025-07-17 10:39:16] [Rank 0] step:7121/10000 train_time:1719048ms step_avg:241.41ms +[2025-07-17 10:39:16] [Rank 0] step:7121/10000 train_time:1719048ms step_avg:241.41ms +[2025-07-17 10:39:21] [Rank 0] PRINT: step:7125/10000 val_loss:4.3621 train_time:1720305ms step_avg:241.45ms +[2025-07-17 10:39:21] [Rank 0] PRINT: step:7125/10000 val_loss:4.3621 train_time:1720305ms step_avg:241.45ms +[2025-07-17 10:39:25] [Rank 0] step:7141/10000 train_time:1724086ms step_avg:241.43ms +[2025-07-17 10:39:25] [Rank 0] step:7141/10000 train_time:1724086ms step_avg:241.43ms +[2025-07-17 10:39:30] [Rank 0] step:7161/10000 train_time:1729126ms step_avg:241.46ms +[2025-07-17 10:39:30] [Rank 0] step:7161/10000 train_time:1729126ms step_avg:241.46ms +[2025-07-17 10:39:35] [Rank 0] step:7181/10000 train_time:1734162ms step_avg:241.49ms +[2025-07-17 10:39:35] [Rank 0] step:7181/10000 train_time:1734162ms step_avg:241.49ms +[2025-07-17 10:39:41] [Rank 0] step:7201/10000 train_time:1739210ms step_avg:241.52ms +[2025-07-17 10:39:41] [Rank 0] step:7201/10000 train_time:1739210ms step_avg:241.52ms +[2025-07-17 10:39:46] [Rank 0] step:7221/10000 train_time:1744245ms step_avg:241.55ms +[2025-07-17 10:39:46] [Rank 0] step:7221/10000 train_time:1744245ms step_avg:241.55ms +[2025-07-17 10:39:51] [Rank 0] step:7241/10000 train_time:1749276ms step_avg:241.58ms +[2025-07-17 10:39:51] [Rank 0] step:7241/10000 train_time:1749276ms step_avg:241.58ms +[2025-07-17 10:39:58] [Rank 0] PRINT: step:7250/10000 val_loss:4.4956 train_time:1751794ms step_avg:241.63ms +[2025-07-17 10:39:58] [Rank 0] PRINT: step:7250/10000 val_loss:4.4956 train_time:1751794ms step_avg:241.63ms +[2025-07-17 10:40:00] [Rank 0] step:7261/10000 train_time:1754299ms 
step_avg:241.61ms +[2025-07-17 10:40:00] [Rank 0] step:7261/10000 train_time:1754299ms step_avg:241.61ms +[2025-07-17 10:40:05] [Rank 0] step:7281/10000 train_time:1759324ms step_avg:241.63ms +[2025-07-17 10:40:05] [Rank 0] step:7281/10000 train_time:1759324ms step_avg:241.63ms +[2025-07-17 10:40:10] [Rank 0] step:7301/10000 train_time:1764346ms step_avg:241.66ms +[2025-07-17 10:40:10] [Rank 0] step:7301/10000 train_time:1764346ms step_avg:241.66ms +[2025-07-17 10:40:15] [Rank 0] step:7321/10000 train_time:1769383ms step_avg:241.69ms +[2025-07-17 10:40:15] [Rank 0] step:7321/10000 train_time:1769383ms step_avg:241.69ms +[2025-07-17 10:40:20] [Rank 0] step:7341/10000 train_time:1774407ms step_avg:241.71ms +[2025-07-17 10:40:20] [Rank 0] step:7341/10000 train_time:1774407ms step_avg:241.71ms +[2025-07-17 10:40:25] [Rank 0] step:7361/10000 train_time:1779435ms step_avg:241.74ms +[2025-07-17 10:40:25] [Rank 0] step:7361/10000 train_time:1779435ms step_avg:241.74ms +[2025-07-17 10:40:34] [Rank 0] PRINT: step:7375/10000 val_loss:4.5683 train_time:1783208ms step_avg:241.79ms +[2025-07-17 10:40:34] [Rank 0] PRINT: step:7375/10000 val_loss:4.5683 train_time:1783208ms step_avg:241.79ms +[2025-07-17 10:40:35] [Rank 0] step:7381/10000 train_time:1784461ms step_avg:241.76ms +[2025-07-17 10:40:35] [Rank 0] step:7381/10000 train_time:1784461ms step_avg:241.76ms +[2025-07-17 10:40:40] [Rank 0] step:7401/10000 train_time:1789493ms step_avg:241.79ms +[2025-07-17 10:40:40] [Rank 0] step:7401/10000 train_time:1789493ms step_avg:241.79ms +[2025-07-17 10:40:45] [Rank 0] step:7421/10000 train_time:1794520ms step_avg:241.82ms +[2025-07-17 10:40:45] [Rank 0] step:7421/10000 train_time:1794520ms step_avg:241.82ms +[2025-07-17 10:40:50] [Rank 0] step:7441/10000 train_time:1799559ms step_avg:241.84ms +[2025-07-17 10:40:50] [Rank 0] step:7441/10000 train_time:1799559ms step_avg:241.84ms +[2025-07-17 10:40:55] [Rank 0] step:7461/10000 train_time:1804588ms step_avg:241.87ms +[2025-07-17 
10:40:55] [Rank 0] step:7461/10000 train_time:1804588ms step_avg:241.87ms +[2025-07-17 10:41:00] [Rank 0] step:7481/10000 train_time:1809632ms step_avg:241.90ms +[2025-07-17 10:41:00] [Rank 0] step:7481/10000 train_time:1809632ms step_avg:241.90ms +[2025-07-17 10:41:10] [Rank 0] PRINT: step:7500/10000 val_loss:4.5210 train_time:1814680ms step_avg:241.96ms +[2025-07-17 10:41:10] [Rank 0] PRINT: step:7500/10000 val_loss:4.5210 train_time:1814680ms step_avg:241.96ms +[2025-07-17 10:41:10] [Rank 0] step:7501/10000 train_time:1814701ms step_avg:241.93ms +[2025-07-17 10:41:10] [Rank 0] step:7501/10000 train_time:1814701ms step_avg:241.93ms +[2025-07-17 10:41:15] [Rank 0] step:7521/10000 train_time:1819726ms step_avg:241.95ms +[2025-07-17 10:41:15] [Rank 0] step:7521/10000 train_time:1819726ms step_avg:241.95ms +[2025-07-17 10:41:20] [Rank 0] step:7541/10000 train_time:1824763ms step_avg:241.98ms +[2025-07-17 10:41:20] [Rank 0] step:7541/10000 train_time:1824763ms step_avg:241.98ms +[2025-07-17 10:41:25] [Rank 0] step:7561/10000 train_time:1829901ms step_avg:242.02ms +[2025-07-17 10:41:25] [Rank 0] step:7561/10000 train_time:1829901ms step_avg:242.02ms +[2025-07-17 10:41:31] [Rank 0] step:7581/10000 train_time:1834944ms step_avg:242.05ms +[2025-07-17 10:41:31] [Rank 0] step:7581/10000 train_time:1834944ms step_avg:242.05ms +[2025-07-17 10:41:36] [Rank 0] step:7601/10000 train_time:1840000ms step_avg:242.07ms +[2025-07-17 10:41:36] [Rank 0] step:7601/10000 train_time:1840000ms step_avg:242.07ms +[2025-07-17 10:41:41] [Rank 0] step:7621/10000 train_time:1845276ms step_avg:242.13ms +[2025-07-17 10:41:41] [Rank 0] step:7621/10000 train_time:1845276ms step_avg:242.13ms +[2025-07-17 10:41:47] [Rank 0] PRINT: step:7625/10000 val_loss:4.5725 train_time:1846521ms step_avg:242.17ms +[2025-07-17 10:41:47] [Rank 0] PRINT: step:7625/10000 val_loss:4.5725 train_time:1846521ms step_avg:242.17ms +[2025-07-17 10:41:51] [Rank 0] step:7641/10000 train_time:1850310ms step_avg:242.16ms 
+[2025-07-17 10:41:51] [Rank 0] step:7641/10000 train_time:1850310ms step_avg:242.16ms +[2025-07-17 10:41:56] [Rank 0] step:7661/10000 train_time:1855376ms step_avg:242.18ms +[2025-07-17 10:41:56] [Rank 0] step:7661/10000 train_time:1855376ms step_avg:242.18ms +[2025-07-17 10:42:01] [Rank 0] step:7681/10000 train_time:1860457ms step_avg:242.22ms +[2025-07-17 10:42:01] [Rank 0] step:7681/10000 train_time:1860457ms step_avg:242.22ms +[2025-07-17 10:42:06] [Rank 0] step:7701/10000 train_time:1865523ms step_avg:242.24ms +[2025-07-17 10:42:06] [Rank 0] step:7701/10000 train_time:1865523ms step_avg:242.24ms +[2025-07-17 10:42:11] [Rank 0] step:7721/10000 train_time:1870588ms step_avg:242.27ms +[2025-07-17 10:42:11] [Rank 0] step:7721/10000 train_time:1870588ms step_avg:242.27ms +[2025-07-17 10:42:16] [Rank 0] step:7741/10000 train_time:1875653ms step_avg:242.30ms +[2025-07-17 10:42:16] [Rank 0] step:7741/10000 train_time:1875653ms step_avg:242.30ms +[2025-07-17 10:42:23] [Rank 0] PRINT: step:7750/10000 val_loss:4.5369 train_time:1878200ms step_avg:242.35ms +[2025-07-17 10:42:23] [Rank 0] PRINT: step:7750/10000 val_loss:4.5369 train_time:1878200ms step_avg:242.35ms +[2025-07-17 10:42:26] [Rank 0] step:7761/10000 train_time:1880729ms step_avg:242.33ms +[2025-07-17 10:42:26] [Rank 0] step:7761/10000 train_time:1880729ms step_avg:242.33ms +[2025-07-17 10:42:31] [Rank 0] step:7781/10000 train_time:1885792ms step_avg:242.36ms +[2025-07-17 10:42:31] [Rank 0] step:7781/10000 train_time:1885792ms step_avg:242.36ms +[2025-07-17 10:42:36] [Rank 0] step:7801/10000 train_time:1890854ms step_avg:242.39ms +[2025-07-17 10:42:36] [Rank 0] step:7801/10000 train_time:1890854ms step_avg:242.39ms +[2025-07-17 10:42:41] [Rank 0] step:7821/10000 train_time:1895912ms step_avg:242.41ms +[2025-07-17 10:42:41] [Rank 0] step:7821/10000 train_time:1895912ms step_avg:242.41ms +[2025-07-17 10:42:46] [Rank 0] step:7841/10000 train_time:1900974ms step_avg:242.44ms +[2025-07-17 10:42:46] [Rank 0] 
step:7841/10000 train_time:1900974ms step_avg:242.44ms +[2025-07-17 10:42:51] [Rank 0] step:7861/10000 train_time:1906024ms step_avg:242.47ms +[2025-07-17 10:42:51] [Rank 0] step:7861/10000 train_time:1906024ms step_avg:242.47ms +[2025-07-17 10:42:59] [Rank 0] PRINT: step:7875/10000 val_loss:4.5208 train_time:1909810ms step_avg:242.52ms +[2025-07-17 10:42:59] [Rank 0] PRINT: step:7875/10000 val_loss:4.5208 train_time:1909810ms step_avg:242.52ms +[2025-07-17 10:43:01] [Rank 0] step:7881/10000 train_time:1911068ms step_avg:242.49ms +[2025-07-17 10:43:01] [Rank 0] step:7881/10000 train_time:1911068ms step_avg:242.49ms +[2025-07-17 10:43:06] [Rank 0] step:7901/10000 train_time:1916120ms step_avg:242.52ms +[2025-07-17 10:43:06] [Rank 0] step:7901/10000 train_time:1916120ms step_avg:242.52ms +[2025-07-17 10:43:11] [Rank 0] step:7921/10000 train_time:1921178ms step_avg:242.54ms +[2025-07-17 10:43:11] [Rank 0] step:7921/10000 train_time:1921178ms step_avg:242.54ms +[2025-07-17 10:43:16] [Rank 0] step:7941/10000 train_time:1926249ms step_avg:242.57ms +[2025-07-17 10:43:16] [Rank 0] step:7941/10000 train_time:1926249ms step_avg:242.57ms +[2025-07-17 10:43:21] [Rank 0] step:7961/10000 train_time:1931318ms step_avg:242.60ms +[2025-07-17 10:43:21] [Rank 0] step:7961/10000 train_time:1931318ms step_avg:242.60ms +[2025-07-17 10:43:26] [Rank 0] step:7981/10000 train_time:1936374ms step_avg:242.62ms +[2025-07-17 10:43:26] [Rank 0] step:7981/10000 train_time:1936374ms step_avg:242.62ms +[2025-07-17 10:43:35] [Rank 0] PRINT: step:8000/10000 val_loss:4.6216 train_time:1941443ms step_avg:242.68ms +[2025-07-17 10:43:35] [Rank 0] PRINT: step:8000/10000 val_loss:4.6216 train_time:1941443ms step_avg:242.68ms +[2025-07-17 10:43:35] [Rank 0] step:8001/10000 train_time:1941464ms step_avg:242.65ms +[2025-07-17 10:43:35] [Rank 0] step:8001/10000 train_time:1941464ms step_avg:242.65ms +[2025-07-17 10:43:40] [Rank 0] step:8021/10000 train_time:1946497ms step_avg:242.68ms +[2025-07-17 10:43:40] 
[Rank 0] step:8021/10000 train_time:1946497ms step_avg:242.68ms +[2025-07-17 10:43:46] [Rank 0] step:8041/10000 train_time:1951575ms step_avg:242.70ms +[2025-07-17 10:43:46] [Rank 0] step:8041/10000 train_time:1951575ms step_avg:242.70ms +[2025-07-17 10:43:51] [Rank 0] step:8061/10000 train_time:1956623ms step_avg:242.73ms +[2025-07-17 10:43:51] [Rank 0] step:8061/10000 train_time:1956623ms step_avg:242.73ms +[2025-07-17 10:43:56] [Rank 0] step:8081/10000 train_time:1961680ms step_avg:242.75ms +[2025-07-17 10:43:56] [Rank 0] step:8081/10000 train_time:1961680ms step_avg:242.75ms +[2025-07-17 10:44:01] [Rank 0] step:8101/10000 train_time:1966725ms step_avg:242.78ms +[2025-07-17 10:44:01] [Rank 0] step:8101/10000 train_time:1966725ms step_avg:242.78ms +[2025-07-17 10:44:06] [Rank 0] step:8121/10000 train_time:1971770ms step_avg:242.80ms +[2025-07-17 10:44:06] [Rank 0] step:8121/10000 train_time:1971770ms step_avg:242.80ms +[2025-07-17 10:44:11] [Rank 0] PRINT: step:8125/10000 val_loss:4.5450 train_time:1973037ms step_avg:242.84ms +[2025-07-17 10:44:11] [Rank 0] PRINT: step:8125/10000 val_loss:4.5450 train_time:1973037ms step_avg:242.84ms +[2025-07-17 10:44:16] [Rank 0] step:8141/10000 train_time:1977314ms step_avg:242.88ms +[2025-07-17 10:44:16] [Rank 0] step:8141/10000 train_time:1977314ms step_avg:242.88ms +[2025-07-17 10:44:21] [Rank 0] step:8161/10000 train_time:1982399ms step_avg:242.91ms +[2025-07-17 10:44:21] [Rank 0] step:8161/10000 train_time:1982399ms step_avg:242.91ms +[2025-07-17 10:44:26] [Rank 0] step:8181/10000 train_time:1987516ms step_avg:242.94ms +[2025-07-17 10:44:26] [Rank 0] step:8181/10000 train_time:1987516ms step_avg:242.94ms +[2025-07-17 10:44:31] [Rank 0] step:8201/10000 train_time:1992618ms step_avg:242.97ms +[2025-07-17 10:44:31] [Rank 0] step:8201/10000 train_time:1992618ms step_avg:242.97ms +[2025-07-17 10:44:36] [Rank 0] step:8221/10000 train_time:1997738ms step_avg:243.00ms +[2025-07-17 10:44:36] [Rank 0] step:8221/10000 
train_time:1997738ms step_avg:243.00ms +[2025-07-17 10:44:42] [Rank 0] step:8241/10000 train_time:2002861ms step_avg:243.04ms +[2025-07-17 10:44:42] [Rank 0] step:8241/10000 train_time:2002861ms step_avg:243.04ms +[2025-07-17 10:44:49] [Rank 0] PRINT: step:8250/10000 val_loss:4.5645 train_time:2005431ms step_avg:243.08ms +[2025-07-17 10:44:49] [Rank 0] PRINT: step:8250/10000 val_loss:4.5645 train_time:2005431ms step_avg:243.08ms +[2025-07-17 10:44:52] [Rank 0] step:8261/10000 train_time:2007996ms step_avg:243.07ms +[2025-07-17 10:44:52] [Rank 0] step:8261/10000 train_time:2007996ms step_avg:243.07ms +[2025-07-17 10:44:57] [Rank 0] step:8281/10000 train_time:2013144ms step_avg:243.10ms +[2025-07-17 10:44:57] [Rank 0] step:8281/10000 train_time:2013144ms step_avg:243.10ms +[2025-07-17 10:45:02] [Rank 0] step:8301/10000 train_time:2018262ms step_avg:243.13ms +[2025-07-17 10:45:02] [Rank 0] step:8301/10000 train_time:2018262ms step_avg:243.13ms +[2025-07-17 10:45:07] [Rank 0] step:8321/10000 train_time:2023393ms step_avg:243.17ms +[2025-07-17 10:45:07] [Rank 0] step:8321/10000 train_time:2023393ms step_avg:243.17ms +[2025-07-17 10:45:12] [Rank 0] step:8341/10000 train_time:2028532ms step_avg:243.20ms +[2025-07-17 10:45:12] [Rank 0] step:8341/10000 train_time:2028532ms step_avg:243.20ms +[2025-07-17 10:45:17] [Rank 0] step:8361/10000 train_time:2033659ms step_avg:243.23ms +[2025-07-17 10:45:17] [Rank 0] step:8361/10000 train_time:2033659ms step_avg:243.23ms +[2025-07-17 10:45:26] [Rank 0] PRINT: step:8375/10000 val_loss:4.5859 train_time:2037503ms step_avg:243.28ms +[2025-07-17 10:45:26] [Rank 0] PRINT: step:8375/10000 val_loss:4.5859 train_time:2037503ms step_avg:243.28ms +[2025-07-17 10:45:27] [Rank 0] step:8381/10000 train_time:2038773ms step_avg:243.26ms +[2025-07-17 10:45:27] [Rank 0] step:8381/10000 train_time:2038773ms step_avg:243.26ms +[2025-07-17 10:45:32] [Rank 0] step:8401/10000 train_time:2043882ms step_avg:243.29ms +[2025-07-17 10:45:32] [Rank 0] 
step:8401/10000 train_time:2043882ms step_avg:243.29ms +[2025-07-17 10:45:37] [Rank 0] step:8421/10000 train_time:2049008ms step_avg:243.32ms +[2025-07-17 10:45:37] [Rank 0] step:8421/10000 train_time:2049008ms step_avg:243.32ms +[2025-07-17 10:45:42] [Rank 0] step:8441/10000 train_time:2054134ms step_avg:243.35ms +[2025-07-17 10:45:42] [Rank 0] step:8441/10000 train_time:2054134ms step_avg:243.35ms +[2025-07-17 10:45:48] [Rank 0] step:8461/10000 train_time:2059270ms step_avg:243.38ms +[2025-07-17 10:45:48] [Rank 0] step:8461/10000 train_time:2059270ms step_avg:243.38ms +[2025-07-17 10:45:53] [Rank 0] step:8481/10000 train_time:2064384ms step_avg:243.41ms +[2025-07-17 10:45:53] [Rank 0] step:8481/10000 train_time:2064384ms step_avg:243.41ms +[2025-07-17 10:46:02] [Rank 0] PRINT: step:8500/10000 val_loss:4.4978 train_time:2069519ms step_avg:243.47ms +[2025-07-17 10:46:02] [Rank 0] PRINT: step:8500/10000 val_loss:4.4978 train_time:2069519ms step_avg:243.47ms +[2025-07-17 10:46:02] [Rank 0] step:8501/10000 train_time:2069539ms step_avg:243.45ms +[2025-07-17 10:46:02] [Rank 0] step:8501/10000 train_time:2069539ms step_avg:243.45ms +[2025-07-17 10:46:07] [Rank 0] step:8521/10000 train_time:2074747ms step_avg:243.49ms +[2025-07-17 10:46:07] [Rank 0] step:8521/10000 train_time:2074747ms step_avg:243.49ms +[2025-07-17 10:46:12] [Rank 0] step:8541/10000 train_time:2079890ms step_avg:243.52ms +[2025-07-17 10:46:12] [Rank 0] step:8541/10000 train_time:2079890ms step_avg:243.52ms +[2025-07-17 10:46:17] [Rank 0] step:8561/10000 train_time:2085012ms step_avg:243.55ms +[2025-07-17 10:46:17] [Rank 0] step:8561/10000 train_time:2085012ms step_avg:243.55ms +[2025-07-17 10:46:23] [Rank 0] step:8581/10000 train_time:2090137ms step_avg:243.58ms +[2025-07-17 10:46:23] [Rank 0] step:8581/10000 train_time:2090137ms step_avg:243.58ms +[2025-07-17 10:46:28] [Rank 0] step:8601/10000 train_time:2095248ms step_avg:243.61ms +[2025-07-17 10:46:28] [Rank 0] step:8601/10000 train_time:2095248ms 
step_avg:243.61ms +[2025-07-17 10:46:33] [Rank 0] step:8621/10000 train_time:2100369ms step_avg:243.63ms +[2025-07-17 10:46:33] [Rank 0] step:8621/10000 train_time:2100369ms step_avg:243.63ms +[2025-07-17 10:46:39] [Rank 0] PRINT: step:8625/10000 val_loss:4.3858 train_time:2101650ms step_avg:243.67ms +[2025-07-17 10:46:39] [Rank 0] PRINT: step:8625/10000 val_loss:4.3858 train_time:2101650ms step_avg:243.67ms +[2025-07-17 10:46:43] [Rank 0] step:8641/10000 train_time:2106021ms step_avg:243.72ms +[2025-07-17 10:46:43] [Rank 0] step:8641/10000 train_time:2106021ms step_avg:243.72ms +[2025-07-17 10:46:48] [Rank 0] step:8661/10000 train_time:2111148ms step_avg:243.75ms +[2025-07-17 10:46:48] [Rank 0] step:8661/10000 train_time:2111148ms step_avg:243.75ms +[2025-07-17 10:46:53] [Rank 0] step:8681/10000 train_time:2116273ms step_avg:243.78ms +[2025-07-17 10:46:53] [Rank 0] step:8681/10000 train_time:2116273ms step_avg:243.78ms +[2025-07-17 10:46:59] [Rank 0] step:8701/10000 train_time:2121409ms step_avg:243.81ms +[2025-07-17 10:46:59] [Rank 0] step:8701/10000 train_time:2121409ms step_avg:243.81ms +[2025-07-17 10:47:04] [Rank 0] step:8721/10000 train_time:2126543ms step_avg:243.84ms +[2025-07-17 10:47:04] [Rank 0] step:8721/10000 train_time:2126543ms step_avg:243.84ms +[2025-07-17 10:47:09] [Rank 0] step:8741/10000 train_time:2131674ms step_avg:243.87ms +[2025-07-17 10:47:09] [Rank 0] step:8741/10000 train_time:2131674ms step_avg:243.87ms +[2025-07-17 10:47:16] [Rank 0] PRINT: step:8750/10000 val_loss:4.3727 train_time:2134233ms step_avg:243.91ms +[2025-07-17 10:47:16] [Rank 0] PRINT: step:8750/10000 val_loss:4.3727 train_time:2134233ms step_avg:243.91ms +[2025-07-17 10:47:19] [Rank 0] step:8761/10000 train_time:2136794ms step_avg:243.90ms +[2025-07-17 10:47:19] [Rank 0] step:8761/10000 train_time:2136794ms step_avg:243.90ms +[2025-07-17 10:47:24] [Rank 0] step:8781/10000 train_time:2141924ms step_avg:243.93ms +[2025-07-17 10:47:24] [Rank 0] step:8781/10000 
train_time:2141924ms step_avg:243.93ms +[2025-07-17 10:47:29] [Rank 0] step:8801/10000 train_time:2147061ms step_avg:243.96ms +[2025-07-17 10:47:29] [Rank 0] step:8801/10000 train_time:2147061ms step_avg:243.96ms +[2025-07-17 10:47:34] [Rank 0] step:8821/10000 train_time:2152192ms step_avg:243.99ms +[2025-07-17 10:47:34] [Rank 0] step:8821/10000 train_time:2152192ms step_avg:243.99ms +[2025-07-17 10:47:39] [Rank 0] step:8841/10000 train_time:2157341ms step_avg:244.02ms +[2025-07-17 10:47:39] [Rank 0] step:8841/10000 train_time:2157341ms step_avg:244.02ms +[2025-07-17 10:47:44] [Rank 0] step:8861/10000 train_time:2162488ms step_avg:244.05ms +[2025-07-17 10:47:44] [Rank 0] step:8861/10000 train_time:2162488ms step_avg:244.05ms +[2025-07-17 10:47:53] [Rank 0] PRINT: step:8875/10000 val_loss:4.4443 train_time:2166325ms step_avg:244.09ms +[2025-07-17 10:47:53] [Rank 0] PRINT: step:8875/10000 val_loss:4.4443 train_time:2166325ms step_avg:244.09ms +[2025-07-17 10:47:54] [Rank 0] step:8881/10000 train_time:2167604ms step_avg:244.07ms +[2025-07-17 10:47:54] [Rank 0] step:8881/10000 train_time:2167604ms step_avg:244.07ms +[2025-07-17 10:48:00] [Rank 0] step:8901/10000 train_time:2172719ms step_avg:244.10ms +[2025-07-17 10:48:00] [Rank 0] step:8901/10000 train_time:2172719ms step_avg:244.10ms +[2025-07-17 10:48:05] [Rank 0] step:8921/10000 train_time:2177839ms step_avg:244.13ms +[2025-07-17 10:48:05] [Rank 0] step:8921/10000 train_time:2177839ms step_avg:244.13ms +[2025-07-17 10:48:10] [Rank 0] step:8941/10000 train_time:2182964ms step_avg:244.15ms +[2025-07-17 10:48:10] [Rank 0] step:8941/10000 train_time:2182964ms step_avg:244.15ms +[2025-07-17 10:48:15] [Rank 0] step:8961/10000 train_time:2188094ms step_avg:244.18ms +[2025-07-17 10:48:15] [Rank 0] step:8961/10000 train_time:2188094ms step_avg:244.18ms +[2025-07-17 10:48:20] [Rank 0] step:8981/10000 train_time:2193221ms step_avg:244.21ms +[2025-07-17 10:48:20] [Rank 0] step:8981/10000 train_time:2193221ms step_avg:244.21ms 
+[2025-07-17 10:48:30] [Rank 0] PRINT: step:9000/10000 val_loss:4.4491 train_time:2198346ms step_avg:244.26ms +[2025-07-17 10:48:30] [Rank 0] PRINT: step:9000/10000 val_loss:4.4491 train_time:2198346ms step_avg:244.26ms +[2025-07-17 10:48:30] [Rank 0] step:9001/10000 train_time:2198367ms step_avg:244.24ms +[2025-07-17 10:48:30] [Rank 0] step:9001/10000 train_time:2198367ms step_avg:244.24ms +[2025-07-17 10:48:35] [Rank 0] step:9021/10000 train_time:2203463ms step_avg:244.26ms +[2025-07-17 10:48:35] [Rank 0] step:9021/10000 train_time:2203463ms step_avg:244.26ms +[2025-07-17 10:48:40] [Rank 0] step:9041/10000 train_time:2208608ms step_avg:244.29ms +[2025-07-17 10:48:40] [Rank 0] step:9041/10000 train_time:2208608ms step_avg:244.29ms +[2025-07-17 10:48:45] [Rank 0] step:9061/10000 train_time:2213735ms step_avg:244.31ms +[2025-07-17 10:48:45] [Rank 0] step:9061/10000 train_time:2213735ms step_avg:244.31ms +[2025-07-17 10:48:50] [Rank 0] step:9081/10000 train_time:2218884ms step_avg:244.34ms +[2025-07-17 10:48:50] [Rank 0] step:9081/10000 train_time:2218884ms step_avg:244.34ms +[2025-07-17 10:48:56] [Rank 0] step:9101/10000 train_time:2224020ms step_avg:244.37ms +[2025-07-17 10:48:56] [Rank 0] step:9101/10000 train_time:2224020ms step_avg:244.37ms +[2025-07-17 10:49:01] [Rank 0] step:9121/10000 train_time:2229161ms step_avg:244.40ms +[2025-07-17 10:49:01] [Rank 0] step:9121/10000 train_time:2229161ms step_avg:244.40ms +[2025-07-17 10:49:07] [Rank 0] PRINT: step:9125/10000 val_loss:4.4617 train_time:2230439ms step_avg:244.43ms +[2025-07-17 10:49:07] [Rank 0] PRINT: step:9125/10000 val_loss:4.4617 train_time:2230439ms step_avg:244.43ms +[2025-07-17 10:49:11] [Rank 0] step:9141/10000 train_time:2234272ms step_avg:244.42ms +[2025-07-17 10:49:11] [Rank 0] step:9141/10000 train_time:2234272ms step_avg:244.42ms +[2025-07-17 10:49:16] [Rank 0] step:9161/10000 train_time:2239911ms step_avg:244.51ms +[2025-07-17 10:49:16] [Rank 0] step:9161/10000 train_time:2239911ms 
step_avg:244.51ms +[2025-07-17 10:49:21] [Rank 0] step:9181/10000 train_time:2245035ms step_avg:244.53ms +[2025-07-17 10:49:21] [Rank 0] step:9181/10000 train_time:2245035ms step_avg:244.53ms +[2025-07-17 10:49:27] [Rank 0] step:9201/10000 train_time:2250164ms step_avg:244.56ms +[2025-07-17 10:49:27] [Rank 0] step:9201/10000 train_time:2250164ms step_avg:244.56ms +[2025-07-17 10:49:32] [Rank 0] step:9221/10000 train_time:2255325ms step_avg:244.59ms +[2025-07-17 10:49:32] [Rank 0] step:9221/10000 train_time:2255325ms step_avg:244.59ms +[2025-07-17 10:49:37] [Rank 0] step:9241/10000 train_time:2260462ms step_avg:244.61ms +[2025-07-17 10:49:37] [Rank 0] step:9241/10000 train_time:2260462ms step_avg:244.61ms +[2025-07-17 10:49:44] [Rank 0] PRINT: step:9250/10000 val_loss:4.4632 train_time:2263031ms step_avg:244.65ms +[2025-07-17 10:49:44] [Rank 0] PRINT: step:9250/10000 val_loss:4.4632 train_time:2263031ms step_avg:244.65ms +[2025-07-17 10:49:47] [Rank 0] step:9261/10000 train_time:2265601ms step_avg:244.64ms +[2025-07-17 10:49:47] [Rank 0] step:9261/10000 train_time:2265601ms step_avg:244.64ms +[2025-07-17 10:49:52] [Rank 0] step:9281/10000 train_time:2270719ms step_avg:244.66ms +[2025-07-17 10:49:52] [Rank 0] step:9281/10000 train_time:2270719ms step_avg:244.66ms +[2025-07-17 10:49:57] [Rank 0] step:9301/10000 train_time:2275861ms step_avg:244.69ms +[2025-07-17 10:49:57] [Rank 0] step:9301/10000 train_time:2275861ms step_avg:244.69ms +[2025-07-17 10:50:02] [Rank 0] step:9321/10000 train_time:2281016ms step_avg:244.72ms +[2025-07-17 10:50:02] [Rank 0] step:9321/10000 train_time:2281016ms step_avg:244.72ms +[2025-07-17 10:50:07] [Rank 0] step:9341/10000 train_time:2286153ms step_avg:244.74ms +[2025-07-17 10:50:07] [Rank 0] step:9341/10000 train_time:2286153ms step_avg:244.74ms +[2025-07-17 10:50:13] [Rank 0] step:9361/10000 train_time:2291291ms step_avg:244.77ms +[2025-07-17 10:50:13] [Rank 0] step:9361/10000 train_time:2291291ms step_avg:244.77ms +[2025-07-17 
10:50:21] [Rank 0] PRINT: step:9375/10000 val_loss:4.5138 train_time:2295145ms step_avg:244.82ms +[2025-07-17 10:50:21] [Rank 0] PRINT: step:9375/10000 val_loss:4.5138 train_time:2295145ms step_avg:244.82ms +[2025-07-17 10:50:22] [Rank 0] step:9381/10000 train_time:2296421ms step_avg:244.79ms +[2025-07-17 10:50:22] [Rank 0] step:9381/10000 train_time:2296421ms step_avg:244.79ms +[2025-07-17 10:50:28] [Rank 0] step:9401/10000 train_time:2301546ms step_avg:244.82ms +[2025-07-17 10:50:28] [Rank 0] step:9401/10000 train_time:2301546ms step_avg:244.82ms +[2025-07-17 10:50:33] [Rank 0] step:9421/10000 train_time:2306684ms step_avg:244.84ms +[2025-07-17 10:50:33] [Rank 0] step:9421/10000 train_time:2306684ms step_avg:244.84ms +[2025-07-17 10:50:38] [Rank 0] step:9441/10000 train_time:2311824ms step_avg:244.87ms +[2025-07-17 10:50:38] [Rank 0] step:9441/10000 train_time:2311824ms step_avg:244.87ms +[2025-07-17 10:50:43] [Rank 0] step:9461/10000 train_time:2316970ms step_avg:244.90ms +[2025-07-17 10:50:43] [Rank 0] step:9461/10000 train_time:2316970ms step_avg:244.90ms +[2025-07-17 10:50:48] [Rank 0] step:9481/10000 train_time:2322105ms step_avg:244.92ms +[2025-07-17 10:50:48] [Rank 0] step:9481/10000 train_time:2322105ms step_avg:244.92ms +[2025-07-17 10:50:58] [Rank 0] PRINT: step:9500/10000 val_loss:4.4952 train_time:2327266ms step_avg:244.98ms +[2025-07-17 10:50:58] [Rank 0] PRINT: step:9500/10000 val_loss:4.4952 train_time:2327266ms step_avg:244.98ms +[2025-07-17 10:50:58] [Rank 0] step:9501/10000 train_time:2327287ms step_avg:244.95ms +[2025-07-17 10:50:58] [Rank 0] step:9501/10000 train_time:2327287ms step_avg:244.95ms +[2025-07-17 10:51:03] [Rank 0] step:9521/10000 train_time:2332395ms step_avg:244.97ms +[2025-07-17 10:51:03] [Rank 0] step:9521/10000 train_time:2332395ms step_avg:244.97ms +[2025-07-17 10:51:08] [Rank 0] step:9541/10000 train_time:2337532ms step_avg:245.00ms +[2025-07-17 10:51:08] [Rank 0] step:9541/10000 train_time:2337532ms step_avg:245.00ms 
+[2025-07-17 10:51:14] [Rank 0] step:9561/10000 train_time:2342655ms step_avg:245.02ms +[2025-07-17 10:51:14] [Rank 0] step:9561/10000 train_time:2342655ms step_avg:245.02ms +[2025-07-17 10:51:19] [Rank 0] step:9581/10000 train_time:2347772ms step_avg:245.04ms +[2025-07-17 10:51:19] [Rank 0] step:9581/10000 train_time:2347772ms step_avg:245.04ms +[2025-07-17 10:51:24] [Rank 0] step:9601/10000 train_time:2352888ms step_avg:245.07ms +[2025-07-17 10:51:24] [Rank 0] step:9601/10000 train_time:2352888ms step_avg:245.07ms +[2025-07-17 10:51:29] [Rank 0] step:9621/10000 train_time:2358037ms step_avg:245.09ms +[2025-07-17 10:51:29] [Rank 0] step:9621/10000 train_time:2358037ms step_avg:245.09ms +[2025-07-17 10:51:35] [Rank 0] PRINT: step:9625/10000 val_loss:4.4951 train_time:2359314ms step_avg:245.12ms +[2025-07-17 10:51:35] [Rank 0] PRINT: step:9625/10000 val_loss:4.4951 train_time:2359314ms step_avg:245.12ms +[2025-07-17 10:51:39] [Rank 0] step:9641/10000 train_time:2363181ms step_avg:245.12ms +[2025-07-17 10:51:39] [Rank 0] step:9641/10000 train_time:2363181ms step_avg:245.12ms +[2025-07-17 10:51:45] [Rank 0] step:9661/10000 train_time:2368863ms step_avg:245.20ms +[2025-07-17 10:51:45] [Rank 0] step:9661/10000 train_time:2368863ms step_avg:245.20ms +[2025-07-17 10:51:50] [Rank 0] step:9681/10000 train_time:2374045ms step_avg:245.23ms +[2025-07-17 10:51:50] [Rank 0] step:9681/10000 train_time:2374045ms step_avg:245.23ms +[2025-07-17 10:51:55] [Rank 0] step:9701/10000 train_time:2379235ms step_avg:245.26ms +[2025-07-17 10:51:55] [Rank 0] step:9701/10000 train_time:2379235ms step_avg:245.26ms +[2025-07-17 10:52:00] [Rank 0] step:9721/10000 train_time:2384402ms step_avg:245.28ms +[2025-07-17 10:52:00] [Rank 0] step:9721/10000 train_time:2384402ms step_avg:245.28ms +[2025-07-17 10:52:05] [Rank 0] step:9741/10000 train_time:2389591ms step_avg:245.31ms +[2025-07-17 10:52:05] [Rank 0] step:9741/10000 train_time:2389591ms step_avg:245.31ms +[2025-07-17 10:52:12] [Rank 0] PRINT: 
step:9750/10000 val_loss:4.5322 train_time:2392172ms step_avg:245.35ms +[2025-07-17 10:52:12] [Rank 0] PRINT: step:9750/10000 val_loss:4.5322 train_time:2392172ms step_avg:245.35ms +[2025-07-17 10:52:15] [Rank 0] step:9761/10000 train_time:2394750ms step_avg:245.34ms +[2025-07-17 10:52:15] [Rank 0] step:9761/10000 train_time:2394750ms step_avg:245.34ms +[2025-07-17 10:52:20] [Rank 0] step:9781/10000 train_time:2399929ms step_avg:245.37ms +[2025-07-17 10:52:20] [Rank 0] step:9781/10000 train_time:2399929ms step_avg:245.37ms +[2025-07-17 10:52:26] [Rank 0] step:9801/10000 train_time:2405100ms step_avg:245.39ms +[2025-07-17 10:52:26] [Rank 0] step:9801/10000 train_time:2405100ms step_avg:245.39ms +[2025-07-17 10:52:31] [Rank 0] step:9821/10000 train_time:2410281ms step_avg:245.42ms +[2025-07-17 10:52:31] [Rank 0] step:9821/10000 train_time:2410281ms step_avg:245.42ms +[2025-07-17 10:52:36] [Rank 0] step:9841/10000 train_time:2415448ms step_avg:245.45ms +[2025-07-17 10:52:36] [Rank 0] step:9841/10000 train_time:2415448ms step_avg:245.45ms +[2025-07-17 10:52:41] [Rank 0] step:9861/10000 train_time:2420617ms step_avg:245.47ms +[2025-07-17 10:52:41] [Rank 0] step:9861/10000 train_time:2420617ms step_avg:245.47ms +[2025-07-17 10:52:50] [Rank 0] PRINT: step:9875/10000 val_loss:4.5355 train_time:2424497ms step_avg:245.52ms +[2025-07-17 10:52:50] [Rank 0] PRINT: step:9875/10000 val_loss:4.5355 train_time:2424497ms step_avg:245.52ms +[2025-07-17 10:52:51] [Rank 0] step:9881/10000 train_time:2425795ms step_avg:245.50ms +[2025-07-17 10:52:51] [Rank 0] step:9881/10000 train_time:2425795ms step_avg:245.50ms +[2025-07-17 10:52:56] [Rank 0] step:9901/10000 train_time:2430971ms step_avg:245.53ms +[2025-07-17 10:52:56] [Rank 0] step:9901/10000 train_time:2430971ms step_avg:245.53ms +[2025-07-17 10:53:01] [Rank 0] step:9921/10000 train_time:2436163ms step_avg:245.56ms +[2025-07-17 10:53:01] [Rank 0] step:9921/10000 train_time:2436163ms step_avg:245.56ms +[2025-07-17 10:53:07] [Rank 0] 
step:9941/10000 train_time:2441379ms step_avg:245.59ms +[2025-07-17 10:53:07] [Rank 0] step:9941/10000 train_time:2441379ms step_avg:245.59ms +[2025-07-17 10:53:12] [Rank 0] step:9961/10000 train_time:2446576ms step_avg:245.62ms +[2025-07-17 10:53:12] [Rank 0] step:9961/10000 train_time:2446576ms step_avg:245.62ms +[2025-07-17 10:53:17] [Rank 0] step:9981/10000 train_time:2451794ms step_avg:245.65ms +[2025-07-17 10:53:17] [Rank 0] step:9981/10000 train_time:2451794ms step_avg:245.65ms +[2025-07-17 10:53:22] [Rank 0] step:10000/10000 train_time:2456704ms step_avg:245.67ms +[2025-07-17 10:53:22] [Rank 0] step:10000/10000 train_time:2456704ms step_avg:245.67ms +[2025-07-17 10:53:26] [Rank 0] PRINT: step:10000/10000 val_loss:4.5366 train_time:2456966ms step_avg:245.70ms +[2025-07-17 10:53:26] [Rank 0] PRINT: step:10000/10000 val_loss:4.5366 train_time:2456966ms step_avg:245.70ms +[2025-07-17 10:53:26] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 10:53:26 2025 --- +[2025-07-17 10:53:26] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 10:53:26 2025 --- +[2025-07-17 10:53:26] [Rank 0] PRINT: Peak memory allocated: 30687 MiB reserved: 34016 MiB +[2025-07-17 10:53:26] [Rank 0] PRINT: Peak memory allocated: 30687 MiB reserved: 34016 MiB diff --git a/logs_norope/diff_modes/mode_0_param_norope_seed_43/config.json b/logs_norope/diff_modes/mode_0_param_norope_seed_43/config.json new file mode 100644 index 0000000000000000000000000000000000000000..7992d5eeb1c079ab09e1724db4c046581bc10bde --- /dev/null +++ b/logs_norope/diff_modes/mode_0_param_norope_seed_43/config.json @@ -0,0 +1,22 @@ +{ + "cli_args": { + "unet": false, + "seed": 43, + "optimizer_mode": 0, + "model_parameterization": "norope" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 10485760, + "train_seq_len": 49152, + 
"val_seq_len": 262144, + "num_iterations": 10000, + "cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 125, + "save_checkpoint": false + }, + "run_uuid_for_log": "ddd2cb7b-5a44-427f-8a70-861f8c381d9a", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_norope/diff_modes/mode_0_param_norope_seed_43/training_log_ddd2cb7b-5a44-427f-8a70-861f8c381d9a.txt b/logs_norope/diff_modes/mode_0_param_norope_seed_43/training_log_ddd2cb7b-5a44-427f-8a70-861f8c381d9a.txt new file mode 100644 index 0000000000000000000000000000000000000000..31e6cc4511bf7b700b9929342bd397c2b572138a --- /dev/null +++ b/logs_norope/diff_modes/mode_0_param_norope_seed_43/training_log_ddd2cb7b-5a44-427f-8a70-861f8c381d9a.txt @@ -0,0 +1,2360 @@ +[2025-07-17 17:17:40] [Rank 0] PRINT: --- Script Start: Thu Jul 17 17:17:40 2025 --- +[2025-07-17 17:17:40] [Rank 0] PRINT: --- Script Start: Thu Jul 17 17:17:40 2025 --- +[2025-07-17 17:17:40] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=0, model_parameterization='norope') +[2025-07-17 17:17:40] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=0, model_parameterization='norope') +[2025-07-17 17:17:40] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 17:17:40] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 17:17:40] [Rank 0] PRINT: Using fixed seed: 43 +[2025-07-17 17:17:40] [Rank 0] PRINT: Using fixed seed: 43 +[2025-07-17 17:17:40] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_0_param_norope_seed_43 +[2025-07-17 17:17:40] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_0_param_norope_seed_43 +[2025-07-17 17:17:40] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path 
+import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * 
num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used 
for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + 
config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 17:17:40] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + 
num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + 
run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 17:17:40] [Rank 0] PRINT: Constructing model... +[2025-07-17 17:17:40] [Rank 0] PRINT: Constructing model... +[2025-07-17 17:17:43] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 17:17:43] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 17:17:43] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 17:17:43] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 17:17:43] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 17:17:43] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 17:17:43] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 17:17:43] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 17:17:43] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 0 +[2025-07-17 17:17:43] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 0 +[2025-07-17 17:17:43] [Rank 0] PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices. +[2025-07-17 17:17:43] [Rank 0] PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices. +[2025-07-17 17:17:43] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-07-17 17:17:43] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-07-17 17:17:43] [Rank 0] PRINT: Muon optimizer is active with 68 parameters. +[2025-07-17 17:17:43] [Rank 0] PRINT: Muon optimizer is active with 68 parameters. +[2025-07-17 17:17:43] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 17:17:43] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 17:17:43] [Rank 0] PRINT: Model compilation complete. +[2025-07-17 17:17:43] [Rank 0] PRINT: Model compilation complete. +[2025-07-17 17:17:43] [Rank 0] PRINT: Starting warmup... 
+[2025-07-17 17:17:43] [Rank 0] PRINT: Starting warmup... +[2025-07-17 17:18:49] [Rank 0] PRINT: Warmup complete. +[2025-07-17 17:18:49] [Rank 0] PRINT: Warmup complete. +[2025-07-17 17:18:50] [Rank 0] PRINT: Starting training... +[2025-07-17 17:18:50] [Rank 0] PRINT: Starting training... +[2025-07-17 17:18:59] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 17:18:59] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 17:19:04] [Rank 0] step:21/10000 train_time:4858ms step_avg:231.32ms +[2025-07-17 17:19:04] [Rank 0] step:21/10000 train_time:4858ms step_avg:231.32ms +[2025-07-17 17:19:09] [Rank 0] step:41/10000 train_time:9425ms step_avg:229.88ms +[2025-07-17 17:19:09] [Rank 0] step:41/10000 train_time:9425ms step_avg:229.88ms +[2025-07-17 17:19:13] [Rank 0] step:61/10000 train_time:13992ms step_avg:229.37ms +[2025-07-17 17:19:13] [Rank 0] step:61/10000 train_time:13992ms step_avg:229.37ms +[2025-07-17 17:19:18] [Rank 0] step:81/10000 train_time:18560ms step_avg:229.13ms +[2025-07-17 17:19:18] [Rank 0] step:81/10000 train_time:18560ms step_avg:229.13ms +[2025-07-17 17:19:22] [Rank 0] step:101/10000 train_time:23124ms step_avg:228.95ms +[2025-07-17 17:19:22] [Rank 0] step:101/10000 train_time:23124ms step_avg:228.95ms +[2025-07-17 17:19:27] [Rank 0] step:121/10000 train_time:27691ms step_avg:228.85ms +[2025-07-17 17:19:27] [Rank 0] step:121/10000 train_time:27691ms step_avg:228.85ms +[2025-07-17 17:19:32] [Rank 0] PRINT: step:125/10000 val_loss:5.1218 train_time:28834ms step_avg:230.68ms +[2025-07-17 17:19:32] [Rank 0] PRINT: step:125/10000 val_loss:5.1218 train_time:28834ms step_avg:230.68ms +[2025-07-17 17:19:36] [Rank 0] step:141/10000 train_time:32252ms step_avg:228.74ms +[2025-07-17 17:19:36] [Rank 0] step:141/10000 train_time:32252ms step_avg:228.74ms +[2025-07-17 17:19:40] [Rank 0] step:161/10000 train_time:36820ms step_avg:228.69ms +[2025-07-17 17:19:40] [Rank 0] step:161/10000 
train_time:36820ms step_avg:228.69ms +[2025-07-17 17:19:45] [Rank 0] step:181/10000 train_time:41388ms step_avg:228.67ms +[2025-07-17 17:19:45] [Rank 0] step:181/10000 train_time:41388ms step_avg:228.67ms +[2025-07-17 17:19:50] [Rank 0] step:201/10000 train_time:45960ms step_avg:228.66ms +[2025-07-17 17:19:50] [Rank 0] step:201/10000 train_time:45960ms step_avg:228.66ms +[2025-07-17 17:19:54] [Rank 0] step:221/10000 train_time:50531ms step_avg:228.65ms +[2025-07-17 17:19:54] [Rank 0] step:221/10000 train_time:50531ms step_avg:228.65ms +[2025-07-17 17:19:59] [Rank 0] step:241/10000 train_time:55106ms step_avg:228.65ms +[2025-07-17 17:19:59] [Rank 0] step:241/10000 train_time:55106ms step_avg:228.65ms +[2025-07-17 17:20:05] [Rank 0] PRINT: step:250/10000 val_loss:4.6788 train_time:57392ms step_avg:229.57ms +[2025-07-17 17:20:05] [Rank 0] PRINT: step:250/10000 val_loss:4.6788 train_time:57392ms step_avg:229.57ms +[2025-07-17 17:20:08] [Rank 0] step:261/10000 train_time:59674ms step_avg:228.64ms +[2025-07-17 17:20:08] [Rank 0] step:261/10000 train_time:59674ms step_avg:228.64ms +[2025-07-17 17:20:12] [Rank 0] step:281/10000 train_time:64248ms step_avg:228.64ms +[2025-07-17 17:20:12] [Rank 0] step:281/10000 train_time:64248ms step_avg:228.64ms +[2025-07-17 17:20:17] [Rank 0] step:301/10000 train_time:68821ms step_avg:228.64ms +[2025-07-17 17:20:17] [Rank 0] step:301/10000 train_time:68821ms step_avg:228.64ms +[2025-07-17 17:20:21] [Rank 0] step:321/10000 train_time:73393ms step_avg:228.64ms +[2025-07-17 17:20:21] [Rank 0] step:321/10000 train_time:73393ms step_avg:228.64ms +[2025-07-17 17:20:26] [Rank 0] step:341/10000 train_time:77962ms step_avg:228.63ms +[2025-07-17 17:20:26] [Rank 0] step:341/10000 train_time:77962ms step_avg:228.63ms +[2025-07-17 17:20:31] [Rank 0] step:361/10000 train_time:82533ms step_avg:228.62ms +[2025-07-17 17:20:31] [Rank 0] step:361/10000 train_time:82533ms step_avg:228.62ms +[2025-07-17 17:20:38] [Rank 0] PRINT: step:375/10000 
val_loss:4.5453 train_time:85964ms step_avg:229.24ms +[2025-07-17 17:20:38] [Rank 0] PRINT: step:375/10000 val_loss:4.5453 train_time:85964ms step_avg:229.24ms +[2025-07-17 17:20:40] [Rank 0] step:381/10000 train_time:87106ms step_avg:228.62ms +[2025-07-17 17:20:40] [Rank 0] step:381/10000 train_time:87106ms step_avg:228.62ms +[2025-07-17 17:20:44] [Rank 0] step:401/10000 train_time:91679ms step_avg:228.63ms +[2025-07-17 17:20:44] [Rank 0] step:401/10000 train_time:91679ms step_avg:228.63ms +[2025-07-17 17:20:49] [Rank 0] step:421/10000 train_time:96252ms step_avg:228.63ms +[2025-07-17 17:20:49] [Rank 0] step:421/10000 train_time:96252ms step_avg:228.63ms +[2025-07-17 17:20:53] [Rank 0] step:441/10000 train_time:100824ms step_avg:228.63ms +[2025-07-17 17:20:53] [Rank 0] step:441/10000 train_time:100824ms step_avg:228.63ms +[2025-07-17 17:20:58] [Rank 0] step:461/10000 train_time:105398ms step_avg:228.63ms +[2025-07-17 17:20:58] [Rank 0] step:461/10000 train_time:105398ms step_avg:228.63ms +[2025-07-17 17:21:02] [Rank 0] step:481/10000 train_time:109971ms step_avg:228.63ms +[2025-07-17 17:21:02] [Rank 0] step:481/10000 train_time:109971ms step_avg:228.63ms +[2025-07-17 17:21:11] [Rank 0] PRINT: step:500/10000 val_loss:4.4978 train_time:114544ms step_avg:229.09ms +[2025-07-17 17:21:11] [Rank 0] PRINT: step:500/10000 val_loss:4.4978 train_time:114544ms step_avg:229.09ms +[2025-07-17 17:21:12] [Rank 0] step:501/10000 train_time:114563ms step_avg:228.67ms +[2025-07-17 17:21:12] [Rank 0] step:501/10000 train_time:114563ms step_avg:228.67ms +[2025-07-17 17:21:17] [Rank 0] step:521/10000 train_time:119626ms step_avg:229.61ms +[2025-07-17 17:21:17] [Rank 0] step:521/10000 train_time:119626ms step_avg:229.61ms +[2025-07-17 17:21:21] [Rank 0] step:541/10000 train_time:124200ms step_avg:229.57ms +[2025-07-17 17:21:21] [Rank 0] step:541/10000 train_time:124200ms step_avg:229.57ms +[2025-07-17 17:21:26] [Rank 0] step:561/10000 train_time:128775ms step_avg:229.54ms +[2025-07-17 
17:21:26] [Rank 0] step:561/10000 train_time:128775ms step_avg:229.54ms +[2025-07-17 17:21:30] [Rank 0] step:581/10000 train_time:133350ms step_avg:229.52ms +[2025-07-17 17:21:30] [Rank 0] step:581/10000 train_time:133350ms step_avg:229.52ms +[2025-07-17 17:21:35] [Rank 0] step:601/10000 train_time:137929ms step_avg:229.50ms +[2025-07-17 17:21:35] [Rank 0] step:601/10000 train_time:137929ms step_avg:229.50ms +[2025-07-17 17:21:39] [Rank 0] step:621/10000 train_time:142507ms step_avg:229.48ms +[2025-07-17 17:21:39] [Rank 0] step:621/10000 train_time:142507ms step_avg:229.48ms +[2025-07-17 17:21:44] [Rank 0] PRINT: step:625/10000 val_loss:4.8216 train_time:143653ms step_avg:229.84ms +[2025-07-17 17:21:44] [Rank 0] PRINT: step:625/10000 val_loss:4.8216 train_time:143653ms step_avg:229.84ms +[2025-07-17 17:21:48] [Rank 0] step:641/10000 train_time:147087ms step_avg:229.47ms +[2025-07-17 17:21:48] [Rank 0] step:641/10000 train_time:147087ms step_avg:229.47ms +[2025-07-17 17:21:53] [Rank 0] step:661/10000 train_time:151667ms step_avg:229.45ms +[2025-07-17 17:21:53] [Rank 0] step:661/10000 train_time:151667ms step_avg:229.45ms +[2025-07-17 17:21:57] [Rank 0] step:681/10000 train_time:156246ms step_avg:229.44ms +[2025-07-17 17:21:57] [Rank 0] step:681/10000 train_time:156246ms step_avg:229.44ms +[2025-07-17 17:22:02] [Rank 0] step:701/10000 train_time:160830ms step_avg:229.43ms +[2025-07-17 17:22:02] [Rank 0] step:701/10000 train_time:160830ms step_avg:229.43ms +[2025-07-17 17:22:06] [Rank 0] step:721/10000 train_time:165411ms step_avg:229.42ms +[2025-07-17 17:22:06] [Rank 0] step:721/10000 train_time:165411ms step_avg:229.42ms +[2025-07-17 17:22:11] [Rank 0] step:741/10000 train_time:169994ms step_avg:229.41ms +[2025-07-17 17:22:11] [Rank 0] step:741/10000 train_time:169994ms step_avg:229.41ms +[2025-07-17 17:22:18] [Rank 0] PRINT: step:750/10000 val_loss:4.5269 train_time:172302ms step_avg:229.74ms +[2025-07-17 17:22:18] [Rank 0] PRINT: step:750/10000 val_loss:4.5269 
train_time:172302ms step_avg:229.74ms +[2025-07-17 17:22:20] [Rank 0] step:761/10000 train_time:174608ms step_avg:229.45ms +[2025-07-17 17:22:20] [Rank 0] step:761/10000 train_time:174608ms step_avg:229.45ms +[2025-07-17 17:22:25] [Rank 0] step:781/10000 train_time:179222ms step_avg:229.48ms +[2025-07-17 17:22:25] [Rank 0] step:781/10000 train_time:179222ms step_avg:229.48ms +[2025-07-17 17:22:29] [Rank 0] step:801/10000 train_time:183835ms step_avg:229.51ms +[2025-07-17 17:22:29] [Rank 0] step:801/10000 train_time:183835ms step_avg:229.51ms +[2025-07-17 17:22:34] [Rank 0] step:821/10000 train_time:188454ms step_avg:229.54ms +[2025-07-17 17:22:34] [Rank 0] step:821/10000 train_time:188454ms step_avg:229.54ms +[2025-07-17 17:22:39] [Rank 0] step:841/10000 train_time:193073ms step_avg:229.58ms +[2025-07-17 17:22:39] [Rank 0] step:841/10000 train_time:193073ms step_avg:229.58ms +[2025-07-17 17:22:43] [Rank 0] step:861/10000 train_time:197688ms step_avg:229.60ms +[2025-07-17 17:22:43] [Rank 0] step:861/10000 train_time:197688ms step_avg:229.60ms +[2025-07-17 17:22:51] [Rank 0] PRINT: step:875/10000 val_loss:4.5262 train_time:201152ms step_avg:229.89ms +[2025-07-17 17:22:51] [Rank 0] PRINT: step:875/10000 val_loss:4.5262 train_time:201152ms step_avg:229.89ms +[2025-07-17 17:22:52] [Rank 0] step:881/10000 train_time:202303ms step_avg:229.63ms +[2025-07-17 17:22:52] [Rank 0] step:881/10000 train_time:202303ms step_avg:229.63ms +[2025-07-17 17:22:57] [Rank 0] step:901/10000 train_time:206923ms step_avg:229.66ms +[2025-07-17 17:22:57] [Rank 0] step:901/10000 train_time:206923ms step_avg:229.66ms +[2025-07-17 17:23:02] [Rank 0] step:921/10000 train_time:211544ms step_avg:229.69ms +[2025-07-17 17:23:02] [Rank 0] step:921/10000 train_time:211544ms step_avg:229.69ms +[2025-07-17 17:23:06] [Rank 0] step:941/10000 train_time:216168ms step_avg:229.72ms +[2025-07-17 17:23:06] [Rank 0] step:941/10000 train_time:216168ms step_avg:229.72ms +[2025-07-17 17:23:11] [Rank 0] 
step:961/10000 train_time:220788ms step_avg:229.75ms +[2025-07-17 17:23:11] [Rank 0] step:961/10000 train_time:220788ms step_avg:229.75ms +[2025-07-17 17:23:15] [Rank 0] step:981/10000 train_time:225413ms step_avg:229.78ms +[2025-07-17 17:23:15] [Rank 0] step:981/10000 train_time:225413ms step_avg:229.78ms +[2025-07-17 17:23:24] [Rank 0] PRINT: step:1000/10000 val_loss:4.5643 train_time:230035ms step_avg:230.04ms +[2025-07-17 17:23:24] [Rank 0] PRINT: step:1000/10000 val_loss:4.5643 train_time:230035ms step_avg:230.04ms +[2025-07-17 17:23:24] [Rank 0] step:1001/10000 train_time:230054ms step_avg:229.82ms +[2025-07-17 17:23:24] [Rank 0] step:1001/10000 train_time:230054ms step_avg:229.82ms +[2025-07-17 17:23:30] [Rank 0] step:1021/10000 train_time:235188ms step_avg:230.35ms +[2025-07-17 17:23:30] [Rank 0] step:1021/10000 train_time:235188ms step_avg:230.35ms +[2025-07-17 17:23:34] [Rank 0] step:1041/10000 train_time:239812ms step_avg:230.37ms +[2025-07-17 17:23:34] [Rank 0] step:1041/10000 train_time:239812ms step_avg:230.37ms +[2025-07-17 17:23:39] [Rank 0] step:1061/10000 train_time:244435ms step_avg:230.38ms +[2025-07-17 17:23:39] [Rank 0] step:1061/10000 train_time:244435ms step_avg:230.38ms +[2025-07-17 17:23:44] [Rank 0] step:1081/10000 train_time:249060ms step_avg:230.40ms +[2025-07-17 17:23:44] [Rank 0] step:1081/10000 train_time:249060ms step_avg:230.40ms +[2025-07-17 17:23:48] [Rank 0] step:1101/10000 train_time:253682ms step_avg:230.41ms +[2025-07-17 17:23:48] [Rank 0] step:1101/10000 train_time:253682ms step_avg:230.41ms +[2025-07-17 17:23:53] [Rank 0] step:1121/10000 train_time:258307ms step_avg:230.43ms +[2025-07-17 17:23:53] [Rank 0] step:1121/10000 train_time:258307ms step_avg:230.43ms +[2025-07-17 17:23:58] [Rank 0] PRINT: step:1125/10000 val_loss:4.5904 train_time:259467ms step_avg:230.64ms +[2025-07-17 17:23:58] [Rank 0] PRINT: step:1125/10000 val_loss:4.5904 train_time:259467ms step_avg:230.64ms +[2025-07-17 17:24:02] [Rank 0] step:1141/10000 
train_time:262935ms step_avg:230.44ms +[2025-07-17 17:24:02] [Rank 0] step:1141/10000 train_time:262935ms step_avg:230.44ms +[2025-07-17 17:24:06] [Rank 0] step:1161/10000 train_time:267560ms step_avg:230.46ms +[2025-07-17 17:24:06] [Rank 0] step:1161/10000 train_time:267560ms step_avg:230.46ms +[2025-07-17 17:24:11] [Rank 0] step:1181/10000 train_time:272187ms step_avg:230.47ms +[2025-07-17 17:24:11] [Rank 0] step:1181/10000 train_time:272187ms step_avg:230.47ms +[2025-07-17 17:24:16] [Rank 0] step:1201/10000 train_time:276817ms step_avg:230.49ms +[2025-07-17 17:24:16] [Rank 0] step:1201/10000 train_time:276817ms step_avg:230.49ms +[2025-07-17 17:24:20] [Rank 0] step:1221/10000 train_time:281447ms step_avg:230.51ms +[2025-07-17 17:24:20] [Rank 0] step:1221/10000 train_time:281447ms step_avg:230.51ms +[2025-07-17 17:24:25] [Rank 0] step:1241/10000 train_time:286081ms step_avg:230.52ms +[2025-07-17 17:24:25] [Rank 0] step:1241/10000 train_time:286081ms step_avg:230.52ms +[2025-07-17 17:24:31] [Rank 0] PRINT: step:1250/10000 val_loss:4.6458 train_time:288400ms step_avg:230.72ms +[2025-07-17 17:24:31] [Rank 0] PRINT: step:1250/10000 val_loss:4.6458 train_time:288400ms step_avg:230.72ms +[2025-07-17 17:24:34] [Rank 0] step:1261/10000 train_time:290713ms step_avg:230.54ms +[2025-07-17 17:24:34] [Rank 0] step:1261/10000 train_time:290713ms step_avg:230.54ms +[2025-07-17 17:24:39] [Rank 0] step:1281/10000 train_time:295348ms step_avg:230.56ms +[2025-07-17 17:24:39] [Rank 0] step:1281/10000 train_time:295348ms step_avg:230.56ms +[2025-07-17 17:24:43] [Rank 0] step:1301/10000 train_time:299983ms step_avg:230.58ms +[2025-07-17 17:24:43] [Rank 0] step:1301/10000 train_time:299983ms step_avg:230.58ms +[2025-07-17 17:24:48] [Rank 0] step:1321/10000 train_time:304621ms step_avg:230.60ms +[2025-07-17 17:24:48] [Rank 0] step:1321/10000 train_time:304621ms step_avg:230.60ms +[2025-07-17 17:24:53] [Rank 0] step:1341/10000 train_time:309256ms step_avg:230.62ms +[2025-07-17 17:24:53] 
[Rank 0] step:1341/10000 train_time:309256ms step_avg:230.62ms +[2025-07-17 17:24:57] [Rank 0] step:1361/10000 train_time:313891ms step_avg:230.63ms +[2025-07-17 17:24:57] [Rank 0] step:1361/10000 train_time:313891ms step_avg:230.63ms +[2025-07-17 17:25:05] [Rank 0] PRINT: step:1375/10000 val_loss:4.6556 train_time:317369ms step_avg:230.81ms +[2025-07-17 17:25:05] [Rank 0] PRINT: step:1375/10000 val_loss:4.6556 train_time:317369ms step_avg:230.81ms +[2025-07-17 17:25:06] [Rank 0] step:1381/10000 train_time:318527ms step_avg:230.65ms +[2025-07-17 17:25:06] [Rank 0] step:1381/10000 train_time:318527ms step_avg:230.65ms +[2025-07-17 17:25:11] [Rank 0] step:1401/10000 train_time:323169ms step_avg:230.67ms +[2025-07-17 17:25:11] [Rank 0] step:1401/10000 train_time:323169ms step_avg:230.67ms +[2025-07-17 17:25:16] [Rank 0] step:1421/10000 train_time:327811ms step_avg:230.69ms +[2025-07-17 17:25:16] [Rank 0] step:1421/10000 train_time:327811ms step_avg:230.69ms +[2025-07-17 17:25:20] [Rank 0] step:1441/10000 train_time:332453ms step_avg:230.71ms +[2025-07-17 17:25:20] [Rank 0] step:1441/10000 train_time:332453ms step_avg:230.71ms +[2025-07-17 17:25:25] [Rank 0] step:1461/10000 train_time:337092ms step_avg:230.73ms +[2025-07-17 17:25:25] [Rank 0] step:1461/10000 train_time:337092ms step_avg:230.73ms +[2025-07-17 17:25:30] [Rank 0] step:1481/10000 train_time:341731ms step_avg:230.74ms +[2025-07-17 17:25:30] [Rank 0] step:1481/10000 train_time:341731ms step_avg:230.74ms +[2025-07-17 17:25:38] [Rank 0] PRINT: step:1500/10000 val_loss:4.6144 train_time:346390ms step_avg:230.93ms +[2025-07-17 17:25:38] [Rank 0] PRINT: step:1500/10000 val_loss:4.6144 train_time:346390ms step_avg:230.93ms +[2025-07-17 17:25:38] [Rank 0] step:1501/10000 train_time:346409ms step_avg:230.79ms +[2025-07-17 17:25:38] [Rank 0] step:1501/10000 train_time:346409ms step_avg:230.79ms +[2025-07-17 17:25:43] [Rank 0] step:1521/10000 train_time:351053ms step_avg:230.80ms +[2025-07-17 17:25:43] [Rank 0] 
step:1521/10000 train_time:351053ms step_avg:230.80ms +[2025-07-17 17:25:48] [Rank 0] step:1541/10000 train_time:356225ms step_avg:231.16ms +[2025-07-17 17:25:48] [Rank 0] step:1541/10000 train_time:356225ms step_avg:231.16ms +[2025-07-17 17:25:53] [Rank 0] step:1561/10000 train_time:360892ms step_avg:231.19ms +[2025-07-17 17:25:53] [Rank 0] step:1561/10000 train_time:360892ms step_avg:231.19ms +[2025-07-17 17:25:57] [Rank 0] step:1581/10000 train_time:365556ms step_avg:231.22ms +[2025-07-17 17:25:57] [Rank 0] step:1581/10000 train_time:365556ms step_avg:231.22ms +[2025-07-17 17:26:02] [Rank 0] step:1601/10000 train_time:370215ms step_avg:231.24ms +[2025-07-17 17:26:02] [Rank 0] step:1601/10000 train_time:370215ms step_avg:231.24ms +[2025-07-17 17:26:07] [Rank 0] step:1621/10000 train_time:374877ms step_avg:231.26ms +[2025-07-17 17:26:07] [Rank 0] step:1621/10000 train_time:374877ms step_avg:231.26ms +[2025-07-17 17:26:12] [Rank 0] PRINT: step:1625/10000 val_loss:4.6335 train_time:376047ms step_avg:231.41ms +[2025-07-17 17:26:12] [Rank 0] PRINT: step:1625/10000 val_loss:4.6335 train_time:376047ms step_avg:231.41ms +[2025-07-17 17:26:16] [Rank 0] step:1641/10000 train_time:379535ms step_avg:231.28ms +[2025-07-17 17:26:16] [Rank 0] step:1641/10000 train_time:379535ms step_avg:231.28ms +[2025-07-17 17:26:20] [Rank 0] step:1661/10000 train_time:384190ms step_avg:231.30ms +[2025-07-17 17:26:20] [Rank 0] step:1661/10000 train_time:384190ms step_avg:231.30ms +[2025-07-17 17:26:25] [Rank 0] step:1681/10000 train_time:388845ms step_avg:231.32ms +[2025-07-17 17:26:25] [Rank 0] step:1681/10000 train_time:388845ms step_avg:231.32ms +[2025-07-17 17:26:30] [Rank 0] step:1701/10000 train_time:393500ms step_avg:231.33ms +[2025-07-17 17:26:30] [Rank 0] step:1701/10000 train_time:393500ms step_avg:231.33ms +[2025-07-17 17:26:34] [Rank 0] step:1721/10000 train_time:398158ms step_avg:231.35ms +[2025-07-17 17:26:34] [Rank 0] step:1721/10000 train_time:398158ms step_avg:231.35ms 
+[2025-07-17 17:26:39] [Rank 0] step:1741/10000 train_time:402818ms step_avg:231.37ms +[2025-07-17 17:26:39] [Rank 0] step:1741/10000 train_time:402818ms step_avg:231.37ms +[2025-07-17 17:26:45] [Rank 0] PRINT: step:1750/10000 val_loss:4.6024 train_time:405147ms step_avg:231.51ms +[2025-07-17 17:26:45] [Rank 0] PRINT: step:1750/10000 val_loss:4.6024 train_time:405147ms step_avg:231.51ms +[2025-07-17 17:26:48] [Rank 0] step:1761/10000 train_time:407474ms step_avg:231.39ms +[2025-07-17 17:26:48] [Rank 0] step:1761/10000 train_time:407474ms step_avg:231.39ms +[2025-07-17 17:26:52] [Rank 0] step:1781/10000 train_time:412135ms step_avg:231.41ms +[2025-07-17 17:26:52] [Rank 0] step:1781/10000 train_time:412135ms step_avg:231.41ms +[2025-07-17 17:26:57] [Rank 0] step:1801/10000 train_time:416800ms step_avg:231.43ms +[2025-07-17 17:26:57] [Rank 0] step:1801/10000 train_time:416800ms step_avg:231.43ms +[2025-07-17 17:27:02] [Rank 0] step:1821/10000 train_time:421468ms step_avg:231.45ms +[2025-07-17 17:27:02] [Rank 0] step:1821/10000 train_time:421468ms step_avg:231.45ms +[2025-07-17 17:27:06] [Rank 0] step:1841/10000 train_time:426135ms step_avg:231.47ms +[2025-07-17 17:27:06] [Rank 0] step:1841/10000 train_time:426135ms step_avg:231.47ms +[2025-07-17 17:27:11] [Rank 0] step:1861/10000 train_time:430802ms step_avg:231.49ms +[2025-07-17 17:27:11] [Rank 0] step:1861/10000 train_time:430802ms step_avg:231.49ms +[2025-07-17 17:27:18] [Rank 0] PRINT: step:1875/10000 val_loss:4.6862 train_time:434304ms step_avg:231.63ms +[2025-07-17 17:27:18] [Rank 0] PRINT: step:1875/10000 val_loss:4.6862 train_time:434304ms step_avg:231.63ms +[2025-07-17 17:27:20] [Rank 0] step:1881/10000 train_time:435469ms step_avg:231.51ms +[2025-07-17 17:27:20] [Rank 0] step:1881/10000 train_time:435469ms step_avg:231.51ms +[2025-07-17 17:27:25] [Rank 0] step:1901/10000 train_time:440137ms step_avg:231.53ms +[2025-07-17 17:27:25] [Rank 0] step:1901/10000 train_time:440137ms step_avg:231.53ms +[2025-07-17 
17:27:29] [Rank 0] step:1921/10000 train_time:444808ms step_avg:231.55ms +[2025-07-17 17:27:29] [Rank 0] step:1921/10000 train_time:444808ms step_avg:231.55ms +[2025-07-17 17:27:34] [Rank 0] step:1941/10000 train_time:449475ms step_avg:231.57ms +[2025-07-17 17:27:34] [Rank 0] step:1941/10000 train_time:449475ms step_avg:231.57ms +[2025-07-17 17:27:39] [Rank 0] step:1961/10000 train_time:454147ms step_avg:231.59ms +[2025-07-17 17:27:39] [Rank 0] step:1961/10000 train_time:454147ms step_avg:231.59ms +[2025-07-17 17:27:43] [Rank 0] step:1981/10000 train_time:458819ms step_avg:231.61ms +[2025-07-17 17:27:43] [Rank 0] step:1981/10000 train_time:458819ms step_avg:231.61ms +[2025-07-17 17:27:52] [Rank 0] PRINT: step:2000/10000 val_loss:4.7139 train_time:463484ms step_avg:231.74ms +[2025-07-17 17:27:52] [Rank 0] PRINT: step:2000/10000 val_loss:4.7139 train_time:463484ms step_avg:231.74ms +[2025-07-17 17:27:52] [Rank 0] step:2001/10000 train_time:463503ms step_avg:231.64ms +[2025-07-17 17:27:52] [Rank 0] step:2001/10000 train_time:463503ms step_avg:231.64ms +[2025-07-17 17:27:57] [Rank 0] step:2021/10000 train_time:468150ms step_avg:231.64ms +[2025-07-17 17:27:57] [Rank 0] step:2021/10000 train_time:468150ms step_avg:231.64ms +[2025-07-17 17:28:02] [Rank 0] step:2041/10000 train_time:473331ms step_avg:231.91ms +[2025-07-17 17:28:02] [Rank 0] step:2041/10000 train_time:473331ms step_avg:231.91ms +[2025-07-17 17:28:06] [Rank 0] step:2061/10000 train_time:477997ms step_avg:231.92ms +[2025-07-17 17:28:06] [Rank 0] step:2061/10000 train_time:477997ms step_avg:231.92ms +[2025-07-17 17:28:11] [Rank 0] step:2081/10000 train_time:482667ms step_avg:231.94ms +[2025-07-17 17:28:11] [Rank 0] step:2081/10000 train_time:482667ms step_avg:231.94ms +[2025-07-17 17:28:16] [Rank 0] step:2101/10000 train_time:487334ms step_avg:231.95ms +[2025-07-17 17:28:16] [Rank 0] step:2101/10000 train_time:487334ms step_avg:231.95ms +[2025-07-17 17:28:20] [Rank 0] step:2121/10000 train_time:492004ms 
step_avg:231.97ms +[2025-07-17 17:28:20] [Rank 0] step:2121/10000 train_time:492004ms step_avg:231.97ms +[2025-07-17 17:28:25] [Rank 0] PRINT: step:2125/10000 val_loss:4.6562 train_time:493174ms step_avg:232.08ms +[2025-07-17 17:28:25] [Rank 0] PRINT: step:2125/10000 val_loss:4.6562 train_time:493174ms step_avg:232.08ms +[2025-07-17 17:28:29] [Rank 0] step:2141/10000 train_time:496672ms step_avg:231.98ms +[2025-07-17 17:28:29] [Rank 0] step:2141/10000 train_time:496672ms step_avg:231.98ms +[2025-07-17 17:28:34] [Rank 0] step:2161/10000 train_time:501336ms step_avg:231.99ms +[2025-07-17 17:28:34] [Rank 0] step:2161/10000 train_time:501336ms step_avg:231.99ms +[2025-07-17 17:28:39] [Rank 0] step:2181/10000 train_time:506003ms step_avg:232.01ms +[2025-07-17 17:28:39] [Rank 0] step:2181/10000 train_time:506003ms step_avg:232.01ms +[2025-07-17 17:28:43] [Rank 0] step:2201/10000 train_time:510671ms step_avg:232.02ms +[2025-07-17 17:28:43] [Rank 0] step:2201/10000 train_time:510671ms step_avg:232.02ms +[2025-07-17 17:28:48] [Rank 0] step:2221/10000 train_time:515339ms step_avg:232.03ms +[2025-07-17 17:28:48] [Rank 0] step:2221/10000 train_time:515339ms step_avg:232.03ms +[2025-07-17 17:28:53] [Rank 0] step:2241/10000 train_time:520102ms step_avg:232.08ms +[2025-07-17 17:28:53] [Rank 0] step:2241/10000 train_time:520102ms step_avg:232.08ms +[2025-07-17 17:28:59] [Rank 0] PRINT: step:2250/10000 val_loss:4.1042 train_time:522493ms step_avg:232.22ms +[2025-07-17 17:28:59] [Rank 0] PRINT: step:2250/10000 val_loss:4.1042 train_time:522493ms step_avg:232.22ms +[2025-07-17 17:29:02] [Rank 0] step:2261/10000 train_time:524876ms step_avg:232.14ms +[2025-07-17 17:29:02] [Rank 0] step:2261/10000 train_time:524876ms step_avg:232.14ms +[2025-07-17 17:29:07] [Rank 0] step:2281/10000 train_time:529657ms step_avg:232.20ms +[2025-07-17 17:29:07] [Rank 0] step:2281/10000 train_time:529657ms step_avg:232.20ms +[2025-07-17 17:29:12] [Rank 0] step:2301/10000 train_time:534440ms 
step_avg:232.26ms +[2025-07-17 17:29:12] [Rank 0] step:2301/10000 train_time:534440ms step_avg:232.26ms +[2025-07-17 17:29:16] [Rank 0] step:2321/10000 train_time:539220ms step_avg:232.32ms +[2025-07-17 17:29:16] [Rank 0] step:2321/10000 train_time:539220ms step_avg:232.32ms +[2025-07-17 17:29:21] [Rank 0] step:2341/10000 train_time:544003ms step_avg:232.38ms +[2025-07-17 17:29:21] [Rank 0] step:2341/10000 train_time:544003ms step_avg:232.38ms +[2025-07-17 17:29:26] [Rank 0] step:2361/10000 train_time:548784ms step_avg:232.44ms +[2025-07-17 17:29:26] [Rank 0] step:2361/10000 train_time:548784ms step_avg:232.44ms +[2025-07-17 17:29:34] [Rank 0] PRINT: step:2375/10000 val_loss:4.2375 train_time:552366ms step_avg:232.58ms +[2025-07-17 17:29:34] [Rank 0] PRINT: step:2375/10000 val_loss:4.2375 train_time:552366ms step_avg:232.58ms +[2025-07-17 17:29:35] [Rank 0] step:2381/10000 train_time:553555ms step_avg:232.49ms +[2025-07-17 17:29:35] [Rank 0] step:2381/10000 train_time:553555ms step_avg:232.49ms +[2025-07-17 17:29:40] [Rank 0] step:2401/10000 train_time:558328ms step_avg:232.54ms +[2025-07-17 17:29:40] [Rank 0] step:2401/10000 train_time:558328ms step_avg:232.54ms +[2025-07-17 17:29:45] [Rank 0] step:2421/10000 train_time:563103ms step_avg:232.59ms +[2025-07-17 17:29:45] [Rank 0] step:2421/10000 train_time:563103ms step_avg:232.59ms +[2025-07-17 17:29:49] [Rank 0] step:2441/10000 train_time:567877ms step_avg:232.64ms +[2025-07-17 17:29:49] [Rank 0] step:2441/10000 train_time:567877ms step_avg:232.64ms +[2025-07-17 17:29:54] [Rank 0] step:2461/10000 train_time:572652ms step_avg:232.69ms +[2025-07-17 17:29:54] [Rank 0] step:2461/10000 train_time:572652ms step_avg:232.69ms +[2025-07-17 17:29:59] [Rank 0] step:2481/10000 train_time:577431ms step_avg:232.74ms +[2025-07-17 17:29:59] [Rank 0] step:2481/10000 train_time:577431ms step_avg:232.74ms +[2025-07-17 17:30:08] [Rank 0] PRINT: step:2500/10000 val_loss:4.3104 train_time:582207ms step_avg:232.88ms +[2025-07-17 
17:30:08] [Rank 0] PRINT: step:2500/10000 val_loss:4.3104 train_time:582207ms step_avg:232.88ms +[2025-07-17 17:30:08] [Rank 0] step:2501/10000 train_time:582226ms step_avg:232.80ms +[2025-07-17 17:30:08] [Rank 0] step:2501/10000 train_time:582226ms step_avg:232.80ms +[2025-07-17 17:30:13] [Rank 0] step:2521/10000 train_time:586980ms step_avg:232.84ms +[2025-07-17 17:30:13] [Rank 0] step:2521/10000 train_time:586980ms step_avg:232.84ms +[2025-07-17 17:30:18] [Rank 0] step:2541/10000 train_time:591757ms step_avg:232.88ms +[2025-07-17 17:30:18] [Rank 0] step:2541/10000 train_time:591757ms step_avg:232.88ms +[2025-07-17 17:30:23] [Rank 0] step:2561/10000 train_time:596644ms step_avg:232.97ms +[2025-07-17 17:30:23] [Rank 0] step:2561/10000 train_time:596644ms step_avg:232.97ms +[2025-07-17 17:30:28] [Rank 0] step:2581/10000 train_time:601424ms step_avg:233.02ms +[2025-07-17 17:30:28] [Rank 0] step:2581/10000 train_time:601424ms step_avg:233.02ms +[2025-07-17 17:30:32] [Rank 0] step:2601/10000 train_time:606204ms step_avg:233.07ms +[2025-07-17 17:30:32] [Rank 0] step:2601/10000 train_time:606204ms step_avg:233.07ms +[2025-07-17 17:30:37] [Rank 0] step:2621/10000 train_time:610983ms step_avg:233.11ms +[2025-07-17 17:30:37] [Rank 0] step:2621/10000 train_time:610983ms step_avg:233.11ms +[2025-07-17 17:30:43] [Rank 0] PRINT: step:2625/10000 val_loss:4.3989 train_time:612181ms step_avg:233.21ms +[2025-07-17 17:30:43] [Rank 0] PRINT: step:2625/10000 val_loss:4.3989 train_time:612181ms step_avg:233.21ms +[2025-07-17 17:30:46] [Rank 0] step:2641/10000 train_time:615760ms step_avg:233.15ms +[2025-07-17 17:30:46] [Rank 0] step:2641/10000 train_time:615760ms step_avg:233.15ms +[2025-07-17 17:30:51] [Rank 0] step:2661/10000 train_time:620541ms step_avg:233.20ms +[2025-07-17 17:30:51] [Rank 0] step:2661/10000 train_time:620541ms step_avg:233.20ms +[2025-07-17 17:30:56] [Rank 0] step:2681/10000 train_time:625316ms step_avg:233.24ms +[2025-07-17 17:30:56] [Rank 0] step:2681/10000 
train_time:625316ms step_avg:233.24ms +[2025-07-17 17:31:01] [Rank 0] step:2701/10000 train_time:630092ms step_avg:233.28ms +[2025-07-17 17:31:01] [Rank 0] step:2701/10000 train_time:630092ms step_avg:233.28ms +[2025-07-17 17:31:06] [Rank 0] step:2721/10000 train_time:634866ms step_avg:233.32ms +[2025-07-17 17:31:06] [Rank 0] step:2721/10000 train_time:634866ms step_avg:233.32ms +[2025-07-17 17:31:10] [Rank 0] step:2741/10000 train_time:639642ms step_avg:233.36ms +[2025-07-17 17:31:10] [Rank 0] step:2741/10000 train_time:639642ms step_avg:233.36ms +[2025-07-17 17:31:17] [Rank 0] PRINT: step:2750/10000 val_loss:4.7021 train_time:642031ms step_avg:233.47ms +[2025-07-17 17:31:17] [Rank 0] PRINT: step:2750/10000 val_loss:4.7021 train_time:642031ms step_avg:233.47ms +[2025-07-17 17:31:20] [Rank 0] step:2761/10000 train_time:644416ms step_avg:233.40ms +[2025-07-17 17:31:20] [Rank 0] step:2761/10000 train_time:644416ms step_avg:233.40ms +[2025-07-17 17:31:24] [Rank 0] step:2781/10000 train_time:649190ms step_avg:233.44ms +[2025-07-17 17:31:24] [Rank 0] step:2781/10000 train_time:649190ms step_avg:233.44ms +[2025-07-17 17:31:29] [Rank 0] step:2801/10000 train_time:653962ms step_avg:233.47ms +[2025-07-17 17:31:29] [Rank 0] step:2801/10000 train_time:653962ms step_avg:233.47ms +[2025-07-17 17:31:34] [Rank 0] step:2821/10000 train_time:658736ms step_avg:233.51ms +[2025-07-17 17:31:34] [Rank 0] step:2821/10000 train_time:658736ms step_avg:233.51ms +[2025-07-17 17:31:39] [Rank 0] step:2841/10000 train_time:663509ms step_avg:233.55ms +[2025-07-17 17:31:39] [Rank 0] step:2841/10000 train_time:663509ms step_avg:233.55ms +[2025-07-17 17:31:43] [Rank 0] step:2861/10000 train_time:668281ms step_avg:233.58ms +[2025-07-17 17:31:43] [Rank 0] step:2861/10000 train_time:668281ms step_avg:233.58ms +[2025-07-17 17:31:51] [Rank 0] PRINT: step:2875/10000 val_loss:4.4856 train_time:671859ms step_avg:233.69ms +[2025-07-17 17:31:51] [Rank 0] PRINT: step:2875/10000 val_loss:4.4856 
train_time:671859ms step_avg:233.69ms +[2025-07-17 17:31:52] [Rank 0] step:2881/10000 train_time:673050ms step_avg:233.62ms +[2025-07-17 17:31:52] [Rank 0] step:2881/10000 train_time:673050ms step_avg:233.62ms +[2025-07-17 17:31:57] [Rank 0] step:2901/10000 train_time:677822ms step_avg:233.65ms +[2025-07-17 17:31:57] [Rank 0] step:2901/10000 train_time:677822ms step_avg:233.65ms +[2025-07-17 17:32:02] [Rank 0] step:2921/10000 train_time:682595ms step_avg:233.69ms +[2025-07-17 17:32:02] [Rank 0] step:2921/10000 train_time:682595ms step_avg:233.69ms +[2025-07-17 17:32:07] [Rank 0] step:2941/10000 train_time:687368ms step_avg:233.72ms +[2025-07-17 17:32:07] [Rank 0] step:2941/10000 train_time:687368ms step_avg:233.72ms +[2025-07-17 17:32:11] [Rank 0] step:2961/10000 train_time:692142ms step_avg:233.75ms +[2025-07-17 17:32:11] [Rank 0] step:2961/10000 train_time:692142ms step_avg:233.75ms +[2025-07-17 17:32:16] [Rank 0] step:2981/10000 train_time:696932ms step_avg:233.79ms +[2025-07-17 17:32:16] [Rank 0] step:2981/10000 train_time:696932ms step_avg:233.79ms +[2025-07-17 17:32:25] [Rank 0] PRINT: step:3000/10000 val_loss:4.3562 train_time:701721ms step_avg:233.91ms +[2025-07-17 17:32:25] [Rank 0] PRINT: step:3000/10000 val_loss:4.3562 train_time:701721ms step_avg:233.91ms +[2025-07-17 17:32:26] [Rank 0] step:3001/10000 train_time:701740ms step_avg:233.84ms +[2025-07-17 17:32:26] [Rank 0] step:3001/10000 train_time:701740ms step_avg:233.84ms +[2025-07-17 17:32:30] [Rank 0] step:3021/10000 train_time:706512ms step_avg:233.87ms +[2025-07-17 17:32:30] [Rank 0] step:3021/10000 train_time:706512ms step_avg:233.87ms +[2025-07-17 17:32:35] [Rank 0] step:3041/10000 train_time:711301ms step_avg:233.90ms +[2025-07-17 17:32:35] [Rank 0] step:3041/10000 train_time:711301ms step_avg:233.90ms +[2025-07-17 17:32:40] [Rank 0] step:3061/10000 train_time:716619ms step_avg:234.11ms +[2025-07-17 17:32:40] [Rank 0] step:3061/10000 train_time:716619ms step_avg:234.11ms +[2025-07-17 17:32:45] 
[Rank 0] step:3081/10000 train_time:721405ms step_avg:234.15ms +[2025-07-17 17:32:45] [Rank 0] step:3081/10000 train_time:721405ms step_avg:234.15ms +[2025-07-17 17:32:50] [Rank 0] step:3101/10000 train_time:726194ms step_avg:234.18ms +[2025-07-17 17:32:50] [Rank 0] step:3101/10000 train_time:726194ms step_avg:234.18ms +[2025-07-17 17:32:55] [Rank 0] step:3121/10000 train_time:730984ms step_avg:234.21ms +[2025-07-17 17:32:55] [Rank 0] step:3121/10000 train_time:730984ms step_avg:234.21ms +[2025-07-17 17:33:00] [Rank 0] PRINT: step:3125/10000 val_loss:4.2655 train_time:732183ms step_avg:234.30ms +[2025-07-17 17:33:00] [Rank 0] PRINT: step:3125/10000 val_loss:4.2655 train_time:732183ms step_avg:234.30ms +[2025-07-17 17:33:04] [Rank 0] step:3141/10000 train_time:735773ms step_avg:234.25ms +[2025-07-17 17:33:04] [Rank 0] step:3141/10000 train_time:735773ms step_avg:234.25ms +[2025-07-17 17:33:09] [Rank 0] step:3161/10000 train_time:740566ms step_avg:234.28ms +[2025-07-17 17:33:09] [Rank 0] step:3161/10000 train_time:740566ms step_avg:234.28ms +[2025-07-17 17:33:13] [Rank 0] step:3181/10000 train_time:745365ms step_avg:234.32ms +[2025-07-17 17:33:13] [Rank 0] step:3181/10000 train_time:745365ms step_avg:234.32ms +[2025-07-17 17:33:18] [Rank 0] step:3201/10000 train_time:750165ms step_avg:234.35ms +[2025-07-17 17:33:18] [Rank 0] step:3201/10000 train_time:750165ms step_avg:234.35ms +[2025-07-17 17:33:23] [Rank 0] step:3221/10000 train_time:754963ms step_avg:234.39ms +[2025-07-17 17:33:23] [Rank 0] step:3221/10000 train_time:754963ms step_avg:234.39ms +[2025-07-17 17:33:28] [Rank 0] step:3241/10000 train_time:759760ms step_avg:234.42ms +[2025-07-17 17:33:28] [Rank 0] step:3241/10000 train_time:759760ms step_avg:234.42ms +[2025-07-17 17:33:35] [Rank 0] PRINT: step:3250/10000 val_loss:4.2615 train_time:762162ms step_avg:234.51ms +[2025-07-17 17:33:35] [Rank 0] PRINT: step:3250/10000 val_loss:4.2615 train_time:762162ms step_avg:234.51ms +[2025-07-17 17:33:37] [Rank 0] 
step:3261/10000 train_time:764557ms step_avg:234.45ms +[2025-07-17 17:33:37] [Rank 0] step:3261/10000 train_time:764557ms step_avg:234.45ms +[2025-07-17 17:33:42] [Rank 0] step:3281/10000 train_time:769355ms step_avg:234.49ms +[2025-07-17 17:33:42] [Rank 0] step:3281/10000 train_time:769355ms step_avg:234.49ms +[2025-07-17 17:33:47] [Rank 0] step:3301/10000 train_time:774156ms step_avg:234.52ms +[2025-07-17 17:33:47] [Rank 0] step:3301/10000 train_time:774156ms step_avg:234.52ms +[2025-07-17 17:33:52] [Rank 0] step:3321/10000 train_time:778954ms step_avg:234.55ms +[2025-07-17 17:33:52] [Rank 0] step:3321/10000 train_time:778954ms step_avg:234.55ms +[2025-07-17 17:33:56] [Rank 0] step:3341/10000 train_time:783752ms step_avg:234.59ms +[2025-07-17 17:33:56] [Rank 0] step:3341/10000 train_time:783752ms step_avg:234.59ms +[2025-07-17 17:34:01] [Rank 0] step:3361/10000 train_time:788552ms step_avg:234.62ms +[2025-07-17 17:34:01] [Rank 0] step:3361/10000 train_time:788552ms step_avg:234.62ms +[2025-07-17 17:34:09] [Rank 0] PRINT: step:3375/10000 val_loss:4.4519 train_time:792250ms step_avg:234.74ms +[2025-07-17 17:34:09] [Rank 0] PRINT: step:3375/10000 val_loss:4.4519 train_time:792250ms step_avg:234.74ms +[2025-07-17 17:34:11] [Rank 0] step:3381/10000 train_time:793446ms step_avg:234.68ms +[2025-07-17 17:34:11] [Rank 0] step:3381/10000 train_time:793446ms step_avg:234.68ms +[2025-07-17 17:34:15] [Rank 0] step:3401/10000 train_time:798248ms step_avg:234.71ms +[2025-07-17 17:34:15] [Rank 0] step:3401/10000 train_time:798248ms step_avg:234.71ms +[2025-07-17 17:34:20] [Rank 0] step:3421/10000 train_time:803051ms step_avg:234.74ms +[2025-07-17 17:34:20] [Rank 0] step:3421/10000 train_time:803051ms step_avg:234.74ms +[2025-07-17 17:34:25] [Rank 0] step:3441/10000 train_time:807852ms step_avg:234.77ms +[2025-07-17 17:34:25] [Rank 0] step:3441/10000 train_time:807852ms step_avg:234.77ms +[2025-07-17 17:34:30] [Rank 0] step:3461/10000 train_time:812653ms step_avg:234.80ms 
+[2025-07-17 17:34:30] [Rank 0] step:3461/10000 train_time:812653ms step_avg:234.80ms +[2025-07-17 17:34:35] [Rank 0] step:3481/10000 train_time:817455ms step_avg:234.83ms +[2025-07-17 17:34:35] [Rank 0] step:3481/10000 train_time:817455ms step_avg:234.83ms +[2025-07-17 17:34:43] [Rank 0] PRINT: step:3500/10000 val_loss:4.3456 train_time:822258ms step_avg:234.93ms +[2025-07-17 17:34:43] [Rank 0] PRINT: step:3500/10000 val_loss:4.3456 train_time:822258ms step_avg:234.93ms +[2025-07-17 17:34:44] [Rank 0] step:3501/10000 train_time:822277ms step_avg:234.87ms +[2025-07-17 17:34:44] [Rank 0] step:3501/10000 train_time:822277ms step_avg:234.87ms +[2025-07-17 17:34:48] [Rank 0] step:3521/10000 train_time:827056ms step_avg:234.89ms +[2025-07-17 17:34:48] [Rank 0] step:3521/10000 train_time:827056ms step_avg:234.89ms +[2025-07-17 17:34:53] [Rank 0] step:3541/10000 train_time:831855ms step_avg:234.92ms +[2025-07-17 17:34:53] [Rank 0] step:3541/10000 train_time:831855ms step_avg:234.92ms +[2025-07-17 17:34:58] [Rank 0] step:3561/10000 train_time:837153ms step_avg:235.09ms +[2025-07-17 17:34:58] [Rank 0] step:3561/10000 train_time:837153ms step_avg:235.09ms +[2025-07-17 17:35:03] [Rank 0] step:3581/10000 train_time:841947ms step_avg:235.12ms +[2025-07-17 17:35:03] [Rank 0] step:3581/10000 train_time:841947ms step_avg:235.12ms +[2025-07-17 17:35:08] [Rank 0] step:3601/10000 train_time:846742ms step_avg:235.14ms +[2025-07-17 17:35:08] [Rank 0] step:3601/10000 train_time:846742ms step_avg:235.14ms +[2025-07-17 17:35:13] [Rank 0] step:3621/10000 train_time:851534ms step_avg:235.17ms +[2025-07-17 17:35:13] [Rank 0] step:3621/10000 train_time:851534ms step_avg:235.17ms +[2025-07-17 17:35:18] [Rank 0] PRINT: step:3625/10000 val_loss:4.3741 train_time:852735ms step_avg:235.24ms +[2025-07-17 17:35:18] [Rank 0] PRINT: step:3625/10000 val_loss:4.3741 train_time:852735ms step_avg:235.24ms +[2025-07-17 17:35:22] [Rank 0] step:3641/10000 train_time:856329ms step_avg:235.19ms +[2025-07-17 
17:35:22] [Rank 0] step:3641/10000 train_time:856329ms step_avg:235.19ms +[2025-07-17 17:35:27] [Rank 0] step:3661/10000 train_time:861124ms step_avg:235.22ms +[2025-07-17 17:35:27] [Rank 0] step:3661/10000 train_time:861124ms step_avg:235.22ms +[2025-07-17 17:35:32] [Rank 0] step:3681/10000 train_time:865916ms step_avg:235.24ms +[2025-07-17 17:35:32] [Rank 0] step:3681/10000 train_time:865916ms step_avg:235.24ms +[2025-07-17 17:35:37] [Rank 0] step:3701/10000 train_time:870708ms step_avg:235.26ms +[2025-07-17 17:35:37] [Rank 0] step:3701/10000 train_time:870708ms step_avg:235.26ms +[2025-07-17 17:35:41] [Rank 0] step:3721/10000 train_time:875568ms step_avg:235.30ms +[2025-07-17 17:35:41] [Rank 0] step:3721/10000 train_time:875568ms step_avg:235.30ms +[2025-07-17 17:35:46] [Rank 0] step:3741/10000 train_time:880448ms step_avg:235.35ms +[2025-07-17 17:35:46] [Rank 0] step:3741/10000 train_time:880448ms step_avg:235.35ms +[2025-07-17 17:35:53] [Rank 0] PRINT: step:3750/10000 val_loss:4.5323 train_time:882884ms step_avg:235.44ms +[2025-07-17 17:35:53] [Rank 0] PRINT: step:3750/10000 val_loss:4.5323 train_time:882884ms step_avg:235.44ms +[2025-07-17 17:35:56] [Rank 0] step:3761/10000 train_time:885317ms step_avg:235.39ms +[2025-07-17 17:35:56] [Rank 0] step:3761/10000 train_time:885317ms step_avg:235.39ms +[2025-07-17 17:36:01] [Rank 0] step:3781/10000 train_time:890192ms step_avg:235.44ms +[2025-07-17 17:36:01] [Rank 0] step:3781/10000 train_time:890192ms step_avg:235.44ms +[2025-07-17 17:36:06] [Rank 0] step:3801/10000 train_time:895069ms step_avg:235.48ms +[2025-07-17 17:36:06] [Rank 0] step:3801/10000 train_time:895069ms step_avg:235.48ms +[2025-07-17 17:36:10] [Rank 0] step:3821/10000 train_time:899942ms step_avg:235.53ms +[2025-07-17 17:36:10] [Rank 0] step:3821/10000 train_time:899942ms step_avg:235.53ms +[2025-07-17 17:36:15] [Rank 0] step:3841/10000 train_time:904817ms step_avg:235.57ms +[2025-07-17 17:36:15] [Rank 0] step:3841/10000 train_time:904817ms 
step_avg:235.57ms +[2025-07-17 17:36:20] [Rank 0] step:3861/10000 train_time:909688ms step_avg:235.61ms +[2025-07-17 17:36:20] [Rank 0] step:3861/10000 train_time:909688ms step_avg:235.61ms +[2025-07-17 17:36:28] [Rank 0] PRINT: step:3875/10000 val_loss:4.5621 train_time:913343ms step_avg:235.70ms +[2025-07-17 17:36:28] [Rank 0] PRINT: step:3875/10000 val_loss:4.5621 train_time:913343ms step_avg:235.70ms +[2025-07-17 17:36:30] [Rank 0] step:3881/10000 train_time:914558ms step_avg:235.65ms +[2025-07-17 17:36:30] [Rank 0] step:3881/10000 train_time:914558ms step_avg:235.65ms +[2025-07-17 17:36:35] [Rank 0] step:3901/10000 train_time:919426ms step_avg:235.69ms +[2025-07-17 17:36:35] [Rank 0] step:3901/10000 train_time:919426ms step_avg:235.69ms +[2025-07-17 17:36:39] [Rank 0] step:3921/10000 train_time:924299ms step_avg:235.73ms +[2025-07-17 17:36:39] [Rank 0] step:3921/10000 train_time:924299ms step_avg:235.73ms +[2025-07-17 17:36:44] [Rank 0] step:3941/10000 train_time:929174ms step_avg:235.77ms +[2025-07-17 17:36:44] [Rank 0] step:3941/10000 train_time:929174ms step_avg:235.77ms +[2025-07-17 17:36:49] [Rank 0] step:3961/10000 train_time:934051ms step_avg:235.81ms +[2025-07-17 17:36:49] [Rank 0] step:3961/10000 train_time:934051ms step_avg:235.81ms +[2025-07-17 17:36:54] [Rank 0] step:3981/10000 train_time:938930ms step_avg:235.85ms +[2025-07-17 17:36:54] [Rank 0] step:3981/10000 train_time:938930ms step_avg:235.85ms +[2025-07-17 17:37:03] [Rank 0] PRINT: step:4000/10000 val_loss:4.5650 train_time:943800ms step_avg:235.95ms +[2025-07-17 17:37:03] [Rank 0] PRINT: step:4000/10000 val_loss:4.5650 train_time:943800ms step_avg:235.95ms +[2025-07-17 17:37:03] [Rank 0] step:4001/10000 train_time:943819ms step_avg:235.90ms +[2025-07-17 17:37:03] [Rank 0] step:4001/10000 train_time:943819ms step_avg:235.90ms +[2025-07-17 17:37:08] [Rank 0] step:4021/10000 train_time:948680ms step_avg:235.93ms +[2025-07-17 17:37:08] [Rank 0] step:4021/10000 train_time:948680ms 
step_avg:235.93ms +[2025-07-17 17:37:13] [Rank 0] step:4041/10000 train_time:953553ms step_avg:235.97ms +[2025-07-17 17:37:13] [Rank 0] step:4041/10000 train_time:953553ms step_avg:235.97ms +[2025-07-17 17:37:18] [Rank 0] step:4061/10000 train_time:958430ms step_avg:236.01ms +[2025-07-17 17:37:18] [Rank 0] step:4061/10000 train_time:958430ms step_avg:236.01ms +[2025-07-17 17:37:23] [Rank 0] step:4081/10000 train_time:963833ms step_avg:236.18ms +[2025-07-17 17:37:23] [Rank 0] step:4081/10000 train_time:963833ms step_avg:236.18ms +[2025-07-17 17:37:28] [Rank 0] step:4101/10000 train_time:968713ms step_avg:236.21ms +[2025-07-17 17:37:28] [Rank 0] step:4101/10000 train_time:968713ms step_avg:236.21ms +[2025-07-17 17:37:33] [Rank 0] step:4121/10000 train_time:973589ms step_avg:236.25ms +[2025-07-17 17:37:33] [Rank 0] step:4121/10000 train_time:973589ms step_avg:236.25ms +[2025-07-17 17:37:39] [Rank 0] PRINT: step:4125/10000 val_loss:4.3913 train_time:974809ms step_avg:236.32ms +[2025-07-17 17:37:39] [Rank 0] PRINT: step:4125/10000 val_loss:4.3913 train_time:974809ms step_avg:236.32ms +[2025-07-17 17:37:42] [Rank 0] step:4141/10000 train_time:978458ms step_avg:236.29ms +[2025-07-17 17:37:42] [Rank 0] step:4141/10000 train_time:978458ms step_avg:236.29ms +[2025-07-17 17:37:47] [Rank 0] step:4161/10000 train_time:983336ms step_avg:236.32ms +[2025-07-17 17:37:47] [Rank 0] step:4161/10000 train_time:983336ms step_avg:236.32ms +[2025-07-17 17:37:52] [Rank 0] step:4181/10000 train_time:988216ms step_avg:236.36ms +[2025-07-17 17:37:52] [Rank 0] step:4181/10000 train_time:988216ms step_avg:236.36ms +[2025-07-17 17:37:57] [Rank 0] step:4201/10000 train_time:993100ms step_avg:236.40ms +[2025-07-17 17:37:57] [Rank 0] step:4201/10000 train_time:993100ms step_avg:236.40ms +[2025-07-17 17:38:02] [Rank 0] step:4221/10000 train_time:997980ms step_avg:236.43ms +[2025-07-17 17:38:02] [Rank 0] step:4221/10000 train_time:997980ms step_avg:236.43ms +[2025-07-17 17:38:07] [Rank 0] 
step:4241/10000 train_time:1002861ms step_avg:236.47ms +[2025-07-17 17:38:07] [Rank 0] step:4241/10000 train_time:1002861ms step_avg:236.47ms +[2025-07-17 17:38:14] [Rank 0] PRINT: step:4250/10000 val_loss:4.4362 train_time:1005300ms step_avg:236.54ms +[2025-07-17 17:38:14] [Rank 0] PRINT: step:4250/10000 val_loss:4.4362 train_time:1005300ms step_avg:236.54ms +[2025-07-17 17:38:16] [Rank 0] step:4261/10000 train_time:1007738ms step_avg:236.50ms +[2025-07-17 17:38:16] [Rank 0] step:4261/10000 train_time:1007738ms step_avg:236.50ms +[2025-07-17 17:38:21] [Rank 0] step:4281/10000 train_time:1012619ms step_avg:236.54ms +[2025-07-17 17:38:21] [Rank 0] step:4281/10000 train_time:1012619ms step_avg:236.54ms +[2025-07-17 17:38:26] [Rank 0] step:4301/10000 train_time:1017498ms step_avg:236.57ms +[2025-07-17 17:38:26] [Rank 0] step:4301/10000 train_time:1017498ms step_avg:236.57ms +[2025-07-17 17:38:31] [Rank 0] step:4321/10000 train_time:1022383ms step_avg:236.61ms +[2025-07-17 17:38:31] [Rank 0] step:4321/10000 train_time:1022383ms step_avg:236.61ms +[2025-07-17 17:38:36] [Rank 0] step:4341/10000 train_time:1027262ms step_avg:236.64ms +[2025-07-17 17:38:36] [Rank 0] step:4341/10000 train_time:1027262ms step_avg:236.64ms +[2025-07-17 17:38:41] [Rank 0] step:4361/10000 train_time:1032148ms step_avg:236.68ms +[2025-07-17 17:38:41] [Rank 0] step:4361/10000 train_time:1032148ms step_avg:236.68ms +[2025-07-17 17:38:49] [Rank 0] PRINT: step:4375/10000 val_loss:4.3406 train_time:1035810ms step_avg:236.76ms +[2025-07-17 17:38:49] [Rank 0] PRINT: step:4375/10000 val_loss:4.3406 train_time:1035810ms step_avg:236.76ms +[2025-07-17 17:38:50] [Rank 0] step:4381/10000 train_time:1037030ms step_avg:236.71ms +[2025-07-17 17:38:50] [Rank 0] step:4381/10000 train_time:1037030ms step_avg:236.71ms +[2025-07-17 17:38:55] [Rank 0] step:4401/10000 train_time:1041914ms step_avg:236.74ms +[2025-07-17 17:38:55] [Rank 0] step:4401/10000 train_time:1041914ms step_avg:236.74ms +[2025-07-17 17:39:00] 
[Rank 0] step:4421/10000 train_time:1046794ms step_avg:236.78ms +[2025-07-17 17:39:00] [Rank 0] step:4421/10000 train_time:1046794ms step_avg:236.78ms +[2025-07-17 17:39:05] [Rank 0] step:4441/10000 train_time:1051677ms step_avg:236.81ms +[2025-07-17 17:39:05] [Rank 0] step:4441/10000 train_time:1051677ms step_avg:236.81ms +[2025-07-17 17:39:10] [Rank 0] step:4461/10000 train_time:1056575ms step_avg:236.85ms +[2025-07-17 17:39:10] [Rank 0] step:4461/10000 train_time:1056575ms step_avg:236.85ms +[2025-07-17 17:39:15] [Rank 0] step:4481/10000 train_time:1061475ms step_avg:236.88ms +[2025-07-17 17:39:15] [Rank 0] step:4481/10000 train_time:1061475ms step_avg:236.88ms +[2025-07-17 17:39:24] [Rank 0] PRINT: step:4500/10000 val_loss:4.4366 train_time:1066375ms step_avg:236.97ms +[2025-07-17 17:39:24] [Rank 0] PRINT: step:4500/10000 val_loss:4.4366 train_time:1066375ms step_avg:236.97ms +[2025-07-17 17:39:24] [Rank 0] step:4501/10000 train_time:1066394ms step_avg:236.92ms +[2025-07-17 17:39:24] [Rank 0] step:4501/10000 train_time:1066394ms step_avg:236.92ms +[2025-07-17 17:39:29] [Rank 0] step:4521/10000 train_time:1071270ms step_avg:236.95ms +[2025-07-17 17:39:29] [Rank 0] step:4521/10000 train_time:1071270ms step_avg:236.95ms +[2025-07-17 17:39:34] [Rank 0] step:4541/10000 train_time:1076165ms step_avg:236.99ms +[2025-07-17 17:39:34] [Rank 0] step:4541/10000 train_time:1076165ms step_avg:236.99ms +[2025-07-17 17:39:39] [Rank 0] step:4561/10000 train_time:1081053ms step_avg:237.02ms +[2025-07-17 17:39:39] [Rank 0] step:4561/10000 train_time:1081053ms step_avg:237.02ms +[2025-07-17 17:39:44] [Rank 0] step:4581/10000 train_time:1086453ms step_avg:237.16ms +[2025-07-17 17:39:44] [Rank 0] step:4581/10000 train_time:1086453ms step_avg:237.16ms +[2025-07-17 17:39:49] [Rank 0] step:4601/10000 train_time:1091348ms step_avg:237.20ms +[2025-07-17 17:39:49] [Rank 0] step:4601/10000 train_time:1091348ms step_avg:237.20ms +[2025-07-17 17:39:54] [Rank 0] step:4621/10000 
train_time:1096241ms step_avg:237.23ms +[2025-07-17 17:39:54] [Rank 0] step:4621/10000 train_time:1096241ms step_avg:237.23ms +[2025-07-17 17:39:59] [Rank 0] PRINT: step:4625/10000 val_loss:4.3298 train_time:1097469ms step_avg:237.29ms +[2025-07-17 17:39:59] [Rank 0] PRINT: step:4625/10000 val_loss:4.3298 train_time:1097469ms step_avg:237.29ms +[2025-07-17 17:40:03] [Rank 0] step:4641/10000 train_time:1101140ms step_avg:237.26ms +[2025-07-17 17:40:03] [Rank 0] step:4641/10000 train_time:1101140ms step_avg:237.26ms +[2025-07-17 17:40:08] [Rank 0] step:4661/10000 train_time:1106043ms step_avg:237.30ms +[2025-07-17 17:40:08] [Rank 0] step:4661/10000 train_time:1106043ms step_avg:237.30ms +[2025-07-17 17:40:13] [Rank 0] step:4681/10000 train_time:1110942ms step_avg:237.33ms +[2025-07-17 17:40:13] [Rank 0] step:4681/10000 train_time:1110942ms step_avg:237.33ms +[2025-07-17 17:40:18] [Rank 0] step:4701/10000 train_time:1115844ms step_avg:237.36ms +[2025-07-17 17:40:18] [Rank 0] step:4701/10000 train_time:1115844ms step_avg:237.36ms +[2025-07-17 17:40:23] [Rank 0] step:4721/10000 train_time:1120745ms step_avg:237.40ms +[2025-07-17 17:40:23] [Rank 0] step:4721/10000 train_time:1120745ms step_avg:237.40ms +[2025-07-17 17:40:28] [Rank 0] step:4741/10000 train_time:1125646ms step_avg:237.43ms +[2025-07-17 17:40:28] [Rank 0] step:4741/10000 train_time:1125646ms step_avg:237.43ms +[2025-07-17 17:40:35] [Rank 0] PRINT: step:4750/10000 val_loss:4.4299 train_time:1128099ms step_avg:237.49ms +[2025-07-17 17:40:35] [Rank 0] PRINT: step:4750/10000 val_loss:4.4299 train_time:1128099ms step_avg:237.49ms +[2025-07-17 17:40:37] [Rank 0] step:4761/10000 train_time:1130544ms step_avg:237.46ms +[2025-07-17 17:40:37] [Rank 0] step:4761/10000 train_time:1130544ms step_avg:237.46ms +[2025-07-17 17:40:42] [Rank 0] step:4781/10000 train_time:1135439ms step_avg:237.49ms +[2025-07-17 17:40:42] [Rank 0] step:4781/10000 train_time:1135439ms step_avg:237.49ms +[2025-07-17 17:40:47] [Rank 0] 
step:4801/10000 train_time:1140331ms step_avg:237.52ms +[2025-07-17 17:40:47] [Rank 0] step:4801/10000 train_time:1140331ms step_avg:237.52ms +[2025-07-17 17:40:52] [Rank 0] step:4821/10000 train_time:1145234ms step_avg:237.55ms +[2025-07-17 17:40:52] [Rank 0] step:4821/10000 train_time:1145234ms step_avg:237.55ms +[2025-07-17 17:40:57] [Rank 0] step:4841/10000 train_time:1150130ms step_avg:237.58ms +[2025-07-17 17:40:57] [Rank 0] step:4841/10000 train_time:1150130ms step_avg:237.58ms +[2025-07-17 17:41:02] [Rank 0] step:4861/10000 train_time:1155022ms step_avg:237.61ms +[2025-07-17 17:41:02] [Rank 0] step:4861/10000 train_time:1155022ms step_avg:237.61ms +[2025-07-17 17:41:10] [Rank 0] PRINT: step:4875/10000 val_loss:4.3924 train_time:1158697ms step_avg:237.68ms +[2025-07-17 17:41:10] [Rank 0] PRINT: step:4875/10000 val_loss:4.3924 train_time:1158697ms step_avg:237.68ms +[2025-07-17 17:41:11] [Rank 0] step:4881/10000 train_time:1159919ms step_avg:237.64ms +[2025-07-17 17:41:11] [Rank 0] step:4881/10000 train_time:1159919ms step_avg:237.64ms +[2025-07-17 17:41:16] [Rank 0] step:4901/10000 train_time:1164820ms step_avg:237.67ms +[2025-07-17 17:41:16] [Rank 0] step:4901/10000 train_time:1164820ms step_avg:237.67ms +[2025-07-17 17:41:21] [Rank 0] step:4921/10000 train_time:1169711ms step_avg:237.70ms +[2025-07-17 17:41:21] [Rank 0] step:4921/10000 train_time:1169711ms step_avg:237.70ms +[2025-07-17 17:41:26] [Rank 0] step:4941/10000 train_time:1174613ms step_avg:237.73ms +[2025-07-17 17:41:26] [Rank 0] step:4941/10000 train_time:1174613ms step_avg:237.73ms +[2025-07-17 17:41:31] [Rank 0] step:4961/10000 train_time:1179507ms step_avg:237.76ms +[2025-07-17 17:41:31] [Rank 0] step:4961/10000 train_time:1179507ms step_avg:237.76ms +[2025-07-17 17:41:36] [Rank 0] step:4981/10000 train_time:1184404ms step_avg:237.78ms +[2025-07-17 17:41:36] [Rank 0] step:4981/10000 train_time:1184404ms step_avg:237.78ms +[2025-07-17 17:41:45] [Rank 0] PRINT: step:5000/10000 val_loss:4.5011 
train_time:1189307ms step_avg:237.86ms +[2025-07-17 17:41:45] [Rank 0] PRINT: step:5000/10000 val_loss:4.5011 train_time:1189307ms step_avg:237.86ms +[2025-07-17 17:41:45] [Rank 0] step:5001/10000 train_time:1189327ms step_avg:237.82ms +[2025-07-17 17:41:45] [Rank 0] step:5001/10000 train_time:1189327ms step_avg:237.82ms +[2025-07-17 17:41:50] [Rank 0] step:5021/10000 train_time:1194208ms step_avg:237.84ms +[2025-07-17 17:41:50] [Rank 0] step:5021/10000 train_time:1194208ms step_avg:237.84ms +[2025-07-17 17:41:55] [Rank 0] step:5041/10000 train_time:1199110ms step_avg:237.87ms +[2025-07-17 17:41:55] [Rank 0] step:5041/10000 train_time:1199110ms step_avg:237.87ms +[2025-07-17 17:42:00] [Rank 0] step:5061/10000 train_time:1204008ms step_avg:237.90ms +[2025-07-17 17:42:00] [Rank 0] step:5061/10000 train_time:1204008ms step_avg:237.90ms +[2025-07-17 17:42:05] [Rank 0] step:5081/10000 train_time:1208906ms step_avg:237.93ms +[2025-07-17 17:42:05] [Rank 0] step:5081/10000 train_time:1208906ms step_avg:237.93ms +[2025-07-17 17:42:10] [Rank 0] step:5101/10000 train_time:1214322ms step_avg:238.06ms +[2025-07-17 17:42:10] [Rank 0] step:5101/10000 train_time:1214322ms step_avg:238.06ms +[2025-07-17 17:42:15] [Rank 0] step:5121/10000 train_time:1219218ms step_avg:238.08ms +[2025-07-17 17:42:15] [Rank 0] step:5121/10000 train_time:1219218ms step_avg:238.08ms +[2025-07-17 17:42:21] [Rank 0] PRINT: step:5125/10000 val_loss:4.3222 train_time:1220442ms step_avg:238.14ms +[2025-07-17 17:42:21] [Rank 0] PRINT: step:5125/10000 val_loss:4.3222 train_time:1220442ms step_avg:238.14ms +[2025-07-17 17:42:25] [Rank 0] step:5141/10000 train_time:1224109ms step_avg:238.11ms +[2025-07-17 17:42:25] [Rank 0] step:5141/10000 train_time:1224109ms step_avg:238.11ms +[2025-07-17 17:42:30] [Rank 0] step:5161/10000 train_time:1229004ms step_avg:238.13ms +[2025-07-17 17:42:30] [Rank 0] step:5161/10000 train_time:1229004ms step_avg:238.13ms +[2025-07-17 17:42:34] [Rank 0] step:5181/10000 
train_time:1233902ms step_avg:238.16ms +[2025-07-17 17:42:34] [Rank 0] step:5181/10000 train_time:1233902ms step_avg:238.16ms +[2025-07-17 17:42:39] [Rank 0] step:5201/10000 train_time:1238846ms step_avg:238.19ms +[2025-07-17 17:42:39] [Rank 0] step:5201/10000 train_time:1238846ms step_avg:238.19ms +[2025-07-17 17:42:44] [Rank 0] step:5221/10000 train_time:1243817ms step_avg:238.23ms +[2025-07-17 17:42:44] [Rank 0] step:5221/10000 train_time:1243817ms step_avg:238.23ms +[2025-07-17 17:42:49] [Rank 0] step:5241/10000 train_time:1248788ms step_avg:238.27ms +[2025-07-17 17:42:49] [Rank 0] step:5241/10000 train_time:1248788ms step_avg:238.27ms +[2025-07-17 17:42:56] [Rank 0] PRINT: step:5250/10000 val_loss:4.3607 train_time:1251268ms step_avg:238.34ms +[2025-07-17 17:42:56] [Rank 0] PRINT: step:5250/10000 val_loss:4.3607 train_time:1251268ms step_avg:238.34ms +[2025-07-17 17:42:59] [Rank 0] step:5261/10000 train_time:1253743ms step_avg:238.31ms +[2025-07-17 17:42:59] [Rank 0] step:5261/10000 train_time:1253743ms step_avg:238.31ms +[2025-07-17 17:43:04] [Rank 0] step:5281/10000 train_time:1258713ms step_avg:238.35ms +[2025-07-17 17:43:04] [Rank 0] step:5281/10000 train_time:1258713ms step_avg:238.35ms +[2025-07-17 17:43:09] [Rank 0] step:5301/10000 train_time:1263679ms step_avg:238.39ms +[2025-07-17 17:43:09] [Rank 0] step:5301/10000 train_time:1263679ms step_avg:238.39ms +[2025-07-17 17:43:14] [Rank 0] step:5321/10000 train_time:1268644ms step_avg:238.42ms +[2025-07-17 17:43:14] [Rank 0] step:5321/10000 train_time:1268644ms step_avg:238.42ms +[2025-07-17 17:43:19] [Rank 0] step:5341/10000 train_time:1273618ms step_avg:238.46ms +[2025-07-17 17:43:19] [Rank 0] step:5341/10000 train_time:1273618ms step_avg:238.46ms +[2025-07-17 17:43:24] [Rank 0] step:5361/10000 train_time:1278584ms step_avg:238.50ms +[2025-07-17 17:43:24] [Rank 0] step:5361/10000 train_time:1278584ms step_avg:238.50ms +[2025-07-17 17:43:32] [Rank 0] PRINT: step:5375/10000 val_loss:4.2825 
train_time:1282317ms step_avg:238.57ms +[2025-07-17 17:43:32] [Rank 0] PRINT: step:5375/10000 val_loss:4.2825 train_time:1282317ms step_avg:238.57ms +[2025-07-17 17:43:33] [Rank 0] step:5381/10000 train_time:1283560ms step_avg:238.54ms +[2025-07-17 17:43:33] [Rank 0] step:5381/10000 train_time:1283560ms step_avg:238.54ms +[2025-07-17 17:43:38] [Rank 0] step:5401/10000 train_time:1288528ms step_avg:238.57ms +[2025-07-17 17:43:38] [Rank 0] step:5401/10000 train_time:1288528ms step_avg:238.57ms +[2025-07-17 17:43:43] [Rank 0] step:5421/10000 train_time:1293505ms step_avg:238.61ms +[2025-07-17 17:43:43] [Rank 0] step:5421/10000 train_time:1293505ms step_avg:238.61ms +[2025-07-17 17:43:48] [Rank 0] step:5441/10000 train_time:1298471ms step_avg:238.65ms +[2025-07-17 17:43:48] [Rank 0] step:5441/10000 train_time:1298471ms step_avg:238.65ms +[2025-07-17 17:43:53] [Rank 0] step:5461/10000 train_time:1303440ms step_avg:238.68ms +[2025-07-17 17:43:53] [Rank 0] step:5461/10000 train_time:1303440ms step_avg:238.68ms +[2025-07-17 17:43:58] [Rank 0] step:5481/10000 train_time:1308414ms step_avg:238.72ms +[2025-07-17 17:43:58] [Rank 0] step:5481/10000 train_time:1308414ms step_avg:238.72ms +[2025-07-17 17:44:07] [Rank 0] PRINT: step:5500/10000 val_loss:4.3632 train_time:1313380ms step_avg:238.80ms +[2025-07-17 17:44:07] [Rank 0] PRINT: step:5500/10000 val_loss:4.3632 train_time:1313380ms step_avg:238.80ms +[2025-07-17 17:44:08] [Rank 0] step:5501/10000 train_time:1313399ms step_avg:238.76ms +[2025-07-17 17:44:08] [Rank 0] step:5501/10000 train_time:1313399ms step_avg:238.76ms +[2025-07-17 17:44:12] [Rank 0] step:5521/10000 train_time:1318342ms step_avg:238.79ms +[2025-07-17 17:44:12] [Rank 0] step:5521/10000 train_time:1318342ms step_avg:238.79ms +[2025-07-17 17:44:17] [Rank 0] step:5541/10000 train_time:1323311ms step_avg:238.82ms +[2025-07-17 17:44:17] [Rank 0] step:5541/10000 train_time:1323311ms step_avg:238.82ms +[2025-07-17 17:44:22] [Rank 0] step:5561/10000 
train_time:1328276ms step_avg:238.86ms +[2025-07-17 17:44:22] [Rank 0] step:5561/10000 train_time:1328276ms step_avg:238.86ms +[2025-07-17 17:44:27] [Rank 0] step:5581/10000 train_time:1333245ms step_avg:238.89ms +[2025-07-17 17:44:27] [Rank 0] step:5581/10000 train_time:1333245ms step_avg:238.89ms +[2025-07-17 17:44:32] [Rank 0] step:5601/10000 train_time:1338307ms step_avg:238.94ms +[2025-07-17 17:44:32] [Rank 0] step:5601/10000 train_time:1338307ms step_avg:238.94ms +[2025-07-17 17:44:37] [Rank 0] step:5621/10000 train_time:1343278ms step_avg:238.97ms +[2025-07-17 17:44:37] [Rank 0] step:5621/10000 train_time:1343278ms step_avg:238.97ms +[2025-07-17 17:44:43] [Rank 0] PRINT: step:5625/10000 val_loss:4.4016 train_time:1344521ms step_avg:239.03ms +[2025-07-17 17:44:43] [Rank 0] PRINT: step:5625/10000 val_loss:4.4016 train_time:1344521ms step_avg:239.03ms +[2025-07-17 17:44:47] [Rank 0] step:5641/10000 train_time:1348244ms step_avg:239.01ms +[2025-07-17 17:44:47] [Rank 0] step:5641/10000 train_time:1348244ms step_avg:239.01ms +[2025-07-17 17:44:52] [Rank 0] step:5661/10000 train_time:1353211ms step_avg:239.04ms +[2025-07-17 17:44:52] [Rank 0] step:5661/10000 train_time:1353211ms step_avg:239.04ms +[2025-07-17 17:44:57] [Rank 0] step:5681/10000 train_time:1358182ms step_avg:239.07ms +[2025-07-17 17:44:57] [Rank 0] step:5681/10000 train_time:1358182ms step_avg:239.07ms +[2025-07-17 17:45:02] [Rank 0] step:5701/10000 train_time:1363147ms step_avg:239.11ms +[2025-07-17 17:45:02] [Rank 0] step:5701/10000 train_time:1363147ms step_avg:239.11ms +[2025-07-17 17:45:07] [Rank 0] step:5721/10000 train_time:1368110ms step_avg:239.14ms +[2025-07-17 17:45:07] [Rank 0] step:5721/10000 train_time:1368110ms step_avg:239.14ms +[2025-07-17 17:45:12] [Rank 0] step:5741/10000 train_time:1373082ms step_avg:239.17ms +[2025-07-17 17:45:12] [Rank 0] step:5741/10000 train_time:1373082ms step_avg:239.17ms +[2025-07-17 17:45:19] [Rank 0] PRINT: step:5750/10000 val_loss:4.3387 
train_time:1375567ms step_avg:239.23ms +[2025-07-17 17:45:19] [Rank 0] PRINT: step:5750/10000 val_loss:4.3387 train_time:1375567ms step_avg:239.23ms +[2025-07-17 17:45:22] [Rank 0] step:5761/10000 train_time:1378048ms step_avg:239.20ms +[2025-07-17 17:45:22] [Rank 0] step:5761/10000 train_time:1378048ms step_avg:239.20ms +[2025-07-17 17:45:27] [Rank 0] step:5781/10000 train_time:1383017ms step_avg:239.23ms +[2025-07-17 17:45:27] [Rank 0] step:5781/10000 train_time:1383017ms step_avg:239.23ms +[2025-07-17 17:45:31] [Rank 0] step:5801/10000 train_time:1387979ms step_avg:239.27ms +[2025-07-17 17:45:31] [Rank 0] step:5801/10000 train_time:1387979ms step_avg:239.27ms +[2025-07-17 17:45:36] [Rank 0] step:5821/10000 train_time:1392946ms step_avg:239.30ms +[2025-07-17 17:45:36] [Rank 0] step:5821/10000 train_time:1392946ms step_avg:239.30ms +[2025-07-17 17:45:41] [Rank 0] step:5841/10000 train_time:1397915ms step_avg:239.33ms +[2025-07-17 17:45:41] [Rank 0] step:5841/10000 train_time:1397915ms step_avg:239.33ms +[2025-07-17 17:45:46] [Rank 0] step:5861/10000 train_time:1402881ms step_avg:239.36ms +[2025-07-17 17:45:46] [Rank 0] step:5861/10000 train_time:1402881ms step_avg:239.36ms +[2025-07-17 17:45:55] [Rank 0] PRINT: step:5875/10000 val_loss:4.4082 train_time:1406601ms step_avg:239.42ms +[2025-07-17 17:45:55] [Rank 0] PRINT: step:5875/10000 val_loss:4.4082 train_time:1406601ms step_avg:239.42ms +[2025-07-17 17:45:56] [Rank 0] step:5881/10000 train_time:1407842ms step_avg:239.39ms +[2025-07-17 17:45:56] [Rank 0] step:5881/10000 train_time:1407842ms step_avg:239.39ms +[2025-07-17 17:46:01] [Rank 0] step:5901/10000 train_time:1412812ms step_avg:239.42ms +[2025-07-17 17:46:01] [Rank 0] step:5901/10000 train_time:1412812ms step_avg:239.42ms +[2025-07-17 17:46:06] [Rank 0] step:5921/10000 train_time:1417782ms step_avg:239.45ms +[2025-07-17 17:46:06] [Rank 0] step:5921/10000 train_time:1417782ms step_avg:239.45ms +[2025-07-17 17:46:11] [Rank 0] step:5941/10000 
train_time:1422761ms step_avg:239.48ms +[2025-07-17 17:46:11] [Rank 0] step:5941/10000 train_time:1422761ms step_avg:239.48ms +[2025-07-17 17:46:16] [Rank 0] step:5961/10000 train_time:1427746ms step_avg:239.51ms +[2025-07-17 17:46:16] [Rank 0] step:5961/10000 train_time:1427746ms step_avg:239.51ms +[2025-07-17 17:46:21] [Rank 0] step:5981/10000 train_time:1432724ms step_avg:239.55ms +[2025-07-17 17:46:21] [Rank 0] step:5981/10000 train_time:1432724ms step_avg:239.55ms +[2025-07-17 17:46:30] [Rank 0] PRINT: step:6000/10000 val_loss:4.3444 train_time:1437711ms step_avg:239.62ms +[2025-07-17 17:46:30] [Rank 0] PRINT: step:6000/10000 val_loss:4.3444 train_time:1437711ms step_avg:239.62ms +[2025-07-17 17:46:31] [Rank 0] step:6001/10000 train_time:1437730ms step_avg:239.58ms +[2025-07-17 17:46:31] [Rank 0] step:6001/10000 train_time:1437730ms step_avg:239.58ms +[2025-07-17 17:46:36] [Rank 0] step:6021/10000 train_time:1442685ms step_avg:239.61ms +[2025-07-17 17:46:36] [Rank 0] step:6021/10000 train_time:1442685ms step_avg:239.61ms +[2025-07-17 17:46:41] [Rank 0] step:6041/10000 train_time:1447660ms step_avg:239.64ms +[2025-07-17 17:46:41] [Rank 0] step:6041/10000 train_time:1447660ms step_avg:239.64ms +[2025-07-17 17:46:46] [Rank 0] step:6061/10000 train_time:1452630ms step_avg:239.67ms +[2025-07-17 17:46:46] [Rank 0] step:6061/10000 train_time:1452630ms step_avg:239.67ms +[2025-07-17 17:46:51] [Rank 0] step:6081/10000 train_time:1457607ms step_avg:239.70ms +[2025-07-17 17:46:51] [Rank 0] step:6081/10000 train_time:1457607ms step_avg:239.70ms +[2025-07-17 17:46:56] [Rank 0] step:6101/10000 train_time:1463104ms step_avg:239.81ms +[2025-07-17 17:46:56] [Rank 0] step:6101/10000 train_time:1463104ms step_avg:239.81ms +[2025-07-17 17:47:01] [Rank 0] step:6121/10000 train_time:1468084ms step_avg:239.84ms +[2025-07-17 17:47:01] [Rank 0] step:6121/10000 train_time:1468084ms step_avg:239.84ms +[2025-07-17 17:47:07] [Rank 0] PRINT: step:6125/10000 val_loss:4.3075 
train_time:1469330ms step_avg:239.89ms +[2025-07-17 17:47:07] [Rank 0] PRINT: step:6125/10000 val_loss:4.3075 train_time:1469330ms step_avg:239.89ms +[2025-07-17 17:47:11] [Rank 0] step:6141/10000 train_time:1473061ms step_avg:239.87ms +[2025-07-17 17:47:11] [Rank 0] step:6141/10000 train_time:1473061ms step_avg:239.87ms +[2025-07-17 17:47:16] [Rank 0] step:6161/10000 train_time:1478035ms step_avg:239.90ms +[2025-07-17 17:47:16] [Rank 0] step:6161/10000 train_time:1478035ms step_avg:239.90ms +[2025-07-17 17:47:21] [Rank 0] step:6181/10000 train_time:1483014ms step_avg:239.93ms +[2025-07-17 17:47:21] [Rank 0] step:6181/10000 train_time:1483014ms step_avg:239.93ms +[2025-07-17 17:47:26] [Rank 0] step:6201/10000 train_time:1488002ms step_avg:239.96ms +[2025-07-17 17:47:26] [Rank 0] step:6201/10000 train_time:1488002ms step_avg:239.96ms +[2025-07-17 17:47:31] [Rank 0] step:6221/10000 train_time:1492981ms step_avg:239.99ms +[2025-07-17 17:47:31] [Rank 0] step:6221/10000 train_time:1492981ms step_avg:239.99ms +[2025-07-17 17:47:36] [Rank 0] step:6241/10000 train_time:1497964ms step_avg:240.02ms +[2025-07-17 17:47:36] [Rank 0] step:6241/10000 train_time:1497964ms step_avg:240.02ms +[2025-07-17 17:47:43] [Rank 0] PRINT: step:6250/10000 val_loss:4.4052 train_time:1500454ms step_avg:240.07ms +[2025-07-17 17:47:43] [Rank 0] PRINT: step:6250/10000 val_loss:4.4052 train_time:1500454ms step_avg:240.07ms +[2025-07-17 17:47:45] [Rank 0] step:6261/10000 train_time:1502940ms step_avg:240.05ms +[2025-07-17 17:47:45] [Rank 0] step:6261/10000 train_time:1502940ms step_avg:240.05ms +[2025-07-17 17:47:50] [Rank 0] step:6281/10000 train_time:1507927ms step_avg:240.08ms +[2025-07-17 17:47:50] [Rank 0] step:6281/10000 train_time:1507927ms step_avg:240.08ms +[2025-07-17 17:47:55] [Rank 0] step:6301/10000 train_time:1512903ms step_avg:240.11ms +[2025-07-17 17:47:55] [Rank 0] step:6301/10000 train_time:1512903ms step_avg:240.11ms +[2025-07-17 17:48:00] [Rank 0] step:6321/10000 
train_time:1517886ms step_avg:240.13ms +[2025-07-17 17:48:00] [Rank 0] step:6321/10000 train_time:1517886ms step_avg:240.13ms +[2025-07-17 17:48:05] [Rank 0] step:6341/10000 train_time:1522872ms step_avg:240.16ms +[2025-07-17 17:48:05] [Rank 0] step:6341/10000 train_time:1522872ms step_avg:240.16ms +[2025-07-17 17:48:10] [Rank 0] step:6361/10000 train_time:1527849ms step_avg:240.19ms +[2025-07-17 17:48:10] [Rank 0] step:6361/10000 train_time:1527849ms step_avg:240.19ms +[2025-07-17 17:48:18] [Rank 0] PRINT: step:6375/10000 val_loss:4.4763 train_time:1531585ms step_avg:240.25ms +[2025-07-17 17:48:18] [Rank 0] PRINT: step:6375/10000 val_loss:4.4763 train_time:1531585ms step_avg:240.25ms +[2025-07-17 17:48:20] [Rank 0] step:6381/10000 train_time:1532828ms step_avg:240.22ms +[2025-07-17 17:48:20] [Rank 0] step:6381/10000 train_time:1532828ms step_avg:240.22ms +[2025-07-17 17:48:25] [Rank 0] step:6401/10000 train_time:1537798ms step_avg:240.24ms +[2025-07-17 17:48:25] [Rank 0] step:6401/10000 train_time:1537798ms step_avg:240.24ms +[2025-07-17 17:48:30] [Rank 0] step:6421/10000 train_time:1542769ms step_avg:240.27ms +[2025-07-17 17:48:30] [Rank 0] step:6421/10000 train_time:1542769ms step_avg:240.27ms +[2025-07-17 17:48:35] [Rank 0] step:6441/10000 train_time:1547744ms step_avg:240.30ms +[2025-07-17 17:48:35] [Rank 0] step:6441/10000 train_time:1547744ms step_avg:240.30ms +[2025-07-17 17:48:40] [Rank 0] step:6461/10000 train_time:1552730ms step_avg:240.32ms +[2025-07-17 17:48:40] [Rank 0] step:6461/10000 train_time:1552730ms step_avg:240.32ms +[2025-07-17 17:48:45] [Rank 0] step:6481/10000 train_time:1557709ms step_avg:240.35ms +[2025-07-17 17:48:45] [Rank 0] step:6481/10000 train_time:1557709ms step_avg:240.35ms +[2025-07-17 17:48:54] [Rank 0] PRINT: step:6500/10000 val_loss:4.3756 train_time:1562684ms step_avg:240.41ms +[2025-07-17 17:48:54] [Rank 0] PRINT: step:6500/10000 val_loss:4.3756 train_time:1562684ms step_avg:240.41ms +[2025-07-17 17:48:54] [Rank 0] 
step:6501/10000 train_time:1562703ms step_avg:240.38ms +[2025-07-17 17:48:54] [Rank 0] step:6501/10000 train_time:1562703ms step_avg:240.38ms +[2025-07-17 17:48:59] [Rank 0] step:6521/10000 train_time:1567661ms step_avg:240.40ms +[2025-07-17 17:48:59] [Rank 0] step:6521/10000 train_time:1567661ms step_avg:240.40ms +[2025-07-17 17:49:04] [Rank 0] step:6541/10000 train_time:1572636ms step_avg:240.43ms +[2025-07-17 17:49:04] [Rank 0] step:6541/10000 train_time:1572636ms step_avg:240.43ms +[2025-07-17 17:49:09] [Rank 0] step:6561/10000 train_time:1577623ms step_avg:240.45ms +[2025-07-17 17:49:09] [Rank 0] step:6561/10000 train_time:1577623ms step_avg:240.45ms +[2025-07-17 17:49:14] [Rank 0] step:6581/10000 train_time:1582605ms step_avg:240.48ms +[2025-07-17 17:49:14] [Rank 0] step:6581/10000 train_time:1582605ms step_avg:240.48ms +[2025-07-17 17:49:19] [Rank 0] step:6601/10000 train_time:1587597ms step_avg:240.51ms +[2025-07-17 17:49:19] [Rank 0] step:6601/10000 train_time:1587597ms step_avg:240.51ms +[2025-07-17 17:49:24] [Rank 0] step:6621/10000 train_time:1592669ms step_avg:240.55ms +[2025-07-17 17:49:24] [Rank 0] step:6621/10000 train_time:1592669ms step_avg:240.55ms +[2025-07-17 17:49:30] [Rank 0] PRINT: step:6625/10000 val_loss:4.8603 train_time:1593918ms step_avg:240.59ms +[2025-07-17 17:49:30] [Rank 0] PRINT: step:6625/10000 val_loss:4.8603 train_time:1593918ms step_avg:240.59ms +[2025-07-17 17:49:34] [Rank 0] step:6641/10000 train_time:1597647ms step_avg:240.57ms +[2025-07-17 17:49:34] [Rank 0] step:6641/10000 train_time:1597647ms step_avg:240.57ms +[2025-07-17 17:49:39] [Rank 0] step:6661/10000 train_time:1602630ms step_avg:240.60ms +[2025-07-17 17:49:39] [Rank 0] step:6661/10000 train_time:1602630ms step_avg:240.60ms +[2025-07-17 17:49:44] [Rank 0] step:6681/10000 train_time:1607664ms step_avg:240.63ms +[2025-07-17 17:49:44] [Rank 0] step:6681/10000 train_time:1607664ms step_avg:240.63ms +[2025-07-17 17:49:49] [Rank 0] step:6701/10000 train_time:1612708ms 
step_avg:240.67ms +[2025-07-17 17:49:49] [Rank 0] step:6701/10000 train_time:1612708ms step_avg:240.67ms +[2025-07-17 17:49:54] [Rank 0] step:6721/10000 train_time:1617768ms step_avg:240.70ms +[2025-07-17 17:49:54] [Rank 0] step:6721/10000 train_time:1617768ms step_avg:240.70ms +[2025-07-17 17:49:59] [Rank 0] step:6741/10000 train_time:1622831ms step_avg:240.74ms +[2025-07-17 17:49:59] [Rank 0] step:6741/10000 train_time:1622831ms step_avg:240.74ms +[2025-07-17 17:50:05] [Rank 0] PRINT: step:6750/10000 val_loss:4.2544 train_time:1625350ms step_avg:240.79ms +[2025-07-17 17:50:05] [Rank 0] PRINT: step:6750/10000 val_loss:4.2544 train_time:1625350ms step_avg:240.79ms +[2025-07-17 17:50:08] [Rank 0] step:6761/10000 train_time:1627871ms step_avg:240.77ms +[2025-07-17 17:50:08] [Rank 0] step:6761/10000 train_time:1627871ms step_avg:240.77ms +[2025-07-17 17:50:13] [Rank 0] step:6781/10000 train_time:1632923ms step_avg:240.81ms +[2025-07-17 17:50:13] [Rank 0] step:6781/10000 train_time:1632923ms step_avg:240.81ms +[2025-07-17 17:50:18] [Rank 0] step:6801/10000 train_time:1637977ms step_avg:240.84ms +[2025-07-17 17:50:18] [Rank 0] step:6801/10000 train_time:1637977ms step_avg:240.84ms +[2025-07-17 17:50:23] [Rank 0] step:6821/10000 train_time:1643025ms step_avg:240.88ms +[2025-07-17 17:50:23] [Rank 0] step:6821/10000 train_time:1643025ms step_avg:240.88ms +[2025-07-17 17:50:28] [Rank 0] step:6841/10000 train_time:1648070ms step_avg:240.91ms +[2025-07-17 17:50:28] [Rank 0] step:6841/10000 train_time:1648070ms step_avg:240.91ms +[2025-07-17 17:50:33] [Rank 0] step:6861/10000 train_time:1653106ms step_avg:240.94ms +[2025-07-17 17:50:33] [Rank 0] step:6861/10000 train_time:1653106ms step_avg:240.94ms +[2025-07-17 17:50:42] [Rank 0] PRINT: step:6875/10000 val_loss:4.4786 train_time:1656881ms step_avg:241.00ms +[2025-07-17 17:50:42] [Rank 0] PRINT: step:6875/10000 val_loss:4.4786 train_time:1656881ms step_avg:241.00ms +[2025-07-17 17:50:43] [Rank 0] step:6881/10000 
train_time:1658142ms step_avg:240.97ms +[2025-07-17 17:50:43] [Rank 0] step:6881/10000 train_time:1658142ms step_avg:240.97ms +[2025-07-17 17:50:48] [Rank 0] step:6901/10000 train_time:1663176ms step_avg:241.01ms +[2025-07-17 17:50:48] [Rank 0] step:6901/10000 train_time:1663176ms step_avg:241.01ms +[2025-07-17 17:50:53] [Rank 0] step:6921/10000 train_time:1668213ms step_avg:241.04ms +[2025-07-17 17:50:53] [Rank 0] step:6921/10000 train_time:1668213ms step_avg:241.04ms +[2025-07-17 17:50:58] [Rank 0] step:6941/10000 train_time:1673263ms step_avg:241.07ms +[2025-07-17 17:50:58] [Rank 0] step:6941/10000 train_time:1673263ms step_avg:241.07ms +[2025-07-17 17:51:03] [Rank 0] step:6961/10000 train_time:1678308ms step_avg:241.10ms +[2025-07-17 17:51:03] [Rank 0] step:6961/10000 train_time:1678308ms step_avg:241.10ms +[2025-07-17 17:51:08] [Rank 0] step:6981/10000 train_time:1683352ms step_avg:241.13ms +[2025-07-17 17:51:08] [Rank 0] step:6981/10000 train_time:1683352ms step_avg:241.13ms +[2025-07-17 17:51:18] [Rank 0] PRINT: step:7000/10000 val_loss:4.3719 train_time:1688390ms step_avg:241.20ms +[2025-07-17 17:51:18] [Rank 0] PRINT: step:7000/10000 val_loss:4.3719 train_time:1688390ms step_avg:241.20ms +[2025-07-17 17:51:18] [Rank 0] step:7001/10000 train_time:1688409ms step_avg:241.17ms +[2025-07-17 17:51:18] [Rank 0] step:7001/10000 train_time:1688409ms step_avg:241.17ms +[2025-07-17 17:51:23] [Rank 0] step:7021/10000 train_time:1693428ms step_avg:241.19ms +[2025-07-17 17:51:23] [Rank 0] step:7021/10000 train_time:1693428ms step_avg:241.19ms +[2025-07-17 17:51:28] [Rank 0] step:7041/10000 train_time:1698467ms step_avg:241.23ms +[2025-07-17 17:51:28] [Rank 0] step:7041/10000 train_time:1698467ms step_avg:241.23ms +[2025-07-17 17:51:33] [Rank 0] step:7061/10000 train_time:1703506ms step_avg:241.26ms +[2025-07-17 17:51:33] [Rank 0] step:7061/10000 train_time:1703506ms step_avg:241.26ms +[2025-07-17 17:51:38] [Rank 0] step:7081/10000 train_time:1708549ms step_avg:241.29ms 
+[2025-07-17 17:51:38] [Rank 0] step:7081/10000 train_time:1708549ms step_avg:241.29ms +[2025-07-17 17:51:43] [Rank 0] step:7101/10000 train_time:1713587ms step_avg:241.32ms +[2025-07-17 17:51:43] [Rank 0] step:7101/10000 train_time:1713587ms step_avg:241.32ms +[2025-07-17 17:51:48] [Rank 0] step:7121/10000 train_time:1718721ms step_avg:241.36ms +[2025-07-17 17:51:48] [Rank 0] step:7121/10000 train_time:1718721ms step_avg:241.36ms +[2025-07-17 17:51:54] [Rank 0] PRINT: step:7125/10000 val_loss:4.3910 train_time:1719980ms step_avg:241.40ms +[2025-07-17 17:51:54] [Rank 0] PRINT: step:7125/10000 val_loss:4.3910 train_time:1719980ms step_avg:241.40ms +[2025-07-17 17:51:58] [Rank 0] step:7141/10000 train_time:1723763ms step_avg:241.39ms +[2025-07-17 17:51:58] [Rank 0] step:7141/10000 train_time:1723763ms step_avg:241.39ms +[2025-07-17 17:52:03] [Rank 0] step:7161/10000 train_time:1728805ms step_avg:241.42ms +[2025-07-17 17:52:03] [Rank 0] step:7161/10000 train_time:1728805ms step_avg:241.42ms +[2025-07-17 17:52:08] [Rank 0] step:7181/10000 train_time:1733850ms step_avg:241.45ms +[2025-07-17 17:52:08] [Rank 0] step:7181/10000 train_time:1733850ms step_avg:241.45ms +[2025-07-17 17:52:13] [Rank 0] step:7201/10000 train_time:1738906ms step_avg:241.48ms +[2025-07-17 17:52:13] [Rank 0] step:7201/10000 train_time:1738906ms step_avg:241.48ms +[2025-07-17 17:52:18] [Rank 0] step:7221/10000 train_time:1743950ms step_avg:241.51ms +[2025-07-17 17:52:18] [Rank 0] step:7221/10000 train_time:1743950ms step_avg:241.51ms +[2025-07-17 17:52:23] [Rank 0] step:7241/10000 train_time:1748995ms step_avg:241.54ms +[2025-07-17 17:52:23] [Rank 0] step:7241/10000 train_time:1748995ms step_avg:241.54ms +[2025-07-17 17:52:30] [Rank 0] PRINT: step:7250/10000 val_loss:4.4894 train_time:1751517ms step_avg:241.59ms +[2025-07-17 17:52:30] [Rank 0] PRINT: step:7250/10000 val_loss:4.4894 train_time:1751517ms step_avg:241.59ms +[2025-07-17 17:52:33] [Rank 0] step:7261/10000 train_time:1754029ms 
step_avg:241.57ms +[2025-07-17 17:52:33] [Rank 0] step:7261/10000 train_time:1754029ms step_avg:241.57ms +[2025-07-17 17:52:38] [Rank 0] step:7281/10000 train_time:1759076ms step_avg:241.60ms +[2025-07-17 17:52:38] [Rank 0] step:7281/10000 train_time:1759076ms step_avg:241.60ms +[2025-07-17 17:52:43] [Rank 0] step:7301/10000 train_time:1764117ms step_avg:241.63ms +[2025-07-17 17:52:43] [Rank 0] step:7301/10000 train_time:1764117ms step_avg:241.63ms +[2025-07-17 17:52:48] [Rank 0] step:7321/10000 train_time:1769177ms step_avg:241.66ms +[2025-07-17 17:52:48] [Rank 0] step:7321/10000 train_time:1769177ms step_avg:241.66ms +[2025-07-17 17:52:53] [Rank 0] step:7341/10000 train_time:1774221ms step_avg:241.69ms +[2025-07-17 17:52:53] [Rank 0] step:7341/10000 train_time:1774221ms step_avg:241.69ms +[2025-07-17 17:52:58] [Rank 0] step:7361/10000 train_time:1779270ms step_avg:241.72ms +[2025-07-17 17:52:58] [Rank 0] step:7361/10000 train_time:1779270ms step_avg:241.72ms +[2025-07-17 17:53:07] [Rank 0] PRINT: step:7375/10000 val_loss:4.3005 train_time:1783054ms step_avg:241.77ms +[2025-07-17 17:53:07] [Rank 0] PRINT: step:7375/10000 val_loss:4.3005 train_time:1783054ms step_avg:241.77ms +[2025-07-17 17:53:08] [Rank 0] step:7381/10000 train_time:1784315ms step_avg:241.74ms +[2025-07-17 17:53:08] [Rank 0] step:7381/10000 train_time:1784315ms step_avg:241.74ms +[2025-07-17 17:53:13] [Rank 0] step:7401/10000 train_time:1789360ms step_avg:241.77ms +[2025-07-17 17:53:13] [Rank 0] step:7401/10000 train_time:1789360ms step_avg:241.77ms +[2025-07-17 17:53:18] [Rank 0] step:7421/10000 train_time:1794404ms step_avg:241.80ms +[2025-07-17 17:53:18] [Rank 0] step:7421/10000 train_time:1794404ms step_avg:241.80ms +[2025-07-17 17:53:23] [Rank 0] step:7441/10000 train_time:1799458ms step_avg:241.83ms +[2025-07-17 17:53:23] [Rank 0] step:7441/10000 train_time:1799458ms step_avg:241.83ms +[2025-07-17 17:53:28] [Rank 0] step:7461/10000 train_time:1804501ms step_avg:241.86ms +[2025-07-17 
17:53:28] [Rank 0] step:7461/10000 train_time:1804501ms step_avg:241.86ms +[2025-07-17 17:53:33] [Rank 0] step:7481/10000 train_time:1809557ms step_avg:241.89ms +[2025-07-17 17:53:33] [Rank 0] step:7481/10000 train_time:1809557ms step_avg:241.89ms +[2025-07-17 17:53:43] [Rank 0] PRINT: step:7500/10000 val_loss:4.3010 train_time:1814619ms step_avg:241.95ms +[2025-07-17 17:53:43] [Rank 0] PRINT: step:7500/10000 val_loss:4.3010 train_time:1814619ms step_avg:241.95ms +[2025-07-17 17:53:43] [Rank 0] step:7501/10000 train_time:1814637ms step_avg:241.92ms +[2025-07-17 17:53:43] [Rank 0] step:7501/10000 train_time:1814637ms step_avg:241.92ms +[2025-07-17 17:53:48] [Rank 0] step:7521/10000 train_time:1819672ms step_avg:241.95ms +[2025-07-17 17:53:48] [Rank 0] step:7521/10000 train_time:1819672ms step_avg:241.95ms +[2025-07-17 17:53:53] [Rank 0] step:7541/10000 train_time:1824715ms step_avg:241.97ms +[2025-07-17 17:53:53] [Rank 0] step:7541/10000 train_time:1824715ms step_avg:241.97ms +[2025-07-17 17:53:58] [Rank 0] step:7561/10000 train_time:1829759ms step_avg:242.00ms +[2025-07-17 17:53:58] [Rank 0] step:7561/10000 train_time:1829759ms step_avg:242.00ms +[2025-07-17 17:54:04] [Rank 0] step:7581/10000 train_time:1834809ms step_avg:242.03ms +[2025-07-17 17:54:04] [Rank 0] step:7581/10000 train_time:1834809ms step_avg:242.03ms +[2025-07-17 17:54:09] [Rank 0] step:7601/10000 train_time:1839872ms step_avg:242.06ms +[2025-07-17 17:54:09] [Rank 0] step:7601/10000 train_time:1839872ms step_avg:242.06ms +[2025-07-17 17:54:14] [Rank 0] step:7621/10000 train_time:1845457ms step_avg:242.15ms +[2025-07-17 17:54:14] [Rank 0] step:7621/10000 train_time:1845457ms step_avg:242.15ms +[2025-07-17 17:54:20] [Rank 0] PRINT: step:7625/10000 val_loss:4.2258 train_time:1846705ms step_avg:242.19ms +[2025-07-17 17:54:20] [Rank 0] PRINT: step:7625/10000 val_loss:4.2258 train_time:1846705ms step_avg:242.19ms +[2025-07-17 17:54:24] [Rank 0] step:7641/10000 train_time:1850495ms step_avg:242.18ms 
+[2025-07-17 17:54:24] [Rank 0] step:7641/10000 train_time:1850495ms step_avg:242.18ms +[2025-07-17 17:54:29] [Rank 0] step:7661/10000 train_time:1855556ms step_avg:242.21ms +[2025-07-17 17:54:29] [Rank 0] step:7661/10000 train_time:1855556ms step_avg:242.21ms +[2025-07-17 17:54:34] [Rank 0] step:7681/10000 train_time:1860632ms step_avg:242.24ms +[2025-07-17 17:54:34] [Rank 0] step:7681/10000 train_time:1860632ms step_avg:242.24ms +[2025-07-17 17:54:39] [Rank 0] step:7701/10000 train_time:1865688ms step_avg:242.27ms +[2025-07-17 17:54:39] [Rank 0] step:7701/10000 train_time:1865688ms step_avg:242.27ms +[2025-07-17 17:54:44] [Rank 0] step:7721/10000 train_time:1870746ms step_avg:242.29ms +[2025-07-17 17:54:44] [Rank 0] step:7721/10000 train_time:1870746ms step_avg:242.29ms +[2025-07-17 17:54:49] [Rank 0] step:7741/10000 train_time:1875801ms step_avg:242.32ms +[2025-07-17 17:54:49] [Rank 0] step:7741/10000 train_time:1875801ms step_avg:242.32ms +[2025-07-17 17:54:56] [Rank 0] PRINT: step:7750/10000 val_loss:4.3007 train_time:1878342ms step_avg:242.37ms +[2025-07-17 17:54:56] [Rank 0] PRINT: step:7750/10000 val_loss:4.3007 train_time:1878342ms step_avg:242.37ms +[2025-07-17 17:54:59] [Rank 0] step:7761/10000 train_time:1880868ms step_avg:242.35ms +[2025-07-17 17:54:59] [Rank 0] step:7761/10000 train_time:1880868ms step_avg:242.35ms +[2025-07-17 17:55:04] [Rank 0] step:7781/10000 train_time:1885928ms step_avg:242.38ms +[2025-07-17 17:55:04] [Rank 0] step:7781/10000 train_time:1885928ms step_avg:242.38ms +[2025-07-17 17:55:09] [Rank 0] step:7801/10000 train_time:1890988ms step_avg:242.40ms +[2025-07-17 17:55:09] [Rank 0] step:7801/10000 train_time:1890988ms step_avg:242.40ms +[2025-07-17 17:55:14] [Rank 0] step:7821/10000 train_time:1896046ms step_avg:242.43ms +[2025-07-17 17:55:14] [Rank 0] step:7821/10000 train_time:1896046ms step_avg:242.43ms +[2025-07-17 17:55:19] [Rank 0] step:7841/10000 train_time:1901105ms step_avg:242.46ms +[2025-07-17 17:55:19] [Rank 0] 
step:7841/10000 train_time:1901105ms step_avg:242.46ms +[2025-07-17 17:55:24] [Rank 0] step:7861/10000 train_time:1906149ms step_avg:242.48ms +[2025-07-17 17:55:24] [Rank 0] step:7861/10000 train_time:1906149ms step_avg:242.48ms +[2025-07-17 17:55:32] [Rank 0] PRINT: step:7875/10000 val_loss:4.2819 train_time:1909934ms step_avg:242.53ms +[2025-07-17 17:55:32] [Rank 0] PRINT: step:7875/10000 val_loss:4.2819 train_time:1909934ms step_avg:242.53ms +[2025-07-17 17:55:34] [Rank 0] step:7881/10000 train_time:1911192ms step_avg:242.51ms +[2025-07-17 17:55:34] [Rank 0] step:7881/10000 train_time:1911192ms step_avg:242.51ms +[2025-07-17 17:55:39] [Rank 0] step:7901/10000 train_time:1916241ms step_avg:242.53ms +[2025-07-17 17:55:39] [Rank 0] step:7901/10000 train_time:1916241ms step_avg:242.53ms +[2025-07-17 17:55:44] [Rank 0] step:7921/10000 train_time:1921289ms step_avg:242.56ms +[2025-07-17 17:55:44] [Rank 0] step:7921/10000 train_time:1921289ms step_avg:242.56ms +[2025-07-17 17:55:49] [Rank 0] step:7941/10000 train_time:1926344ms step_avg:242.58ms +[2025-07-17 17:55:49] [Rank 0] step:7941/10000 train_time:1926344ms step_avg:242.58ms +[2025-07-17 17:55:54] [Rank 0] step:7961/10000 train_time:1931400ms step_avg:242.61ms +[2025-07-17 17:55:54] [Rank 0] step:7961/10000 train_time:1931400ms step_avg:242.61ms +[2025-07-17 17:55:59] [Rank 0] step:7981/10000 train_time:1936439ms step_avg:242.63ms +[2025-07-17 17:55:59] [Rank 0] step:7981/10000 train_time:1936439ms step_avg:242.63ms +[2025-07-17 17:56:08] [Rank 0] PRINT: step:8000/10000 val_loss:4.2859 train_time:1941500ms step_avg:242.69ms +[2025-07-17 17:56:08] [Rank 0] PRINT: step:8000/10000 val_loss:4.2859 train_time:1941500ms step_avg:242.69ms +[2025-07-17 17:56:08] [Rank 0] step:8001/10000 train_time:1941518ms step_avg:242.66ms +[2025-07-17 17:56:08] [Rank 0] step:8001/10000 train_time:1941518ms step_avg:242.66ms +[2025-07-17 17:56:13] [Rank 0] step:8021/10000 train_time:1946549ms step_avg:242.68ms +[2025-07-17 17:56:13] 
[Rank 0] step:8021/10000 train_time:1946549ms step_avg:242.68ms +[2025-07-17 17:56:19] [Rank 0] step:8041/10000 train_time:1951619ms step_avg:242.71ms +[2025-07-17 17:56:19] [Rank 0] step:8041/10000 train_time:1951619ms step_avg:242.71ms +[2025-07-17 17:56:24] [Rank 0] step:8061/10000 train_time:1956659ms step_avg:242.73ms +[2025-07-17 17:56:24] [Rank 0] step:8061/10000 train_time:1956659ms step_avg:242.73ms +[2025-07-17 17:56:29] [Rank 0] step:8081/10000 train_time:1961713ms step_avg:242.76ms +[2025-07-17 17:56:29] [Rank 0] step:8081/10000 train_time:1961713ms step_avg:242.76ms +[2025-07-17 17:56:34] [Rank 0] step:8101/10000 train_time:1966755ms step_avg:242.78ms +[2025-07-17 17:56:34] [Rank 0] step:8101/10000 train_time:1966755ms step_avg:242.78ms +[2025-07-17 17:56:39] [Rank 0] step:8121/10000 train_time:1971812ms step_avg:242.80ms +[2025-07-17 17:56:39] [Rank 0] step:8121/10000 train_time:1971812ms step_avg:242.80ms +[2025-07-17 17:56:44] [Rank 0] PRINT: step:8125/10000 val_loss:4.2458 train_time:1973076ms step_avg:242.84ms +[2025-07-17 17:56:44] [Rank 0] PRINT: step:8125/10000 val_loss:4.2458 train_time:1973076ms step_avg:242.84ms +[2025-07-17 17:56:48] [Rank 0] step:8141/10000 train_time:1976863ms step_avg:242.83ms +[2025-07-17 17:56:48] [Rank 0] step:8141/10000 train_time:1976863ms step_avg:242.83ms +[2025-07-17 17:56:54] [Rank 0] step:8161/10000 train_time:1981947ms step_avg:242.86ms +[2025-07-17 17:56:54] [Rank 0] step:8161/10000 train_time:1981947ms step_avg:242.86ms +[2025-07-17 17:56:59] [Rank 0] step:8181/10000 train_time:1987062ms step_avg:242.89ms +[2025-07-17 17:56:59] [Rank 0] step:8181/10000 train_time:1987062ms step_avg:242.89ms +[2025-07-17 17:57:04] [Rank 0] step:8201/10000 train_time:1992158ms step_avg:242.92ms +[2025-07-17 17:57:04] [Rank 0] step:8201/10000 train_time:1992158ms step_avg:242.92ms +[2025-07-17 17:57:09] [Rank 0] step:8221/10000 train_time:1997268ms step_avg:242.95ms +[2025-07-17 17:57:09] [Rank 0] step:8221/10000 
train_time:1997268ms step_avg:242.95ms +[2025-07-17 17:57:14] [Rank 0] step:8241/10000 train_time:2002379ms step_avg:242.98ms +[2025-07-17 17:57:14] [Rank 0] step:8241/10000 train_time:2002379ms step_avg:242.98ms +[2025-07-17 17:57:21] [Rank 0] PRINT: step:8250/10000 val_loss:4.2314 train_time:2004940ms step_avg:243.02ms +[2025-07-17 17:57:21] [Rank 0] PRINT: step:8250/10000 val_loss:4.2314 train_time:2004940ms step_avg:243.02ms +[2025-07-17 17:57:24] [Rank 0] step:8261/10000 train_time:2007492ms step_avg:243.01ms +[2025-07-17 17:57:24] [Rank 0] step:8261/10000 train_time:2007492ms step_avg:243.01ms +[2025-07-17 17:57:29] [Rank 0] step:8281/10000 train_time:2012625ms step_avg:243.04ms +[2025-07-17 17:57:29] [Rank 0] step:8281/10000 train_time:2012625ms step_avg:243.04ms +[2025-07-17 17:57:34] [Rank 0] step:8301/10000 train_time:2017725ms step_avg:243.07ms +[2025-07-17 17:57:34] [Rank 0] step:8301/10000 train_time:2017725ms step_avg:243.07ms +[2025-07-17 17:57:39] [Rank 0] step:8321/10000 train_time:2022844ms step_avg:243.10ms +[2025-07-17 17:57:39] [Rank 0] step:8321/10000 train_time:2022844ms step_avg:243.10ms +[2025-07-17 17:57:44] [Rank 0] step:8341/10000 train_time:2027967ms step_avg:243.13ms +[2025-07-17 17:57:44] [Rank 0] step:8341/10000 train_time:2027967ms step_avg:243.13ms +[2025-07-17 17:57:49] [Rank 0] step:8361/10000 train_time:2033071ms step_avg:243.16ms +[2025-07-17 17:57:49] [Rank 0] step:8361/10000 train_time:2033071ms step_avg:243.16ms +[2025-07-17 17:57:58] [Rank 0] PRINT: step:8375/10000 val_loss:4.1673 train_time:2036901ms step_avg:243.21ms +[2025-07-17 17:57:58] [Rank 0] PRINT: step:8375/10000 val_loss:4.1673 train_time:2036901ms step_avg:243.21ms +[2025-07-17 17:57:59] [Rank 0] step:8381/10000 train_time:2038172ms step_avg:243.19ms +[2025-07-17 17:57:59] [Rank 0] step:8381/10000 train_time:2038172ms step_avg:243.19ms +[2025-07-17 17:58:04] [Rank 0] step:8401/10000 train_time:2043272ms step_avg:243.22ms +[2025-07-17 17:58:04] [Rank 0] 
step:8401/10000 train_time:2043272ms step_avg:243.22ms +[2025-07-17 17:58:10] [Rank 0] step:8421/10000 train_time:2048389ms step_avg:243.25ms +[2025-07-17 17:58:10] [Rank 0] step:8421/10000 train_time:2048389ms step_avg:243.25ms +[2025-07-17 17:58:15] [Rank 0] step:8441/10000 train_time:2053509ms step_avg:243.28ms +[2025-07-17 17:58:15] [Rank 0] step:8441/10000 train_time:2053509ms step_avg:243.28ms +[2025-07-17 17:58:20] [Rank 0] step:8461/10000 train_time:2058633ms step_avg:243.31ms +[2025-07-17 17:58:20] [Rank 0] step:8461/10000 train_time:2058633ms step_avg:243.31ms +[2025-07-17 17:58:25] [Rank 0] step:8481/10000 train_time:2063741ms step_avg:243.34ms +[2025-07-17 17:58:25] [Rank 0] step:8481/10000 train_time:2063741ms step_avg:243.34ms +[2025-07-17 17:58:35] [Rank 0] PRINT: step:8500/10000 val_loss:4.2110 train_time:2068868ms step_avg:243.40ms +[2025-07-17 17:58:35] [Rank 0] PRINT: step:8500/10000 val_loss:4.2110 train_time:2068868ms step_avg:243.40ms +[2025-07-17 17:58:35] [Rank 0] step:8501/10000 train_time:2068887ms step_avg:243.37ms +[2025-07-17 17:58:35] [Rank 0] step:8501/10000 train_time:2068887ms step_avg:243.37ms +[2025-07-17 17:58:40] [Rank 0] step:8521/10000 train_time:2073984ms step_avg:243.40ms +[2025-07-17 17:58:40] [Rank 0] step:8521/10000 train_time:2073984ms step_avg:243.40ms +[2025-07-17 17:58:45] [Rank 0] step:8541/10000 train_time:2079122ms step_avg:243.43ms +[2025-07-17 17:58:45] [Rank 0] step:8541/10000 train_time:2079122ms step_avg:243.43ms +[2025-07-17 17:58:50] [Rank 0] step:8561/10000 train_time:2084234ms step_avg:243.46ms +[2025-07-17 17:58:50] [Rank 0] step:8561/10000 train_time:2084234ms step_avg:243.46ms +[2025-07-17 17:58:55] [Rank 0] step:8581/10000 train_time:2089355ms step_avg:243.49ms +[2025-07-17 17:58:55] [Rank 0] step:8581/10000 train_time:2089355ms step_avg:243.49ms +[2025-07-17 17:59:00] [Rank 0] step:8601/10000 train_time:2094460ms step_avg:243.51ms +[2025-07-17 17:59:00] [Rank 0] step:8601/10000 train_time:2094460ms 
step_avg:243.51ms +[2025-07-17 17:59:06] [Rank 0] step:8621/10000 train_time:2099574ms step_avg:243.54ms +[2025-07-17 17:59:06] [Rank 0] step:8621/10000 train_time:2099574ms step_avg:243.54ms +[2025-07-17 17:59:11] [Rank 0] PRINT: step:8625/10000 val_loss:4.1915 train_time:2100855ms step_avg:243.58ms +[2025-07-17 17:59:11] [Rank 0] PRINT: step:8625/10000 val_loss:4.1915 train_time:2100855ms step_avg:243.58ms +[2025-07-17 17:59:15] [Rank 0] step:8641/10000 train_time:2104794ms step_avg:243.58ms +[2025-07-17 17:59:15] [Rank 0] step:8641/10000 train_time:2104794ms step_avg:243.58ms +[2025-07-17 17:59:20] [Rank 0] step:8661/10000 train_time:2109917ms step_avg:243.61ms +[2025-07-17 17:59:20] [Rank 0] step:8661/10000 train_time:2109917ms step_avg:243.61ms +[2025-07-17 17:59:25] [Rank 0] step:8681/10000 train_time:2115034ms step_avg:243.64ms +[2025-07-17 17:59:25] [Rank 0] step:8681/10000 train_time:2115034ms step_avg:243.64ms +[2025-07-17 17:59:31] [Rank 0] step:8701/10000 train_time:2120161ms step_avg:243.67ms +[2025-07-17 17:59:31] [Rank 0] step:8701/10000 train_time:2120161ms step_avg:243.67ms +[2025-07-17 17:59:36] [Rank 0] step:8721/10000 train_time:2125292ms step_avg:243.70ms +[2025-07-17 17:59:36] [Rank 0] step:8721/10000 train_time:2125292ms step_avg:243.70ms +[2025-07-17 17:59:41] [Rank 0] step:8741/10000 train_time:2130414ms step_avg:243.73ms +[2025-07-17 17:59:41] [Rank 0] step:8741/10000 train_time:2130414ms step_avg:243.73ms +[2025-07-17 17:59:48] [Rank 0] PRINT: step:8750/10000 val_loss:4.1919 train_time:2132969ms step_avg:243.77ms +[2025-07-17 17:59:48] [Rank 0] PRINT: step:8750/10000 val_loss:4.1919 train_time:2132969ms step_avg:243.77ms +[2025-07-17 17:59:51] [Rank 0] step:8761/10000 train_time:2135521ms step_avg:243.75ms +[2025-07-17 17:59:51] [Rank 0] step:8761/10000 train_time:2135521ms step_avg:243.75ms +[2025-07-17 17:59:56] [Rank 0] step:8781/10000 train_time:2140634ms step_avg:243.78ms +[2025-07-17 17:59:56] [Rank 0] step:8781/10000 
train_time:2140634ms step_avg:243.78ms +[2025-07-17 18:00:01] [Rank 0] step:8801/10000 train_time:2145761ms step_avg:243.81ms +[2025-07-17 18:00:01] [Rank 0] step:8801/10000 train_time:2145761ms step_avg:243.81ms +[2025-07-17 18:00:06] [Rank 0] step:8821/10000 train_time:2150879ms step_avg:243.84ms +[2025-07-17 18:00:06] [Rank 0] step:8821/10000 train_time:2150879ms step_avg:243.84ms +[2025-07-17 18:00:11] [Rank 0] step:8841/10000 train_time:2156023ms step_avg:243.87ms +[2025-07-17 18:00:11] [Rank 0] step:8841/10000 train_time:2156023ms step_avg:243.87ms +[2025-07-17 18:00:16] [Rank 0] step:8861/10000 train_time:2161157ms step_avg:243.90ms +[2025-07-17 18:00:16] [Rank 0] step:8861/10000 train_time:2161157ms step_avg:243.90ms +[2025-07-17 18:00:25] [Rank 0] PRINT: step:8875/10000 val_loss:4.2169 train_time:2164986ms step_avg:243.94ms +[2025-07-17 18:00:25] [Rank 0] PRINT: step:8875/10000 val_loss:4.2169 train_time:2164986ms step_avg:243.94ms +[2025-07-17 18:00:26] [Rank 0] step:8881/10000 train_time:2166263ms step_avg:243.92ms +[2025-07-17 18:00:26] [Rank 0] step:8881/10000 train_time:2166263ms step_avg:243.92ms +[2025-07-17 18:00:31] [Rank 0] step:8901/10000 train_time:2171379ms step_avg:243.95ms +[2025-07-17 18:00:31] [Rank 0] step:8901/10000 train_time:2171379ms step_avg:243.95ms +[2025-07-17 18:00:36] [Rank 0] step:8921/10000 train_time:2176499ms step_avg:243.97ms +[2025-07-17 18:00:36] [Rank 0] step:8921/10000 train_time:2176499ms step_avg:243.97ms +[2025-07-17 18:00:42] [Rank 0] step:8941/10000 train_time:2181621ms step_avg:244.00ms +[2025-07-17 18:00:42] [Rank 0] step:8941/10000 train_time:2181621ms step_avg:244.00ms +[2025-07-17 18:00:47] [Rank 0] step:8961/10000 train_time:2186747ms step_avg:244.03ms +[2025-07-17 18:00:47] [Rank 0] step:8961/10000 train_time:2186747ms step_avg:244.03ms +[2025-07-17 18:00:52] [Rank 0] step:8981/10000 train_time:2191871ms step_avg:244.06ms +[2025-07-17 18:00:52] [Rank 0] step:8981/10000 train_time:2191871ms step_avg:244.06ms 
+[2025-07-17 18:01:01] [Rank 0] PRINT: step:9000/10000 val_loss:4.1349 train_time:2197000ms step_avg:244.11ms +[2025-07-17 18:01:01] [Rank 0] PRINT: step:9000/10000 val_loss:4.1349 train_time:2197000ms step_avg:244.11ms +[2025-07-17 18:01:02] [Rank 0] step:9001/10000 train_time:2197019ms step_avg:244.09ms +[2025-07-17 18:01:02] [Rank 0] step:9001/10000 train_time:2197019ms step_avg:244.09ms +[2025-07-17 18:01:07] [Rank 0] step:9021/10000 train_time:2202116ms step_avg:244.11ms +[2025-07-17 18:01:07] [Rank 0] step:9021/10000 train_time:2202116ms step_avg:244.11ms +[2025-07-17 18:01:12] [Rank 0] step:9041/10000 train_time:2207262ms step_avg:244.14ms +[2025-07-17 18:01:12] [Rank 0] step:9041/10000 train_time:2207262ms step_avg:244.14ms +[2025-07-17 18:01:17] [Rank 0] step:9061/10000 train_time:2212389ms step_avg:244.17ms +[2025-07-17 18:01:17] [Rank 0] step:9061/10000 train_time:2212389ms step_avg:244.17ms +[2025-07-17 18:01:22] [Rank 0] step:9081/10000 train_time:2217544ms step_avg:244.20ms +[2025-07-17 18:01:22] [Rank 0] step:9081/10000 train_time:2217544ms step_avg:244.20ms +[2025-07-17 18:01:27] [Rank 0] step:9101/10000 train_time:2222691ms step_avg:244.22ms +[2025-07-17 18:01:27] [Rank 0] step:9101/10000 train_time:2222691ms step_avg:244.22ms +[2025-07-17 18:01:32] [Rank 0] step:9121/10000 train_time:2227833ms step_avg:244.25ms +[2025-07-17 18:01:32] [Rank 0] step:9121/10000 train_time:2227833ms step_avg:244.25ms +[2025-07-17 18:01:38] [Rank 0] PRINT: step:9125/10000 val_loss:4.1704 train_time:2229112ms step_avg:244.29ms +[2025-07-17 18:01:38] [Rank 0] PRINT: step:9125/10000 val_loss:4.1704 train_time:2229112ms step_avg:244.29ms +[2025-07-17 18:01:42] [Rank 0] step:9141/10000 train_time:2232950ms step_avg:244.28ms +[2025-07-17 18:01:42] [Rank 0] step:9141/10000 train_time:2232950ms step_avg:244.28ms +[2025-07-17 18:01:48] [Rank 0] step:9161/10000 train_time:2238612ms step_avg:244.36ms +[2025-07-17 18:01:48] [Rank 0] step:9161/10000 train_time:2238612ms 
step_avg:244.36ms +[2025-07-17 18:01:53] [Rank 0] step:9181/10000 train_time:2243743ms step_avg:244.39ms +[2025-07-17 18:01:53] [Rank 0] step:9181/10000 train_time:2243743ms step_avg:244.39ms +[2025-07-17 18:01:58] [Rank 0] step:9201/10000 train_time:2248872ms step_avg:244.42ms +[2025-07-17 18:01:58] [Rank 0] step:9201/10000 train_time:2248872ms step_avg:244.42ms +[2025-07-17 18:02:03] [Rank 0] step:9221/10000 train_time:2254035ms step_avg:244.45ms +[2025-07-17 18:02:03] [Rank 0] step:9221/10000 train_time:2254035ms step_avg:244.45ms +[2025-07-17 18:02:09] [Rank 0] step:9241/10000 train_time:2259169ms step_avg:244.47ms +[2025-07-17 18:02:09] [Rank 0] step:9241/10000 train_time:2259169ms step_avg:244.47ms +[2025-07-17 18:02:16] [Rank 0] PRINT: step:9250/10000 val_loss:4.1859 train_time:2261740ms step_avg:244.51ms +[2025-07-17 18:02:16] [Rank 0] PRINT: step:9250/10000 val_loss:4.1859 train_time:2261740ms step_avg:244.51ms +[2025-07-17 18:02:18] [Rank 0] step:9261/10000 train_time:2264308ms step_avg:244.50ms +[2025-07-17 18:02:18] [Rank 0] step:9261/10000 train_time:2264308ms step_avg:244.50ms +[2025-07-17 18:02:24] [Rank 0] step:9281/10000 train_time:2269425ms step_avg:244.52ms +[2025-07-17 18:02:24] [Rank 0] step:9281/10000 train_time:2269425ms step_avg:244.52ms +[2025-07-17 18:02:29] [Rank 0] step:9301/10000 train_time:2274558ms step_avg:244.55ms +[2025-07-17 18:02:29] [Rank 0] step:9301/10000 train_time:2274558ms step_avg:244.55ms +[2025-07-17 18:02:34] [Rank 0] step:9321/10000 train_time:2279713ms step_avg:244.58ms +[2025-07-17 18:02:34] [Rank 0] step:9321/10000 train_time:2279713ms step_avg:244.58ms +[2025-07-17 18:02:39] [Rank 0] step:9341/10000 train_time:2284842ms step_avg:244.60ms +[2025-07-17 18:02:39] [Rank 0] step:9341/10000 train_time:2284842ms step_avg:244.60ms +[2025-07-17 18:02:44] [Rank 0] step:9361/10000 train_time:2289969ms step_avg:244.63ms +[2025-07-17 18:02:44] [Rank 0] step:9361/10000 train_time:2289969ms step_avg:244.63ms +[2025-07-17 
18:02:52] [Rank 0] PRINT: step:9375/10000 val_loss:4.2059 train_time:2293812ms step_avg:244.67ms +[2025-07-17 18:02:52] [Rank 0] PRINT: step:9375/10000 val_loss:4.2059 train_time:2293812ms step_avg:244.67ms +[2025-07-17 18:02:54] [Rank 0] step:9381/10000 train_time:2295088ms step_avg:244.65ms +[2025-07-17 18:02:54] [Rank 0] step:9381/10000 train_time:2295088ms step_avg:244.65ms +[2025-07-17 18:02:59] [Rank 0] step:9401/10000 train_time:2300195ms step_avg:244.68ms +[2025-07-17 18:02:59] [Rank 0] step:9401/10000 train_time:2300195ms step_avg:244.68ms +[2025-07-17 18:03:04] [Rank 0] step:9421/10000 train_time:2305317ms step_avg:244.70ms +[2025-07-17 18:03:04] [Rank 0] step:9421/10000 train_time:2305317ms step_avg:244.70ms +[2025-07-17 18:03:09] [Rank 0] step:9441/10000 train_time:2310439ms step_avg:244.72ms +[2025-07-17 18:03:09] [Rank 0] step:9441/10000 train_time:2310439ms step_avg:244.72ms +[2025-07-17 18:03:14] [Rank 0] step:9461/10000 train_time:2315571ms step_avg:244.75ms +[2025-07-17 18:03:14] [Rank 0] step:9461/10000 train_time:2315571ms step_avg:244.75ms +[2025-07-17 18:03:19] [Rank 0] step:9481/10000 train_time:2320695ms step_avg:244.77ms +[2025-07-17 18:03:19] [Rank 0] step:9481/10000 train_time:2320695ms step_avg:244.77ms +[2025-07-17 18:03:29] [Rank 0] PRINT: step:9500/10000 val_loss:4.2303 train_time:2325846ms step_avg:244.83ms +[2025-07-17 18:03:29] [Rank 0] PRINT: step:9500/10000 val_loss:4.2303 train_time:2325846ms step_avg:244.83ms +[2025-07-17 18:03:29] [Rank 0] step:9501/10000 train_time:2325865ms step_avg:244.80ms +[2025-07-17 18:03:29] [Rank 0] step:9501/10000 train_time:2325865ms step_avg:244.80ms +[2025-07-17 18:03:34] [Rank 0] step:9521/10000 train_time:2330963ms step_avg:244.82ms +[2025-07-17 18:03:34] [Rank 0] step:9521/10000 train_time:2330963ms step_avg:244.82ms +[2025-07-17 18:03:39] [Rank 0] step:9541/10000 train_time:2336092ms step_avg:244.85ms +[2025-07-17 18:03:39] [Rank 0] step:9541/10000 train_time:2336092ms step_avg:244.85ms 
+[2025-07-17 18:03:44] [Rank 0] step:9561/10000 train_time:2341209ms step_avg:244.87ms +[2025-07-17 18:03:44] [Rank 0] step:9561/10000 train_time:2341209ms step_avg:244.87ms +[2025-07-17 18:03:50] [Rank 0] step:9581/10000 train_time:2346323ms step_avg:244.89ms +[2025-07-17 18:03:50] [Rank 0] step:9581/10000 train_time:2346323ms step_avg:244.89ms +[2025-07-17 18:03:55] [Rank 0] step:9601/10000 train_time:2351441ms step_avg:244.92ms +[2025-07-17 18:03:55] [Rank 0] step:9601/10000 train_time:2351441ms step_avg:244.92ms +[2025-07-17 18:04:00] [Rank 0] step:9621/10000 train_time:2356591ms step_avg:244.94ms +[2025-07-17 18:04:00] [Rank 0] step:9621/10000 train_time:2356591ms step_avg:244.94ms +[2025-07-17 18:04:06] [Rank 0] PRINT: step:9625/10000 val_loss:4.2328 train_time:2357869ms step_avg:244.97ms +[2025-07-17 18:04:06] [Rank 0] PRINT: step:9625/10000 val_loss:4.2328 train_time:2357869ms step_avg:244.97ms +[2025-07-17 18:04:10] [Rank 0] step:9641/10000 train_time:2361739ms step_avg:244.97ms +[2025-07-17 18:04:10] [Rank 0] step:9641/10000 train_time:2361739ms step_avg:244.97ms +[2025-07-17 18:04:15] [Rank 0] step:9661/10000 train_time:2367418ms step_avg:245.05ms +[2025-07-17 18:04:15] [Rank 0] step:9661/10000 train_time:2367418ms step_avg:245.05ms +[2025-07-17 18:04:21] [Rank 0] step:9681/10000 train_time:2372601ms step_avg:245.08ms +[2025-07-17 18:04:21] [Rank 0] step:9681/10000 train_time:2372601ms step_avg:245.08ms +[2025-07-17 18:04:26] [Rank 0] step:9701/10000 train_time:2377793ms step_avg:245.11ms +[2025-07-17 18:04:26] [Rank 0] step:9701/10000 train_time:2377793ms step_avg:245.11ms +[2025-07-17 18:04:31] [Rank 0] step:9721/10000 train_time:2382964ms step_avg:245.14ms +[2025-07-17 18:04:31] [Rank 0] step:9721/10000 train_time:2382964ms step_avg:245.14ms +[2025-07-17 18:04:36] [Rank 0] step:9741/10000 train_time:2388156ms step_avg:245.17ms +[2025-07-17 18:04:36] [Rank 0] step:9741/10000 train_time:2388156ms step_avg:245.17ms +[2025-07-17 18:04:43] [Rank 0] PRINT: 
step:9750/10000 val_loss:4.2859 train_time:2390737ms step_avg:245.20ms +[2025-07-17 18:04:43] [Rank 0] PRINT: step:9750/10000 val_loss:4.2859 train_time:2390737ms step_avg:245.20ms +[2025-07-17 18:04:46] [Rank 0] step:9761/10000 train_time:2393318ms step_avg:245.19ms +[2025-07-17 18:04:46] [Rank 0] step:9761/10000 train_time:2393318ms step_avg:245.19ms +[2025-07-17 18:04:51] [Rank 0] step:9781/10000 train_time:2398491ms step_avg:245.22ms +[2025-07-17 18:04:51] [Rank 0] step:9781/10000 train_time:2398491ms step_avg:245.22ms +[2025-07-17 18:04:56] [Rank 0] step:9801/10000 train_time:2403648ms step_avg:245.25ms +[2025-07-17 18:04:56] [Rank 0] step:9801/10000 train_time:2403648ms step_avg:245.25ms +[2025-07-17 18:05:01] [Rank 0] step:9821/10000 train_time:2408816ms step_avg:245.27ms +[2025-07-17 18:05:01] [Rank 0] step:9821/10000 train_time:2408816ms step_avg:245.27ms +[2025-07-17 18:05:06] [Rank 0] step:9841/10000 train_time:2413973ms step_avg:245.30ms +[2025-07-17 18:05:06] [Rank 0] step:9841/10000 train_time:2413973ms step_avg:245.30ms +[2025-07-17 18:05:11] [Rank 0] step:9861/10000 train_time:2419126ms step_avg:245.32ms +[2025-07-17 18:05:11] [Rank 0] step:9861/10000 train_time:2419126ms step_avg:245.32ms +[2025-07-17 18:05:20] [Rank 0] PRINT: step:9875/10000 val_loss:4.2846 train_time:2422992ms step_avg:245.37ms +[2025-07-17 18:05:20] [Rank 0] PRINT: step:9875/10000 val_loss:4.2846 train_time:2422992ms step_avg:245.37ms +[2025-07-17 18:05:21] [Rank 0] step:9881/10000 train_time:2424289ms step_avg:245.35ms +[2025-07-17 18:05:21] [Rank 0] step:9881/10000 train_time:2424289ms step_avg:245.35ms +[2025-07-17 18:05:27] [Rank 0] step:9901/10000 train_time:2429447ms step_avg:245.37ms +[2025-07-17 18:05:27] [Rank 0] step:9901/10000 train_time:2429447ms step_avg:245.37ms +[2025-07-17 18:05:32] [Rank 0] step:9921/10000 train_time:2434621ms step_avg:245.40ms +[2025-07-17 18:05:32] [Rank 0] step:9921/10000 train_time:2434621ms step_avg:245.40ms +[2025-07-17 18:05:37] [Rank 0] 
step:9941/10000 train_time:2439819ms step_avg:245.43ms +[2025-07-17 18:05:37] [Rank 0] step:9941/10000 train_time:2439819ms step_avg:245.43ms +[2025-07-17 18:05:42] [Rank 0] step:9961/10000 train_time:2445001ms step_avg:245.46ms +[2025-07-17 18:05:42] [Rank 0] step:9961/10000 train_time:2445001ms step_avg:245.46ms +[2025-07-17 18:05:47] [Rank 0] step:9981/10000 train_time:2450201ms step_avg:245.49ms +[2025-07-17 18:05:47] [Rank 0] step:9981/10000 train_time:2450201ms step_avg:245.49ms +[2025-07-17 18:05:52] [Rank 0] step:10000/10000 train_time:2455097ms step_avg:245.51ms +[2025-07-17 18:05:52] [Rank 0] step:10000/10000 train_time:2455097ms step_avg:245.51ms +[2025-07-17 18:05:57] [Rank 0] PRINT: step:10000/10000 val_loss:4.2308 train_time:2455358ms step_avg:245.54ms +[2025-07-17 18:05:57] [Rank 0] PRINT: step:10000/10000 val_loss:4.2308 train_time:2455358ms step_avg:245.54ms +[2025-07-17 18:05:57] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 18:05:57 2025 --- +[2025-07-17 18:05:57] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 18:05:57 2025 --- +[2025-07-17 18:05:57] [Rank 0] PRINT: Peak memory allocated: 30687 MiB reserved: 31036 MiB +[2025-07-17 18:05:57] [Rank 0] PRINT: Peak memory allocated: 30687 MiB reserved: 31036 MiB diff --git a/logs_norope/diff_modes/mode_1_param_norope_seed_42/config.json b/logs_norope/diff_modes/mode_1_param_norope_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..2c7b8190917dc5e39a73a232292bb48d11dd5b90 --- /dev/null +++ b/logs_norope/diff_modes/mode_1_param_norope_seed_42/config.json @@ -0,0 +1,22 @@ +{ + "cli_args": { + "unet": false, + "seed": 42, + "optimizer_mode": 1, + "model_parameterization": "norope" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 10485760, + "train_seq_len": 49152, + 
"val_seq_len": 262144, + "num_iterations": 10000, + "cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 125, + "save_checkpoint": false + }, + "run_uuid_for_log": "0761970e-e9a5-4232-9a1f-b24795f2a2db", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_norope/diff_modes/mode_1_param_norope_seed_42/training_log_0761970e-e9a5-4232-9a1f-b24795f2a2db.txt b/logs_norope/diff_modes/mode_1_param_norope_seed_42/training_log_0761970e-e9a5-4232-9a1f-b24795f2a2db.txt new file mode 100644 index 0000000000000000000000000000000000000000..303d2090aea8f1e31e6c7b52161e2246effd16b3 --- /dev/null +++ b/logs_norope/diff_modes/mode_1_param_norope_seed_42/training_log_0761970e-e9a5-4232-9a1f-b24795f2a2db.txt @@ -0,0 +1,2360 @@ +[2025-07-17 10:53:48] [Rank 0] PRINT: --- Script Start: Thu Jul 17 10:53:48 2025 --- +[2025-07-17 10:53:48] [Rank 0] PRINT: --- Script Start: Thu Jul 17 10:53:48 2025 --- +[2025-07-17 10:53:48] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=1, model_parameterization='norope') +[2025-07-17 10:53:48] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=1, model_parameterization='norope') +[2025-07-17 10:53:49] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 10:53:49] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 10:53:49] [Rank 0] PRINT: Using fixed seed: 42 +[2025-07-17 10:53:49] [Rank 0] PRINT: Using fixed seed: 42 +[2025-07-17 10:53:49] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_1_param_norope_seed_42 +[2025-07-17 10:53:49] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_1_param_norope_seed_42 +[2025-07-17 10:53:49] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path 
+import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * 
num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used 
for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + 
config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 10:53:49] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + 
num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + 
run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
# (other initial logs elided in the logged copy)

########################################
# Construct model and optimizer #
########################################
print0("PRINT: Constructing model...", console=True)
# GPT is defined earlier in this file (outside this chunk); max_seq_len must
# cover both train and val sequence lengths since one module serves both.
model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768,
                       max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda()
# Embedding tables are kept in bfloat16 to save memory/bandwidth.
for m in model.modules():
    if isinstance(m, nn.Embedding):
        m.bfloat16()
print0("PRINT: Broadcasting model parameters...", console=True)
# Rank 0's initialization is broadcast so all ranks start from identical weights.
for param in model.parameters():
    dist.broadcast(param.detach(), 0)
print0("PRINT: Model constructed and broadcasted.", console=True)

# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP ---
if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope":
    print0("PRINT: Collecting parameters for optimizers...", console=True)
    head_params = [model.lm_head.weight]
    embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds]

    # Granular collection for attention and MLP parts
    attn_q_params = []
    attn_k_params = []
    attn_v_params = []
    attn_o_params = []  # W_O from c_proj
    mlp_fc_params = []
    mlp_proj_params = []

    for block_module in model.blocks:
        if block_module.attn is not None:
            # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class
            # (hasattr guards make missing ones a logged warning, not a crash).
            if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w)
            else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True)
            if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w)
            else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True)
            if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w)
            else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True)
            attn_o_params.append(block_module.attn.c_proj.weight)
        if block_module.mlp is not None:
            mlp_fc_params.append(block_module.mlp.c_fc.weight)
            mlp_proj_params.append(block_module.mlp.c_proj.weight)

    # Combine into logical groups for experiments
    attn_qk_group = attn_q_params + attn_k_params
    attn_vo_group = attn_v_params + attn_o_params
    all_attn_matrices = attn_qk_group + attn_vo_group
    mlp_w1_group = mlp_fc_params
    mlp_w2_group = mlp_proj_params
    all_mlp_matrices = mlp_fc_params + mlp_proj_params

    # Scalar parameters (all others not explicitly grouped as matrices).
    # NOTE(review): existing run logs show a torch.Size([262144, 768]) tensor
    # landing here, so at least one 2-D parameter is NOT caught by the groups
    # above — the warning below fires for it; worth auditing the grouping.
    matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices)
    scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check]
    for p_scalar in scalar_params: # Sanity check
        if p_scalar.ndim >=2:
            print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True)


    # Determine parameter distribution based on optimizer_mode
    muon_params_target_list = []
    adam_matrix_target_list = []  # Matrices that Adam will handle specifically
    adam_matrix_lr = 0.001        # LR for matrices if Adam handles them (can be tuned)

    current_optimizer_mode = exp_args.optimizer_mode
    print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True)

    if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params"
        print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True)
        muon_params_target_list = all_attn_matrices + all_mlp_matrices
        # Adam handles embeds, head, scalars by default. No extra matrices for Adam here.
    elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP
        print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = attn_qk_group
        adam_matrix_target_list = attn_vo_group + all_mlp_matrices
    elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP
        print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = attn_vo_group
        adam_matrix_target_list = attn_qk_group + all_mlp_matrices
    elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP
        print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = all_attn_matrices
        adam_matrix_target_list = all_mlp_matrices
    elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO)
        print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = all_mlp_matrices
        adam_matrix_target_list = all_attn_matrices
    elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam
        print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = []
        adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam
    elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP
        print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = mlp_w2_group
        adam_matrix_target_list = all_attn_matrices + mlp_w1_group
    elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn
        print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = attn_vo_group + all_mlp_matrices
        adam_matrix_target_list = attn_qk_group
    elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP
        print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = attn_vo_group + mlp_w2_group
        adam_matrix_target_list = attn_qk_group + mlp_w1_group
    else:
        raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}")

    # Adam optimizer setup
    adam_param_groups_config = [
        dict(params=head_params, lr=0.22),
        dict(params=embed_params, lr=0.6),
        dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam
    ]
    # Add matrices specifically assigned to Adam for this experiment mode
    if adam_matrix_target_list:
        # Ensure adam_matrix_target_list is flat and contains Parameters
        flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None]
        if flat_adam_matrices: # Only add group if there are params
            adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr))

    # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty)
    adam_param_groups_config = [g for g in adam_param_groups_config if g['params']]
    optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True)
    optimizers = [optimizer1] # Start with Adam

    # Muon optimizer setup
    if muon_params_target_list:
        # Ensure muon_params_target_list is flat, unique, and contains Parameters
        # (dedup by id() since the same weight may appear in several groups).
        flat_unique_muon_params = []
        seen_muon_ids = set()
        for sublist_or_p in muon_params_target_list:
            for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]):
                if p is not None and id(p) not in seen_muon_ids:
                    flat_unique_muon_params.append(p)
                    seen_muon_ids.add(id(p))

        if flat_unique_muon_params: # Only create Muon if it has parameters
            optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps
            optimizers.append(optimizer2)
        else:
            print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True)
            optimizer2 = None # Explicitly set to None if not created
    else:
        print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True)
        optimizer2 = None # Explicitly set to None

    print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True)
    if optimizer2:
        print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True)
    # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP ---
elif exp_args.model_parameterization == "whole":
    # Legacy grouping: one Muon bucket for every >=2-D non-embedding matrix.
    hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n]
    embed_params = [p for n, p in model.named_parameters() if "embed" in n]
    scalar_params = [p for p in model.parameters() if p.ndim < 2]
    head_params = [model.lm_head.weight]

    # init the optimizer(s)
    adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)]
    # small adam epsilon by @YouJiacheng.
    # this is an alternate method of fixing the world_size dependence
    # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094
    optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True)
    optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size)
    optimizers = [optimizer1, optimizer2]

# Remember each group's base LR so the schedule below can scale it per step.
for opt in optimizers:
    for group in opt.param_groups:
        group["initial_lr"] = group["lr"]

# learning rate schedule: stable then decay (KEEP AS IS, but check assert)
def get_lr(step: int) -> float:
    """Return the LR multiplier for `step`: 1.0 during the stable phase,
    then a linear ramp from 1.0 down to 0.1 over the final cooldown_frac
    of training. `step` may equal num_iterations (x is clamped to [0, 1])."""
    x = step / args.num_iterations # progress in training
    # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations
    # --- MODIFICATION: Adjust assert for LR schedule ---
    if not (0 <= x <= 1): # Allow x=1 for the last step
        x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations
        # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log

    if x < 1 - args.cooldown_frac:
        return 1.0
    else:
        # Ensure cooldown_frac is not zero to avoid division by zero
        w = (1 - x) / max(args.cooldown_frac, 1e-9)
        return w * 1.0 + (1 - w) * 0.1

# attention window size schedule (KEEP AS IS)
def next_multiple_of_n(v: float | int, *, n: int) -> int:
    """Smallest multiple of n that is >= v."""
    return next(x for x in range(n, int(v) + 1 + n, n) if x >= v)
@lru_cache(1)
def get_window_size_blocks_helper(window_size: int):
    # Cached (size-1 LRU) CUDA scalar: window size expressed in 128-token blocks.
    return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True)
def get_window_size_blocks(step: int):
    """Attention window (in 128-token blocks) that grows linearly with
    training progress from 128 tokens up to 1728 (rounded up to a multiple
    of 128)."""
    x = step / args.num_iterations # progress in training
    # --- MODIFICATION: Adjust assert for window size schedule ---
    if not (0 <= x <= 1):
        x = min(max(x, 0.0), 1.0) # Clamp x

    # Ensure window_size is at least 128
    window_size = max(128, next_multiple_of_n(1728 * x, n=128))
    return get_window_size_blocks_helper(window_size)

print0("PRINT: Compiling model with TorchInductor...", console=True)
# Use
# Compile the eager 'model' ('model_compiled' does not exist before this call).
model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune")
print0("PRINT: Model compilation complete.", console=True)

########################################
# Warmup kernels #
########################################
# Run a few full train steps on random tokens to trigger compilation/autotune,
# then restore model and optimizer state so warmup leaves no trace.
print0("PRINT: Starting warmup...", console=True)
warmup_steps = 10
initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled
                     optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers])
for i in range(warmup_steps):
    # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose
    inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda")
    loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled
    loss.backward()
    for param in model_compiled.parameters(): # Use model_compiled
        if param.grad is not None:
            dist.all_reduce(param.grad, op=dist.ReduceOp.AVG)
    for opt in optimizers:
        opt.step()
    model_compiled.zero_grad(set_to_none=True) # Use model_compiled
model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled
for opt, opt_state in zip(optimizers, initial_state["optimizers"]):
    opt.load_state_dict(opt_state)
del initial_state  # free the snapshot before training allocations
print0("PRINT: Warmup complete.", console=True)
torch.cuda.synchronize()


########################################
# Training and validation #
########################################
print0("PRINT: Starting training...", console=True)
train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size)
training_time_ms = 0
torch.cuda.synchronize()
t0 = time.perf_counter()
train_steps = args.num_iterations

for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation)
    last_step = (step == train_steps)

    # --------------- VALIDATION SECTION -----------------
    # Validate at step 0 (after warmup), at specified intervals, and at the very last step
    if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0):
        torch.cuda.synchronize()
        # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0)
        if step > 0 : # For step 0, t0 hasn't started a training segment yet
            current_run_time = 1000 * (time.perf_counter() - t0)
            training_time_ms += current_run_time

        model_compiled.eval() # Use model_compiled
        val_batch_size = world_size * args.val_seq_len
        # Ensure val_tokens is divisible by val_batch_size, or handle remainder
        if args.val_tokens % val_batch_size != 0:
            print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True)
        val_num_steps = args.val_tokens // val_batch_size

        val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size)
        val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device
        actual_val_steps = 0
        with torch.no_grad():
            for val_i in range(val_num_steps):
                try:
                    inputs, targets = next(val_loader)
                    loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled
                    val_loss_sum += loss_val
                    actual_val_steps += 1
                except StopIteration:
                    print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True)
                    break # Stop if data runs out

        if actual_val_steps > 0:
            val_loss_avg = val_loss_sum / actual_val_steps
        else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue)
            val_loss_avg = torch.tensor(float('nan'), device=device)
            print0(f"PRINT: Warning: No validation steps were completed. val_loss is NaN.", console=True)

        del val_loader # Clean up
        dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss

        # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative.
        avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0
        print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True)

        model_compiled.train() # Switch back to train mode
        torch.cuda.synchronize()
        t0 = time.perf_counter() # Reset timer for the next training segment

    if last_step:
        # Only rank 0 saves, and only when checkpointing is enabled.
        if master_process and args.save_checkpoint:
            if run_dir_path_str: # Ensure run_dir_path_str is set by master process
                checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints"
                checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir
                checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt"
                log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled
                                      optimizers=[opt.state_dict() for opt in optimizers])
                torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save
                print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True)
            else:
                print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True)
        break

    # --------------- TRAINING SECTION -----------------
    try:
        inputs, targets = next(train_loader)
    except StopIteration:
        print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. Ending training early at step {step}.", console=True)
        break # End if data runs out

    loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled
    loss_train.backward()

    for param in model_compiled.parameters(): # Use model_compiled
        if param.grad is not None: # Check if grad exists
            dist.all_reduce(param.grad, op=dist.ReduceOp.AVG)

    # Scale every group's LR by the shared schedule multiplier.
    current_lr_val = get_lr(step)
    for opt in optimizers:
        for group in opt.param_groups:
            group["lr"] = group["initial_lr"] * current_lr_val

    # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists ---
    if optimizer2 is not None: # Check if Muon optimizer was created
        for group in optimizer2.param_groups:
            frac = min(step / 300, 1) # momentum warmup for muon
            group["momentum"] = (1 - frac) * 0.85 + frac * 0.95

    for opt in optimizers:
        opt.step()

    model_compiled.zero_grad(set_to_none=True) # Use model_compiled

    # Logging (less frequent for training steps)
    if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val
        # This time is for the current segment since last validation / t0 reset
        current_segment_time_ms = 1000 * (time.perf_counter() - t0)
        # approx_training_time_ms is the total cumulative time
        approx_total_training_time_ms = training_time_ms + current_segment_time_ms

        total_tokens_in_batch = args.train_seq_len * world_size
        # NOTE(review): train_loss_per_token is computed but never used — the
        # log line below omits it, so the training loss is never logged here.
        train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item()

        print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too

print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True)
print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB "
       f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True)

# NOTE(review): the trailing `if` starts the teardown guard whose remainder
# lies past this chunk.
if
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 10:53:49] [Rank 0] PRINT: Constructing model... +[2025-07-17 10:53:49] [Rank 0] PRINT: Constructing model... +[2025-07-17 10:53:51] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 10:53:51] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 10:53:51] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 10:53:51] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 10:53:51] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 10:53:51] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 10:53:51] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 10:53:51] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 10:53:51] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 1 +[2025-07-17 10:53:51] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 1 +[2025-07-17 10:53:51] [Rank 0] PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: 0.001). +[2025-07-17 10:53:51] [Rank 0] PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: 0.001). +[2025-07-17 10:53:51] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-07-17 10:53:51] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-07-17 10:53:51] [Rank 0] PRINT: Muon optimizer is active with 22 parameters. +[2025-07-17 10:53:51] [Rank 0] PRINT: Muon optimizer is active with 22 parameters. +[2025-07-17 10:53:51] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 10:53:51] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 10:53:51] [Rank 0] PRINT: Model compilation complete. +[2025-07-17 10:53:51] [Rank 0] PRINT: Model compilation complete. +[2025-07-17 10:53:51] [Rank 0] PRINT: Starting warmup... 
+[2025-07-17 10:53:51] [Rank 0] PRINT: Starting warmup... +[2025-07-17 10:55:11] [Rank 0] PRINT: Warmup complete. +[2025-07-17 10:55:11] [Rank 0] PRINT: Warmup complete. +[2025-07-17 10:55:12] [Rank 0] PRINT: Starting training... +[2025-07-17 10:55:12] [Rank 0] PRINT: Starting training... +[2025-07-17 10:55:22] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 10:55:22] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 10:55:27] [Rank 0] step:21/10000 train_time:4516ms step_avg:215.06ms +[2025-07-17 10:55:27] [Rank 0] step:21/10000 train_time:4516ms step_avg:215.06ms +[2025-07-17 10:55:32] [Rank 0] step:41/10000 train_time:8953ms step_avg:218.36ms +[2025-07-17 10:55:32] [Rank 0] step:41/10000 train_time:8953ms step_avg:218.36ms +[2025-07-17 10:55:36] [Rank 0] step:61/10000 train_time:13400ms step_avg:219.67ms +[2025-07-17 10:55:36] [Rank 0] step:61/10000 train_time:13400ms step_avg:219.67ms +[2025-07-17 10:55:41] [Rank 0] step:81/10000 train_time:17851ms step_avg:220.39ms +[2025-07-17 10:55:41] [Rank 0] step:81/10000 train_time:17851ms step_avg:220.39ms +[2025-07-17 10:55:45] [Rank 0] step:101/10000 train_time:22302ms step_avg:220.81ms +[2025-07-17 10:55:45] [Rank 0] step:101/10000 train_time:22302ms step_avg:220.81ms +[2025-07-17 10:55:50] [Rank 0] step:121/10000 train_time:26754ms step_avg:221.11ms +[2025-07-17 10:55:50] [Rank 0] step:121/10000 train_time:26754ms step_avg:221.11ms +[2025-07-17 10:55:55] [Rank 0] PRINT: step:125/10000 val_loss:5.4961 train_time:28094ms step_avg:224.75ms +[2025-07-17 10:55:55] [Rank 0] PRINT: step:125/10000 val_loss:5.4961 train_time:28094ms step_avg:224.75ms +[2025-07-17 10:55:58] [Rank 0] step:141/10000 train_time:31207ms step_avg:221.33ms +[2025-07-17 10:55:58] [Rank 0] step:141/10000 train_time:31207ms step_avg:221.33ms +[2025-07-17 10:56:03] [Rank 0] step:161/10000 train_time:35664ms step_avg:221.51ms +[2025-07-17 10:56:03] [Rank 0] step:161/10000 
train_time:35664ms step_avg:221.51ms +[2025-07-17 10:56:07] [Rank 0] step:181/10000 train_time:40121ms step_avg:221.66ms +[2025-07-17 10:56:07] [Rank 0] step:181/10000 train_time:40121ms step_avg:221.66ms +[2025-07-17 10:56:12] [Rank 0] step:201/10000 train_time:44576ms step_avg:221.77ms +[2025-07-17 10:56:12] [Rank 0] step:201/10000 train_time:44576ms step_avg:221.77ms +[2025-07-17 10:56:16] [Rank 0] step:221/10000 train_time:49029ms step_avg:221.85ms +[2025-07-17 10:56:16] [Rank 0] step:221/10000 train_time:49029ms step_avg:221.85ms +[2025-07-17 10:56:21] [Rank 0] step:241/10000 train_time:53489ms step_avg:221.95ms +[2025-07-17 10:56:21] [Rank 0] step:241/10000 train_time:53489ms step_avg:221.95ms +[2025-07-17 10:56:27] [Rank 0] PRINT: step:250/10000 val_loss:5.0245 train_time:55945ms step_avg:223.78ms +[2025-07-17 10:56:27] [Rank 0] PRINT: step:250/10000 val_loss:5.0245 train_time:55945ms step_avg:223.78ms +[2025-07-17 10:56:29] [Rank 0] step:261/10000 train_time:57950ms step_avg:222.03ms +[2025-07-17 10:56:29] [Rank 0] step:261/10000 train_time:57950ms step_avg:222.03ms +[2025-07-17 10:56:34] [Rank 0] step:281/10000 train_time:62403ms step_avg:222.08ms +[2025-07-17 10:56:34] [Rank 0] step:281/10000 train_time:62403ms step_avg:222.08ms +[2025-07-17 10:56:38] [Rank 0] step:301/10000 train_time:66868ms step_avg:222.15ms +[2025-07-17 10:56:38] [Rank 0] step:301/10000 train_time:66868ms step_avg:222.15ms +[2025-07-17 10:56:43] [Rank 0] step:321/10000 train_time:71322ms step_avg:222.19ms +[2025-07-17 10:56:43] [Rank 0] step:321/10000 train_time:71322ms step_avg:222.19ms +[2025-07-17 10:56:47] [Rank 0] step:341/10000 train_time:75788ms step_avg:222.25ms +[2025-07-17 10:56:47] [Rank 0] step:341/10000 train_time:75788ms step_avg:222.25ms +[2025-07-17 10:56:51] [Rank 0] step:361/10000 train_time:80247ms step_avg:222.29ms +[2025-07-17 10:56:51] [Rank 0] step:361/10000 train_time:80247ms step_avg:222.29ms +[2025-07-17 10:56:59] [Rank 0] PRINT: step:375/10000 
val_loss:4.7714 train_time:83820ms step_avg:223.52ms +[2025-07-17 10:56:59] [Rank 0] PRINT: step:375/10000 val_loss:4.7714 train_time:83820ms step_avg:223.52ms +[2025-07-17 10:57:00] [Rank 0] step:381/10000 train_time:84708ms step_avg:222.33ms +[2025-07-17 10:57:00] [Rank 0] step:381/10000 train_time:84708ms step_avg:222.33ms +[2025-07-17 10:57:05] [Rank 0] step:401/10000 train_time:89176ms step_avg:222.39ms +[2025-07-17 10:57:05] [Rank 0] step:401/10000 train_time:89176ms step_avg:222.39ms +[2025-07-17 10:57:09] [Rank 0] step:421/10000 train_time:93640ms step_avg:222.42ms +[2025-07-17 10:57:09] [Rank 0] step:421/10000 train_time:93640ms step_avg:222.42ms +[2025-07-17 10:57:14] [Rank 0] step:441/10000 train_time:98102ms step_avg:222.45ms +[2025-07-17 10:57:14] [Rank 0] step:441/10000 train_time:98102ms step_avg:222.45ms +[2025-07-17 10:57:18] [Rank 0] step:461/10000 train_time:102570ms step_avg:222.50ms +[2025-07-17 10:57:18] [Rank 0] step:461/10000 train_time:102570ms step_avg:222.50ms +[2025-07-17 10:57:23] [Rank 0] step:481/10000 train_time:107032ms step_avg:222.52ms +[2025-07-17 10:57:23] [Rank 0] step:481/10000 train_time:107032ms step_avg:222.52ms +[2025-07-17 10:57:31] [Rank 0] PRINT: step:500/10000 val_loss:4.6556 train_time:111720ms step_avg:223.44ms +[2025-07-17 10:57:31] [Rank 0] PRINT: step:500/10000 val_loss:4.6556 train_time:111720ms step_avg:223.44ms +[2025-07-17 10:57:32] [Rank 0] step:501/10000 train_time:111733ms step_avg:223.02ms +[2025-07-17 10:57:32] [Rank 0] step:501/10000 train_time:111733ms step_avg:223.02ms +[2025-07-17 10:57:36] [Rank 0] step:521/10000 train_time:115956ms step_avg:222.57ms +[2025-07-17 10:57:36] [Rank 0] step:521/10000 train_time:115956ms step_avg:222.57ms +[2025-07-17 10:57:40] [Rank 0] step:541/10000 train_time:120427ms step_avg:222.60ms +[2025-07-17 10:57:40] [Rank 0] step:541/10000 train_time:120427ms step_avg:222.60ms +[2025-07-17 10:57:45] [Rank 0] step:561/10000 train_time:124898ms step_avg:222.63ms +[2025-07-17 
10:57:45] [Rank 0] step:561/10000 train_time:124898ms step_avg:222.63ms +[2025-07-17 10:57:49] [Rank 0] step:581/10000 train_time:129366ms step_avg:222.66ms +[2025-07-17 10:57:49] [Rank 0] step:581/10000 train_time:129366ms step_avg:222.66ms +[2025-07-17 10:57:54] [Rank 0] step:601/10000 train_time:133833ms step_avg:222.68ms +[2025-07-17 10:57:54] [Rank 0] step:601/10000 train_time:133833ms step_avg:222.68ms +[2025-07-17 10:57:58] [Rank 0] step:621/10000 train_time:138299ms step_avg:222.70ms +[2025-07-17 10:57:58] [Rank 0] step:621/10000 train_time:138299ms step_avg:222.70ms +[2025-07-17 10:58:04] [Rank 0] PRINT: step:625/10000 val_loss:4.5848 train_time:139644ms step_avg:223.43ms +[2025-07-17 10:58:04] [Rank 0] PRINT: step:625/10000 val_loss:4.5848 train_time:139644ms step_avg:223.43ms +[2025-07-17 10:58:07] [Rank 0] step:641/10000 train_time:142770ms step_avg:222.73ms +[2025-07-17 10:58:07] [Rank 0] step:641/10000 train_time:142770ms step_avg:222.73ms +[2025-07-17 10:58:12] [Rank 0] step:661/10000 train_time:147238ms step_avg:222.75ms +[2025-07-17 10:58:12] [Rank 0] step:661/10000 train_time:147238ms step_avg:222.75ms +[2025-07-17 10:58:16] [Rank 0] step:681/10000 train_time:151708ms step_avg:222.77ms +[2025-07-17 10:58:16] [Rank 0] step:681/10000 train_time:151708ms step_avg:222.77ms +[2025-07-17 10:58:21] [Rank 0] step:701/10000 train_time:156176ms step_avg:222.79ms +[2025-07-17 10:58:21] [Rank 0] step:701/10000 train_time:156176ms step_avg:222.79ms +[2025-07-17 10:58:25] [Rank 0] step:721/10000 train_time:160648ms step_avg:222.81ms +[2025-07-17 10:58:25] [Rank 0] step:721/10000 train_time:160648ms step_avg:222.81ms +[2025-07-17 10:58:30] [Rank 0] step:741/10000 train_time:165120ms step_avg:222.83ms +[2025-07-17 10:58:30] [Rank 0] step:741/10000 train_time:165120ms step_avg:222.83ms +[2025-07-17 10:58:36] [Rank 0] PRINT: step:750/10000 val_loss:4.8848 train_time:167597ms step_avg:223.46ms +[2025-07-17 10:58:36] [Rank 0] PRINT: step:750/10000 val_loss:4.8848 
train_time:167597ms step_avg:223.46ms +[2025-07-17 10:58:39] [Rank 0] step:761/10000 train_time:169624ms step_avg:222.90ms +[2025-07-17 10:58:39] [Rank 0] step:761/10000 train_time:169624ms step_avg:222.90ms +[2025-07-17 10:58:43] [Rank 0] step:781/10000 train_time:174134ms step_avg:222.96ms +[2025-07-17 10:58:43] [Rank 0] step:781/10000 train_time:174134ms step_avg:222.96ms +[2025-07-17 10:58:48] [Rank 0] step:801/10000 train_time:178644ms step_avg:223.03ms +[2025-07-17 10:58:48] [Rank 0] step:801/10000 train_time:178644ms step_avg:223.03ms +[2025-07-17 10:58:52] [Rank 0] step:821/10000 train_time:183149ms step_avg:223.08ms +[2025-07-17 10:58:52] [Rank 0] step:821/10000 train_time:183149ms step_avg:223.08ms +[2025-07-17 10:58:57] [Rank 0] step:841/10000 train_time:187660ms step_avg:223.14ms +[2025-07-17 10:58:57] [Rank 0] step:841/10000 train_time:187660ms step_avg:223.14ms +[2025-07-17 10:59:01] [Rank 0] step:861/10000 train_time:192172ms step_avg:223.20ms +[2025-07-17 10:59:01] [Rank 0] step:861/10000 train_time:192172ms step_avg:223.20ms +[2025-07-17 10:59:09] [Rank 0] PRINT: step:875/10000 val_loss:4.7543 train_time:195783ms step_avg:223.75ms +[2025-07-17 10:59:09] [Rank 0] PRINT: step:875/10000 val_loss:4.7543 train_time:195783ms step_avg:223.75ms +[2025-07-17 10:59:10] [Rank 0] step:881/10000 train_time:196681ms step_avg:223.25ms +[2025-07-17 10:59:10] [Rank 0] step:881/10000 train_time:196681ms step_avg:223.25ms +[2025-07-17 10:59:15] [Rank 0] step:901/10000 train_time:201195ms step_avg:223.30ms +[2025-07-17 10:59:15] [Rank 0] step:901/10000 train_time:201195ms step_avg:223.30ms +[2025-07-17 10:59:19] [Rank 0] step:921/10000 train_time:205702ms step_avg:223.35ms +[2025-07-17 10:59:19] [Rank 0] step:921/10000 train_time:205702ms step_avg:223.35ms +[2025-07-17 10:59:24] [Rank 0] step:941/10000 train_time:210217ms step_avg:223.40ms +[2025-07-17 10:59:24] [Rank 0] step:941/10000 train_time:210217ms step_avg:223.40ms +[2025-07-17 10:59:28] [Rank 0] 
step:961/10000 train_time:214733ms step_avg:223.45ms +[2025-07-17 10:59:28] [Rank 0] step:961/10000 train_time:214733ms step_avg:223.45ms +[2025-07-17 10:59:33] [Rank 0] step:981/10000 train_time:219246ms step_avg:223.49ms +[2025-07-17 10:59:33] [Rank 0] step:981/10000 train_time:219246ms step_avg:223.49ms +[2025-07-17 10:59:41] [Rank 0] PRINT: step:1000/10000 val_loss:4.7990 train_time:223986ms step_avg:223.99ms +[2025-07-17 10:59:41] [Rank 0] PRINT: step:1000/10000 val_loss:4.7990 train_time:223986ms step_avg:223.99ms +[2025-07-17 10:59:42] [Rank 0] step:1001/10000 train_time:224000ms step_avg:223.78ms +[2025-07-17 10:59:42] [Rank 0] step:1001/10000 train_time:224000ms step_avg:223.78ms +[2025-07-17 10:59:46] [Rank 0] step:1021/10000 train_time:228278ms step_avg:223.58ms +[2025-07-17 10:59:46] [Rank 0] step:1021/10000 train_time:228278ms step_avg:223.58ms +[2025-07-17 10:59:51] [Rank 0] step:1041/10000 train_time:232793ms step_avg:223.62ms +[2025-07-17 10:59:51] [Rank 0] step:1041/10000 train_time:232793ms step_avg:223.62ms +[2025-07-17 10:59:55] [Rank 0] step:1061/10000 train_time:237304ms step_avg:223.66ms +[2025-07-17 10:59:55] [Rank 0] step:1061/10000 train_time:237304ms step_avg:223.66ms +[2025-07-17 11:00:00] [Rank 0] step:1081/10000 train_time:241820ms step_avg:223.70ms +[2025-07-17 11:00:00] [Rank 0] step:1081/10000 train_time:241820ms step_avg:223.70ms +[2025-07-17 11:00:04] [Rank 0] step:1101/10000 train_time:246340ms step_avg:223.74ms +[2025-07-17 11:00:04] [Rank 0] step:1101/10000 train_time:246340ms step_avg:223.74ms +[2025-07-17 11:00:09] [Rank 0] step:1121/10000 train_time:250859ms step_avg:223.78ms +[2025-07-17 11:00:09] [Rank 0] step:1121/10000 train_time:250859ms step_avg:223.78ms +[2025-07-17 11:00:14] [Rank 0] PRINT: step:1125/10000 val_loss:4.7520 train_time:252220ms step_avg:224.20ms +[2025-07-17 11:00:14] [Rank 0] PRINT: step:1125/10000 val_loss:4.7520 train_time:252220ms step_avg:224.20ms +[2025-07-17 11:00:18] [Rank 0] step:1141/10000 
train_time:255375ms step_avg:223.82ms +[2025-07-17 11:00:18] [Rank 0] step:1141/10000 train_time:255375ms step_avg:223.82ms +[2025-07-17 11:00:22] [Rank 0] step:1161/10000 train_time:259897ms step_avg:223.86ms +[2025-07-17 11:00:22] [Rank 0] step:1161/10000 train_time:259897ms step_avg:223.86ms +[2025-07-17 11:00:27] [Rank 0] step:1181/10000 train_time:264415ms step_avg:223.89ms +[2025-07-17 11:00:27] [Rank 0] step:1181/10000 train_time:264415ms step_avg:223.89ms +[2025-07-17 11:00:31] [Rank 0] step:1201/10000 train_time:268938ms step_avg:223.93ms +[2025-07-17 11:00:31] [Rank 0] step:1201/10000 train_time:268938ms step_avg:223.93ms +[2025-07-17 11:00:36] [Rank 0] step:1221/10000 train_time:273469ms step_avg:223.97ms +[2025-07-17 11:00:36] [Rank 0] step:1221/10000 train_time:273469ms step_avg:223.97ms +[2025-07-17 11:00:40] [Rank 0] step:1241/10000 train_time:277998ms step_avg:224.01ms +[2025-07-17 11:00:40] [Rank 0] step:1241/10000 train_time:277998ms step_avg:224.01ms +[2025-07-17 11:00:47] [Rank 0] PRINT: step:1250/10000 val_loss:4.7807 train_time:280495ms step_avg:224.40ms +[2025-07-17 11:00:47] [Rank 0] PRINT: step:1250/10000 val_loss:4.7807 train_time:280495ms step_avg:224.40ms +[2025-07-17 11:00:49] [Rank 0] step:1261/10000 train_time:282529ms step_avg:224.05ms +[2025-07-17 11:00:49] [Rank 0] step:1261/10000 train_time:282529ms step_avg:224.05ms +[2025-07-17 11:00:54] [Rank 0] step:1281/10000 train_time:287058ms step_avg:224.09ms +[2025-07-17 11:00:54] [Rank 0] step:1281/10000 train_time:287058ms step_avg:224.09ms +[2025-07-17 11:00:58] [Rank 0] step:1301/10000 train_time:291585ms step_avg:224.12ms +[2025-07-17 11:00:58] [Rank 0] step:1301/10000 train_time:291585ms step_avg:224.12ms +[2025-07-17 11:01:03] [Rank 0] step:1321/10000 train_time:296113ms step_avg:224.16ms +[2025-07-17 11:01:03] [Rank 0] step:1321/10000 train_time:296113ms step_avg:224.16ms +[2025-07-17 11:01:07] [Rank 0] step:1341/10000 train_time:300640ms step_avg:224.19ms +[2025-07-17 11:01:07] 
[Rank 0] step:1341/10000 train_time:300640ms step_avg:224.19ms +[2025-07-17 11:01:12] [Rank 0] step:1361/10000 train_time:305167ms step_avg:224.22ms +[2025-07-17 11:01:12] [Rank 0] step:1361/10000 train_time:305167ms step_avg:224.22ms +[2025-07-17 11:01:20] [Rank 0] PRINT: step:1375/10000 val_loss:4.7797 train_time:308791ms step_avg:224.58ms +[2025-07-17 11:01:20] [Rank 0] PRINT: step:1375/10000 val_loss:4.7797 train_time:308791ms step_avg:224.58ms +[2025-07-17 11:01:21] [Rank 0] step:1381/10000 train_time:309691ms step_avg:224.25ms +[2025-07-17 11:01:21] [Rank 0] step:1381/10000 train_time:309691ms step_avg:224.25ms +[2025-07-17 11:01:25] [Rank 0] step:1401/10000 train_time:314215ms step_avg:224.28ms +[2025-07-17 11:01:25] [Rank 0] step:1401/10000 train_time:314215ms step_avg:224.28ms +[2025-07-17 11:01:30] [Rank 0] step:1421/10000 train_time:318742ms step_avg:224.31ms +[2025-07-17 11:01:30] [Rank 0] step:1421/10000 train_time:318742ms step_avg:224.31ms +[2025-07-17 11:01:35] [Rank 0] step:1441/10000 train_time:323267ms step_avg:224.34ms +[2025-07-17 11:01:35] [Rank 0] step:1441/10000 train_time:323267ms step_avg:224.34ms +[2025-07-17 11:01:39] [Rank 0] step:1461/10000 train_time:327791ms step_avg:224.36ms +[2025-07-17 11:01:39] [Rank 0] step:1461/10000 train_time:327791ms step_avg:224.36ms +[2025-07-17 11:01:44] [Rank 0] step:1481/10000 train_time:332312ms step_avg:224.38ms +[2025-07-17 11:01:44] [Rank 0] step:1481/10000 train_time:332312ms step_avg:224.38ms +[2025-07-17 11:01:52] [Rank 0] PRINT: step:1500/10000 val_loss:4.6846 train_time:337089ms step_avg:224.73ms +[2025-07-17 11:01:52] [Rank 0] PRINT: step:1500/10000 val_loss:4.6846 train_time:337089ms step_avg:224.73ms +[2025-07-17 11:01:53] [Rank 0] step:1501/10000 train_time:337102ms step_avg:224.59ms +[2025-07-17 11:01:53] [Rank 0] step:1501/10000 train_time:337102ms step_avg:224.59ms +[2025-07-17 11:01:57] [Rank 0] step:1521/10000 train_time:341411ms step_avg:224.47ms +[2025-07-17 11:01:57] [Rank 0] 
step:1521/10000 train_time:341411ms step_avg:224.47ms +[2025-07-17 11:02:02] [Rank 0] step:1541/10000 train_time:345962ms step_avg:224.51ms +[2025-07-17 11:02:02] [Rank 0] step:1541/10000 train_time:345962ms step_avg:224.51ms +[2025-07-17 11:02:06] [Rank 0] step:1561/10000 train_time:350520ms step_avg:224.55ms +[2025-07-17 11:02:06] [Rank 0] step:1561/10000 train_time:350520ms step_avg:224.55ms +[2025-07-17 11:02:11] [Rank 0] step:1581/10000 train_time:355075ms step_avg:224.59ms +[2025-07-17 11:02:11] [Rank 0] step:1581/10000 train_time:355075ms step_avg:224.59ms +[2025-07-17 11:02:15] [Rank 0] step:1601/10000 train_time:359631ms step_avg:224.63ms +[2025-07-17 11:02:15] [Rank 0] step:1601/10000 train_time:359631ms step_avg:224.63ms +[2025-07-17 11:02:20] [Rank 0] step:1621/10000 train_time:364191ms step_avg:224.67ms +[2025-07-17 11:02:20] [Rank 0] step:1621/10000 train_time:364191ms step_avg:224.67ms +[2025-07-17 11:02:25] [Rank 0] PRINT: step:1625/10000 val_loss:4.6859 train_time:365560ms step_avg:224.96ms +[2025-07-17 11:02:25] [Rank 0] PRINT: step:1625/10000 val_loss:4.6859 train_time:365560ms step_avg:224.96ms +[2025-07-17 11:02:29] [Rank 0] step:1641/10000 train_time:368743ms step_avg:224.71ms +[2025-07-17 11:02:29] [Rank 0] step:1641/10000 train_time:368743ms step_avg:224.71ms +[2025-07-17 11:02:34] [Rank 0] step:1661/10000 train_time:373303ms step_avg:224.75ms +[2025-07-17 11:02:34] [Rank 0] step:1661/10000 train_time:373303ms step_avg:224.75ms +[2025-07-17 11:02:38] [Rank 0] step:1681/10000 train_time:377860ms step_avg:224.78ms +[2025-07-17 11:02:38] [Rank 0] step:1681/10000 train_time:377860ms step_avg:224.78ms +[2025-07-17 11:02:43] [Rank 0] step:1701/10000 train_time:382416ms step_avg:224.82ms +[2025-07-17 11:02:43] [Rank 0] step:1701/10000 train_time:382416ms step_avg:224.82ms +[2025-07-17 11:02:47] [Rank 0] step:1721/10000 train_time:386976ms step_avg:224.86ms +[2025-07-17 11:02:47] [Rank 0] step:1721/10000 train_time:386976ms step_avg:224.86ms 
+[2025-07-17 11:02:52] [Rank 0] step:1741/10000 train_time:391532ms step_avg:224.89ms +[2025-07-17 11:02:52] [Rank 0] step:1741/10000 train_time:391532ms step_avg:224.89ms +[2025-07-17 11:02:58] [Rank 0] PRINT: step:1750/10000 val_loss:4.7369 train_time:394043ms step_avg:225.17ms +[2025-07-17 11:02:58] [Rank 0] PRINT: step:1750/10000 val_loss:4.7369 train_time:394043ms step_avg:225.17ms +[2025-07-17 11:03:01] [Rank 0] step:1761/10000 train_time:396090ms step_avg:224.92ms +[2025-07-17 11:03:01] [Rank 0] step:1761/10000 train_time:396090ms step_avg:224.92ms +[2025-07-17 11:03:05] [Rank 0] step:1781/10000 train_time:400650ms step_avg:224.96ms +[2025-07-17 11:03:05] [Rank 0] step:1781/10000 train_time:400650ms step_avg:224.96ms +[2025-07-17 11:03:10] [Rank 0] step:1801/10000 train_time:405214ms step_avg:224.99ms +[2025-07-17 11:03:10] [Rank 0] step:1801/10000 train_time:405214ms step_avg:224.99ms +[2025-07-17 11:03:15] [Rank 0] step:1821/10000 train_time:409775ms step_avg:225.03ms +[2025-07-17 11:03:15] [Rank 0] step:1821/10000 train_time:409775ms step_avg:225.03ms +[2025-07-17 11:03:19] [Rank 0] step:1841/10000 train_time:414333ms step_avg:225.06ms +[2025-07-17 11:03:19] [Rank 0] step:1841/10000 train_time:414333ms step_avg:225.06ms +[2025-07-17 11:03:24] [Rank 0] step:1861/10000 train_time:418889ms step_avg:225.09ms +[2025-07-17 11:03:24] [Rank 0] step:1861/10000 train_time:418889ms step_avg:225.09ms +[2025-07-17 11:03:31] [Rank 0] PRINT: step:1875/10000 val_loss:4.7304 train_time:422538ms step_avg:225.35ms +[2025-07-17 11:03:31] [Rank 0] PRINT: step:1875/10000 val_loss:4.7304 train_time:422538ms step_avg:225.35ms +[2025-07-17 11:03:33] [Rank 0] step:1881/10000 train_time:423449ms step_avg:225.12ms +[2025-07-17 11:03:33] [Rank 0] step:1881/10000 train_time:423449ms step_avg:225.12ms +[2025-07-17 11:03:37] [Rank 0] step:1901/10000 train_time:428003ms step_avg:225.15ms +[2025-07-17 11:03:37] [Rank 0] step:1901/10000 train_time:428003ms step_avg:225.15ms +[2025-07-17 
11:03:42] [Rank 0] step:1921/10000 train_time:432565ms step_avg:225.18ms +[2025-07-17 11:03:42] [Rank 0] step:1921/10000 train_time:432565ms step_avg:225.18ms +[2025-07-17 11:03:46] [Rank 0] step:1941/10000 train_time:437123ms step_avg:225.21ms +[2025-07-17 11:03:46] [Rank 0] step:1941/10000 train_time:437123ms step_avg:225.21ms +[2025-07-17 11:03:51] [Rank 0] step:1961/10000 train_time:441681ms step_avg:225.23ms +[2025-07-17 11:03:51] [Rank 0] step:1961/10000 train_time:441681ms step_avg:225.23ms +[2025-07-17 11:03:55] [Rank 0] step:1981/10000 train_time:446243ms step_avg:225.26ms +[2025-07-17 11:03:55] [Rank 0] step:1981/10000 train_time:446243ms step_avg:225.26ms +[2025-07-17 11:04:04] [Rank 0] PRINT: step:2000/10000 val_loss:4.6985 train_time:451029ms step_avg:225.51ms +[2025-07-17 11:04:04] [Rank 0] PRINT: step:2000/10000 val_loss:4.6985 train_time:451029ms step_avg:225.51ms +[2025-07-17 11:04:05] [Rank 0] step:2001/10000 train_time:451042ms step_avg:225.41ms +[2025-07-17 11:04:05] [Rank 0] step:2001/10000 train_time:451042ms step_avg:225.41ms +[2025-07-17 11:04:09] [Rank 0] step:2021/10000 train_time:455360ms step_avg:225.31ms +[2025-07-17 11:04:09] [Rank 0] step:2021/10000 train_time:455360ms step_avg:225.31ms +[2025-07-17 11:04:14] [Rank 0] step:2041/10000 train_time:459918ms step_avg:225.34ms +[2025-07-17 11:04:14] [Rank 0] step:2041/10000 train_time:459918ms step_avg:225.34ms +[2025-07-17 11:04:18] [Rank 0] step:2061/10000 train_time:464477ms step_avg:225.36ms +[2025-07-17 11:04:18] [Rank 0] step:2061/10000 train_time:464477ms step_avg:225.36ms +[2025-07-17 11:04:23] [Rank 0] step:2081/10000 train_time:469034ms step_avg:225.39ms +[2025-07-17 11:04:23] [Rank 0] step:2081/10000 train_time:469034ms step_avg:225.39ms +[2025-07-17 11:04:27] [Rank 0] step:2101/10000 train_time:473591ms step_avg:225.41ms +[2025-07-17 11:04:27] [Rank 0] step:2101/10000 train_time:473591ms step_avg:225.41ms +[2025-07-17 11:04:32] [Rank 0] step:2121/10000 train_time:478152ms 
step_avg:225.44ms +[2025-07-17 11:04:32] [Rank 0] step:2121/10000 train_time:478152ms step_avg:225.44ms +[2025-07-17 11:04:37] [Rank 0] PRINT: step:2125/10000 val_loss:4.6584 train_time:479525ms step_avg:225.66ms +[2025-07-17 11:04:37] [Rank 0] PRINT: step:2125/10000 val_loss:4.6584 train_time:479525ms step_avg:225.66ms +[2025-07-17 11:04:41] [Rank 0] step:2141/10000 train_time:482712ms step_avg:225.46ms +[2025-07-17 11:04:41] [Rank 0] step:2141/10000 train_time:482712ms step_avg:225.46ms +[2025-07-17 11:04:45] [Rank 0] step:2161/10000 train_time:487277ms step_avg:225.49ms +[2025-07-17 11:04:45] [Rank 0] step:2161/10000 train_time:487277ms step_avg:225.49ms +[2025-07-17 11:04:50] [Rank 0] step:2181/10000 train_time:491842ms step_avg:225.51ms +[2025-07-17 11:04:50] [Rank 0] step:2181/10000 train_time:491842ms step_avg:225.51ms +[2025-07-17 11:04:55] [Rank 0] step:2201/10000 train_time:496406ms step_avg:225.54ms +[2025-07-17 11:04:55] [Rank 0] step:2201/10000 train_time:496406ms step_avg:225.54ms +[2025-07-17 11:04:59] [Rank 0] step:2221/10000 train_time:500973ms step_avg:225.56ms +[2025-07-17 11:04:59] [Rank 0] step:2221/10000 train_time:500973ms step_avg:225.56ms +[2025-07-17 11:05:04] [Rank 0] step:2241/10000 train_time:505632ms step_avg:225.63ms +[2025-07-17 11:05:04] [Rank 0] step:2241/10000 train_time:505632ms step_avg:225.63ms +[2025-07-17 11:05:11] [Rank 0] PRINT: step:2250/10000 val_loss:4.2614 train_time:508211ms step_avg:225.87ms +[2025-07-17 11:05:11] [Rank 0] PRINT: step:2250/10000 val_loss:4.2614 train_time:508211ms step_avg:225.87ms +[2025-07-17 11:05:13] [Rank 0] step:2261/10000 train_time:510311ms step_avg:225.70ms +[2025-07-17 11:05:13] [Rank 0] step:2261/10000 train_time:510311ms step_avg:225.70ms +[2025-07-17 11:05:18] [Rank 0] step:2281/10000 train_time:514990ms step_avg:225.77ms +[2025-07-17 11:05:18] [Rank 0] step:2281/10000 train_time:514990ms step_avg:225.77ms +[2025-07-17 11:05:22] [Rank 0] step:2301/10000 train_time:519665ms 
step_avg:225.84ms +[2025-07-17 11:05:22] [Rank 0] step:2301/10000 train_time:519665ms step_avg:225.84ms +[2025-07-17 11:05:27] [Rank 0] step:2321/10000 train_time:524346ms step_avg:225.91ms +[2025-07-17 11:05:27] [Rank 0] step:2321/10000 train_time:524346ms step_avg:225.91ms +[2025-07-17 11:05:32] [Rank 0] step:2341/10000 train_time:529024ms step_avg:225.98ms +[2025-07-17 11:05:32] [Rank 0] step:2341/10000 train_time:529024ms step_avg:225.98ms +[2025-07-17 11:05:37] [Rank 0] step:2361/10000 train_time:533703ms step_avg:226.05ms +[2025-07-17 11:05:37] [Rank 0] step:2361/10000 train_time:533703ms step_avg:226.05ms +[2025-07-17 11:05:44] [Rank 0] PRINT: step:2375/10000 val_loss:4.2595 train_time:537448ms step_avg:226.29ms +[2025-07-17 11:05:44] [Rank 0] PRINT: step:2375/10000 val_loss:4.2595 train_time:537448ms step_avg:226.29ms +[2025-07-17 11:05:45] [Rank 0] step:2381/10000 train_time:538383ms step_avg:226.12ms +[2025-07-17 11:05:45] [Rank 0] step:2381/10000 train_time:538383ms step_avg:226.12ms +[2025-07-17 11:05:50] [Rank 0] step:2401/10000 train_time:543061ms step_avg:226.18ms +[2025-07-17 11:05:50] [Rank 0] step:2401/10000 train_time:543061ms step_avg:226.18ms +[2025-07-17 11:05:55] [Rank 0] step:2421/10000 train_time:547740ms step_avg:226.25ms +[2025-07-17 11:05:55] [Rank 0] step:2421/10000 train_time:547740ms step_avg:226.25ms +[2025-07-17 11:05:59] [Rank 0] step:2441/10000 train_time:552416ms step_avg:226.31ms +[2025-07-17 11:05:59] [Rank 0] step:2441/10000 train_time:552416ms step_avg:226.31ms +[2025-07-17 11:06:04] [Rank 0] step:2461/10000 train_time:557093ms step_avg:226.37ms +[2025-07-17 11:06:04] [Rank 0] step:2461/10000 train_time:557093ms step_avg:226.37ms +[2025-07-17 11:06:09] [Rank 0] step:2481/10000 train_time:561769ms step_avg:226.43ms +[2025-07-17 11:06:09] [Rank 0] step:2481/10000 train_time:561769ms step_avg:226.43ms +[2025-07-17 11:06:18] [Rank 0] PRINT: step:2500/10000 val_loss:4.2919 train_time:566680ms step_avg:226.67ms +[2025-07-17 
11:06:18] [Rank 0] PRINT: step:2500/10000 val_loss:4.2919 train_time:566680ms step_avg:226.67ms +[2025-07-17 11:06:18] [Rank 0] step:2501/10000 train_time:566694ms step_avg:226.59ms +[2025-07-17 11:06:18] [Rank 0] step:2501/10000 train_time:566694ms step_avg:226.59ms +[2025-07-17 11:06:23] [Rank 0] step:2521/10000 train_time:571119ms step_avg:226.54ms +[2025-07-17 11:06:23] [Rank 0] step:2521/10000 train_time:571119ms step_avg:226.54ms +[2025-07-17 11:06:27] [Rank 0] step:2541/10000 train_time:575796ms step_avg:226.60ms +[2025-07-17 11:06:27] [Rank 0] step:2541/10000 train_time:575796ms step_avg:226.60ms +[2025-07-17 11:06:32] [Rank 0] step:2561/10000 train_time:580474ms step_avg:226.66ms +[2025-07-17 11:06:32] [Rank 0] step:2561/10000 train_time:580474ms step_avg:226.66ms +[2025-07-17 11:06:37] [Rank 0] step:2581/10000 train_time:585148ms step_avg:226.71ms +[2025-07-17 11:06:37] [Rank 0] step:2581/10000 train_time:585148ms step_avg:226.71ms +[2025-07-17 11:06:41] [Rank 0] step:2601/10000 train_time:589824ms step_avg:226.77ms +[2025-07-17 11:06:41] [Rank 0] step:2601/10000 train_time:589824ms step_avg:226.77ms +[2025-07-17 11:06:46] [Rank 0] step:2621/10000 train_time:594498ms step_avg:226.82ms +[2025-07-17 11:06:46] [Rank 0] step:2621/10000 train_time:594498ms step_avg:226.82ms +[2025-07-17 11:06:51] [Rank 0] PRINT: step:2625/10000 val_loss:4.2496 train_time:595904ms step_avg:227.01ms +[2025-07-17 11:06:51] [Rank 0] PRINT: step:2625/10000 val_loss:4.2496 train_time:595904ms step_avg:227.01ms +[2025-07-17 11:06:55] [Rank 0] step:2641/10000 train_time:599166ms step_avg:226.87ms +[2025-07-17 11:06:55] [Rank 0] step:2641/10000 train_time:599166ms step_avg:226.87ms +[2025-07-17 11:07:00] [Rank 0] step:2661/10000 train_time:603836ms step_avg:226.92ms +[2025-07-17 11:07:00] [Rank 0] step:2661/10000 train_time:603836ms step_avg:226.92ms +[2025-07-17 11:07:05] [Rank 0] step:2681/10000 train_time:608509ms step_avg:226.97ms +[2025-07-17 11:07:05] [Rank 0] step:2681/10000 
train_time:608509ms step_avg:226.97ms +[2025-07-17 11:07:09] [Rank 0] step:2701/10000 train_time:613183ms step_avg:227.02ms +[2025-07-17 11:07:09] [Rank 0] step:2701/10000 train_time:613183ms step_avg:227.02ms +[2025-07-17 11:07:14] [Rank 0] step:2721/10000 train_time:617859ms step_avg:227.07ms +[2025-07-17 11:07:14] [Rank 0] step:2721/10000 train_time:617859ms step_avg:227.07ms +[2025-07-17 11:07:19] [Rank 0] step:2741/10000 train_time:622534ms step_avg:227.12ms +[2025-07-17 11:07:19] [Rank 0] step:2741/10000 train_time:622534ms step_avg:227.12ms +[2025-07-17 11:07:25] [Rank 0] PRINT: step:2750/10000 val_loss:4.1865 train_time:625108ms step_avg:227.31ms +[2025-07-17 11:07:25] [Rank 0] PRINT: step:2750/10000 val_loss:4.1865 train_time:625108ms step_avg:227.31ms +[2025-07-17 11:07:28] [Rank 0] step:2761/10000 train_time:627208ms step_avg:227.17ms +[2025-07-17 11:07:28] [Rank 0] step:2761/10000 train_time:627208ms step_avg:227.17ms +[2025-07-17 11:07:33] [Rank 0] step:2781/10000 train_time:631886ms step_avg:227.22ms +[2025-07-17 11:07:33] [Rank 0] step:2781/10000 train_time:631886ms step_avg:227.22ms +[2025-07-17 11:07:37] [Rank 0] step:2801/10000 train_time:636566ms step_avg:227.26ms +[2025-07-17 11:07:37] [Rank 0] step:2801/10000 train_time:636566ms step_avg:227.26ms +[2025-07-17 11:07:42] [Rank 0] step:2821/10000 train_time:641248ms step_avg:227.31ms +[2025-07-17 11:07:42] [Rank 0] step:2821/10000 train_time:641248ms step_avg:227.31ms +[2025-07-17 11:07:47] [Rank 0] step:2841/10000 train_time:645928ms step_avg:227.36ms +[2025-07-17 11:07:47] [Rank 0] step:2841/10000 train_time:645928ms step_avg:227.36ms +[2025-07-17 11:07:51] [Rank 0] step:2861/10000 train_time:650606ms step_avg:227.41ms +[2025-07-17 11:07:51] [Rank 0] step:2861/10000 train_time:650606ms step_avg:227.41ms +[2025-07-17 11:07:59] [Rank 0] PRINT: step:2875/10000 val_loss:4.2149 train_time:654352ms step_avg:227.60ms +[2025-07-17 11:07:59] [Rank 0] PRINT: step:2875/10000 val_loss:4.2149 
train_time:654352ms step_avg:227.60ms +[2025-07-17 11:08:00] [Rank 0] step:2881/10000 train_time:655289ms step_avg:227.45ms +[2025-07-17 11:08:00] [Rank 0] step:2881/10000 train_time:655289ms step_avg:227.45ms +[2025-07-17 11:08:05] [Rank 0] step:2901/10000 train_time:659968ms step_avg:227.50ms +[2025-07-17 11:08:05] [Rank 0] step:2901/10000 train_time:659968ms step_avg:227.50ms +[2025-07-17 11:08:09] [Rank 0] step:2921/10000 train_time:664649ms step_avg:227.54ms +[2025-07-17 11:08:09] [Rank 0] step:2921/10000 train_time:664649ms step_avg:227.54ms +[2025-07-17 11:08:14] [Rank 0] step:2941/10000 train_time:669331ms step_avg:227.59ms +[2025-07-17 11:08:14] [Rank 0] step:2941/10000 train_time:669331ms step_avg:227.59ms +[2025-07-17 11:08:19] [Rank 0] step:2961/10000 train_time:674015ms step_avg:227.63ms +[2025-07-17 11:08:19] [Rank 0] step:2961/10000 train_time:674015ms step_avg:227.63ms +[2025-07-17 11:08:24] [Rank 0] step:2981/10000 train_time:678713ms step_avg:227.68ms +[2025-07-17 11:08:24] [Rank 0] step:2981/10000 train_time:678713ms step_avg:227.68ms +[2025-07-17 11:08:33] [Rank 0] PRINT: step:3000/10000 val_loss:4.1506 train_time:683650ms step_avg:227.88ms +[2025-07-17 11:08:33] [Rank 0] PRINT: step:3000/10000 val_loss:4.1506 train_time:683650ms step_avg:227.88ms +[2025-07-17 11:08:33] [Rank 0] step:3001/10000 train_time:683664ms step_avg:227.81ms +[2025-07-17 11:08:33] [Rank 0] step:3001/10000 train_time:683664ms step_avg:227.81ms +[2025-07-17 11:08:38] [Rank 0] step:3021/10000 train_time:688107ms step_avg:227.77ms +[2025-07-17 11:08:38] [Rank 0] step:3021/10000 train_time:688107ms step_avg:227.77ms +[2025-07-17 11:08:42] [Rank 0] step:3041/10000 train_time:692803ms step_avg:227.82ms +[2025-07-17 11:08:42] [Rank 0] step:3041/10000 train_time:692803ms step_avg:227.82ms +[2025-07-17 11:08:47] [Rank 0] step:3061/10000 train_time:697498ms step_avg:227.87ms +[2025-07-17 11:08:47] [Rank 0] step:3061/10000 train_time:697498ms step_avg:227.87ms +[2025-07-17 11:08:52] 
[Rank 0] step:3081/10000 train_time:702194ms step_avg:227.91ms +[2025-07-17 11:08:52] [Rank 0] step:3081/10000 train_time:702194ms step_avg:227.91ms +[2025-07-17 11:08:56] [Rank 0] step:3101/10000 train_time:706890ms step_avg:227.96ms +[2025-07-17 11:08:56] [Rank 0] step:3101/10000 train_time:706890ms step_avg:227.96ms +[2025-07-17 11:09:01] [Rank 0] step:3121/10000 train_time:711589ms step_avg:228.00ms +[2025-07-17 11:09:01] [Rank 0] step:3121/10000 train_time:711589ms step_avg:228.00ms +[2025-07-17 11:09:07] [Rank 0] PRINT: step:3125/10000 val_loss:4.2523 train_time:713003ms step_avg:228.16ms +[2025-07-17 11:09:07] [Rank 0] PRINT: step:3125/10000 val_loss:4.2523 train_time:713003ms step_avg:228.16ms +[2025-07-17 11:09:10] [Rank 0] step:3141/10000 train_time:716285ms step_avg:228.04ms +[2025-07-17 11:09:10] [Rank 0] step:3141/10000 train_time:716285ms step_avg:228.04ms +[2025-07-17 11:09:15] [Rank 0] step:3161/10000 train_time:720983ms step_avg:228.09ms +[2025-07-17 11:09:15] [Rank 0] step:3161/10000 train_time:720983ms step_avg:228.09ms +[2025-07-17 11:09:20] [Rank 0] step:3181/10000 train_time:725679ms step_avg:228.13ms +[2025-07-17 11:09:20] [Rank 0] step:3181/10000 train_time:725679ms step_avg:228.13ms +[2025-07-17 11:09:24] [Rank 0] step:3201/10000 train_time:730379ms step_avg:228.17ms +[2025-07-17 11:09:24] [Rank 0] step:3201/10000 train_time:730379ms step_avg:228.17ms +[2025-07-17 11:09:29] [Rank 0] step:3221/10000 train_time:735076ms step_avg:228.21ms +[2025-07-17 11:09:29] [Rank 0] step:3221/10000 train_time:735076ms step_avg:228.21ms +[2025-07-17 11:09:34] [Rank 0] step:3241/10000 train_time:739770ms step_avg:228.25ms +[2025-07-17 11:09:34] [Rank 0] step:3241/10000 train_time:739770ms step_avg:228.25ms +[2025-07-17 11:09:41] [Rank 0] PRINT: step:3250/10000 val_loss:4.2929 train_time:742357ms step_avg:228.42ms +[2025-07-17 11:09:41] [Rank 0] PRINT: step:3250/10000 val_loss:4.2929 train_time:742357ms step_avg:228.42ms +[2025-07-17 11:09:43] [Rank 0] 
step:3261/10000 train_time:744465ms step_avg:228.29ms +[2025-07-17 11:09:43] [Rank 0] step:3261/10000 train_time:744465ms step_avg:228.29ms +[2025-07-17 11:09:48] [Rank 0] step:3281/10000 train_time:749163ms step_avg:228.33ms +[2025-07-17 11:09:48] [Rank 0] step:3281/10000 train_time:749163ms step_avg:228.33ms +[2025-07-17 11:09:52] [Rank 0] step:3301/10000 train_time:753862ms step_avg:228.37ms +[2025-07-17 11:09:52] [Rank 0] step:3301/10000 train_time:753862ms step_avg:228.37ms +[2025-07-17 11:09:57] [Rank 0] step:3321/10000 train_time:758558ms step_avg:228.41ms +[2025-07-17 11:09:57] [Rank 0] step:3321/10000 train_time:758558ms step_avg:228.41ms +[2025-07-17 11:10:02] [Rank 0] step:3341/10000 train_time:763247ms step_avg:228.45ms +[2025-07-17 11:10:02] [Rank 0] step:3341/10000 train_time:763247ms step_avg:228.45ms +[2025-07-17 11:10:07] [Rank 0] step:3361/10000 train_time:767936ms step_avg:228.48ms +[2025-07-17 11:10:07] [Rank 0] step:3361/10000 train_time:767936ms step_avg:228.48ms +[2025-07-17 11:10:14] [Rank 0] PRINT: step:3375/10000 val_loss:4.3118 train_time:771688ms step_avg:228.65ms +[2025-07-17 11:10:14] [Rank 0] PRINT: step:3375/10000 val_loss:4.3118 train_time:771688ms step_avg:228.65ms +[2025-07-17 11:10:16] [Rank 0] step:3381/10000 train_time:772622ms step_avg:228.52ms +[2025-07-17 11:10:16] [Rank 0] step:3381/10000 train_time:772622ms step_avg:228.52ms +[2025-07-17 11:10:21] [Rank 0] step:3401/10000 train_time:777302ms step_avg:228.55ms +[2025-07-17 11:10:21] [Rank 0] step:3401/10000 train_time:777302ms step_avg:228.55ms +[2025-07-17 11:10:25] [Rank 0] step:3421/10000 train_time:781984ms step_avg:228.58ms +[2025-07-17 11:10:25] [Rank 0] step:3421/10000 train_time:781984ms step_avg:228.58ms +[2025-07-17 11:10:30] [Rank 0] step:3441/10000 train_time:786666ms step_avg:228.62ms +[2025-07-17 11:10:30] [Rank 0] step:3441/10000 train_time:786666ms step_avg:228.62ms +[2025-07-17 11:10:35] [Rank 0] step:3461/10000 train_time:791346ms step_avg:228.65ms 
+[2025-07-17 11:10:35] [Rank 0] step:3461/10000 train_time:791346ms step_avg:228.65ms +[2025-07-17 11:10:39] [Rank 0] step:3481/10000 train_time:796029ms step_avg:228.68ms +[2025-07-17 11:10:39] [Rank 0] step:3481/10000 train_time:796029ms step_avg:228.68ms +[2025-07-17 11:10:48] [Rank 0] PRINT: step:3500/10000 val_loss:4.3415 train_time:800946ms step_avg:228.84ms +[2025-07-17 11:10:48] [Rank 0] PRINT: step:3500/10000 val_loss:4.3415 train_time:800946ms step_avg:228.84ms +[2025-07-17 11:10:49] [Rank 0] step:3501/10000 train_time:800959ms step_avg:228.78ms +[2025-07-17 11:10:49] [Rank 0] step:3501/10000 train_time:800959ms step_avg:228.78ms +[2025-07-17 11:10:53] [Rank 0] step:3521/10000 train_time:805395ms step_avg:228.74ms +[2025-07-17 11:10:53] [Rank 0] step:3521/10000 train_time:805395ms step_avg:228.74ms +[2025-07-17 11:10:58] [Rank 0] step:3541/10000 train_time:810082ms step_avg:228.77ms +[2025-07-17 11:10:58] [Rank 0] step:3541/10000 train_time:810082ms step_avg:228.77ms +[2025-07-17 11:11:03] [Rank 0] step:3561/10000 train_time:814765ms step_avg:228.80ms +[2025-07-17 11:11:03] [Rank 0] step:3561/10000 train_time:814765ms step_avg:228.80ms +[2025-07-17 11:11:07] [Rank 0] step:3581/10000 train_time:819450ms step_avg:228.83ms +[2025-07-17 11:11:07] [Rank 0] step:3581/10000 train_time:819450ms step_avg:228.83ms +[2025-07-17 11:11:12] [Rank 0] step:3601/10000 train_time:824138ms step_avg:228.86ms +[2025-07-17 11:11:12] [Rank 0] step:3601/10000 train_time:824138ms step_avg:228.86ms +[2025-07-17 11:11:17] [Rank 0] step:3621/10000 train_time:828825ms step_avg:228.89ms +[2025-07-17 11:11:17] [Rank 0] step:3621/10000 train_time:828825ms step_avg:228.89ms +[2025-07-17 11:11:22] [Rank 0] PRINT: step:3625/10000 val_loss:4.3131 train_time:830235ms step_avg:229.03ms +[2025-07-17 11:11:22] [Rank 0] PRINT: step:3625/10000 val_loss:4.3131 train_time:830235ms step_avg:229.03ms +[2025-07-17 11:11:26] [Rank 0] step:3641/10000 train_time:833507ms step_avg:228.92ms +[2025-07-17 
11:11:26] [Rank 0] step:3641/10000 train_time:833507ms step_avg:228.92ms +[2025-07-17 11:11:31] [Rank 0] step:3661/10000 train_time:838197ms step_avg:228.95ms +[2025-07-17 11:11:31] [Rank 0] step:3661/10000 train_time:838197ms step_avg:228.95ms +[2025-07-17 11:11:35] [Rank 0] step:3681/10000 train_time:842885ms step_avg:228.98ms +[2025-07-17 11:11:35] [Rank 0] step:3681/10000 train_time:842885ms step_avg:228.98ms +[2025-07-17 11:11:40] [Rank 0] step:3701/10000 train_time:847576ms step_avg:229.01ms +[2025-07-17 11:11:40] [Rank 0] step:3701/10000 train_time:847576ms step_avg:229.01ms +[2025-07-17 11:11:45] [Rank 0] step:3721/10000 train_time:852328ms step_avg:229.06ms +[2025-07-17 11:11:45] [Rank 0] step:3721/10000 train_time:852328ms step_avg:229.06ms +[2025-07-17 11:11:50] [Rank 0] step:3741/10000 train_time:857102ms step_avg:229.11ms +[2025-07-17 11:11:50] [Rank 0] step:3741/10000 train_time:857102ms step_avg:229.11ms +[2025-07-17 11:11:56] [Rank 0] PRINT: step:3750/10000 val_loss:4.1184 train_time:859729ms step_avg:229.26ms +[2025-07-17 11:11:56] [Rank 0] PRINT: step:3750/10000 val_loss:4.1184 train_time:859729ms step_avg:229.26ms +[2025-07-17 11:11:59] [Rank 0] step:3761/10000 train_time:861875ms step_avg:229.16ms +[2025-07-17 11:11:59] [Rank 0] step:3761/10000 train_time:861875ms step_avg:229.16ms +[2025-07-17 11:12:03] [Rank 0] step:3781/10000 train_time:866654ms step_avg:229.21ms +[2025-07-17 11:12:03] [Rank 0] step:3781/10000 train_time:866654ms step_avg:229.21ms +[2025-07-17 11:12:08] [Rank 0] step:3801/10000 train_time:871433ms step_avg:229.26ms +[2025-07-17 11:12:08] [Rank 0] step:3801/10000 train_time:871433ms step_avg:229.26ms +[2025-07-17 11:12:13] [Rank 0] step:3821/10000 train_time:876215ms step_avg:229.32ms +[2025-07-17 11:12:13] [Rank 0] step:3821/10000 train_time:876215ms step_avg:229.32ms +[2025-07-17 11:12:18] [Rank 0] step:3841/10000 train_time:880995ms step_avg:229.37ms +[2025-07-17 11:12:18] [Rank 0] step:3841/10000 train_time:880995ms 
step_avg:229.37ms +[2025-07-17 11:12:22] [Rank 0] step:3861/10000 train_time:885779ms step_avg:229.42ms +[2025-07-17 11:12:22] [Rank 0] step:3861/10000 train_time:885779ms step_avg:229.42ms +[2025-07-17 11:12:30] [Rank 0] PRINT: step:3875/10000 val_loss:4.1601 train_time:889608ms step_avg:229.58ms +[2025-07-17 11:12:30] [Rank 0] PRINT: step:3875/10000 val_loss:4.1601 train_time:889608ms step_avg:229.58ms +[2025-07-17 11:12:32] [Rank 0] step:3881/10000 train_time:890562ms step_avg:229.47ms +[2025-07-17 11:12:32] [Rank 0] step:3881/10000 train_time:890562ms step_avg:229.47ms +[2025-07-17 11:12:37] [Rank 0] step:3901/10000 train_time:895343ms step_avg:229.52ms +[2025-07-17 11:12:37] [Rank 0] step:3901/10000 train_time:895343ms step_avg:229.52ms +[2025-07-17 11:12:41] [Rank 0] step:3921/10000 train_time:900124ms step_avg:229.56ms +[2025-07-17 11:12:41] [Rank 0] step:3921/10000 train_time:900124ms step_avg:229.56ms +[2025-07-17 11:12:46] [Rank 0] step:3941/10000 train_time:904905ms step_avg:229.61ms +[2025-07-17 11:12:46] [Rank 0] step:3941/10000 train_time:904905ms step_avg:229.61ms +[2025-07-17 11:12:51] [Rank 0] step:3961/10000 train_time:909688ms step_avg:229.66ms +[2025-07-17 11:12:51] [Rank 0] step:3961/10000 train_time:909688ms step_avg:229.66ms +[2025-07-17 11:12:56] [Rank 0] step:3981/10000 train_time:914472ms step_avg:229.71ms +[2025-07-17 11:12:56] [Rank 0] step:3981/10000 train_time:914472ms step_avg:229.71ms +[2025-07-17 11:13:05] [Rank 0] PRINT: step:4000/10000 val_loss:4.2386 train_time:919491ms step_avg:229.87ms +[2025-07-17 11:13:05] [Rank 0] PRINT: step:4000/10000 val_loss:4.2386 train_time:919491ms step_avg:229.87ms +[2025-07-17 11:13:05] [Rank 0] step:4001/10000 train_time:919505ms step_avg:229.82ms +[2025-07-17 11:13:05] [Rank 0] step:4001/10000 train_time:919505ms step_avg:229.82ms +[2025-07-17 11:13:10] [Rank 0] step:4021/10000 train_time:924031ms step_avg:229.80ms +[2025-07-17 11:13:10] [Rank 0] step:4021/10000 train_time:924031ms 
step_avg:229.80ms +[2025-07-17 11:13:15] [Rank 0] step:4041/10000 train_time:928816ms step_avg:229.85ms +[2025-07-17 11:13:15] [Rank 0] step:4041/10000 train_time:928816ms step_avg:229.85ms +[2025-07-17 11:13:20] [Rank 0] step:4061/10000 train_time:933602ms step_avg:229.89ms +[2025-07-17 11:13:20] [Rank 0] step:4061/10000 train_time:933602ms step_avg:229.89ms +[2025-07-17 11:13:24] [Rank 0] step:4081/10000 train_time:938389ms step_avg:229.94ms +[2025-07-17 11:13:24] [Rank 0] step:4081/10000 train_time:938389ms step_avg:229.94ms +[2025-07-17 11:13:29] [Rank 0] step:4101/10000 train_time:943179ms step_avg:229.99ms +[2025-07-17 11:13:29] [Rank 0] step:4101/10000 train_time:943179ms step_avg:229.99ms +[2025-07-17 11:13:34] [Rank 0] step:4121/10000 train_time:947962ms step_avg:230.03ms +[2025-07-17 11:13:34] [Rank 0] step:4121/10000 train_time:947962ms step_avg:230.03ms +[2025-07-17 11:13:39] [Rank 0] PRINT: step:4125/10000 val_loss:4.2829 train_time:949401ms step_avg:230.16ms +[2025-07-17 11:13:39] [Rank 0] PRINT: step:4125/10000 val_loss:4.2829 train_time:949401ms step_avg:230.16ms +[2025-07-17 11:13:43] [Rank 0] step:4141/10000 train_time:952742ms step_avg:230.08ms +[2025-07-17 11:13:43] [Rank 0] step:4141/10000 train_time:952742ms step_avg:230.08ms +[2025-07-17 11:13:48] [Rank 0] step:4161/10000 train_time:957525ms step_avg:230.12ms +[2025-07-17 11:13:48] [Rank 0] step:4161/10000 train_time:957525ms step_avg:230.12ms +[2025-07-17 11:13:53] [Rank 0] step:4181/10000 train_time:962305ms step_avg:230.16ms +[2025-07-17 11:13:53] [Rank 0] step:4181/10000 train_time:962305ms step_avg:230.16ms +[2025-07-17 11:13:58] [Rank 0] step:4201/10000 train_time:967092ms step_avg:230.21ms +[2025-07-17 11:13:58] [Rank 0] step:4201/10000 train_time:967092ms step_avg:230.21ms +[2025-07-17 11:14:02] [Rank 0] step:4221/10000 train_time:971874ms step_avg:230.25ms +[2025-07-17 11:14:02] [Rank 0] step:4221/10000 train_time:971874ms step_avg:230.25ms +[2025-07-17 11:14:07] [Rank 0] 
step:4241/10000 train_time:976659ms step_avg:230.29ms +[2025-07-17 11:14:07] [Rank 0] step:4241/10000 train_time:976659ms step_avg:230.29ms +[2025-07-17 11:14:14] [Rank 0] PRINT: step:4250/10000 val_loss:4.2812 train_time:979292ms step_avg:230.42ms +[2025-07-17 11:14:14] [Rank 0] PRINT: step:4250/10000 val_loss:4.2812 train_time:979292ms step_avg:230.42ms +[2025-07-17 11:14:17] [Rank 0] step:4261/10000 train_time:981444ms step_avg:230.33ms +[2025-07-17 11:14:17] [Rank 0] step:4261/10000 train_time:981444ms step_avg:230.33ms +[2025-07-17 11:14:21] [Rank 0] step:4281/10000 train_time:986231ms step_avg:230.37ms +[2025-07-17 11:14:21] [Rank 0] step:4281/10000 train_time:986231ms step_avg:230.37ms +[2025-07-17 11:14:26] [Rank 0] step:4301/10000 train_time:991021ms step_avg:230.42ms +[2025-07-17 11:14:26] [Rank 0] step:4301/10000 train_time:991021ms step_avg:230.42ms +[2025-07-17 11:14:31] [Rank 0] step:4321/10000 train_time:995818ms step_avg:230.46ms +[2025-07-17 11:14:31] [Rank 0] step:4321/10000 train_time:995818ms step_avg:230.46ms +[2025-07-17 11:14:36] [Rank 0] step:4341/10000 train_time:1000606ms step_avg:230.50ms +[2025-07-17 11:14:36] [Rank 0] step:4341/10000 train_time:1000606ms step_avg:230.50ms +[2025-07-17 11:14:41] [Rank 0] step:4361/10000 train_time:1005395ms step_avg:230.54ms +[2025-07-17 11:14:41] [Rank 0] step:4361/10000 train_time:1005395ms step_avg:230.54ms +[2025-07-17 11:14:49] [Rank 0] PRINT: step:4375/10000 val_loss:4.3622 train_time:1009227ms step_avg:230.68ms +[2025-07-17 11:14:49] [Rank 0] PRINT: step:4375/10000 val_loss:4.3622 train_time:1009227ms step_avg:230.68ms +[2025-07-17 11:14:50] [Rank 0] step:4381/10000 train_time:1010183ms step_avg:230.58ms +[2025-07-17 11:14:50] [Rank 0] step:4381/10000 train_time:1010183ms step_avg:230.58ms +[2025-07-17 11:14:55] [Rank 0] step:4401/10000 train_time:1014967ms step_avg:230.62ms +[2025-07-17 11:14:55] [Rank 0] step:4401/10000 train_time:1014967ms step_avg:230.62ms +[2025-07-17 11:15:00] [Rank 0] 
step:4421/10000 train_time:1019755ms step_avg:230.66ms +[2025-07-17 11:15:00] [Rank 0] step:4421/10000 train_time:1019755ms step_avg:230.66ms +[2025-07-17 11:15:04] [Rank 0] step:4441/10000 train_time:1024545ms step_avg:230.70ms +[2025-07-17 11:15:04] [Rank 0] step:4441/10000 train_time:1024545ms step_avg:230.70ms +[2025-07-17 11:15:09] [Rank 0] step:4461/10000 train_time:1029348ms step_avg:230.74ms +[2025-07-17 11:15:09] [Rank 0] step:4461/10000 train_time:1029348ms step_avg:230.74ms +[2025-07-17 11:15:14] [Rank 0] step:4481/10000 train_time:1034154ms step_avg:230.79ms +[2025-07-17 11:15:14] [Rank 0] step:4481/10000 train_time:1034154ms step_avg:230.79ms +[2025-07-17 11:15:23] [Rank 0] PRINT: step:4500/10000 val_loss:4.2980 train_time:1039204ms step_avg:230.93ms +[2025-07-17 11:15:23] [Rank 0] PRINT: step:4500/10000 val_loss:4.2980 train_time:1039204ms step_avg:230.93ms +[2025-07-17 11:15:23] [Rank 0] step:4501/10000 train_time:1039219ms step_avg:230.89ms +[2025-07-17 11:15:23] [Rank 0] step:4501/10000 train_time:1039219ms step_avg:230.89ms +[2025-07-17 11:15:28] [Rank 0] step:4521/10000 train_time:1043764ms step_avg:230.87ms +[2025-07-17 11:15:28] [Rank 0] step:4521/10000 train_time:1043764ms step_avg:230.87ms +[2025-07-17 11:15:33] [Rank 0] step:4541/10000 train_time:1048566ms step_avg:230.91ms +[2025-07-17 11:15:33] [Rank 0] step:4541/10000 train_time:1048566ms step_avg:230.91ms +[2025-07-17 11:15:38] [Rank 0] step:4561/10000 train_time:1053363ms step_avg:230.95ms +[2025-07-17 11:15:38] [Rank 0] step:4561/10000 train_time:1053363ms step_avg:230.95ms +[2025-07-17 11:15:43] [Rank 0] step:4581/10000 train_time:1058164ms step_avg:230.99ms +[2025-07-17 11:15:43] [Rank 0] step:4581/10000 train_time:1058164ms step_avg:230.99ms +[2025-07-17 11:15:47] [Rank 0] step:4601/10000 train_time:1062967ms step_avg:231.03ms +[2025-07-17 11:15:47] [Rank 0] step:4601/10000 train_time:1062967ms step_avg:231.03ms +[2025-07-17 11:15:52] [Rank 0] step:4621/10000 train_time:1067760ms 
step_avg:231.07ms +[2025-07-17 11:15:52] [Rank 0] step:4621/10000 train_time:1067760ms step_avg:231.07ms +[2025-07-17 11:15:58] [Rank 0] PRINT: step:4625/10000 val_loss:4.3440 train_time:1069204ms step_avg:231.18ms +[2025-07-17 11:15:58] [Rank 0] PRINT: step:4625/10000 val_loss:4.3440 train_time:1069204ms step_avg:231.18ms +[2025-07-17 11:16:02] [Rank 0] step:4641/10000 train_time:1072551ms step_avg:231.10ms +[2025-07-17 11:16:02] [Rank 0] step:4641/10000 train_time:1072551ms step_avg:231.10ms +[2025-07-17 11:16:06] [Rank 0] step:4661/10000 train_time:1077347ms step_avg:231.14ms +[2025-07-17 11:16:06] [Rank 0] step:4661/10000 train_time:1077347ms step_avg:231.14ms +[2025-07-17 11:16:11] [Rank 0] step:4681/10000 train_time:1082135ms step_avg:231.18ms +[2025-07-17 11:16:11] [Rank 0] step:4681/10000 train_time:1082135ms step_avg:231.18ms +[2025-07-17 11:16:16] [Rank 0] step:4701/10000 train_time:1086927ms step_avg:231.21ms +[2025-07-17 11:16:16] [Rank 0] step:4701/10000 train_time:1086927ms step_avg:231.21ms +[2025-07-17 11:16:21] [Rank 0] step:4721/10000 train_time:1091713ms step_avg:231.25ms +[2025-07-17 11:16:21] [Rank 0] step:4721/10000 train_time:1091713ms step_avg:231.25ms +[2025-07-17 11:16:26] [Rank 0] step:4741/10000 train_time:1096502ms step_avg:231.28ms +[2025-07-17 11:16:26] [Rank 0] step:4741/10000 train_time:1096502ms step_avg:231.28ms +[2025-07-17 11:16:32] [Rank 0] PRINT: step:4750/10000 val_loss:4.3529 train_time:1099140ms step_avg:231.40ms +[2025-07-17 11:16:32] [Rank 0] PRINT: step:4750/10000 val_loss:4.3529 train_time:1099140ms step_avg:231.40ms +[2025-07-17 11:16:35] [Rank 0] step:4761/10000 train_time:1101290ms step_avg:231.31ms +[2025-07-17 11:16:35] [Rank 0] step:4761/10000 train_time:1101290ms step_avg:231.31ms +[2025-07-17 11:16:40] [Rank 0] step:4781/10000 train_time:1106071ms step_avg:231.35ms +[2025-07-17 11:16:40] [Rank 0] step:4781/10000 train_time:1106071ms step_avg:231.35ms +[2025-07-17 11:16:45] [Rank 0] step:4801/10000 
train_time:1110845ms step_avg:231.38ms +[2025-07-17 11:16:45] [Rank 0] step:4801/10000 train_time:1110845ms step_avg:231.38ms +[2025-07-17 11:16:49] [Rank 0] step:4821/10000 train_time:1115623ms step_avg:231.41ms +[2025-07-17 11:16:49] [Rank 0] step:4821/10000 train_time:1115623ms step_avg:231.41ms +[2025-07-17 11:16:54] [Rank 0] step:4841/10000 train_time:1120403ms step_avg:231.44ms +[2025-07-17 11:16:54] [Rank 0] step:4841/10000 train_time:1120403ms step_avg:231.44ms +[2025-07-17 11:16:59] [Rank 0] step:4861/10000 train_time:1125182ms step_avg:231.47ms +[2025-07-17 11:16:59] [Rank 0] step:4861/10000 train_time:1125182ms step_avg:231.47ms +[2025-07-17 11:17:07] [Rank 0] PRINT: step:4875/10000 val_loss:4.4170 train_time:1129012ms step_avg:231.59ms +[2025-07-17 11:17:07] [Rank 0] PRINT: step:4875/10000 val_loss:4.4170 train_time:1129012ms step_avg:231.59ms +[2025-07-17 11:17:08] [Rank 0] step:4881/10000 train_time:1129964ms step_avg:231.50ms +[2025-07-17 11:17:08] [Rank 0] step:4881/10000 train_time:1129964ms step_avg:231.50ms +[2025-07-17 11:17:13] [Rank 0] step:4901/10000 train_time:1134743ms step_avg:231.53ms +[2025-07-17 11:17:13] [Rank 0] step:4901/10000 train_time:1134743ms step_avg:231.53ms +[2025-07-17 11:17:18] [Rank 0] step:4921/10000 train_time:1139517ms step_avg:231.56ms +[2025-07-17 11:17:18] [Rank 0] step:4921/10000 train_time:1139517ms step_avg:231.56ms +[2025-07-17 11:17:23] [Rank 0] step:4941/10000 train_time:1144300ms step_avg:231.59ms +[2025-07-17 11:17:23] [Rank 0] step:4941/10000 train_time:1144300ms step_avg:231.59ms +[2025-07-17 11:17:27] [Rank 0] step:4961/10000 train_time:1149079ms step_avg:231.62ms +[2025-07-17 11:17:27] [Rank 0] step:4961/10000 train_time:1149079ms step_avg:231.62ms +[2025-07-17 11:17:32] [Rank 0] step:4981/10000 train_time:1153860ms step_avg:231.65ms +[2025-07-17 11:17:32] [Rank 0] step:4981/10000 train_time:1153860ms step_avg:231.65ms +[2025-07-17 11:17:41] [Rank 0] PRINT: step:5000/10000 val_loss:4.3274 
train_time:1158882ms step_avg:231.78ms +[2025-07-17 11:17:41] [Rank 0] PRINT: step:5000/10000 val_loss:4.3274 train_time:1158882ms step_avg:231.78ms +[2025-07-17 11:17:42] [Rank 0] step:5001/10000 train_time:1158895ms step_avg:231.73ms +[2025-07-17 11:17:42] [Rank 0] step:5001/10000 train_time:1158895ms step_avg:231.73ms +[2025-07-17 11:17:46] [Rank 0] step:5021/10000 train_time:1163426ms step_avg:231.71ms +[2025-07-17 11:17:46] [Rank 0] step:5021/10000 train_time:1163426ms step_avg:231.71ms +[2025-07-17 11:17:51] [Rank 0] step:5041/10000 train_time:1168211ms step_avg:231.74ms +[2025-07-17 11:17:51] [Rank 0] step:5041/10000 train_time:1168211ms step_avg:231.74ms +[2025-07-17 11:17:56] [Rank 0] step:5061/10000 train_time:1172994ms step_avg:231.77ms +[2025-07-17 11:17:56] [Rank 0] step:5061/10000 train_time:1172994ms step_avg:231.77ms +[2025-07-17 11:18:01] [Rank 0] step:5081/10000 train_time:1177776ms step_avg:231.80ms +[2025-07-17 11:18:01] [Rank 0] step:5081/10000 train_time:1177776ms step_avg:231.80ms +[2025-07-17 11:18:06] [Rank 0] step:5101/10000 train_time:1182563ms step_avg:231.83ms +[2025-07-17 11:18:06] [Rank 0] step:5101/10000 train_time:1182563ms step_avg:231.83ms +[2025-07-17 11:18:10] [Rank 0] step:5121/10000 train_time:1187346ms step_avg:231.86ms +[2025-07-17 11:18:10] [Rank 0] step:5121/10000 train_time:1187346ms step_avg:231.86ms +[2025-07-17 11:18:16] [Rank 0] PRINT: step:5125/10000 val_loss:4.3892 train_time:1188786ms step_avg:231.96ms +[2025-07-17 11:18:16] [Rank 0] PRINT: step:5125/10000 val_loss:4.3892 train_time:1188786ms step_avg:231.96ms +[2025-07-17 11:18:20] [Rank 0] step:5141/10000 train_time:1192124ms step_avg:231.89ms +[2025-07-17 11:18:20] [Rank 0] step:5141/10000 train_time:1192124ms step_avg:231.89ms +[2025-07-17 11:18:25] [Rank 0] step:5161/10000 train_time:1196908ms step_avg:231.91ms +[2025-07-17 11:18:25] [Rank 0] step:5161/10000 train_time:1196908ms step_avg:231.91ms +[2025-07-17 11:18:29] [Rank 0] step:5181/10000 
train_time:1201695ms step_avg:231.94ms +[2025-07-17 11:18:29] [Rank 0] step:5181/10000 train_time:1201695ms step_avg:231.94ms +[2025-07-17 11:18:34] [Rank 0] step:5201/10000 train_time:1206524ms step_avg:231.98ms +[2025-07-17 11:18:34] [Rank 0] step:5201/10000 train_time:1206524ms step_avg:231.98ms +[2025-07-17 11:18:39] [Rank 0] step:5221/10000 train_time:1211384ms step_avg:232.02ms +[2025-07-17 11:18:39] [Rank 0] step:5221/10000 train_time:1211384ms step_avg:232.02ms +[2025-07-17 11:18:44] [Rank 0] step:5241/10000 train_time:1216239ms step_avg:232.06ms +[2025-07-17 11:18:44] [Rank 0] step:5241/10000 train_time:1216239ms step_avg:232.06ms +[2025-07-17 11:18:50] [Rank 0] PRINT: step:5250/10000 val_loss:4.1653 train_time:1218915ms step_avg:232.17ms +[2025-07-17 11:18:50] [Rank 0] PRINT: step:5250/10000 val_loss:4.1653 train_time:1218915ms step_avg:232.17ms +[2025-07-17 11:18:53] [Rank 0] step:5261/10000 train_time:1221092ms step_avg:232.10ms +[2025-07-17 11:18:53] [Rank 0] step:5261/10000 train_time:1221092ms step_avg:232.10ms +[2025-07-17 11:18:58] [Rank 0] step:5281/10000 train_time:1225947ms step_avg:232.14ms +[2025-07-17 11:18:58] [Rank 0] step:5281/10000 train_time:1225947ms step_avg:232.14ms +[2025-07-17 11:19:03] [Rank 0] step:5301/10000 train_time:1230801ms step_avg:232.18ms +[2025-07-17 11:19:03] [Rank 0] step:5301/10000 train_time:1230801ms step_avg:232.18ms +[2025-07-17 11:19:08] [Rank 0] step:5321/10000 train_time:1235655ms step_avg:232.22ms +[2025-07-17 11:19:08] [Rank 0] step:5321/10000 train_time:1235655ms step_avg:232.22ms +[2025-07-17 11:19:12] [Rank 0] step:5341/10000 train_time:1240518ms step_avg:232.26ms +[2025-07-17 11:19:12] [Rank 0] step:5341/10000 train_time:1240518ms step_avg:232.26ms +[2025-07-17 11:19:17] [Rank 0] step:5361/10000 train_time:1245374ms step_avg:232.30ms +[2025-07-17 11:19:17] [Rank 0] step:5361/10000 train_time:1245374ms step_avg:232.30ms +[2025-07-17 11:19:25] [Rank 0] PRINT: step:5375/10000 val_loss:4.2321 
train_time:1249269ms step_avg:232.42ms +[2025-07-17 11:19:25] [Rank 0] PRINT: step:5375/10000 val_loss:4.2321 train_time:1249269ms step_avg:232.42ms +[2025-07-17 11:19:27] [Rank 0] step:5381/10000 train_time:1250238ms step_avg:232.34ms +[2025-07-17 11:19:27] [Rank 0] step:5381/10000 train_time:1250238ms step_avg:232.34ms +[2025-07-17 11:19:32] [Rank 0] step:5401/10000 train_time:1255099ms step_avg:232.38ms +[2025-07-17 11:19:32] [Rank 0] step:5401/10000 train_time:1255099ms step_avg:232.38ms +[2025-07-17 11:19:37] [Rank 0] step:5421/10000 train_time:1259971ms step_avg:232.42ms +[2025-07-17 11:19:37] [Rank 0] step:5421/10000 train_time:1259971ms step_avg:232.42ms +[2025-07-17 11:19:41] [Rank 0] step:5441/10000 train_time:1264828ms step_avg:232.46ms +[2025-07-17 11:19:41] [Rank 0] step:5441/10000 train_time:1264828ms step_avg:232.46ms +[2025-07-17 11:19:46] [Rank 0] step:5461/10000 train_time:1269693ms step_avg:232.50ms +[2025-07-17 11:19:46] [Rank 0] step:5461/10000 train_time:1269693ms step_avg:232.50ms +[2025-07-17 11:19:51] [Rank 0] step:5481/10000 train_time:1274555ms step_avg:232.54ms +[2025-07-17 11:19:51] [Rank 0] step:5481/10000 train_time:1274555ms step_avg:232.54ms +[2025-07-17 11:20:00] [Rank 0] PRINT: step:5500/10000 val_loss:4.3074 train_time:1279656ms step_avg:232.66ms +[2025-07-17 11:20:00] [Rank 0] PRINT: step:5500/10000 val_loss:4.3074 train_time:1279656ms step_avg:232.66ms +[2025-07-17 11:20:01] [Rank 0] step:5501/10000 train_time:1279670ms step_avg:232.62ms +[2025-07-17 11:20:01] [Rank 0] step:5501/10000 train_time:1279670ms step_avg:232.62ms +[2025-07-17 11:20:06] [Rank 0] step:5521/10000 train_time:1284263ms step_avg:232.61ms +[2025-07-17 11:20:06] [Rank 0] step:5521/10000 train_time:1284263ms step_avg:232.61ms +[2025-07-17 11:20:10] [Rank 0] step:5541/10000 train_time:1289127ms step_avg:232.65ms +[2025-07-17 11:20:10] [Rank 0] step:5541/10000 train_time:1289127ms step_avg:232.65ms +[2025-07-17 11:20:15] [Rank 0] step:5561/10000 
train_time:1293987ms step_avg:232.69ms +[2025-07-17 11:20:15] [Rank 0] step:5561/10000 train_time:1293987ms step_avg:232.69ms +[2025-07-17 11:20:20] [Rank 0] step:5581/10000 train_time:1298840ms step_avg:232.73ms +[2025-07-17 11:20:20] [Rank 0] step:5581/10000 train_time:1298840ms step_avg:232.73ms +[2025-07-17 11:20:25] [Rank 0] step:5601/10000 train_time:1303699ms step_avg:232.76ms +[2025-07-17 11:20:25] [Rank 0] step:5601/10000 train_time:1303699ms step_avg:232.76ms +[2025-07-17 11:20:30] [Rank 0] step:5621/10000 train_time:1308555ms step_avg:232.80ms +[2025-07-17 11:20:30] [Rank 0] step:5621/10000 train_time:1308555ms step_avg:232.80ms +[2025-07-17 11:20:35] [Rank 0] PRINT: step:5625/10000 val_loss:4.3592 train_time:1310014ms step_avg:232.89ms +[2025-07-17 11:20:35] [Rank 0] PRINT: step:5625/10000 val_loss:4.3592 train_time:1310014ms step_avg:232.89ms +[2025-07-17 11:20:39] [Rank 0] step:5641/10000 train_time:1313413ms step_avg:232.83ms +[2025-07-17 11:20:39] [Rank 0] step:5641/10000 train_time:1313413ms step_avg:232.83ms +[2025-07-17 11:20:44] [Rank 0] step:5661/10000 train_time:1318273ms step_avg:232.87ms +[2025-07-17 11:20:44] [Rank 0] step:5661/10000 train_time:1318273ms step_avg:232.87ms +[2025-07-17 11:20:49] [Rank 0] step:5681/10000 train_time:1323136ms step_avg:232.91ms +[2025-07-17 11:20:49] [Rank 0] step:5681/10000 train_time:1323136ms step_avg:232.91ms +[2025-07-17 11:20:54] [Rank 0] step:5701/10000 train_time:1327997ms step_avg:232.94ms +[2025-07-17 11:20:54] [Rank 0] step:5701/10000 train_time:1327997ms step_avg:232.94ms +[2025-07-17 11:20:58] [Rank 0] step:5721/10000 train_time:1332854ms step_avg:232.98ms +[2025-07-17 11:20:58] [Rank 0] step:5721/10000 train_time:1332854ms step_avg:232.98ms +[2025-07-17 11:21:03] [Rank 0] step:5741/10000 train_time:1337722ms step_avg:233.01ms +[2025-07-17 11:21:03] [Rank 0] step:5741/10000 train_time:1337722ms step_avg:233.01ms +[2025-07-17 11:21:10] [Rank 0] PRINT: step:5750/10000 val_loss:4.3313 
train_time:1340399ms step_avg:233.11ms +[2025-07-17 11:21:10] [Rank 0] PRINT: step:5750/10000 val_loss:4.3313 train_time:1340399ms step_avg:233.11ms +[2025-07-17 11:21:13] [Rank 0] step:5761/10000 train_time:1342590ms step_avg:233.05ms +[2025-07-17 11:21:13] [Rank 0] step:5761/10000 train_time:1342590ms step_avg:233.05ms +[2025-07-17 11:21:18] [Rank 0] step:5781/10000 train_time:1347453ms step_avg:233.08ms +[2025-07-17 11:21:18] [Rank 0] step:5781/10000 train_time:1347453ms step_avg:233.08ms +[2025-07-17 11:21:23] [Rank 0] step:5801/10000 train_time:1352308ms step_avg:233.12ms +[2025-07-17 11:21:23] [Rank 0] step:5801/10000 train_time:1352308ms step_avg:233.12ms +[2025-07-17 11:21:27] [Rank 0] step:5821/10000 train_time:1357174ms step_avg:233.15ms +[2025-07-17 11:21:27] [Rank 0] step:5821/10000 train_time:1357174ms step_avg:233.15ms +[2025-07-17 11:21:32] [Rank 0] step:5841/10000 train_time:1362038ms step_avg:233.19ms +[2025-07-17 11:21:32] [Rank 0] step:5841/10000 train_time:1362038ms step_avg:233.19ms +[2025-07-17 11:21:37] [Rank 0] step:5861/10000 train_time:1366901ms step_avg:233.22ms +[2025-07-17 11:21:37] [Rank 0] step:5861/10000 train_time:1366901ms step_avg:233.22ms +[2025-07-17 11:21:45] [Rank 0] PRINT: step:5875/10000 val_loss:4.3298 train_time:1370786ms step_avg:233.33ms +[2025-07-17 11:21:45] [Rank 0] PRINT: step:5875/10000 val_loss:4.3298 train_time:1370786ms step_avg:233.33ms +[2025-07-17 11:21:47] [Rank 0] step:5881/10000 train_time:1371755ms step_avg:233.25ms +[2025-07-17 11:21:47] [Rank 0] step:5881/10000 train_time:1371755ms step_avg:233.25ms +[2025-07-17 11:21:52] [Rank 0] step:5901/10000 train_time:1376622ms step_avg:233.29ms +[2025-07-17 11:21:52] [Rank 0] step:5901/10000 train_time:1376622ms step_avg:233.29ms +[2025-07-17 11:21:56] [Rank 0] step:5921/10000 train_time:1381478ms step_avg:233.32ms +[2025-07-17 11:21:56] [Rank 0] step:5921/10000 train_time:1381478ms step_avg:233.32ms +[2025-07-17 11:22:01] [Rank 0] step:5941/10000 
train_time:1386353ms step_avg:233.35ms +[2025-07-17 11:22:01] [Rank 0] step:5941/10000 train_time:1386353ms step_avg:233.35ms +[2025-07-17 11:22:06] [Rank 0] step:5961/10000 train_time:1391229ms step_avg:233.39ms +[2025-07-17 11:22:06] [Rank 0] step:5961/10000 train_time:1391229ms step_avg:233.39ms +[2025-07-17 11:22:11] [Rank 0] step:5981/10000 train_time:1396099ms step_avg:233.42ms +[2025-07-17 11:22:11] [Rank 0] step:5981/10000 train_time:1396099ms step_avg:233.42ms +[2025-07-17 11:22:20] [Rank 0] PRINT: step:6000/10000 val_loss:4.4914 train_time:1401220ms step_avg:233.54ms +[2025-07-17 11:22:20] [Rank 0] PRINT: step:6000/10000 val_loss:4.4914 train_time:1401220ms step_avg:233.54ms +[2025-07-17 11:22:21] [Rank 0] step:6001/10000 train_time:1401234ms step_avg:233.50ms +[2025-07-17 11:22:21] [Rank 0] step:6001/10000 train_time:1401234ms step_avg:233.50ms +[2025-07-17 11:22:25] [Rank 0] step:6021/10000 train_time:1405843ms step_avg:233.49ms +[2025-07-17 11:22:25] [Rank 0] step:6021/10000 train_time:1405843ms step_avg:233.49ms +[2025-07-17 11:22:30] [Rank 0] step:6041/10000 train_time:1410711ms step_avg:233.52ms +[2025-07-17 11:22:30] [Rank 0] step:6041/10000 train_time:1410711ms step_avg:233.52ms +[2025-07-17 11:22:35] [Rank 0] step:6061/10000 train_time:1415576ms step_avg:233.55ms +[2025-07-17 11:22:35] [Rank 0] step:6061/10000 train_time:1415576ms step_avg:233.55ms +[2025-07-17 11:22:40] [Rank 0] step:6081/10000 train_time:1420443ms step_avg:233.59ms +[2025-07-17 11:22:40] [Rank 0] step:6081/10000 train_time:1420443ms step_avg:233.59ms +[2025-07-17 11:22:45] [Rank 0] step:6101/10000 train_time:1425308ms step_avg:233.62ms +[2025-07-17 11:22:45] [Rank 0] step:6101/10000 train_time:1425308ms step_avg:233.62ms +[2025-07-17 11:22:50] [Rank 0] step:6121/10000 train_time:1430186ms step_avg:233.65ms +[2025-07-17 11:22:50] [Rank 0] step:6121/10000 train_time:1430186ms step_avg:233.65ms +[2025-07-17 11:22:55] [Rank 0] PRINT: step:6125/10000 val_loss:4.3456 
train_time:1431650ms step_avg:233.74ms +[2025-07-17 11:22:55] [Rank 0] PRINT: step:6125/10000 val_loss:4.3456 train_time:1431650ms step_avg:233.74ms +[2025-07-17 11:22:59] [Rank 0] step:6141/10000 train_time:1435056ms step_avg:233.68ms +[2025-07-17 11:22:59] [Rank 0] step:6141/10000 train_time:1435056ms step_avg:233.68ms +[2025-07-17 11:23:04] [Rank 0] step:6161/10000 train_time:1439925ms step_avg:233.72ms +[2025-07-17 11:23:04] [Rank 0] step:6161/10000 train_time:1439925ms step_avg:233.72ms +[2025-07-17 11:23:09] [Rank 0] step:6181/10000 train_time:1444802ms step_avg:233.75ms +[2025-07-17 11:23:09] [Rank 0] step:6181/10000 train_time:1444802ms step_avg:233.75ms +[2025-07-17 11:23:14] [Rank 0] step:6201/10000 train_time:1449680ms step_avg:233.78ms +[2025-07-17 11:23:14] [Rank 0] step:6201/10000 train_time:1449680ms step_avg:233.78ms +[2025-07-17 11:23:19] [Rank 0] step:6221/10000 train_time:1454557ms step_avg:233.81ms +[2025-07-17 11:23:19] [Rank 0] step:6221/10000 train_time:1454557ms step_avg:233.81ms +[2025-07-17 11:23:24] [Rank 0] step:6241/10000 train_time:1459432ms step_avg:233.85ms +[2025-07-17 11:23:24] [Rank 0] step:6241/10000 train_time:1459432ms step_avg:233.85ms +[2025-07-17 11:23:31] [Rank 0] PRINT: step:6250/10000 val_loss:4.3415 train_time:1462117ms step_avg:233.94ms +[2025-07-17 11:23:31] [Rank 0] PRINT: step:6250/10000 val_loss:4.3415 train_time:1462117ms step_avg:233.94ms +[2025-07-17 11:23:33] [Rank 0] step:6261/10000 train_time:1464304ms step_avg:233.88ms +[2025-07-17 11:23:33] [Rank 0] step:6261/10000 train_time:1464304ms step_avg:233.88ms +[2025-07-17 11:23:38] [Rank 0] step:6281/10000 train_time:1469185ms step_avg:233.91ms +[2025-07-17 11:23:38] [Rank 0] step:6281/10000 train_time:1469185ms step_avg:233.91ms +[2025-07-17 11:23:43] [Rank 0] step:6301/10000 train_time:1474058ms step_avg:233.94ms +[2025-07-17 11:23:43] [Rank 0] step:6301/10000 train_time:1474058ms step_avg:233.94ms +[2025-07-17 11:23:48] [Rank 0] step:6321/10000 
train_time:1478935ms step_avg:233.97ms +[2025-07-17 11:23:48] [Rank 0] step:6321/10000 train_time:1478935ms step_avg:233.97ms +[2025-07-17 11:23:53] [Rank 0] step:6341/10000 train_time:1483817ms step_avg:234.00ms +[2025-07-17 11:23:53] [Rank 0] step:6341/10000 train_time:1483817ms step_avg:234.00ms +[2025-07-17 11:23:58] [Rank 0] step:6361/10000 train_time:1488688ms step_avg:234.03ms +[2025-07-17 11:23:58] [Rank 0] step:6361/10000 train_time:1488688ms step_avg:234.03ms +[2025-07-17 11:24:06] [Rank 0] PRINT: step:6375/10000 val_loss:4.3932 train_time:1492584ms step_avg:234.13ms +[2025-07-17 11:24:06] [Rank 0] PRINT: step:6375/10000 val_loss:4.3932 train_time:1492584ms step_avg:234.13ms +[2025-07-17 11:24:07] [Rank 0] step:6381/10000 train_time:1493560ms step_avg:234.06ms +[2025-07-17 11:24:07] [Rank 0] step:6381/10000 train_time:1493560ms step_avg:234.06ms +[2025-07-17 11:24:12] [Rank 0] step:6401/10000 train_time:1498431ms step_avg:234.09ms +[2025-07-17 11:24:12] [Rank 0] step:6401/10000 train_time:1498431ms step_avg:234.09ms +[2025-07-17 11:24:17] [Rank 0] step:6421/10000 train_time:1503303ms step_avg:234.12ms +[2025-07-17 11:24:17] [Rank 0] step:6421/10000 train_time:1503303ms step_avg:234.12ms +[2025-07-17 11:24:22] [Rank 0] step:6441/10000 train_time:1508180ms step_avg:234.15ms +[2025-07-17 11:24:22] [Rank 0] step:6441/10000 train_time:1508180ms step_avg:234.15ms +[2025-07-17 11:24:27] [Rank 0] step:6461/10000 train_time:1513069ms step_avg:234.18ms +[2025-07-17 11:24:27] [Rank 0] step:6461/10000 train_time:1513069ms step_avg:234.18ms +[2025-07-17 11:24:32] [Rank 0] step:6481/10000 train_time:1517952ms step_avg:234.22ms +[2025-07-17 11:24:32] [Rank 0] step:6481/10000 train_time:1517952ms step_avg:234.22ms +[2025-07-17 11:24:41] [Rank 0] PRINT: step:6500/10000 val_loss:4.4741 train_time:1523076ms step_avg:234.32ms +[2025-07-17 11:24:41] [Rank 0] PRINT: step:6500/10000 val_loss:4.4741 train_time:1523076ms step_avg:234.32ms +[2025-07-17 11:24:41] [Rank 0] 
step:6501/10000 train_time:1523090ms step_avg:234.29ms +[2025-07-17 11:24:41] [Rank 0] step:6501/10000 train_time:1523090ms step_avg:234.29ms +[2025-07-17 11:24:46] [Rank 0] step:6521/10000 train_time:1527715ms step_avg:234.28ms +[2025-07-17 11:24:46] [Rank 0] step:6521/10000 train_time:1527715ms step_avg:234.28ms +[2025-07-17 11:24:51] [Rank 0] step:6541/10000 train_time:1532602ms step_avg:234.31ms +[2025-07-17 11:24:51] [Rank 0] step:6541/10000 train_time:1532602ms step_avg:234.31ms +[2025-07-17 11:24:56] [Rank 0] step:6561/10000 train_time:1537500ms step_avg:234.34ms +[2025-07-17 11:24:56] [Rank 0] step:6561/10000 train_time:1537500ms step_avg:234.34ms +[2025-07-17 11:25:01] [Rank 0] step:6581/10000 train_time:1542394ms step_avg:234.37ms +[2025-07-17 11:25:01] [Rank 0] step:6581/10000 train_time:1542394ms step_avg:234.37ms +[2025-07-17 11:25:06] [Rank 0] step:6601/10000 train_time:1547293ms step_avg:234.40ms +[2025-07-17 11:25:06] [Rank 0] step:6601/10000 train_time:1547293ms step_avg:234.40ms +[2025-07-17 11:25:11] [Rank 0] step:6621/10000 train_time:1552181ms step_avg:234.43ms +[2025-07-17 11:25:11] [Rank 0] step:6621/10000 train_time:1552181ms step_avg:234.43ms +[2025-07-17 11:25:16] [Rank 0] PRINT: step:6625/10000 val_loss:4.4203 train_time:1553653ms step_avg:234.51ms +[2025-07-17 11:25:16] [Rank 0] PRINT: step:6625/10000 val_loss:4.4203 train_time:1553653ms step_avg:234.51ms +[2025-07-17 11:25:20] [Rank 0] step:6641/10000 train_time:1557059ms step_avg:234.46ms +[2025-07-17 11:25:20] [Rank 0] step:6641/10000 train_time:1557059ms step_avg:234.46ms +[2025-07-17 11:25:25] [Rank 0] step:6661/10000 train_time:1561943ms step_avg:234.49ms +[2025-07-17 11:25:25] [Rank 0] step:6661/10000 train_time:1561943ms step_avg:234.49ms +[2025-07-17 11:25:30] [Rank 0] step:6681/10000 train_time:1566879ms step_avg:234.53ms +[2025-07-17 11:25:30] [Rank 0] step:6681/10000 train_time:1566879ms step_avg:234.53ms +[2025-07-17 11:25:35] [Rank 0] step:6701/10000 train_time:1571829ms 
step_avg:234.57ms +[2025-07-17 11:25:35] [Rank 0] step:6701/10000 train_time:1571829ms step_avg:234.57ms +[2025-07-17 11:25:40] [Rank 0] step:6721/10000 train_time:1576789ms step_avg:234.61ms +[2025-07-17 11:25:40] [Rank 0] step:6721/10000 train_time:1576789ms step_avg:234.61ms +[2025-07-17 11:25:45] [Rank 0] step:6741/10000 train_time:1581754ms step_avg:234.65ms +[2025-07-17 11:25:45] [Rank 0] step:6741/10000 train_time:1581754ms step_avg:234.65ms +[2025-07-17 11:25:52] [Rank 0] PRINT: step:6750/10000 val_loss:4.2124 train_time:1584475ms step_avg:234.74ms +[2025-07-17 11:25:52] [Rank 0] PRINT: step:6750/10000 val_loss:4.2124 train_time:1584475ms step_avg:234.74ms +[2025-07-17 11:25:55] [Rank 0] step:6761/10000 train_time:1586693ms step_avg:234.68ms +[2025-07-17 11:25:55] [Rank 0] step:6761/10000 train_time:1586693ms step_avg:234.68ms +[2025-07-17 11:26:00] [Rank 0] step:6781/10000 train_time:1591633ms step_avg:234.72ms +[2025-07-17 11:26:00] [Rank 0] step:6781/10000 train_time:1591633ms step_avg:234.72ms +[2025-07-17 11:26:04] [Rank 0] step:6801/10000 train_time:1596578ms step_avg:234.76ms +[2025-07-17 11:26:04] [Rank 0] step:6801/10000 train_time:1596578ms step_avg:234.76ms +[2025-07-17 11:26:09] [Rank 0] step:6821/10000 train_time:1601517ms step_avg:234.79ms +[2025-07-17 11:26:09] [Rank 0] step:6821/10000 train_time:1601517ms step_avg:234.79ms +[2025-07-17 11:26:14] [Rank 0] step:6841/10000 train_time:1606454ms step_avg:234.83ms +[2025-07-17 11:26:14] [Rank 0] step:6841/10000 train_time:1606454ms step_avg:234.83ms +[2025-07-17 11:26:19] [Rank 0] step:6861/10000 train_time:1611388ms step_avg:234.86ms +[2025-07-17 11:26:19] [Rank 0] step:6861/10000 train_time:1611388ms step_avg:234.86ms +[2025-07-17 11:26:27] [Rank 0] PRINT: step:6875/10000 val_loss:4.2903 train_time:1615335ms step_avg:234.96ms +[2025-07-17 11:26:27] [Rank 0] PRINT: step:6875/10000 val_loss:4.2903 train_time:1615335ms step_avg:234.96ms +[2025-07-17 11:26:29] [Rank 0] step:6881/10000 
train_time:1616320ms step_avg:234.90ms +[2025-07-17 11:26:29] [Rank 0] step:6881/10000 train_time:1616320ms step_avg:234.90ms +[2025-07-17 11:26:34] [Rank 0] step:6901/10000 train_time:1621248ms step_avg:234.93ms +[2025-07-17 11:26:34] [Rank 0] step:6901/10000 train_time:1621248ms step_avg:234.93ms +[2025-07-17 11:26:39] [Rank 0] step:6921/10000 train_time:1626175ms step_avg:234.96ms +[2025-07-17 11:26:39] [Rank 0] step:6921/10000 train_time:1626175ms step_avg:234.96ms +[2025-07-17 11:26:44] [Rank 0] step:6941/10000 train_time:1631121ms step_avg:235.00ms +[2025-07-17 11:26:44] [Rank 0] step:6941/10000 train_time:1631121ms step_avg:235.00ms +[2025-07-17 11:26:49] [Rank 0] step:6961/10000 train_time:1636059ms step_avg:235.03ms +[2025-07-17 11:26:49] [Rank 0] step:6961/10000 train_time:1636059ms step_avg:235.03ms +[2025-07-17 11:26:54] [Rank 0] step:6981/10000 train_time:1640997ms step_avg:235.07ms +[2025-07-17 11:26:54] [Rank 0] step:6981/10000 train_time:1640997ms step_avg:235.07ms +[2025-07-17 11:27:03] [Rank 0] PRINT: step:7000/10000 val_loss:4.3005 train_time:1646176ms step_avg:235.17ms +[2025-07-17 11:27:03] [Rank 0] PRINT: step:7000/10000 val_loss:4.3005 train_time:1646176ms step_avg:235.17ms +[2025-07-17 11:27:03] [Rank 0] step:7001/10000 train_time:1646189ms step_avg:235.14ms +[2025-07-17 11:27:03] [Rank 0] step:7001/10000 train_time:1646189ms step_avg:235.14ms +[2025-07-17 11:27:08] [Rank 0] step:7021/10000 train_time:1650862ms step_avg:235.13ms +[2025-07-17 11:27:08] [Rank 0] step:7021/10000 train_time:1650862ms step_avg:235.13ms +[2025-07-17 11:27:13] [Rank 0] step:7041/10000 train_time:1655798ms step_avg:235.17ms +[2025-07-17 11:27:13] [Rank 0] step:7041/10000 train_time:1655798ms step_avg:235.17ms +[2025-07-17 11:27:18] [Rank 0] step:7061/10000 train_time:1660735ms step_avg:235.20ms +[2025-07-17 11:27:18] [Rank 0] step:7061/10000 train_time:1660735ms step_avg:235.20ms +[2025-07-17 11:27:23] [Rank 0] step:7081/10000 train_time:1665680ms step_avg:235.23ms 
+[2025-07-17 11:27:23] [Rank 0] step:7081/10000 train_time:1665680ms step_avg:235.23ms +[2025-07-17 11:27:28] [Rank 0] step:7101/10000 train_time:1670614ms step_avg:235.26ms +[2025-07-17 11:27:28] [Rank 0] step:7101/10000 train_time:1670614ms step_avg:235.26ms +[2025-07-17 11:27:33] [Rank 0] step:7121/10000 train_time:1675559ms step_avg:235.30ms +[2025-07-17 11:27:33] [Rank 0] step:7121/10000 train_time:1675559ms step_avg:235.30ms +[2025-07-17 11:27:39] [Rank 0] PRINT: step:7125/10000 val_loss:4.3495 train_time:1677042ms step_avg:235.37ms +[2025-07-17 11:27:39] [Rank 0] PRINT: step:7125/10000 val_loss:4.3495 train_time:1677042ms step_avg:235.37ms +[2025-07-17 11:27:43] [Rank 0] step:7141/10000 train_time:1680501ms step_avg:235.33ms +[2025-07-17 11:27:43] [Rank 0] step:7141/10000 train_time:1680501ms step_avg:235.33ms +[2025-07-17 11:27:48] [Rank 0] step:7161/10000 train_time:1685444ms step_avg:235.36ms +[2025-07-17 11:27:48] [Rank 0] step:7161/10000 train_time:1685444ms step_avg:235.36ms +[2025-07-17 11:27:52] [Rank 0] step:7181/10000 train_time:1690385ms step_avg:235.40ms +[2025-07-17 11:27:52] [Rank 0] step:7181/10000 train_time:1690385ms step_avg:235.40ms +[2025-07-17 11:27:57] [Rank 0] step:7201/10000 train_time:1695337ms step_avg:235.43ms +[2025-07-17 11:27:57] [Rank 0] step:7201/10000 train_time:1695337ms step_avg:235.43ms +[2025-07-17 11:28:02] [Rank 0] step:7221/10000 train_time:1700279ms step_avg:235.46ms +[2025-07-17 11:28:02] [Rank 0] step:7221/10000 train_time:1700279ms step_avg:235.46ms +[2025-07-17 11:28:07] [Rank 0] step:7241/10000 train_time:1705216ms step_avg:235.49ms +[2025-07-17 11:28:07] [Rank 0] step:7241/10000 train_time:1705216ms step_avg:235.49ms +[2025-07-17 11:28:14] [Rank 0] PRINT: step:7250/10000 val_loss:4.4118 train_time:1707938ms step_avg:235.58ms +[2025-07-17 11:28:14] [Rank 0] PRINT: step:7250/10000 val_loss:4.4118 train_time:1707938ms step_avg:235.58ms +[2025-07-17 11:28:17] [Rank 0] step:7261/10000 train_time:1710153ms 
step_avg:235.53ms +[2025-07-17 11:28:17] [Rank 0] step:7261/10000 train_time:1710153ms step_avg:235.53ms +[2025-07-17 11:28:22] [Rank 0] step:7281/10000 train_time:1715093ms step_avg:235.56ms +[2025-07-17 11:28:22] [Rank 0] step:7281/10000 train_time:1715093ms step_avg:235.56ms +[2025-07-17 11:28:27] [Rank 0] step:7301/10000 train_time:1720027ms step_avg:235.59ms +[2025-07-17 11:28:27] [Rank 0] step:7301/10000 train_time:1720027ms step_avg:235.59ms +[2025-07-17 11:28:32] [Rank 0] step:7321/10000 train_time:1724980ms step_avg:235.62ms +[2025-07-17 11:28:32] [Rank 0] step:7321/10000 train_time:1724980ms step_avg:235.62ms +[2025-07-17 11:28:37] [Rank 0] step:7341/10000 train_time:1729917ms step_avg:235.65ms +[2025-07-17 11:28:37] [Rank 0] step:7341/10000 train_time:1729917ms step_avg:235.65ms +[2025-07-17 11:28:42] [Rank 0] step:7361/10000 train_time:1734864ms step_avg:235.68ms +[2025-07-17 11:28:42] [Rank 0] step:7361/10000 train_time:1734864ms step_avg:235.68ms +[2025-07-17 11:28:50] [Rank 0] PRINT: step:7375/10000 val_loss:4.5126 train_time:1738820ms step_avg:235.77ms +[2025-07-17 11:28:50] [Rank 0] PRINT: step:7375/10000 val_loss:4.5126 train_time:1738820ms step_avg:235.77ms +[2025-07-17 11:28:51] [Rank 0] step:7381/10000 train_time:1739804ms step_avg:235.71ms +[2025-07-17 11:28:51] [Rank 0] step:7381/10000 train_time:1739804ms step_avg:235.71ms +[2025-07-17 11:28:56] [Rank 0] step:7401/10000 train_time:1744753ms step_avg:235.75ms +[2025-07-17 11:28:56] [Rank 0] step:7401/10000 train_time:1744753ms step_avg:235.75ms +[2025-07-17 11:29:01] [Rank 0] step:7421/10000 train_time:1749690ms step_avg:235.78ms +[2025-07-17 11:29:01] [Rank 0] step:7421/10000 train_time:1749690ms step_avg:235.78ms +[2025-07-17 11:29:06] [Rank 0] step:7441/10000 train_time:1754649ms step_avg:235.81ms +[2025-07-17 11:29:06] [Rank 0] step:7441/10000 train_time:1754649ms step_avg:235.81ms +[2025-07-17 11:29:11] [Rank 0] step:7461/10000 train_time:1759592ms step_avg:235.84ms +[2025-07-17 
11:29:11] [Rank 0] step:7461/10000 train_time:1759592ms step_avg:235.84ms +[2025-07-17 11:29:16] [Rank 0] step:7481/10000 train_time:1764546ms step_avg:235.87ms +[2025-07-17 11:29:16] [Rank 0] step:7481/10000 train_time:1764546ms step_avg:235.87ms +[2025-07-17 11:29:25] [Rank 0] PRINT: step:7500/10000 val_loss:4.4675 train_time:1769754ms step_avg:235.97ms +[2025-07-17 11:29:25] [Rank 0] PRINT: step:7500/10000 val_loss:4.4675 train_time:1769754ms step_avg:235.97ms +[2025-07-17 11:29:25] [Rank 0] step:7501/10000 train_time:1769768ms step_avg:235.94ms +[2025-07-17 11:29:25] [Rank 0] step:7501/10000 train_time:1769768ms step_avg:235.94ms +[2025-07-17 11:29:30] [Rank 0] step:7521/10000 train_time:1774462ms step_avg:235.93ms +[2025-07-17 11:29:30] [Rank 0] step:7521/10000 train_time:1774462ms step_avg:235.93ms +[2025-07-17 11:29:35] [Rank 0] step:7541/10000 train_time:1779409ms step_avg:235.96ms +[2025-07-17 11:29:35] [Rank 0] step:7541/10000 train_time:1779409ms step_avg:235.96ms +[2025-07-17 11:29:40] [Rank 0] step:7561/10000 train_time:1784357ms step_avg:235.99ms +[2025-07-17 11:29:40] [Rank 0] step:7561/10000 train_time:1784357ms step_avg:235.99ms +[2025-07-17 11:29:45] [Rank 0] step:7581/10000 train_time:1789312ms step_avg:236.03ms +[2025-07-17 11:29:45] [Rank 0] step:7581/10000 train_time:1789312ms step_avg:236.03ms +[2025-07-17 11:29:50] [Rank 0] step:7601/10000 train_time:1794272ms step_avg:236.06ms +[2025-07-17 11:29:50] [Rank 0] step:7601/10000 train_time:1794272ms step_avg:236.06ms +[2025-07-17 11:29:55] [Rank 0] step:7621/10000 train_time:1799243ms step_avg:236.09ms +[2025-07-17 11:29:55] [Rank 0] step:7621/10000 train_time:1799243ms step_avg:236.09ms +[2025-07-17 11:30:01] [Rank 0] PRINT: step:7625/10000 val_loss:4.4286 train_time:1800731ms step_avg:236.16ms +[2025-07-17 11:30:01] [Rank 0] PRINT: step:7625/10000 val_loss:4.4286 train_time:1800731ms step_avg:236.16ms +[2025-07-17 11:30:05] [Rank 0] step:7641/10000 train_time:1804189ms step_avg:236.12ms 
+[2025-07-17 11:30:05] [Rank 0] step:7641/10000 train_time:1804189ms step_avg:236.12ms +[2025-07-17 11:30:10] [Rank 0] step:7661/10000 train_time:1809150ms step_avg:236.15ms +[2025-07-17 11:30:10] [Rank 0] step:7661/10000 train_time:1809150ms step_avg:236.15ms +[2025-07-17 11:30:15] [Rank 0] step:7681/10000 train_time:1814120ms step_avg:236.18ms +[2025-07-17 11:30:15] [Rank 0] step:7681/10000 train_time:1814120ms step_avg:236.18ms +[2025-07-17 11:30:20] [Rank 0] step:7701/10000 train_time:1819073ms step_avg:236.21ms +[2025-07-17 11:30:20] [Rank 0] step:7701/10000 train_time:1819073ms step_avg:236.21ms +[2025-07-17 11:30:25] [Rank 0] step:7721/10000 train_time:1824028ms step_avg:236.24ms +[2025-07-17 11:30:25] [Rank 0] step:7721/10000 train_time:1824028ms step_avg:236.24ms +[2025-07-17 11:30:30] [Rank 0] step:7741/10000 train_time:1828976ms step_avg:236.27ms +[2025-07-17 11:30:30] [Rank 0] step:7741/10000 train_time:1828976ms step_avg:236.27ms +[2025-07-17 11:30:37] [Rank 0] PRINT: step:7750/10000 val_loss:4.5082 train_time:1831718ms step_avg:236.35ms +[2025-07-17 11:30:37] [Rank 0] PRINT: step:7750/10000 val_loss:4.5082 train_time:1831718ms step_avg:236.35ms +[2025-07-17 11:30:39] [Rank 0] step:7761/10000 train_time:1833947ms step_avg:236.30ms +[2025-07-17 11:30:39] [Rank 0] step:7761/10000 train_time:1833947ms step_avg:236.30ms +[2025-07-17 11:30:44] [Rank 0] step:7781/10000 train_time:1838907ms step_avg:236.33ms +[2025-07-17 11:30:44] [Rank 0] step:7781/10000 train_time:1838907ms step_avg:236.33ms +[2025-07-17 11:30:49] [Rank 0] step:7801/10000 train_time:1843864ms step_avg:236.36ms +[2025-07-17 11:30:49] [Rank 0] step:7801/10000 train_time:1843864ms step_avg:236.36ms +[2025-07-17 11:30:54] [Rank 0] step:7821/10000 train_time:1848816ms step_avg:236.39ms +[2025-07-17 11:30:54] [Rank 0] step:7821/10000 train_time:1848816ms step_avg:236.39ms +[2025-07-17 11:30:59] [Rank 0] step:7841/10000 train_time:1853773ms step_avg:236.42ms +[2025-07-17 11:30:59] [Rank 0] 
step:7841/10000 train_time:1853773ms step_avg:236.42ms +[2025-07-17 11:31:04] [Rank 0] step:7861/10000 train_time:1858717ms step_avg:236.45ms +[2025-07-17 11:31:04] [Rank 0] step:7861/10000 train_time:1858717ms step_avg:236.45ms +[2025-07-17 11:31:12] [Rank 0] PRINT: step:7875/10000 val_loss:4.4305 train_time:1862675ms step_avg:236.53ms +[2025-07-17 11:31:12] [Rank 0] PRINT: step:7875/10000 val_loss:4.4305 train_time:1862675ms step_avg:236.53ms +[2025-07-17 11:31:14] [Rank 0] step:7881/10000 train_time:1863658ms step_avg:236.47ms +[2025-07-17 11:31:14] [Rank 0] step:7881/10000 train_time:1863658ms step_avg:236.47ms +[2025-07-17 11:31:19] [Rank 0] step:7901/10000 train_time:1868604ms step_avg:236.50ms +[2025-07-17 11:31:19] [Rank 0] step:7901/10000 train_time:1868604ms step_avg:236.50ms +[2025-07-17 11:31:24] [Rank 0] step:7921/10000 train_time:1873555ms step_avg:236.53ms +[2025-07-17 11:31:24] [Rank 0] step:7921/10000 train_time:1873555ms step_avg:236.53ms +[2025-07-17 11:31:29] [Rank 0] step:7941/10000 train_time:1878508ms step_avg:236.56ms +[2025-07-17 11:31:29] [Rank 0] step:7941/10000 train_time:1878508ms step_avg:236.56ms +[2025-07-17 11:31:34] [Rank 0] step:7961/10000 train_time:1883474ms step_avg:236.59ms +[2025-07-17 11:31:34] [Rank 0] step:7961/10000 train_time:1883474ms step_avg:236.59ms +[2025-07-17 11:31:39] [Rank 0] step:7981/10000 train_time:1888419ms step_avg:236.61ms +[2025-07-17 11:31:39] [Rank 0] step:7981/10000 train_time:1888419ms step_avg:236.61ms +[2025-07-17 11:31:48] [Rank 0] PRINT: step:8000/10000 val_loss:4.5000 train_time:1893629ms step_avg:236.70ms +[2025-07-17 11:31:48] [Rank 0] PRINT: step:8000/10000 val_loss:4.5000 train_time:1893629ms step_avg:236.70ms +[2025-07-17 11:31:48] [Rank 0] step:8001/10000 train_time:1893642ms step_avg:236.68ms +[2025-07-17 11:31:48] [Rank 0] step:8001/10000 train_time:1893642ms step_avg:236.68ms +[2025-07-17 11:31:53] [Rank 0] step:8021/10000 train_time:1898331ms step_avg:236.67ms +[2025-07-17 11:31:53] 
[Rank 0] step:8021/10000 train_time:1898331ms step_avg:236.67ms +[2025-07-17 11:31:58] [Rank 0] step:8041/10000 train_time:1903307ms step_avg:236.70ms +[2025-07-17 11:31:58] [Rank 0] step:8041/10000 train_time:1903307ms step_avg:236.70ms +[2025-07-17 11:32:03] [Rank 0] step:8061/10000 train_time:1908259ms step_avg:236.73ms +[2025-07-17 11:32:03] [Rank 0] step:8061/10000 train_time:1908259ms step_avg:236.73ms +[2025-07-17 11:32:08] [Rank 0] step:8081/10000 train_time:1913220ms step_avg:236.76ms +[2025-07-17 11:32:08] [Rank 0] step:8081/10000 train_time:1913220ms step_avg:236.76ms +[2025-07-17 11:32:13] [Rank 0] step:8101/10000 train_time:1918176ms step_avg:236.78ms +[2025-07-17 11:32:13] [Rank 0] step:8101/10000 train_time:1918176ms step_avg:236.78ms +[2025-07-17 11:32:18] [Rank 0] step:8121/10000 train_time:1923133ms step_avg:236.81ms +[2025-07-17 11:32:18] [Rank 0] step:8121/10000 train_time:1923133ms step_avg:236.81ms +[2025-07-17 11:32:24] [Rank 0] PRINT: step:8125/10000 val_loss:4.4887 train_time:1924623ms step_avg:236.88ms +[2025-07-17 11:32:24] [Rank 0] PRINT: step:8125/10000 val_loss:4.4887 train_time:1924623ms step_avg:236.88ms +[2025-07-17 11:32:28] [Rank 0] step:8141/10000 train_time:1928096ms step_avg:236.84ms +[2025-07-17 11:32:28] [Rank 0] step:8141/10000 train_time:1928096ms step_avg:236.84ms +[2025-07-17 11:32:33] [Rank 0] step:8161/10000 train_time:1933088ms step_avg:236.87ms +[2025-07-17 11:32:33] [Rank 0] step:8161/10000 train_time:1933088ms step_avg:236.87ms +[2025-07-17 11:32:38] [Rank 0] step:8181/10000 train_time:1938113ms step_avg:236.90ms +[2025-07-17 11:32:38] [Rank 0] step:8181/10000 train_time:1938113ms step_avg:236.90ms +[2025-07-17 11:32:43] [Rank 0] step:8201/10000 train_time:1943120ms step_avg:236.94ms +[2025-07-17 11:32:43] [Rank 0] step:8201/10000 train_time:1943120ms step_avg:236.94ms +[2025-07-17 11:32:48] [Rank 0] step:8221/10000 train_time:1948141ms step_avg:236.97ms +[2025-07-17 11:32:48] [Rank 0] step:8221/10000 
train_time:1948141ms step_avg:236.97ms +[2025-07-17 11:32:53] [Rank 0] step:8241/10000 train_time:1953157ms step_avg:237.00ms +[2025-07-17 11:32:53] [Rank 0] step:8241/10000 train_time:1953157ms step_avg:237.00ms +[2025-07-17 11:33:00] [Rank 0] PRINT: step:8250/10000 val_loss:4.4526 train_time:1955925ms step_avg:237.08ms +[2025-07-17 11:33:00] [Rank 0] PRINT: step:8250/10000 val_loss:4.4526 train_time:1955925ms step_avg:237.08ms +[2025-07-17 11:33:03] [Rank 0] step:8261/10000 train_time:1958183ms step_avg:237.04ms +[2025-07-17 11:33:03] [Rank 0] step:8261/10000 train_time:1958183ms step_avg:237.04ms +[2025-07-17 11:33:08] [Rank 0] step:8281/10000 train_time:1963225ms step_avg:237.08ms +[2025-07-17 11:33:08] [Rank 0] step:8281/10000 train_time:1963225ms step_avg:237.08ms +[2025-07-17 11:33:13] [Rank 0] step:8301/10000 train_time:1968238ms step_avg:237.11ms +[2025-07-17 11:33:13] [Rank 0] step:8301/10000 train_time:1968238ms step_avg:237.11ms +[2025-07-17 11:33:18] [Rank 0] step:8321/10000 train_time:1973266ms step_avg:237.14ms +[2025-07-17 11:33:18] [Rank 0] step:8321/10000 train_time:1973266ms step_avg:237.14ms +[2025-07-17 11:33:23] [Rank 0] step:8341/10000 train_time:1978300ms step_avg:237.18ms +[2025-07-17 11:33:23] [Rank 0] step:8341/10000 train_time:1978300ms step_avg:237.18ms +[2025-07-17 11:33:28] [Rank 0] step:8361/10000 train_time:1983318ms step_avg:237.21ms +[2025-07-17 11:33:28] [Rank 0] step:8361/10000 train_time:1983318ms step_avg:237.21ms +[2025-07-17 11:33:36] [Rank 0] PRINT: step:8375/10000 val_loss:4.4428 train_time:1987337ms step_avg:237.29ms +[2025-07-17 11:33:36] [Rank 0] PRINT: step:8375/10000 val_loss:4.4428 train_time:1987337ms step_avg:237.29ms +[2025-07-17 11:33:37] [Rank 0] step:8381/10000 train_time:1988329ms step_avg:237.24ms +[2025-07-17 11:33:37] [Rank 0] step:8381/10000 train_time:1988329ms step_avg:237.24ms +[2025-07-17 11:33:42] [Rank 0] step:8401/10000 train_time:1993330ms step_avg:237.27ms +[2025-07-17 11:33:42] [Rank 0] 
step:8401/10000 train_time:1993330ms step_avg:237.27ms +[2025-07-17 11:33:47] [Rank 0] step:8421/10000 train_time:1998356ms step_avg:237.31ms +[2025-07-17 11:33:47] [Rank 0] step:8421/10000 train_time:1998356ms step_avg:237.31ms +[2025-07-17 11:33:53] [Rank 0] step:8441/10000 train_time:2003383ms step_avg:237.34ms +[2025-07-17 11:33:53] [Rank 0] step:8441/10000 train_time:2003383ms step_avg:237.34ms +[2025-07-17 11:33:58] [Rank 0] step:8461/10000 train_time:2008418ms step_avg:237.37ms +[2025-07-17 11:33:58] [Rank 0] step:8461/10000 train_time:2008418ms step_avg:237.37ms +[2025-07-17 11:34:03] [Rank 0] step:8481/10000 train_time:2013430ms step_avg:237.40ms +[2025-07-17 11:34:03] [Rank 0] step:8481/10000 train_time:2013430ms step_avg:237.40ms +[2025-07-17 11:34:12] [Rank 0] PRINT: step:8500/10000 val_loss:4.4981 train_time:2018716ms step_avg:237.50ms +[2025-07-17 11:34:12] [Rank 0] PRINT: step:8500/10000 val_loss:4.4981 train_time:2018716ms step_avg:237.50ms +[2025-07-17 11:34:12] [Rank 0] step:8501/10000 train_time:2018730ms step_avg:237.47ms +[2025-07-17 11:34:12] [Rank 0] step:8501/10000 train_time:2018730ms step_avg:237.47ms +[2025-07-17 11:34:17] [Rank 0] step:8521/10000 train_time:2023484ms step_avg:237.47ms +[2025-07-17 11:34:17] [Rank 0] step:8521/10000 train_time:2023484ms step_avg:237.47ms +[2025-07-17 11:34:22] [Rank 0] step:8541/10000 train_time:2028522ms step_avg:237.50ms +[2025-07-17 11:34:22] [Rank 0] step:8541/10000 train_time:2028522ms step_avg:237.50ms +[2025-07-17 11:34:27] [Rank 0] step:8561/10000 train_time:2033532ms step_avg:237.53ms +[2025-07-17 11:34:27] [Rank 0] step:8561/10000 train_time:2033532ms step_avg:237.53ms +[2025-07-17 11:34:32] [Rank 0] step:8581/10000 train_time:2038548ms step_avg:237.57ms +[2025-07-17 11:34:32] [Rank 0] step:8581/10000 train_time:2038548ms step_avg:237.57ms +[2025-07-17 11:34:37] [Rank 0] step:8601/10000 train_time:2043554ms step_avg:237.59ms +[2025-07-17 11:34:37] [Rank 0] step:8601/10000 train_time:2043554ms 
step_avg:237.59ms +[2025-07-17 11:34:42] [Rank 0] step:8621/10000 train_time:2048565ms step_avg:237.62ms +[2025-07-17 11:34:42] [Rank 0] step:8621/10000 train_time:2048565ms step_avg:237.62ms +[2025-07-17 11:34:48] [Rank 0] PRINT: step:8625/10000 val_loss:4.4695 train_time:2050070ms step_avg:237.69ms +[2025-07-17 11:34:48] [Rank 0] PRINT: step:8625/10000 val_loss:4.4695 train_time:2050070ms step_avg:237.69ms +[2025-07-17 11:34:52] [Rank 0] step:8641/10000 train_time:2053588ms step_avg:237.66ms +[2025-07-17 11:34:52] [Rank 0] step:8641/10000 train_time:2053588ms step_avg:237.66ms +[2025-07-17 11:34:57] [Rank 0] step:8661/10000 train_time:2058597ms step_avg:237.69ms +[2025-07-17 11:34:57] [Rank 0] step:8661/10000 train_time:2058597ms step_avg:237.69ms +[2025-07-17 11:35:02] [Rank 0] step:8681/10000 train_time:2063611ms step_avg:237.72ms +[2025-07-17 11:35:02] [Rank 0] step:8681/10000 train_time:2063611ms step_avg:237.72ms +[2025-07-17 11:35:07] [Rank 0] step:8701/10000 train_time:2068637ms step_avg:237.75ms +[2025-07-17 11:35:07] [Rank 0] step:8701/10000 train_time:2068637ms step_avg:237.75ms +[2025-07-17 11:35:12] [Rank 0] step:8721/10000 train_time:2073662ms step_avg:237.78ms +[2025-07-17 11:35:12] [Rank 0] step:8721/10000 train_time:2073662ms step_avg:237.78ms +[2025-07-17 11:35:17] [Rank 0] step:8741/10000 train_time:2078680ms step_avg:237.81ms +[2025-07-17 11:35:17] [Rank 0] step:8741/10000 train_time:2078680ms step_avg:237.81ms +[2025-07-17 11:35:24] [Rank 0] PRINT: step:8750/10000 val_loss:4.4416 train_time:2081437ms step_avg:237.88ms +[2025-07-17 11:35:24] [Rank 0] PRINT: step:8750/10000 val_loss:4.4416 train_time:2081437ms step_avg:237.88ms +[2025-07-17 11:35:26] [Rank 0] step:8761/10000 train_time:2083692ms step_avg:237.84ms +[2025-07-17 11:35:26] [Rank 0] step:8761/10000 train_time:2083692ms step_avg:237.84ms +[2025-07-17 11:35:31] [Rank 0] step:8781/10000 train_time:2088713ms step_avg:237.87ms +[2025-07-17 11:35:31] [Rank 0] step:8781/10000 
train_time:2088713ms step_avg:237.87ms +[2025-07-17 11:35:36] [Rank 0] step:8801/10000 train_time:2093730ms step_avg:237.90ms +[2025-07-17 11:35:36] [Rank 0] step:8801/10000 train_time:2093730ms step_avg:237.90ms +[2025-07-17 11:35:41] [Rank 0] step:8821/10000 train_time:2098764ms step_avg:237.93ms +[2025-07-17 11:35:41] [Rank 0] step:8821/10000 train_time:2098764ms step_avg:237.93ms +[2025-07-17 11:35:46] [Rank 0] step:8841/10000 train_time:2103812ms step_avg:237.96ms +[2025-07-17 11:35:46] [Rank 0] step:8841/10000 train_time:2103812ms step_avg:237.96ms +[2025-07-17 11:35:52] [Rank 0] step:8861/10000 train_time:2108839ms step_avg:237.99ms +[2025-07-17 11:35:52] [Rank 0] step:8861/10000 train_time:2108839ms step_avg:237.99ms +[2025-07-17 11:36:00] [Rank 0] PRINT: step:8875/10000 val_loss:4.4105 train_time:2112857ms step_avg:238.07ms +[2025-07-17 11:36:00] [Rank 0] PRINT: step:8875/10000 val_loss:4.4105 train_time:2112857ms step_avg:238.07ms +[2025-07-17 11:36:01] [Rank 0] step:8881/10000 train_time:2113853ms step_avg:238.02ms +[2025-07-17 11:36:01] [Rank 0] step:8881/10000 train_time:2113853ms step_avg:238.02ms +[2025-07-17 11:36:06] [Rank 0] step:8901/10000 train_time:2118868ms step_avg:238.05ms +[2025-07-17 11:36:06] [Rank 0] step:8901/10000 train_time:2118868ms step_avg:238.05ms +[2025-07-17 11:36:11] [Rank 0] step:8921/10000 train_time:2123882ms step_avg:238.08ms +[2025-07-17 11:36:11] [Rank 0] step:8921/10000 train_time:2123882ms step_avg:238.08ms +[2025-07-17 11:36:16] [Rank 0] step:8941/10000 train_time:2128901ms step_avg:238.11ms +[2025-07-17 11:36:16] [Rank 0] step:8941/10000 train_time:2128901ms step_avg:238.11ms +[2025-07-17 11:36:21] [Rank 0] step:8961/10000 train_time:2133932ms step_avg:238.14ms +[2025-07-17 11:36:21] [Rank 0] step:8961/10000 train_time:2133932ms step_avg:238.14ms +[2025-07-17 11:36:26] [Rank 0] step:8981/10000 train_time:2138959ms step_avg:238.16ms +[2025-07-17 11:36:26] [Rank 0] step:8981/10000 train_time:2138959ms step_avg:238.16ms 
+[2025-07-17 11:36:36] [Rank 0] PRINT: step:9000/10000 val_loss:4.3994 train_time:2144234ms step_avg:238.25ms +[2025-07-17 11:36:36] [Rank 0] PRINT: step:9000/10000 val_loss:4.3994 train_time:2144234ms step_avg:238.25ms +[2025-07-17 11:36:36] [Rank 0] step:9001/10000 train_time:2144248ms step_avg:238.22ms +[2025-07-17 11:36:36] [Rank 0] step:9001/10000 train_time:2144248ms step_avg:238.22ms +[2025-07-17 11:36:41] [Rank 0] step:9021/10000 train_time:2148991ms step_avg:238.22ms +[2025-07-17 11:36:41] [Rank 0] step:9021/10000 train_time:2148991ms step_avg:238.22ms +[2025-07-17 11:36:46] [Rank 0] step:9041/10000 train_time:2154032ms step_avg:238.25ms +[2025-07-17 11:36:46] [Rank 0] step:9041/10000 train_time:2154032ms step_avg:238.25ms +[2025-07-17 11:36:51] [Rank 0] step:9061/10000 train_time:2159044ms step_avg:238.28ms +[2025-07-17 11:36:51] [Rank 0] step:9061/10000 train_time:2159044ms step_avg:238.28ms +[2025-07-17 11:36:56] [Rank 0] step:9081/10000 train_time:2164084ms step_avg:238.31ms +[2025-07-17 11:36:56] [Rank 0] step:9081/10000 train_time:2164084ms step_avg:238.31ms +[2025-07-17 11:37:01] [Rank 0] step:9101/10000 train_time:2169122ms step_avg:238.34ms +[2025-07-17 11:37:01] [Rank 0] step:9101/10000 train_time:2169122ms step_avg:238.34ms +[2025-07-17 11:37:06] [Rank 0] step:9121/10000 train_time:2174150ms step_avg:238.37ms +[2025-07-17 11:37:06] [Rank 0] step:9121/10000 train_time:2174150ms step_avg:238.37ms +[2025-07-17 11:37:12] [Rank 0] PRINT: step:9125/10000 val_loss:4.4315 train_time:2175655ms step_avg:238.43ms +[2025-07-17 11:37:12] [Rank 0] PRINT: step:9125/10000 val_loss:4.4315 train_time:2175655ms step_avg:238.43ms +[2025-07-17 11:37:16] [Rank 0] step:9141/10000 train_time:2179155ms step_avg:238.39ms +[2025-07-17 11:37:16] [Rank 0] step:9141/10000 train_time:2179155ms step_avg:238.39ms +[2025-07-17 11:37:21] [Rank 0] step:9161/10000 train_time:2184209ms step_avg:238.42ms +[2025-07-17 11:37:21] [Rank 0] step:9161/10000 train_time:2184209ms 
step_avg:238.42ms +[2025-07-17 11:37:26] [Rank 0] step:9181/10000 train_time:2189235ms step_avg:238.45ms +[2025-07-17 11:37:26] [Rank 0] step:9181/10000 train_time:2189235ms step_avg:238.45ms +[2025-07-17 11:37:31] [Rank 0] step:9201/10000 train_time:2194257ms step_avg:238.48ms +[2025-07-17 11:37:31] [Rank 0] step:9201/10000 train_time:2194257ms step_avg:238.48ms +[2025-07-17 11:37:36] [Rank 0] step:9221/10000 train_time:2199308ms step_avg:238.51ms +[2025-07-17 11:37:36] [Rank 0] step:9221/10000 train_time:2199308ms step_avg:238.51ms +[2025-07-17 11:37:41] [Rank 0] step:9241/10000 train_time:2204344ms step_avg:238.54ms +[2025-07-17 11:37:41] [Rank 0] step:9241/10000 train_time:2204344ms step_avg:238.54ms +[2025-07-17 11:37:48] [Rank 0] PRINT: step:9250/10000 val_loss:4.5027 train_time:2207112ms step_avg:238.61ms +[2025-07-17 11:37:48] [Rank 0] PRINT: step:9250/10000 val_loss:4.5027 train_time:2207112ms step_avg:238.61ms +[2025-07-17 11:37:51] [Rank 0] step:9261/10000 train_time:2209377ms step_avg:238.57ms +[2025-07-17 11:37:51] [Rank 0] step:9261/10000 train_time:2209377ms step_avg:238.57ms +[2025-07-17 11:37:56] [Rank 0] step:9281/10000 train_time:2214379ms step_avg:238.59ms +[2025-07-17 11:37:56] [Rank 0] step:9281/10000 train_time:2214379ms step_avg:238.59ms +[2025-07-17 11:38:01] [Rank 0] step:9301/10000 train_time:2219408ms step_avg:238.62ms +[2025-07-17 11:38:01] [Rank 0] step:9301/10000 train_time:2219408ms step_avg:238.62ms +[2025-07-17 11:38:06] [Rank 0] step:9321/10000 train_time:2224450ms step_avg:238.65ms +[2025-07-17 11:38:06] [Rank 0] step:9321/10000 train_time:2224450ms step_avg:238.65ms +[2025-07-17 11:38:11] [Rank 0] step:9341/10000 train_time:2229476ms step_avg:238.68ms +[2025-07-17 11:38:11] [Rank 0] step:9341/10000 train_time:2229476ms step_avg:238.68ms +[2025-07-17 11:38:16] [Rank 0] step:9361/10000 train_time:2234502ms step_avg:238.70ms +[2025-07-17 11:38:16] [Rank 0] step:9361/10000 train_time:2234502ms step_avg:238.70ms +[2025-07-17 
11:38:24] [Rank 0] PRINT: step:9375/10000 val_loss:4.4958 train_time:2238524ms step_avg:238.78ms +[2025-07-17 11:38:24] [Rank 0] PRINT: step:9375/10000 val_loss:4.4958 train_time:2238524ms step_avg:238.78ms +[2025-07-17 11:38:26] [Rank 0] step:9381/10000 train_time:2239521ms step_avg:238.73ms +[2025-07-17 11:38:26] [Rank 0] step:9381/10000 train_time:2239521ms step_avg:238.73ms +[2025-07-17 11:38:31] [Rank 0] step:9401/10000 train_time:2244523ms step_avg:238.75ms +[2025-07-17 11:38:31] [Rank 0] step:9401/10000 train_time:2244523ms step_avg:238.75ms +[2025-07-17 11:38:36] [Rank 0] step:9421/10000 train_time:2249546ms step_avg:238.78ms +[2025-07-17 11:38:36] [Rank 0] step:9421/10000 train_time:2249546ms step_avg:238.78ms +[2025-07-17 11:38:41] [Rank 0] step:9441/10000 train_time:2254570ms step_avg:238.81ms +[2025-07-17 11:38:41] [Rank 0] step:9441/10000 train_time:2254570ms step_avg:238.81ms +[2025-07-17 11:38:46] [Rank 0] step:9461/10000 train_time:2259600ms step_avg:238.83ms +[2025-07-17 11:38:46] [Rank 0] step:9461/10000 train_time:2259600ms step_avg:238.83ms +[2025-07-17 11:38:51] [Rank 0] step:9481/10000 train_time:2264634ms step_avg:238.86ms +[2025-07-17 11:38:51] [Rank 0] step:9481/10000 train_time:2264634ms step_avg:238.86ms +[2025-07-17 11:39:00] [Rank 0] PRINT: step:9500/10000 val_loss:4.4932 train_time:2269938ms step_avg:238.94ms +[2025-07-17 11:39:00] [Rank 0] PRINT: step:9500/10000 val_loss:4.4932 train_time:2269938ms step_avg:238.94ms +[2025-07-17 11:39:00] [Rank 0] step:9501/10000 train_time:2269951ms step_avg:238.92ms +[2025-07-17 11:39:00] [Rank 0] step:9501/10000 train_time:2269951ms step_avg:238.92ms +[2025-07-17 11:39:05] [Rank 0] step:9521/10000 train_time:2274704ms step_avg:238.91ms +[2025-07-17 11:39:05] [Rank 0] step:9521/10000 train_time:2274704ms step_avg:238.91ms +[2025-07-17 11:39:10] [Rank 0] step:9541/10000 train_time:2279755ms step_avg:238.94ms +[2025-07-17 11:39:10] [Rank 0] step:9541/10000 train_time:2279755ms step_avg:238.94ms 
+[2025-07-17 11:39:15] [Rank 0] step:9561/10000 train_time:2284769ms step_avg:238.97ms +[2025-07-17 11:39:15] [Rank 0] step:9561/10000 train_time:2284769ms step_avg:238.97ms +[2025-07-17 11:39:20] [Rank 0] step:9581/10000 train_time:2289786ms step_avg:238.99ms +[2025-07-17 11:39:20] [Rank 0] step:9581/10000 train_time:2289786ms step_avg:238.99ms +[2025-07-17 11:39:25] [Rank 0] step:9601/10000 train_time:2294807ms step_avg:239.02ms +[2025-07-17 11:39:25] [Rank 0] step:9601/10000 train_time:2294807ms step_avg:239.02ms +[2025-07-17 11:39:30] [Rank 0] step:9621/10000 train_time:2299857ms step_avg:239.05ms +[2025-07-17 11:39:30] [Rank 0] step:9621/10000 train_time:2299857ms step_avg:239.05ms +[2025-07-17 11:39:36] [Rank 0] PRINT: step:9625/10000 val_loss:4.4946 train_time:2301358ms step_avg:239.10ms +[2025-07-17 11:39:36] [Rank 0] PRINT: step:9625/10000 val_loss:4.4946 train_time:2301358ms step_avg:239.10ms +[2025-07-17 11:39:40] [Rank 0] step:9641/10000 train_time:2304890ms step_avg:239.07ms +[2025-07-17 11:39:40] [Rank 0] step:9641/10000 train_time:2304890ms step_avg:239.07ms +[2025-07-17 11:39:45] [Rank 0] step:9661/10000 train_time:2309976ms step_avg:239.10ms +[2025-07-17 11:39:45] [Rank 0] step:9661/10000 train_time:2309976ms step_avg:239.10ms +[2025-07-17 11:39:50] [Rank 0] step:9681/10000 train_time:2315047ms step_avg:239.13ms +[2025-07-17 11:39:50] [Rank 0] step:9681/10000 train_time:2315047ms step_avg:239.13ms +[2025-07-17 11:39:55] [Rank 0] step:9701/10000 train_time:2320131ms step_avg:239.16ms +[2025-07-17 11:39:55] [Rank 0] step:9701/10000 train_time:2320131ms step_avg:239.16ms +[2025-07-17 11:40:00] [Rank 0] step:9721/10000 train_time:2325190ms step_avg:239.19ms +[2025-07-17 11:40:00] [Rank 0] step:9721/10000 train_time:2325190ms step_avg:239.19ms +[2025-07-17 11:40:06] [Rank 0] step:9741/10000 train_time:2330276ms step_avg:239.22ms +[2025-07-17 11:40:06] [Rank 0] step:9741/10000 train_time:2330276ms step_avg:239.22ms +[2025-07-17 11:40:13] [Rank 0] PRINT: 
step:9750/10000 val_loss:4.5080 train_time:2333061ms step_avg:239.29ms +[2025-07-17 11:40:13] [Rank 0] PRINT: step:9750/10000 val_loss:4.5080 train_time:2333061ms step_avg:239.29ms +[2025-07-17 11:40:15] [Rank 0] step:9761/10000 train_time:2335341ms step_avg:239.25ms +[2025-07-17 11:40:15] [Rank 0] step:9761/10000 train_time:2335341ms step_avg:239.25ms +[2025-07-17 11:40:20] [Rank 0] step:9781/10000 train_time:2340401ms step_avg:239.28ms +[2025-07-17 11:40:20] [Rank 0] step:9781/10000 train_time:2340401ms step_avg:239.28ms +[2025-07-17 11:40:26] [Rank 0] step:9801/10000 train_time:2345455ms step_avg:239.31ms +[2025-07-17 11:40:26] [Rank 0] step:9801/10000 train_time:2345455ms step_avg:239.31ms +[2025-07-17 11:40:31] [Rank 0] step:9821/10000 train_time:2350515ms step_avg:239.34ms +[2025-07-17 11:40:31] [Rank 0] step:9821/10000 train_time:2350515ms step_avg:239.34ms +[2025-07-17 11:40:36] [Rank 0] step:9841/10000 train_time:2355571ms step_avg:239.36ms +[2025-07-17 11:40:36] [Rank 0] step:9841/10000 train_time:2355571ms step_avg:239.36ms +[2025-07-17 11:40:41] [Rank 0] step:9861/10000 train_time:2360634ms step_avg:239.39ms +[2025-07-17 11:40:41] [Rank 0] step:9861/10000 train_time:2360634ms step_avg:239.39ms +[2025-07-17 11:40:49] [Rank 0] PRINT: step:9875/10000 val_loss:4.3993 train_time:2364682ms step_avg:239.46ms +[2025-07-17 11:40:49] [Rank 0] PRINT: step:9875/10000 val_loss:4.3993 train_time:2364682ms step_avg:239.46ms +[2025-07-17 11:40:51] [Rank 0] step:9881/10000 train_time:2365689ms step_avg:239.42ms +[2025-07-17 11:40:51] [Rank 0] step:9881/10000 train_time:2365689ms step_avg:239.42ms +[2025-07-17 11:40:56] [Rank 0] step:9901/10000 train_time:2370756ms step_avg:239.45ms +[2025-07-17 11:40:56] [Rank 0] step:9901/10000 train_time:2370756ms step_avg:239.45ms +[2025-07-17 11:41:01] [Rank 0] step:9921/10000 train_time:2375834ms step_avg:239.48ms +[2025-07-17 11:41:01] [Rank 0] step:9921/10000 train_time:2375834ms step_avg:239.48ms +[2025-07-17 11:41:06] [Rank 0] 
step:9941/10000 train_time:2380932ms step_avg:239.51ms +[2025-07-17 11:41:06] [Rank 0] step:9941/10000 train_time:2380932ms step_avg:239.51ms +[2025-07-17 11:41:11] [Rank 0] step:9961/10000 train_time:2386022ms step_avg:239.54ms +[2025-07-17 11:41:11] [Rank 0] step:9961/10000 train_time:2386022ms step_avg:239.54ms +[2025-07-17 11:41:16] [Rank 0] step:9981/10000 train_time:2391120ms step_avg:239.57ms +[2025-07-17 11:41:16] [Rank 0] step:9981/10000 train_time:2391120ms step_avg:239.57ms +[2025-07-17 11:41:21] [Rank 0] step:10000/10000 train_time:2395918ms step_avg:239.59ms +[2025-07-17 11:41:21] [Rank 0] step:10000/10000 train_time:2395918ms step_avg:239.59ms +[2025-07-17 11:41:25] [Rank 0] PRINT: step:10000/10000 val_loss:4.4710 train_time:2396428ms step_avg:239.64ms +[2025-07-17 11:41:25] [Rank 0] PRINT: step:10000/10000 val_loss:4.4710 train_time:2396428ms step_avg:239.64ms +[2025-07-17 11:41:25] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 11:41:25 2025 --- +[2025-07-17 11:41:25] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 11:41:25 2025 --- +[2025-07-17 11:41:25] [Rank 0] PRINT: Peak memory allocated: 31117 MiB reserved: 31436 MiB +[2025-07-17 11:41:25] [Rank 0] PRINT: Peak memory allocated: 31117 MiB reserved: 31436 MiB diff --git a/logs_norope/diff_modes/mode_1_param_norope_seed_43/config.json b/logs_norope/diff_modes/mode_1_param_norope_seed_43/config.json new file mode 100644 index 0000000000000000000000000000000000000000..373b4b22c49f1e1bf37d4cd3f3424baedf8f36d4 --- /dev/null +++ b/logs_norope/diff_modes/mode_1_param_norope_seed_43/config.json @@ -0,0 +1,22 @@ +{ + "cli_args": { + "unet": false, + "seed": 43, + "optimizer_mode": 1, + "model_parameterization": "norope" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 10485760, + "train_seq_len": 49152, + 
"val_seq_len": 262144, + "num_iterations": 10000, + "cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 125, + "save_checkpoint": false + }, + "run_uuid_for_log": "35b1495c-b76c-4fe4-8bc9-db488bab6bf6", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_norope/diff_modes/mode_1_param_norope_seed_43/training_log_35b1495c-b76c-4fe4-8bc9-db488bab6bf6.txt b/logs_norope/diff_modes/mode_1_param_norope_seed_43/training_log_35b1495c-b76c-4fe4-8bc9-db488bab6bf6.txt new file mode 100644 index 0000000000000000000000000000000000000000..bdba2da6c7f11c68f0d4f48de5c5d46d2a172cfa --- /dev/null +++ b/logs_norope/diff_modes/mode_1_param_norope_seed_43/training_log_35b1495c-b76c-4fe4-8bc9-db488bab6bf6.txt @@ -0,0 +1,2360 @@ +[2025-07-17 18:06:21] [Rank 0] PRINT: --- Script Start: Thu Jul 17 18:06:21 2025 --- +[2025-07-17 18:06:21] [Rank 0] PRINT: --- Script Start: Thu Jul 17 18:06:21 2025 --- +[2025-07-17 18:06:21] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=1, model_parameterization='norope') +[2025-07-17 18:06:21] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=1, model_parameterization='norope') +[2025-07-17 18:06:21] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 18:06:21] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 18:06:21] [Rank 0] PRINT: Using fixed seed: 43 +[2025-07-17 18:06:21] [Rank 0] PRINT: Using fixed seed: 43 +[2025-07-17 18:06:21] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_1_param_norope_seed_43 +[2025-07-17 18:06:21] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_1_param_norope_seed_43 +[2025-07-17 18:06:21] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path 
+import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * 
num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used 
for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + 
config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 18:06:21] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + 
num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + 
run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 18:06:21] [Rank 0] PRINT: Constructing model... +[2025-07-17 18:06:21] [Rank 0] PRINT: Constructing model... +[2025-07-17 18:06:24] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 18:06:24] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 18:06:24] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 18:06:24] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 18:06:24] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 18:06:24] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 18:06:24] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 18:06:24] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 18:06:24] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 1 +[2025-07-17 18:06:24] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 1 +[2025-07-17 18:06:24] [Rank 0] PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: 0.001). +[2025-07-17 18:06:24] [Rank 0] PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: 0.001). +[2025-07-17 18:06:24] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-07-17 18:06:24] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-07-17 18:06:24] [Rank 0] PRINT: Muon optimizer is active with 22 parameters. +[2025-07-17 18:06:24] [Rank 0] PRINT: Muon optimizer is active with 22 parameters. +[2025-07-17 18:06:24] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 18:06:24] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 18:06:24] [Rank 0] PRINT: Model compilation complete. +[2025-07-17 18:06:24] [Rank 0] PRINT: Model compilation complete. +[2025-07-17 18:06:24] [Rank 0] PRINT: Starting warmup... 
+[2025-07-17 18:06:24] [Rank 0] PRINT: Starting warmup... +[2025-07-17 18:07:29] [Rank 0] PRINT: Warmup complete. +[2025-07-17 18:07:29] [Rank 0] PRINT: Warmup complete. +[2025-07-17 18:07:30] [Rank 0] PRINT: Starting training... +[2025-07-17 18:07:30] [Rank 0] PRINT: Starting training... +[2025-07-17 18:07:42] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 18:07:42] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 18:07:47] [Rank 0] step:21/10000 train_time:4296ms step_avg:204.55ms +[2025-07-17 18:07:47] [Rank 0] step:21/10000 train_time:4296ms step_avg:204.55ms +[2025-07-17 18:07:51] [Rank 0] step:41/10000 train_time:8734ms step_avg:213.01ms +[2025-07-17 18:07:51] [Rank 0] step:41/10000 train_time:8734ms step_avg:213.01ms +[2025-07-17 18:07:56] [Rank 0] step:61/10000 train_time:13177ms step_avg:216.02ms +[2025-07-17 18:07:56] [Rank 0] step:61/10000 train_time:13177ms step_avg:216.02ms +[2025-07-17 18:08:00] [Rank 0] step:81/10000 train_time:17627ms step_avg:217.61ms +[2025-07-17 18:08:00] [Rank 0] step:81/10000 train_time:17627ms step_avg:217.61ms +[2025-07-17 18:08:05] [Rank 0] step:101/10000 train_time:22081ms step_avg:218.63ms +[2025-07-17 18:08:05] [Rank 0] step:101/10000 train_time:22081ms step_avg:218.63ms +[2025-07-17 18:08:09] [Rank 0] step:121/10000 train_time:26535ms step_avg:219.30ms +[2025-07-17 18:08:09] [Rank 0] step:121/10000 train_time:26535ms step_avg:219.30ms +[2025-07-17 18:08:15] [Rank 0] PRINT: step:125/10000 val_loss:5.5049 train_time:27877ms step_avg:223.02ms +[2025-07-17 18:08:15] [Rank 0] PRINT: step:125/10000 val_loss:5.5049 train_time:27877ms step_avg:223.02ms +[2025-07-17 18:08:18] [Rank 0] step:141/10000 train_time:30999ms step_avg:219.85ms +[2025-07-17 18:08:18] [Rank 0] step:141/10000 train_time:30999ms step_avg:219.85ms +[2025-07-17 18:08:23] [Rank 0] step:161/10000 train_time:35451ms step_avg:220.19ms +[2025-07-17 18:08:23] [Rank 0] step:161/10000 
train_time:35451ms step_avg:220.19ms +[2025-07-17 18:08:27] [Rank 0] step:181/10000 train_time:39905ms step_avg:220.47ms +[2025-07-17 18:08:27] [Rank 0] step:181/10000 train_time:39905ms step_avg:220.47ms +[2025-07-17 18:08:32] [Rank 0] step:201/10000 train_time:44369ms step_avg:220.74ms +[2025-07-17 18:08:32] [Rank 0] step:201/10000 train_time:44369ms step_avg:220.74ms +[2025-07-17 18:08:36] [Rank 0] step:221/10000 train_time:48836ms step_avg:220.98ms +[2025-07-17 18:08:36] [Rank 0] step:221/10000 train_time:48836ms step_avg:220.98ms +[2025-07-17 18:08:41] [Rank 0] step:241/10000 train_time:53299ms step_avg:221.16ms +[2025-07-17 18:08:41] [Rank 0] step:241/10000 train_time:53299ms step_avg:221.16ms +[2025-07-17 18:08:47] [Rank 0] PRINT: step:250/10000 val_loss:5.0586 train_time:55760ms step_avg:223.04ms +[2025-07-17 18:08:47] [Rank 0] PRINT: step:250/10000 val_loss:5.0586 train_time:55760ms step_avg:223.04ms +[2025-07-17 18:08:49] [Rank 0] step:261/10000 train_time:57763ms step_avg:221.31ms +[2025-07-17 18:08:49] [Rank 0] step:261/10000 train_time:57763ms step_avg:221.31ms +[2025-07-17 18:08:54] [Rank 0] step:281/10000 train_time:62218ms step_avg:221.42ms +[2025-07-17 18:08:54] [Rank 0] step:281/10000 train_time:62218ms step_avg:221.42ms +[2025-07-17 18:08:58] [Rank 0] step:301/10000 train_time:66674ms step_avg:221.51ms +[2025-07-17 18:08:58] [Rank 0] step:301/10000 train_time:66674ms step_avg:221.51ms +[2025-07-17 18:09:03] [Rank 0] step:321/10000 train_time:71143ms step_avg:221.63ms +[2025-07-17 18:09:03] [Rank 0] step:321/10000 train_time:71143ms step_avg:221.63ms +[2025-07-17 18:09:07] [Rank 0] step:341/10000 train_time:75606ms step_avg:221.72ms +[2025-07-17 18:09:07] [Rank 0] step:341/10000 train_time:75606ms step_avg:221.72ms +[2025-07-17 18:09:12] [Rank 0] step:361/10000 train_time:80063ms step_avg:221.78ms +[2025-07-17 18:09:12] [Rank 0] step:361/10000 train_time:80063ms step_avg:221.78ms +[2025-07-17 18:09:19] [Rank 0] PRINT: step:375/10000 
val_loss:4.7600 train_time:83635ms step_avg:223.03ms +[2025-07-17 18:09:19] [Rank 0] PRINT: step:375/10000 val_loss:4.7600 train_time:83635ms step_avg:223.03ms +[2025-07-17 18:09:21] [Rank 0] step:381/10000 train_time:84527ms step_avg:221.85ms +[2025-07-17 18:09:21] [Rank 0] step:381/10000 train_time:84527ms step_avg:221.85ms +[2025-07-17 18:09:25] [Rank 0] step:401/10000 train_time:88979ms step_avg:221.89ms +[2025-07-17 18:09:25] [Rank 0] step:401/10000 train_time:88979ms step_avg:221.89ms +[2025-07-17 18:09:30] [Rank 0] step:421/10000 train_time:93435ms step_avg:221.93ms +[2025-07-17 18:09:30] [Rank 0] step:421/10000 train_time:93435ms step_avg:221.93ms +[2025-07-17 18:09:34] [Rank 0] step:441/10000 train_time:97897ms step_avg:221.99ms +[2025-07-17 18:09:34] [Rank 0] step:441/10000 train_time:97897ms step_avg:221.99ms +[2025-07-17 18:09:38] [Rank 0] step:461/10000 train_time:102364ms step_avg:222.05ms +[2025-07-17 18:09:38] [Rank 0] step:461/10000 train_time:102364ms step_avg:222.05ms +[2025-07-17 18:09:43] [Rank 0] step:481/10000 train_time:106826ms step_avg:222.09ms +[2025-07-17 18:09:43] [Rank 0] step:481/10000 train_time:106826ms step_avg:222.09ms +[2025-07-17 18:09:51] [Rank 0] PRINT: step:500/10000 val_loss:4.6925 train_time:111511ms step_avg:223.02ms +[2025-07-17 18:09:51] [Rank 0] PRINT: step:500/10000 val_loss:4.6925 train_time:111511ms step_avg:223.02ms +[2025-07-17 18:09:51] [Rank 0] step:501/10000 train_time:111523ms step_avg:222.60ms +[2025-07-17 18:09:51] [Rank 0] step:501/10000 train_time:111523ms step_avg:222.60ms +[2025-07-17 18:09:56] [Rank 0] step:521/10000 train_time:115756ms step_avg:222.18ms +[2025-07-17 18:09:56] [Rank 0] step:521/10000 train_time:115756ms step_avg:222.18ms +[2025-07-17 18:10:00] [Rank 0] step:541/10000 train_time:120219ms step_avg:222.22ms +[2025-07-17 18:10:00] [Rank 0] step:541/10000 train_time:120219ms step_avg:222.22ms +[2025-07-17 18:10:05] [Rank 0] step:561/10000 train_time:124686ms step_avg:222.26ms +[2025-07-17 
18:10:05] [Rank 0] step:561/10000 train_time:124686ms step_avg:222.26ms +[2025-07-17 18:10:09] [Rank 0] step:581/10000 train_time:129154ms step_avg:222.30ms +[2025-07-17 18:10:09] [Rank 0] step:581/10000 train_time:129154ms step_avg:222.30ms +[2025-07-17 18:10:14] [Rank 0] step:601/10000 train_time:133627ms step_avg:222.34ms +[2025-07-17 18:10:14] [Rank 0] step:601/10000 train_time:133627ms step_avg:222.34ms +[2025-07-17 18:10:18] [Rank 0] step:621/10000 train_time:138096ms step_avg:222.38ms +[2025-07-17 18:10:18] [Rank 0] step:621/10000 train_time:138096ms step_avg:222.38ms +[2025-07-17 18:10:24] [Rank 0] PRINT: step:625/10000 val_loss:4.6621 train_time:139444ms step_avg:223.11ms +[2025-07-17 18:10:24] [Rank 0] PRINT: step:625/10000 val_loss:4.6621 train_time:139444ms step_avg:223.11ms +[2025-07-17 18:10:27] [Rank 0] step:641/10000 train_time:142567ms step_avg:222.41ms +[2025-07-17 18:10:27] [Rank 0] step:641/10000 train_time:142567ms step_avg:222.41ms +[2025-07-17 18:10:32] [Rank 0] step:661/10000 train_time:147040ms step_avg:222.45ms +[2025-07-17 18:10:32] [Rank 0] step:661/10000 train_time:147040ms step_avg:222.45ms +[2025-07-17 18:10:36] [Rank 0] step:681/10000 train_time:151511ms step_avg:222.48ms +[2025-07-17 18:10:36] [Rank 0] step:681/10000 train_time:151511ms step_avg:222.48ms +[2025-07-17 18:10:41] [Rank 0] step:701/10000 train_time:155983ms step_avg:222.51ms +[2025-07-17 18:10:41] [Rank 0] step:701/10000 train_time:155983ms step_avg:222.51ms +[2025-07-17 18:10:45] [Rank 0] step:721/10000 train_time:160490ms step_avg:222.59ms +[2025-07-17 18:10:45] [Rank 0] step:721/10000 train_time:160490ms step_avg:222.59ms +[2025-07-17 18:10:50] [Rank 0] step:741/10000 train_time:164963ms step_avg:222.62ms +[2025-07-17 18:10:50] [Rank 0] step:741/10000 train_time:164963ms step_avg:222.62ms +[2025-07-17 18:10:56] [Rank 0] PRINT: step:750/10000 val_loss:4.8570 train_time:167444ms step_avg:223.26ms +[2025-07-17 18:10:56] [Rank 0] PRINT: step:750/10000 val_loss:4.8570 
train_time:167444ms step_avg:223.26ms +[2025-07-17 18:10:58] [Rank 0] step:761/10000 train_time:169470ms step_avg:222.69ms +[2025-07-17 18:10:58] [Rank 0] step:761/10000 train_time:169470ms step_avg:222.69ms +[2025-07-17 18:11:03] [Rank 0] step:781/10000 train_time:173979ms step_avg:222.76ms +[2025-07-17 18:11:03] [Rank 0] step:781/10000 train_time:173979ms step_avg:222.76ms +[2025-07-17 18:11:08] [Rank 0] step:801/10000 train_time:178488ms step_avg:222.83ms +[2025-07-17 18:11:08] [Rank 0] step:801/10000 train_time:178488ms step_avg:222.83ms +[2025-07-17 18:11:12] [Rank 0] step:821/10000 train_time:182996ms step_avg:222.89ms +[2025-07-17 18:11:12] [Rank 0] step:821/10000 train_time:182996ms step_avg:222.89ms +[2025-07-17 18:11:17] [Rank 0] step:841/10000 train_time:187504ms step_avg:222.95ms +[2025-07-17 18:11:17] [Rank 0] step:841/10000 train_time:187504ms step_avg:222.95ms +[2025-07-17 18:11:21] [Rank 0] step:861/10000 train_time:192011ms step_avg:223.01ms +[2025-07-17 18:11:21] [Rank 0] step:861/10000 train_time:192011ms step_avg:223.01ms +[2025-07-17 18:11:29] [Rank 0] PRINT: step:875/10000 val_loss:4.7377 train_time:195619ms step_avg:223.57ms +[2025-07-17 18:11:29] [Rank 0] PRINT: step:875/10000 val_loss:4.7377 train_time:195619ms step_avg:223.57ms +[2025-07-17 18:11:30] [Rank 0] step:881/10000 train_time:196517ms step_avg:223.06ms +[2025-07-17 18:11:30] [Rank 0] step:881/10000 train_time:196517ms step_avg:223.06ms +[2025-07-17 18:11:35] [Rank 0] step:901/10000 train_time:201026ms step_avg:223.11ms +[2025-07-17 18:11:35] [Rank 0] step:901/10000 train_time:201026ms step_avg:223.11ms +[2025-07-17 18:11:39] [Rank 0] step:921/10000 train_time:205538ms step_avg:223.17ms +[2025-07-17 18:11:39] [Rank 0] step:921/10000 train_time:205538ms step_avg:223.17ms +[2025-07-17 18:11:44] [Rank 0] step:941/10000 train_time:210048ms step_avg:223.22ms +[2025-07-17 18:11:44] [Rank 0] step:941/10000 train_time:210048ms step_avg:223.22ms +[2025-07-17 18:11:48] [Rank 0] 
step:961/10000 train_time:214559ms step_avg:223.27ms +[2025-07-17 18:11:48] [Rank 0] step:961/10000 train_time:214559ms step_avg:223.27ms +[2025-07-17 18:11:53] [Rank 0] step:981/10000 train_time:219072ms step_avg:223.31ms +[2025-07-17 18:11:53] [Rank 0] step:981/10000 train_time:219072ms step_avg:223.31ms +[2025-07-17 18:12:01] [Rank 0] PRINT: step:1000/10000 val_loss:4.7567 train_time:223809ms step_avg:223.81ms +[2025-07-17 18:12:01] [Rank 0] PRINT: step:1000/10000 val_loss:4.7567 train_time:223809ms step_avg:223.81ms +[2025-07-17 18:12:02] [Rank 0] step:1001/10000 train_time:223821ms step_avg:223.60ms +[2025-07-17 18:12:02] [Rank 0] step:1001/10000 train_time:223821ms step_avg:223.60ms +[2025-07-17 18:12:06] [Rank 0] step:1021/10000 train_time:228098ms step_avg:223.41ms +[2025-07-17 18:12:06] [Rank 0] step:1021/10000 train_time:228098ms step_avg:223.41ms +[2025-07-17 18:12:11] [Rank 0] step:1041/10000 train_time:232609ms step_avg:223.45ms +[2025-07-17 18:12:11] [Rank 0] step:1041/10000 train_time:232609ms step_avg:223.45ms +[2025-07-17 18:12:15] [Rank 0] step:1061/10000 train_time:237127ms step_avg:223.49ms +[2025-07-17 18:12:15] [Rank 0] step:1061/10000 train_time:237127ms step_avg:223.49ms +[2025-07-17 18:12:20] [Rank 0] step:1081/10000 train_time:241648ms step_avg:223.54ms +[2025-07-17 18:12:20] [Rank 0] step:1081/10000 train_time:241648ms step_avg:223.54ms +[2025-07-17 18:12:24] [Rank 0] step:1101/10000 train_time:246167ms step_avg:223.58ms +[2025-07-17 18:12:24] [Rank 0] step:1101/10000 train_time:246167ms step_avg:223.58ms +[2025-07-17 18:12:29] [Rank 0] step:1121/10000 train_time:250682ms step_avg:223.62ms +[2025-07-17 18:12:29] [Rank 0] step:1121/10000 train_time:250682ms step_avg:223.62ms +[2025-07-17 18:12:34] [Rank 0] PRINT: step:1125/10000 val_loss:4.7265 train_time:252043ms step_avg:224.04ms +[2025-07-17 18:12:34] [Rank 0] PRINT: step:1125/10000 val_loss:4.7265 train_time:252043ms step_avg:224.04ms +[2025-07-17 18:12:38] [Rank 0] step:1141/10000 
train_time:255199ms step_avg:223.66ms +[2025-07-17 18:12:38] [Rank 0] step:1141/10000 train_time:255199ms step_avg:223.66ms +[2025-07-17 18:12:42] [Rank 0] step:1161/10000 train_time:259721ms step_avg:223.70ms +[2025-07-17 18:12:42] [Rank 0] step:1161/10000 train_time:259721ms step_avg:223.70ms +[2025-07-17 18:12:47] [Rank 0] step:1181/10000 train_time:264249ms step_avg:223.75ms +[2025-07-17 18:12:47] [Rank 0] step:1181/10000 train_time:264249ms step_avg:223.75ms +[2025-07-17 18:12:51] [Rank 0] step:1201/10000 train_time:268773ms step_avg:223.79ms +[2025-07-17 18:12:51] [Rank 0] step:1201/10000 train_time:268773ms step_avg:223.79ms +[2025-07-17 18:12:56] [Rank 0] step:1221/10000 train_time:273296ms step_avg:223.83ms +[2025-07-17 18:12:56] [Rank 0] step:1221/10000 train_time:273296ms step_avg:223.83ms +[2025-07-17 18:13:00] [Rank 0] step:1241/10000 train_time:277821ms step_avg:223.87ms +[2025-07-17 18:13:00] [Rank 0] step:1241/10000 train_time:277821ms step_avg:223.87ms +[2025-07-17 18:13:06] [Rank 0] PRINT: step:1250/10000 val_loss:4.7631 train_time:280313ms step_avg:224.25ms +[2025-07-17 18:13:06] [Rank 0] PRINT: step:1250/10000 val_loss:4.7631 train_time:280313ms step_avg:224.25ms +[2025-07-17 18:13:09] [Rank 0] step:1261/10000 train_time:282342ms step_avg:223.90ms +[2025-07-17 18:13:09] [Rank 0] step:1261/10000 train_time:282342ms step_avg:223.90ms +[2025-07-17 18:13:13] [Rank 0] step:1281/10000 train_time:286864ms step_avg:223.94ms +[2025-07-17 18:13:13] [Rank 0] step:1281/10000 train_time:286864ms step_avg:223.94ms +[2025-07-17 18:13:18] [Rank 0] step:1301/10000 train_time:291386ms step_avg:223.97ms +[2025-07-17 18:13:18] [Rank 0] step:1301/10000 train_time:291386ms step_avg:223.97ms +[2025-07-17 18:13:22] [Rank 0] step:1321/10000 train_time:295910ms step_avg:224.00ms +[2025-07-17 18:13:22] [Rank 0] step:1321/10000 train_time:295910ms step_avg:224.00ms +[2025-07-17 18:13:27] [Rank 0] step:1341/10000 train_time:300429ms step_avg:224.03ms +[2025-07-17 18:13:27] 
[Rank 0] step:1341/10000 train_time:300429ms step_avg:224.03ms +[2025-07-17 18:13:31] [Rank 0] step:1361/10000 train_time:304953ms step_avg:224.07ms +[2025-07-17 18:13:31] [Rank 0] step:1361/10000 train_time:304953ms step_avg:224.07ms +[2025-07-17 18:13:39] [Rank 0] PRINT: step:1375/10000 val_loss:4.7606 train_time:308574ms step_avg:224.42ms +[2025-07-17 18:13:39] [Rank 0] PRINT: step:1375/10000 val_loss:4.7606 train_time:308574ms step_avg:224.42ms +[2025-07-17 18:13:40] [Rank 0] step:1381/10000 train_time:309476ms step_avg:224.10ms +[2025-07-17 18:13:40] [Rank 0] step:1381/10000 train_time:309476ms step_avg:224.10ms +[2025-07-17 18:13:45] [Rank 0] step:1401/10000 train_time:313999ms step_avg:224.12ms +[2025-07-17 18:13:45] [Rank 0] step:1401/10000 train_time:313999ms step_avg:224.12ms +[2025-07-17 18:13:49] [Rank 0] step:1421/10000 train_time:318528ms step_avg:224.16ms +[2025-07-17 18:13:49] [Rank 0] step:1421/10000 train_time:318528ms step_avg:224.16ms +[2025-07-17 18:13:54] [Rank 0] step:1441/10000 train_time:323055ms step_avg:224.19ms +[2025-07-17 18:13:54] [Rank 0] step:1441/10000 train_time:323055ms step_avg:224.19ms +[2025-07-17 18:13:58] [Rank 0] step:1461/10000 train_time:327586ms step_avg:224.22ms +[2025-07-17 18:13:58] [Rank 0] step:1461/10000 train_time:327586ms step_avg:224.22ms +[2025-07-17 18:14:03] [Rank 0] step:1481/10000 train_time:332116ms step_avg:224.25ms +[2025-07-17 18:14:03] [Rank 0] step:1481/10000 train_time:332116ms step_avg:224.25ms +[2025-07-17 18:14:12] [Rank 0] PRINT: step:1500/10000 val_loss:4.7406 train_time:336894ms step_avg:224.60ms +[2025-07-17 18:14:12] [Rank 0] PRINT: step:1500/10000 val_loss:4.7406 train_time:336894ms step_avg:224.60ms +[2025-07-17 18:14:12] [Rank 0] step:1501/10000 train_time:336906ms step_avg:224.45ms +[2025-07-17 18:14:12] [Rank 0] step:1501/10000 train_time:336906ms step_avg:224.45ms +[2025-07-17 18:14:17] [Rank 0] step:1521/10000 train_time:341220ms step_avg:224.34ms +[2025-07-17 18:14:17] [Rank 0] 
step:1521/10000 train_time:341220ms step_avg:224.34ms +[2025-07-17 18:14:21] [Rank 0] step:1541/10000 train_time:345777ms step_avg:224.38ms +[2025-07-17 18:14:21] [Rank 0] step:1541/10000 train_time:345777ms step_avg:224.38ms +[2025-07-17 18:14:26] [Rank 0] step:1561/10000 train_time:350329ms step_avg:224.43ms +[2025-07-17 18:14:26] [Rank 0] step:1561/10000 train_time:350329ms step_avg:224.43ms +[2025-07-17 18:14:30] [Rank 0] step:1581/10000 train_time:354884ms step_avg:224.47ms +[2025-07-17 18:14:30] [Rank 0] step:1581/10000 train_time:354884ms step_avg:224.47ms +[2025-07-17 18:14:35] [Rank 0] step:1601/10000 train_time:359438ms step_avg:224.51ms +[2025-07-17 18:14:35] [Rank 0] step:1601/10000 train_time:359438ms step_avg:224.51ms +[2025-07-17 18:14:39] [Rank 0] step:1621/10000 train_time:363996ms step_avg:224.55ms +[2025-07-17 18:14:39] [Rank 0] step:1621/10000 train_time:363996ms step_avg:224.55ms +[2025-07-17 18:14:44] [Rank 0] PRINT: step:1625/10000 val_loss:4.6991 train_time:365369ms step_avg:224.84ms +[2025-07-17 18:14:44] [Rank 0] PRINT: step:1625/10000 val_loss:4.6991 train_time:365369ms step_avg:224.84ms +[2025-07-17 18:14:48] [Rank 0] step:1641/10000 train_time:368554ms step_avg:224.59ms +[2025-07-17 18:14:48] [Rank 0] step:1641/10000 train_time:368554ms step_avg:224.59ms +[2025-07-17 18:14:52] [Rank 0] step:1661/10000 train_time:373111ms step_avg:224.63ms +[2025-07-17 18:14:52] [Rank 0] step:1661/10000 train_time:373111ms step_avg:224.63ms +[2025-07-17 18:14:57] [Rank 0] step:1681/10000 train_time:377667ms step_avg:224.67ms +[2025-07-17 18:14:57] [Rank 0] step:1681/10000 train_time:377667ms step_avg:224.67ms +[2025-07-17 18:15:02] [Rank 0] step:1701/10000 train_time:382226ms step_avg:224.71ms +[2025-07-17 18:15:02] [Rank 0] step:1701/10000 train_time:382226ms step_avg:224.71ms +[2025-07-17 18:15:06] [Rank 0] step:1721/10000 train_time:386786ms step_avg:224.75ms +[2025-07-17 18:15:06] [Rank 0] step:1721/10000 train_time:386786ms step_avg:224.75ms 
+[2025-07-17 18:15:11] [Rank 0] step:1741/10000 train_time:391343ms step_avg:224.78ms +[2025-07-17 18:15:11] [Rank 0] step:1741/10000 train_time:391343ms step_avg:224.78ms +[2025-07-17 18:15:17] [Rank 0] PRINT: step:1750/10000 val_loss:4.7499 train_time:393854ms step_avg:225.06ms +[2025-07-17 18:15:17] [Rank 0] PRINT: step:1750/10000 val_loss:4.7499 train_time:393854ms step_avg:225.06ms +[2025-07-17 18:15:19] [Rank 0] step:1761/10000 train_time:395897ms step_avg:224.81ms +[2025-07-17 18:15:19] [Rank 0] step:1761/10000 train_time:395897ms step_avg:224.81ms +[2025-07-17 18:15:24] [Rank 0] step:1781/10000 train_time:400460ms step_avg:224.85ms +[2025-07-17 18:15:24] [Rank 0] step:1781/10000 train_time:400460ms step_avg:224.85ms +[2025-07-17 18:15:28] [Rank 0] step:1801/10000 train_time:405020ms step_avg:224.89ms +[2025-07-17 18:15:28] [Rank 0] step:1801/10000 train_time:405020ms step_avg:224.89ms +[2025-07-17 18:15:33] [Rank 0] step:1821/10000 train_time:409582ms step_avg:224.92ms +[2025-07-17 18:15:33] [Rank 0] step:1821/10000 train_time:409582ms step_avg:224.92ms +[2025-07-17 18:15:37] [Rank 0] step:1841/10000 train_time:414138ms step_avg:224.95ms +[2025-07-17 18:15:37] [Rank 0] step:1841/10000 train_time:414138ms step_avg:224.95ms +[2025-07-17 18:15:42] [Rank 0] step:1861/10000 train_time:418697ms step_avg:224.98ms +[2025-07-17 18:15:42] [Rank 0] step:1861/10000 train_time:418697ms step_avg:224.98ms +[2025-07-17 18:15:49] [Rank 0] PRINT: step:1875/10000 val_loss:4.6359 train_time:422346ms step_avg:225.25ms +[2025-07-17 18:15:49] [Rank 0] PRINT: step:1875/10000 val_loss:4.6359 train_time:422346ms step_avg:225.25ms +[2025-07-17 18:15:51] [Rank 0] step:1881/10000 train_time:423253ms step_avg:225.01ms +[2025-07-17 18:15:51] [Rank 0] step:1881/10000 train_time:423253ms step_avg:225.01ms +[2025-07-17 18:15:55] [Rank 0] step:1901/10000 train_time:427809ms step_avg:225.04ms +[2025-07-17 18:15:55] [Rank 0] step:1901/10000 train_time:427809ms step_avg:225.04ms +[2025-07-17 
18:16:00] [Rank 0] step:1921/10000 train_time:432366ms step_avg:225.07ms +[2025-07-17 18:16:00] [Rank 0] step:1921/10000 train_time:432366ms step_avg:225.07ms +[2025-07-17 18:16:04] [Rank 0] step:1941/10000 train_time:436923ms step_avg:225.10ms +[2025-07-17 18:16:04] [Rank 0] step:1941/10000 train_time:436923ms step_avg:225.10ms +[2025-07-17 18:16:09] [Rank 0] step:1961/10000 train_time:441480ms step_avg:225.13ms +[2025-07-17 18:16:09] [Rank 0] step:1961/10000 train_time:441480ms step_avg:225.13ms +[2025-07-17 18:16:13] [Rank 0] step:1981/10000 train_time:446038ms step_avg:225.16ms +[2025-07-17 18:16:13] [Rank 0] step:1981/10000 train_time:446038ms step_avg:225.16ms +[2025-07-17 18:16:22] [Rank 0] PRINT: step:2000/10000 val_loss:4.6742 train_time:450822ms step_avg:225.41ms +[2025-07-17 18:16:22] [Rank 0] PRINT: step:2000/10000 val_loss:4.6742 train_time:450822ms step_avg:225.41ms +[2025-07-17 18:16:23] [Rank 0] step:2001/10000 train_time:450834ms step_avg:225.30ms +[2025-07-17 18:16:23] [Rank 0] step:2001/10000 train_time:450834ms step_avg:225.30ms +[2025-07-17 18:16:27] [Rank 0] step:2021/10000 train_time:455152ms step_avg:225.21ms +[2025-07-17 18:16:27] [Rank 0] step:2021/10000 train_time:455152ms step_avg:225.21ms +[2025-07-17 18:16:32] [Rank 0] step:2041/10000 train_time:459714ms step_avg:225.24ms +[2025-07-17 18:16:32] [Rank 0] step:2041/10000 train_time:459714ms step_avg:225.24ms +[2025-07-17 18:16:36] [Rank 0] step:2061/10000 train_time:464268ms step_avg:225.26ms +[2025-07-17 18:16:36] [Rank 0] step:2061/10000 train_time:464268ms step_avg:225.26ms +[2025-07-17 18:16:41] [Rank 0] step:2081/10000 train_time:468824ms step_avg:225.29ms +[2025-07-17 18:16:41] [Rank 0] step:2081/10000 train_time:468824ms step_avg:225.29ms +[2025-07-17 18:16:45] [Rank 0] step:2101/10000 train_time:473382ms step_avg:225.31ms +[2025-07-17 18:16:45] [Rank 0] step:2101/10000 train_time:473382ms step_avg:225.31ms +[2025-07-17 18:16:50] [Rank 0] step:2121/10000 train_time:477941ms 
step_avg:225.34ms +[2025-07-17 18:16:50] [Rank 0] step:2121/10000 train_time:477941ms step_avg:225.34ms +[2025-07-17 18:16:55] [Rank 0] PRINT: step:2125/10000 val_loss:4.7977 train_time:479313ms step_avg:225.56ms +[2025-07-17 18:16:55] [Rank 0] PRINT: step:2125/10000 val_loss:4.7977 train_time:479313ms step_avg:225.56ms +[2025-07-17 18:16:59] [Rank 0] step:2141/10000 train_time:482499ms step_avg:225.36ms +[2025-07-17 18:16:59] [Rank 0] step:2141/10000 train_time:482499ms step_avg:225.36ms +[2025-07-17 18:17:03] [Rank 0] step:2161/10000 train_time:487059ms step_avg:225.39ms +[2025-07-17 18:17:03] [Rank 0] step:2161/10000 train_time:487059ms step_avg:225.39ms +[2025-07-17 18:17:08] [Rank 0] step:2181/10000 train_time:491618ms step_avg:225.41ms +[2025-07-17 18:17:08] [Rank 0] step:2181/10000 train_time:491618ms step_avg:225.41ms +[2025-07-17 18:17:12] [Rank 0] step:2201/10000 train_time:496178ms step_avg:225.43ms +[2025-07-17 18:17:12] [Rank 0] step:2201/10000 train_time:496178ms step_avg:225.43ms +[2025-07-17 18:17:17] [Rank 0] step:2221/10000 train_time:500741ms step_avg:225.46ms +[2025-07-17 18:17:17] [Rank 0] step:2221/10000 train_time:500741ms step_avg:225.46ms +[2025-07-17 18:17:21] [Rank 0] step:2241/10000 train_time:505390ms step_avg:225.52ms +[2025-07-17 18:17:21] [Rank 0] step:2241/10000 train_time:505390ms step_avg:225.52ms +[2025-07-17 18:17:28] [Rank 0] PRINT: step:2250/10000 val_loss:4.1686 train_time:507963ms step_avg:225.76ms +[2025-07-17 18:17:28] [Rank 0] PRINT: step:2250/10000 val_loss:4.1686 train_time:507963ms step_avg:225.76ms +[2025-07-17 18:17:31] [Rank 0] step:2261/10000 train_time:510065ms step_avg:225.59ms +[2025-07-17 18:17:31] [Rank 0] step:2261/10000 train_time:510065ms step_avg:225.59ms +[2025-07-17 18:17:35] [Rank 0] step:2281/10000 train_time:514747ms step_avg:225.67ms +[2025-07-17 18:17:35] [Rank 0] step:2281/10000 train_time:514747ms step_avg:225.67ms +[2025-07-17 18:17:40] [Rank 0] step:2301/10000 train_time:519425ms 
step_avg:225.74ms +[2025-07-17 18:17:40] [Rank 0] step:2301/10000 train_time:519425ms step_avg:225.74ms +[2025-07-17 18:17:45] [Rank 0] step:2321/10000 train_time:524108ms step_avg:225.81ms +[2025-07-17 18:17:45] [Rank 0] step:2321/10000 train_time:524108ms step_avg:225.81ms +[2025-07-17 18:17:49] [Rank 0] step:2341/10000 train_time:528789ms step_avg:225.88ms +[2025-07-17 18:17:49] [Rank 0] step:2341/10000 train_time:528789ms step_avg:225.88ms +[2025-07-17 18:17:54] [Rank 0] step:2361/10000 train_time:533471ms step_avg:225.95ms +[2025-07-17 18:17:54] [Rank 0] step:2361/10000 train_time:533471ms step_avg:225.95ms +[2025-07-17 18:18:02] [Rank 0] PRINT: step:2375/10000 val_loss:4.1374 train_time:537219ms step_avg:226.20ms +[2025-07-17 18:18:02] [Rank 0] PRINT: step:2375/10000 val_loss:4.1374 train_time:537219ms step_avg:226.20ms +[2025-07-17 18:18:03] [Rank 0] step:2381/10000 train_time:538149ms step_avg:226.02ms +[2025-07-17 18:18:03] [Rank 0] step:2381/10000 train_time:538149ms step_avg:226.02ms +[2025-07-17 18:18:08] [Rank 0] step:2401/10000 train_time:542827ms step_avg:226.08ms +[2025-07-17 18:18:08] [Rank 0] step:2401/10000 train_time:542827ms step_avg:226.08ms +[2025-07-17 18:18:13] [Rank 0] step:2421/10000 train_time:547506ms step_avg:226.15ms +[2025-07-17 18:18:13] [Rank 0] step:2421/10000 train_time:547506ms step_avg:226.15ms +[2025-07-17 18:18:17] [Rank 0] step:2441/10000 train_time:552183ms step_avg:226.21ms +[2025-07-17 18:18:17] [Rank 0] step:2441/10000 train_time:552183ms step_avg:226.21ms +[2025-07-17 18:18:22] [Rank 0] step:2461/10000 train_time:556863ms step_avg:226.27ms +[2025-07-17 18:18:22] [Rank 0] step:2461/10000 train_time:556863ms step_avg:226.27ms +[2025-07-17 18:18:27] [Rank 0] step:2481/10000 train_time:561537ms step_avg:226.34ms +[2025-07-17 18:18:27] [Rank 0] step:2481/10000 train_time:561537ms step_avg:226.34ms +[2025-07-17 18:18:36] [Rank 0] PRINT: step:2500/10000 val_loss:4.3608 train_time:566447ms step_avg:226.58ms +[2025-07-17 
18:18:36] [Rank 0] PRINT: step:2500/10000 val_loss:4.3608 train_time:566447ms step_avg:226.58ms +[2025-07-17 18:18:36] [Rank 0] step:2501/10000 train_time:566460ms step_avg:226.49ms +[2025-07-17 18:18:36] [Rank 0] step:2501/10000 train_time:566460ms step_avg:226.49ms +[2025-07-17 18:18:41] [Rank 0] step:2521/10000 train_time:570887ms step_avg:226.45ms +[2025-07-17 18:18:41] [Rank 0] step:2521/10000 train_time:570887ms step_avg:226.45ms +[2025-07-17 18:18:45] [Rank 0] step:2541/10000 train_time:575563ms step_avg:226.51ms +[2025-07-17 18:18:45] [Rank 0] step:2541/10000 train_time:575563ms step_avg:226.51ms +[2025-07-17 18:18:50] [Rank 0] step:2561/10000 train_time:580241ms step_avg:226.57ms +[2025-07-17 18:18:50] [Rank 0] step:2561/10000 train_time:580241ms step_avg:226.57ms +[2025-07-17 18:18:55] [Rank 0] step:2581/10000 train_time:584919ms step_avg:226.63ms +[2025-07-17 18:18:55] [Rank 0] step:2581/10000 train_time:584919ms step_avg:226.63ms +[2025-07-17 18:19:00] [Rank 0] step:2601/10000 train_time:589697ms step_avg:226.72ms +[2025-07-17 18:19:00] [Rank 0] step:2601/10000 train_time:589697ms step_avg:226.72ms +[2025-07-17 18:19:04] [Rank 0] step:2621/10000 train_time:594375ms step_avg:226.77ms +[2025-07-17 18:19:04] [Rank 0] step:2621/10000 train_time:594375ms step_avg:226.77ms +[2025-07-17 18:19:10] [Rank 0] PRINT: step:2625/10000 val_loss:4.3818 train_time:595782ms step_avg:226.96ms +[2025-07-17 18:19:10] [Rank 0] PRINT: step:2625/10000 val_loss:4.3818 train_time:595782ms step_avg:226.96ms +[2025-07-17 18:19:13] [Rank 0] step:2641/10000 train_time:599048ms step_avg:226.83ms +[2025-07-17 18:19:13] [Rank 0] step:2641/10000 train_time:599048ms step_avg:226.83ms +[2025-07-17 18:19:18] [Rank 0] step:2661/10000 train_time:603721ms step_avg:226.88ms +[2025-07-17 18:19:18] [Rank 0] step:2661/10000 train_time:603721ms step_avg:226.88ms +[2025-07-17 18:19:23] [Rank 0] step:2681/10000 train_time:608395ms step_avg:226.93ms +[2025-07-17 18:19:23] [Rank 0] step:2681/10000 
train_time:608395ms step_avg:226.93ms +[2025-07-17 18:19:27] [Rank 0] step:2701/10000 train_time:613070ms step_avg:226.98ms +[2025-07-17 18:19:27] [Rank 0] step:2701/10000 train_time:613070ms step_avg:226.98ms +[2025-07-17 18:19:32] [Rank 0] step:2721/10000 train_time:617741ms step_avg:227.03ms +[2025-07-17 18:19:32] [Rank 0] step:2721/10000 train_time:617741ms step_avg:227.03ms +[2025-07-17 18:19:37] [Rank 0] step:2741/10000 train_time:622415ms step_avg:227.08ms +[2025-07-17 18:19:37] [Rank 0] step:2741/10000 train_time:622415ms step_avg:227.08ms +[2025-07-17 18:19:43] [Rank 0] PRINT: step:2750/10000 val_loss:4.5735 train_time:624990ms step_avg:227.27ms +[2025-07-17 18:19:43] [Rank 0] PRINT: step:2750/10000 val_loss:4.5735 train_time:624990ms step_avg:227.27ms +[2025-07-17 18:19:46] [Rank 0] step:2761/10000 train_time:627086ms step_avg:227.12ms +[2025-07-17 18:19:46] [Rank 0] step:2761/10000 train_time:627086ms step_avg:227.12ms +[2025-07-17 18:19:51] [Rank 0] step:2781/10000 train_time:631760ms step_avg:227.17ms +[2025-07-17 18:19:51] [Rank 0] step:2781/10000 train_time:631760ms step_avg:227.17ms +[2025-07-17 18:19:55] [Rank 0] step:2801/10000 train_time:636432ms step_avg:227.22ms +[2025-07-17 18:19:55] [Rank 0] step:2801/10000 train_time:636432ms step_avg:227.22ms +[2025-07-17 18:20:00] [Rank 0] step:2821/10000 train_time:641107ms step_avg:227.26ms +[2025-07-17 18:20:00] [Rank 0] step:2821/10000 train_time:641107ms step_avg:227.26ms +[2025-07-17 18:20:05] [Rank 0] step:2841/10000 train_time:645780ms step_avg:227.31ms +[2025-07-17 18:20:05] [Rank 0] step:2841/10000 train_time:645780ms step_avg:227.31ms +[2025-07-17 18:20:09] [Rank 0] step:2861/10000 train_time:650457ms step_avg:227.35ms +[2025-07-17 18:20:09] [Rank 0] step:2861/10000 train_time:650457ms step_avg:227.35ms +[2025-07-17 18:20:17] [Rank 0] PRINT: step:2875/10000 val_loss:4.3563 train_time:654198ms step_avg:227.55ms +[2025-07-17 18:20:17] [Rank 0] PRINT: step:2875/10000 val_loss:4.3563 
train_time:654198ms step_avg:227.55ms +[2025-07-17 18:20:19] [Rank 0] step:2881/10000 train_time:655128ms step_avg:227.40ms +[2025-07-17 18:20:19] [Rank 0] step:2881/10000 train_time:655128ms step_avg:227.40ms +[2025-07-17 18:20:23] [Rank 0] step:2901/10000 train_time:659800ms step_avg:227.44ms +[2025-07-17 18:20:23] [Rank 0] step:2901/10000 train_time:659800ms step_avg:227.44ms +[2025-07-17 18:20:28] [Rank 0] step:2921/10000 train_time:664476ms step_avg:227.48ms +[2025-07-17 18:20:28] [Rank 0] step:2921/10000 train_time:664476ms step_avg:227.48ms +[2025-07-17 18:20:33] [Rank 0] step:2941/10000 train_time:669152ms step_avg:227.53ms +[2025-07-17 18:20:33] [Rank 0] step:2941/10000 train_time:669152ms step_avg:227.53ms +[2025-07-17 18:20:37] [Rank 0] step:2961/10000 train_time:673829ms step_avg:227.57ms +[2025-07-17 18:20:37] [Rank 0] step:2961/10000 train_time:673829ms step_avg:227.57ms +[2025-07-17 18:20:42] [Rank 0] step:2981/10000 train_time:678520ms step_avg:227.61ms +[2025-07-17 18:20:42] [Rank 0] step:2981/10000 train_time:678520ms step_avg:227.61ms +[2025-07-17 18:20:51] [Rank 0] PRINT: step:3000/10000 val_loss:4.3671 train_time:683450ms step_avg:227.82ms +[2025-07-17 18:20:51] [Rank 0] PRINT: step:3000/10000 val_loss:4.3671 train_time:683450ms step_avg:227.82ms +[2025-07-17 18:20:51] [Rank 0] step:3001/10000 train_time:683463ms step_avg:227.75ms +[2025-07-17 18:20:51] [Rank 0] step:3001/10000 train_time:683463ms step_avg:227.75ms +[2025-07-17 18:20:56] [Rank 0] step:3021/10000 train_time:687907ms step_avg:227.71ms +[2025-07-17 18:20:56] [Rank 0] step:3021/10000 train_time:687907ms step_avg:227.71ms +[2025-07-17 18:21:01] [Rank 0] step:3041/10000 train_time:692608ms step_avg:227.76ms +[2025-07-17 18:21:01] [Rank 0] step:3041/10000 train_time:692608ms step_avg:227.76ms +[2025-07-17 18:21:05] [Rank 0] step:3061/10000 train_time:697302ms step_avg:227.80ms +[2025-07-17 18:21:05] [Rank 0] step:3061/10000 train_time:697302ms step_avg:227.80ms +[2025-07-17 18:21:10] 
[Rank 0] step:3081/10000 train_time:701999ms step_avg:227.85ms +[2025-07-17 18:21:10] [Rank 0] step:3081/10000 train_time:701999ms step_avg:227.85ms +[2025-07-17 18:21:15] [Rank 0] step:3101/10000 train_time:706694ms step_avg:227.89ms +[2025-07-17 18:21:15] [Rank 0] step:3101/10000 train_time:706694ms step_avg:227.89ms +[2025-07-17 18:21:20] [Rank 0] step:3121/10000 train_time:711392ms step_avg:227.94ms +[2025-07-17 18:21:20] [Rank 0] step:3121/10000 train_time:711392ms step_avg:227.94ms +[2025-07-17 18:21:25] [Rank 0] PRINT: step:3125/10000 val_loss:4.3907 train_time:712807ms step_avg:228.10ms +[2025-07-17 18:21:25] [Rank 0] PRINT: step:3125/10000 val_loss:4.3907 train_time:712807ms step_avg:228.10ms +[2025-07-17 18:21:29] [Rank 0] step:3141/10000 train_time:716086ms step_avg:227.98ms +[2025-07-17 18:21:29] [Rank 0] step:3141/10000 train_time:716086ms step_avg:227.98ms +[2025-07-17 18:21:33] [Rank 0] step:3161/10000 train_time:720782ms step_avg:228.02ms +[2025-07-17 18:21:33] [Rank 0] step:3161/10000 train_time:720782ms step_avg:228.02ms +[2025-07-17 18:21:38] [Rank 0] step:3181/10000 train_time:725479ms step_avg:228.07ms +[2025-07-17 18:21:38] [Rank 0] step:3181/10000 train_time:725479ms step_avg:228.07ms +[2025-07-17 18:21:43] [Rank 0] step:3201/10000 train_time:730175ms step_avg:228.11ms +[2025-07-17 18:21:43] [Rank 0] step:3201/10000 train_time:730175ms step_avg:228.11ms +[2025-07-17 18:21:48] [Rank 0] step:3221/10000 train_time:734870ms step_avg:228.15ms +[2025-07-17 18:21:48] [Rank 0] step:3221/10000 train_time:734870ms step_avg:228.15ms +[2025-07-17 18:21:52] [Rank 0] step:3241/10000 train_time:739563ms step_avg:228.19ms +[2025-07-17 18:21:52] [Rank 0] step:3241/10000 train_time:739563ms step_avg:228.19ms +[2025-07-17 18:21:59] [Rank 0] PRINT: step:3250/10000 val_loss:4.2666 train_time:742150ms step_avg:228.35ms +[2025-07-17 18:21:59] [Rank 0] PRINT: step:3250/10000 val_loss:4.2666 train_time:742150ms step_avg:228.35ms +[2025-07-17 18:22:02] [Rank 0] 
step:3261/10000 train_time:744254ms step_avg:228.23ms +[2025-07-17 18:22:02] [Rank 0] step:3261/10000 train_time:744254ms step_avg:228.23ms +[2025-07-17 18:22:06] [Rank 0] step:3281/10000 train_time:748941ms step_avg:228.27ms +[2025-07-17 18:22:06] [Rank 0] step:3281/10000 train_time:748941ms step_avg:228.27ms +[2025-07-17 18:22:11] [Rank 0] step:3301/10000 train_time:753629ms step_avg:228.30ms +[2025-07-17 18:22:11] [Rank 0] step:3301/10000 train_time:753629ms step_avg:228.30ms +[2025-07-17 18:22:16] [Rank 0] step:3321/10000 train_time:758320ms step_avg:228.34ms +[2025-07-17 18:22:16] [Rank 0] step:3321/10000 train_time:758320ms step_avg:228.34ms +[2025-07-17 18:22:20] [Rank 0] step:3341/10000 train_time:763006ms step_avg:228.38ms +[2025-07-17 18:22:20] [Rank 0] step:3341/10000 train_time:763006ms step_avg:228.38ms +[2025-07-17 18:22:25] [Rank 0] step:3361/10000 train_time:767695ms step_avg:228.41ms +[2025-07-17 18:22:25] [Rank 0] step:3361/10000 train_time:767695ms step_avg:228.41ms +[2025-07-17 18:22:33] [Rank 0] PRINT: step:3375/10000 val_loss:4.3917 train_time:771446ms step_avg:228.58ms +[2025-07-17 18:22:33] [Rank 0] PRINT: step:3375/10000 val_loss:4.3917 train_time:771446ms step_avg:228.58ms +[2025-07-17 18:22:34] [Rank 0] step:3381/10000 train_time:772378ms step_avg:228.45ms +[2025-07-17 18:22:34] [Rank 0] step:3381/10000 train_time:772378ms step_avg:228.45ms +[2025-07-17 18:22:39] [Rank 0] step:3401/10000 train_time:777059ms step_avg:228.48ms +[2025-07-17 18:22:39] [Rank 0] step:3401/10000 train_time:777059ms step_avg:228.48ms +[2025-07-17 18:22:44] [Rank 0] step:3421/10000 train_time:781740ms step_avg:228.51ms +[2025-07-17 18:22:44] [Rank 0] step:3421/10000 train_time:781740ms step_avg:228.51ms +[2025-07-17 18:22:48] [Rank 0] step:3441/10000 train_time:786418ms step_avg:228.54ms +[2025-07-17 18:22:48] [Rank 0] step:3441/10000 train_time:786418ms step_avg:228.54ms +[2025-07-17 18:22:53] [Rank 0] step:3461/10000 train_time:791100ms step_avg:228.58ms 
+[2025-07-17 18:22:53] [Rank 0] step:3461/10000 train_time:791100ms step_avg:228.58ms +[2025-07-17 18:22:58] [Rank 0] step:3481/10000 train_time:795777ms step_avg:228.61ms +[2025-07-17 18:22:58] [Rank 0] step:3481/10000 train_time:795777ms step_avg:228.61ms +[2025-07-17 18:23:07] [Rank 0] PRINT: step:3500/10000 val_loss:4.3644 train_time:800688ms step_avg:228.77ms +[2025-07-17 18:23:07] [Rank 0] PRINT: step:3500/10000 val_loss:4.3644 train_time:800688ms step_avg:228.77ms +[2025-07-17 18:23:07] [Rank 0] step:3501/10000 train_time:800702ms step_avg:228.71ms +[2025-07-17 18:23:07] [Rank 0] step:3501/10000 train_time:800702ms step_avg:228.71ms +[2025-07-17 18:23:12] [Rank 0] step:3521/10000 train_time:805130ms step_avg:228.67ms +[2025-07-17 18:23:12] [Rank 0] step:3521/10000 train_time:805130ms step_avg:228.67ms +[2025-07-17 18:23:16] [Rank 0] step:3541/10000 train_time:809807ms step_avg:228.69ms +[2025-07-17 18:23:16] [Rank 0] step:3541/10000 train_time:809807ms step_avg:228.69ms +[2025-07-17 18:23:21] [Rank 0] step:3561/10000 train_time:814481ms step_avg:228.72ms +[2025-07-17 18:23:21] [Rank 0] step:3561/10000 train_time:814481ms step_avg:228.72ms +[2025-07-17 18:23:26] [Rank 0] step:3581/10000 train_time:819155ms step_avg:228.75ms +[2025-07-17 18:23:26] [Rank 0] step:3581/10000 train_time:819155ms step_avg:228.75ms +[2025-07-17 18:23:30] [Rank 0] step:3601/10000 train_time:823833ms step_avg:228.78ms +[2025-07-17 18:23:30] [Rank 0] step:3601/10000 train_time:823833ms step_avg:228.78ms +[2025-07-17 18:23:35] [Rank 0] step:3621/10000 train_time:828507ms step_avg:228.81ms +[2025-07-17 18:23:35] [Rank 0] step:3621/10000 train_time:828507ms step_avg:228.81ms +[2025-07-17 18:23:40] [Rank 0] PRINT: step:3625/10000 val_loss:4.3413 train_time:829915ms step_avg:228.94ms +[2025-07-17 18:23:40] [Rank 0] PRINT: step:3625/10000 val_loss:4.3413 train_time:829915ms step_avg:228.94ms +[2025-07-17 18:23:44] [Rank 0] step:3641/10000 train_time:833184ms step_avg:228.83ms +[2025-07-17 
18:23:44] [Rank 0] step:3641/10000 train_time:833184ms step_avg:228.83ms +[2025-07-17 18:23:48] [Rank 0] step:3661/10000 train_time:837865ms step_avg:228.86ms +[2025-07-17 18:23:48] [Rank 0] step:3661/10000 train_time:837865ms step_avg:228.86ms +[2025-07-17 18:23:53] [Rank 0] step:3681/10000 train_time:842545ms step_avg:228.89ms +[2025-07-17 18:23:53] [Rank 0] step:3681/10000 train_time:842545ms step_avg:228.89ms +[2025-07-17 18:23:58] [Rank 0] step:3701/10000 train_time:847226ms step_avg:228.92ms +[2025-07-17 18:23:58] [Rank 0] step:3701/10000 train_time:847226ms step_avg:228.92ms +[2025-07-17 18:24:02] [Rank 0] step:3721/10000 train_time:851971ms step_avg:228.96ms +[2025-07-17 18:24:02] [Rank 0] step:3721/10000 train_time:851971ms step_avg:228.96ms +[2025-07-17 18:24:07] [Rank 0] step:3741/10000 train_time:856741ms step_avg:229.01ms +[2025-07-17 18:24:07] [Rank 0] step:3741/10000 train_time:856741ms step_avg:229.01ms +[2025-07-17 18:24:13] [Rank 0] PRINT: step:3750/10000 val_loss:4.2666 train_time:859367ms step_avg:229.16ms +[2025-07-17 18:24:13] [Rank 0] PRINT: step:3750/10000 val_loss:4.2666 train_time:859367ms step_avg:229.16ms +[2025-07-17 18:24:16] [Rank 0] step:3761/10000 train_time:861511ms step_avg:229.06ms +[2025-07-17 18:24:16] [Rank 0] step:3761/10000 train_time:861511ms step_avg:229.06ms +[2025-07-17 18:24:21] [Rank 0] step:3781/10000 train_time:866285ms step_avg:229.12ms +[2025-07-17 18:24:21] [Rank 0] step:3781/10000 train_time:866285ms step_avg:229.12ms +[2025-07-17 18:24:25] [Rank 0] step:3801/10000 train_time:871060ms step_avg:229.17ms +[2025-07-17 18:24:25] [Rank 0] step:3801/10000 train_time:871060ms step_avg:229.17ms +[2025-07-17 18:24:30] [Rank 0] step:3821/10000 train_time:875839ms step_avg:229.22ms +[2025-07-17 18:24:30] [Rank 0] step:3821/10000 train_time:875839ms step_avg:229.22ms +[2025-07-17 18:24:35] [Rank 0] step:3841/10000 train_time:880619ms step_avg:229.27ms +[2025-07-17 18:24:35] [Rank 0] step:3841/10000 train_time:880619ms 
step_avg:229.27ms +[2025-07-17 18:24:40] [Rank 0] step:3861/10000 train_time:885400ms step_avg:229.32ms +[2025-07-17 18:24:40] [Rank 0] step:3861/10000 train_time:885400ms step_avg:229.32ms +[2025-07-17 18:24:48] [Rank 0] PRINT: step:3875/10000 val_loss:4.4313 train_time:889231ms step_avg:229.48ms +[2025-07-17 18:24:48] [Rank 0] PRINT: step:3875/10000 val_loss:4.4313 train_time:889231ms step_avg:229.48ms +[2025-07-17 18:24:49] [Rank 0] step:3881/10000 train_time:890183ms step_avg:229.37ms +[2025-07-17 18:24:49] [Rank 0] step:3881/10000 train_time:890183ms step_avg:229.37ms +[2025-07-17 18:24:54] [Rank 0] step:3901/10000 train_time:894963ms step_avg:229.42ms +[2025-07-17 18:24:54] [Rank 0] step:3901/10000 train_time:894963ms step_avg:229.42ms +[2025-07-17 18:24:59] [Rank 0] step:3921/10000 train_time:899743ms step_avg:229.47ms +[2025-07-17 18:24:59] [Rank 0] step:3921/10000 train_time:899743ms step_avg:229.47ms +[2025-07-17 18:25:04] [Rank 0] step:3941/10000 train_time:904525ms step_avg:229.52ms +[2025-07-17 18:25:04] [Rank 0] step:3941/10000 train_time:904525ms step_avg:229.52ms +[2025-07-17 18:25:08] [Rank 0] step:3961/10000 train_time:909311ms step_avg:229.57ms +[2025-07-17 18:25:08] [Rank 0] step:3961/10000 train_time:909311ms step_avg:229.57ms +[2025-07-17 18:25:13] [Rank 0] step:3981/10000 train_time:914096ms step_avg:229.61ms +[2025-07-17 18:25:13] [Rank 0] step:3981/10000 train_time:914096ms step_avg:229.61ms +[2025-07-17 18:25:22] [Rank 0] PRINT: step:4000/10000 val_loss:4.3865 train_time:919118ms step_avg:229.78ms +[2025-07-17 18:25:22] [Rank 0] PRINT: step:4000/10000 val_loss:4.3865 train_time:919118ms step_avg:229.78ms +[2025-07-17 18:25:23] [Rank 0] step:4001/10000 train_time:919131ms step_avg:229.73ms +[2025-07-17 18:25:23] [Rank 0] step:4001/10000 train_time:919131ms step_avg:229.73ms +[2025-07-17 18:25:27] [Rank 0] step:4021/10000 train_time:923661ms step_avg:229.71ms +[2025-07-17 18:25:27] [Rank 0] step:4021/10000 train_time:923661ms 
step_avg:229.71ms +[2025-07-17 18:25:32] [Rank 0] step:4041/10000 train_time:928443ms step_avg:229.76ms +[2025-07-17 18:25:32] [Rank 0] step:4041/10000 train_time:928443ms step_avg:229.76ms +[2025-07-17 18:25:37] [Rank 0] step:4061/10000 train_time:933226ms step_avg:229.80ms +[2025-07-17 18:25:37] [Rank 0] step:4061/10000 train_time:933226ms step_avg:229.80ms +[2025-07-17 18:25:42] [Rank 0] step:4081/10000 train_time:938007ms step_avg:229.85ms +[2025-07-17 18:25:42] [Rank 0] step:4081/10000 train_time:938007ms step_avg:229.85ms +[2025-07-17 18:25:47] [Rank 0] step:4101/10000 train_time:942796ms step_avg:229.89ms +[2025-07-17 18:25:47] [Rank 0] step:4101/10000 train_time:942796ms step_avg:229.89ms +[2025-07-17 18:25:51] [Rank 0] step:4121/10000 train_time:947579ms step_avg:229.94ms +[2025-07-17 18:25:51] [Rank 0] step:4121/10000 train_time:947579ms step_avg:229.94ms +[2025-07-17 18:25:57] [Rank 0] PRINT: step:4125/10000 val_loss:4.3528 train_time:949019ms step_avg:230.07ms +[2025-07-17 18:25:57] [Rank 0] PRINT: step:4125/10000 val_loss:4.3528 train_time:949019ms step_avg:230.07ms +[2025-07-17 18:26:01] [Rank 0] step:4141/10000 train_time:952361ms step_avg:229.98ms +[2025-07-17 18:26:01] [Rank 0] step:4141/10000 train_time:952361ms step_avg:229.98ms +[2025-07-17 18:26:06] [Rank 0] step:4161/10000 train_time:957146ms step_avg:230.03ms +[2025-07-17 18:26:06] [Rank 0] step:4161/10000 train_time:957146ms step_avg:230.03ms +[2025-07-17 18:26:10] [Rank 0] step:4181/10000 train_time:961931ms step_avg:230.07ms +[2025-07-17 18:26:10] [Rank 0] step:4181/10000 train_time:961931ms step_avg:230.07ms +[2025-07-17 18:26:15] [Rank 0] step:4201/10000 train_time:966717ms step_avg:230.12ms +[2025-07-17 18:26:15] [Rank 0] step:4201/10000 train_time:966717ms step_avg:230.12ms +[2025-07-17 18:26:20] [Rank 0] step:4221/10000 train_time:971505ms step_avg:230.16ms +[2025-07-17 18:26:20] [Rank 0] step:4221/10000 train_time:971505ms step_avg:230.16ms +[2025-07-17 18:26:25] [Rank 0] 
step:4241/10000 train_time:976291ms step_avg:230.20ms +[2025-07-17 18:26:25] [Rank 0] step:4241/10000 train_time:976291ms step_avg:230.20ms +[2025-07-17 18:26:31] [Rank 0] PRINT: step:4250/10000 val_loss:4.2756 train_time:978926ms step_avg:230.34ms +[2025-07-17 18:26:31] [Rank 0] PRINT: step:4250/10000 val_loss:4.2756 train_time:978926ms step_avg:230.34ms +[2025-07-17 18:26:34] [Rank 0] step:4261/10000 train_time:981077ms step_avg:230.25ms +[2025-07-17 18:26:34] [Rank 0] step:4261/10000 train_time:981077ms step_avg:230.25ms +[2025-07-17 18:26:38] [Rank 0] step:4281/10000 train_time:985870ms step_avg:230.29ms +[2025-07-17 18:26:38] [Rank 0] step:4281/10000 train_time:985870ms step_avg:230.29ms +[2025-07-17 18:26:43] [Rank 0] step:4301/10000 train_time:990660ms step_avg:230.33ms +[2025-07-17 18:26:43] [Rank 0] step:4301/10000 train_time:990660ms step_avg:230.33ms +[2025-07-17 18:26:48] [Rank 0] step:4321/10000 train_time:995460ms step_avg:230.38ms +[2025-07-17 18:26:48] [Rank 0] step:4321/10000 train_time:995460ms step_avg:230.38ms +[2025-07-17 18:26:53] [Rank 0] step:4341/10000 train_time:1000251ms step_avg:230.42ms +[2025-07-17 18:26:53] [Rank 0] step:4341/10000 train_time:1000251ms step_avg:230.42ms +[2025-07-17 18:26:58] [Rank 0] step:4361/10000 train_time:1005046ms step_avg:230.46ms +[2025-07-17 18:26:58] [Rank 0] step:4361/10000 train_time:1005046ms step_avg:230.46ms +[2025-07-17 18:27:06] [Rank 0] PRINT: step:4375/10000 val_loss:4.4179 train_time:1008880ms step_avg:230.60ms +[2025-07-17 18:27:06] [Rank 0] PRINT: step:4375/10000 val_loss:4.4179 train_time:1008880ms step_avg:230.60ms +[2025-07-17 18:27:07] [Rank 0] step:4381/10000 train_time:1009837ms step_avg:230.50ms +[2025-07-17 18:27:07] [Rank 0] step:4381/10000 train_time:1009837ms step_avg:230.50ms +[2025-07-17 18:27:12] [Rank 0] step:4401/10000 train_time:1014623ms step_avg:230.54ms +[2025-07-17 18:27:12] [Rank 0] step:4401/10000 train_time:1014623ms step_avg:230.54ms +[2025-07-17 18:27:17] [Rank 0] 
step:4421/10000 train_time:1019410ms step_avg:230.58ms +[2025-07-17 18:27:17] [Rank 0] step:4421/10000 train_time:1019410ms step_avg:230.58ms +[2025-07-17 18:27:21] [Rank 0] step:4441/10000 train_time:1024195ms step_avg:230.62ms +[2025-07-17 18:27:21] [Rank 0] step:4441/10000 train_time:1024195ms step_avg:230.62ms +[2025-07-17 18:27:26] [Rank 0] step:4461/10000 train_time:1028996ms step_avg:230.66ms +[2025-07-17 18:27:26] [Rank 0] step:4461/10000 train_time:1028996ms step_avg:230.66ms +[2025-07-17 18:27:31] [Rank 0] step:4481/10000 train_time:1033799ms step_avg:230.71ms +[2025-07-17 18:27:31] [Rank 0] step:4481/10000 train_time:1033799ms step_avg:230.71ms +[2025-07-17 18:27:40] [Rank 0] PRINT: step:4500/10000 val_loss:4.3497 train_time:1038845ms step_avg:230.85ms +[2025-07-17 18:27:40] [Rank 0] PRINT: step:4500/10000 val_loss:4.3497 train_time:1038845ms step_avg:230.85ms +[2025-07-17 18:27:40] [Rank 0] step:4501/10000 train_time:1038857ms step_avg:230.81ms +[2025-07-17 18:27:40] [Rank 0] step:4501/10000 train_time:1038857ms step_avg:230.81ms +[2025-07-17 18:27:45] [Rank 0] step:4521/10000 train_time:1043407ms step_avg:230.79ms +[2025-07-17 18:27:45] [Rank 0] step:4521/10000 train_time:1043407ms step_avg:230.79ms +[2025-07-17 18:27:50] [Rank 0] step:4541/10000 train_time:1048213ms step_avg:230.83ms +[2025-07-17 18:27:50] [Rank 0] step:4541/10000 train_time:1048213ms step_avg:230.83ms +[2025-07-17 18:27:55] [Rank 0] step:4561/10000 train_time:1053013ms step_avg:230.87ms +[2025-07-17 18:27:55] [Rank 0] step:4561/10000 train_time:1053013ms step_avg:230.87ms +[2025-07-17 18:28:00] [Rank 0] step:4581/10000 train_time:1057819ms step_avg:230.91ms +[2025-07-17 18:28:00] [Rank 0] step:4581/10000 train_time:1057819ms step_avg:230.91ms +[2025-07-17 18:28:05] [Rank 0] step:4601/10000 train_time:1062627ms step_avg:230.96ms +[2025-07-17 18:28:05] [Rank 0] step:4601/10000 train_time:1062627ms step_avg:230.96ms +[2025-07-17 18:28:09] [Rank 0] step:4621/10000 train_time:1067432ms 
step_avg:231.00ms +[2025-07-17 18:28:09] [Rank 0] step:4621/10000 train_time:1067432ms step_avg:231.00ms +[2025-07-17 18:28:15] [Rank 0] PRINT: step:4625/10000 val_loss:4.3960 train_time:1068880ms step_avg:231.11ms +[2025-07-17 18:28:15] [Rank 0] PRINT: step:4625/10000 val_loss:4.3960 train_time:1068880ms step_avg:231.11ms +[2025-07-17 18:28:19] [Rank 0] step:4641/10000 train_time:1072238ms step_avg:231.04ms +[2025-07-17 18:28:19] [Rank 0] step:4641/10000 train_time:1072238ms step_avg:231.04ms +[2025-07-17 18:28:24] [Rank 0] step:4661/10000 train_time:1077050ms step_avg:231.08ms +[2025-07-17 18:28:24] [Rank 0] step:4661/10000 train_time:1077050ms step_avg:231.08ms +[2025-07-17 18:28:28] [Rank 0] step:4681/10000 train_time:1081857ms step_avg:231.12ms +[2025-07-17 18:28:28] [Rank 0] step:4681/10000 train_time:1081857ms step_avg:231.12ms +[2025-07-17 18:28:33] [Rank 0] step:4701/10000 train_time:1086665ms step_avg:231.16ms +[2025-07-17 18:28:33] [Rank 0] step:4701/10000 train_time:1086665ms step_avg:231.16ms +[2025-07-17 18:28:38] [Rank 0] step:4721/10000 train_time:1091468ms step_avg:231.19ms +[2025-07-17 18:28:38] [Rank 0] step:4721/10000 train_time:1091468ms step_avg:231.19ms +[2025-07-17 18:28:43] [Rank 0] step:4741/10000 train_time:1096276ms step_avg:231.23ms +[2025-07-17 18:28:43] [Rank 0] step:4741/10000 train_time:1096276ms step_avg:231.23ms +[2025-07-17 18:28:49] [Rank 0] PRINT: step:4750/10000 val_loss:4.4126 train_time:1098928ms step_avg:231.35ms +[2025-07-17 18:28:49] [Rank 0] PRINT: step:4750/10000 val_loss:4.4126 train_time:1098928ms step_avg:231.35ms +[2025-07-17 18:28:52] [Rank 0] step:4761/10000 train_time:1101085ms step_avg:231.27ms +[2025-07-17 18:28:52] [Rank 0] step:4761/10000 train_time:1101085ms step_avg:231.27ms +[2025-07-17 18:28:57] [Rank 0] step:4781/10000 train_time:1105889ms step_avg:231.31ms +[2025-07-17 18:28:57] [Rank 0] step:4781/10000 train_time:1105889ms step_avg:231.31ms +[2025-07-17 18:29:01] [Rank 0] step:4801/10000 
train_time:1110694ms step_avg:231.35ms +[2025-07-17 18:29:01] [Rank 0] step:4801/10000 train_time:1110694ms step_avg:231.35ms +[2025-07-17 18:29:06] [Rank 0] step:4821/10000 train_time:1115496ms step_avg:231.38ms +[2025-07-17 18:29:06] [Rank 0] step:4821/10000 train_time:1115496ms step_avg:231.38ms +[2025-07-17 18:29:11] [Rank 0] step:4841/10000 train_time:1120300ms step_avg:231.42ms +[2025-07-17 18:29:11] [Rank 0] step:4841/10000 train_time:1120300ms step_avg:231.42ms +[2025-07-17 18:29:16] [Rank 0] step:4861/10000 train_time:1125097ms step_avg:231.45ms +[2025-07-17 18:29:16] [Rank 0] step:4861/10000 train_time:1125097ms step_avg:231.45ms +[2025-07-17 18:29:23] [Rank 0] PRINT: step:4875/10000 val_loss:4.4175 train_time:1128942ms step_avg:231.58ms +[2025-07-17 18:29:23] [Rank 0] PRINT: step:4875/10000 val_loss:4.4175 train_time:1128942ms step_avg:231.58ms +[2025-07-17 18:29:25] [Rank 0] step:4881/10000 train_time:1129896ms step_avg:231.49ms +[2025-07-17 18:29:25] [Rank 0] step:4881/10000 train_time:1129896ms step_avg:231.49ms +[2025-07-17 18:29:30] [Rank 0] step:4901/10000 train_time:1134698ms step_avg:231.52ms +[2025-07-17 18:29:30] [Rank 0] step:4901/10000 train_time:1134698ms step_avg:231.52ms +[2025-07-17 18:29:34] [Rank 0] step:4921/10000 train_time:1139495ms step_avg:231.56ms +[2025-07-17 18:29:34] [Rank 0] step:4921/10000 train_time:1139495ms step_avg:231.56ms +[2025-07-17 18:29:39] [Rank 0] step:4941/10000 train_time:1144299ms step_avg:231.59ms +[2025-07-17 18:29:39] [Rank 0] step:4941/10000 train_time:1144299ms step_avg:231.59ms +[2025-07-17 18:29:44] [Rank 0] step:4961/10000 train_time:1149094ms step_avg:231.63ms +[2025-07-17 18:29:44] [Rank 0] step:4961/10000 train_time:1149094ms step_avg:231.63ms +[2025-07-17 18:29:49] [Rank 0] step:4981/10000 train_time:1153891ms step_avg:231.66ms +[2025-07-17 18:29:49] [Rank 0] step:4981/10000 train_time:1153891ms step_avg:231.66ms +[2025-07-17 18:29:58] [Rank 0] PRINT: step:5000/10000 val_loss:4.4511 
train_time:1158935ms step_avg:231.79ms +[2025-07-17 18:29:58] [Rank 0] PRINT: step:5000/10000 val_loss:4.4511 train_time:1158935ms step_avg:231.79ms +[2025-07-17 18:29:58] [Rank 0] step:5001/10000 train_time:1158947ms step_avg:231.74ms +[2025-07-17 18:29:58] [Rank 0] step:5001/10000 train_time:1158947ms step_avg:231.74ms +[2025-07-17 18:30:03] [Rank 0] step:5021/10000 train_time:1163495ms step_avg:231.73ms +[2025-07-17 18:30:03] [Rank 0] step:5021/10000 train_time:1163495ms step_avg:231.73ms +[2025-07-17 18:30:08] [Rank 0] step:5041/10000 train_time:1168296ms step_avg:231.76ms +[2025-07-17 18:30:08] [Rank 0] step:5041/10000 train_time:1168296ms step_avg:231.76ms +[2025-07-17 18:30:13] [Rank 0] step:5061/10000 train_time:1173093ms step_avg:231.79ms +[2025-07-17 18:30:13] [Rank 0] step:5061/10000 train_time:1173093ms step_avg:231.79ms +[2025-07-17 18:30:18] [Rank 0] step:5081/10000 train_time:1177891ms step_avg:231.82ms +[2025-07-17 18:30:18] [Rank 0] step:5081/10000 train_time:1177891ms step_avg:231.82ms +[2025-07-17 18:30:22] [Rank 0] step:5101/10000 train_time:1182693ms step_avg:231.86ms +[2025-07-17 18:30:22] [Rank 0] step:5101/10000 train_time:1182693ms step_avg:231.86ms +[2025-07-17 18:30:27] [Rank 0] step:5121/10000 train_time:1187493ms step_avg:231.89ms +[2025-07-17 18:30:27] [Rank 0] step:5121/10000 train_time:1187493ms step_avg:231.89ms +[2025-07-17 18:30:33] [Rank 0] PRINT: step:5125/10000 val_loss:4.4303 train_time:1188937ms step_avg:231.99ms +[2025-07-17 18:30:33] [Rank 0] PRINT: step:5125/10000 val_loss:4.4303 train_time:1188937ms step_avg:231.99ms +[2025-07-17 18:30:37] [Rank 0] step:5141/10000 train_time:1192292ms step_avg:231.92ms +[2025-07-17 18:30:37] [Rank 0] step:5141/10000 train_time:1192292ms step_avg:231.92ms +[2025-07-17 18:30:41] [Rank 0] step:5161/10000 train_time:1197094ms step_avg:231.95ms +[2025-07-17 18:30:41] [Rank 0] step:5161/10000 train_time:1197094ms step_avg:231.95ms +[2025-07-17 18:30:46] [Rank 0] step:5181/10000 
train_time:1201898ms step_avg:231.98ms +[2025-07-17 18:30:46] [Rank 0] step:5181/10000 train_time:1201898ms step_avg:231.98ms +[2025-07-17 18:30:51] [Rank 0] step:5201/10000 train_time:1206750ms step_avg:232.02ms +[2025-07-17 18:30:51] [Rank 0] step:5201/10000 train_time:1206750ms step_avg:232.02ms +[2025-07-17 18:30:56] [Rank 0] step:5221/10000 train_time:1211630ms step_avg:232.07ms +[2025-07-17 18:30:56] [Rank 0] step:5221/10000 train_time:1211630ms step_avg:232.07ms +[2025-07-17 18:31:01] [Rank 0] step:5241/10000 train_time:1216502ms step_avg:232.11ms +[2025-07-17 18:31:01] [Rank 0] step:5241/10000 train_time:1216502ms step_avg:232.11ms +[2025-07-17 18:31:08] [Rank 0] PRINT: step:5250/10000 val_loss:4.3025 train_time:1219187ms step_avg:232.23ms +[2025-07-17 18:31:08] [Rank 0] PRINT: step:5250/10000 val_loss:4.3025 train_time:1219187ms step_avg:232.23ms +[2025-07-17 18:31:10] [Rank 0] step:5261/10000 train_time:1221373ms step_avg:232.16ms +[2025-07-17 18:31:10] [Rank 0] step:5261/10000 train_time:1221373ms step_avg:232.16ms +[2025-07-17 18:31:15] [Rank 0] step:5281/10000 train_time:1226242ms step_avg:232.20ms +[2025-07-17 18:31:15] [Rank 0] step:5281/10000 train_time:1226242ms step_avg:232.20ms +[2025-07-17 18:31:20] [Rank 0] step:5301/10000 train_time:1231118ms step_avg:232.24ms +[2025-07-17 18:31:20] [Rank 0] step:5301/10000 train_time:1231118ms step_avg:232.24ms +[2025-07-17 18:31:25] [Rank 0] step:5321/10000 train_time:1235988ms step_avg:232.28ms +[2025-07-17 18:31:25] [Rank 0] step:5321/10000 train_time:1235988ms step_avg:232.28ms +[2025-07-17 18:31:30] [Rank 0] step:5341/10000 train_time:1240865ms step_avg:232.33ms +[2025-07-17 18:31:30] [Rank 0] step:5341/10000 train_time:1240865ms step_avg:232.33ms +[2025-07-17 18:31:35] [Rank 0] step:5361/10000 train_time:1245741ms step_avg:232.37ms +[2025-07-17 18:31:35] [Rank 0] step:5361/10000 train_time:1245741ms step_avg:232.37ms +[2025-07-17 18:31:42] [Rank 0] PRINT: step:5375/10000 val_loss:4.3160 
train_time:1249644ms step_avg:232.49ms +[2025-07-17 18:31:42] [Rank 0] PRINT: step:5375/10000 val_loss:4.3160 train_time:1249644ms step_avg:232.49ms +[2025-07-17 18:31:44] [Rank 0] step:5381/10000 train_time:1250617ms step_avg:232.41ms +[2025-07-17 18:31:44] [Rank 0] step:5381/10000 train_time:1250617ms step_avg:232.41ms +[2025-07-17 18:31:49] [Rank 0] step:5401/10000 train_time:1255497ms step_avg:232.46ms +[2025-07-17 18:31:49] [Rank 0] step:5401/10000 train_time:1255497ms step_avg:232.46ms +[2025-07-17 18:31:54] [Rank 0] step:5421/10000 train_time:1260384ms step_avg:232.50ms +[2025-07-17 18:31:54] [Rank 0] step:5421/10000 train_time:1260384ms step_avg:232.50ms +[2025-07-17 18:31:59] [Rank 0] step:5441/10000 train_time:1265260ms step_avg:232.54ms +[2025-07-17 18:31:59] [Rank 0] step:5441/10000 train_time:1265260ms step_avg:232.54ms +[2025-07-17 18:32:03] [Rank 0] step:5461/10000 train_time:1270144ms step_avg:232.58ms +[2025-07-17 18:32:03] [Rank 0] step:5461/10000 train_time:1270144ms step_avg:232.58ms +[2025-07-17 18:32:08] [Rank 0] step:5481/10000 train_time:1275031ms step_avg:232.63ms +[2025-07-17 18:32:08] [Rank 0] step:5481/10000 train_time:1275031ms step_avg:232.63ms +[2025-07-17 18:32:18] [Rank 0] PRINT: step:5500/10000 val_loss:4.5281 train_time:1280156ms step_avg:232.76ms +[2025-07-17 18:32:18] [Rank 0] PRINT: step:5500/10000 val_loss:4.5281 train_time:1280156ms step_avg:232.76ms +[2025-07-17 18:32:18] [Rank 0] step:5501/10000 train_time:1280168ms step_avg:232.72ms +[2025-07-17 18:32:18] [Rank 0] step:5501/10000 train_time:1280168ms step_avg:232.72ms +[2025-07-17 18:32:23] [Rank 0] step:5521/10000 train_time:1284778ms step_avg:232.71ms +[2025-07-17 18:32:23] [Rank 0] step:5521/10000 train_time:1284778ms step_avg:232.71ms +[2025-07-17 18:32:28] [Rank 0] step:5541/10000 train_time:1289655ms step_avg:232.75ms +[2025-07-17 18:32:28] [Rank 0] step:5541/10000 train_time:1289655ms step_avg:232.75ms +[2025-07-17 18:32:33] [Rank 0] step:5561/10000 
train_time:1294531ms step_avg:232.79ms +[2025-07-17 18:32:33] [Rank 0] step:5561/10000 train_time:1294531ms step_avg:232.79ms +[2025-07-17 18:32:37] [Rank 0] step:5581/10000 train_time:1299399ms step_avg:232.83ms +[2025-07-17 18:32:37] [Rank 0] step:5581/10000 train_time:1299399ms step_avg:232.83ms +[2025-07-17 18:32:42] [Rank 0] step:5601/10000 train_time:1304280ms step_avg:232.87ms +[2025-07-17 18:32:42] [Rank 0] step:5601/10000 train_time:1304280ms step_avg:232.87ms +[2025-07-17 18:32:47] [Rank 0] step:5621/10000 train_time:1309152ms step_avg:232.90ms +[2025-07-17 18:32:47] [Rank 0] step:5621/10000 train_time:1309152ms step_avg:232.90ms +[2025-07-17 18:32:53] [Rank 0] PRINT: step:5625/10000 val_loss:4.4211 train_time:1310616ms step_avg:233.00ms +[2025-07-17 18:32:53] [Rank 0] PRINT: step:5625/10000 val_loss:4.4211 train_time:1310616ms step_avg:233.00ms +[2025-07-17 18:32:57] [Rank 0] step:5641/10000 train_time:1314023ms step_avg:232.94ms +[2025-07-17 18:32:57] [Rank 0] step:5641/10000 train_time:1314023ms step_avg:232.94ms +[2025-07-17 18:33:02] [Rank 0] step:5661/10000 train_time:1318900ms step_avg:232.98ms +[2025-07-17 18:33:02] [Rank 0] step:5661/10000 train_time:1318900ms step_avg:232.98ms +[2025-07-17 18:33:06] [Rank 0] step:5681/10000 train_time:1323783ms step_avg:233.02ms +[2025-07-17 18:33:06] [Rank 0] step:5681/10000 train_time:1323783ms step_avg:233.02ms +[2025-07-17 18:33:11] [Rank 0] step:5701/10000 train_time:1328657ms step_avg:233.06ms +[2025-07-17 18:33:11] [Rank 0] step:5701/10000 train_time:1328657ms step_avg:233.06ms +[2025-07-17 18:33:16] [Rank 0] step:5721/10000 train_time:1333530ms step_avg:233.09ms +[2025-07-17 18:33:16] [Rank 0] step:5721/10000 train_time:1333530ms step_avg:233.09ms +[2025-07-17 18:33:21] [Rank 0] step:5741/10000 train_time:1338411ms step_avg:233.13ms +[2025-07-17 18:33:21] [Rank 0] step:5741/10000 train_time:1338411ms step_avg:233.13ms +[2025-07-17 18:33:28] [Rank 0] PRINT: step:5750/10000 val_loss:4.4013 
train_time:1341096ms step_avg:233.23ms +[2025-07-17 18:33:28] [Rank 0] PRINT: step:5750/10000 val_loss:4.4013 train_time:1341096ms step_avg:233.23ms +[2025-07-17 18:33:31] [Rank 0] step:5761/10000 train_time:1343290ms step_avg:233.17ms +[2025-07-17 18:33:31] [Rank 0] step:5761/10000 train_time:1343290ms step_avg:233.17ms +[2025-07-17 18:33:36] [Rank 0] step:5781/10000 train_time:1348167ms step_avg:233.21ms +[2025-07-17 18:33:36] [Rank 0] step:5781/10000 train_time:1348167ms step_avg:233.21ms +[2025-07-17 18:33:40] [Rank 0] step:5801/10000 train_time:1353040ms step_avg:233.24ms +[2025-07-17 18:33:40] [Rank 0] step:5801/10000 train_time:1353040ms step_avg:233.24ms +[2025-07-17 18:33:45] [Rank 0] step:5821/10000 train_time:1357921ms step_avg:233.28ms +[2025-07-17 18:33:45] [Rank 0] step:5821/10000 train_time:1357921ms step_avg:233.28ms +[2025-07-17 18:33:50] [Rank 0] step:5841/10000 train_time:1362804ms step_avg:233.32ms +[2025-07-17 18:33:50] [Rank 0] step:5841/10000 train_time:1362804ms step_avg:233.32ms +[2025-07-17 18:33:55] [Rank 0] step:5861/10000 train_time:1367679ms step_avg:233.35ms +[2025-07-17 18:33:55] [Rank 0] step:5861/10000 train_time:1367679ms step_avg:233.35ms +[2025-07-17 18:34:03] [Rank 0] PRINT: step:5875/10000 val_loss:4.3889 train_time:1371579ms step_avg:233.46ms +[2025-07-17 18:34:03] [Rank 0] PRINT: step:5875/10000 val_loss:4.3889 train_time:1371579ms step_avg:233.46ms +[2025-07-17 18:34:05] [Rank 0] step:5881/10000 train_time:1372550ms step_avg:233.39ms +[2025-07-17 18:34:05] [Rank 0] step:5881/10000 train_time:1372550ms step_avg:233.39ms +[2025-07-17 18:34:09] [Rank 0] step:5901/10000 train_time:1377431ms step_avg:233.42ms +[2025-07-17 18:34:09] [Rank 0] step:5901/10000 train_time:1377431ms step_avg:233.42ms +[2025-07-17 18:34:14] [Rank 0] step:5921/10000 train_time:1382299ms step_avg:233.46ms +[2025-07-17 18:34:14] [Rank 0] step:5921/10000 train_time:1382299ms step_avg:233.46ms +[2025-07-17 18:34:19] [Rank 0] step:5941/10000 
train_time:1387187ms step_avg:233.49ms +[2025-07-17 18:34:19] [Rank 0] step:5941/10000 train_time:1387187ms step_avg:233.49ms +[2025-07-17 18:34:24] [Rank 0] step:5961/10000 train_time:1392075ms step_avg:233.53ms +[2025-07-17 18:34:24] [Rank 0] step:5961/10000 train_time:1392075ms step_avg:233.53ms +[2025-07-17 18:34:29] [Rank 0] step:5981/10000 train_time:1396963ms step_avg:233.57ms +[2025-07-17 18:34:29] [Rank 0] step:5981/10000 train_time:1396963ms step_avg:233.57ms +[2025-07-17 18:34:38] [Rank 0] PRINT: step:6000/10000 val_loss:4.5355 train_time:1402096ms step_avg:233.68ms +[2025-07-17 18:34:38] [Rank 0] PRINT: step:6000/10000 val_loss:4.5355 train_time:1402096ms step_avg:233.68ms +[2025-07-17 18:34:39] [Rank 0] step:6001/10000 train_time:1402108ms step_avg:233.65ms +[2025-07-17 18:34:39] [Rank 0] step:6001/10000 train_time:1402108ms step_avg:233.65ms +[2025-07-17 18:34:43] [Rank 0] step:6021/10000 train_time:1406724ms step_avg:233.64ms +[2025-07-17 18:34:43] [Rank 0] step:6021/10000 train_time:1406724ms step_avg:233.64ms +[2025-07-17 18:34:48] [Rank 0] step:6041/10000 train_time:1411603ms step_avg:233.67ms +[2025-07-17 18:34:48] [Rank 0] step:6041/10000 train_time:1411603ms step_avg:233.67ms +[2025-07-17 18:34:53] [Rank 0] step:6061/10000 train_time:1416472ms step_avg:233.70ms +[2025-07-17 18:34:53] [Rank 0] step:6061/10000 train_time:1416472ms step_avg:233.70ms +[2025-07-17 18:34:58] [Rank 0] step:6081/10000 train_time:1421349ms step_avg:233.74ms +[2025-07-17 18:34:58] [Rank 0] step:6081/10000 train_time:1421349ms step_avg:233.74ms +[2025-07-17 18:35:03] [Rank 0] step:6101/10000 train_time:1426220ms step_avg:233.77ms +[2025-07-17 18:35:03] [Rank 0] step:6101/10000 train_time:1426220ms step_avg:233.77ms +[2025-07-17 18:35:08] [Rank 0] step:6121/10000 train_time:1431099ms step_avg:233.80ms +[2025-07-17 18:35:08] [Rank 0] step:6121/10000 train_time:1431099ms step_avg:233.80ms +[2025-07-17 18:35:13] [Rank 0] PRINT: step:6125/10000 val_loss:4.5770 
train_time:1432565ms step_avg:233.89ms +[2025-07-17 18:35:13] [Rank 0] PRINT: step:6125/10000 val_loss:4.5770 train_time:1432565ms step_avg:233.89ms +[2025-07-17 18:35:17] [Rank 0] step:6141/10000 train_time:1435980ms step_avg:233.83ms +[2025-07-17 18:35:17] [Rank 0] step:6141/10000 train_time:1435980ms step_avg:233.83ms +[2025-07-17 18:35:22] [Rank 0] step:6161/10000 train_time:1440862ms step_avg:233.87ms +[2025-07-17 18:35:22] [Rank 0] step:6161/10000 train_time:1440862ms step_avg:233.87ms +[2025-07-17 18:35:27] [Rank 0] step:6181/10000 train_time:1445751ms step_avg:233.90ms +[2025-07-17 18:35:27] [Rank 0] step:6181/10000 train_time:1445751ms step_avg:233.90ms +[2025-07-17 18:35:32] [Rank 0] step:6201/10000 train_time:1450646ms step_avg:233.94ms +[2025-07-17 18:35:32] [Rank 0] step:6201/10000 train_time:1450646ms step_avg:233.94ms +[2025-07-17 18:35:36] [Rank 0] step:6221/10000 train_time:1455537ms step_avg:233.97ms +[2025-07-17 18:35:36] [Rank 0] step:6221/10000 train_time:1455537ms step_avg:233.97ms +[2025-07-17 18:35:41] [Rank 0] step:6241/10000 train_time:1460422ms step_avg:234.00ms +[2025-07-17 18:35:41] [Rank 0] step:6241/10000 train_time:1460422ms step_avg:234.00ms +[2025-07-17 18:35:48] [Rank 0] PRINT: step:6250/10000 val_loss:4.4838 train_time:1463115ms step_avg:234.10ms +[2025-07-17 18:35:48] [Rank 0] PRINT: step:6250/10000 val_loss:4.4838 train_time:1463115ms step_avg:234.10ms +[2025-07-17 18:35:51] [Rank 0] step:6261/10000 train_time:1465309ms step_avg:234.04ms +[2025-07-17 18:35:51] [Rank 0] step:6261/10000 train_time:1465309ms step_avg:234.04ms +[2025-07-17 18:35:56] [Rank 0] step:6281/10000 train_time:1470200ms step_avg:234.07ms +[2025-07-17 18:35:56] [Rank 0] step:6281/10000 train_time:1470200ms step_avg:234.07ms +[2025-07-17 18:36:01] [Rank 0] step:6301/10000 train_time:1475081ms step_avg:234.10ms +[2025-07-17 18:36:01] [Rank 0] step:6301/10000 train_time:1475081ms step_avg:234.10ms +[2025-07-17 18:36:06] [Rank 0] step:6321/10000 
train_time:1479970ms step_avg:234.14ms +[2025-07-17 18:36:06] [Rank 0] step:6321/10000 train_time:1479970ms step_avg:234.14ms +[2025-07-17 18:36:10] [Rank 0] step:6341/10000 train_time:1484861ms step_avg:234.17ms +[2025-07-17 18:36:10] [Rank 0] step:6341/10000 train_time:1484861ms step_avg:234.17ms +[2025-07-17 18:36:15] [Rank 0] step:6361/10000 train_time:1489739ms step_avg:234.20ms +[2025-07-17 18:36:15] [Rank 0] step:6361/10000 train_time:1489739ms step_avg:234.20ms +[2025-07-17 18:36:23] [Rank 0] PRINT: step:6375/10000 val_loss:4.4284 train_time:1493642ms step_avg:234.30ms +[2025-07-17 18:36:23] [Rank 0] PRINT: step:6375/10000 val_loss:4.4284 train_time:1493642ms step_avg:234.30ms +[2025-07-17 18:36:25] [Rank 0] step:6381/10000 train_time:1494620ms step_avg:234.23ms +[2025-07-17 18:36:25] [Rank 0] step:6381/10000 train_time:1494620ms step_avg:234.23ms +[2025-07-17 18:36:30] [Rank 0] step:6401/10000 train_time:1499494ms step_avg:234.26ms +[2025-07-17 18:36:30] [Rank 0] step:6401/10000 train_time:1499494ms step_avg:234.26ms +[2025-07-17 18:36:35] [Rank 0] step:6421/10000 train_time:1504369ms step_avg:234.29ms +[2025-07-17 18:36:35] [Rank 0] step:6421/10000 train_time:1504369ms step_avg:234.29ms +[2025-07-17 18:36:40] [Rank 0] step:6441/10000 train_time:1509253ms step_avg:234.32ms +[2025-07-17 18:36:40] [Rank 0] step:6441/10000 train_time:1509253ms step_avg:234.32ms +[2025-07-17 18:36:44] [Rank 0] step:6461/10000 train_time:1514148ms step_avg:234.35ms +[2025-07-17 18:36:44] [Rank 0] step:6461/10000 train_time:1514148ms step_avg:234.35ms +[2025-07-17 18:36:49] [Rank 0] step:6481/10000 train_time:1519031ms step_avg:234.38ms +[2025-07-17 18:36:49] [Rank 0] step:6481/10000 train_time:1519031ms step_avg:234.38ms +[2025-07-17 18:36:59] [Rank 0] PRINT: step:6500/10000 val_loss:4.3777 train_time:1524163ms step_avg:234.49ms +[2025-07-17 18:36:59] [Rank 0] PRINT: step:6500/10000 val_loss:4.3777 train_time:1524163ms step_avg:234.49ms +[2025-07-17 18:36:59] [Rank 0] 
step:6501/10000 train_time:1524175ms step_avg:234.45ms +[2025-07-17 18:36:59] [Rank 0] step:6501/10000 train_time:1524175ms step_avg:234.45ms +[2025-07-17 18:37:04] [Rank 0] step:6521/10000 train_time:1528795ms step_avg:234.44ms +[2025-07-17 18:37:04] [Rank 0] step:6521/10000 train_time:1528795ms step_avg:234.44ms +[2025-07-17 18:37:09] [Rank 0] step:6541/10000 train_time:1533678ms step_avg:234.47ms +[2025-07-17 18:37:09] [Rank 0] step:6541/10000 train_time:1533678ms step_avg:234.47ms +[2025-07-17 18:37:14] [Rank 0] step:6561/10000 train_time:1538577ms step_avg:234.50ms +[2025-07-17 18:37:14] [Rank 0] step:6561/10000 train_time:1538577ms step_avg:234.50ms +[2025-07-17 18:37:19] [Rank 0] step:6581/10000 train_time:1543465ms step_avg:234.53ms +[2025-07-17 18:37:19] [Rank 0] step:6581/10000 train_time:1543465ms step_avg:234.53ms +[2025-07-17 18:37:23] [Rank 0] step:6601/10000 train_time:1548362ms step_avg:234.56ms +[2025-07-17 18:37:23] [Rank 0] step:6601/10000 train_time:1548362ms step_avg:234.56ms +[2025-07-17 18:37:28] [Rank 0] step:6621/10000 train_time:1553248ms step_avg:234.59ms +[2025-07-17 18:37:28] [Rank 0] step:6621/10000 train_time:1553248ms step_avg:234.59ms +[2025-07-17 18:37:34] [Rank 0] PRINT: step:6625/10000 val_loss:4.4307 train_time:1554717ms step_avg:234.67ms +[2025-07-17 18:37:34] [Rank 0] PRINT: step:6625/10000 val_loss:4.4307 train_time:1554717ms step_avg:234.67ms +[2025-07-17 18:37:37] [Rank 0] step:6641/10000 train_time:1558127ms step_avg:234.62ms +[2025-07-17 18:37:37] [Rank 0] step:6641/10000 train_time:1558127ms step_avg:234.62ms +[2025-07-17 18:37:42] [Rank 0] step:6661/10000 train_time:1563012ms step_avg:234.65ms +[2025-07-17 18:37:42] [Rank 0] step:6661/10000 train_time:1563012ms step_avg:234.65ms +[2025-07-17 18:37:47] [Rank 0] step:6681/10000 train_time:1567941ms step_avg:234.69ms +[2025-07-17 18:37:47] [Rank 0] step:6681/10000 train_time:1567941ms step_avg:234.69ms +[2025-07-17 18:37:52] [Rank 0] step:6701/10000 train_time:1572877ms 
step_avg:234.72ms +[2025-07-17 18:37:52] [Rank 0] step:6701/10000 train_time:1572877ms step_avg:234.72ms +[2025-07-17 18:37:57] [Rank 0] step:6721/10000 train_time:1577832ms step_avg:234.76ms +[2025-07-17 18:37:57] [Rank 0] step:6721/10000 train_time:1577832ms step_avg:234.76ms +[2025-07-17 18:38:02] [Rank 0] step:6741/10000 train_time:1582792ms step_avg:234.80ms +[2025-07-17 18:38:02] [Rank 0] step:6741/10000 train_time:1582792ms step_avg:234.80ms +[2025-07-17 18:38:09] [Rank 0] PRINT: step:6750/10000 val_loss:4.3777 train_time:1585513ms step_avg:234.89ms +[2025-07-17 18:38:09] [Rank 0] PRINT: step:6750/10000 val_loss:4.3777 train_time:1585513ms step_avg:234.89ms +[2025-07-17 18:38:12] [Rank 0] step:6761/10000 train_time:1587735ms step_avg:234.84ms +[2025-07-17 18:38:12] [Rank 0] step:6761/10000 train_time:1587735ms step_avg:234.84ms +[2025-07-17 18:38:17] [Rank 0] step:6781/10000 train_time:1592678ms step_avg:234.87ms +[2025-07-17 18:38:17] [Rank 0] step:6781/10000 train_time:1592678ms step_avg:234.87ms +[2025-07-17 18:38:22] [Rank 0] step:6801/10000 train_time:1597628ms step_avg:234.91ms +[2025-07-17 18:38:22] [Rank 0] step:6801/10000 train_time:1597628ms step_avg:234.91ms +[2025-07-17 18:38:27] [Rank 0] step:6821/10000 train_time:1602574ms step_avg:234.95ms +[2025-07-17 18:38:27] [Rank 0] step:6821/10000 train_time:1602574ms step_avg:234.95ms +[2025-07-17 18:38:32] [Rank 0] step:6841/10000 train_time:1607521ms step_avg:234.98ms +[2025-07-17 18:38:32] [Rank 0] step:6841/10000 train_time:1607521ms step_avg:234.98ms +[2025-07-17 18:38:36] [Rank 0] step:6861/10000 train_time:1612461ms step_avg:235.02ms +[2025-07-17 18:38:36] [Rank 0] step:6861/10000 train_time:1612461ms step_avg:235.02ms +[2025-07-17 18:38:44] [Rank 0] PRINT: step:6875/10000 val_loss:4.5596 train_time:1616409ms step_avg:235.11ms +[2025-07-17 18:38:44] [Rank 0] PRINT: step:6875/10000 val_loss:4.5596 train_time:1616409ms step_avg:235.11ms +[2025-07-17 18:38:46] [Rank 0] step:6881/10000 
train_time:1617396ms step_avg:235.05ms +[2025-07-17 18:38:46] [Rank 0] step:6881/10000 train_time:1617396ms step_avg:235.05ms +[2025-07-17 18:38:50] [Rank 0] step:6901/10000 train_time:1622332ms step_avg:235.09ms +[2025-07-17 18:38:50] [Rank 0] step:6901/10000 train_time:1622332ms step_avg:235.09ms +[2025-07-17 18:38:55] [Rank 0] step:6921/10000 train_time:1627273ms step_avg:235.12ms +[2025-07-17 18:38:55] [Rank 0] step:6921/10000 train_time:1627273ms step_avg:235.12ms +[2025-07-17 18:39:00] [Rank 0] step:6941/10000 train_time:1632225ms step_avg:235.16ms +[2025-07-17 18:39:00] [Rank 0] step:6941/10000 train_time:1632225ms step_avg:235.16ms +[2025-07-17 18:39:05] [Rank 0] step:6961/10000 train_time:1637171ms step_avg:235.19ms +[2025-07-17 18:39:05] [Rank 0] step:6961/10000 train_time:1637171ms step_avg:235.19ms +[2025-07-17 18:39:10] [Rank 0] step:6981/10000 train_time:1642121ms step_avg:235.23ms +[2025-07-17 18:39:10] [Rank 0] step:6981/10000 train_time:1642121ms step_avg:235.23ms +[2025-07-17 18:39:19] [Rank 0] PRINT: step:7000/10000 val_loss:4.3946 train_time:1647310ms step_avg:235.33ms +[2025-07-17 18:39:19] [Rank 0] PRINT: step:7000/10000 val_loss:4.3946 train_time:1647310ms step_avg:235.33ms +[2025-07-17 18:39:20] [Rank 0] step:7001/10000 train_time:1647322ms step_avg:235.30ms +[2025-07-17 18:39:20] [Rank 0] step:7001/10000 train_time:1647322ms step_avg:235.30ms +[2025-07-17 18:39:24] [Rank 0] step:7021/10000 train_time:1652002ms step_avg:235.29ms +[2025-07-17 18:39:24] [Rank 0] step:7021/10000 train_time:1652002ms step_avg:235.29ms +[2025-07-17 18:39:29] [Rank 0] step:7041/10000 train_time:1656941ms step_avg:235.33ms +[2025-07-17 18:39:29] [Rank 0] step:7041/10000 train_time:1656941ms step_avg:235.33ms +[2025-07-17 18:39:34] [Rank 0] step:7061/10000 train_time:1661880ms step_avg:235.36ms +[2025-07-17 18:39:34] [Rank 0] step:7061/10000 train_time:1661880ms step_avg:235.36ms +[2025-07-17 18:39:39] [Rank 0] step:7081/10000 train_time:1666821ms step_avg:235.39ms 
+[2025-07-17 18:39:39] [Rank 0] step:7081/10000 train_time:1666821ms step_avg:235.39ms +[2025-07-17 18:39:44] [Rank 0] step:7101/10000 train_time:1671754ms step_avg:235.43ms +[2025-07-17 18:39:44] [Rank 0] step:7101/10000 train_time:1671754ms step_avg:235.43ms +[2025-07-17 18:39:49] [Rank 0] step:7121/10000 train_time:1676697ms step_avg:235.46ms +[2025-07-17 18:39:49] [Rank 0] step:7121/10000 train_time:1676697ms step_avg:235.46ms +[2025-07-17 18:39:54] [Rank 0] PRINT: step:7125/10000 val_loss:4.4829 train_time:1678178ms step_avg:235.53ms +[2025-07-17 18:39:54] [Rank 0] PRINT: step:7125/10000 val_loss:4.4829 train_time:1678178ms step_avg:235.53ms +[2025-07-17 18:39:58] [Rank 0] step:7141/10000 train_time:1681642ms step_avg:235.49ms +[2025-07-17 18:39:58] [Rank 0] step:7141/10000 train_time:1681642ms step_avg:235.49ms +[2025-07-17 18:40:03] [Rank 0] step:7161/10000 train_time:1686590ms step_avg:235.52ms +[2025-07-17 18:40:03] [Rank 0] step:7161/10000 train_time:1686590ms step_avg:235.52ms +[2025-07-17 18:40:08] [Rank 0] step:7181/10000 train_time:1691532ms step_avg:235.56ms +[2025-07-17 18:40:08] [Rank 0] step:7181/10000 train_time:1691532ms step_avg:235.56ms +[2025-07-17 18:40:13] [Rank 0] step:7201/10000 train_time:1696493ms step_avg:235.59ms +[2025-07-17 18:40:13] [Rank 0] step:7201/10000 train_time:1696493ms step_avg:235.59ms +[2025-07-17 18:40:18] [Rank 0] step:7221/10000 train_time:1701440ms step_avg:235.62ms +[2025-07-17 18:40:18] [Rank 0] step:7221/10000 train_time:1701440ms step_avg:235.62ms +[2025-07-17 18:40:23] [Rank 0] step:7241/10000 train_time:1706379ms step_avg:235.66ms +[2025-07-17 18:40:23] [Rank 0] step:7241/10000 train_time:1706379ms step_avg:235.66ms +[2025-07-17 18:40:30] [Rank 0] PRINT: step:7250/10000 val_loss:4.2101 train_time:1709104ms step_avg:235.74ms +[2025-07-17 18:40:30] [Rank 0] PRINT: step:7250/10000 val_loss:4.2101 train_time:1709104ms step_avg:235.74ms +[2025-07-17 18:40:32] [Rank 0] step:7261/10000 train_time:1711321ms 
step_avg:235.69ms +[2025-07-17 18:40:32] [Rank 0] step:7261/10000 train_time:1711321ms step_avg:235.69ms +[2025-07-17 18:40:37] [Rank 0] step:7281/10000 train_time:1716267ms step_avg:235.72ms +[2025-07-17 18:40:37] [Rank 0] step:7281/10000 train_time:1716267ms step_avg:235.72ms +[2025-07-17 18:40:42] [Rank 0] step:7301/10000 train_time:1721215ms step_avg:235.75ms +[2025-07-17 18:40:42] [Rank 0] step:7301/10000 train_time:1721215ms step_avg:235.75ms +[2025-07-17 18:40:47] [Rank 0] step:7321/10000 train_time:1726180ms step_avg:235.78ms +[2025-07-17 18:40:47] [Rank 0] step:7321/10000 train_time:1726180ms step_avg:235.78ms +[2025-07-17 18:40:52] [Rank 0] step:7341/10000 train_time:1731130ms step_avg:235.82ms +[2025-07-17 18:40:52] [Rank 0] step:7341/10000 train_time:1731130ms step_avg:235.82ms +[2025-07-17 18:40:57] [Rank 0] step:7361/10000 train_time:1736088ms step_avg:235.85ms +[2025-07-17 18:40:57] [Rank 0] step:7361/10000 train_time:1736088ms step_avg:235.85ms +[2025-07-17 18:41:05] [Rank 0] PRINT: step:7375/10000 val_loss:4.6327 train_time:1740056ms step_avg:235.94ms +[2025-07-17 18:41:05] [Rank 0] PRINT: step:7375/10000 val_loss:4.6327 train_time:1740056ms step_avg:235.94ms +[2025-07-17 18:41:07] [Rank 0] step:7381/10000 train_time:1741044ms step_avg:235.88ms +[2025-07-17 18:41:07] [Rank 0] step:7381/10000 train_time:1741044ms step_avg:235.88ms +[2025-07-17 18:41:11] [Rank 0] step:7401/10000 train_time:1746002ms step_avg:235.91ms +[2025-07-17 18:41:11] [Rank 0] step:7401/10000 train_time:1746002ms step_avg:235.91ms +[2025-07-17 18:41:16] [Rank 0] step:7421/10000 train_time:1750953ms step_avg:235.95ms +[2025-07-17 18:41:16] [Rank 0] step:7421/10000 train_time:1750953ms step_avg:235.95ms +[2025-07-17 18:41:21] [Rank 0] step:7441/10000 train_time:1755921ms step_avg:235.98ms +[2025-07-17 18:41:21] [Rank 0] step:7441/10000 train_time:1755921ms step_avg:235.98ms +[2025-07-17 18:41:26] [Rank 0] step:7461/10000 train_time:1760875ms step_avg:236.01ms +[2025-07-17 
18:41:26] [Rank 0] step:7461/10000 train_time:1760875ms step_avg:236.01ms +[2025-07-17 18:41:31] [Rank 0] step:7481/10000 train_time:1765836ms step_avg:236.04ms +[2025-07-17 18:41:31] [Rank 0] step:7481/10000 train_time:1765836ms step_avg:236.04ms +[2025-07-17 18:41:41] [Rank 0] PRINT: step:7500/10000 val_loss:4.5567 train_time:1771060ms step_avg:236.14ms +[2025-07-17 18:41:41] [Rank 0] PRINT: step:7500/10000 val_loss:4.5567 train_time:1771060ms step_avg:236.14ms +[2025-07-17 18:41:41] [Rank 0] step:7501/10000 train_time:1771076ms step_avg:236.11ms +[2025-07-17 18:41:41] [Rank 0] step:7501/10000 train_time:1771076ms step_avg:236.11ms +[2025-07-17 18:41:46] [Rank 0] step:7521/10000 train_time:1775776ms step_avg:236.11ms +[2025-07-17 18:41:46] [Rank 0] step:7521/10000 train_time:1775776ms step_avg:236.11ms +[2025-07-17 18:41:51] [Rank 0] step:7541/10000 train_time:1780740ms step_avg:236.14ms +[2025-07-17 18:41:51] [Rank 0] step:7541/10000 train_time:1780740ms step_avg:236.14ms +[2025-07-17 18:41:56] [Rank 0] step:7561/10000 train_time:1785707ms step_avg:236.17ms +[2025-07-17 18:41:56] [Rank 0] step:7561/10000 train_time:1785707ms step_avg:236.17ms +[2025-07-17 18:42:01] [Rank 0] step:7581/10000 train_time:1790679ms step_avg:236.21ms +[2025-07-17 18:42:01] [Rank 0] step:7581/10000 train_time:1790679ms step_avg:236.21ms +[2025-07-17 18:42:06] [Rank 0] step:7601/10000 train_time:1795655ms step_avg:236.24ms +[2025-07-17 18:42:06] [Rank 0] step:7601/10000 train_time:1795655ms step_avg:236.24ms +[2025-07-17 18:42:11] [Rank 0] step:7621/10000 train_time:1800639ms step_avg:236.27ms +[2025-07-17 18:42:11] [Rank 0] step:7621/10000 train_time:1800639ms step_avg:236.27ms +[2025-07-17 18:42:17] [Rank 0] PRINT: step:7625/10000 val_loss:4.4184 train_time:1802136ms step_avg:236.35ms +[2025-07-17 18:42:17] [Rank 0] PRINT: step:7625/10000 val_loss:4.4184 train_time:1802136ms step_avg:236.35ms +[2025-07-17 18:42:21] [Rank 0] step:7641/10000 train_time:1805605ms step_avg:236.30ms 
+[2025-07-17 18:42:21] [Rank 0] step:7641/10000 train_time:1805605ms step_avg:236.30ms +[2025-07-17 18:42:26] [Rank 0] step:7661/10000 train_time:1810581ms step_avg:236.34ms +[2025-07-17 18:42:26] [Rank 0] step:7661/10000 train_time:1810581ms step_avg:236.34ms +[2025-07-17 18:42:30] [Rank 0] step:7681/10000 train_time:1815565ms step_avg:236.37ms +[2025-07-17 18:42:30] [Rank 0] step:7681/10000 train_time:1815565ms step_avg:236.37ms +[2025-07-17 18:42:35] [Rank 0] step:7701/10000 train_time:1820529ms step_avg:236.40ms +[2025-07-17 18:42:35] [Rank 0] step:7701/10000 train_time:1820529ms step_avg:236.40ms +[2025-07-17 18:42:40] [Rank 0] step:7721/10000 train_time:1825498ms step_avg:236.43ms +[2025-07-17 18:42:40] [Rank 0] step:7721/10000 train_time:1825498ms step_avg:236.43ms +[2025-07-17 18:42:45] [Rank 0] step:7741/10000 train_time:1830460ms step_avg:236.46ms +[2025-07-17 18:42:45] [Rank 0] step:7741/10000 train_time:1830460ms step_avg:236.46ms +[2025-07-17 18:42:52] [Rank 0] PRINT: step:7750/10000 val_loss:4.6292 train_time:1833207ms step_avg:236.54ms +[2025-07-17 18:42:52] [Rank 0] PRINT: step:7750/10000 val_loss:4.6292 train_time:1833207ms step_avg:236.54ms +[2025-07-17 18:42:55] [Rank 0] step:7761/10000 train_time:1835437ms step_avg:236.49ms +[2025-07-17 18:42:55] [Rank 0] step:7761/10000 train_time:1835437ms step_avg:236.49ms +[2025-07-17 18:43:00] [Rank 0] step:7781/10000 train_time:1840401ms step_avg:236.52ms +[2025-07-17 18:43:00] [Rank 0] step:7781/10000 train_time:1840401ms step_avg:236.52ms +[2025-07-17 18:43:05] [Rank 0] step:7801/10000 train_time:1845368ms step_avg:236.56ms +[2025-07-17 18:43:05] [Rank 0] step:7801/10000 train_time:1845368ms step_avg:236.56ms +[2025-07-17 18:43:10] [Rank 0] step:7821/10000 train_time:1850329ms step_avg:236.58ms +[2025-07-17 18:43:10] [Rank 0] step:7821/10000 train_time:1850329ms step_avg:236.58ms +[2025-07-17 18:43:15] [Rank 0] step:7841/10000 train_time:1855294ms step_avg:236.61ms +[2025-07-17 18:43:15] [Rank 0] 
step:7841/10000 train_time:1855294ms step_avg:236.61ms +[2025-07-17 18:43:20] [Rank 0] step:7861/10000 train_time:1860246ms step_avg:236.64ms +[2025-07-17 18:43:20] [Rank 0] step:7861/10000 train_time:1860246ms step_avg:236.64ms +[2025-07-17 18:43:28] [Rank 0] PRINT: step:7875/10000 val_loss:4.4432 train_time:1864214ms step_avg:236.73ms +[2025-07-17 18:43:28] [Rank 0] PRINT: step:7875/10000 val_loss:4.4432 train_time:1864214ms step_avg:236.73ms +[2025-07-17 18:43:29] [Rank 0] step:7881/10000 train_time:1865201ms step_avg:236.67ms +[2025-07-17 18:43:29] [Rank 0] step:7881/10000 train_time:1865201ms step_avg:236.67ms +[2025-07-17 18:43:34] [Rank 0] step:7901/10000 train_time:1870166ms step_avg:236.70ms +[2025-07-17 18:43:34] [Rank 0] step:7901/10000 train_time:1870166ms step_avg:236.70ms +[2025-07-17 18:43:39] [Rank 0] step:7921/10000 train_time:1875133ms step_avg:236.73ms +[2025-07-17 18:43:39] [Rank 0] step:7921/10000 train_time:1875133ms step_avg:236.73ms +[2025-07-17 18:43:44] [Rank 0] step:7941/10000 train_time:1880105ms step_avg:236.76ms +[2025-07-17 18:43:44] [Rank 0] step:7941/10000 train_time:1880105ms step_avg:236.76ms +[2025-07-17 18:43:49] [Rank 0] step:7961/10000 train_time:1885087ms step_avg:236.79ms +[2025-07-17 18:43:49] [Rank 0] step:7961/10000 train_time:1885087ms step_avg:236.79ms +[2025-07-17 18:43:54] [Rank 0] step:7981/10000 train_time:1890048ms step_avg:236.82ms +[2025-07-17 18:43:54] [Rank 0] step:7981/10000 train_time:1890048ms step_avg:236.82ms +[2025-07-17 18:44:04] [Rank 0] PRINT: step:8000/10000 val_loss:4.5770 train_time:1895277ms step_avg:236.91ms +[2025-07-17 18:44:04] [Rank 0] PRINT: step:8000/10000 val_loss:4.5770 train_time:1895277ms step_avg:236.91ms +[2025-07-17 18:44:04] [Rank 0] step:8001/10000 train_time:1895289ms step_avg:236.88ms +[2025-07-17 18:44:04] [Rank 0] step:8001/10000 train_time:1895289ms step_avg:236.88ms +[2025-07-17 18:44:09] [Rank 0] step:8021/10000 train_time:1899988ms step_avg:236.88ms +[2025-07-17 18:44:09] 
[Rank 0] step:8021/10000 train_time:1899988ms step_avg:236.88ms +[2025-07-17 18:44:14] [Rank 0] step:8041/10000 train_time:1904974ms step_avg:236.91ms +[2025-07-17 18:44:14] [Rank 0] step:8041/10000 train_time:1904974ms step_avg:236.91ms +[2025-07-17 18:44:19] [Rank 0] step:8061/10000 train_time:1909935ms step_avg:236.94ms +[2025-07-17 18:44:19] [Rank 0] step:8061/10000 train_time:1909935ms step_avg:236.94ms +[2025-07-17 18:44:24] [Rank 0] step:8081/10000 train_time:1914905ms step_avg:236.96ms +[2025-07-17 18:44:24] [Rank 0] step:8081/10000 train_time:1914905ms step_avg:236.96ms +[2025-07-17 18:44:29] [Rank 0] step:8101/10000 train_time:1919864ms step_avg:236.99ms +[2025-07-17 18:44:29] [Rank 0] step:8101/10000 train_time:1919864ms step_avg:236.99ms +[2025-07-17 18:44:34] [Rank 0] step:8121/10000 train_time:1924828ms step_avg:237.02ms +[2025-07-17 18:44:34] [Rank 0] step:8121/10000 train_time:1924828ms step_avg:237.02ms +[2025-07-17 18:44:39] [Rank 0] PRINT: step:8125/10000 val_loss:4.5523 train_time:1926323ms step_avg:237.09ms +[2025-07-17 18:44:39] [Rank 0] PRINT: step:8125/10000 val_loss:4.5523 train_time:1926323ms step_avg:237.09ms +[2025-07-17 18:44:43] [Rank 0] step:8141/10000 train_time:1929798ms step_avg:237.05ms +[2025-07-17 18:44:43] [Rank 0] step:8141/10000 train_time:1929798ms step_avg:237.05ms +[2025-07-17 18:44:48] [Rank 0] step:8161/10000 train_time:1934795ms step_avg:237.08ms +[2025-07-17 18:44:48] [Rank 0] step:8161/10000 train_time:1934795ms step_avg:237.08ms +[2025-07-17 18:44:53] [Rank 0] step:8181/10000 train_time:1939825ms step_avg:237.11ms +[2025-07-17 18:44:53] [Rank 0] step:8181/10000 train_time:1939825ms step_avg:237.11ms +[2025-07-17 18:44:58] [Rank 0] step:8201/10000 train_time:1944838ms step_avg:237.15ms +[2025-07-17 18:44:58] [Rank 0] step:8201/10000 train_time:1944838ms step_avg:237.15ms +[2025-07-17 18:45:03] [Rank 0] step:8221/10000 train_time:1949865ms step_avg:237.18ms +[2025-07-17 18:45:03] [Rank 0] step:8221/10000 
train_time:1949865ms step_avg:237.18ms +[2025-07-17 18:45:08] [Rank 0] step:8241/10000 train_time:1954892ms step_avg:237.22ms +[2025-07-17 18:45:08] [Rank 0] step:8241/10000 train_time:1954892ms step_avg:237.22ms +[2025-07-17 18:45:15] [Rank 0] PRINT: step:8250/10000 val_loss:4.3240 train_time:1957662ms step_avg:237.29ms +[2025-07-17 18:45:15] [Rank 0] PRINT: step:8250/10000 val_loss:4.3240 train_time:1957662ms step_avg:237.29ms +[2025-07-17 18:45:18] [Rank 0] step:8261/10000 train_time:1959922ms step_avg:237.25ms +[2025-07-17 18:45:18] [Rank 0] step:8261/10000 train_time:1959922ms step_avg:237.25ms +[2025-07-17 18:45:23] [Rank 0] step:8281/10000 train_time:1964969ms step_avg:237.29ms +[2025-07-17 18:45:23] [Rank 0] step:8281/10000 train_time:1964969ms step_avg:237.29ms +[2025-07-17 18:45:28] [Rank 0] step:8301/10000 train_time:1969985ms step_avg:237.32ms +[2025-07-17 18:45:28] [Rank 0] step:8301/10000 train_time:1969985ms step_avg:237.32ms +[2025-07-17 18:45:33] [Rank 0] step:8321/10000 train_time:1975016ms step_avg:237.35ms +[2025-07-17 18:45:33] [Rank 0] step:8321/10000 train_time:1975016ms step_avg:237.35ms +[2025-07-17 18:45:38] [Rank 0] step:8341/10000 train_time:1980055ms step_avg:237.39ms +[2025-07-17 18:45:38] [Rank 0] step:8341/10000 train_time:1980055ms step_avg:237.39ms +[2025-07-17 18:45:43] [Rank 0] step:8361/10000 train_time:1985074ms step_avg:237.42ms +[2025-07-17 18:45:43] [Rank 0] step:8361/10000 train_time:1985074ms step_avg:237.42ms +[2025-07-17 18:45:51] [Rank 0] PRINT: step:8375/10000 val_loss:4.4098 train_time:1989095ms step_avg:237.50ms +[2025-07-17 18:45:51] [Rank 0] PRINT: step:8375/10000 val_loss:4.4098 train_time:1989095ms step_avg:237.50ms +[2025-07-17 18:45:53] [Rank 0] step:8381/10000 train_time:1990091ms step_avg:237.45ms +[2025-07-17 18:45:53] [Rank 0] step:8381/10000 train_time:1990091ms step_avg:237.45ms +[2025-07-17 18:45:58] [Rank 0] step:8401/10000 train_time:1995095ms step_avg:237.48ms +[2025-07-17 18:45:58] [Rank 0] 
step:8401/10000 train_time:1995095ms step_avg:237.48ms +[2025-07-17 18:46:03] [Rank 0] step:8421/10000 train_time:2000116ms step_avg:237.52ms +[2025-07-17 18:46:03] [Rank 0] step:8421/10000 train_time:2000116ms step_avg:237.52ms +[2025-07-17 18:46:08] [Rank 0] step:8441/10000 train_time:2005141ms step_avg:237.55ms +[2025-07-17 18:46:08] [Rank 0] step:8441/10000 train_time:2005141ms step_avg:237.55ms +[2025-07-17 18:46:13] [Rank 0] step:8461/10000 train_time:2010179ms step_avg:237.58ms +[2025-07-17 18:46:13] [Rank 0] step:8461/10000 train_time:2010179ms step_avg:237.58ms +[2025-07-17 18:46:18] [Rank 0] step:8481/10000 train_time:2015189ms step_avg:237.61ms +[2025-07-17 18:46:18] [Rank 0] step:8481/10000 train_time:2015189ms step_avg:237.61ms +[2025-07-17 18:46:27] [Rank 0] PRINT: step:8500/10000 val_loss:4.4858 train_time:2020472ms step_avg:237.70ms +[2025-07-17 18:46:27] [Rank 0] PRINT: step:8500/10000 val_loss:4.4858 train_time:2020472ms step_avg:237.70ms +[2025-07-17 18:46:28] [Rank 0] step:8501/10000 train_time:2020485ms step_avg:237.68ms +[2025-07-17 18:46:28] [Rank 0] step:8501/10000 train_time:2020485ms step_avg:237.68ms +[2025-07-17 18:46:33] [Rank 0] step:8521/10000 train_time:2025239ms step_avg:237.68ms +[2025-07-17 18:46:33] [Rank 0] step:8521/10000 train_time:2025239ms step_avg:237.68ms +[2025-07-17 18:46:38] [Rank 0] step:8541/10000 train_time:2030274ms step_avg:237.71ms +[2025-07-17 18:46:38] [Rank 0] step:8541/10000 train_time:2030274ms step_avg:237.71ms +[2025-07-17 18:46:43] [Rank 0] step:8561/10000 train_time:2035285ms step_avg:237.74ms +[2025-07-17 18:46:43] [Rank 0] step:8561/10000 train_time:2035285ms step_avg:237.74ms +[2025-07-17 18:46:48] [Rank 0] step:8581/10000 train_time:2040309ms step_avg:237.77ms +[2025-07-17 18:46:48] [Rank 0] step:8581/10000 train_time:2040309ms step_avg:237.77ms +[2025-07-17 18:46:53] [Rank 0] step:8601/10000 train_time:2045318ms step_avg:237.80ms +[2025-07-17 18:46:53] [Rank 0] step:8601/10000 train_time:2045318ms 
step_avg:237.80ms +[2025-07-17 18:46:58] [Rank 0] step:8621/10000 train_time:2050333ms step_avg:237.83ms +[2025-07-17 18:46:58] [Rank 0] step:8621/10000 train_time:2050333ms step_avg:237.83ms +[2025-07-17 18:47:03] [Rank 0] PRINT: step:8625/10000 val_loss:4.4501 train_time:2051839ms step_avg:237.89ms +[2025-07-17 18:47:03] [Rank 0] PRINT: step:8625/10000 val_loss:4.4501 train_time:2051839ms step_avg:237.89ms +[2025-07-17 18:47:07] [Rank 0] step:8641/10000 train_time:2055369ms step_avg:237.86ms +[2025-07-17 18:47:07] [Rank 0] step:8641/10000 train_time:2055369ms step_avg:237.86ms +[2025-07-17 18:47:12] [Rank 0] step:8661/10000 train_time:2060390ms step_avg:237.89ms +[2025-07-17 18:47:12] [Rank 0] step:8661/10000 train_time:2060390ms step_avg:237.89ms +[2025-07-17 18:47:17] [Rank 0] step:8681/10000 train_time:2065413ms step_avg:237.92ms +[2025-07-17 18:47:17] [Rank 0] step:8681/10000 train_time:2065413ms step_avg:237.92ms +[2025-07-17 18:47:22] [Rank 0] step:8701/10000 train_time:2070445ms step_avg:237.95ms +[2025-07-17 18:47:22] [Rank 0] step:8701/10000 train_time:2070445ms step_avg:237.95ms +[2025-07-17 18:47:27] [Rank 0] step:8721/10000 train_time:2075474ms step_avg:237.99ms +[2025-07-17 18:47:27] [Rank 0] step:8721/10000 train_time:2075474ms step_avg:237.99ms +[2025-07-17 18:47:32] [Rank 0] step:8741/10000 train_time:2080494ms step_avg:238.02ms +[2025-07-17 18:47:32] [Rank 0] step:8741/10000 train_time:2080494ms step_avg:238.02ms +[2025-07-17 18:47:39] [Rank 0] PRINT: step:8750/10000 val_loss:4.5507 train_time:2083252ms step_avg:238.09ms +[2025-07-17 18:47:39] [Rank 0] PRINT: step:8750/10000 val_loss:4.5507 train_time:2083252ms step_avg:238.09ms +[2025-07-17 18:47:42] [Rank 0] step:8761/10000 train_time:2085508ms step_avg:238.04ms +[2025-07-17 18:47:42] [Rank 0] step:8761/10000 train_time:2085508ms step_avg:238.04ms +[2025-07-17 18:47:47] [Rank 0] step:8781/10000 train_time:2090529ms step_avg:238.07ms +[2025-07-17 18:47:47] [Rank 0] step:8781/10000 
train_time:2090529ms step_avg:238.07ms +[2025-07-17 18:47:52] [Rank 0] step:8801/10000 train_time:2095545ms step_avg:238.10ms +[2025-07-17 18:47:52] [Rank 0] step:8801/10000 train_time:2095545ms step_avg:238.10ms +[2025-07-17 18:47:57] [Rank 0] step:8821/10000 train_time:2100576ms step_avg:238.13ms +[2025-07-17 18:47:57] [Rank 0] step:8821/10000 train_time:2100576ms step_avg:238.13ms +[2025-07-17 18:48:02] [Rank 0] step:8841/10000 train_time:2105617ms step_avg:238.17ms +[2025-07-17 18:48:02] [Rank 0] step:8841/10000 train_time:2105617ms step_avg:238.17ms +[2025-07-17 18:48:07] [Rank 0] step:8861/10000 train_time:2110639ms step_avg:238.19ms +[2025-07-17 18:48:07] [Rank 0] step:8861/10000 train_time:2110639ms step_avg:238.19ms +[2025-07-17 18:48:15] [Rank 0] PRINT: step:8875/10000 val_loss:4.5913 train_time:2114657ms step_avg:238.27ms +[2025-07-17 18:48:15] [Rank 0] PRINT: step:8875/10000 val_loss:4.5913 train_time:2114657ms step_avg:238.27ms +[2025-07-17 18:48:17] [Rank 0] step:8881/10000 train_time:2115657ms step_avg:238.22ms +[2025-07-17 18:48:17] [Rank 0] step:8881/10000 train_time:2115657ms step_avg:238.22ms +[2025-07-17 18:48:22] [Rank 0] step:8901/10000 train_time:2120670ms step_avg:238.25ms +[2025-07-17 18:48:22] [Rank 0] step:8901/10000 train_time:2120670ms step_avg:238.25ms +[2025-07-17 18:48:27] [Rank 0] step:8921/10000 train_time:2125689ms step_avg:238.28ms +[2025-07-17 18:48:27] [Rank 0] step:8921/10000 train_time:2125689ms step_avg:238.28ms +[2025-07-17 18:48:32] [Rank 0] step:8941/10000 train_time:2130711ms step_avg:238.31ms +[2025-07-17 18:48:32] [Rank 0] step:8941/10000 train_time:2130711ms step_avg:238.31ms +[2025-07-17 18:48:37] [Rank 0] step:8961/10000 train_time:2135735ms step_avg:238.34ms +[2025-07-17 18:48:37] [Rank 0] step:8961/10000 train_time:2135735ms step_avg:238.34ms +[2025-07-17 18:48:42] [Rank 0] step:8981/10000 train_time:2140760ms step_avg:238.37ms +[2025-07-17 18:48:42] [Rank 0] step:8981/10000 train_time:2140760ms step_avg:238.37ms 
+[2025-07-17 18:48:51] [Rank 0] PRINT: step:9000/10000 val_loss:4.4699 train_time:2146040ms step_avg:238.45ms +[2025-07-17 18:48:51] [Rank 0] PRINT: step:9000/10000 val_loss:4.4699 train_time:2146040ms step_avg:238.45ms +[2025-07-17 18:48:51] [Rank 0] step:9001/10000 train_time:2146052ms step_avg:238.42ms +[2025-07-17 18:48:51] [Rank 0] step:9001/10000 train_time:2146052ms step_avg:238.42ms +[2025-07-17 18:48:56] [Rank 0] step:9021/10000 train_time:2150803ms step_avg:238.42ms +[2025-07-17 18:48:56] [Rank 0] step:9021/10000 train_time:2150803ms step_avg:238.42ms +[2025-07-17 18:49:02] [Rank 0] step:9041/10000 train_time:2155849ms step_avg:238.45ms +[2025-07-17 18:49:02] [Rank 0] step:9041/10000 train_time:2155849ms step_avg:238.45ms +[2025-07-17 18:49:07] [Rank 0] step:9061/10000 train_time:2160869ms step_avg:238.48ms +[2025-07-17 18:49:07] [Rank 0] step:9061/10000 train_time:2160869ms step_avg:238.48ms +[2025-07-17 18:49:12] [Rank 0] step:9081/10000 train_time:2165919ms step_avg:238.51ms +[2025-07-17 18:49:12] [Rank 0] step:9081/10000 train_time:2165919ms step_avg:238.51ms +[2025-07-17 18:49:17] [Rank 0] step:9101/10000 train_time:2170967ms step_avg:238.54ms +[2025-07-17 18:49:17] [Rank 0] step:9101/10000 train_time:2170967ms step_avg:238.54ms +[2025-07-17 18:49:22] [Rank 0] step:9121/10000 train_time:2176008ms step_avg:238.57ms +[2025-07-17 18:49:22] [Rank 0] step:9121/10000 train_time:2176008ms step_avg:238.57ms +[2025-07-17 18:49:27] [Rank 0] PRINT: step:9125/10000 val_loss:4.5406 train_time:2177517ms step_avg:238.63ms +[2025-07-17 18:49:27] [Rank 0] PRINT: step:9125/10000 val_loss:4.5406 train_time:2177517ms step_avg:238.63ms +[2025-07-17 18:49:31] [Rank 0] step:9141/10000 train_time:2181028ms step_avg:238.60ms +[2025-07-17 18:49:31] [Rank 0] step:9141/10000 train_time:2181028ms step_avg:238.60ms +[2025-07-17 18:49:36] [Rank 0] step:9161/10000 train_time:2186089ms step_avg:238.63ms +[2025-07-17 18:49:36] [Rank 0] step:9161/10000 train_time:2186089ms 
step_avg:238.63ms +[2025-07-17 18:49:41] [Rank 0] step:9181/10000 train_time:2191118ms step_avg:238.66ms +[2025-07-17 18:49:41] [Rank 0] step:9181/10000 train_time:2191118ms step_avg:238.66ms +[2025-07-17 18:49:46] [Rank 0] step:9201/10000 train_time:2196148ms step_avg:238.69ms +[2025-07-17 18:49:46] [Rank 0] step:9201/10000 train_time:2196148ms step_avg:238.69ms +[2025-07-17 18:49:51] [Rank 0] step:9221/10000 train_time:2201206ms step_avg:238.72ms +[2025-07-17 18:49:51] [Rank 0] step:9221/10000 train_time:2201206ms step_avg:238.72ms +[2025-07-17 18:49:56] [Rank 0] step:9241/10000 train_time:2206251ms step_avg:238.75ms +[2025-07-17 18:49:56] [Rank 0] step:9241/10000 train_time:2206251ms step_avg:238.75ms +[2025-07-17 18:50:03] [Rank 0] PRINT: step:9250/10000 val_loss:4.6228 train_time:2209021ms step_avg:238.81ms +[2025-07-17 18:50:03] [Rank 0] PRINT: step:9250/10000 val_loss:4.6228 train_time:2209021ms step_avg:238.81ms +[2025-07-17 18:50:06] [Rank 0] step:9261/10000 train_time:2211290ms step_avg:238.77ms +[2025-07-17 18:50:06] [Rank 0] step:9261/10000 train_time:2211290ms step_avg:238.77ms +[2025-07-17 18:50:11] [Rank 0] step:9281/10000 train_time:2216298ms step_avg:238.80ms +[2025-07-17 18:50:11] [Rank 0] step:9281/10000 train_time:2216298ms step_avg:238.80ms +[2025-07-17 18:50:16] [Rank 0] step:9301/10000 train_time:2221327ms step_avg:238.83ms +[2025-07-17 18:50:16] [Rank 0] step:9301/10000 train_time:2221327ms step_avg:238.83ms +[2025-07-17 18:50:21] [Rank 0] step:9321/10000 train_time:2226374ms step_avg:238.86ms +[2025-07-17 18:50:21] [Rank 0] step:9321/10000 train_time:2226374ms step_avg:238.86ms +[2025-07-17 18:50:26] [Rank 0] step:9341/10000 train_time:2231397ms step_avg:238.88ms +[2025-07-17 18:50:26] [Rank 0] step:9341/10000 train_time:2231397ms step_avg:238.88ms +[2025-07-17 18:50:31] [Rank 0] step:9361/10000 train_time:2236429ms step_avg:238.91ms +[2025-07-17 18:50:31] [Rank 0] step:9361/10000 train_time:2236429ms step_avg:238.91ms +[2025-07-17 
18:50:40] [Rank 0] PRINT: step:9375/10000 val_loss:4.6393 train_time:2240452ms step_avg:238.98ms +[2025-07-17 18:50:40] [Rank 0] PRINT: step:9375/10000 val_loss:4.6393 train_time:2240452ms step_avg:238.98ms +[2025-07-17 18:50:41] [Rank 0] step:9381/10000 train_time:2241451ms step_avg:238.94ms +[2025-07-17 18:50:41] [Rank 0] step:9381/10000 train_time:2241451ms step_avg:238.94ms +[2025-07-17 18:50:46] [Rank 0] step:9401/10000 train_time:2246457ms step_avg:238.96ms +[2025-07-17 18:50:46] [Rank 0] step:9401/10000 train_time:2246457ms step_avg:238.96ms +[2025-07-17 18:50:51] [Rank 0] step:9421/10000 train_time:2251484ms step_avg:238.99ms +[2025-07-17 18:50:51] [Rank 0] step:9421/10000 train_time:2251484ms step_avg:238.99ms +[2025-07-17 18:50:56] [Rank 0] step:9441/10000 train_time:2256510ms step_avg:239.01ms +[2025-07-17 18:50:56] [Rank 0] step:9441/10000 train_time:2256510ms step_avg:239.01ms +[2025-07-17 18:51:01] [Rank 0] step:9461/10000 train_time:2261549ms step_avg:239.04ms +[2025-07-17 18:51:01] [Rank 0] step:9461/10000 train_time:2261549ms step_avg:239.04ms +[2025-07-17 18:51:06] [Rank 0] step:9481/10000 train_time:2266585ms step_avg:239.07ms +[2025-07-17 18:51:06] [Rank 0] step:9481/10000 train_time:2266585ms step_avg:239.07ms +[2025-07-17 18:51:16] [Rank 0] PRINT: step:9500/10000 val_loss:4.5798 train_time:2271898ms step_avg:239.15ms +[2025-07-17 18:51:16] [Rank 0] PRINT: step:9500/10000 val_loss:4.5798 train_time:2271898ms step_avg:239.15ms +[2025-07-17 18:51:16] [Rank 0] step:9501/10000 train_time:2271910ms step_avg:239.12ms +[2025-07-17 18:51:16] [Rank 0] step:9501/10000 train_time:2271910ms step_avg:239.12ms +[2025-07-17 18:51:21] [Rank 0] step:9521/10000 train_time:2276660ms step_avg:239.12ms +[2025-07-17 18:51:21] [Rank 0] step:9521/10000 train_time:2276660ms step_avg:239.12ms +[2025-07-17 18:51:26] [Rank 0] step:9541/10000 train_time:2281703ms step_avg:239.15ms +[2025-07-17 18:51:26] [Rank 0] step:9541/10000 train_time:2281703ms step_avg:239.15ms 
+[2025-07-17 18:51:31] [Rank 0] step:9561/10000 train_time:2286714ms step_avg:239.17ms +[2025-07-17 18:51:31] [Rank 0] step:9561/10000 train_time:2286714ms step_avg:239.17ms +[2025-07-17 18:51:36] [Rank 0] step:9581/10000 train_time:2291731ms step_avg:239.20ms +[2025-07-17 18:51:36] [Rank 0] step:9581/10000 train_time:2291731ms step_avg:239.20ms +[2025-07-17 18:51:41] [Rank 0] step:9601/10000 train_time:2296748ms step_avg:239.22ms +[2025-07-17 18:51:41] [Rank 0] step:9601/10000 train_time:2296748ms step_avg:239.22ms +[2025-07-17 18:51:46] [Rank 0] step:9621/10000 train_time:2301800ms step_avg:239.25ms +[2025-07-17 18:51:46] [Rank 0] step:9621/10000 train_time:2301800ms step_avg:239.25ms +[2025-07-17 18:51:52] [Rank 0] PRINT: step:9625/10000 val_loss:4.6615 train_time:2303304ms step_avg:239.30ms +[2025-07-17 18:51:52] [Rank 0] PRINT: step:9625/10000 val_loss:4.6615 train_time:2303304ms step_avg:239.30ms +[2025-07-17 18:51:56] [Rank 0] step:9641/10000 train_time:2306841ms step_avg:239.27ms +[2025-07-17 18:51:56] [Rank 0] step:9641/10000 train_time:2306841ms step_avg:239.27ms +[2025-07-17 18:52:01] [Rank 0] step:9661/10000 train_time:2311934ms step_avg:239.31ms +[2025-07-17 18:52:01] [Rank 0] step:9661/10000 train_time:2311934ms step_avg:239.31ms +[2025-07-17 18:52:06] [Rank 0] step:9681/10000 train_time:2317014ms step_avg:239.34ms +[2025-07-17 18:52:06] [Rank 0] step:9681/10000 train_time:2317014ms step_avg:239.34ms +[2025-07-17 18:52:11] [Rank 0] step:9701/10000 train_time:2322113ms step_avg:239.37ms +[2025-07-17 18:52:11] [Rank 0] step:9701/10000 train_time:2322113ms step_avg:239.37ms +[2025-07-17 18:52:16] [Rank 0] step:9721/10000 train_time:2327183ms step_avg:239.40ms +[2025-07-17 18:52:16] [Rank 0] step:9721/10000 train_time:2327183ms step_avg:239.40ms +[2025-07-17 18:52:21] [Rank 0] step:9741/10000 train_time:2332271ms step_avg:239.43ms +[2025-07-17 18:52:21] [Rank 0] step:9741/10000 train_time:2332271ms step_avg:239.43ms +[2025-07-17 18:52:28] [Rank 0] PRINT: 
step:9750/10000 val_loss:4.4826 train_time:2335063ms step_avg:239.49ms +[2025-07-17 18:52:28] [Rank 0] PRINT: step:9750/10000 val_loss:4.4826 train_time:2335063ms step_avg:239.49ms +[2025-07-17 18:52:31] [Rank 0] step:9761/10000 train_time:2337346ms step_avg:239.46ms +[2025-07-17 18:52:31] [Rank 0] step:9761/10000 train_time:2337346ms step_avg:239.46ms +[2025-07-17 18:52:36] [Rank 0] step:9781/10000 train_time:2342419ms step_avg:239.49ms +[2025-07-17 18:52:36] [Rank 0] step:9781/10000 train_time:2342419ms step_avg:239.49ms +[2025-07-17 18:52:41] [Rank 0] step:9801/10000 train_time:2347488ms step_avg:239.52ms +[2025-07-17 18:52:41] [Rank 0] step:9801/10000 train_time:2347488ms step_avg:239.52ms +[2025-07-17 18:52:46] [Rank 0] step:9821/10000 train_time:2352559ms step_avg:239.54ms +[2025-07-17 18:52:46] [Rank 0] step:9821/10000 train_time:2352559ms step_avg:239.54ms +[2025-07-17 18:52:51] [Rank 0] step:9841/10000 train_time:2357628ms step_avg:239.57ms +[2025-07-17 18:52:51] [Rank 0] step:9841/10000 train_time:2357628ms step_avg:239.57ms +[2025-07-17 18:52:56] [Rank 0] step:9861/10000 train_time:2362696ms step_avg:239.60ms +[2025-07-17 18:52:56] [Rank 0] step:9861/10000 train_time:2362696ms step_avg:239.60ms +[2025-07-17 18:53:05] [Rank 0] PRINT: step:9875/10000 val_loss:4.5249 train_time:2366749ms step_avg:239.67ms +[2025-07-17 18:53:05] [Rank 0] PRINT: step:9875/10000 val_loss:4.5249 train_time:2366749ms step_avg:239.67ms +[2025-07-17 18:53:06] [Rank 0] step:9881/10000 train_time:2367762ms step_avg:239.63ms +[2025-07-17 18:53:06] [Rank 0] step:9881/10000 train_time:2367762ms step_avg:239.63ms +[2025-07-17 18:53:11] [Rank 0] step:9901/10000 train_time:2372835ms step_avg:239.66ms +[2025-07-17 18:53:11] [Rank 0] step:9901/10000 train_time:2372835ms step_avg:239.66ms +[2025-07-17 18:53:16] [Rank 0] step:9921/10000 train_time:2377920ms step_avg:239.69ms +[2025-07-17 18:53:16] [Rank 0] step:9921/10000 train_time:2377920ms step_avg:239.69ms +[2025-07-17 18:53:21] [Rank 0] 
step:9941/10000 train_time:2383028ms step_avg:239.72ms +[2025-07-17 18:53:21] [Rank 0] step:9941/10000 train_time:2383028ms step_avg:239.72ms +[2025-07-17 18:53:26] [Rank 0] step:9961/10000 train_time:2388123ms step_avg:239.75ms +[2025-07-17 18:53:26] [Rank 0] step:9961/10000 train_time:2388123ms step_avg:239.75ms +[2025-07-17 18:53:32] [Rank 0] step:9981/10000 train_time:2393230ms step_avg:239.78ms +[2025-07-17 18:53:32] [Rank 0] step:9981/10000 train_time:2393230ms step_avg:239.78ms +[2025-07-17 18:53:36] [Rank 0] step:10000/10000 train_time:2398040ms step_avg:239.80ms +[2025-07-17 18:53:36] [Rank 0] step:10000/10000 train_time:2398040ms step_avg:239.80ms +[2025-07-17 18:53:41] [Rank 0] PRINT: step:10000/10000 val_loss:4.6853 train_time:2398550ms step_avg:239.85ms +[2025-07-17 18:53:41] [Rank 0] PRINT: step:10000/10000 val_loss:4.6853 train_time:2398550ms step_avg:239.85ms +[2025-07-17 18:53:41] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 18:53:41 2025 --- +[2025-07-17 18:53:41] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 18:53:41 2025 --- +[2025-07-17 18:53:41] [Rank 0] PRINT: Peak memory allocated: 31117 MiB reserved: 31436 MiB +[2025-07-17 18:53:41] [Rank 0] PRINT: Peak memory allocated: 31117 MiB reserved: 31436 MiB diff --git a/logs_norope/diff_modes/mode_2_param_norope_seed_42/config.json b/logs_norope/diff_modes/mode_2_param_norope_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..da980610fc450194871dcac78879840115613611 --- /dev/null +++ b/logs_norope/diff_modes/mode_2_param_norope_seed_42/config.json @@ -0,0 +1,22 @@ +{ + "cli_args": { + "unet": false, + "seed": 42, + "optimizer_mode": 2, + "model_parameterization": "norope" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 10485760, + "train_seq_len": 49152, + 
"val_seq_len": 262144, + "num_iterations": 10000, + "cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 125, + "save_checkpoint": false + }, + "run_uuid_for_log": "f76b7281-0602-4317-ae4b-2dc54dafcac6", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_norope/diff_modes/mode_2_param_norope_seed_42/training_log_f76b7281-0602-4317-ae4b-2dc54dafcac6.txt b/logs_norope/diff_modes/mode_2_param_norope_seed_42/training_log_f76b7281-0602-4317-ae4b-2dc54dafcac6.txt new file mode 100644 index 0000000000000000000000000000000000000000..444689635fe1188bc04a08826d7f7cad8d4f1f4e --- /dev/null +++ b/logs_norope/diff_modes/mode_2_param_norope_seed_42/training_log_f76b7281-0602-4317-ae4b-2dc54dafcac6.txt @@ -0,0 +1,2360 @@ +[2025-07-17 11:41:44] [Rank 0] PRINT: --- Script Start: Thu Jul 17 11:41:44 2025 --- +[2025-07-17 11:41:44] [Rank 0] PRINT: --- Script Start: Thu Jul 17 11:41:44 2025 --- +[2025-07-17 11:41:44] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=2, model_parameterization='norope') +[2025-07-17 11:41:44] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=2, model_parameterization='norope') +[2025-07-17 11:41:44] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 11:41:44] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 11:41:44] [Rank 0] PRINT: Using fixed seed: 42 +[2025-07-17 11:41:44] [Rank 0] PRINT: Using fixed seed: 42 +[2025-07-17 11:41:44] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_2_param_norope_seed_42 +[2025-07-17 11:41:44] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_2_param_norope_seed_42 +[2025-07-17 11:41:44] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path 
+import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * 
num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used 
for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + 
config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 11:41:44] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + 
num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + 
run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 11:41:44] [Rank 0] PRINT: Constructing model... +[2025-07-17 11:41:44] [Rank 0] PRINT: Constructing model... +[2025-07-17 11:41:47] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 11:41:47] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 11:41:47] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 11:41:47] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 11:41:47] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 11:41:47] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 11:41:47] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 11:41:47] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 11:41:47] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 2 +[2025-07-17 11:41:47] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 2 +[2025-07-17 11:41:47] [Rank 0] PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: 0.001). +[2025-07-17 11:41:47] [Rank 0] PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: 0.001). +[2025-07-17 11:41:47] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-07-17 11:41:47] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-07-17 11:41:47] [Rank 0] PRINT: Muon optimizer is active with 22 parameters. +[2025-07-17 11:41:47] [Rank 0] PRINT: Muon optimizer is active with 22 parameters. +[2025-07-17 11:41:47] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 11:41:47] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 11:41:47] [Rank 0] PRINT: Model compilation complete. +[2025-07-17 11:41:47] [Rank 0] PRINT: Model compilation complete. +[2025-07-17 11:41:47] [Rank 0] PRINT: Starting warmup... 
+[2025-07-17 11:41:47] [Rank 0] PRINT: Starting warmup... +[2025-07-17 11:42:58] [Rank 0] PRINT: Warmup complete. +[2025-07-17 11:42:58] [Rank 0] PRINT: Warmup complete. +[2025-07-17 11:42:58] [Rank 0] PRINT: Starting training... +[2025-07-17 11:42:58] [Rank 0] PRINT: Starting training... +[2025-07-17 11:43:12] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 11:43:12] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 11:43:17] [Rank 0] step:21/10000 train_time:4506ms step_avg:214.55ms +[2025-07-17 11:43:17] [Rank 0] step:21/10000 train_time:4506ms step_avg:214.55ms +[2025-07-17 11:43:22] [Rank 0] step:41/10000 train_time:8936ms step_avg:217.94ms +[2025-07-17 11:43:22] [Rank 0] step:41/10000 train_time:8936ms step_avg:217.94ms +[2025-07-17 11:43:26] [Rank 0] step:61/10000 train_time:13371ms step_avg:219.20ms +[2025-07-17 11:43:26] [Rank 0] step:61/10000 train_time:13371ms step_avg:219.20ms +[2025-07-17 11:43:31] [Rank 0] step:81/10000 train_time:17820ms step_avg:220.00ms +[2025-07-17 11:43:31] [Rank 0] step:81/10000 train_time:17820ms step_avg:220.00ms +[2025-07-17 11:43:35] [Rank 0] step:101/10000 train_time:22269ms step_avg:220.48ms +[2025-07-17 11:43:35] [Rank 0] step:101/10000 train_time:22269ms step_avg:220.48ms +[2025-07-17 11:43:40] [Rank 0] step:121/10000 train_time:26723ms step_avg:220.85ms +[2025-07-17 11:43:40] [Rank 0] step:121/10000 train_time:26723ms step_avg:220.85ms +[2025-07-17 11:43:45] [Rank 0] PRINT: step:125/10000 val_loss:5.2287 train_time:28063ms step_avg:224.50ms +[2025-07-17 11:43:45] [Rank 0] PRINT: step:125/10000 val_loss:5.2287 train_time:28063ms step_avg:224.50ms +[2025-07-17 11:43:48] [Rank 0] step:141/10000 train_time:31176ms step_avg:221.11ms +[2025-07-17 11:43:48] [Rank 0] step:141/10000 train_time:31176ms step_avg:221.11ms +[2025-07-17 11:43:53] [Rank 0] step:161/10000 train_time:35627ms step_avg:221.28ms +[2025-07-17 11:43:53] [Rank 0] step:161/10000 
train_time:35627ms step_avg:221.28ms +[2025-07-17 11:43:57] [Rank 0] step:181/10000 train_time:40090ms step_avg:221.49ms +[2025-07-17 11:43:57] [Rank 0] step:181/10000 train_time:40090ms step_avg:221.49ms +[2025-07-17 11:44:02] [Rank 0] step:201/10000 train_time:44546ms step_avg:221.62ms +[2025-07-17 11:44:02] [Rank 0] step:201/10000 train_time:44546ms step_avg:221.62ms +[2025-07-17 11:44:06] [Rank 0] step:221/10000 train_time:49012ms step_avg:221.78ms +[2025-07-17 11:44:06] [Rank 0] step:221/10000 train_time:49012ms step_avg:221.78ms +[2025-07-17 11:44:11] [Rank 0] step:241/10000 train_time:53472ms step_avg:221.88ms +[2025-07-17 11:44:11] [Rank 0] step:241/10000 train_time:53472ms step_avg:221.88ms +[2025-07-17 11:44:17] [Rank 0] PRINT: step:250/10000 val_loss:4.8427 train_time:55933ms step_avg:223.73ms +[2025-07-17 11:44:17] [Rank 0] PRINT: step:250/10000 val_loss:4.8427 train_time:55933ms step_avg:223.73ms +[2025-07-17 11:44:20] [Rank 0] step:261/10000 train_time:57938ms step_avg:221.98ms +[2025-07-17 11:44:20] [Rank 0] step:261/10000 train_time:57938ms step_avg:221.98ms +[2025-07-17 11:44:24] [Rank 0] step:281/10000 train_time:62395ms step_avg:222.05ms +[2025-07-17 11:44:24] [Rank 0] step:281/10000 train_time:62395ms step_avg:222.05ms +[2025-07-17 11:44:29] [Rank 0] step:301/10000 train_time:66853ms step_avg:222.10ms +[2025-07-17 11:44:29] [Rank 0] step:301/10000 train_time:66853ms step_avg:222.10ms +[2025-07-17 11:44:33] [Rank 0] step:321/10000 train_time:71313ms step_avg:222.16ms +[2025-07-17 11:44:33] [Rank 0] step:321/10000 train_time:71313ms step_avg:222.16ms +[2025-07-17 11:44:38] [Rank 0] step:341/10000 train_time:75781ms step_avg:222.23ms +[2025-07-17 11:44:38] [Rank 0] step:341/10000 train_time:75781ms step_avg:222.23ms +[2025-07-17 11:44:42] [Rank 0] step:361/10000 train_time:80246ms step_avg:222.29ms +[2025-07-17 11:44:42] [Rank 0] step:361/10000 train_time:80246ms step_avg:222.29ms +[2025-07-17 11:44:50] [Rank 0] PRINT: step:375/10000 
val_loss:4.6009 train_time:83814ms step_avg:223.50ms +[2025-07-17 11:44:50] [Rank 0] PRINT: step:375/10000 val_loss:4.6009 train_time:83814ms step_avg:223.50ms +[2025-07-17 11:44:51] [Rank 0] step:381/10000 train_time:84703ms step_avg:222.32ms +[2025-07-17 11:44:51] [Rank 0] step:381/10000 train_time:84703ms step_avg:222.32ms +[2025-07-17 11:44:55] [Rank 0] step:401/10000 train_time:89166ms step_avg:222.36ms +[2025-07-17 11:44:55] [Rank 0] step:401/10000 train_time:89166ms step_avg:222.36ms +[2025-07-17 11:45:00] [Rank 0] step:421/10000 train_time:93633ms step_avg:222.41ms +[2025-07-17 11:45:00] [Rank 0] step:421/10000 train_time:93633ms step_avg:222.41ms +[2025-07-17 11:45:04] [Rank 0] step:441/10000 train_time:98098ms step_avg:222.44ms +[2025-07-17 11:45:04] [Rank 0] step:441/10000 train_time:98098ms step_avg:222.44ms +[2025-07-17 11:45:09] [Rank 0] step:461/10000 train_time:102561ms step_avg:222.48ms +[2025-07-17 11:45:09] [Rank 0] step:461/10000 train_time:102561ms step_avg:222.48ms +[2025-07-17 11:45:13] [Rank 0] step:481/10000 train_time:107027ms step_avg:222.51ms +[2025-07-17 11:45:13] [Rank 0] step:481/10000 train_time:107027ms step_avg:222.51ms +[2025-07-17 11:45:22] [Rank 0] PRINT: step:500/10000 val_loss:4.4929 train_time:111720ms step_avg:223.44ms +[2025-07-17 11:45:22] [Rank 0] PRINT: step:500/10000 val_loss:4.4929 train_time:111720ms step_avg:223.44ms +[2025-07-17 11:45:22] [Rank 0] step:501/10000 train_time:111734ms step_avg:223.02ms +[2025-07-17 11:45:22] [Rank 0] step:501/10000 train_time:111734ms step_avg:223.02ms +[2025-07-17 11:45:27] [Rank 0] step:521/10000 train_time:115958ms step_avg:222.57ms +[2025-07-17 11:45:27] [Rank 0] step:521/10000 train_time:115958ms step_avg:222.57ms +[2025-07-17 11:45:31] [Rank 0] step:541/10000 train_time:120425ms step_avg:222.60ms +[2025-07-17 11:45:31] [Rank 0] step:541/10000 train_time:120425ms step_avg:222.60ms +[2025-07-17 11:45:35] [Rank 0] step:561/10000 train_time:124892ms step_avg:222.62ms +[2025-07-17 
11:45:35] [Rank 0] step:561/10000 train_time:124892ms step_avg:222.62ms +[2025-07-17 11:45:40] [Rank 0] step:581/10000 train_time:129362ms step_avg:222.65ms +[2025-07-17 11:45:40] [Rank 0] step:581/10000 train_time:129362ms step_avg:222.65ms +[2025-07-17 11:45:44] [Rank 0] step:601/10000 train_time:133830ms step_avg:222.68ms +[2025-07-17 11:45:44] [Rank 0] step:601/10000 train_time:133830ms step_avg:222.68ms +[2025-07-17 11:45:49] [Rank 0] step:621/10000 train_time:138299ms step_avg:222.70ms +[2025-07-17 11:45:49] [Rank 0] step:621/10000 train_time:138299ms step_avg:222.70ms +[2025-07-17 11:45:54] [Rank 0] PRINT: step:625/10000 val_loss:4.4119 train_time:139644ms step_avg:223.43ms +[2025-07-17 11:45:54] [Rank 0] PRINT: step:625/10000 val_loss:4.4119 train_time:139644ms step_avg:223.43ms +[2025-07-17 11:45:58] [Rank 0] step:641/10000 train_time:142767ms step_avg:222.73ms +[2025-07-17 11:45:58] [Rank 0] step:641/10000 train_time:142767ms step_avg:222.73ms +[2025-07-17 11:46:02] [Rank 0] step:661/10000 train_time:147239ms step_avg:222.75ms +[2025-07-17 11:46:02] [Rank 0] step:661/10000 train_time:147239ms step_avg:222.75ms +[2025-07-17 11:46:07] [Rank 0] step:681/10000 train_time:151709ms step_avg:222.77ms +[2025-07-17 11:46:07] [Rank 0] step:681/10000 train_time:151709ms step_avg:222.77ms +[2025-07-17 11:46:11] [Rank 0] step:701/10000 train_time:156180ms step_avg:222.80ms +[2025-07-17 11:46:11] [Rank 0] step:701/10000 train_time:156180ms step_avg:222.80ms +[2025-07-17 11:46:16] [Rank 0] step:721/10000 train_time:160649ms step_avg:222.81ms +[2025-07-17 11:46:16] [Rank 0] step:721/10000 train_time:160649ms step_avg:222.81ms +[2025-07-17 11:46:20] [Rank 0] step:741/10000 train_time:165118ms step_avg:222.83ms +[2025-07-17 11:46:20] [Rank 0] step:741/10000 train_time:165118ms step_avg:222.83ms +[2025-07-17 11:46:27] [Rank 0] PRINT: step:750/10000 val_loss:4.5337 train_time:167599ms step_avg:223.46ms +[2025-07-17 11:46:27] [Rank 0] PRINT: step:750/10000 val_loss:4.5337 
train_time:167599ms step_avg:223.46ms +[2025-07-17 11:46:29] [Rank 0] step:761/10000 train_time:169621ms step_avg:222.89ms +[2025-07-17 11:46:29] [Rank 0] step:761/10000 train_time:169621ms step_avg:222.89ms +[2025-07-17 11:46:34] [Rank 0] step:781/10000 train_time:174125ms step_avg:222.95ms +[2025-07-17 11:46:34] [Rank 0] step:781/10000 train_time:174125ms step_avg:222.95ms +[2025-07-17 11:46:38] [Rank 0] step:801/10000 train_time:178635ms step_avg:223.01ms +[2025-07-17 11:46:38] [Rank 0] step:801/10000 train_time:178635ms step_avg:223.01ms +[2025-07-17 11:46:43] [Rank 0] step:821/10000 train_time:183143ms step_avg:223.07ms +[2025-07-17 11:46:43] [Rank 0] step:821/10000 train_time:183143ms step_avg:223.07ms +[2025-07-17 11:46:47] [Rank 0] step:841/10000 train_time:187653ms step_avg:223.13ms +[2025-07-17 11:46:47] [Rank 0] step:841/10000 train_time:187653ms step_avg:223.13ms +[2025-07-17 11:46:52] [Rank 0] step:861/10000 train_time:192164ms step_avg:223.19ms +[2025-07-17 11:46:52] [Rank 0] step:861/10000 train_time:192164ms step_avg:223.19ms +[2025-07-17 11:46:59] [Rank 0] PRINT: step:875/10000 val_loss:4.6201 train_time:195776ms step_avg:223.74ms +[2025-07-17 11:46:59] [Rank 0] PRINT: step:875/10000 val_loss:4.6201 train_time:195776ms step_avg:223.74ms +[2025-07-17 11:47:01] [Rank 0] step:881/10000 train_time:196677ms step_avg:223.24ms +[2025-07-17 11:47:01] [Rank 0] step:881/10000 train_time:196677ms step_avg:223.24ms +[2025-07-17 11:47:05] [Rank 0] step:901/10000 train_time:201186ms step_avg:223.29ms +[2025-07-17 11:47:05] [Rank 0] step:901/10000 train_time:201186ms step_avg:223.29ms +[2025-07-17 11:47:10] [Rank 0] step:921/10000 train_time:205699ms step_avg:223.34ms +[2025-07-17 11:47:10] [Rank 0] step:921/10000 train_time:205699ms step_avg:223.34ms +[2025-07-17 11:47:14] [Rank 0] step:941/10000 train_time:210213ms step_avg:223.39ms +[2025-07-17 11:47:14] [Rank 0] step:941/10000 train_time:210213ms step_avg:223.39ms +[2025-07-17 11:47:19] [Rank 0] 
step:961/10000 train_time:214724ms step_avg:223.44ms +[2025-07-17 11:47:19] [Rank 0] step:961/10000 train_time:214724ms step_avg:223.44ms +[2025-07-17 11:47:23] [Rank 0] step:981/10000 train_time:219234ms step_avg:223.48ms +[2025-07-17 11:47:23] [Rank 0] step:981/10000 train_time:219234ms step_avg:223.48ms +[2025-07-17 11:47:32] [Rank 0] PRINT: step:1000/10000 val_loss:4.6313 train_time:223974ms step_avg:223.97ms +[2025-07-17 11:47:32] [Rank 0] PRINT: step:1000/10000 val_loss:4.6313 train_time:223974ms step_avg:223.97ms +[2025-07-17 11:47:32] [Rank 0] step:1001/10000 train_time:223988ms step_avg:223.76ms +[2025-07-17 11:47:32] [Rank 0] step:1001/10000 train_time:223988ms step_avg:223.76ms +[2025-07-17 11:47:37] [Rank 0] step:1021/10000 train_time:228260ms step_avg:223.57ms +[2025-07-17 11:47:37] [Rank 0] step:1021/10000 train_time:228260ms step_avg:223.57ms +[2025-07-17 11:47:41] [Rank 0] step:1041/10000 train_time:232771ms step_avg:223.60ms +[2025-07-17 11:47:41] [Rank 0] step:1041/10000 train_time:232771ms step_avg:223.60ms +[2025-07-17 11:47:46] [Rank 0] step:1061/10000 train_time:237285ms step_avg:223.64ms +[2025-07-17 11:47:46] [Rank 0] step:1061/10000 train_time:237285ms step_avg:223.64ms +[2025-07-17 11:47:50] [Rank 0] step:1081/10000 train_time:241799ms step_avg:223.68ms +[2025-07-17 11:47:50] [Rank 0] step:1081/10000 train_time:241799ms step_avg:223.68ms +[2025-07-17 11:47:55] [Rank 0] step:1101/10000 train_time:246314ms step_avg:223.72ms +[2025-07-17 11:47:55] [Rank 0] step:1101/10000 train_time:246314ms step_avg:223.72ms +[2025-07-17 11:47:59] [Rank 0] step:1121/10000 train_time:250829ms step_avg:223.75ms +[2025-07-17 11:47:59] [Rank 0] step:1121/10000 train_time:250829ms step_avg:223.75ms +[2025-07-17 11:48:05] [Rank 0] PRINT: step:1125/10000 val_loss:4.5546 train_time:252186ms step_avg:224.17ms +[2025-07-17 11:48:05] [Rank 0] PRINT: step:1125/10000 val_loss:4.5546 train_time:252186ms step_avg:224.17ms +[2025-07-17 11:48:08] [Rank 0] step:1141/10000 
train_time:255347ms step_avg:223.79ms +[2025-07-17 11:48:08] [Rank 0] step:1141/10000 train_time:255347ms step_avg:223.79ms +[2025-07-17 11:48:13] [Rank 0] step:1161/10000 train_time:259864ms step_avg:223.83ms +[2025-07-17 11:48:13] [Rank 0] step:1161/10000 train_time:259864ms step_avg:223.83ms +[2025-07-17 11:48:17] [Rank 0] step:1181/10000 train_time:264384ms step_avg:223.86ms +[2025-07-17 11:48:17] [Rank 0] step:1181/10000 train_time:264384ms step_avg:223.86ms +[2025-07-17 11:48:22] [Rank 0] step:1201/10000 train_time:268903ms step_avg:223.90ms +[2025-07-17 11:48:22] [Rank 0] step:1201/10000 train_time:268903ms step_avg:223.90ms +[2025-07-17 11:48:26] [Rank 0] step:1221/10000 train_time:273422ms step_avg:223.93ms +[2025-07-17 11:48:26] [Rank 0] step:1221/10000 train_time:273422ms step_avg:223.93ms +[2025-07-17 11:48:31] [Rank 0] step:1241/10000 train_time:277941ms step_avg:223.97ms +[2025-07-17 11:48:31] [Rank 0] step:1241/10000 train_time:277941ms step_avg:223.97ms +[2025-07-17 11:48:37] [Rank 0] PRINT: step:1250/10000 val_loss:4.6171 train_time:280432ms step_avg:224.35ms +[2025-07-17 11:48:37] [Rank 0] PRINT: step:1250/10000 val_loss:4.6171 train_time:280432ms step_avg:224.35ms +[2025-07-17 11:48:40] [Rank 0] step:1261/10000 train_time:282463ms step_avg:224.00ms +[2025-07-17 11:48:40] [Rank 0] step:1261/10000 train_time:282463ms step_avg:224.00ms +[2025-07-17 11:48:44] [Rank 0] step:1281/10000 train_time:286986ms step_avg:224.03ms +[2025-07-17 11:48:44] [Rank 0] step:1281/10000 train_time:286986ms step_avg:224.03ms +[2025-07-17 11:48:49] [Rank 0] step:1301/10000 train_time:291510ms step_avg:224.07ms +[2025-07-17 11:48:49] [Rank 0] step:1301/10000 train_time:291510ms step_avg:224.07ms +[2025-07-17 11:48:53] [Rank 0] step:1321/10000 train_time:296034ms step_avg:224.10ms +[2025-07-17 11:48:53] [Rank 0] step:1321/10000 train_time:296034ms step_avg:224.10ms +[2025-07-17 11:48:58] [Rank 0] step:1341/10000 train_time:300560ms step_avg:224.13ms +[2025-07-17 11:48:58] 
[Rank 0] step:1341/10000 train_time:300560ms step_avg:224.13ms +[2025-07-17 11:49:02] [Rank 0] step:1361/10000 train_time:305085ms step_avg:224.16ms +[2025-07-17 11:49:02] [Rank 0] step:1361/10000 train_time:305085ms step_avg:224.16ms +[2025-07-17 11:49:10] [Rank 0] PRINT: step:1375/10000 val_loss:4.6116 train_time:308710ms step_avg:224.52ms +[2025-07-17 11:49:10] [Rank 0] PRINT: step:1375/10000 val_loss:4.6116 train_time:308710ms step_avg:224.52ms +[2025-07-17 11:49:11] [Rank 0] step:1381/10000 train_time:309613ms step_avg:224.19ms +[2025-07-17 11:49:11] [Rank 0] step:1381/10000 train_time:309613ms step_avg:224.19ms +[2025-07-17 11:49:16] [Rank 0] step:1401/10000 train_time:314136ms step_avg:224.22ms +[2025-07-17 11:49:16] [Rank 0] step:1401/10000 train_time:314136ms step_avg:224.22ms +[2025-07-17 11:49:21] [Rank 0] step:1421/10000 train_time:318662ms step_avg:224.25ms +[2025-07-17 11:49:21] [Rank 0] step:1421/10000 train_time:318662ms step_avg:224.25ms +[2025-07-17 11:49:25] [Rank 0] step:1441/10000 train_time:323188ms step_avg:224.28ms +[2025-07-17 11:49:25] [Rank 0] step:1441/10000 train_time:323188ms step_avg:224.28ms +[2025-07-17 11:49:30] [Rank 0] step:1461/10000 train_time:327709ms step_avg:224.30ms +[2025-07-17 11:49:30] [Rank 0] step:1461/10000 train_time:327709ms step_avg:224.30ms +[2025-07-17 11:49:34] [Rank 0] step:1481/10000 train_time:332236ms step_avg:224.33ms +[2025-07-17 11:49:34] [Rank 0] step:1481/10000 train_time:332236ms step_avg:224.33ms +[2025-07-17 11:49:42] [Rank 0] PRINT: step:1500/10000 val_loss:4.5527 train_time:337013ms step_avg:224.68ms +[2025-07-17 11:49:42] [Rank 0] PRINT: step:1500/10000 val_loss:4.5527 train_time:337013ms step_avg:224.68ms +[2025-07-17 11:49:43] [Rank 0] step:1501/10000 train_time:337028ms step_avg:224.54ms +[2025-07-17 11:49:43] [Rank 0] step:1501/10000 train_time:337028ms step_avg:224.54ms +[2025-07-17 11:49:47] [Rank 0] step:1521/10000 train_time:341337ms step_avg:224.42ms +[2025-07-17 11:49:47] [Rank 0] 
step:1521/10000 train_time:341337ms step_avg:224.42ms +[2025-07-17 11:49:52] [Rank 0] step:1541/10000 train_time:345892ms step_avg:224.46ms +[2025-07-17 11:49:52] [Rank 0] step:1541/10000 train_time:345892ms step_avg:224.46ms +[2025-07-17 11:49:56] [Rank 0] step:1561/10000 train_time:350447ms step_avg:224.50ms +[2025-07-17 11:49:56] [Rank 0] step:1561/10000 train_time:350447ms step_avg:224.50ms +[2025-07-17 11:50:01] [Rank 0] step:1581/10000 train_time:355004ms step_avg:224.54ms +[2025-07-17 11:50:01] [Rank 0] step:1581/10000 train_time:355004ms step_avg:224.54ms +[2025-07-17 11:50:05] [Rank 0] step:1601/10000 train_time:359563ms step_avg:224.59ms +[2025-07-17 11:50:05] [Rank 0] step:1601/10000 train_time:359563ms step_avg:224.59ms +[2025-07-17 11:50:10] [Rank 0] step:1621/10000 train_time:364126ms step_avg:224.63ms +[2025-07-17 11:50:10] [Rank 0] step:1621/10000 train_time:364126ms step_avg:224.63ms +[2025-07-17 11:50:15] [Rank 0] PRINT: step:1625/10000 val_loss:4.5099 train_time:365499ms step_avg:224.92ms +[2025-07-17 11:50:15] [Rank 0] PRINT: step:1625/10000 val_loss:4.5099 train_time:365499ms step_avg:224.92ms +[2025-07-17 11:50:19] [Rank 0] step:1641/10000 train_time:368684ms step_avg:224.67ms +[2025-07-17 11:50:19] [Rank 0] step:1641/10000 train_time:368684ms step_avg:224.67ms +[2025-07-17 11:50:24] [Rank 0] step:1661/10000 train_time:373241ms step_avg:224.71ms +[2025-07-17 11:50:24] [Rank 0] step:1661/10000 train_time:373241ms step_avg:224.71ms +[2025-07-17 11:50:28] [Rank 0] step:1681/10000 train_time:377797ms step_avg:224.75ms +[2025-07-17 11:50:28] [Rank 0] step:1681/10000 train_time:377797ms step_avg:224.75ms +[2025-07-17 11:50:33] [Rank 0] step:1701/10000 train_time:382354ms step_avg:224.78ms +[2025-07-17 11:50:33] [Rank 0] step:1701/10000 train_time:382354ms step_avg:224.78ms +[2025-07-17 11:50:37] [Rank 0] step:1721/10000 train_time:386915ms step_avg:224.82ms +[2025-07-17 11:50:37] [Rank 0] step:1721/10000 train_time:386915ms step_avg:224.82ms 
+[2025-07-17 11:50:42] [Rank 0] step:1741/10000 train_time:391475ms step_avg:224.86ms +[2025-07-17 11:50:42] [Rank 0] step:1741/10000 train_time:391475ms step_avg:224.86ms +[2025-07-17 11:50:48] [Rank 0] PRINT: step:1750/10000 val_loss:4.5732 train_time:393985ms step_avg:225.13ms +[2025-07-17 11:50:48] [Rank 0] PRINT: step:1750/10000 val_loss:4.5732 train_time:393985ms step_avg:225.13ms +[2025-07-17 11:50:51] [Rank 0] step:1761/10000 train_time:396033ms step_avg:224.89ms +[2025-07-17 11:50:51] [Rank 0] step:1761/10000 train_time:396033ms step_avg:224.89ms +[2025-07-17 11:50:56] [Rank 0] step:1781/10000 train_time:400592ms step_avg:224.93ms +[2025-07-17 11:50:56] [Rank 0] step:1781/10000 train_time:400592ms step_avg:224.93ms +[2025-07-17 11:51:00] [Rank 0] step:1801/10000 train_time:405154ms step_avg:224.96ms +[2025-07-17 11:51:00] [Rank 0] step:1801/10000 train_time:405154ms step_avg:224.96ms +[2025-07-17 11:51:05] [Rank 0] step:1821/10000 train_time:409718ms step_avg:225.00ms +[2025-07-17 11:51:05] [Rank 0] step:1821/10000 train_time:409718ms step_avg:225.00ms +[2025-07-17 11:51:09] [Rank 0] step:1841/10000 train_time:414280ms step_avg:225.03ms +[2025-07-17 11:51:09] [Rank 0] step:1841/10000 train_time:414280ms step_avg:225.03ms +[2025-07-17 11:51:14] [Rank 0] step:1861/10000 train_time:418841ms step_avg:225.06ms +[2025-07-17 11:51:14] [Rank 0] step:1861/10000 train_time:418841ms step_avg:225.06ms +[2025-07-17 11:51:22] [Rank 0] PRINT: step:1875/10000 val_loss:4.5850 train_time:422495ms step_avg:225.33ms +[2025-07-17 11:51:22] [Rank 0] PRINT: step:1875/10000 val_loss:4.5850 train_time:422495ms step_avg:225.33ms +[2025-07-17 11:51:23] [Rank 0] step:1881/10000 train_time:423404ms step_avg:225.10ms +[2025-07-17 11:51:23] [Rank 0] step:1881/10000 train_time:423404ms step_avg:225.10ms +[2025-07-17 11:51:27] [Rank 0] step:1901/10000 train_time:427962ms step_avg:225.12ms +[2025-07-17 11:51:27] [Rank 0] step:1901/10000 train_time:427962ms step_avg:225.12ms +[2025-07-17 
11:51:32] [Rank 0] step:1921/10000 train_time:432520ms step_avg:225.15ms +[2025-07-17 11:51:32] [Rank 0] step:1921/10000 train_time:432520ms step_avg:225.15ms +[2025-07-17 11:51:37] [Rank 0] step:1941/10000 train_time:437078ms step_avg:225.18ms +[2025-07-17 11:51:37] [Rank 0] step:1941/10000 train_time:437078ms step_avg:225.18ms +[2025-07-17 11:51:41] [Rank 0] step:1961/10000 train_time:441638ms step_avg:225.21ms +[2025-07-17 11:51:41] [Rank 0] step:1961/10000 train_time:441638ms step_avg:225.21ms +[2025-07-17 11:51:46] [Rank 0] step:1981/10000 train_time:446193ms step_avg:225.24ms +[2025-07-17 11:51:46] [Rank 0] step:1981/10000 train_time:446193ms step_avg:225.24ms +[2025-07-17 11:51:54] [Rank 0] PRINT: step:2000/10000 val_loss:4.5638 train_time:450979ms step_avg:225.49ms +[2025-07-17 11:51:54] [Rank 0] PRINT: step:2000/10000 val_loss:4.5638 train_time:450979ms step_avg:225.49ms +[2025-07-17 11:51:55] [Rank 0] step:2001/10000 train_time:450993ms step_avg:225.38ms +[2025-07-17 11:51:55] [Rank 0] step:2001/10000 train_time:450993ms step_avg:225.38ms +[2025-07-17 11:51:59] [Rank 0] step:2021/10000 train_time:455305ms step_avg:225.29ms +[2025-07-17 11:51:59] [Rank 0] step:2021/10000 train_time:455305ms step_avg:225.29ms +[2025-07-17 11:52:04] [Rank 0] step:2041/10000 train_time:459863ms step_avg:225.31ms +[2025-07-17 11:52:04] [Rank 0] step:2041/10000 train_time:459863ms step_avg:225.31ms +[2025-07-17 11:52:08] [Rank 0] step:2061/10000 train_time:464422ms step_avg:225.34ms +[2025-07-17 11:52:08] [Rank 0] step:2061/10000 train_time:464422ms step_avg:225.34ms +[2025-07-17 11:52:13] [Rank 0] step:2081/10000 train_time:468982ms step_avg:225.36ms +[2025-07-17 11:52:13] [Rank 0] step:2081/10000 train_time:468982ms step_avg:225.36ms +[2025-07-17 11:52:18] [Rank 0] step:2101/10000 train_time:473540ms step_avg:225.39ms +[2025-07-17 11:52:18] [Rank 0] step:2101/10000 train_time:473540ms step_avg:225.39ms +[2025-07-17 11:52:22] [Rank 0] step:2121/10000 train_time:478097ms 
step_avg:225.41ms +[2025-07-17 11:52:22] [Rank 0] step:2121/10000 train_time:478097ms step_avg:225.41ms +[2025-07-17 11:52:27] [Rank 0] PRINT: step:2125/10000 val_loss:4.5835 train_time:479467ms step_avg:225.63ms +[2025-07-17 11:52:27] [Rank 0] PRINT: step:2125/10000 val_loss:4.5835 train_time:479467ms step_avg:225.63ms +[2025-07-17 11:52:31] [Rank 0] step:2141/10000 train_time:482650ms step_avg:225.43ms +[2025-07-17 11:52:31] [Rank 0] step:2141/10000 train_time:482650ms step_avg:225.43ms +[2025-07-17 11:52:36] [Rank 0] step:2161/10000 train_time:487206ms step_avg:225.45ms +[2025-07-17 11:52:36] [Rank 0] step:2161/10000 train_time:487206ms step_avg:225.45ms +[2025-07-17 11:52:40] [Rank 0] step:2181/10000 train_time:491763ms step_avg:225.48ms +[2025-07-17 11:52:40] [Rank 0] step:2181/10000 train_time:491763ms step_avg:225.48ms +[2025-07-17 11:52:45] [Rank 0] step:2201/10000 train_time:496322ms step_avg:225.50ms +[2025-07-17 11:52:45] [Rank 0] step:2201/10000 train_time:496322ms step_avg:225.50ms +[2025-07-17 11:52:49] [Rank 0] step:2221/10000 train_time:500878ms step_avg:225.52ms +[2025-07-17 11:52:49] [Rank 0] step:2221/10000 train_time:500878ms step_avg:225.52ms +[2025-07-17 11:52:54] [Rank 0] step:2241/10000 train_time:505526ms step_avg:225.58ms +[2025-07-17 11:52:54] [Rank 0] step:2241/10000 train_time:505526ms step_avg:225.58ms +[2025-07-17 11:53:01] [Rank 0] PRINT: step:2250/10000 val_loss:3.9725 train_time:508099ms step_avg:225.82ms +[2025-07-17 11:53:01] [Rank 0] PRINT: step:2250/10000 val_loss:3.9725 train_time:508099ms step_avg:225.82ms +[2025-07-17 11:53:03] [Rank 0] step:2261/10000 train_time:510194ms step_avg:225.65ms +[2025-07-17 11:53:03] [Rank 0] step:2261/10000 train_time:510194ms step_avg:225.65ms +[2025-07-17 11:53:08] [Rank 0] step:2281/10000 train_time:514864ms step_avg:225.72ms +[2025-07-17 11:53:08] [Rank 0] step:2281/10000 train_time:514864ms step_avg:225.72ms +[2025-07-17 11:53:13] [Rank 0] step:2301/10000 train_time:519537ms 
step_avg:225.79ms +[2025-07-17 11:53:13] [Rank 0] step:2301/10000 train_time:519537ms step_avg:225.79ms +[2025-07-17 11:53:17] [Rank 0] step:2321/10000 train_time:524207ms step_avg:225.85ms +[2025-07-17 11:53:17] [Rank 0] step:2321/10000 train_time:524207ms step_avg:225.85ms +[2025-07-17 11:53:22] [Rank 0] step:2341/10000 train_time:528876ms step_avg:225.92ms +[2025-07-17 11:53:22] [Rank 0] step:2341/10000 train_time:528876ms step_avg:225.92ms +[2025-07-17 11:53:27] [Rank 0] step:2361/10000 train_time:533545ms step_avg:225.98ms +[2025-07-17 11:53:27] [Rank 0] step:2361/10000 train_time:533545ms step_avg:225.98ms +[2025-07-17 11:53:34] [Rank 0] PRINT: step:2375/10000 val_loss:3.9647 train_time:537283ms step_avg:226.22ms +[2025-07-17 11:53:34] [Rank 0] PRINT: step:2375/10000 val_loss:3.9647 train_time:537283ms step_avg:226.22ms +[2025-07-17 11:53:36] [Rank 0] step:2381/10000 train_time:538213ms step_avg:226.05ms +[2025-07-17 11:53:36] [Rank 0] step:2381/10000 train_time:538213ms step_avg:226.05ms +[2025-07-17 11:53:40] [Rank 0] step:2401/10000 train_time:542885ms step_avg:226.11ms +[2025-07-17 11:53:40] [Rank 0] step:2401/10000 train_time:542885ms step_avg:226.11ms +[2025-07-17 11:53:45] [Rank 0] step:2421/10000 train_time:547552ms step_avg:226.17ms +[2025-07-17 11:53:45] [Rank 0] step:2421/10000 train_time:547552ms step_avg:226.17ms +[2025-07-17 11:53:50] [Rank 0] step:2441/10000 train_time:552220ms step_avg:226.23ms +[2025-07-17 11:53:50] [Rank 0] step:2441/10000 train_time:552220ms step_avg:226.23ms +[2025-07-17 11:53:54] [Rank 0] step:2461/10000 train_time:556887ms step_avg:226.29ms +[2025-07-17 11:53:54] [Rank 0] step:2461/10000 train_time:556887ms step_avg:226.29ms +[2025-07-17 11:53:59] [Rank 0] step:2481/10000 train_time:561556ms step_avg:226.34ms +[2025-07-17 11:53:59] [Rank 0] step:2481/10000 train_time:561556ms step_avg:226.34ms +[2025-07-17 11:54:08] [Rank 0] PRINT: step:2500/10000 val_loss:4.0430 train_time:566459ms step_avg:226.58ms +[2025-07-17 
11:54:08] [Rank 0] PRINT: step:2500/10000 val_loss:4.0430 train_time:566459ms step_avg:226.58ms +[2025-07-17 11:54:08] [Rank 0] step:2501/10000 train_time:566472ms step_avg:226.50ms +[2025-07-17 11:54:08] [Rank 0] step:2501/10000 train_time:566472ms step_avg:226.50ms +[2025-07-17 11:54:13] [Rank 0] step:2521/10000 train_time:570899ms step_avg:226.46ms +[2025-07-17 11:54:13] [Rank 0] step:2521/10000 train_time:570899ms step_avg:226.46ms +[2025-07-17 11:54:17] [Rank 0] step:2541/10000 train_time:575573ms step_avg:226.51ms +[2025-07-17 11:54:17] [Rank 0] step:2541/10000 train_time:575573ms step_avg:226.51ms +[2025-07-17 11:54:22] [Rank 0] step:2561/10000 train_time:580247ms step_avg:226.57ms +[2025-07-17 11:54:22] [Rank 0] step:2561/10000 train_time:580247ms step_avg:226.57ms +[2025-07-17 11:54:27] [Rank 0] step:2581/10000 train_time:584923ms step_avg:226.63ms +[2025-07-17 11:54:27] [Rank 0] step:2581/10000 train_time:584923ms step_avg:226.63ms +[2025-07-17 11:54:31] [Rank 0] step:2601/10000 train_time:589596ms step_avg:226.68ms +[2025-07-17 11:54:31] [Rank 0] step:2601/10000 train_time:589596ms step_avg:226.68ms +[2025-07-17 11:54:36] [Rank 0] step:2621/10000 train_time:594271ms step_avg:226.73ms +[2025-07-17 11:54:36] [Rank 0] step:2621/10000 train_time:594271ms step_avg:226.73ms +[2025-07-17 11:54:42] [Rank 0] PRINT: step:2625/10000 val_loss:4.0833 train_time:595676ms step_avg:226.92ms +[2025-07-17 11:54:42] [Rank 0] PRINT: step:2625/10000 val_loss:4.0833 train_time:595676ms step_avg:226.92ms +[2025-07-17 11:54:45] [Rank 0] step:2641/10000 train_time:598942ms step_avg:226.79ms +[2025-07-17 11:54:45] [Rank 0] step:2641/10000 train_time:598942ms step_avg:226.79ms +[2025-07-17 11:54:50] [Rank 0] step:2661/10000 train_time:603615ms step_avg:226.84ms +[2025-07-17 11:54:50] [Rank 0] step:2661/10000 train_time:603615ms step_avg:226.84ms +[2025-07-17 11:54:55] [Rank 0] step:2681/10000 train_time:608287ms step_avg:226.89ms +[2025-07-17 11:54:55] [Rank 0] step:2681/10000 
train_time:608287ms step_avg:226.89ms +[2025-07-17 11:54:59] [Rank 0] step:2701/10000 train_time:612958ms step_avg:226.94ms +[2025-07-17 11:54:59] [Rank 0] step:2701/10000 train_time:612958ms step_avg:226.94ms +[2025-07-17 11:55:04] [Rank 0] step:2721/10000 train_time:617631ms step_avg:226.99ms +[2025-07-17 11:55:04] [Rank 0] step:2721/10000 train_time:617631ms step_avg:226.99ms +[2025-07-17 11:55:09] [Rank 0] step:2741/10000 train_time:622303ms step_avg:227.04ms +[2025-07-17 11:55:09] [Rank 0] step:2741/10000 train_time:622303ms step_avg:227.04ms +[2025-07-17 11:55:15] [Rank 0] PRINT: step:2750/10000 val_loss:4.1499 train_time:624878ms step_avg:227.23ms +[2025-07-17 11:55:15] [Rank 0] PRINT: step:2750/10000 val_loss:4.1499 train_time:624878ms step_avg:227.23ms +[2025-07-17 11:55:18] [Rank 0] step:2761/10000 train_time:626977ms step_avg:227.08ms +[2025-07-17 11:55:18] [Rank 0] step:2761/10000 train_time:626977ms step_avg:227.08ms +[2025-07-17 11:55:23] [Rank 0] step:2781/10000 train_time:631647ms step_avg:227.13ms +[2025-07-17 11:55:23] [Rank 0] step:2781/10000 train_time:631647ms step_avg:227.13ms +[2025-07-17 11:55:27] [Rank 0] step:2801/10000 train_time:636315ms step_avg:227.17ms +[2025-07-17 11:55:27] [Rank 0] step:2801/10000 train_time:636315ms step_avg:227.17ms +[2025-07-17 11:55:32] [Rank 0] step:2821/10000 train_time:640984ms step_avg:227.22ms +[2025-07-17 11:55:32] [Rank 0] step:2821/10000 train_time:640984ms step_avg:227.22ms +[2025-07-17 11:55:37] [Rank 0] step:2841/10000 train_time:645651ms step_avg:227.26ms +[2025-07-17 11:55:37] [Rank 0] step:2841/10000 train_time:645651ms step_avg:227.26ms +[2025-07-17 11:55:41] [Rank 0] step:2861/10000 train_time:650318ms step_avg:227.30ms +[2025-07-17 11:55:41] [Rank 0] step:2861/10000 train_time:650318ms step_avg:227.30ms +[2025-07-17 11:55:49] [Rank 0] PRINT: step:2875/10000 val_loss:4.0940 train_time:654054ms step_avg:227.50ms +[2025-07-17 11:55:49] [Rank 0] PRINT: step:2875/10000 val_loss:4.0940 
train_time:654054ms step_avg:227.50ms +[2025-07-17 11:55:50] [Rank 0] step:2881/10000 train_time:654984ms step_avg:227.35ms +[2025-07-17 11:55:50] [Rank 0] step:2881/10000 train_time:654984ms step_avg:227.35ms +[2025-07-17 11:55:55] [Rank 0] step:2901/10000 train_time:659651ms step_avg:227.39ms +[2025-07-17 11:55:55] [Rank 0] step:2901/10000 train_time:659651ms step_avg:227.39ms +[2025-07-17 11:56:00] [Rank 0] step:2921/10000 train_time:664317ms step_avg:227.43ms +[2025-07-17 11:56:00] [Rank 0] step:2921/10000 train_time:664317ms step_avg:227.43ms +[2025-07-17 11:56:04] [Rank 0] step:2941/10000 train_time:668981ms step_avg:227.47ms +[2025-07-17 11:56:04] [Rank 0] step:2941/10000 train_time:668981ms step_avg:227.47ms +[2025-07-17 11:56:09] [Rank 0] step:2961/10000 train_time:673645ms step_avg:227.51ms +[2025-07-17 11:56:09] [Rank 0] step:2961/10000 train_time:673645ms step_avg:227.51ms +[2025-07-17 11:56:14] [Rank 0] step:2981/10000 train_time:678328ms step_avg:227.55ms +[2025-07-17 11:56:14] [Rank 0] step:2981/10000 train_time:678328ms step_avg:227.55ms +[2025-07-17 11:56:23] [Rank 0] PRINT: step:3000/10000 val_loss:4.0533 train_time:683244ms step_avg:227.75ms +[2025-07-17 11:56:23] [Rank 0] PRINT: step:3000/10000 val_loss:4.0533 train_time:683244ms step_avg:227.75ms +[2025-07-17 11:56:23] [Rank 0] step:3001/10000 train_time:683257ms step_avg:227.68ms +[2025-07-17 11:56:23] [Rank 0] step:3001/10000 train_time:683257ms step_avg:227.68ms +[2025-07-17 11:56:28] [Rank 0] step:3021/10000 train_time:687692ms step_avg:227.64ms +[2025-07-17 11:56:28] [Rank 0] step:3021/10000 train_time:687692ms step_avg:227.64ms +[2025-07-17 11:56:32] [Rank 0] step:3041/10000 train_time:692377ms step_avg:227.68ms +[2025-07-17 11:56:32] [Rank 0] step:3041/10000 train_time:692377ms step_avg:227.68ms +[2025-07-17 11:56:37] [Rank 0] step:3061/10000 train_time:697062ms step_avg:227.72ms +[2025-07-17 11:56:37] [Rank 0] step:3061/10000 train_time:697062ms step_avg:227.72ms +[2025-07-17 11:56:42] 
[Rank 0] step:3081/10000 train_time:701745ms step_avg:227.77ms +[2025-07-17 11:56:42] [Rank 0] step:3081/10000 train_time:701745ms step_avg:227.77ms +[2025-07-17 11:56:46] [Rank 0] step:3101/10000 train_time:706433ms step_avg:227.81ms +[2025-07-17 11:56:46] [Rank 0] step:3101/10000 train_time:706433ms step_avg:227.81ms +[2025-07-17 11:56:51] [Rank 0] step:3121/10000 train_time:711118ms step_avg:227.85ms +[2025-07-17 11:56:51] [Rank 0] step:3121/10000 train_time:711118ms step_avg:227.85ms +[2025-07-17 11:56:56] [Rank 0] PRINT: step:3125/10000 val_loss:4.0992 train_time:712530ms step_avg:228.01ms +[2025-07-17 11:56:56] [Rank 0] PRINT: step:3125/10000 val_loss:4.0992 train_time:712530ms step_avg:228.01ms +[2025-07-17 11:57:00] [Rank 0] step:3141/10000 train_time:715807ms step_avg:227.89ms +[2025-07-17 11:57:00] [Rank 0] step:3141/10000 train_time:715807ms step_avg:227.89ms +[2025-07-17 11:57:05] [Rank 0] step:3161/10000 train_time:720497ms step_avg:227.93ms +[2025-07-17 11:57:05] [Rank 0] step:3161/10000 train_time:720497ms step_avg:227.93ms +[2025-07-17 11:57:09] [Rank 0] step:3181/10000 train_time:725187ms step_avg:227.97ms +[2025-07-17 11:57:09] [Rank 0] step:3181/10000 train_time:725187ms step_avg:227.97ms +[2025-07-17 11:57:14] [Rank 0] step:3201/10000 train_time:729878ms step_avg:228.02ms +[2025-07-17 11:57:14] [Rank 0] step:3201/10000 train_time:729878ms step_avg:228.02ms +[2025-07-17 11:57:19] [Rank 0] step:3221/10000 train_time:734569ms step_avg:228.06ms +[2025-07-17 11:57:19] [Rank 0] step:3221/10000 train_time:734569ms step_avg:228.06ms +[2025-07-17 11:57:23] [Rank 0] step:3241/10000 train_time:739259ms step_avg:228.10ms +[2025-07-17 11:57:23] [Rank 0] step:3241/10000 train_time:739259ms step_avg:228.10ms +[2025-07-17 11:57:30] [Rank 0] PRINT: step:3250/10000 val_loss:4.1387 train_time:741840ms step_avg:228.26ms +[2025-07-17 11:57:30] [Rank 0] PRINT: step:3250/10000 val_loss:4.1387 train_time:741840ms step_avg:228.26ms +[2025-07-17 11:57:33] [Rank 0] 
step:3261/10000 train_time:743947ms step_avg:228.13ms +[2025-07-17 11:57:33] [Rank 0] step:3261/10000 train_time:743947ms step_avg:228.13ms +[2025-07-17 11:57:37] [Rank 0] step:3281/10000 train_time:748635ms step_avg:228.17ms +[2025-07-17 11:57:37] [Rank 0] step:3281/10000 train_time:748635ms step_avg:228.17ms +[2025-07-17 11:57:42] [Rank 0] step:3301/10000 train_time:753327ms step_avg:228.21ms +[2025-07-17 11:57:42] [Rank 0] step:3301/10000 train_time:753327ms step_avg:228.21ms +[2025-07-17 11:57:47] [Rank 0] step:3321/10000 train_time:758018ms step_avg:228.25ms +[2025-07-17 11:57:47] [Rank 0] step:3321/10000 train_time:758018ms step_avg:228.25ms +[2025-07-17 11:57:51] [Rank 0] step:3341/10000 train_time:762712ms step_avg:228.29ms +[2025-07-17 11:57:51] [Rank 0] step:3341/10000 train_time:762712ms step_avg:228.29ms +[2025-07-17 11:57:56] [Rank 0] step:3361/10000 train_time:767405ms step_avg:228.33ms +[2025-07-17 11:57:56] [Rank 0] step:3361/10000 train_time:767405ms step_avg:228.33ms +[2025-07-17 11:58:04] [Rank 0] PRINT: step:3375/10000 val_loss:4.1706 train_time:771160ms step_avg:228.49ms +[2025-07-17 11:58:04] [Rank 0] PRINT: step:3375/10000 val_loss:4.1706 train_time:771160ms step_avg:228.49ms +[2025-07-17 11:58:05] [Rank 0] step:3381/10000 train_time:772096ms step_avg:228.36ms +[2025-07-17 11:58:05] [Rank 0] step:3381/10000 train_time:772096ms step_avg:228.36ms +[2025-07-17 11:58:10] [Rank 0] step:3401/10000 train_time:776794ms step_avg:228.40ms +[2025-07-17 11:58:10] [Rank 0] step:3401/10000 train_time:776794ms step_avg:228.40ms +[2025-07-17 11:58:15] [Rank 0] step:3421/10000 train_time:781487ms step_avg:228.44ms +[2025-07-17 11:58:15] [Rank 0] step:3421/10000 train_time:781487ms step_avg:228.44ms +[2025-07-17 11:58:19] [Rank 0] step:3441/10000 train_time:786183ms step_avg:228.48ms +[2025-07-17 11:58:19] [Rank 0] step:3441/10000 train_time:786183ms step_avg:228.48ms +[2025-07-17 11:58:24] [Rank 0] step:3461/10000 train_time:790878ms step_avg:228.51ms 
+[2025-07-17 11:58:24] [Rank 0] step:3461/10000 train_time:790878ms step_avg:228.51ms +[2025-07-17 11:58:29] [Rank 0] step:3481/10000 train_time:795574ms step_avg:228.55ms +[2025-07-17 11:58:29] [Rank 0] step:3481/10000 train_time:795574ms step_avg:228.55ms +[2025-07-17 11:58:38] [Rank 0] PRINT: step:3500/10000 val_loss:4.2069 train_time:800506ms step_avg:228.72ms +[2025-07-17 11:58:38] [Rank 0] PRINT: step:3500/10000 val_loss:4.2069 train_time:800506ms step_avg:228.72ms +[2025-07-17 11:58:38] [Rank 0] step:3501/10000 train_time:800519ms step_avg:228.65ms +[2025-07-17 11:58:38] [Rank 0] step:3501/10000 train_time:800519ms step_avg:228.65ms +[2025-07-17 11:58:43] [Rank 0] step:3521/10000 train_time:804968ms step_avg:228.62ms +[2025-07-17 11:58:43] [Rank 0] step:3521/10000 train_time:804968ms step_avg:228.62ms +[2025-07-17 11:58:48] [Rank 0] step:3541/10000 train_time:809670ms step_avg:228.66ms +[2025-07-17 11:58:48] [Rank 0] step:3541/10000 train_time:809670ms step_avg:228.66ms +[2025-07-17 11:58:52] [Rank 0] step:3561/10000 train_time:814371ms step_avg:228.69ms +[2025-07-17 11:58:52] [Rank 0] step:3561/10000 train_time:814371ms step_avg:228.69ms +[2025-07-17 11:58:57] [Rank 0] step:3581/10000 train_time:819066ms step_avg:228.73ms +[2025-07-17 11:58:57] [Rank 0] step:3581/10000 train_time:819066ms step_avg:228.73ms +[2025-07-17 11:59:02] [Rank 0] step:3601/10000 train_time:823762ms step_avg:228.76ms +[2025-07-17 11:59:02] [Rank 0] step:3601/10000 train_time:823762ms step_avg:228.76ms +[2025-07-17 11:59:06] [Rank 0] step:3621/10000 train_time:828456ms step_avg:228.79ms +[2025-07-17 11:59:06] [Rank 0] step:3621/10000 train_time:828456ms step_avg:228.79ms +[2025-07-17 11:59:12] [Rank 0] PRINT: step:3625/10000 val_loss:4.2189 train_time:829868ms step_avg:228.93ms +[2025-07-17 11:59:12] [Rank 0] PRINT: step:3625/10000 val_loss:4.2189 train_time:829868ms step_avg:228.93ms +[2025-07-17 11:59:16] [Rank 0] step:3641/10000 train_time:833152ms step_avg:228.83ms +[2025-07-17 
11:59:16] [Rank 0] step:3641/10000 train_time:833152ms step_avg:228.83ms +[2025-07-17 11:59:20] [Rank 0] step:3661/10000 train_time:837851ms step_avg:228.86ms +[2025-07-17 11:59:20] [Rank 0] step:3661/10000 train_time:837851ms step_avg:228.86ms +[2025-07-17 11:59:25] [Rank 0] step:3681/10000 train_time:842549ms step_avg:228.89ms +[2025-07-17 11:59:25] [Rank 0] step:3681/10000 train_time:842549ms step_avg:228.89ms +[2025-07-17 11:59:30] [Rank 0] step:3701/10000 train_time:847253ms step_avg:228.93ms +[2025-07-17 11:59:30] [Rank 0] step:3701/10000 train_time:847253ms step_avg:228.93ms +[2025-07-17 11:59:35] [Rank 0] step:3721/10000 train_time:852017ms step_avg:228.98ms +[2025-07-17 11:59:35] [Rank 0] step:3721/10000 train_time:852017ms step_avg:228.98ms +[2025-07-17 11:59:39] [Rank 0] step:3741/10000 train_time:856804ms step_avg:229.03ms +[2025-07-17 11:59:39] [Rank 0] step:3741/10000 train_time:856804ms step_avg:229.03ms +[2025-07-17 11:59:46] [Rank 0] PRINT: step:3750/10000 val_loss:4.1107 train_time:859439ms step_avg:229.18ms +[2025-07-17 11:59:46] [Rank 0] PRINT: step:3750/10000 val_loss:4.1107 train_time:859439ms step_avg:229.18ms +[2025-07-17 11:59:49] [Rank 0] step:3761/10000 train_time:861586ms step_avg:229.08ms +[2025-07-17 11:59:49] [Rank 0] step:3761/10000 train_time:861586ms step_avg:229.08ms +[2025-07-17 11:59:54] [Rank 0] step:3781/10000 train_time:866371ms step_avg:229.14ms +[2025-07-17 11:59:54] [Rank 0] step:3781/10000 train_time:866371ms step_avg:229.14ms +[2025-07-17 11:59:58] [Rank 0] step:3801/10000 train_time:871157ms step_avg:229.19ms +[2025-07-17 11:59:58] [Rank 0] step:3801/10000 train_time:871157ms step_avg:229.19ms +[2025-07-17 12:00:03] [Rank 0] step:3821/10000 train_time:875942ms step_avg:229.24ms +[2025-07-17 12:00:03] [Rank 0] step:3821/10000 train_time:875942ms step_avg:229.24ms +[2025-07-17 12:00:08] [Rank 0] step:3841/10000 train_time:880730ms step_avg:229.30ms +[2025-07-17 12:00:08] [Rank 0] step:3841/10000 train_time:880730ms 
step_avg:229.30ms +[2025-07-17 12:00:13] [Rank 0] step:3861/10000 train_time:885515ms step_avg:229.35ms +[2025-07-17 12:00:13] [Rank 0] step:3861/10000 train_time:885515ms step_avg:229.35ms +[2025-07-17 12:00:21] [Rank 0] PRINT: step:3875/10000 val_loss:4.1964 train_time:889347ms step_avg:229.51ms +[2025-07-17 12:00:21] [Rank 0] PRINT: step:3875/10000 val_loss:4.1964 train_time:889347ms step_avg:229.51ms +[2025-07-17 12:00:22] [Rank 0] step:3881/10000 train_time:890302ms step_avg:229.40ms +[2025-07-17 12:00:22] [Rank 0] step:3881/10000 train_time:890302ms step_avg:229.40ms +[2025-07-17 12:00:27] [Rank 0] step:3901/10000 train_time:895092ms step_avg:229.45ms +[2025-07-17 12:00:27] [Rank 0] step:3901/10000 train_time:895092ms step_avg:229.45ms +[2025-07-17 12:00:32] [Rank 0] step:3921/10000 train_time:899880ms step_avg:229.50ms +[2025-07-17 12:00:32] [Rank 0] step:3921/10000 train_time:899880ms step_avg:229.50ms +[2025-07-17 12:00:36] [Rank 0] step:3941/10000 train_time:904668ms step_avg:229.55ms +[2025-07-17 12:00:36] [Rank 0] step:3941/10000 train_time:904668ms step_avg:229.55ms +[2025-07-17 12:00:41] [Rank 0] step:3961/10000 train_time:909458ms step_avg:229.60ms +[2025-07-17 12:00:41] [Rank 0] step:3961/10000 train_time:909458ms step_avg:229.60ms +[2025-07-17 12:00:46] [Rank 0] step:3981/10000 train_time:914250ms step_avg:229.65ms +[2025-07-17 12:00:46] [Rank 0] step:3981/10000 train_time:914250ms step_avg:229.65ms +[2025-07-17 12:00:55] [Rank 0] PRINT: step:4000/10000 val_loss:4.2578 train_time:919276ms step_avg:229.82ms +[2025-07-17 12:00:55] [Rank 0] PRINT: step:4000/10000 val_loss:4.2578 train_time:919276ms step_avg:229.82ms +[2025-07-17 12:00:55] [Rank 0] step:4001/10000 train_time:919289ms step_avg:229.76ms +[2025-07-17 12:00:55] [Rank 0] step:4001/10000 train_time:919289ms step_avg:229.76ms +[2025-07-17 12:01:00] [Rank 0] step:4021/10000 train_time:923829ms step_avg:229.75ms +[2025-07-17 12:01:00] [Rank 0] step:4021/10000 train_time:923829ms 
step_avg:229.75ms +[2025-07-17 12:01:05] [Rank 0] step:4041/10000 train_time:928619ms step_avg:229.80ms +[2025-07-17 12:01:05] [Rank 0] step:4041/10000 train_time:928619ms step_avg:229.80ms +[2025-07-17 12:01:10] [Rank 0] step:4061/10000 train_time:933413ms step_avg:229.85ms +[2025-07-17 12:01:10] [Rank 0] step:4061/10000 train_time:933413ms step_avg:229.85ms +[2025-07-17 12:01:15] [Rank 0] step:4081/10000 train_time:938204ms step_avg:229.90ms +[2025-07-17 12:01:15] [Rank 0] step:4081/10000 train_time:938204ms step_avg:229.90ms +[2025-07-17 12:01:19] [Rank 0] step:4101/10000 train_time:942995ms step_avg:229.94ms +[2025-07-17 12:01:19] [Rank 0] step:4101/10000 train_time:942995ms step_avg:229.94ms +[2025-07-17 12:01:24] [Rank 0] step:4121/10000 train_time:947785ms step_avg:229.99ms +[2025-07-17 12:01:24] [Rank 0] step:4121/10000 train_time:947785ms step_avg:229.99ms +[2025-07-17 12:01:30] [Rank 0] PRINT: step:4125/10000 val_loss:4.2097 train_time:949227ms step_avg:230.12ms +[2025-07-17 12:01:30] [Rank 0] PRINT: step:4125/10000 val_loss:4.2097 train_time:949227ms step_avg:230.12ms +[2025-07-17 12:01:34] [Rank 0] step:4141/10000 train_time:952574ms step_avg:230.03ms +[2025-07-17 12:01:34] [Rank 0] step:4141/10000 train_time:952574ms step_avg:230.03ms +[2025-07-17 12:01:38] [Rank 0] step:4161/10000 train_time:957359ms step_avg:230.08ms +[2025-07-17 12:01:38] [Rank 0] step:4161/10000 train_time:957359ms step_avg:230.08ms +[2025-07-17 12:01:43] [Rank 0] step:4181/10000 train_time:962142ms step_avg:230.12ms +[2025-07-17 12:01:43] [Rank 0] step:4181/10000 train_time:962142ms step_avg:230.12ms +[2025-07-17 12:01:48] [Rank 0] step:4201/10000 train_time:966924ms step_avg:230.17ms +[2025-07-17 12:01:48] [Rank 0] step:4201/10000 train_time:966924ms step_avg:230.17ms +[2025-07-17 12:01:53] [Rank 0] step:4221/10000 train_time:971710ms step_avg:230.21ms +[2025-07-17 12:01:53] [Rank 0] step:4221/10000 train_time:971710ms step_avg:230.21ms +[2025-07-17 12:01:57] [Rank 0] 
step:4241/10000 train_time:976496ms step_avg:230.25ms +[2025-07-17 12:01:57] [Rank 0] step:4241/10000 train_time:976496ms step_avg:230.25ms +[2025-07-17 12:02:04] [Rank 0] PRINT: step:4250/10000 val_loss:4.2962 train_time:979130ms step_avg:230.38ms +[2025-07-17 12:02:04] [Rank 0] PRINT: step:4250/10000 val_loss:4.2962 train_time:979130ms step_avg:230.38ms +[2025-07-17 12:02:07] [Rank 0] step:4261/10000 train_time:981276ms step_avg:230.29ms +[2025-07-17 12:02:07] [Rank 0] step:4261/10000 train_time:981276ms step_avg:230.29ms +[2025-07-17 12:02:12] [Rank 0] step:4281/10000 train_time:986062ms step_avg:230.33ms +[2025-07-17 12:02:12] [Rank 0] step:4281/10000 train_time:986062ms step_avg:230.33ms +[2025-07-17 12:02:16] [Rank 0] step:4301/10000 train_time:990844ms step_avg:230.38ms +[2025-07-17 12:02:16] [Rank 0] step:4301/10000 train_time:990844ms step_avg:230.38ms +[2025-07-17 12:02:21] [Rank 0] step:4321/10000 train_time:995638ms step_avg:230.42ms +[2025-07-17 12:02:21] [Rank 0] step:4321/10000 train_time:995638ms step_avg:230.42ms +[2025-07-17 12:02:26] [Rank 0] step:4341/10000 train_time:1000424ms step_avg:230.46ms +[2025-07-17 12:02:26] [Rank 0] step:4341/10000 train_time:1000424ms step_avg:230.46ms +[2025-07-17 12:02:31] [Rank 0] step:4361/10000 train_time:1005210ms step_avg:230.50ms +[2025-07-17 12:02:31] [Rank 0] step:4361/10000 train_time:1005210ms step_avg:230.50ms +[2025-07-17 12:02:39] [Rank 0] PRINT: step:4375/10000 val_loss:4.2356 train_time:1009042ms step_avg:230.64ms +[2025-07-17 12:02:39] [Rank 0] PRINT: step:4375/10000 val_loss:4.2356 train_time:1009042ms step_avg:230.64ms +[2025-07-17 12:02:40] [Rank 0] step:4381/10000 train_time:1009995ms step_avg:230.54ms +[2025-07-17 12:02:40] [Rank 0] step:4381/10000 train_time:1009995ms step_avg:230.54ms +[2025-07-17 12:02:45] [Rank 0] step:4401/10000 train_time:1014781ms step_avg:230.58ms +[2025-07-17 12:02:45] [Rank 0] step:4401/10000 train_time:1014781ms step_avg:230.58ms +[2025-07-17 12:02:50] [Rank 0] 
step:4421/10000 train_time:1019565ms step_avg:230.62ms +[2025-07-17 12:02:50] [Rank 0] step:4421/10000 train_time:1019565ms step_avg:230.62ms +[2025-07-17 12:02:55] [Rank 0] step:4441/10000 train_time:1024352ms step_avg:230.66ms +[2025-07-17 12:02:55] [Rank 0] step:4441/10000 train_time:1024352ms step_avg:230.66ms +[2025-07-17 12:02:59] [Rank 0] step:4461/10000 train_time:1029149ms step_avg:230.70ms +[2025-07-17 12:02:59] [Rank 0] step:4461/10000 train_time:1029149ms step_avg:230.70ms +[2025-07-17 12:03:04] [Rank 0] step:4481/10000 train_time:1033949ms step_avg:230.74ms +[2025-07-17 12:03:04] [Rank 0] step:4481/10000 train_time:1033949ms step_avg:230.74ms +[2025-07-17 12:03:13] [Rank 0] PRINT: step:4500/10000 val_loss:4.2761 train_time:1038991ms step_avg:230.89ms +[2025-07-17 12:03:13] [Rank 0] PRINT: step:4500/10000 val_loss:4.2761 train_time:1038991ms step_avg:230.89ms +[2025-07-17 12:03:14] [Rank 0] step:4501/10000 train_time:1039004ms step_avg:230.84ms +[2025-07-17 12:03:14] [Rank 0] step:4501/10000 train_time:1039004ms step_avg:230.84ms +[2025-07-17 12:03:18] [Rank 0] step:4521/10000 train_time:1043546ms step_avg:230.82ms +[2025-07-17 12:03:18] [Rank 0] step:4521/10000 train_time:1043546ms step_avg:230.82ms +[2025-07-17 12:03:23] [Rank 0] step:4541/10000 train_time:1048342ms step_avg:230.86ms +[2025-07-17 12:03:23] [Rank 0] step:4541/10000 train_time:1048342ms step_avg:230.86ms +[2025-07-17 12:03:28] [Rank 0] step:4561/10000 train_time:1053132ms step_avg:230.90ms +[2025-07-17 12:03:28] [Rank 0] step:4561/10000 train_time:1053132ms step_avg:230.90ms +[2025-07-17 12:03:33] [Rank 0] step:4581/10000 train_time:1057926ms step_avg:230.94ms +[2025-07-17 12:03:33] [Rank 0] step:4581/10000 train_time:1057926ms step_avg:230.94ms +[2025-07-17 12:03:38] [Rank 0] step:4601/10000 train_time:1062724ms step_avg:230.98ms +[2025-07-17 12:03:38] [Rank 0] step:4601/10000 train_time:1062724ms step_avg:230.98ms +[2025-07-17 12:03:42] [Rank 0] step:4621/10000 train_time:1067519ms 
step_avg:231.01ms +[2025-07-17 12:03:42] [Rank 0] step:4621/10000 train_time:1067519ms step_avg:231.01ms +[2025-07-17 12:03:48] [Rank 0] PRINT: step:4625/10000 val_loss:4.2935 train_time:1068965ms step_avg:231.13ms +[2025-07-17 12:03:48] [Rank 0] PRINT: step:4625/10000 val_loss:4.2935 train_time:1068965ms step_avg:231.13ms +[2025-07-17 12:03:52] [Rank 0] step:4641/10000 train_time:1072314ms step_avg:231.05ms +[2025-07-17 12:03:52] [Rank 0] step:4641/10000 train_time:1072314ms step_avg:231.05ms +[2025-07-17 12:03:57] [Rank 0] step:4661/10000 train_time:1077109ms step_avg:231.09ms +[2025-07-17 12:03:57] [Rank 0] step:4661/10000 train_time:1077109ms step_avg:231.09ms +[2025-07-17 12:04:01] [Rank 0] step:4681/10000 train_time:1081902ms step_avg:231.13ms +[2025-07-17 12:04:01] [Rank 0] step:4681/10000 train_time:1081902ms step_avg:231.13ms +[2025-07-17 12:04:06] [Rank 0] step:4701/10000 train_time:1086699ms step_avg:231.16ms +[2025-07-17 12:04:06] [Rank 0] step:4701/10000 train_time:1086699ms step_avg:231.16ms +[2025-07-17 12:04:11] [Rank 0] step:4721/10000 train_time:1091487ms step_avg:231.20ms +[2025-07-17 12:04:11] [Rank 0] step:4721/10000 train_time:1091487ms step_avg:231.20ms +[2025-07-17 12:04:16] [Rank 0] step:4741/10000 train_time:1096276ms step_avg:231.23ms +[2025-07-17 12:04:16] [Rank 0] step:4741/10000 train_time:1096276ms step_avg:231.23ms +[2025-07-17 12:04:23] [Rank 0] PRINT: step:4750/10000 val_loss:4.2764 train_time:1098915ms step_avg:231.35ms +[2025-07-17 12:04:23] [Rank 0] PRINT: step:4750/10000 val_loss:4.2764 train_time:1098915ms step_avg:231.35ms +[2025-07-17 12:04:25] [Rank 0] step:4761/10000 train_time:1101065ms step_avg:231.27ms +[2025-07-17 12:04:25] [Rank 0] step:4761/10000 train_time:1101065ms step_avg:231.27ms +[2025-07-17 12:04:30] [Rank 0] step:4781/10000 train_time:1105847ms step_avg:231.30ms +[2025-07-17 12:04:30] [Rank 0] step:4781/10000 train_time:1105847ms step_avg:231.30ms +[2025-07-17 12:04:35] [Rank 0] step:4801/10000 
train_time:1110633ms step_avg:231.33ms +[2025-07-17 12:04:35] [Rank 0] step:4801/10000 train_time:1110633ms step_avg:231.33ms +[2025-07-17 12:04:40] [Rank 0] step:4821/10000 train_time:1115419ms step_avg:231.37ms +[2025-07-17 12:04:40] [Rank 0] step:4821/10000 train_time:1115419ms step_avg:231.37ms +[2025-07-17 12:04:44] [Rank 0] step:4841/10000 train_time:1120206ms step_avg:231.40ms +[2025-07-17 12:04:44] [Rank 0] step:4841/10000 train_time:1120206ms step_avg:231.40ms +[2025-07-17 12:04:49] [Rank 0] step:4861/10000 train_time:1124990ms step_avg:231.43ms +[2025-07-17 12:04:49] [Rank 0] step:4861/10000 train_time:1124990ms step_avg:231.43ms +[2025-07-17 12:04:57] [Rank 0] PRINT: step:4875/10000 val_loss:4.3052 train_time:1128824ms step_avg:231.55ms +[2025-07-17 12:04:57] [Rank 0] PRINT: step:4875/10000 val_loss:4.3052 train_time:1128824ms step_avg:231.55ms +[2025-07-17 12:04:59] [Rank 0] step:4881/10000 train_time:1129782ms step_avg:231.47ms +[2025-07-17 12:04:59] [Rank 0] step:4881/10000 train_time:1129782ms step_avg:231.47ms +[2025-07-17 12:05:03] [Rank 0] step:4901/10000 train_time:1134579ms step_avg:231.50ms +[2025-07-17 12:05:03] [Rank 0] step:4901/10000 train_time:1134579ms step_avg:231.50ms +[2025-07-17 12:05:08] [Rank 0] step:4921/10000 train_time:1139372ms step_avg:231.53ms +[2025-07-17 12:05:08] [Rank 0] step:4921/10000 train_time:1139372ms step_avg:231.53ms +[2025-07-17 12:05:13] [Rank 0] step:4941/10000 train_time:1144169ms step_avg:231.57ms +[2025-07-17 12:05:13] [Rank 0] step:4941/10000 train_time:1144169ms step_avg:231.57ms +[2025-07-17 12:05:18] [Rank 0] step:4961/10000 train_time:1148964ms step_avg:231.60ms +[2025-07-17 12:05:18] [Rank 0] step:4961/10000 train_time:1148964ms step_avg:231.60ms +[2025-07-17 12:05:23] [Rank 0] step:4981/10000 train_time:1153755ms step_avg:231.63ms +[2025-07-17 12:05:23] [Rank 0] step:4981/10000 train_time:1153755ms step_avg:231.63ms +[2025-07-17 12:05:32] [Rank 0] PRINT: step:5000/10000 val_loss:4.2503 
train_time:1158792ms step_avg:231.76ms +[2025-07-17 12:05:32] [Rank 0] PRINT: step:5000/10000 val_loss:4.2503 train_time:1158792ms step_avg:231.76ms +[2025-07-17 12:05:32] [Rank 0] step:5001/10000 train_time:1158805ms step_avg:231.71ms +[2025-07-17 12:05:32] [Rank 0] step:5001/10000 train_time:1158805ms step_avg:231.71ms +[2025-07-17 12:05:37] [Rank 0] step:5021/10000 train_time:1163342ms step_avg:231.70ms +[2025-07-17 12:05:37] [Rank 0] step:5021/10000 train_time:1163342ms step_avg:231.70ms +[2025-07-17 12:05:42] [Rank 0] step:5041/10000 train_time:1168134ms step_avg:231.73ms +[2025-07-17 12:05:42] [Rank 0] step:5041/10000 train_time:1168134ms step_avg:231.73ms +[2025-07-17 12:05:46] [Rank 0] step:5061/10000 train_time:1172924ms step_avg:231.76ms +[2025-07-17 12:05:46] [Rank 0] step:5061/10000 train_time:1172924ms step_avg:231.76ms +[2025-07-17 12:05:51] [Rank 0] step:5081/10000 train_time:1177712ms step_avg:231.79ms +[2025-07-17 12:05:51] [Rank 0] step:5081/10000 train_time:1177712ms step_avg:231.79ms +[2025-07-17 12:05:56] [Rank 0] step:5101/10000 train_time:1182502ms step_avg:231.82ms +[2025-07-17 12:05:56] [Rank 0] step:5101/10000 train_time:1182502ms step_avg:231.82ms +[2025-07-17 12:06:01] [Rank 0] step:5121/10000 train_time:1187288ms step_avg:231.85ms +[2025-07-17 12:06:01] [Rank 0] step:5121/10000 train_time:1187288ms step_avg:231.85ms +[2025-07-17 12:06:06] [Rank 0] PRINT: step:5125/10000 val_loss:4.2792 train_time:1188733ms step_avg:231.95ms +[2025-07-17 12:06:06] [Rank 0] PRINT: step:5125/10000 val_loss:4.2792 train_time:1188733ms step_avg:231.95ms +[2025-07-17 12:06:10] [Rank 0] step:5141/10000 train_time:1192083ms step_avg:231.88ms +[2025-07-17 12:06:10] [Rank 0] step:5141/10000 train_time:1192083ms step_avg:231.88ms +[2025-07-17 12:06:15] [Rank 0] step:5161/10000 train_time:1196875ms step_avg:231.91ms +[2025-07-17 12:06:15] [Rank 0] step:5161/10000 train_time:1196875ms step_avg:231.91ms +[2025-07-17 12:06:20] [Rank 0] step:5181/10000 
train_time:1201672ms step_avg:231.94ms +[2025-07-17 12:06:20] [Rank 0] step:5181/10000 train_time:1201672ms step_avg:231.94ms +[2025-07-17 12:06:25] [Rank 0] step:5201/10000 train_time:1206510ms step_avg:231.98ms +[2025-07-17 12:06:25] [Rank 0] step:5201/10000 train_time:1206510ms step_avg:231.98ms +[2025-07-17 12:06:29] [Rank 0] step:5221/10000 train_time:1211377ms step_avg:232.02ms +[2025-07-17 12:06:29] [Rank 0] step:5221/10000 train_time:1211377ms step_avg:232.02ms +[2025-07-17 12:06:34] [Rank 0] step:5241/10000 train_time:1216239ms step_avg:232.06ms +[2025-07-17 12:06:34] [Rank 0] step:5241/10000 train_time:1216239ms step_avg:232.06ms +[2025-07-17 12:06:41] [Rank 0] PRINT: step:5250/10000 val_loss:4.0729 train_time:1218919ms step_avg:232.18ms +[2025-07-17 12:06:41] [Rank 0] PRINT: step:5250/10000 val_loss:4.0729 train_time:1218919ms step_avg:232.18ms +[2025-07-17 12:06:44] [Rank 0] step:5261/10000 train_time:1221100ms step_avg:232.10ms +[2025-07-17 12:06:44] [Rank 0] step:5261/10000 train_time:1221100ms step_avg:232.10ms +[2025-07-17 12:06:49] [Rank 0] step:5281/10000 train_time:1225962ms step_avg:232.15ms +[2025-07-17 12:06:49] [Rank 0] step:5281/10000 train_time:1225962ms step_avg:232.15ms +[2025-07-17 12:06:54] [Rank 0] step:5301/10000 train_time:1230821ms step_avg:232.19ms +[2025-07-17 12:06:54] [Rank 0] step:5301/10000 train_time:1230821ms step_avg:232.19ms +[2025-07-17 12:06:58] [Rank 0] step:5321/10000 train_time:1235685ms step_avg:232.23ms +[2025-07-17 12:06:58] [Rank 0] step:5321/10000 train_time:1235685ms step_avg:232.23ms +[2025-07-17 12:07:03] [Rank 0] step:5341/10000 train_time:1240550ms step_avg:232.27ms +[2025-07-17 12:07:03] [Rank 0] step:5341/10000 train_time:1240550ms step_avg:232.27ms +[2025-07-17 12:07:08] [Rank 0] step:5361/10000 train_time:1245411ms step_avg:232.31ms +[2025-07-17 12:07:08] [Rank 0] step:5361/10000 train_time:1245411ms step_avg:232.31ms +[2025-07-17 12:07:16] [Rank 0] PRINT: step:5375/10000 val_loss:4.1157 
train_time:1249308ms step_avg:232.43ms +[2025-07-17 12:07:16] [Rank 0] PRINT: step:5375/10000 val_loss:4.1157 train_time:1249308ms step_avg:232.43ms +[2025-07-17 12:07:18] [Rank 0] step:5381/10000 train_time:1250280ms step_avg:232.35ms +[2025-07-17 12:07:18] [Rank 0] step:5381/10000 train_time:1250280ms step_avg:232.35ms +[2025-07-17 12:07:23] [Rank 0] step:5401/10000 train_time:1255142ms step_avg:232.39ms +[2025-07-17 12:07:23] [Rank 0] step:5401/10000 train_time:1255142ms step_avg:232.39ms +[2025-07-17 12:07:27] [Rank 0] step:5421/10000 train_time:1260011ms step_avg:232.43ms +[2025-07-17 12:07:27] [Rank 0] step:5421/10000 train_time:1260011ms step_avg:232.43ms +[2025-07-17 12:07:32] [Rank 0] step:5441/10000 train_time:1264877ms step_avg:232.47ms +[2025-07-17 12:07:32] [Rank 0] step:5441/10000 train_time:1264877ms step_avg:232.47ms +[2025-07-17 12:07:37] [Rank 0] step:5461/10000 train_time:1269749ms step_avg:232.51ms +[2025-07-17 12:07:37] [Rank 0] step:5461/10000 train_time:1269749ms step_avg:232.51ms +[2025-07-17 12:07:42] [Rank 0] step:5481/10000 train_time:1274621ms step_avg:232.55ms +[2025-07-17 12:07:42] [Rank 0] step:5481/10000 train_time:1274621ms step_avg:232.55ms +[2025-07-17 12:07:51] [Rank 0] PRINT: step:5500/10000 val_loss:4.1934 train_time:1279726ms step_avg:232.68ms +[2025-07-17 12:07:51] [Rank 0] PRINT: step:5500/10000 val_loss:4.1934 train_time:1279726ms step_avg:232.68ms +[2025-07-17 12:07:52] [Rank 0] step:5501/10000 train_time:1279739ms step_avg:232.64ms +[2025-07-17 12:07:52] [Rank 0] step:5501/10000 train_time:1279739ms step_avg:232.64ms +[2025-07-17 12:07:57] [Rank 0] step:5521/10000 train_time:1284344ms step_avg:232.63ms +[2025-07-17 12:07:57] [Rank 0] step:5521/10000 train_time:1284344ms step_avg:232.63ms +[2025-07-17 12:08:01] [Rank 0] step:5541/10000 train_time:1289214ms step_avg:232.67ms +[2025-07-17 12:08:01] [Rank 0] step:5541/10000 train_time:1289214ms step_avg:232.67ms +[2025-07-17 12:08:06] [Rank 0] step:5561/10000 
train_time:1294078ms step_avg:232.71ms +[2025-07-17 12:08:06] [Rank 0] step:5561/10000 train_time:1294078ms step_avg:232.71ms +[2025-07-17 12:08:11] [Rank 0] step:5581/10000 train_time:1298936ms step_avg:232.74ms +[2025-07-17 12:08:11] [Rank 0] step:5581/10000 train_time:1298936ms step_avg:232.74ms +[2025-07-17 12:08:16] [Rank 0] step:5601/10000 train_time:1303803ms step_avg:232.78ms +[2025-07-17 12:08:16] [Rank 0] step:5601/10000 train_time:1303803ms step_avg:232.78ms +[2025-07-17 12:08:21] [Rank 0] step:5621/10000 train_time:1308671ms step_avg:232.82ms +[2025-07-17 12:08:21] [Rank 0] step:5621/10000 train_time:1308671ms step_avg:232.82ms +[2025-07-17 12:08:27] [Rank 0] PRINT: step:5625/10000 val_loss:4.2202 train_time:1310133ms step_avg:232.91ms +[2025-07-17 12:08:27] [Rank 0] PRINT: step:5625/10000 val_loss:4.2202 train_time:1310133ms step_avg:232.91ms +[2025-07-17 12:08:30] [Rank 0] step:5641/10000 train_time:1313530ms step_avg:232.85ms +[2025-07-17 12:08:30] [Rank 0] step:5641/10000 train_time:1313530ms step_avg:232.85ms +[2025-07-17 12:08:35] [Rank 0] step:5661/10000 train_time:1318397ms step_avg:232.89ms +[2025-07-17 12:08:35] [Rank 0] step:5661/10000 train_time:1318397ms step_avg:232.89ms +[2025-07-17 12:08:40] [Rank 0] step:5681/10000 train_time:1323269ms step_avg:232.93ms +[2025-07-17 12:08:40] [Rank 0] step:5681/10000 train_time:1323269ms step_avg:232.93ms +[2025-07-17 12:08:45] [Rank 0] step:5701/10000 train_time:1328134ms step_avg:232.97ms +[2025-07-17 12:08:45] [Rank 0] step:5701/10000 train_time:1328134ms step_avg:232.97ms +[2025-07-17 12:08:50] [Rank 0] step:5721/10000 train_time:1332994ms step_avg:233.00ms +[2025-07-17 12:08:50] [Rank 0] step:5721/10000 train_time:1332994ms step_avg:233.00ms +[2025-07-17 12:08:55] [Rank 0] step:5741/10000 train_time:1337861ms step_avg:233.04ms +[2025-07-17 12:08:55] [Rank 0] step:5741/10000 train_time:1337861ms step_avg:233.04ms +[2025-07-17 12:09:02] [Rank 0] PRINT: step:5750/10000 val_loss:4.1960 
train_time:1340537ms step_avg:233.14ms +[2025-07-17 12:09:02] [Rank 0] PRINT: step:5750/10000 val_loss:4.1960 train_time:1340537ms step_avg:233.14ms +[2025-07-17 12:09:04] [Rank 0] step:5761/10000 train_time:1342722ms step_avg:233.07ms +[2025-07-17 12:09:04] [Rank 0] step:5761/10000 train_time:1342722ms step_avg:233.07ms +[2025-07-17 12:09:09] [Rank 0] step:5781/10000 train_time:1347578ms step_avg:233.10ms +[2025-07-17 12:09:09] [Rank 0] step:5781/10000 train_time:1347578ms step_avg:233.10ms +[2025-07-17 12:09:14] [Rank 0] step:5801/10000 train_time:1352426ms step_avg:233.14ms +[2025-07-17 12:09:14] [Rank 0] step:5801/10000 train_time:1352426ms step_avg:233.14ms +[2025-07-17 12:09:19] [Rank 0] step:5821/10000 train_time:1357278ms step_avg:233.17ms +[2025-07-17 12:09:19] [Rank 0] step:5821/10000 train_time:1357278ms step_avg:233.17ms +[2025-07-17 12:09:24] [Rank 0] step:5841/10000 train_time:1362132ms step_avg:233.20ms +[2025-07-17 12:09:24] [Rank 0] step:5841/10000 train_time:1362132ms step_avg:233.20ms +[2025-07-17 12:09:29] [Rank 0] step:5861/10000 train_time:1366981ms step_avg:233.23ms +[2025-07-17 12:09:29] [Rank 0] step:5861/10000 train_time:1366981ms step_avg:233.23ms +[2025-07-17 12:09:37] [Rank 0] PRINT: step:5875/10000 val_loss:4.2433 train_time:1370860ms step_avg:233.34ms +[2025-07-17 12:09:37] [Rank 0] PRINT: step:5875/10000 val_loss:4.2433 train_time:1370860ms step_avg:233.34ms +[2025-07-17 12:09:38] [Rank 0] step:5881/10000 train_time:1371831ms step_avg:233.26ms +[2025-07-17 12:09:38] [Rank 0] step:5881/10000 train_time:1371831ms step_avg:233.26ms +[2025-07-17 12:09:43] [Rank 0] step:5901/10000 train_time:1376693ms step_avg:233.30ms +[2025-07-17 12:09:43] [Rank 0] step:5901/10000 train_time:1376693ms step_avg:233.30ms +[2025-07-17 12:09:48] [Rank 0] step:5921/10000 train_time:1381548ms step_avg:233.33ms +[2025-07-17 12:09:48] [Rank 0] step:5921/10000 train_time:1381548ms step_avg:233.33ms +[2025-07-17 12:09:53] [Rank 0] step:5941/10000 
train_time:1386417ms step_avg:233.36ms +[2025-07-17 12:09:53] [Rank 0] step:5941/10000 train_time:1386417ms step_avg:233.36ms +[2025-07-17 12:09:58] [Rank 0] step:5961/10000 train_time:1391289ms step_avg:233.40ms +[2025-07-17 12:09:58] [Rank 0] step:5961/10000 train_time:1391289ms step_avg:233.40ms +[2025-07-17 12:10:02] [Rank 0] step:5981/10000 train_time:1396155ms step_avg:233.43ms +[2025-07-17 12:10:02] [Rank 0] step:5981/10000 train_time:1396155ms step_avg:233.43ms +[2025-07-17 12:10:12] [Rank 0] PRINT: step:6000/10000 val_loss:4.1443 train_time:1401274ms step_avg:233.55ms +[2025-07-17 12:10:12] [Rank 0] PRINT: step:6000/10000 val_loss:4.1443 train_time:1401274ms step_avg:233.55ms +[2025-07-17 12:10:12] [Rank 0] step:6001/10000 train_time:1401288ms step_avg:233.51ms +[2025-07-17 12:10:12] [Rank 0] step:6001/10000 train_time:1401288ms step_avg:233.51ms +[2025-07-17 12:10:17] [Rank 0] step:6021/10000 train_time:1405899ms step_avg:233.50ms +[2025-07-17 12:10:17] [Rank 0] step:6021/10000 train_time:1405899ms step_avg:233.50ms +[2025-07-17 12:10:22] [Rank 0] step:6041/10000 train_time:1410769ms step_avg:233.53ms +[2025-07-17 12:10:22] [Rank 0] step:6041/10000 train_time:1410769ms step_avg:233.53ms +[2025-07-17 12:10:27] [Rank 0] step:6061/10000 train_time:1415630ms step_avg:233.56ms +[2025-07-17 12:10:27] [Rank 0] step:6061/10000 train_time:1415630ms step_avg:233.56ms +[2025-07-17 12:10:32] [Rank 0] step:6081/10000 train_time:1420494ms step_avg:233.60ms +[2025-07-17 12:10:32] [Rank 0] step:6081/10000 train_time:1420494ms step_avg:233.60ms +[2025-07-17 12:10:36] [Rank 0] step:6101/10000 train_time:1425359ms step_avg:233.63ms +[2025-07-17 12:10:36] [Rank 0] step:6101/10000 train_time:1425359ms step_avg:233.63ms +[2025-07-17 12:10:41] [Rank 0] step:6121/10000 train_time:1430230ms step_avg:233.66ms +[2025-07-17 12:10:41] [Rank 0] step:6121/10000 train_time:1430230ms step_avg:233.66ms +[2025-07-17 12:10:47] [Rank 0] PRINT: step:6125/10000 val_loss:4.2410 
train_time:1431694ms step_avg:233.75ms +[2025-07-17 12:10:47] [Rank 0] PRINT: step:6125/10000 val_loss:4.2410 train_time:1431694ms step_avg:233.75ms +[2025-07-17 12:10:51] [Rank 0] step:6141/10000 train_time:1435099ms step_avg:233.69ms +[2025-07-17 12:10:51] [Rank 0] step:6141/10000 train_time:1435099ms step_avg:233.69ms +[2025-07-17 12:10:56] [Rank 0] step:6161/10000 train_time:1439964ms step_avg:233.72ms +[2025-07-17 12:10:56] [Rank 0] step:6161/10000 train_time:1439964ms step_avg:233.72ms +[2025-07-17 12:11:01] [Rank 0] step:6181/10000 train_time:1444840ms step_avg:233.76ms +[2025-07-17 12:11:01] [Rank 0] step:6181/10000 train_time:1444840ms step_avg:233.76ms +[2025-07-17 12:11:05] [Rank 0] step:6201/10000 train_time:1449722ms step_avg:233.79ms +[2025-07-17 12:11:05] [Rank 0] step:6201/10000 train_time:1449722ms step_avg:233.79ms +[2025-07-17 12:11:10] [Rank 0] step:6221/10000 train_time:1454599ms step_avg:233.82ms +[2025-07-17 12:11:10] [Rank 0] step:6221/10000 train_time:1454599ms step_avg:233.82ms +[2025-07-17 12:11:15] [Rank 0] step:6241/10000 train_time:1459480ms step_avg:233.85ms +[2025-07-17 12:11:15] [Rank 0] step:6241/10000 train_time:1459480ms step_avg:233.85ms +[2025-07-17 12:11:22] [Rank 0] PRINT: step:6250/10000 val_loss:4.2806 train_time:1462168ms step_avg:233.95ms +[2025-07-17 12:11:22] [Rank 0] PRINT: step:6250/10000 val_loss:4.2806 train_time:1462168ms step_avg:233.95ms +[2025-07-17 12:11:25] [Rank 0] step:6261/10000 train_time:1464356ms step_avg:233.89ms +[2025-07-17 12:11:25] [Rank 0] step:6261/10000 train_time:1464356ms step_avg:233.89ms +[2025-07-17 12:11:30] [Rank 0] step:6281/10000 train_time:1469238ms step_avg:233.92ms +[2025-07-17 12:11:30] [Rank 0] step:6281/10000 train_time:1469238ms step_avg:233.92ms +[2025-07-17 12:11:35] [Rank 0] step:6301/10000 train_time:1474116ms step_avg:233.95ms +[2025-07-17 12:11:35] [Rank 0] step:6301/10000 train_time:1474116ms step_avg:233.95ms +[2025-07-17 12:11:39] [Rank 0] step:6321/10000 
train_time:1478999ms step_avg:233.98ms +[2025-07-17 12:11:39] [Rank 0] step:6321/10000 train_time:1478999ms step_avg:233.98ms +[2025-07-17 12:11:44] [Rank 0] step:6341/10000 train_time:1483882ms step_avg:234.01ms +[2025-07-17 12:11:44] [Rank 0] step:6341/10000 train_time:1483882ms step_avg:234.01ms +[2025-07-17 12:11:49] [Rank 0] step:6361/10000 train_time:1488759ms step_avg:234.04ms +[2025-07-17 12:11:49] [Rank 0] step:6361/10000 train_time:1488759ms step_avg:234.04ms +[2025-07-17 12:11:57] [Rank 0] PRINT: step:6375/10000 val_loss:4.2375 train_time:1492659ms step_avg:234.14ms +[2025-07-17 12:11:57] [Rank 0] PRINT: step:6375/10000 val_loss:4.2375 train_time:1492659ms step_avg:234.14ms +[2025-07-17 12:11:59] [Rank 0] step:6381/10000 train_time:1493634ms step_avg:234.08ms +[2025-07-17 12:11:59] [Rank 0] step:6381/10000 train_time:1493634ms step_avg:234.08ms +[2025-07-17 12:12:04] [Rank 0] step:6401/10000 train_time:1498507ms step_avg:234.11ms +[2025-07-17 12:12:04] [Rank 0] step:6401/10000 train_time:1498507ms step_avg:234.11ms +[2025-07-17 12:12:09] [Rank 0] step:6421/10000 train_time:1503384ms step_avg:234.14ms +[2025-07-17 12:12:09] [Rank 0] step:6421/10000 train_time:1503384ms step_avg:234.14ms +[2025-07-17 12:12:13] [Rank 0] step:6441/10000 train_time:1508264ms step_avg:234.17ms +[2025-07-17 12:12:13] [Rank 0] step:6441/10000 train_time:1508264ms step_avg:234.17ms +[2025-07-17 12:12:18] [Rank 0] step:6461/10000 train_time:1513157ms step_avg:234.20ms +[2025-07-17 12:12:18] [Rank 0] step:6461/10000 train_time:1513157ms step_avg:234.20ms +[2025-07-17 12:12:23] [Rank 0] step:6481/10000 train_time:1518038ms step_avg:234.23ms +[2025-07-17 12:12:23] [Rank 0] step:6481/10000 train_time:1518038ms step_avg:234.23ms +[2025-07-17 12:12:32] [Rank 0] PRINT: step:6500/10000 val_loss:4.1764 train_time:1523162ms step_avg:234.33ms +[2025-07-17 12:12:32] [Rank 0] PRINT: step:6500/10000 val_loss:4.1764 train_time:1523162ms step_avg:234.33ms +[2025-07-17 12:12:32] [Rank 0] 
step:6501/10000 train_time:1523175ms step_avg:234.30ms +[2025-07-17 12:12:32] [Rank 0] step:6501/10000 train_time:1523175ms step_avg:234.30ms +[2025-07-17 12:12:37] [Rank 0] step:6521/10000 train_time:1527796ms step_avg:234.29ms +[2025-07-17 12:12:37] [Rank 0] step:6521/10000 train_time:1527796ms step_avg:234.29ms +[2025-07-17 12:12:42] [Rank 0] step:6541/10000 train_time:1532676ms step_avg:234.32ms +[2025-07-17 12:12:42] [Rank 0] step:6541/10000 train_time:1532676ms step_avg:234.32ms +[2025-07-17 12:12:47] [Rank 0] step:6561/10000 train_time:1537567ms step_avg:234.35ms +[2025-07-17 12:12:47] [Rank 0] step:6561/10000 train_time:1537567ms step_avg:234.35ms +[2025-07-17 12:12:52] [Rank 0] step:6581/10000 train_time:1542455ms step_avg:234.38ms +[2025-07-17 12:12:52] [Rank 0] step:6581/10000 train_time:1542455ms step_avg:234.38ms +[2025-07-17 12:12:57] [Rank 0] step:6601/10000 train_time:1547346ms step_avg:234.41ms +[2025-07-17 12:12:57] [Rank 0] step:6601/10000 train_time:1547346ms step_avg:234.41ms +[2025-07-17 12:13:02] [Rank 0] step:6621/10000 train_time:1552226ms step_avg:234.44ms +[2025-07-17 12:13:02] [Rank 0] step:6621/10000 train_time:1552226ms step_avg:234.44ms +[2025-07-17 12:13:07] [Rank 0] PRINT: step:6625/10000 val_loss:4.2462 train_time:1553695ms step_avg:234.52ms +[2025-07-17 12:13:07] [Rank 0] PRINT: step:6625/10000 val_loss:4.2462 train_time:1553695ms step_avg:234.52ms +[2025-07-17 12:13:11] [Rank 0] step:6641/10000 train_time:1557104ms step_avg:234.47ms +[2025-07-17 12:13:11] [Rank 0] step:6641/10000 train_time:1557104ms step_avg:234.47ms +[2025-07-17 12:13:16] [Rank 0] step:6661/10000 train_time:1561987ms step_avg:234.50ms +[2025-07-17 12:13:16] [Rank 0] step:6661/10000 train_time:1561987ms step_avg:234.50ms +[2025-07-17 12:13:21] [Rank 0] step:6681/10000 train_time:1566915ms step_avg:234.53ms +[2025-07-17 12:13:21] [Rank 0] step:6681/10000 train_time:1566915ms step_avg:234.53ms +[2025-07-17 12:13:26] [Rank 0] step:6701/10000 train_time:1571856ms 
step_avg:234.57ms +[2025-07-17 12:13:26] [Rank 0] step:6701/10000 train_time:1571856ms step_avg:234.57ms +[2025-07-17 12:13:31] [Rank 0] step:6721/10000 train_time:1576817ms step_avg:234.61ms +[2025-07-17 12:13:31] [Rank 0] step:6721/10000 train_time:1576817ms step_avg:234.61ms +[2025-07-17 12:13:36] [Rank 0] step:6741/10000 train_time:1581776ms step_avg:234.65ms +[2025-07-17 12:13:36] [Rank 0] step:6741/10000 train_time:1581776ms step_avg:234.65ms +[2025-07-17 12:13:43] [Rank 0] PRINT: step:6750/10000 val_loss:4.1360 train_time:1584494ms step_avg:234.74ms +[2025-07-17 12:13:43] [Rank 0] PRINT: step:6750/10000 val_loss:4.1360 train_time:1584494ms step_avg:234.74ms +[2025-07-17 12:13:46] [Rank 0] step:6761/10000 train_time:1586714ms step_avg:234.69ms +[2025-07-17 12:13:46] [Rank 0] step:6761/10000 train_time:1586714ms step_avg:234.69ms +[2025-07-17 12:13:51] [Rank 0] step:6781/10000 train_time:1591658ms step_avg:234.72ms +[2025-07-17 12:13:51] [Rank 0] step:6781/10000 train_time:1591658ms step_avg:234.72ms +[2025-07-17 12:13:55] [Rank 0] step:6801/10000 train_time:1596610ms step_avg:234.76ms +[2025-07-17 12:13:55] [Rank 0] step:6801/10000 train_time:1596610ms step_avg:234.76ms +[2025-07-17 12:14:00] [Rank 0] step:6821/10000 train_time:1601555ms step_avg:234.80ms +[2025-07-17 12:14:00] [Rank 0] step:6821/10000 train_time:1601555ms step_avg:234.80ms +[2025-07-17 12:14:05] [Rank 0] step:6841/10000 train_time:1606502ms step_avg:234.83ms +[2025-07-17 12:14:05] [Rank 0] step:6841/10000 train_time:1606502ms step_avg:234.83ms +[2025-07-17 12:14:10] [Rank 0] step:6861/10000 train_time:1611438ms step_avg:234.87ms +[2025-07-17 12:14:10] [Rank 0] step:6861/10000 train_time:1611438ms step_avg:234.87ms +[2025-07-17 12:14:19] [Rank 0] PRINT: step:6875/10000 val_loss:4.1771 train_time:1615389ms step_avg:234.97ms +[2025-07-17 12:14:19] [Rank 0] PRINT: step:6875/10000 val_loss:4.1771 train_time:1615389ms step_avg:234.97ms +[2025-07-17 12:14:20] [Rank 0] step:6881/10000 
train_time:1616374ms step_avg:234.90ms +[2025-07-17 12:14:20] [Rank 0] step:6881/10000 train_time:1616374ms step_avg:234.90ms +[2025-07-17 12:14:25] [Rank 0] step:6901/10000 train_time:1621309ms step_avg:234.94ms +[2025-07-17 12:14:25] [Rank 0] step:6901/10000 train_time:1621309ms step_avg:234.94ms +[2025-07-17 12:14:30] [Rank 0] step:6921/10000 train_time:1626248ms step_avg:234.97ms +[2025-07-17 12:14:30] [Rank 0] step:6921/10000 train_time:1626248ms step_avg:234.97ms +[2025-07-17 12:14:35] [Rank 0] step:6941/10000 train_time:1631200ms step_avg:235.01ms +[2025-07-17 12:14:35] [Rank 0] step:6941/10000 train_time:1631200ms step_avg:235.01ms +[2025-07-17 12:14:40] [Rank 0] step:6961/10000 train_time:1636148ms step_avg:235.04ms +[2025-07-17 12:14:40] [Rank 0] step:6961/10000 train_time:1636148ms step_avg:235.04ms +[2025-07-17 12:14:45] [Rank 0] step:6981/10000 train_time:1641097ms step_avg:235.08ms +[2025-07-17 12:14:45] [Rank 0] step:6981/10000 train_time:1641097ms step_avg:235.08ms +[2025-07-17 12:14:54] [Rank 0] PRINT: step:7000/10000 val_loss:4.2167 train_time:1646290ms step_avg:235.18ms +[2025-07-17 12:14:54] [Rank 0] PRINT: step:7000/10000 val_loss:4.2167 train_time:1646290ms step_avg:235.18ms +[2025-07-17 12:14:54] [Rank 0] step:7001/10000 train_time:1646303ms step_avg:235.15ms +[2025-07-17 12:14:54] [Rank 0] step:7001/10000 train_time:1646303ms step_avg:235.15ms +[2025-07-17 12:14:59] [Rank 0] step:7021/10000 train_time:1650985ms step_avg:235.15ms +[2025-07-17 12:14:59] [Rank 0] step:7021/10000 train_time:1650985ms step_avg:235.15ms +[2025-07-17 12:15:04] [Rank 0] step:7041/10000 train_time:1655924ms step_avg:235.18ms +[2025-07-17 12:15:04] [Rank 0] step:7041/10000 train_time:1655924ms step_avg:235.18ms +[2025-07-17 12:15:09] [Rank 0] step:7061/10000 train_time:1660862ms step_avg:235.22ms +[2025-07-17 12:15:09] [Rank 0] step:7061/10000 train_time:1660862ms step_avg:235.22ms +[2025-07-17 12:15:14] [Rank 0] step:7081/10000 train_time:1665807ms step_avg:235.25ms 
+[2025-07-17 12:15:14] [Rank 0] step:7081/10000 train_time:1665807ms step_avg:235.25ms +[2025-07-17 12:15:19] [Rank 0] step:7101/10000 train_time:1670738ms step_avg:235.28ms +[2025-07-17 12:15:19] [Rank 0] step:7101/10000 train_time:1670738ms step_avg:235.28ms +[2025-07-17 12:15:24] [Rank 0] step:7121/10000 train_time:1675681ms step_avg:235.32ms +[2025-07-17 12:15:24] [Rank 0] step:7121/10000 train_time:1675681ms step_avg:235.32ms +[2025-07-17 12:15:30] [Rank 0] PRINT: step:7125/10000 val_loss:4.1796 train_time:1677162ms step_avg:235.39ms +[2025-07-17 12:15:30] [Rank 0] PRINT: step:7125/10000 val_loss:4.1796 train_time:1677162ms step_avg:235.39ms +[2025-07-17 12:15:34] [Rank 0] step:7141/10000 train_time:1680624ms step_avg:235.35ms +[2025-07-17 12:15:34] [Rank 0] step:7141/10000 train_time:1680624ms step_avg:235.35ms +[2025-07-17 12:15:39] [Rank 0] step:7161/10000 train_time:1685568ms step_avg:235.38ms +[2025-07-17 12:15:39] [Rank 0] step:7161/10000 train_time:1685568ms step_avg:235.38ms +[2025-07-17 12:15:44] [Rank 0] step:7181/10000 train_time:1690505ms step_avg:235.41ms +[2025-07-17 12:15:44] [Rank 0] step:7181/10000 train_time:1690505ms step_avg:235.41ms +[2025-07-17 12:15:49] [Rank 0] step:7201/10000 train_time:1695459ms step_avg:235.45ms +[2025-07-17 12:15:49] [Rank 0] step:7201/10000 train_time:1695459ms step_avg:235.45ms +[2025-07-17 12:15:54] [Rank 0] step:7221/10000 train_time:1700403ms step_avg:235.48ms +[2025-07-17 12:15:54] [Rank 0] step:7221/10000 train_time:1700403ms step_avg:235.48ms +[2025-07-17 12:15:59] [Rank 0] step:7241/10000 train_time:1705343ms step_avg:235.51ms +[2025-07-17 12:15:59] [Rank 0] step:7241/10000 train_time:1705343ms step_avg:235.51ms +[2025-07-17 12:16:06] [Rank 0] PRINT: step:7250/10000 val_loss:4.1762 train_time:1708066ms step_avg:235.60ms +[2025-07-17 12:16:06] [Rank 0] PRINT: step:7250/10000 val_loss:4.1762 train_time:1708066ms step_avg:235.60ms +[2025-07-17 12:16:08] [Rank 0] step:7261/10000 train_time:1710280ms 
step_avg:235.54ms +[2025-07-17 12:16:08] [Rank 0] step:7261/10000 train_time:1710280ms step_avg:235.54ms +[2025-07-17 12:16:13] [Rank 0] step:7281/10000 train_time:1715221ms step_avg:235.57ms +[2025-07-17 12:16:13] [Rank 0] step:7281/10000 train_time:1715221ms step_avg:235.57ms +[2025-07-17 12:16:18] [Rank 0] step:7301/10000 train_time:1720160ms step_avg:235.61ms +[2025-07-17 12:16:18] [Rank 0] step:7301/10000 train_time:1720160ms step_avg:235.61ms +[2025-07-17 12:16:23] [Rank 0] step:7321/10000 train_time:1725112ms step_avg:235.64ms +[2025-07-17 12:16:23] [Rank 0] step:7321/10000 train_time:1725112ms step_avg:235.64ms +[2025-07-17 12:16:28] [Rank 0] step:7341/10000 train_time:1730048ms step_avg:235.67ms +[2025-07-17 12:16:28] [Rank 0] step:7341/10000 train_time:1730048ms step_avg:235.67ms +[2025-07-17 12:16:33] [Rank 0] step:7361/10000 train_time:1734996ms step_avg:235.70ms +[2025-07-17 12:16:33] [Rank 0] step:7361/10000 train_time:1734996ms step_avg:235.70ms +[2025-07-17 12:16:41] [Rank 0] PRINT: step:7375/10000 val_loss:4.2418 train_time:1738950ms step_avg:235.79ms +[2025-07-17 12:16:41] [Rank 0] PRINT: step:7375/10000 val_loss:4.2418 train_time:1738950ms step_avg:235.79ms +[2025-07-17 12:16:43] [Rank 0] step:7381/10000 train_time:1739934ms step_avg:235.73ms +[2025-07-17 12:16:43] [Rank 0] step:7381/10000 train_time:1739934ms step_avg:235.73ms +[2025-07-17 12:16:48] [Rank 0] step:7401/10000 train_time:1744880ms step_avg:235.76ms +[2025-07-17 12:16:48] [Rank 0] step:7401/10000 train_time:1744880ms step_avg:235.76ms +[2025-07-17 12:16:53] [Rank 0] step:7421/10000 train_time:1749818ms step_avg:235.79ms +[2025-07-17 12:16:53] [Rank 0] step:7421/10000 train_time:1749818ms step_avg:235.79ms +[2025-07-17 12:16:57] [Rank 0] step:7441/10000 train_time:1754773ms step_avg:235.82ms +[2025-07-17 12:16:57] [Rank 0] step:7441/10000 train_time:1754773ms step_avg:235.82ms +[2025-07-17 12:17:02] [Rank 0] step:7461/10000 train_time:1759714ms step_avg:235.85ms +[2025-07-17 
12:17:02] [Rank 0] step:7461/10000 train_time:1759714ms step_avg:235.85ms +[2025-07-17 12:17:07] [Rank 0] step:7481/10000 train_time:1764667ms step_avg:235.89ms +[2025-07-17 12:17:07] [Rank 0] step:7481/10000 train_time:1764667ms step_avg:235.89ms +[2025-07-17 12:17:17] [Rank 0] PRINT: step:7500/10000 val_loss:4.2392 train_time:1769878ms step_avg:235.98ms +[2025-07-17 12:17:17] [Rank 0] PRINT: step:7500/10000 val_loss:4.2392 train_time:1769878ms step_avg:235.98ms +[2025-07-17 12:17:17] [Rank 0] step:7501/10000 train_time:1769892ms step_avg:235.95ms +[2025-07-17 12:17:17] [Rank 0] step:7501/10000 train_time:1769892ms step_avg:235.95ms +[2025-07-17 12:17:22] [Rank 0] step:7521/10000 train_time:1774585ms step_avg:235.95ms +[2025-07-17 12:17:22] [Rank 0] step:7521/10000 train_time:1774585ms step_avg:235.95ms +[2025-07-17 12:17:27] [Rank 0] step:7541/10000 train_time:1779534ms step_avg:235.98ms +[2025-07-17 12:17:27] [Rank 0] step:7541/10000 train_time:1779534ms step_avg:235.98ms +[2025-07-17 12:17:32] [Rank 0] step:7561/10000 train_time:1784476ms step_avg:236.01ms +[2025-07-17 12:17:32] [Rank 0] step:7561/10000 train_time:1784476ms step_avg:236.01ms +[2025-07-17 12:17:37] [Rank 0] step:7581/10000 train_time:1789430ms step_avg:236.04ms +[2025-07-17 12:17:37] [Rank 0] step:7581/10000 train_time:1789430ms step_avg:236.04ms +[2025-07-17 12:17:42] [Rank 0] step:7601/10000 train_time:1794389ms step_avg:236.07ms +[2025-07-17 12:17:42] [Rank 0] step:7601/10000 train_time:1794389ms step_avg:236.07ms +[2025-07-17 12:17:47] [Rank 0] step:7621/10000 train_time:1799362ms step_avg:236.11ms +[2025-07-17 12:17:47] [Rank 0] step:7621/10000 train_time:1799362ms step_avg:236.11ms +[2025-07-17 12:17:53] [Rank 0] PRINT: step:7625/10000 val_loss:4.2691 train_time:1800850ms step_avg:236.18ms +[2025-07-17 12:17:53] [Rank 0] PRINT: step:7625/10000 val_loss:4.2691 train_time:1800850ms step_avg:236.18ms +[2025-07-17 12:17:57] [Rank 0] step:7641/10000 train_time:1804308ms step_avg:236.14ms 
+[2025-07-17 12:17:57] [Rank 0] step:7641/10000 train_time:1804308ms step_avg:236.14ms +[2025-07-17 12:18:02] [Rank 0] step:7661/10000 train_time:1809266ms step_avg:236.17ms +[2025-07-17 12:18:02] [Rank 0] step:7661/10000 train_time:1809266ms step_avg:236.17ms +[2025-07-17 12:18:06] [Rank 0] step:7681/10000 train_time:1814238ms step_avg:236.20ms +[2025-07-17 12:18:06] [Rank 0] step:7681/10000 train_time:1814238ms step_avg:236.20ms +[2025-07-17 12:18:11] [Rank 0] step:7701/10000 train_time:1819197ms step_avg:236.23ms +[2025-07-17 12:18:11] [Rank 0] step:7701/10000 train_time:1819197ms step_avg:236.23ms +[2025-07-17 12:18:16] [Rank 0] step:7721/10000 train_time:1824158ms step_avg:236.26ms +[2025-07-17 12:18:16] [Rank 0] step:7721/10000 train_time:1824158ms step_avg:236.26ms +[2025-07-17 12:18:21] [Rank 0] step:7741/10000 train_time:1829110ms step_avg:236.29ms +[2025-07-17 12:18:21] [Rank 0] step:7741/10000 train_time:1829110ms step_avg:236.29ms +[2025-07-17 12:18:28] [Rank 0] PRINT: step:7750/10000 val_loss:4.3037 train_time:1831854ms step_avg:236.37ms +[2025-07-17 12:18:28] [Rank 0] PRINT: step:7750/10000 val_loss:4.3037 train_time:1831854ms step_avg:236.37ms +[2025-07-17 12:18:30] [Rank 0] step:7761/10000 train_time:1834085ms step_avg:236.32ms +[2025-07-17 12:18:30] [Rank 0] step:7761/10000 train_time:1834085ms step_avg:236.32ms +[2025-07-17 12:18:35] [Rank 0] step:7781/10000 train_time:1839053ms step_avg:236.35ms +[2025-07-17 12:18:35] [Rank 0] step:7781/10000 train_time:1839053ms step_avg:236.35ms +[2025-07-17 12:18:40] [Rank 0] step:7801/10000 train_time:1844022ms step_avg:236.38ms +[2025-07-17 12:18:40] [Rank 0] step:7801/10000 train_time:1844022ms step_avg:236.38ms +[2025-07-17 12:18:45] [Rank 0] step:7821/10000 train_time:1848986ms step_avg:236.41ms +[2025-07-17 12:18:45] [Rank 0] step:7821/10000 train_time:1848986ms step_avg:236.41ms +[2025-07-17 12:18:50] [Rank 0] step:7841/10000 train_time:1853950ms step_avg:236.44ms +[2025-07-17 12:18:50] [Rank 0] 
step:7841/10000 train_time:1853950ms step_avg:236.44ms +[2025-07-17 12:18:55] [Rank 0] step:7861/10000 train_time:1858901ms step_avg:236.47ms +[2025-07-17 12:18:55] [Rank 0] step:7861/10000 train_time:1858901ms step_avg:236.47ms +[2025-07-17 12:19:03] [Rank 0] PRINT: step:7875/10000 val_loss:4.2796 train_time:1862872ms step_avg:236.56ms +[2025-07-17 12:19:03] [Rank 0] PRINT: step:7875/10000 val_loss:4.2796 train_time:1862872ms step_avg:236.56ms +[2025-07-17 12:19:05] [Rank 0] step:7881/10000 train_time:1863855ms step_avg:236.50ms +[2025-07-17 12:19:05] [Rank 0] step:7881/10000 train_time:1863855ms step_avg:236.50ms +[2025-07-17 12:19:09] [Rank 0] step:7901/10000 train_time:1868805ms step_avg:236.53ms +[2025-07-17 12:19:09] [Rank 0] step:7901/10000 train_time:1868805ms step_avg:236.53ms +[2025-07-17 12:19:14] [Rank 0] step:7921/10000 train_time:1873760ms step_avg:236.56ms +[2025-07-17 12:19:14] [Rank 0] step:7921/10000 train_time:1873760ms step_avg:236.56ms +[2025-07-17 12:19:19] [Rank 0] step:7941/10000 train_time:1878719ms step_avg:236.58ms +[2025-07-17 12:19:19] [Rank 0] step:7941/10000 train_time:1878719ms step_avg:236.58ms +[2025-07-17 12:19:24] [Rank 0] step:7961/10000 train_time:1883693ms step_avg:236.62ms +[2025-07-17 12:19:24] [Rank 0] step:7961/10000 train_time:1883693ms step_avg:236.62ms +[2025-07-17 12:19:29] [Rank 0] step:7981/10000 train_time:1888644ms step_avg:236.64ms +[2025-07-17 12:19:29] [Rank 0] step:7981/10000 train_time:1888644ms step_avg:236.64ms +[2025-07-17 12:19:39] [Rank 0] PRINT: step:8000/10000 val_loss:4.3417 train_time:1893864ms step_avg:236.73ms +[2025-07-17 12:19:39] [Rank 0] PRINT: step:8000/10000 val_loss:4.3417 train_time:1893864ms step_avg:236.73ms +[2025-07-17 12:19:39] [Rank 0] step:8001/10000 train_time:1893879ms step_avg:236.71ms +[2025-07-17 12:19:39] [Rank 0] step:8001/10000 train_time:1893879ms step_avg:236.71ms +[2025-07-17 12:19:44] [Rank 0] step:8021/10000 train_time:1898565ms step_avg:236.70ms +[2025-07-17 12:19:44] 
[Rank 0] step:8021/10000 train_time:1898565ms step_avg:236.70ms +[2025-07-17 12:19:49] [Rank 0] step:8041/10000 train_time:1903541ms step_avg:236.73ms +[2025-07-17 12:19:49] [Rank 0] step:8041/10000 train_time:1903541ms step_avg:236.73ms +[2025-07-17 12:19:54] [Rank 0] step:8061/10000 train_time:1908490ms step_avg:236.76ms +[2025-07-17 12:19:54] [Rank 0] step:8061/10000 train_time:1908490ms step_avg:236.76ms +[2025-07-17 12:19:59] [Rank 0] step:8081/10000 train_time:1913451ms step_avg:236.78ms +[2025-07-17 12:19:59] [Rank 0] step:8081/10000 train_time:1913451ms step_avg:236.78ms +[2025-07-17 12:20:04] [Rank 0] step:8101/10000 train_time:1918396ms step_avg:236.81ms +[2025-07-17 12:20:04] [Rank 0] step:8101/10000 train_time:1918396ms step_avg:236.81ms +[2025-07-17 12:20:09] [Rank 0] step:8121/10000 train_time:1923352ms step_avg:236.84ms +[2025-07-17 12:20:09] [Rank 0] step:8121/10000 train_time:1923352ms step_avg:236.84ms +[2025-07-17 12:20:15] [Rank 0] PRINT: step:8125/10000 val_loss:4.2831 train_time:1924840ms step_avg:236.90ms +[2025-07-17 12:20:15] [Rank 0] PRINT: step:8125/10000 val_loss:4.2831 train_time:1924840ms step_avg:236.90ms +[2025-07-17 12:20:19] [Rank 0] step:8141/10000 train_time:1928310ms step_avg:236.86ms +[2025-07-17 12:20:19] [Rank 0] step:8141/10000 train_time:1928310ms step_avg:236.86ms +[2025-07-17 12:20:24] [Rank 0] step:8161/10000 train_time:1933301ms step_avg:236.90ms +[2025-07-17 12:20:24] [Rank 0] step:8161/10000 train_time:1933301ms step_avg:236.90ms +[2025-07-17 12:20:29] [Rank 0] step:8181/10000 train_time:1938322ms step_avg:236.93ms +[2025-07-17 12:20:29] [Rank 0] step:8181/10000 train_time:1938322ms step_avg:236.93ms +[2025-07-17 12:20:34] [Rank 0] step:8201/10000 train_time:1943325ms step_avg:236.96ms +[2025-07-17 12:20:34] [Rank 0] step:8201/10000 train_time:1943325ms step_avg:236.96ms +[2025-07-17 12:20:39] [Rank 0] step:8221/10000 train_time:1948343ms step_avg:237.00ms +[2025-07-17 12:20:39] [Rank 0] step:8221/10000 
train_time:1948343ms step_avg:237.00ms +[2025-07-17 12:20:44] [Rank 0] step:8241/10000 train_time:1953356ms step_avg:237.03ms +[2025-07-17 12:20:44] [Rank 0] step:8241/10000 train_time:1953356ms step_avg:237.03ms +[2025-07-17 12:20:51] [Rank 0] PRINT: step:8250/10000 val_loss:4.4174 train_time:1956122ms step_avg:237.11ms +[2025-07-17 12:20:51] [Rank 0] PRINT: step:8250/10000 val_loss:4.4174 train_time:1956122ms step_avg:237.11ms +[2025-07-17 12:20:53] [Rank 0] step:8261/10000 train_time:1958373ms step_avg:237.06ms +[2025-07-17 12:20:53] [Rank 0] step:8261/10000 train_time:1958373ms step_avg:237.06ms +[2025-07-17 12:20:58] [Rank 0] step:8281/10000 train_time:1963404ms step_avg:237.10ms +[2025-07-17 12:20:58] [Rank 0] step:8281/10000 train_time:1963404ms step_avg:237.10ms +[2025-07-17 12:21:03] [Rank 0] step:8301/10000 train_time:1968406ms step_avg:237.13ms +[2025-07-17 12:21:03] [Rank 0] step:8301/10000 train_time:1968406ms step_avg:237.13ms +[2025-07-17 12:21:08] [Rank 0] step:8321/10000 train_time:1973421ms step_avg:237.16ms +[2025-07-17 12:21:08] [Rank 0] step:8321/10000 train_time:1973421ms step_avg:237.16ms +[2025-07-17 12:21:13] [Rank 0] step:8341/10000 train_time:1978446ms step_avg:237.20ms +[2025-07-17 12:21:13] [Rank 0] step:8341/10000 train_time:1978446ms step_avg:237.20ms +[2025-07-17 12:21:18] [Rank 0] step:8361/10000 train_time:1983451ms step_avg:237.23ms +[2025-07-17 12:21:18] [Rank 0] step:8361/10000 train_time:1983451ms step_avg:237.23ms +[2025-07-17 12:21:27] [Rank 0] PRINT: step:8375/10000 val_loss:4.2403 train_time:1987564ms step_avg:237.32ms +[2025-07-17 12:21:27] [Rank 0] PRINT: step:8375/10000 val_loss:4.2403 train_time:1987564ms step_avg:237.32ms +[2025-07-17 12:21:28] [Rank 0] step:8381/10000 train_time:1988559ms step_avg:237.27ms +[2025-07-17 12:21:28] [Rank 0] step:8381/10000 train_time:1988559ms step_avg:237.27ms +[2025-07-17 12:21:33] [Rank 0] step:8401/10000 train_time:1993556ms step_avg:237.30ms +[2025-07-17 12:21:33] [Rank 0] 
step:8401/10000 train_time:1993556ms step_avg:237.30ms +[2025-07-17 12:21:38] [Rank 0] step:8421/10000 train_time:1998578ms step_avg:237.33ms +[2025-07-17 12:21:38] [Rank 0] step:8421/10000 train_time:1998578ms step_avg:237.33ms +[2025-07-17 12:21:43] [Rank 0] step:8441/10000 train_time:2003594ms step_avg:237.36ms +[2025-07-17 12:21:43] [Rank 0] step:8441/10000 train_time:2003594ms step_avg:237.36ms +[2025-07-17 12:21:48] [Rank 0] step:8461/10000 train_time:2008623ms step_avg:237.40ms +[2025-07-17 12:21:48] [Rank 0] step:8461/10000 train_time:2008623ms step_avg:237.40ms +[2025-07-17 12:21:53] [Rank 0] step:8481/10000 train_time:2013634ms step_avg:237.43ms +[2025-07-17 12:21:53] [Rank 0] step:8481/10000 train_time:2013634ms step_avg:237.43ms +[2025-07-17 12:22:03] [Rank 0] PRINT: step:8500/10000 val_loss:4.2512 train_time:2018907ms step_avg:237.52ms +[2025-07-17 12:22:03] [Rank 0] PRINT: step:8500/10000 val_loss:4.2512 train_time:2018907ms step_avg:237.52ms +[2025-07-17 12:22:03] [Rank 0] step:8501/10000 train_time:2018921ms step_avg:237.49ms +[2025-07-17 12:22:03] [Rank 0] step:8501/10000 train_time:2018921ms step_avg:237.49ms +[2025-07-17 12:22:08] [Rank 0] step:8521/10000 train_time:2023677ms step_avg:237.49ms +[2025-07-17 12:22:08] [Rank 0] step:8521/10000 train_time:2023677ms step_avg:237.49ms +[2025-07-17 12:22:13] [Rank 0] step:8541/10000 train_time:2028714ms step_avg:237.53ms +[2025-07-17 12:22:13] [Rank 0] step:8541/10000 train_time:2028714ms step_avg:237.53ms +[2025-07-17 12:22:18] [Rank 0] step:8561/10000 train_time:2033722ms step_avg:237.56ms +[2025-07-17 12:22:18] [Rank 0] step:8561/10000 train_time:2033722ms step_avg:237.56ms +[2025-07-17 12:22:23] [Rank 0] step:8581/10000 train_time:2038744ms step_avg:237.59ms +[2025-07-17 12:22:23] [Rank 0] step:8581/10000 train_time:2038744ms step_avg:237.59ms +[2025-07-17 12:22:28] [Rank 0] step:8601/10000 train_time:2043750ms step_avg:237.62ms +[2025-07-17 12:22:28] [Rank 0] step:8601/10000 train_time:2043750ms 
step_avg:237.62ms +[2025-07-17 12:22:33] [Rank 0] step:8621/10000 train_time:2048762ms step_avg:237.65ms +[2025-07-17 12:22:33] [Rank 0] step:8621/10000 train_time:2048762ms step_avg:237.65ms +[2025-07-17 12:22:39] [Rank 0] PRINT: step:8625/10000 val_loss:4.2367 train_time:2050268ms step_avg:237.71ms +[2025-07-17 12:22:39] [Rank 0] PRINT: step:8625/10000 val_loss:4.2367 train_time:2050268ms step_avg:237.71ms +[2025-07-17 12:22:43] [Rank 0] step:8641/10000 train_time:2053791ms step_avg:237.68ms +[2025-07-17 12:22:43] [Rank 0] step:8641/10000 train_time:2053791ms step_avg:237.68ms +[2025-07-17 12:22:48] [Rank 0] step:8661/10000 train_time:2058806ms step_avg:237.71ms +[2025-07-17 12:22:48] [Rank 0] step:8661/10000 train_time:2058806ms step_avg:237.71ms +[2025-07-17 12:22:53] [Rank 0] step:8681/10000 train_time:2063824ms step_avg:237.74ms +[2025-07-17 12:22:53] [Rank 0] step:8681/10000 train_time:2063824ms step_avg:237.74ms +[2025-07-17 12:22:58] [Rank 0] step:8701/10000 train_time:2068849ms step_avg:237.77ms +[2025-07-17 12:22:58] [Rank 0] step:8701/10000 train_time:2068849ms step_avg:237.77ms +[2025-07-17 12:23:03] [Rank 0] step:8721/10000 train_time:2073872ms step_avg:237.80ms +[2025-07-17 12:23:03] [Rank 0] step:8721/10000 train_time:2073872ms step_avg:237.80ms +[2025-07-17 12:23:08] [Rank 0] step:8741/10000 train_time:2078893ms step_avg:237.83ms +[2025-07-17 12:23:08] [Rank 0] step:8741/10000 train_time:2078893ms step_avg:237.83ms +[2025-07-17 12:23:15] [Rank 0] PRINT: step:8750/10000 val_loss:4.3061 train_time:2081645ms step_avg:237.90ms +[2025-07-17 12:23:15] [Rank 0] PRINT: step:8750/10000 val_loss:4.3061 train_time:2081645ms step_avg:237.90ms +[2025-07-17 12:23:18] [Rank 0] step:8761/10000 train_time:2083900ms step_avg:237.86ms +[2025-07-17 12:23:18] [Rank 0] step:8761/10000 train_time:2083900ms step_avg:237.86ms +[2025-07-17 12:23:23] [Rank 0] step:8781/10000 train_time:2088914ms step_avg:237.89ms +[2025-07-17 12:23:23] [Rank 0] step:8781/10000 
train_time:2088914ms step_avg:237.89ms +[2025-07-17 12:23:28] [Rank 0] step:8801/10000 train_time:2093930ms step_avg:237.92ms +[2025-07-17 12:23:28] [Rank 0] step:8801/10000 train_time:2093930ms step_avg:237.92ms +[2025-07-17 12:23:33] [Rank 0] step:8821/10000 train_time:2098956ms step_avg:237.95ms +[2025-07-17 12:23:33] [Rank 0] step:8821/10000 train_time:2098956ms step_avg:237.95ms +[2025-07-17 12:23:38] [Rank 0] step:8841/10000 train_time:2103989ms step_avg:237.98ms +[2025-07-17 12:23:38] [Rank 0] step:8841/10000 train_time:2103989ms step_avg:237.98ms +[2025-07-17 12:23:43] [Rank 0] step:8861/10000 train_time:2109006ms step_avg:238.01ms +[2025-07-17 12:23:43] [Rank 0] step:8861/10000 train_time:2109006ms step_avg:238.01ms +[2025-07-17 12:23:51] [Rank 0] PRINT: step:8875/10000 val_loss:4.2701 train_time:2113011ms step_avg:238.09ms +[2025-07-17 12:23:51] [Rank 0] PRINT: step:8875/10000 val_loss:4.2701 train_time:2113011ms step_avg:238.09ms +[2025-07-17 12:23:53] [Rank 0] step:8881/10000 train_time:2114005ms step_avg:238.04ms +[2025-07-17 12:23:53] [Rank 0] step:8881/10000 train_time:2114005ms step_avg:238.04ms +[2025-07-17 12:23:58] [Rank 0] step:8901/10000 train_time:2119006ms step_avg:238.06ms +[2025-07-17 12:23:58] [Rank 0] step:8901/10000 train_time:2119006ms step_avg:238.06ms +[2025-07-17 12:24:03] [Rank 0] step:8921/10000 train_time:2124006ms step_avg:238.09ms +[2025-07-17 12:24:03] [Rank 0] step:8921/10000 train_time:2124006ms step_avg:238.09ms +[2025-07-17 12:24:08] [Rank 0] step:8941/10000 train_time:2129011ms step_avg:238.12ms +[2025-07-17 12:24:08] [Rank 0] step:8941/10000 train_time:2129011ms step_avg:238.12ms +[2025-07-17 12:24:13] [Rank 0] step:8961/10000 train_time:2134021ms step_avg:238.15ms +[2025-07-17 12:24:13] [Rank 0] step:8961/10000 train_time:2134021ms step_avg:238.15ms +[2025-07-17 12:24:18] [Rank 0] step:8981/10000 train_time:2139032ms step_avg:238.17ms +[2025-07-17 12:24:18] [Rank 0] step:8981/10000 train_time:2139032ms step_avg:238.17ms 
+[2025-07-17 12:24:27] [Rank 0] PRINT: step:9000/10000 val_loss:4.2692 train_time:2144298ms step_avg:238.26ms +[2025-07-17 12:24:27] [Rank 0] PRINT: step:9000/10000 val_loss:4.2692 train_time:2144298ms step_avg:238.26ms +[2025-07-17 12:24:27] [Rank 0] step:9001/10000 train_time:2144313ms step_avg:238.23ms +[2025-07-17 12:24:27] [Rank 0] step:9001/10000 train_time:2144313ms step_avg:238.23ms +[2025-07-17 12:24:32] [Rank 0] step:9021/10000 train_time:2149048ms step_avg:238.23ms +[2025-07-17 12:24:32] [Rank 0] step:9021/10000 train_time:2149048ms step_avg:238.23ms +[2025-07-17 12:24:37] [Rank 0] step:9041/10000 train_time:2154081ms step_avg:238.26ms +[2025-07-17 12:24:37] [Rank 0] step:9041/10000 train_time:2154081ms step_avg:238.26ms +[2025-07-17 12:24:42] [Rank 0] step:9061/10000 train_time:2159087ms step_avg:238.28ms +[2025-07-17 12:24:42] [Rank 0] step:9061/10000 train_time:2159087ms step_avg:238.28ms +[2025-07-17 12:24:47] [Rank 0] step:9081/10000 train_time:2164119ms step_avg:238.31ms +[2025-07-17 12:24:47] [Rank 0] step:9081/10000 train_time:2164119ms step_avg:238.31ms +[2025-07-17 12:24:52] [Rank 0] step:9101/10000 train_time:2169150ms step_avg:238.34ms +[2025-07-17 12:24:52] [Rank 0] step:9101/10000 train_time:2169150ms step_avg:238.34ms +[2025-07-17 12:24:58] [Rank 0] step:9121/10000 train_time:2174167ms step_avg:238.37ms +[2025-07-17 12:24:58] [Rank 0] step:9121/10000 train_time:2174167ms step_avg:238.37ms +[2025-07-17 12:25:03] [Rank 0] PRINT: step:9125/10000 val_loss:4.2463 train_time:2175670ms step_avg:238.43ms +[2025-07-17 12:25:03] [Rank 0] PRINT: step:9125/10000 val_loss:4.2463 train_time:2175670ms step_avg:238.43ms +[2025-07-17 12:25:07] [Rank 0] step:9141/10000 train_time:2179162ms step_avg:238.39ms +[2025-07-17 12:25:07] [Rank 0] step:9141/10000 train_time:2179162ms step_avg:238.39ms +[2025-07-17 12:25:12] [Rank 0] step:9161/10000 train_time:2184204ms step_avg:238.42ms +[2025-07-17 12:25:12] [Rank 0] step:9161/10000 train_time:2184204ms 
step_avg:238.42ms +[2025-07-17 12:25:17] [Rank 0] step:9181/10000 train_time:2189223ms step_avg:238.45ms +[2025-07-17 12:25:17] [Rank 0] step:9181/10000 train_time:2189223ms step_avg:238.45ms +[2025-07-17 12:25:22] [Rank 0] step:9201/10000 train_time:2194236ms step_avg:238.48ms +[2025-07-17 12:25:22] [Rank 0] step:9201/10000 train_time:2194236ms step_avg:238.48ms +[2025-07-17 12:25:27] [Rank 0] step:9221/10000 train_time:2199281ms step_avg:238.51ms +[2025-07-17 12:25:27] [Rank 0] step:9221/10000 train_time:2199281ms step_avg:238.51ms +[2025-07-17 12:25:32] [Rank 0] step:9241/10000 train_time:2204308ms step_avg:238.54ms +[2025-07-17 12:25:32] [Rank 0] step:9241/10000 train_time:2204308ms step_avg:238.54ms +[2025-07-17 12:25:40] [Rank 0] PRINT: step:9250/10000 val_loss:4.2581 train_time:2207074ms step_avg:238.60ms +[2025-07-17 12:25:40] [Rank 0] PRINT: step:9250/10000 val_loss:4.2581 train_time:2207074ms step_avg:238.60ms +[2025-07-17 12:25:42] [Rank 0] step:9261/10000 train_time:2209336ms step_avg:238.56ms +[2025-07-17 12:25:42] [Rank 0] step:9261/10000 train_time:2209336ms step_avg:238.56ms +[2025-07-17 12:25:47] [Rank 0] step:9281/10000 train_time:2214334ms step_avg:238.59ms +[2025-07-17 12:25:47] [Rank 0] step:9281/10000 train_time:2214334ms step_avg:238.59ms +[2025-07-17 12:25:52] [Rank 0] step:9301/10000 train_time:2219353ms step_avg:238.61ms +[2025-07-17 12:25:52] [Rank 0] step:9301/10000 train_time:2219353ms step_avg:238.61ms +[2025-07-17 12:25:57] [Rank 0] step:9321/10000 train_time:2224390ms step_avg:238.64ms +[2025-07-17 12:25:57] [Rank 0] step:9321/10000 train_time:2224390ms step_avg:238.64ms +[2025-07-17 12:26:02] [Rank 0] step:9341/10000 train_time:2229411ms step_avg:238.67ms +[2025-07-17 12:26:02] [Rank 0] step:9341/10000 train_time:2229411ms step_avg:238.67ms +[2025-07-17 12:26:07] [Rank 0] step:9361/10000 train_time:2234437ms step_avg:238.70ms +[2025-07-17 12:26:07] [Rank 0] step:9361/10000 train_time:2234437ms step_avg:238.70ms +[2025-07-17 
12:26:16] [Rank 0] PRINT: step:9375/10000 val_loss:4.2506 train_time:2238455ms step_avg:238.77ms +[2025-07-17 12:26:16] [Rank 0] PRINT: step:9375/10000 val_loss:4.2506 train_time:2238455ms step_avg:238.77ms +[2025-07-17 12:26:17] [Rank 0] step:9381/10000 train_time:2239455ms step_avg:238.72ms +[2025-07-17 12:26:17] [Rank 0] step:9381/10000 train_time:2239455ms step_avg:238.72ms +[2025-07-17 12:26:22] [Rank 0] step:9401/10000 train_time:2244453ms step_avg:238.75ms +[2025-07-17 12:26:22] [Rank 0] step:9401/10000 train_time:2244453ms step_avg:238.75ms +[2025-07-17 12:26:27] [Rank 0] step:9421/10000 train_time:2249469ms step_avg:238.77ms +[2025-07-17 12:26:27] [Rank 0] step:9421/10000 train_time:2249469ms step_avg:238.77ms +[2025-07-17 12:26:32] [Rank 0] step:9441/10000 train_time:2254487ms step_avg:238.80ms +[2025-07-17 12:26:32] [Rank 0] step:9441/10000 train_time:2254487ms step_avg:238.80ms +[2025-07-17 12:26:37] [Rank 0] step:9461/10000 train_time:2259515ms step_avg:238.82ms +[2025-07-17 12:26:37] [Rank 0] step:9461/10000 train_time:2259515ms step_avg:238.82ms +[2025-07-17 12:26:42] [Rank 0] step:9481/10000 train_time:2264541ms step_avg:238.85ms +[2025-07-17 12:26:42] [Rank 0] step:9481/10000 train_time:2264541ms step_avg:238.85ms +[2025-07-17 12:26:51] [Rank 0] PRINT: step:9500/10000 val_loss:4.2779 train_time:2269839ms step_avg:238.93ms +[2025-07-17 12:26:51] [Rank 0] PRINT: step:9500/10000 val_loss:4.2779 train_time:2269839ms step_avg:238.93ms +[2025-07-17 12:26:52] [Rank 0] step:9501/10000 train_time:2269853ms step_avg:238.91ms +[2025-07-17 12:26:52] [Rank 0] step:9501/10000 train_time:2269853ms step_avg:238.91ms +[2025-07-17 12:26:57] [Rank 0] step:9521/10000 train_time:2274603ms step_avg:238.90ms +[2025-07-17 12:26:57] [Rank 0] step:9521/10000 train_time:2274603ms step_avg:238.90ms +[2025-07-17 12:27:02] [Rank 0] step:9541/10000 train_time:2279638ms step_avg:238.93ms +[2025-07-17 12:27:02] [Rank 0] step:9541/10000 train_time:2279638ms step_avg:238.93ms 
+[2025-07-17 12:27:07] [Rank 0] step:9561/10000 train_time:2284642ms step_avg:238.95ms +[2025-07-17 12:27:07] [Rank 0] step:9561/10000 train_time:2284642ms step_avg:238.95ms +[2025-07-17 12:27:12] [Rank 0] step:9581/10000 train_time:2289663ms step_avg:238.98ms +[2025-07-17 12:27:12] [Rank 0] step:9581/10000 train_time:2289663ms step_avg:238.98ms +[2025-07-17 12:27:17] [Rank 0] step:9601/10000 train_time:2294678ms step_avg:239.00ms +[2025-07-17 12:27:17] [Rank 0] step:9601/10000 train_time:2294678ms step_avg:239.00ms +[2025-07-17 12:27:22] [Rank 0] step:9621/10000 train_time:2299727ms step_avg:239.03ms +[2025-07-17 12:27:22] [Rank 0] step:9621/10000 train_time:2299727ms step_avg:239.03ms +[2025-07-17 12:27:28] [Rank 0] PRINT: step:9625/10000 val_loss:4.2812 train_time:2301229ms step_avg:239.09ms +[2025-07-17 12:27:28] [Rank 0] PRINT: step:9625/10000 val_loss:4.2812 train_time:2301229ms step_avg:239.09ms +[2025-07-17 12:27:32] [Rank 0] step:9641/10000 train_time:2304770ms step_avg:239.06ms +[2025-07-17 12:27:32] [Rank 0] step:9641/10000 train_time:2304770ms step_avg:239.06ms +[2025-07-17 12:27:37] [Rank 0] step:9661/10000 train_time:2309859ms step_avg:239.09ms +[2025-07-17 12:27:37] [Rank 0] step:9661/10000 train_time:2309859ms step_avg:239.09ms +[2025-07-17 12:27:42] [Rank 0] step:9681/10000 train_time:2314935ms step_avg:239.12ms +[2025-07-17 12:27:42] [Rank 0] step:9681/10000 train_time:2314935ms step_avg:239.12ms +[2025-07-17 12:27:47] [Rank 0] step:9701/10000 train_time:2320028ms step_avg:239.15ms +[2025-07-17 12:27:47] [Rank 0] step:9701/10000 train_time:2320028ms step_avg:239.15ms +[2025-07-17 12:27:52] [Rank 0] step:9721/10000 train_time:2325096ms step_avg:239.18ms +[2025-07-17 12:27:52] [Rank 0] step:9721/10000 train_time:2325096ms step_avg:239.18ms +[2025-07-17 12:27:57] [Rank 0] step:9741/10000 train_time:2330182ms step_avg:239.21ms +[2025-07-17 12:27:57] [Rank 0] step:9741/10000 train_time:2330182ms step_avg:239.21ms +[2025-07-17 12:28:04] [Rank 0] PRINT: 
step:9750/10000 val_loss:4.3349 train_time:2332970ms step_avg:239.28ms +[2025-07-17 12:28:04] [Rank 0] PRINT: step:9750/10000 val_loss:4.3349 train_time:2332970ms step_avg:239.28ms +[2025-07-17 12:28:07] [Rank 0] step:9761/10000 train_time:2335252ms step_avg:239.24ms +[2025-07-17 12:28:07] [Rank 0] step:9761/10000 train_time:2335252ms step_avg:239.24ms +[2025-07-17 12:28:12] [Rank 0] step:9781/10000 train_time:2340326ms step_avg:239.27ms +[2025-07-17 12:28:12] [Rank 0] step:9781/10000 train_time:2340326ms step_avg:239.27ms +[2025-07-17 12:28:17] [Rank 0] step:9801/10000 train_time:2345389ms step_avg:239.30ms +[2025-07-17 12:28:17] [Rank 0] step:9801/10000 train_time:2345389ms step_avg:239.30ms +[2025-07-17 12:28:22] [Rank 0] step:9821/10000 train_time:2350455ms step_avg:239.33ms +[2025-07-17 12:28:22] [Rank 0] step:9821/10000 train_time:2350455ms step_avg:239.33ms +[2025-07-17 12:28:27] [Rank 0] step:9841/10000 train_time:2355518ms step_avg:239.36ms +[2025-07-17 12:28:27] [Rank 0] step:9841/10000 train_time:2355518ms step_avg:239.36ms +[2025-07-17 12:28:32] [Rank 0] step:9861/10000 train_time:2360586ms step_avg:239.39ms +[2025-07-17 12:28:32] [Rank 0] step:9861/10000 train_time:2360586ms step_avg:239.39ms +[2025-07-17 12:28:41] [Rank 0] PRINT: step:9875/10000 val_loss:4.2445 train_time:2364637ms step_avg:239.46ms +[2025-07-17 12:28:41] [Rank 0] PRINT: step:9875/10000 val_loss:4.2445 train_time:2364637ms step_avg:239.46ms +[2025-07-17 12:28:42] [Rank 0] step:9881/10000 train_time:2365646ms step_avg:239.41ms +[2025-07-17 12:28:42] [Rank 0] step:9881/10000 train_time:2365646ms step_avg:239.41ms +[2025-07-17 12:28:47] [Rank 0] step:9901/10000 train_time:2370718ms step_avg:239.44ms +[2025-07-17 12:28:47] [Rank 0] step:9901/10000 train_time:2370718ms step_avg:239.44ms +[2025-07-17 12:28:52] [Rank 0] step:9921/10000 train_time:2375803ms step_avg:239.47ms +[2025-07-17 12:28:52] [Rank 0] step:9921/10000 train_time:2375803ms step_avg:239.47ms +[2025-07-17 12:28:57] [Rank 0] 
step:9941/10000 train_time:2380909ms step_avg:239.50ms +[2025-07-17 12:28:57] [Rank 0] step:9941/10000 train_time:2380909ms step_avg:239.50ms +[2025-07-17 12:29:03] [Rank 0] step:9961/10000 train_time:2386003ms step_avg:239.53ms +[2025-07-17 12:29:03] [Rank 0] step:9961/10000 train_time:2386003ms step_avg:239.53ms +[2025-07-17 12:29:08] [Rank 0] step:9981/10000 train_time:2391109ms step_avg:239.57ms +[2025-07-17 12:29:08] [Rank 0] step:9981/10000 train_time:2391109ms step_avg:239.57ms +[2025-07-17 12:29:12] [Rank 0] step:10000/10000 train_time:2395918ms step_avg:239.59ms +[2025-07-17 12:29:12] [Rank 0] step:10000/10000 train_time:2395918ms step_avg:239.59ms +[2025-07-17 12:29:17] [Rank 0] PRINT: step:10000/10000 val_loss:4.2521 train_time:2396429ms step_avg:239.64ms +[2025-07-17 12:29:17] [Rank 0] PRINT: step:10000/10000 val_loss:4.2521 train_time:2396429ms step_avg:239.64ms +[2025-07-17 12:29:17] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 12:29:17 2025 --- +[2025-07-17 12:29:17] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 12:29:17 2025 --- +[2025-07-17 12:29:17] [Rank 0] PRINT: Peak memory allocated: 31117 MiB reserved: 31436 MiB +[2025-07-17 12:29:17] [Rank 0] PRINT: Peak memory allocated: 31117 MiB reserved: 31436 MiB diff --git a/logs_norope/diff_modes/mode_2_param_norope_seed_43/config.json b/logs_norope/diff_modes/mode_2_param_norope_seed_43/config.json new file mode 100644 index 0000000000000000000000000000000000000000..6dd1a20ced7e8fe844c00473eaf9de3aa3ad515f --- /dev/null +++ b/logs_norope/diff_modes/mode_2_param_norope_seed_43/config.json @@ -0,0 +1,22 @@ +{ + "cli_args": { + "unet": false, + "seed": 43, + "optimizer_mode": 2, + "model_parameterization": "norope" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 10485760, + "train_seq_len": 49152, + 
"val_seq_len": 262144, + "num_iterations": 10000, + "cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 125, + "save_checkpoint": false + }, + "run_uuid_for_log": "05eddba1-be2f-40f4-ac7f-b60dfd659452", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_norope/diff_modes/mode_2_param_norope_seed_43/training_log_05eddba1-be2f-40f4-ac7f-b60dfd659452.txt b/logs_norope/diff_modes/mode_2_param_norope_seed_43/training_log_05eddba1-be2f-40f4-ac7f-b60dfd659452.txt new file mode 100644 index 0000000000000000000000000000000000000000..c6669d61e62f595c4953008b03abbf3aeb6c3b70 --- /dev/null +++ b/logs_norope/diff_modes/mode_2_param_norope_seed_43/training_log_05eddba1-be2f-40f4-ac7f-b60dfd659452.txt @@ -0,0 +1,2360 @@ +[2025-07-17 18:54:00] [Rank 0] PRINT: --- Script Start: Thu Jul 17 18:54:00 2025 --- +[2025-07-17 18:54:00] [Rank 0] PRINT: --- Script Start: Thu Jul 17 18:54:00 2025 --- +[2025-07-17 18:54:00] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=2, model_parameterization='norope') +[2025-07-17 18:54:00] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=2, model_parameterization='norope') +[2025-07-17 18:54:00] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 18:54:00] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 18:54:00] [Rank 0] PRINT: Using fixed seed: 43 +[2025-07-17 18:54:00] [Rank 0] PRINT: Using fixed seed: 43 +[2025-07-17 18:54:00] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_2_param_norope_seed_43 +[2025-07-17 18:54:00] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_2_param_norope_seed_43 +[2025-07-17 18:54:00] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path 
+import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * 
num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used 
for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + 
config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 18:54:00] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + 
num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + 
run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 18:54:00] [Rank 0] PRINT: Constructing model... +[2025-07-17 18:54:00] [Rank 0] PRINT: Constructing model... +[2025-07-17 18:54:02] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 18:54:02] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 18:54:02] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 18:54:02] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 18:54:02] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 18:54:02] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 18:54:02] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 18:54:02] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 18:54:02] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 2 +[2025-07-17 18:54:02] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 2 +[2025-07-17 18:54:02] [Rank 0] PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: 0.001). +[2025-07-17 18:54:02] [Rank 0] PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: 0.001). +[2025-07-17 18:54:02] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-07-17 18:54:02] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-07-17 18:54:02] [Rank 0] PRINT: Muon optimizer is active with 22 parameters. +[2025-07-17 18:54:02] [Rank 0] PRINT: Muon optimizer is active with 22 parameters. +[2025-07-17 18:54:02] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 18:54:02] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 18:54:03] [Rank 0] PRINT: Model compilation complete. +[2025-07-17 18:54:03] [Rank 0] PRINT: Model compilation complete. +[2025-07-17 18:54:03] [Rank 0] PRINT: Starting warmup... 
+[2025-07-17 18:54:03] [Rank 0] PRINT: Starting warmup... +[2025-07-17 18:55:07] [Rank 0] PRINT: Warmup complete. +[2025-07-17 18:55:07] [Rank 0] PRINT: Warmup complete. +[2025-07-17 18:55:07] [Rank 0] PRINT: Starting training... +[2025-07-17 18:55:07] [Rank 0] PRINT: Starting training... +[2025-07-17 18:55:16] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 18:55:16] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 18:55:22] [Rank 0] step:21/10000 train_time:5308ms step_avg:252.78ms +[2025-07-17 18:55:22] [Rank 0] step:21/10000 train_time:5308ms step_avg:252.78ms +[2025-07-17 18:55:26] [Rank 0] step:41/10000 train_time:9730ms step_avg:237.32ms +[2025-07-17 18:55:26] [Rank 0] step:41/10000 train_time:9730ms step_avg:237.32ms +[2025-07-17 18:55:31] [Rank 0] step:61/10000 train_time:14161ms step_avg:232.14ms +[2025-07-17 18:55:31] [Rank 0] step:61/10000 train_time:14161ms step_avg:232.14ms +[2025-07-17 18:55:35] [Rank 0] step:81/10000 train_time:18599ms step_avg:229.61ms +[2025-07-17 18:55:35] [Rank 0] step:81/10000 train_time:18599ms step_avg:229.61ms +[2025-07-17 18:55:40] [Rank 0] step:101/10000 train_time:23051ms step_avg:228.22ms +[2025-07-17 18:55:40] [Rank 0] step:101/10000 train_time:23051ms step_avg:228.22ms +[2025-07-17 18:55:44] [Rank 0] step:121/10000 train_time:27504ms step_avg:227.30ms +[2025-07-17 18:55:44] [Rank 0] step:121/10000 train_time:27504ms step_avg:227.30ms +[2025-07-17 18:55:50] [Rank 0] PRINT: step:125/10000 val_loss:5.2338 train_time:28843ms step_avg:230.74ms +[2025-07-17 18:55:50] [Rank 0] PRINT: step:125/10000 val_loss:5.2338 train_time:28843ms step_avg:230.74ms +[2025-07-17 18:55:53] [Rank 0] step:141/10000 train_time:31955ms step_avg:226.63ms +[2025-07-17 18:55:53] [Rank 0] step:141/10000 train_time:31955ms step_avg:226.63ms +[2025-07-17 18:55:58] [Rank 0] step:161/10000 train_time:36414ms step_avg:226.18ms +[2025-07-17 18:55:58] [Rank 0] step:161/10000 
train_time:36414ms step_avg:226.18ms +[2025-07-17 18:56:02] [Rank 0] step:181/10000 train_time:40875ms step_avg:225.83ms +[2025-07-17 18:56:02] [Rank 0] step:181/10000 train_time:40875ms step_avg:225.83ms +[2025-07-17 18:56:06] [Rank 0] step:201/10000 train_time:45338ms step_avg:225.56ms +[2025-07-17 18:56:06] [Rank 0] step:201/10000 train_time:45338ms step_avg:225.56ms +[2025-07-17 18:56:11] [Rank 0] step:221/10000 train_time:49795ms step_avg:225.32ms +[2025-07-17 18:56:11] [Rank 0] step:221/10000 train_time:49795ms step_avg:225.32ms +[2025-07-17 18:56:15] [Rank 0] step:241/10000 train_time:54251ms step_avg:225.11ms +[2025-07-17 18:56:15] [Rank 0] step:241/10000 train_time:54251ms step_avg:225.11ms +[2025-07-17 18:56:22] [Rank 0] PRINT: step:250/10000 val_loss:4.8779 train_time:56706ms step_avg:226.82ms +[2025-07-17 18:56:22] [Rank 0] PRINT: step:250/10000 val_loss:4.8779 train_time:56706ms step_avg:226.82ms +[2025-07-17 18:56:24] [Rank 0] step:261/10000 train_time:58709ms step_avg:224.94ms +[2025-07-17 18:56:24] [Rank 0] step:261/10000 train_time:58709ms step_avg:224.94ms +[2025-07-17 18:56:29] [Rank 0] step:281/10000 train_time:63173ms step_avg:224.82ms +[2025-07-17 18:56:29] [Rank 0] step:281/10000 train_time:63173ms step_avg:224.82ms +[2025-07-17 18:56:33] [Rank 0] step:301/10000 train_time:67628ms step_avg:224.68ms +[2025-07-17 18:56:33] [Rank 0] step:301/10000 train_time:67628ms step_avg:224.68ms +[2025-07-17 18:56:38] [Rank 0] step:321/10000 train_time:72095ms step_avg:224.59ms +[2025-07-17 18:56:38] [Rank 0] step:321/10000 train_time:72095ms step_avg:224.59ms +[2025-07-17 18:56:42] [Rank 0] step:341/10000 train_time:76561ms step_avg:224.52ms +[2025-07-17 18:56:42] [Rank 0] step:341/10000 train_time:76561ms step_avg:224.52ms +[2025-07-17 18:56:47] [Rank 0] step:361/10000 train_time:81020ms step_avg:224.43ms +[2025-07-17 18:56:47] [Rank 0] step:361/10000 train_time:81020ms step_avg:224.43ms +[2025-07-17 18:56:53] [Rank 0] PRINT: step:375/10000 
val_loss:4.6537 train_time:84594ms step_avg:225.58ms +[2025-07-17 18:56:53] [Rank 0] PRINT: step:375/10000 val_loss:4.6537 train_time:84594ms step_avg:225.58ms +[2025-07-17 18:56:55] [Rank 0] step:381/10000 train_time:85484ms step_avg:224.37ms +[2025-07-17 18:56:55] [Rank 0] step:381/10000 train_time:85484ms step_avg:224.37ms +[2025-07-17 18:56:59] [Rank 0] step:401/10000 train_time:89952ms step_avg:224.32ms +[2025-07-17 18:56:59] [Rank 0] step:401/10000 train_time:89952ms step_avg:224.32ms +[2025-07-17 18:57:04] [Rank 0] step:421/10000 train_time:94412ms step_avg:224.26ms +[2025-07-17 18:57:04] [Rank 0] step:421/10000 train_time:94412ms step_avg:224.26ms +[2025-07-17 18:57:08] [Rank 0] step:441/10000 train_time:98872ms step_avg:224.20ms +[2025-07-17 18:57:08] [Rank 0] step:441/10000 train_time:98872ms step_avg:224.20ms +[2025-07-17 18:57:13] [Rank 0] step:461/10000 train_time:103336ms step_avg:224.16ms +[2025-07-17 18:57:13] [Rank 0] step:461/10000 train_time:103336ms step_avg:224.16ms +[2025-07-17 18:57:17] [Rank 0] step:481/10000 train_time:107798ms step_avg:224.11ms +[2025-07-17 18:57:17] [Rank 0] step:481/10000 train_time:107798ms step_avg:224.11ms +[2025-07-17 18:57:26] [Rank 0] PRINT: step:500/10000 val_loss:4.5344 train_time:112480ms step_avg:224.96ms +[2025-07-17 18:57:26] [Rank 0] PRINT: step:500/10000 val_loss:4.5344 train_time:112480ms step_avg:224.96ms +[2025-07-17 18:57:26] [Rank 0] step:501/10000 train_time:112494ms step_avg:224.54ms +[2025-07-17 18:57:26] [Rank 0] step:501/10000 train_time:112494ms step_avg:224.54ms +[2025-07-17 18:57:30] [Rank 0] step:521/10000 train_time:116717ms step_avg:224.03ms +[2025-07-17 18:57:30] [Rank 0] step:521/10000 train_time:116717ms step_avg:224.03ms +[2025-07-17 18:57:35] [Rank 0] step:541/10000 train_time:121179ms step_avg:223.99ms +[2025-07-17 18:57:35] [Rank 0] step:541/10000 train_time:121179ms step_avg:223.99ms +[2025-07-17 18:57:39] [Rank 0] step:561/10000 train_time:125638ms step_avg:223.95ms +[2025-07-17 
18:57:39] [Rank 0] step:561/10000 train_time:125638ms step_avg:223.95ms +[2025-07-17 18:57:44] [Rank 0] step:581/10000 train_time:130103ms step_avg:223.93ms +[2025-07-17 18:57:44] [Rank 0] step:581/10000 train_time:130103ms step_avg:223.93ms +[2025-07-17 18:57:48] [Rank 0] step:601/10000 train_time:134565ms step_avg:223.90ms +[2025-07-17 18:57:48] [Rank 0] step:601/10000 train_time:134565ms step_avg:223.90ms +[2025-07-17 18:57:53] [Rank 0] step:621/10000 train_time:139026ms step_avg:223.87ms +[2025-07-17 18:57:53] [Rank 0] step:621/10000 train_time:139026ms step_avg:223.87ms +[2025-07-17 18:57:58] [Rank 0] PRINT: step:625/10000 val_loss:4.4374 train_time:140370ms step_avg:224.59ms +[2025-07-17 18:57:58] [Rank 0] PRINT: step:625/10000 val_loss:4.4374 train_time:140370ms step_avg:224.59ms +[2025-07-17 18:58:02] [Rank 0] step:641/10000 train_time:143494ms step_avg:223.86ms +[2025-07-17 18:58:02] [Rank 0] step:641/10000 train_time:143494ms step_avg:223.86ms +[2025-07-17 18:58:06] [Rank 0] step:661/10000 train_time:147964ms step_avg:223.85ms +[2025-07-17 18:58:06] [Rank 0] step:661/10000 train_time:147964ms step_avg:223.85ms +[2025-07-17 18:58:11] [Rank 0] step:681/10000 train_time:152428ms step_avg:223.83ms +[2025-07-17 18:58:11] [Rank 0] step:681/10000 train_time:152428ms step_avg:223.83ms +[2025-07-17 18:58:15] [Rank 0] step:701/10000 train_time:156898ms step_avg:223.82ms +[2025-07-17 18:58:15] [Rank 0] step:701/10000 train_time:156898ms step_avg:223.82ms +[2025-07-17 18:58:20] [Rank 0] step:721/10000 train_time:161362ms step_avg:223.80ms +[2025-07-17 18:58:20] [Rank 0] step:721/10000 train_time:161362ms step_avg:223.80ms +[2025-07-17 18:58:24] [Rank 0] step:741/10000 train_time:165832ms step_avg:223.79ms +[2025-07-17 18:58:24] [Rank 0] step:741/10000 train_time:165832ms step_avg:223.79ms +[2025-07-17 18:58:31] [Rank 0] PRINT: step:750/10000 val_loss:4.3173 train_time:168309ms step_avg:224.41ms +[2025-07-17 18:58:31] [Rank 0] PRINT: step:750/10000 val_loss:4.3173 
train_time:168309ms step_avg:224.41ms +[2025-07-17 18:58:33] [Rank 0] step:761/10000 train_time:170331ms step_avg:223.83ms +[2025-07-17 18:58:33] [Rank 0] step:761/10000 train_time:170331ms step_avg:223.83ms +[2025-07-17 18:58:37] [Rank 0] step:781/10000 train_time:174834ms step_avg:223.86ms +[2025-07-17 18:58:37] [Rank 0] step:781/10000 train_time:174834ms step_avg:223.86ms +[2025-07-17 18:58:42] [Rank 0] step:801/10000 train_time:179338ms step_avg:223.89ms +[2025-07-17 18:58:42] [Rank 0] step:801/10000 train_time:179338ms step_avg:223.89ms +[2025-07-17 18:58:46] [Rank 0] step:821/10000 train_time:183844ms step_avg:223.93ms +[2025-07-17 18:58:46] [Rank 0] step:821/10000 train_time:183844ms step_avg:223.93ms +[2025-07-17 18:58:51] [Rank 0] step:841/10000 train_time:188352ms step_avg:223.96ms +[2025-07-17 18:58:51] [Rank 0] step:841/10000 train_time:188352ms step_avg:223.96ms +[2025-07-17 18:58:56] [Rank 0] step:861/10000 train_time:192861ms step_avg:224.00ms +[2025-07-17 18:58:56] [Rank 0] step:861/10000 train_time:192861ms step_avg:224.00ms +[2025-07-17 18:59:03] [Rank 0] PRINT: step:875/10000 val_loss:4.1376 train_time:196472ms step_avg:224.54ms +[2025-07-17 18:59:03] [Rank 0] PRINT: step:875/10000 val_loss:4.1376 train_time:196472ms step_avg:224.54ms +[2025-07-17 18:59:05] [Rank 0] step:881/10000 train_time:197370ms step_avg:224.03ms +[2025-07-17 18:59:05] [Rank 0] step:881/10000 train_time:197370ms step_avg:224.03ms +[2025-07-17 18:59:09] [Rank 0] step:901/10000 train_time:201879ms step_avg:224.06ms +[2025-07-17 18:59:09] [Rank 0] step:901/10000 train_time:201879ms step_avg:224.06ms +[2025-07-17 18:59:14] [Rank 0] step:921/10000 train_time:206389ms step_avg:224.09ms +[2025-07-17 18:59:14] [Rank 0] step:921/10000 train_time:206389ms step_avg:224.09ms +[2025-07-17 18:59:18] [Rank 0] step:941/10000 train_time:210898ms step_avg:224.12ms +[2025-07-17 18:59:18] [Rank 0] step:941/10000 train_time:210898ms step_avg:224.12ms +[2025-07-17 18:59:23] [Rank 0] 
step:961/10000 train_time:215417ms step_avg:224.16ms +[2025-07-17 18:59:23] [Rank 0] step:961/10000 train_time:215417ms step_avg:224.16ms +[2025-07-17 18:59:27] [Rank 0] step:981/10000 train_time:219935ms step_avg:224.20ms +[2025-07-17 18:59:27] [Rank 0] step:981/10000 train_time:219935ms step_avg:224.20ms +[2025-07-17 18:59:36] [Rank 0] PRINT: step:1000/10000 val_loss:4.0694 train_time:224685ms step_avg:224.68ms +[2025-07-17 18:59:36] [Rank 0] PRINT: step:1000/10000 val_loss:4.0694 train_time:224685ms step_avg:224.68ms +[2025-07-17 18:59:36] [Rank 0] step:1001/10000 train_time:224698ms step_avg:224.47ms +[2025-07-17 18:59:36] [Rank 0] step:1001/10000 train_time:224698ms step_avg:224.47ms +[2025-07-17 18:59:41] [Rank 0] step:1021/10000 train_time:228973ms step_avg:224.26ms +[2025-07-17 18:59:41] [Rank 0] step:1021/10000 train_time:228973ms step_avg:224.26ms +[2025-07-17 18:59:45] [Rank 0] step:1041/10000 train_time:233489ms step_avg:224.29ms +[2025-07-17 18:59:45] [Rank 0] step:1041/10000 train_time:233489ms step_avg:224.29ms +[2025-07-17 18:59:50] [Rank 0] step:1061/10000 train_time:238003ms step_avg:224.32ms +[2025-07-17 18:59:50] [Rank 0] step:1061/10000 train_time:238003ms step_avg:224.32ms +[2025-07-17 18:59:54] [Rank 0] step:1081/10000 train_time:242520ms step_avg:224.35ms +[2025-07-17 18:59:54] [Rank 0] step:1081/10000 train_time:242520ms step_avg:224.35ms +[2025-07-17 18:59:59] [Rank 0] step:1101/10000 train_time:247032ms step_avg:224.37ms +[2025-07-17 18:59:59] [Rank 0] step:1101/10000 train_time:247032ms step_avg:224.37ms +[2025-07-17 19:00:03] [Rank 0] step:1121/10000 train_time:251543ms step_avg:224.39ms +[2025-07-17 19:00:03] [Rank 0] step:1121/10000 train_time:251543ms step_avg:224.39ms +[2025-07-17 19:00:08] [Rank 0] PRINT: step:1125/10000 val_loss:4.0548 train_time:252901ms step_avg:224.80ms +[2025-07-17 19:00:08] [Rank 0] PRINT: step:1125/10000 val_loss:4.0548 train_time:252901ms step_avg:224.80ms +[2025-07-17 19:00:12] [Rank 0] step:1141/10000 
train_time:256056ms step_avg:224.41ms +[2025-07-17 19:00:12] [Rank 0] step:1141/10000 train_time:256056ms step_avg:224.41ms +[2025-07-17 19:00:17] [Rank 0] step:1161/10000 train_time:260568ms step_avg:224.43ms +[2025-07-17 19:00:17] [Rank 0] step:1161/10000 train_time:260568ms step_avg:224.43ms +[2025-07-17 19:00:21] [Rank 0] step:1181/10000 train_time:265081ms step_avg:224.45ms +[2025-07-17 19:00:21] [Rank 0] step:1181/10000 train_time:265081ms step_avg:224.45ms +[2025-07-17 19:00:26] [Rank 0] step:1201/10000 train_time:269596ms step_avg:224.48ms +[2025-07-17 19:00:26] [Rank 0] step:1201/10000 train_time:269596ms step_avg:224.48ms +[2025-07-17 19:00:30] [Rank 0] step:1221/10000 train_time:274108ms step_avg:224.49ms +[2025-07-17 19:00:30] [Rank 0] step:1221/10000 train_time:274108ms step_avg:224.49ms +[2025-07-17 19:00:35] [Rank 0] step:1241/10000 train_time:278624ms step_avg:224.52ms +[2025-07-17 19:00:35] [Rank 0] step:1241/10000 train_time:278624ms step_avg:224.52ms +[2025-07-17 19:00:41] [Rank 0] PRINT: step:1250/10000 val_loss:4.0401 train_time:281112ms step_avg:224.89ms +[2025-07-17 19:00:41] [Rank 0] PRINT: step:1250/10000 val_loss:4.0401 train_time:281112ms step_avg:224.89ms +[2025-07-17 19:00:44] [Rank 0] step:1261/10000 train_time:283138ms step_avg:224.53ms +[2025-07-17 19:00:44] [Rank 0] step:1261/10000 train_time:283138ms step_avg:224.53ms +[2025-07-17 19:00:48] [Rank 0] step:1281/10000 train_time:287652ms step_avg:224.55ms +[2025-07-17 19:00:48] [Rank 0] step:1281/10000 train_time:287652ms step_avg:224.55ms +[2025-07-17 19:00:53] [Rank 0] step:1301/10000 train_time:292167ms step_avg:224.57ms +[2025-07-17 19:00:53] [Rank 0] step:1301/10000 train_time:292167ms step_avg:224.57ms +[2025-07-17 19:00:57] [Rank 0] step:1321/10000 train_time:296683ms step_avg:224.59ms +[2025-07-17 19:00:57] [Rank 0] step:1321/10000 train_time:296683ms step_avg:224.59ms +[2025-07-17 19:01:02] [Rank 0] step:1341/10000 train_time:301202ms step_avg:224.61ms +[2025-07-17 19:01:02] 
[Rank 0] step:1341/10000 train_time:301202ms step_avg:224.61ms +[2025-07-17 19:01:06] [Rank 0] step:1361/10000 train_time:305718ms step_avg:224.63ms +[2025-07-17 19:01:06] [Rank 0] step:1361/10000 train_time:305718ms step_avg:224.63ms +[2025-07-17 19:01:14] [Rank 0] PRINT: step:1375/10000 val_loss:4.0694 train_time:309336ms step_avg:224.97ms +[2025-07-17 19:01:14] [Rank 0] PRINT: step:1375/10000 val_loss:4.0694 train_time:309336ms step_avg:224.97ms +[2025-07-17 19:01:15] [Rank 0] step:1381/10000 train_time:310235ms step_avg:224.65ms +[2025-07-17 19:01:15] [Rank 0] step:1381/10000 train_time:310235ms step_avg:224.65ms +[2025-07-17 19:01:20] [Rank 0] step:1401/10000 train_time:314754ms step_avg:224.66ms +[2025-07-17 19:01:20] [Rank 0] step:1401/10000 train_time:314754ms step_avg:224.66ms +[2025-07-17 19:01:24] [Rank 0] step:1421/10000 train_time:319276ms step_avg:224.68ms +[2025-07-17 19:01:24] [Rank 0] step:1421/10000 train_time:319276ms step_avg:224.68ms +[2025-07-17 19:01:29] [Rank 0] step:1441/10000 train_time:323797ms step_avg:224.70ms +[2025-07-17 19:01:29] [Rank 0] step:1441/10000 train_time:323797ms step_avg:224.70ms +[2025-07-17 19:01:33] [Rank 0] step:1461/10000 train_time:328318ms step_avg:224.72ms +[2025-07-17 19:01:33] [Rank 0] step:1461/10000 train_time:328318ms step_avg:224.72ms +[2025-07-17 19:01:38] [Rank 0] step:1481/10000 train_time:332839ms step_avg:224.74ms +[2025-07-17 19:01:38] [Rank 0] step:1481/10000 train_time:332839ms step_avg:224.74ms +[2025-07-17 19:01:47] [Rank 0] PRINT: step:1500/10000 val_loss:4.0261 train_time:337609ms step_avg:225.07ms +[2025-07-17 19:01:47] [Rank 0] PRINT: step:1500/10000 val_loss:4.0261 train_time:337609ms step_avg:225.07ms +[2025-07-17 19:01:47] [Rank 0] step:1501/10000 train_time:337624ms step_avg:224.93ms +[2025-07-17 19:01:47] [Rank 0] step:1501/10000 train_time:337624ms step_avg:224.93ms +[2025-07-17 19:01:51] [Rank 0] step:1521/10000 train_time:341930ms step_avg:224.81ms +[2025-07-17 19:01:51] [Rank 0] 
step:1521/10000 train_time:341930ms step_avg:224.81ms +[2025-07-17 19:01:56] [Rank 0] step:1541/10000 train_time:346484ms step_avg:224.84ms +[2025-07-17 19:01:56] [Rank 0] step:1541/10000 train_time:346484ms step_avg:224.84ms +[2025-07-17 19:02:01] [Rank 0] step:1561/10000 train_time:351041ms step_avg:224.88ms +[2025-07-17 19:02:01] [Rank 0] step:1561/10000 train_time:351041ms step_avg:224.88ms +[2025-07-17 19:02:05] [Rank 0] step:1581/10000 train_time:355595ms step_avg:224.92ms +[2025-07-17 19:02:05] [Rank 0] step:1581/10000 train_time:355595ms step_avg:224.92ms +[2025-07-17 19:02:10] [Rank 0] step:1601/10000 train_time:360151ms step_avg:224.95ms +[2025-07-17 19:02:10] [Rank 0] step:1601/10000 train_time:360151ms step_avg:224.95ms +[2025-07-17 19:02:14] [Rank 0] step:1621/10000 train_time:364706ms step_avg:224.99ms +[2025-07-17 19:02:14] [Rank 0] step:1621/10000 train_time:364706ms step_avg:224.99ms +[2025-07-17 19:02:20] [Rank 0] PRINT: step:1625/10000 val_loss:4.0401 train_time:366078ms step_avg:225.28ms +[2025-07-17 19:02:20] [Rank 0] PRINT: step:1625/10000 val_loss:4.0401 train_time:366078ms step_avg:225.28ms +[2025-07-17 19:02:23] [Rank 0] step:1641/10000 train_time:369264ms step_avg:225.02ms +[2025-07-17 19:02:23] [Rank 0] step:1641/10000 train_time:369264ms step_avg:225.02ms +[2025-07-17 19:02:28] [Rank 0] step:1661/10000 train_time:373823ms step_avg:225.06ms +[2025-07-17 19:02:28] [Rank 0] step:1661/10000 train_time:373823ms step_avg:225.06ms +[2025-07-17 19:02:32] [Rank 0] step:1681/10000 train_time:378385ms step_avg:225.10ms +[2025-07-17 19:02:32] [Rank 0] step:1681/10000 train_time:378385ms step_avg:225.10ms +[2025-07-17 19:02:37] [Rank 0] step:1701/10000 train_time:382947ms step_avg:225.13ms +[2025-07-17 19:02:37] [Rank 0] step:1701/10000 train_time:382947ms step_avg:225.13ms +[2025-07-17 19:02:41] [Rank 0] step:1721/10000 train_time:387509ms step_avg:225.17ms +[2025-07-17 19:02:41] [Rank 0] step:1721/10000 train_time:387509ms step_avg:225.17ms 
+[2025-07-17 19:02:46] [Rank 0] step:1741/10000 train_time:392072ms step_avg:225.20ms +[2025-07-17 19:02:46] [Rank 0] step:1741/10000 train_time:392072ms step_avg:225.20ms +[2025-07-17 19:02:53] [Rank 0] PRINT: step:1750/10000 val_loss:4.0335 train_time:394586ms step_avg:225.48ms +[2025-07-17 19:02:53] [Rank 0] PRINT: step:1750/10000 val_loss:4.0335 train_time:394586ms step_avg:225.48ms +[2025-07-17 19:02:55] [Rank 0] step:1761/10000 train_time:396630ms step_avg:225.23ms +[2025-07-17 19:02:55] [Rank 0] step:1761/10000 train_time:396630ms step_avg:225.23ms +[2025-07-17 19:03:00] [Rank 0] step:1781/10000 train_time:401191ms step_avg:225.26ms +[2025-07-17 19:03:00] [Rank 0] step:1781/10000 train_time:401191ms step_avg:225.26ms +[2025-07-17 19:03:04] [Rank 0] step:1801/10000 train_time:405752ms step_avg:225.29ms +[2025-07-17 19:03:04] [Rank 0] step:1801/10000 train_time:405752ms step_avg:225.29ms +[2025-07-17 19:03:09] [Rank 0] step:1821/10000 train_time:410313ms step_avg:225.32ms +[2025-07-17 19:03:09] [Rank 0] step:1821/10000 train_time:410313ms step_avg:225.32ms +[2025-07-17 19:03:13] [Rank 0] step:1841/10000 train_time:414872ms step_avg:225.35ms +[2025-07-17 19:03:13] [Rank 0] step:1841/10000 train_time:414872ms step_avg:225.35ms +[2025-07-17 19:03:18] [Rank 0] step:1861/10000 train_time:419427ms step_avg:225.38ms +[2025-07-17 19:03:18] [Rank 0] step:1861/10000 train_time:419427ms step_avg:225.38ms +[2025-07-17 19:03:25] [Rank 0] PRINT: step:1875/10000 val_loss:4.0194 train_time:423075ms step_avg:225.64ms +[2025-07-17 19:03:25] [Rank 0] PRINT: step:1875/10000 val_loss:4.0194 train_time:423075ms step_avg:225.64ms +[2025-07-17 19:03:26] [Rank 0] step:1881/10000 train_time:423983ms step_avg:225.40ms +[2025-07-17 19:03:26] [Rank 0] step:1881/10000 train_time:423983ms step_avg:225.40ms +[2025-07-17 19:03:31] [Rank 0] step:1901/10000 train_time:428542ms step_avg:225.43ms +[2025-07-17 19:03:31] [Rank 0] step:1901/10000 train_time:428542ms step_avg:225.43ms +[2025-07-17 
19:03:36] [Rank 0] step:1921/10000 train_time:433101ms step_avg:225.46ms +[2025-07-17 19:03:36] [Rank 0] step:1921/10000 train_time:433101ms step_avg:225.46ms +[2025-07-17 19:03:40] [Rank 0] step:1941/10000 train_time:437662ms step_avg:225.48ms +[2025-07-17 19:03:40] [Rank 0] step:1941/10000 train_time:437662ms step_avg:225.48ms +[2025-07-17 19:03:45] [Rank 0] step:1961/10000 train_time:442220ms step_avg:225.51ms +[2025-07-17 19:03:45] [Rank 0] step:1961/10000 train_time:442220ms step_avg:225.51ms +[2025-07-17 19:03:49] [Rank 0] step:1981/10000 train_time:446778ms step_avg:225.53ms +[2025-07-17 19:03:49] [Rank 0] step:1981/10000 train_time:446778ms step_avg:225.53ms +[2025-07-17 19:03:58] [Rank 0] PRINT: step:2000/10000 val_loss:3.9961 train_time:451568ms step_avg:225.78ms +[2025-07-17 19:03:58] [Rank 0] PRINT: step:2000/10000 val_loss:3.9961 train_time:451568ms step_avg:225.78ms +[2025-07-17 19:03:58] [Rank 0] step:2001/10000 train_time:451581ms step_avg:225.68ms +[2025-07-17 19:03:58] [Rank 0] step:2001/10000 train_time:451581ms step_avg:225.68ms +[2025-07-17 19:04:03] [Rank 0] step:2021/10000 train_time:455897ms step_avg:225.58ms +[2025-07-17 19:04:03] [Rank 0] step:2021/10000 train_time:455897ms step_avg:225.58ms +[2025-07-17 19:04:07] [Rank 0] step:2041/10000 train_time:460461ms step_avg:225.61ms +[2025-07-17 19:04:07] [Rank 0] step:2041/10000 train_time:460461ms step_avg:225.61ms +[2025-07-17 19:04:12] [Rank 0] step:2061/10000 train_time:465022ms step_avg:225.63ms +[2025-07-17 19:04:12] [Rank 0] step:2061/10000 train_time:465022ms step_avg:225.63ms +[2025-07-17 19:04:17] [Rank 0] step:2081/10000 train_time:469585ms step_avg:225.65ms +[2025-07-17 19:04:17] [Rank 0] step:2081/10000 train_time:469585ms step_avg:225.65ms +[2025-07-17 19:04:21] [Rank 0] step:2101/10000 train_time:474146ms step_avg:225.68ms +[2025-07-17 19:04:21] [Rank 0] step:2101/10000 train_time:474146ms step_avg:225.68ms +[2025-07-17 19:04:26] [Rank 0] step:2121/10000 train_time:478710ms 
step_avg:225.70ms +[2025-07-17 19:04:26] [Rank 0] step:2121/10000 train_time:478710ms step_avg:225.70ms +[2025-07-17 19:04:31] [Rank 0] PRINT: step:2125/10000 val_loss:4.1262 train_time:480086ms step_avg:225.92ms +[2025-07-17 19:04:31] [Rank 0] PRINT: step:2125/10000 val_loss:4.1262 train_time:480086ms step_avg:225.92ms +[2025-07-17 19:04:35] [Rank 0] step:2141/10000 train_time:483273ms step_avg:225.72ms +[2025-07-17 19:04:35] [Rank 0] step:2141/10000 train_time:483273ms step_avg:225.72ms +[2025-07-17 19:04:39] [Rank 0] step:2161/10000 train_time:487841ms step_avg:225.75ms +[2025-07-17 19:04:39] [Rank 0] step:2161/10000 train_time:487841ms step_avg:225.75ms +[2025-07-17 19:04:44] [Rank 0] step:2181/10000 train_time:492410ms step_avg:225.77ms +[2025-07-17 19:04:44] [Rank 0] step:2181/10000 train_time:492410ms step_avg:225.77ms +[2025-07-17 19:04:48] [Rank 0] step:2201/10000 train_time:496973ms step_avg:225.79ms +[2025-07-17 19:04:48] [Rank 0] step:2201/10000 train_time:496973ms step_avg:225.79ms +[2025-07-17 19:04:53] [Rank 0] step:2221/10000 train_time:501540ms step_avg:225.82ms +[2025-07-17 19:04:53] [Rank 0] step:2221/10000 train_time:501540ms step_avg:225.82ms +[2025-07-17 19:04:58] [Rank 0] step:2241/10000 train_time:506202ms step_avg:225.88ms +[2025-07-17 19:04:58] [Rank 0] step:2241/10000 train_time:506202ms step_avg:225.88ms +[2025-07-17 19:05:04] [Rank 0] PRINT: step:2250/10000 val_loss:3.9012 train_time:508782ms step_avg:226.13ms +[2025-07-17 19:05:04] [Rank 0] PRINT: step:2250/10000 val_loss:3.9012 train_time:508782ms step_avg:226.13ms +[2025-07-17 19:05:07] [Rank 0] step:2261/10000 train_time:510883ms step_avg:225.95ms +[2025-07-17 19:05:07] [Rank 0] step:2261/10000 train_time:510883ms step_avg:225.95ms +[2025-07-17 19:05:12] [Rank 0] step:2281/10000 train_time:515559ms step_avg:226.02ms +[2025-07-17 19:05:12] [Rank 0] step:2281/10000 train_time:515559ms step_avg:226.02ms +[2025-07-17 19:05:16] [Rank 0] step:2301/10000 train_time:520234ms 
step_avg:226.09ms +[2025-07-17 19:05:16] [Rank 0] step:2301/10000 train_time:520234ms step_avg:226.09ms +[2025-07-17 19:05:21] [Rank 0] step:2321/10000 train_time:524912ms step_avg:226.16ms +[2025-07-17 19:05:21] [Rank 0] step:2321/10000 train_time:524912ms step_avg:226.16ms +[2025-07-17 19:05:26] [Rank 0] step:2341/10000 train_time:529591ms step_avg:226.22ms +[2025-07-17 19:05:26] [Rank 0] step:2341/10000 train_time:529591ms step_avg:226.22ms +[2025-07-17 19:05:30] [Rank 0] step:2361/10000 train_time:534264ms step_avg:226.29ms +[2025-07-17 19:05:30] [Rank 0] step:2361/10000 train_time:534264ms step_avg:226.29ms +[2025-07-17 19:05:38] [Rank 0] PRINT: step:2375/10000 val_loss:3.7570 train_time:538004ms step_avg:226.53ms +[2025-07-17 19:05:38] [Rank 0] PRINT: step:2375/10000 val_loss:3.7570 train_time:538004ms step_avg:226.53ms +[2025-07-17 19:05:40] [Rank 0] step:2381/10000 train_time:538935ms step_avg:226.35ms +[2025-07-17 19:05:40] [Rank 0] step:2381/10000 train_time:538935ms step_avg:226.35ms +[2025-07-17 19:05:44] [Rank 0] step:2401/10000 train_time:543604ms step_avg:226.41ms +[2025-07-17 19:05:44] [Rank 0] step:2401/10000 train_time:543604ms step_avg:226.41ms +[2025-07-17 19:05:49] [Rank 0] step:2421/10000 train_time:548273ms step_avg:226.47ms +[2025-07-17 19:05:49] [Rank 0] step:2421/10000 train_time:548273ms step_avg:226.47ms +[2025-07-17 19:05:54] [Rank 0] step:2441/10000 train_time:552943ms step_avg:226.52ms +[2025-07-17 19:05:54] [Rank 0] step:2441/10000 train_time:552943ms step_avg:226.52ms +[2025-07-17 19:05:58] [Rank 0] step:2461/10000 train_time:557614ms step_avg:226.58ms +[2025-07-17 19:05:58] [Rank 0] step:2461/10000 train_time:557614ms step_avg:226.58ms +[2025-07-17 19:06:03] [Rank 0] step:2481/10000 train_time:562284ms step_avg:226.64ms +[2025-07-17 19:06:03] [Rank 0] step:2481/10000 train_time:562284ms step_avg:226.64ms +[2025-07-17 19:06:12] [Rank 0] PRINT: step:2500/10000 val_loss:3.7468 train_time:567186ms step_avg:226.87ms +[2025-07-17 
19:06:12] [Rank 0] PRINT: step:2500/10000 val_loss:3.7468 train_time:567186ms step_avg:226.87ms +[2025-07-17 19:06:12] [Rank 0] step:2501/10000 train_time:567199ms step_avg:226.79ms +[2025-07-17 19:06:12] [Rank 0] step:2501/10000 train_time:567199ms step_avg:226.79ms +[2025-07-17 19:06:17] [Rank 0] step:2521/10000 train_time:571625ms step_avg:226.75ms +[2025-07-17 19:06:17] [Rank 0] step:2521/10000 train_time:571625ms step_avg:226.75ms +[2025-07-17 19:06:22] [Rank 0] step:2541/10000 train_time:576294ms step_avg:226.80ms +[2025-07-17 19:06:22] [Rank 0] step:2541/10000 train_time:576294ms step_avg:226.80ms +[2025-07-17 19:06:26] [Rank 0] step:2561/10000 train_time:580969ms step_avg:226.85ms +[2025-07-17 19:06:26] [Rank 0] step:2561/10000 train_time:580969ms step_avg:226.85ms +[2025-07-17 19:06:31] [Rank 0] step:2581/10000 train_time:585640ms step_avg:226.90ms +[2025-07-17 19:06:31] [Rank 0] step:2581/10000 train_time:585640ms step_avg:226.90ms +[2025-07-17 19:06:36] [Rank 0] step:2601/10000 train_time:590316ms step_avg:226.96ms +[2025-07-17 19:06:36] [Rank 0] step:2601/10000 train_time:590316ms step_avg:226.96ms +[2025-07-17 19:06:40] [Rank 0] step:2621/10000 train_time:594991ms step_avg:227.01ms +[2025-07-17 19:06:40] [Rank 0] step:2621/10000 train_time:594991ms step_avg:227.01ms +[2025-07-17 19:06:45] [Rank 0] PRINT: step:2625/10000 val_loss:3.7168 train_time:596398ms step_avg:227.20ms +[2025-07-17 19:06:45] [Rank 0] PRINT: step:2625/10000 val_loss:3.7168 train_time:596398ms step_avg:227.20ms +[2025-07-17 19:06:49] [Rank 0] step:2641/10000 train_time:599661ms step_avg:227.06ms +[2025-07-17 19:06:49] [Rank 0] step:2641/10000 train_time:599661ms step_avg:227.06ms +[2025-07-17 19:06:54] [Rank 0] step:2661/10000 train_time:604331ms step_avg:227.11ms +[2025-07-17 19:06:54] [Rank 0] step:2661/10000 train_time:604331ms step_avg:227.11ms +[2025-07-17 19:06:58] [Rank 0] step:2681/10000 train_time:609002ms step_avg:227.15ms +[2025-07-17 19:06:58] [Rank 0] step:2681/10000 
train_time:609002ms step_avg:227.15ms +[2025-07-17 19:07:03] [Rank 0] step:2701/10000 train_time:613675ms step_avg:227.20ms +[2025-07-17 19:07:03] [Rank 0] step:2701/10000 train_time:613675ms step_avg:227.20ms +[2025-07-17 19:07:08] [Rank 0] step:2721/10000 train_time:618347ms step_avg:227.25ms +[2025-07-17 19:07:08] [Rank 0] step:2721/10000 train_time:618347ms step_avg:227.25ms +[2025-07-17 19:07:12] [Rank 0] step:2741/10000 train_time:623019ms step_avg:227.30ms +[2025-07-17 19:07:12] [Rank 0] step:2741/10000 train_time:623019ms step_avg:227.30ms +[2025-07-17 19:07:19] [Rank 0] PRINT: step:2750/10000 val_loss:3.7654 train_time:625593ms step_avg:227.49ms +[2025-07-17 19:07:19] [Rank 0] PRINT: step:2750/10000 val_loss:3.7654 train_time:625593ms step_avg:227.49ms +[2025-07-17 19:07:22] [Rank 0] step:2761/10000 train_time:627692ms step_avg:227.34ms +[2025-07-17 19:07:22] [Rank 0] step:2761/10000 train_time:627692ms step_avg:227.34ms +[2025-07-17 19:07:26] [Rank 0] step:2781/10000 train_time:632363ms step_avg:227.39ms +[2025-07-17 19:07:26] [Rank 0] step:2781/10000 train_time:632363ms step_avg:227.39ms +[2025-07-17 19:07:31] [Rank 0] step:2801/10000 train_time:637032ms step_avg:227.43ms +[2025-07-17 19:07:31] [Rank 0] step:2801/10000 train_time:637032ms step_avg:227.43ms +[2025-07-17 19:07:36] [Rank 0] step:2821/10000 train_time:641702ms step_avg:227.47ms +[2025-07-17 19:07:36] [Rank 0] step:2821/10000 train_time:641702ms step_avg:227.47ms +[2025-07-17 19:07:40] [Rank 0] step:2841/10000 train_time:646375ms step_avg:227.52ms +[2025-07-17 19:07:40] [Rank 0] step:2841/10000 train_time:646375ms step_avg:227.52ms +[2025-07-17 19:07:45] [Rank 0] step:2861/10000 train_time:651046ms step_avg:227.56ms +[2025-07-17 19:07:45] [Rank 0] step:2861/10000 train_time:651046ms step_avg:227.56ms +[2025-07-17 19:07:53] [Rank 0] PRINT: step:2875/10000 val_loss:3.7953 train_time:654784ms step_avg:227.75ms +[2025-07-17 19:07:53] [Rank 0] PRINT: step:2875/10000 val_loss:3.7953 
train_time:654784ms step_avg:227.75ms +[2025-07-17 19:07:54] [Rank 0] step:2881/10000 train_time:655712ms step_avg:227.60ms +[2025-07-17 19:07:54] [Rank 0] step:2881/10000 train_time:655712ms step_avg:227.60ms +[2025-07-17 19:07:59] [Rank 0] step:2901/10000 train_time:660379ms step_avg:227.64ms +[2025-07-17 19:07:59] [Rank 0] step:2901/10000 train_time:660379ms step_avg:227.64ms +[2025-07-17 19:08:04] [Rank 0] step:2921/10000 train_time:665046ms step_avg:227.68ms +[2025-07-17 19:08:04] [Rank 0] step:2921/10000 train_time:665046ms step_avg:227.68ms +[2025-07-17 19:08:08] [Rank 0] step:2941/10000 train_time:669712ms step_avg:227.72ms +[2025-07-17 19:08:08] [Rank 0] step:2941/10000 train_time:669712ms step_avg:227.72ms +[2025-07-17 19:08:13] [Rank 0] step:2961/10000 train_time:674379ms step_avg:227.75ms +[2025-07-17 19:08:13] [Rank 0] step:2961/10000 train_time:674379ms step_avg:227.75ms +[2025-07-17 19:08:18] [Rank 0] step:2981/10000 train_time:679061ms step_avg:227.80ms +[2025-07-17 19:08:18] [Rank 0] step:2981/10000 train_time:679061ms step_avg:227.80ms +[2025-07-17 19:08:27] [Rank 0] PRINT: step:3000/10000 val_loss:3.7617 train_time:683981ms step_avg:227.99ms +[2025-07-17 19:08:27] [Rank 0] PRINT: step:3000/10000 val_loss:3.7617 train_time:683981ms step_avg:227.99ms +[2025-07-17 19:08:27] [Rank 0] step:3001/10000 train_time:683995ms step_avg:227.92ms +[2025-07-17 19:08:27] [Rank 0] step:3001/10000 train_time:683995ms step_avg:227.92ms +[2025-07-17 19:08:32] [Rank 0] step:3021/10000 train_time:688422ms step_avg:227.88ms +[2025-07-17 19:08:32] [Rank 0] step:3021/10000 train_time:688422ms step_avg:227.88ms +[2025-07-17 19:08:36] [Rank 0] step:3041/10000 train_time:693102ms step_avg:227.92ms +[2025-07-17 19:08:36] [Rank 0] step:3041/10000 train_time:693102ms step_avg:227.92ms +[2025-07-17 19:08:41] [Rank 0] step:3061/10000 train_time:697781ms step_avg:227.96ms +[2025-07-17 19:08:41] [Rank 0] step:3061/10000 train_time:697781ms step_avg:227.96ms +[2025-07-17 19:08:46] 
[Rank 0] step:3081/10000 train_time:702463ms step_avg:228.00ms +[2025-07-17 19:08:46] [Rank 0] step:3081/10000 train_time:702463ms step_avg:228.00ms +[2025-07-17 19:08:50] [Rank 0] step:3101/10000 train_time:707143ms step_avg:228.04ms +[2025-07-17 19:08:50] [Rank 0] step:3101/10000 train_time:707143ms step_avg:228.04ms +[2025-07-17 19:08:55] [Rank 0] step:3121/10000 train_time:711825ms step_avg:228.08ms +[2025-07-17 19:08:55] [Rank 0] step:3121/10000 train_time:711825ms step_avg:228.08ms +[2025-07-17 19:09:00] [Rank 0] PRINT: step:3125/10000 val_loss:3.7834 train_time:713233ms step_avg:228.23ms +[2025-07-17 19:09:00] [Rank 0] PRINT: step:3125/10000 val_loss:3.7834 train_time:713233ms step_avg:228.23ms +[2025-07-17 19:09:04] [Rank 0] step:3141/10000 train_time:716504ms step_avg:228.11ms +[2025-07-17 19:09:04] [Rank 0] step:3141/10000 train_time:716504ms step_avg:228.11ms +[2025-07-17 19:09:09] [Rank 0] step:3161/10000 train_time:721184ms step_avg:228.15ms +[2025-07-17 19:09:09] [Rank 0] step:3161/10000 train_time:721184ms step_avg:228.15ms +[2025-07-17 19:09:14] [Rank 0] step:3181/10000 train_time:725866ms step_avg:228.19ms +[2025-07-17 19:09:14] [Rank 0] step:3181/10000 train_time:725866ms step_avg:228.19ms +[2025-07-17 19:09:18] [Rank 0] step:3201/10000 train_time:730552ms step_avg:228.23ms +[2025-07-17 19:09:18] [Rank 0] step:3201/10000 train_time:730552ms step_avg:228.23ms +[2025-07-17 19:09:23] [Rank 0] step:3221/10000 train_time:735240ms step_avg:228.26ms +[2025-07-17 19:09:23] [Rank 0] step:3221/10000 train_time:735240ms step_avg:228.26ms +[2025-07-17 19:09:28] [Rank 0] step:3241/10000 train_time:739932ms step_avg:228.30ms +[2025-07-17 19:09:28] [Rank 0] step:3241/10000 train_time:739932ms step_avg:228.30ms +[2025-07-17 19:09:34] [Rank 0] PRINT: step:3250/10000 val_loss:3.7798 train_time:742514ms step_avg:228.47ms +[2025-07-17 19:09:34] [Rank 0] PRINT: step:3250/10000 val_loss:3.7798 train_time:742514ms step_avg:228.47ms +[2025-07-17 19:09:37] [Rank 0] 
step:3261/10000 train_time:744616ms step_avg:228.34ms +[2025-07-17 19:09:37] [Rank 0] step:3261/10000 train_time:744616ms step_avg:228.34ms +[2025-07-17 19:09:42] [Rank 0] step:3281/10000 train_time:749307ms step_avg:228.38ms +[2025-07-17 19:09:42] [Rank 0] step:3281/10000 train_time:749307ms step_avg:228.38ms +[2025-07-17 19:09:46] [Rank 0] step:3301/10000 train_time:754006ms step_avg:228.42ms +[2025-07-17 19:09:46] [Rank 0] step:3301/10000 train_time:754006ms step_avg:228.42ms +[2025-07-17 19:09:51] [Rank 0] step:3321/10000 train_time:758696ms step_avg:228.45ms +[2025-07-17 19:09:51] [Rank 0] step:3321/10000 train_time:758696ms step_avg:228.45ms +[2025-07-17 19:09:56] [Rank 0] step:3341/10000 train_time:763388ms step_avg:228.49ms +[2025-07-17 19:09:56] [Rank 0] step:3341/10000 train_time:763388ms step_avg:228.49ms +[2025-07-17 19:10:00] [Rank 0] step:3361/10000 train_time:768077ms step_avg:228.53ms +[2025-07-17 19:10:00] [Rank 0] step:3361/10000 train_time:768077ms step_avg:228.53ms +[2025-07-17 19:10:08] [Rank 0] PRINT: step:3375/10000 val_loss:3.8019 train_time:771834ms step_avg:228.69ms +[2025-07-17 19:10:08] [Rank 0] PRINT: step:3375/10000 val_loss:3.8019 train_time:771834ms step_avg:228.69ms +[2025-07-17 19:10:10] [Rank 0] step:3381/10000 train_time:772768ms step_avg:228.56ms +[2025-07-17 19:10:10] [Rank 0] step:3381/10000 train_time:772768ms step_avg:228.56ms +[2025-07-17 19:10:14] [Rank 0] step:3401/10000 train_time:777459ms step_avg:228.60ms +[2025-07-17 19:10:14] [Rank 0] step:3401/10000 train_time:777459ms step_avg:228.60ms +[2025-07-17 19:10:19] [Rank 0] step:3421/10000 train_time:782150ms step_avg:228.63ms +[2025-07-17 19:10:19] [Rank 0] step:3421/10000 train_time:782150ms step_avg:228.63ms +[2025-07-17 19:10:24] [Rank 0] step:3441/10000 train_time:786841ms step_avg:228.67ms +[2025-07-17 19:10:24] [Rank 0] step:3441/10000 train_time:786841ms step_avg:228.67ms +[2025-07-17 19:10:28] [Rank 0] step:3461/10000 train_time:791535ms step_avg:228.70ms 
+[2025-07-17 19:10:28] [Rank 0] step:3461/10000 train_time:791535ms step_avg:228.70ms +[2025-07-17 19:10:33] [Rank 0] step:3481/10000 train_time:796227ms step_avg:228.74ms +[2025-07-17 19:10:33] [Rank 0] step:3481/10000 train_time:796227ms step_avg:228.74ms +[2025-07-17 19:10:42] [Rank 0] PRINT: step:3500/10000 val_loss:3.7891 train_time:801151ms step_avg:228.90ms +[2025-07-17 19:10:42] [Rank 0] PRINT: step:3500/10000 val_loss:3.7891 train_time:801151ms step_avg:228.90ms +[2025-07-17 19:10:42] [Rank 0] step:3501/10000 train_time:801165ms step_avg:228.84ms +[2025-07-17 19:10:42] [Rank 0] step:3501/10000 train_time:801165ms step_avg:228.84ms +[2025-07-17 19:10:47] [Rank 0] step:3521/10000 train_time:805606ms step_avg:228.80ms +[2025-07-17 19:10:47] [Rank 0] step:3521/10000 train_time:805606ms step_avg:228.80ms +[2025-07-17 19:10:52] [Rank 0] step:3541/10000 train_time:810297ms step_avg:228.83ms +[2025-07-17 19:10:52] [Rank 0] step:3541/10000 train_time:810297ms step_avg:228.83ms +[2025-07-17 19:10:56] [Rank 0] step:3561/10000 train_time:814985ms step_avg:228.86ms +[2025-07-17 19:10:56] [Rank 0] step:3561/10000 train_time:814985ms step_avg:228.86ms +[2025-07-17 19:11:01] [Rank 0] step:3581/10000 train_time:819675ms step_avg:228.90ms +[2025-07-17 19:11:01] [Rank 0] step:3581/10000 train_time:819675ms step_avg:228.90ms +[2025-07-17 19:11:06] [Rank 0] step:3601/10000 train_time:824367ms step_avg:228.93ms +[2025-07-17 19:11:06] [Rank 0] step:3601/10000 train_time:824367ms step_avg:228.93ms +[2025-07-17 19:11:10] [Rank 0] step:3621/10000 train_time:829056ms step_avg:228.96ms +[2025-07-17 19:11:10] [Rank 0] step:3621/10000 train_time:829056ms step_avg:228.96ms +[2025-07-17 19:11:16] [Rank 0] PRINT: step:3625/10000 val_loss:3.7855 train_time:830465ms step_avg:229.09ms +[2025-07-17 19:11:16] [Rank 0] PRINT: step:3625/10000 val_loss:3.7855 train_time:830465ms step_avg:229.09ms +[2025-07-17 19:11:20] [Rank 0] step:3641/10000 train_time:833741ms step_avg:228.99ms +[2025-07-17 
19:11:20] [Rank 0] step:3641/10000 train_time:833741ms step_avg:228.99ms +[2025-07-17 19:11:24] [Rank 0] step:3661/10000 train_time:838428ms step_avg:229.02ms +[2025-07-17 19:11:24] [Rank 0] step:3661/10000 train_time:838428ms step_avg:229.02ms +[2025-07-17 19:11:29] [Rank 0] step:3681/10000 train_time:843112ms step_avg:229.04ms +[2025-07-17 19:11:29] [Rank 0] step:3681/10000 train_time:843112ms step_avg:229.04ms +[2025-07-17 19:11:34] [Rank 0] step:3701/10000 train_time:847797ms step_avg:229.07ms +[2025-07-17 19:11:34] [Rank 0] step:3701/10000 train_time:847797ms step_avg:229.07ms +[2025-07-17 19:11:38] [Rank 0] step:3721/10000 train_time:852546ms step_avg:229.12ms +[2025-07-17 19:11:38] [Rank 0] step:3721/10000 train_time:852546ms step_avg:229.12ms +[2025-07-17 19:11:43] [Rank 0] step:3741/10000 train_time:857317ms step_avg:229.17ms +[2025-07-17 19:11:43] [Rank 0] step:3741/10000 train_time:857317ms step_avg:229.17ms +[2025-07-17 19:11:50] [Rank 0] PRINT: step:3750/10000 val_loss:3.7306 train_time:859945ms step_avg:229.32ms +[2025-07-17 19:11:50] [Rank 0] PRINT: step:3750/10000 val_loss:3.7306 train_time:859945ms step_avg:229.32ms +[2025-07-17 19:11:53] [Rank 0] step:3761/10000 train_time:862089ms step_avg:229.22ms +[2025-07-17 19:11:53] [Rank 0] step:3761/10000 train_time:862089ms step_avg:229.22ms +[2025-07-17 19:11:57] [Rank 0] step:3781/10000 train_time:866865ms step_avg:229.27ms +[2025-07-17 19:11:57] [Rank 0] step:3781/10000 train_time:866865ms step_avg:229.27ms +[2025-07-17 19:12:02] [Rank 0] step:3801/10000 train_time:871640ms step_avg:229.32ms +[2025-07-17 19:12:02] [Rank 0] step:3801/10000 train_time:871640ms step_avg:229.32ms +[2025-07-17 19:12:07] [Rank 0] step:3821/10000 train_time:876416ms step_avg:229.37ms +[2025-07-17 19:12:07] [Rank 0] step:3821/10000 train_time:876416ms step_avg:229.37ms +[2025-07-17 19:12:12] [Rank 0] step:3841/10000 train_time:881194ms step_avg:229.42ms +[2025-07-17 19:12:12] [Rank 0] step:3841/10000 train_time:881194ms 
step_avg:229.42ms +[2025-07-17 19:12:17] [Rank 0] step:3861/10000 train_time:885970ms step_avg:229.47ms +[2025-07-17 19:12:17] [Rank 0] step:3861/10000 train_time:885970ms step_avg:229.47ms +[2025-07-17 19:12:25] [Rank 0] PRINT: step:3875/10000 val_loss:3.7552 train_time:889796ms step_avg:229.62ms +[2025-07-17 19:12:25] [Rank 0] PRINT: step:3875/10000 val_loss:3.7552 train_time:889796ms step_avg:229.62ms +[2025-07-17 19:12:26] [Rank 0] step:3881/10000 train_time:890747ms step_avg:229.51ms +[2025-07-17 19:12:26] [Rank 0] step:3881/10000 train_time:890747ms step_avg:229.51ms +[2025-07-17 19:12:31] [Rank 0] step:3901/10000 train_time:895516ms step_avg:229.56ms +[2025-07-17 19:12:31] [Rank 0] step:3901/10000 train_time:895516ms step_avg:229.56ms +[2025-07-17 19:12:36] [Rank 0] step:3921/10000 train_time:900283ms step_avg:229.61ms +[2025-07-17 19:12:36] [Rank 0] step:3921/10000 train_time:900283ms step_avg:229.61ms +[2025-07-17 19:12:40] [Rank 0] step:3941/10000 train_time:905050ms step_avg:229.65ms +[2025-07-17 19:12:40] [Rank 0] step:3941/10000 train_time:905050ms step_avg:229.65ms +[2025-07-17 19:12:45] [Rank 0] step:3961/10000 train_time:909819ms step_avg:229.69ms +[2025-07-17 19:12:45] [Rank 0] step:3961/10000 train_time:909819ms step_avg:229.69ms +[2025-07-17 19:12:50] [Rank 0] step:3981/10000 train_time:914587ms step_avg:229.74ms +[2025-07-17 19:12:50] [Rank 0] step:3981/10000 train_time:914587ms step_avg:229.74ms +[2025-07-17 19:12:59] [Rank 0] PRINT: step:4000/10000 val_loss:3.7625 train_time:919590ms step_avg:229.90ms +[2025-07-17 19:12:59] [Rank 0] PRINT: step:4000/10000 val_loss:3.7625 train_time:919590ms step_avg:229.90ms +[2025-07-17 19:12:59] [Rank 0] step:4001/10000 train_time:919603ms step_avg:229.84ms +[2025-07-17 19:12:59] [Rank 0] step:4001/10000 train_time:919603ms step_avg:229.84ms +[2025-07-17 19:13:04] [Rank 0] step:4021/10000 train_time:924114ms step_avg:229.82ms +[2025-07-17 19:13:04] [Rank 0] step:4021/10000 train_time:924114ms 
step_avg:229.82ms +[2025-07-17 19:13:09] [Rank 0] step:4041/10000 train_time:928876ms step_avg:229.86ms +[2025-07-17 19:13:09] [Rank 0] step:4041/10000 train_time:928876ms step_avg:229.86ms +[2025-07-17 19:13:13] [Rank 0] step:4061/10000 train_time:933637ms step_avg:229.90ms +[2025-07-17 19:13:13] [Rank 0] step:4061/10000 train_time:933637ms step_avg:229.90ms +[2025-07-17 19:13:18] [Rank 0] step:4081/10000 train_time:938400ms step_avg:229.94ms +[2025-07-17 19:13:18] [Rank 0] step:4081/10000 train_time:938400ms step_avg:229.94ms +[2025-07-17 19:13:23] [Rank 0] step:4101/10000 train_time:943165ms step_avg:229.98ms +[2025-07-17 19:13:23] [Rank 0] step:4101/10000 train_time:943165ms step_avg:229.98ms +[2025-07-17 19:13:28] [Rank 0] step:4121/10000 train_time:947930ms step_avg:230.02ms +[2025-07-17 19:13:28] [Rank 0] step:4121/10000 train_time:947930ms step_avg:230.02ms +[2025-07-17 19:13:33] [Rank 0] PRINT: step:4125/10000 val_loss:3.7655 train_time:949364ms step_avg:230.15ms +[2025-07-17 19:13:33] [Rank 0] PRINT: step:4125/10000 val_loss:3.7655 train_time:949364ms step_avg:230.15ms +[2025-07-17 19:13:37] [Rank 0] step:4141/10000 train_time:952694ms step_avg:230.06ms +[2025-07-17 19:13:37] [Rank 0] step:4141/10000 train_time:952694ms step_avg:230.06ms +[2025-07-17 19:13:42] [Rank 0] step:4161/10000 train_time:957459ms step_avg:230.10ms +[2025-07-17 19:13:42] [Rank 0] step:4161/10000 train_time:957459ms step_avg:230.10ms +[2025-07-17 19:13:47] [Rank 0] step:4181/10000 train_time:962219ms step_avg:230.14ms +[2025-07-17 19:13:47] [Rank 0] step:4181/10000 train_time:962219ms step_avg:230.14ms +[2025-07-17 19:13:51] [Rank 0] step:4201/10000 train_time:966988ms step_avg:230.18ms +[2025-07-17 19:13:51] [Rank 0] step:4201/10000 train_time:966988ms step_avg:230.18ms +[2025-07-17 19:13:56] [Rank 0] step:4221/10000 train_time:971756ms step_avg:230.22ms +[2025-07-17 19:13:56] [Rank 0] step:4221/10000 train_time:971756ms step_avg:230.22ms +[2025-07-17 19:14:01] [Rank 0] 
step:4241/10000 train_time:976527ms step_avg:230.26ms +[2025-07-17 19:14:01] [Rank 0] step:4241/10000 train_time:976527ms step_avg:230.26ms +[2025-07-17 19:14:08] [Rank 0] PRINT: step:4250/10000 val_loss:3.7769 train_time:979153ms step_avg:230.39ms +[2025-07-17 19:14:08] [Rank 0] PRINT: step:4250/10000 val_loss:3.7769 train_time:979153ms step_avg:230.39ms +[2025-07-17 19:14:10] [Rank 0] step:4261/10000 train_time:981293ms step_avg:230.30ms +[2025-07-17 19:14:10] [Rank 0] step:4261/10000 train_time:981293ms step_avg:230.30ms +[2025-07-17 19:14:15] [Rank 0] step:4281/10000 train_time:986060ms step_avg:230.33ms +[2025-07-17 19:14:15] [Rank 0] step:4281/10000 train_time:986060ms step_avg:230.33ms +[2025-07-17 19:14:20] [Rank 0] step:4301/10000 train_time:990829ms step_avg:230.37ms +[2025-07-17 19:14:20] [Rank 0] step:4301/10000 train_time:990829ms step_avg:230.37ms +[2025-07-17 19:14:25] [Rank 0] step:4321/10000 train_time:995603ms step_avg:230.41ms +[2025-07-17 19:14:25] [Rank 0] step:4321/10000 train_time:995603ms step_avg:230.41ms +[2025-07-17 19:14:29] [Rank 0] step:4341/10000 train_time:1000369ms step_avg:230.45ms +[2025-07-17 19:14:29] [Rank 0] step:4341/10000 train_time:1000369ms step_avg:230.45ms +[2025-07-17 19:14:34] [Rank 0] step:4361/10000 train_time:1005140ms step_avg:230.48ms +[2025-07-17 19:14:34] [Rank 0] step:4361/10000 train_time:1005140ms step_avg:230.48ms +[2025-07-17 19:14:42] [Rank 0] PRINT: step:4375/10000 val_loss:3.7626 train_time:1008959ms step_avg:230.62ms +[2025-07-17 19:14:42] [Rank 0] PRINT: step:4375/10000 val_loss:3.7626 train_time:1008959ms step_avg:230.62ms +[2025-07-17 19:14:44] [Rank 0] step:4381/10000 train_time:1009909ms step_avg:230.52ms +[2025-07-17 19:14:44] [Rank 0] step:4381/10000 train_time:1009909ms step_avg:230.52ms +[2025-07-17 19:14:48] [Rank 0] step:4401/10000 train_time:1014686ms step_avg:230.56ms +[2025-07-17 19:14:48] [Rank 0] step:4401/10000 train_time:1014686ms step_avg:230.56ms +[2025-07-17 19:14:53] [Rank 0] 
step:4421/10000 train_time:1019460ms step_avg:230.59ms +[2025-07-17 19:14:53] [Rank 0] step:4421/10000 train_time:1019460ms step_avg:230.59ms +[2025-07-17 19:14:58] [Rank 0] step:4441/10000 train_time:1024232ms step_avg:230.63ms +[2025-07-17 19:14:58] [Rank 0] step:4441/10000 train_time:1024232ms step_avg:230.63ms +[2025-07-17 19:15:03] [Rank 0] step:4461/10000 train_time:1029019ms step_avg:230.67ms +[2025-07-17 19:15:03] [Rank 0] step:4461/10000 train_time:1029019ms step_avg:230.67ms +[2025-07-17 19:15:07] [Rank 0] step:4481/10000 train_time:1033812ms step_avg:230.71ms +[2025-07-17 19:15:07] [Rank 0] step:4481/10000 train_time:1033812ms step_avg:230.71ms +[2025-07-17 19:15:17] [Rank 0] PRINT: step:4500/10000 val_loss:3.7453 train_time:1038843ms step_avg:230.85ms +[2025-07-17 19:15:17] [Rank 0] PRINT: step:4500/10000 val_loss:3.7453 train_time:1038843ms step_avg:230.85ms +[2025-07-17 19:15:17] [Rank 0] step:4501/10000 train_time:1038856ms step_avg:230.81ms +[2025-07-17 19:15:17] [Rank 0] step:4501/10000 train_time:1038856ms step_avg:230.81ms +[2025-07-17 19:15:22] [Rank 0] step:4521/10000 train_time:1043394ms step_avg:230.79ms +[2025-07-17 19:15:22] [Rank 0] step:4521/10000 train_time:1043394ms step_avg:230.79ms +[2025-07-17 19:15:26] [Rank 0] step:4541/10000 train_time:1048194ms step_avg:230.83ms +[2025-07-17 19:15:26] [Rank 0] step:4541/10000 train_time:1048194ms step_avg:230.83ms +[2025-07-17 19:15:31] [Rank 0] step:4561/10000 train_time:1052985ms step_avg:230.87ms +[2025-07-17 19:15:31] [Rank 0] step:4561/10000 train_time:1052985ms step_avg:230.87ms +[2025-07-17 19:15:36] [Rank 0] step:4581/10000 train_time:1057780ms step_avg:230.91ms +[2025-07-17 19:15:36] [Rank 0] step:4581/10000 train_time:1057780ms step_avg:230.91ms +[2025-07-17 19:15:41] [Rank 0] step:4601/10000 train_time:1062581ms step_avg:230.95ms +[2025-07-17 19:15:41] [Rank 0] step:4601/10000 train_time:1062581ms step_avg:230.95ms +[2025-07-17 19:15:46] [Rank 0] step:4621/10000 train_time:1067374ms 
step_avg:230.98ms +[2025-07-17 19:15:46] [Rank 0] step:4621/10000 train_time:1067374ms step_avg:230.98ms +[2025-07-17 19:15:51] [Rank 0] PRINT: step:4625/10000 val_loss:3.7685 train_time:1068818ms step_avg:231.10ms +[2025-07-17 19:15:51] [Rank 0] PRINT: step:4625/10000 val_loss:3.7685 train_time:1068818ms step_avg:231.10ms +[2025-07-17 19:15:55] [Rank 0] step:4641/10000 train_time:1072164ms step_avg:231.02ms +[2025-07-17 19:15:55] [Rank 0] step:4641/10000 train_time:1072164ms step_avg:231.02ms +[2025-07-17 19:16:00] [Rank 0] step:4661/10000 train_time:1076960ms step_avg:231.06ms +[2025-07-17 19:16:00] [Rank 0] step:4661/10000 train_time:1076960ms step_avg:231.06ms +[2025-07-17 19:16:05] [Rank 0] step:4681/10000 train_time:1081755ms step_avg:231.09ms +[2025-07-17 19:16:05] [Rank 0] step:4681/10000 train_time:1081755ms step_avg:231.09ms +[2025-07-17 19:16:09] [Rank 0] step:4701/10000 train_time:1086548ms step_avg:231.13ms +[2025-07-17 19:16:09] [Rank 0] step:4701/10000 train_time:1086548ms step_avg:231.13ms +[2025-07-17 19:16:14] [Rank 0] step:4721/10000 train_time:1091341ms step_avg:231.17ms +[2025-07-17 19:16:14] [Rank 0] step:4721/10000 train_time:1091341ms step_avg:231.17ms +[2025-07-17 19:16:19] [Rank 0] step:4741/10000 train_time:1096134ms step_avg:231.20ms +[2025-07-17 19:16:19] [Rank 0] step:4741/10000 train_time:1096134ms step_avg:231.20ms +[2025-07-17 19:16:26] [Rank 0] PRINT: step:4750/10000 val_loss:3.7501 train_time:1098775ms step_avg:231.32ms +[2025-07-17 19:16:26] [Rank 0] PRINT: step:4750/10000 val_loss:3.7501 train_time:1098775ms step_avg:231.32ms +[2025-07-17 19:16:28] [Rank 0] step:4761/10000 train_time:1100925ms step_avg:231.24ms +[2025-07-17 19:16:28] [Rank 0] step:4761/10000 train_time:1100925ms step_avg:231.24ms +[2025-07-17 19:16:33] [Rank 0] step:4781/10000 train_time:1105713ms step_avg:231.27ms +[2025-07-17 19:16:33] [Rank 0] step:4781/10000 train_time:1105713ms step_avg:231.27ms +[2025-07-17 19:16:38] [Rank 0] step:4801/10000 
train_time:1110501ms step_avg:231.31ms +[2025-07-17 19:16:38] [Rank 0] step:4801/10000 train_time:1110501ms step_avg:231.31ms +[2025-07-17 19:16:43] [Rank 0] step:4821/10000 train_time:1115292ms step_avg:231.34ms +[2025-07-17 19:16:43] [Rank 0] step:4821/10000 train_time:1115292ms step_avg:231.34ms +[2025-07-17 19:16:48] [Rank 0] step:4841/10000 train_time:1120088ms step_avg:231.38ms +[2025-07-17 19:16:48] [Rank 0] step:4841/10000 train_time:1120088ms step_avg:231.38ms +[2025-07-17 19:16:52] [Rank 0] step:4861/10000 train_time:1124879ms step_avg:231.41ms +[2025-07-17 19:16:52] [Rank 0] step:4861/10000 train_time:1124879ms step_avg:231.41ms +[2025-07-17 19:17:00] [Rank 0] PRINT: step:4875/10000 val_loss:3.7656 train_time:1128719ms step_avg:231.53ms +[2025-07-17 19:17:00] [Rank 0] PRINT: step:4875/10000 val_loss:3.7656 train_time:1128719ms step_avg:231.53ms +[2025-07-17 19:17:02] [Rank 0] step:4881/10000 train_time:1129676ms step_avg:231.44ms +[2025-07-17 19:17:02] [Rank 0] step:4881/10000 train_time:1129676ms step_avg:231.44ms +[2025-07-17 19:17:07] [Rank 0] step:4901/10000 train_time:1134472ms step_avg:231.48ms +[2025-07-17 19:17:07] [Rank 0] step:4901/10000 train_time:1134472ms step_avg:231.48ms +[2025-07-17 19:17:11] [Rank 0] step:4921/10000 train_time:1139261ms step_avg:231.51ms +[2025-07-17 19:17:11] [Rank 0] step:4921/10000 train_time:1139261ms step_avg:231.51ms +[2025-07-17 19:17:16] [Rank 0] step:4941/10000 train_time:1144058ms step_avg:231.54ms +[2025-07-17 19:17:16] [Rank 0] step:4941/10000 train_time:1144058ms step_avg:231.54ms +[2025-07-17 19:17:21] [Rank 0] step:4961/10000 train_time:1148850ms step_avg:231.58ms +[2025-07-17 19:17:21] [Rank 0] step:4961/10000 train_time:1148850ms step_avg:231.58ms +[2025-07-17 19:17:26] [Rank 0] step:4981/10000 train_time:1153645ms step_avg:231.61ms +[2025-07-17 19:17:26] [Rank 0] step:4981/10000 train_time:1153645ms step_avg:231.61ms +[2025-07-17 19:17:35] [Rank 0] PRINT: step:5000/10000 val_loss:3.7295 
train_time:1158682ms step_avg:231.74ms +[2025-07-17 19:17:35] [Rank 0] PRINT: step:5000/10000 val_loss:3.7295 train_time:1158682ms step_avg:231.74ms +[2025-07-17 19:17:35] [Rank 0] step:5001/10000 train_time:1158695ms step_avg:231.69ms +[2025-07-17 19:17:35] [Rank 0] step:5001/10000 train_time:1158695ms step_avg:231.69ms +[2025-07-17 19:17:40] [Rank 0] step:5021/10000 train_time:1163244ms step_avg:231.68ms +[2025-07-17 19:17:40] [Rank 0] step:5021/10000 train_time:1163244ms step_avg:231.68ms +[2025-07-17 19:17:44] [Rank 0] step:5041/10000 train_time:1168047ms step_avg:231.71ms +[2025-07-17 19:17:44] [Rank 0] step:5041/10000 train_time:1168047ms step_avg:231.71ms +[2025-07-17 19:17:49] [Rank 0] step:5061/10000 train_time:1172851ms step_avg:231.74ms +[2025-07-17 19:17:49] [Rank 0] step:5061/10000 train_time:1172851ms step_avg:231.74ms +[2025-07-17 19:17:54] [Rank 0] step:5081/10000 train_time:1177654ms step_avg:231.78ms +[2025-07-17 19:17:54] [Rank 0] step:5081/10000 train_time:1177654ms step_avg:231.78ms +[2025-07-17 19:17:59] [Rank 0] step:5101/10000 train_time:1182456ms step_avg:231.81ms +[2025-07-17 19:17:59] [Rank 0] step:5101/10000 train_time:1182456ms step_avg:231.81ms +[2025-07-17 19:18:04] [Rank 0] step:5121/10000 train_time:1187254ms step_avg:231.84ms +[2025-07-17 19:18:04] [Rank 0] step:5121/10000 train_time:1187254ms step_avg:231.84ms +[2025-07-17 19:18:09] [Rank 0] PRINT: step:5125/10000 val_loss:3.7479 train_time:1188699ms step_avg:231.94ms +[2025-07-17 19:18:09] [Rank 0] PRINT: step:5125/10000 val_loss:3.7479 train_time:1188699ms step_avg:231.94ms +[2025-07-17 19:18:13] [Rank 0] step:5141/10000 train_time:1192057ms step_avg:231.87ms +[2025-07-17 19:18:13] [Rank 0] step:5141/10000 train_time:1192057ms step_avg:231.87ms +[2025-07-17 19:18:18] [Rank 0] step:5161/10000 train_time:1196860ms step_avg:231.90ms +[2025-07-17 19:18:18] [Rank 0] step:5161/10000 train_time:1196860ms step_avg:231.90ms +[2025-07-17 19:18:23] [Rank 0] step:5181/10000 
train_time:1201668ms step_avg:231.94ms +[2025-07-17 19:18:23] [Rank 0] step:5181/10000 train_time:1201668ms step_avg:231.94ms +[2025-07-17 19:18:28] [Rank 0] step:5201/10000 train_time:1206517ms step_avg:231.98ms +[2025-07-17 19:18:28] [Rank 0] step:5201/10000 train_time:1206517ms step_avg:231.98ms +[2025-07-17 19:18:32] [Rank 0] step:5221/10000 train_time:1211399ms step_avg:232.02ms +[2025-07-17 19:18:32] [Rank 0] step:5221/10000 train_time:1211399ms step_avg:232.02ms +[2025-07-17 19:18:37] [Rank 0] step:5241/10000 train_time:1216274ms step_avg:232.07ms +[2025-07-17 19:18:37] [Rank 0] step:5241/10000 train_time:1216274ms step_avg:232.07ms +[2025-07-17 19:18:44] [Rank 0] PRINT: step:5250/10000 val_loss:3.6325 train_time:1218954ms step_avg:232.18ms +[2025-07-17 19:18:44] [Rank 0] PRINT: step:5250/10000 val_loss:3.6325 train_time:1218954ms step_avg:232.18ms +[2025-07-17 19:18:47] [Rank 0] step:5261/10000 train_time:1221139ms step_avg:232.11ms +[2025-07-17 19:18:47] [Rank 0] step:5261/10000 train_time:1221139ms step_avg:232.11ms +[2025-07-17 19:18:52] [Rank 0] step:5281/10000 train_time:1226009ms step_avg:232.15ms +[2025-07-17 19:18:52] [Rank 0] step:5281/10000 train_time:1226009ms step_avg:232.15ms +[2025-07-17 19:18:57] [Rank 0] step:5301/10000 train_time:1230883ms step_avg:232.20ms +[2025-07-17 19:18:57] [Rank 0] step:5301/10000 train_time:1230883ms step_avg:232.20ms +[2025-07-17 19:19:01] [Rank 0] step:5321/10000 train_time:1235758ms step_avg:232.24ms +[2025-07-17 19:19:01] [Rank 0] step:5321/10000 train_time:1235758ms step_avg:232.24ms +[2025-07-17 19:19:06] [Rank 0] step:5341/10000 train_time:1240645ms step_avg:232.29ms +[2025-07-17 19:19:06] [Rank 0] step:5341/10000 train_time:1240645ms step_avg:232.29ms +[2025-07-17 19:19:11] [Rank 0] step:5361/10000 train_time:1245523ms step_avg:232.33ms +[2025-07-17 19:19:11] [Rank 0] step:5361/10000 train_time:1245523ms step_avg:232.33ms +[2025-07-17 19:19:19] [Rank 0] PRINT: step:5375/10000 val_loss:3.6633 
train_time:1249430ms step_avg:232.45ms +[2025-07-17 19:19:19] [Rank 0] PRINT: step:5375/10000 val_loss:3.6633 train_time:1249430ms step_avg:232.45ms +[2025-07-17 19:19:20] [Rank 0] step:5381/10000 train_time:1250406ms step_avg:232.37ms +[2025-07-17 19:19:20] [Rank 0] step:5381/10000 train_time:1250406ms step_avg:232.37ms +[2025-07-17 19:19:25] [Rank 0] step:5401/10000 train_time:1255287ms step_avg:232.42ms +[2025-07-17 19:19:25] [Rank 0] step:5401/10000 train_time:1255287ms step_avg:232.42ms +[2025-07-17 19:19:30] [Rank 0] step:5421/10000 train_time:1260173ms step_avg:232.46ms +[2025-07-17 19:19:30] [Rank 0] step:5421/10000 train_time:1260173ms step_avg:232.46ms +[2025-07-17 19:19:35] [Rank 0] step:5441/10000 train_time:1265048ms step_avg:232.50ms +[2025-07-17 19:19:35] [Rank 0] step:5441/10000 train_time:1265048ms step_avg:232.50ms +[2025-07-17 19:19:40] [Rank 0] step:5461/10000 train_time:1269933ms step_avg:232.55ms +[2025-07-17 19:19:40] [Rank 0] step:5461/10000 train_time:1269933ms step_avg:232.55ms +[2025-07-17 19:19:45] [Rank 0] step:5481/10000 train_time:1274820ms step_avg:232.59ms +[2025-07-17 19:19:45] [Rank 0] step:5481/10000 train_time:1274820ms step_avg:232.59ms +[2025-07-17 19:19:54] [Rank 0] PRINT: step:5500/10000 val_loss:3.6477 train_time:1279946ms step_avg:232.72ms +[2025-07-17 19:19:54] [Rank 0] PRINT: step:5500/10000 val_loss:3.6477 train_time:1279946ms step_avg:232.72ms +[2025-07-17 19:19:54] [Rank 0] step:5501/10000 train_time:1279960ms step_avg:232.68ms +[2025-07-17 19:19:54] [Rank 0] step:5501/10000 train_time:1279960ms step_avg:232.68ms +[2025-07-17 19:19:59] [Rank 0] step:5521/10000 train_time:1284570ms step_avg:232.67ms +[2025-07-17 19:19:59] [Rank 0] step:5521/10000 train_time:1284570ms step_avg:232.67ms +[2025-07-17 19:20:04] [Rank 0] step:5541/10000 train_time:1289453ms step_avg:232.71ms +[2025-07-17 19:20:04] [Rank 0] step:5541/10000 train_time:1289453ms step_avg:232.71ms +[2025-07-17 19:20:09] [Rank 0] step:5561/10000 
train_time:1294331ms step_avg:232.75ms +[2025-07-17 19:20:09] [Rank 0] step:5561/10000 train_time:1294331ms step_avg:232.75ms +[2025-07-17 19:20:14] [Rank 0] step:5581/10000 train_time:1299202ms step_avg:232.79ms +[2025-07-17 19:20:14] [Rank 0] step:5581/10000 train_time:1299202ms step_avg:232.79ms +[2025-07-17 19:20:19] [Rank 0] step:5601/10000 train_time:1304084ms step_avg:232.83ms +[2025-07-17 19:20:19] [Rank 0] step:5601/10000 train_time:1304084ms step_avg:232.83ms +[2025-07-17 19:20:24] [Rank 0] step:5621/10000 train_time:1308963ms step_avg:232.87ms +[2025-07-17 19:20:24] [Rank 0] step:5621/10000 train_time:1308963ms step_avg:232.87ms +[2025-07-17 19:20:29] [Rank 0] PRINT: step:5625/10000 val_loss:3.6727 train_time:1310428ms step_avg:232.97ms +[2025-07-17 19:20:29] [Rank 0] PRINT: step:5625/10000 val_loss:3.6727 train_time:1310428ms step_avg:232.97ms +[2025-07-17 19:20:33] [Rank 0] step:5641/10000 train_time:1313834ms step_avg:232.91ms +[2025-07-17 19:20:33] [Rank 0] step:5641/10000 train_time:1313834ms step_avg:232.91ms +[2025-07-17 19:20:38] [Rank 0] step:5661/10000 train_time:1318710ms step_avg:232.95ms +[2025-07-17 19:20:38] [Rank 0] step:5661/10000 train_time:1318710ms step_avg:232.95ms +[2025-07-17 19:20:43] [Rank 0] step:5681/10000 train_time:1323587ms step_avg:232.98ms +[2025-07-17 19:20:43] [Rank 0] step:5681/10000 train_time:1323587ms step_avg:232.98ms +[2025-07-17 19:20:48] [Rank 0] step:5701/10000 train_time:1328457ms step_avg:233.02ms +[2025-07-17 19:20:48] [Rank 0] step:5701/10000 train_time:1328457ms step_avg:233.02ms +[2025-07-17 19:20:53] [Rank 0] step:5721/10000 train_time:1333324ms step_avg:233.06ms +[2025-07-17 19:20:53] [Rank 0] step:5721/10000 train_time:1333324ms step_avg:233.06ms +[2025-07-17 19:20:58] [Rank 0] step:5741/10000 train_time:1338199ms step_avg:233.10ms +[2025-07-17 19:20:58] [Rank 0] step:5741/10000 train_time:1338199ms step_avg:233.10ms +[2025-07-17 19:21:05] [Rank 0] PRINT: step:5750/10000 val_loss:3.6711 
train_time:1340880ms step_avg:233.20ms +[2025-07-17 19:21:05] [Rank 0] PRINT: step:5750/10000 val_loss:3.6711 train_time:1340880ms step_avg:233.20ms +[2025-07-17 19:21:07] [Rank 0] step:5761/10000 train_time:1343065ms step_avg:233.13ms +[2025-07-17 19:21:07] [Rank 0] step:5761/10000 train_time:1343065ms step_avg:233.13ms +[2025-07-17 19:21:12] [Rank 0] step:5781/10000 train_time:1347928ms step_avg:233.17ms +[2025-07-17 19:21:12] [Rank 0] step:5781/10000 train_time:1347928ms step_avg:233.17ms +[2025-07-17 19:21:17] [Rank 0] step:5801/10000 train_time:1352782ms step_avg:233.20ms +[2025-07-17 19:21:17] [Rank 0] step:5801/10000 train_time:1352782ms step_avg:233.20ms +[2025-07-17 19:21:22] [Rank 0] step:5821/10000 train_time:1357641ms step_avg:233.23ms +[2025-07-17 19:21:22] [Rank 0] step:5821/10000 train_time:1357641ms step_avg:233.23ms +[2025-07-17 19:21:27] [Rank 0] step:5841/10000 train_time:1362505ms step_avg:233.27ms +[2025-07-17 19:21:27] [Rank 0] step:5841/10000 train_time:1362505ms step_avg:233.27ms +[2025-07-17 19:21:31] [Rank 0] step:5861/10000 train_time:1367363ms step_avg:233.30ms +[2025-07-17 19:21:31] [Rank 0] step:5861/10000 train_time:1367363ms step_avg:233.30ms +[2025-07-17 19:21:40] [Rank 0] PRINT: step:5875/10000 val_loss:3.6844 train_time:1371250ms step_avg:233.40ms +[2025-07-17 19:21:40] [Rank 0] PRINT: step:5875/10000 val_loss:3.6844 train_time:1371250ms step_avg:233.40ms +[2025-07-17 19:21:41] [Rank 0] step:5881/10000 train_time:1372220ms step_avg:233.33ms +[2025-07-17 19:21:41] [Rank 0] step:5881/10000 train_time:1372220ms step_avg:233.33ms +[2025-07-17 19:21:46] [Rank 0] step:5901/10000 train_time:1377095ms step_avg:233.37ms +[2025-07-17 19:21:46] [Rank 0] step:5901/10000 train_time:1377095ms step_avg:233.37ms +[2025-07-17 19:21:51] [Rank 0] step:5921/10000 train_time:1381961ms step_avg:233.40ms +[2025-07-17 19:21:51] [Rank 0] step:5921/10000 train_time:1381961ms step_avg:233.40ms +[2025-07-17 19:21:56] [Rank 0] step:5941/10000 
train_time:1386835ms step_avg:233.43ms +[2025-07-17 19:21:56] [Rank 0] step:5941/10000 train_time:1386835ms step_avg:233.43ms +[2025-07-17 19:22:01] [Rank 0] step:5961/10000 train_time:1391713ms step_avg:233.47ms +[2025-07-17 19:22:01] [Rank 0] step:5961/10000 train_time:1391713ms step_avg:233.47ms +[2025-07-17 19:22:05] [Rank 0] step:5981/10000 train_time:1396592ms step_avg:233.50ms +[2025-07-17 19:22:05] [Rank 0] step:5981/10000 train_time:1396592ms step_avg:233.50ms +[2025-07-17 19:22:15] [Rank 0] PRINT: step:6000/10000 val_loss:3.6458 train_time:1401716ms step_avg:233.62ms +[2025-07-17 19:22:15] [Rank 0] PRINT: step:6000/10000 val_loss:3.6458 train_time:1401716ms step_avg:233.62ms +[2025-07-17 19:22:15] [Rank 0] step:6001/10000 train_time:1401729ms step_avg:233.58ms +[2025-07-17 19:22:15] [Rank 0] step:6001/10000 train_time:1401729ms step_avg:233.58ms +[2025-07-17 19:22:20] [Rank 0] step:6021/10000 train_time:1406354ms step_avg:233.57ms +[2025-07-17 19:22:20] [Rank 0] step:6021/10000 train_time:1406354ms step_avg:233.57ms +[2025-07-17 19:22:25] [Rank 0] step:6041/10000 train_time:1411235ms step_avg:233.61ms +[2025-07-17 19:22:25] [Rank 0] step:6041/10000 train_time:1411235ms step_avg:233.61ms +[2025-07-17 19:22:30] [Rank 0] step:6061/10000 train_time:1416117ms step_avg:233.64ms +[2025-07-17 19:22:30] [Rank 0] step:6061/10000 train_time:1416117ms step_avg:233.64ms +[2025-07-17 19:22:35] [Rank 0] step:6081/10000 train_time:1421001ms step_avg:233.68ms +[2025-07-17 19:22:35] [Rank 0] step:6081/10000 train_time:1421001ms step_avg:233.68ms +[2025-07-17 19:22:39] [Rank 0] step:6101/10000 train_time:1425879ms step_avg:233.71ms +[2025-07-17 19:22:39] [Rank 0] step:6101/10000 train_time:1425879ms step_avg:233.71ms +[2025-07-17 19:22:44] [Rank 0] step:6121/10000 train_time:1430770ms step_avg:233.75ms +[2025-07-17 19:22:44] [Rank 0] step:6121/10000 train_time:1430770ms step_avg:233.75ms +[2025-07-17 19:22:50] [Rank 0] PRINT: step:6125/10000 val_loss:3.6479 
train_time:1432239ms step_avg:233.83ms +[2025-07-17 19:22:50] [Rank 0] PRINT: step:6125/10000 val_loss:3.6479 train_time:1432239ms step_avg:233.83ms +[2025-07-17 19:22:54] [Rank 0] step:6141/10000 train_time:1435655ms step_avg:233.78ms +[2025-07-17 19:22:54] [Rank 0] step:6141/10000 train_time:1435655ms step_avg:233.78ms +[2025-07-17 19:22:59] [Rank 0] step:6161/10000 train_time:1440536ms step_avg:233.82ms +[2025-07-17 19:22:59] [Rank 0] step:6161/10000 train_time:1440536ms step_avg:233.82ms +[2025-07-17 19:23:04] [Rank 0] step:6181/10000 train_time:1445427ms step_avg:233.85ms +[2025-07-17 19:23:04] [Rank 0] step:6181/10000 train_time:1445427ms step_avg:233.85ms +[2025-07-17 19:23:09] [Rank 0] step:6201/10000 train_time:1450318ms step_avg:233.88ms +[2025-07-17 19:23:09] [Rank 0] step:6201/10000 train_time:1450318ms step_avg:233.88ms +[2025-07-17 19:23:13] [Rank 0] step:6221/10000 train_time:1455203ms step_avg:233.92ms +[2025-07-17 19:23:13] [Rank 0] step:6221/10000 train_time:1455203ms step_avg:233.92ms +[2025-07-17 19:23:18] [Rank 0] step:6241/10000 train_time:1460084ms step_avg:233.95ms +[2025-07-17 19:23:18] [Rank 0] step:6241/10000 train_time:1460084ms step_avg:233.95ms +[2025-07-17 19:23:25] [Rank 0] PRINT: step:6250/10000 val_loss:3.6603 train_time:1462770ms step_avg:234.04ms +[2025-07-17 19:23:25] [Rank 0] PRINT: step:6250/10000 val_loss:3.6603 train_time:1462770ms step_avg:234.04ms +[2025-07-17 19:23:28] [Rank 0] step:6261/10000 train_time:1464964ms step_avg:233.98ms +[2025-07-17 19:23:28] [Rank 0] step:6261/10000 train_time:1464964ms step_avg:233.98ms +[2025-07-17 19:23:33] [Rank 0] step:6281/10000 train_time:1469855ms step_avg:234.02ms +[2025-07-17 19:23:33] [Rank 0] step:6281/10000 train_time:1469855ms step_avg:234.02ms +[2025-07-17 19:23:38] [Rank 0] step:6301/10000 train_time:1474739ms step_avg:234.05ms +[2025-07-17 19:23:38] [Rank 0] step:6301/10000 train_time:1474739ms step_avg:234.05ms +[2025-07-17 19:23:43] [Rank 0] step:6321/10000 
train_time:1479627ms step_avg:234.08ms +[2025-07-17 19:23:43] [Rank 0] step:6321/10000 train_time:1479627ms step_avg:234.08ms +[2025-07-17 19:23:47] [Rank 0] step:6341/10000 train_time:1484518ms step_avg:234.11ms +[2025-07-17 19:23:47] [Rank 0] step:6341/10000 train_time:1484518ms step_avg:234.11ms +[2025-07-17 19:23:52] [Rank 0] step:6361/10000 train_time:1489402ms step_avg:234.15ms +[2025-07-17 19:23:52] [Rank 0] step:6361/10000 train_time:1489402ms step_avg:234.15ms +[2025-07-17 19:24:00] [Rank 0] PRINT: step:6375/10000 val_loss:3.6746 train_time:1493308ms step_avg:234.24ms +[2025-07-17 19:24:00] [Rank 0] PRINT: step:6375/10000 val_loss:3.6746 train_time:1493308ms step_avg:234.24ms +[2025-07-17 19:24:02] [Rank 0] step:6381/10000 train_time:1494281ms step_avg:234.18ms +[2025-07-17 19:24:02] [Rank 0] step:6381/10000 train_time:1494281ms step_avg:234.18ms +[2025-07-17 19:24:07] [Rank 0] step:6401/10000 train_time:1499156ms step_avg:234.21ms +[2025-07-17 19:24:07] [Rank 0] step:6401/10000 train_time:1499156ms step_avg:234.21ms +[2025-07-17 19:24:12] [Rank 0] step:6421/10000 train_time:1504031ms step_avg:234.24ms +[2025-07-17 19:24:12] [Rank 0] step:6421/10000 train_time:1504031ms step_avg:234.24ms +[2025-07-17 19:24:17] [Rank 0] step:6441/10000 train_time:1508916ms step_avg:234.27ms +[2025-07-17 19:24:17] [Rank 0] step:6441/10000 train_time:1508916ms step_avg:234.27ms +[2025-07-17 19:24:21] [Rank 0] step:6461/10000 train_time:1513806ms step_avg:234.30ms +[2025-07-17 19:24:21] [Rank 0] step:6461/10000 train_time:1513806ms step_avg:234.30ms +[2025-07-17 19:24:26] [Rank 0] step:6481/10000 train_time:1518689ms step_avg:234.33ms +[2025-07-17 19:24:26] [Rank 0] step:6481/10000 train_time:1518689ms step_avg:234.33ms +[2025-07-17 19:24:36] [Rank 0] PRINT: step:6500/10000 val_loss:3.6656 train_time:1523812ms step_avg:234.43ms +[2025-07-17 19:24:36] [Rank 0] PRINT: step:6500/10000 val_loss:3.6656 train_time:1523812ms step_avg:234.43ms +[2025-07-17 19:24:36] [Rank 0] 
step:6501/10000 train_time:1523826ms step_avg:234.40ms +[2025-07-17 19:24:36] [Rank 0] step:6501/10000 train_time:1523826ms step_avg:234.40ms +[2025-07-17 19:24:41] [Rank 0] step:6521/10000 train_time:1528442ms step_avg:234.39ms +[2025-07-17 19:24:41] [Rank 0] step:6521/10000 train_time:1528442ms step_avg:234.39ms +[2025-07-17 19:24:46] [Rank 0] step:6541/10000 train_time:1533322ms step_avg:234.42ms +[2025-07-17 19:24:46] [Rank 0] step:6541/10000 train_time:1533322ms step_avg:234.42ms +[2025-07-17 19:24:51] [Rank 0] step:6561/10000 train_time:1538212ms step_avg:234.45ms +[2025-07-17 19:24:51] [Rank 0] step:6561/10000 train_time:1538212ms step_avg:234.45ms +[2025-07-17 19:24:55] [Rank 0] step:6581/10000 train_time:1543097ms step_avg:234.48ms +[2025-07-17 19:24:55] [Rank 0] step:6581/10000 train_time:1543097ms step_avg:234.48ms +[2025-07-17 19:25:00] [Rank 0] step:6601/10000 train_time:1547983ms step_avg:234.51ms +[2025-07-17 19:25:00] [Rank 0] step:6601/10000 train_time:1547983ms step_avg:234.51ms +[2025-07-17 19:25:05] [Rank 0] step:6621/10000 train_time:1552861ms step_avg:234.54ms +[2025-07-17 19:25:05] [Rank 0] step:6621/10000 train_time:1552861ms step_avg:234.54ms +[2025-07-17 19:25:11] [Rank 0] PRINT: step:6625/10000 val_loss:3.6620 train_time:1554329ms step_avg:234.62ms +[2025-07-17 19:25:11] [Rank 0] PRINT: step:6625/10000 val_loss:3.6620 train_time:1554329ms step_avg:234.62ms +[2025-07-17 19:25:15] [Rank 0] step:6641/10000 train_time:1557733ms step_avg:234.56ms +[2025-07-17 19:25:15] [Rank 0] step:6641/10000 train_time:1557733ms step_avg:234.56ms +[2025-07-17 19:25:20] [Rank 0] step:6661/10000 train_time:1562613ms step_avg:234.59ms +[2025-07-17 19:25:20] [Rank 0] step:6661/10000 train_time:1562613ms step_avg:234.59ms +[2025-07-17 19:25:25] [Rank 0] step:6681/10000 train_time:1567538ms step_avg:234.63ms +[2025-07-17 19:25:25] [Rank 0] step:6681/10000 train_time:1567538ms step_avg:234.63ms +[2025-07-17 19:25:29] [Rank 0] step:6701/10000 train_time:1572482ms 
step_avg:234.66ms +[2025-07-17 19:25:29] [Rank 0] step:6701/10000 train_time:1572482ms step_avg:234.66ms +[2025-07-17 19:25:34] [Rank 0] step:6721/10000 train_time:1577441ms step_avg:234.70ms +[2025-07-17 19:25:34] [Rank 0] step:6721/10000 train_time:1577441ms step_avg:234.70ms +[2025-07-17 19:25:39] [Rank 0] step:6741/10000 train_time:1582401ms step_avg:234.74ms +[2025-07-17 19:25:39] [Rank 0] step:6741/10000 train_time:1582401ms step_avg:234.74ms +[2025-07-17 19:25:46] [Rank 0] PRINT: step:6750/10000 val_loss:3.5682 train_time:1585120ms step_avg:234.83ms +[2025-07-17 19:25:46] [Rank 0] PRINT: step:6750/10000 val_loss:3.5682 train_time:1585120ms step_avg:234.83ms +[2025-07-17 19:25:49] [Rank 0] step:6761/10000 train_time:1587340ms step_avg:234.78ms +[2025-07-17 19:25:49] [Rank 0] step:6761/10000 train_time:1587340ms step_avg:234.78ms +[2025-07-17 19:25:54] [Rank 0] step:6781/10000 train_time:1592285ms step_avg:234.82ms +[2025-07-17 19:25:54] [Rank 0] step:6781/10000 train_time:1592285ms step_avg:234.82ms +[2025-07-17 19:25:59] [Rank 0] step:6801/10000 train_time:1597237ms step_avg:234.85ms +[2025-07-17 19:25:59] [Rank 0] step:6801/10000 train_time:1597237ms step_avg:234.85ms +[2025-07-17 19:26:03] [Rank 0] step:6821/10000 train_time:1602181ms step_avg:234.89ms +[2025-07-17 19:26:03] [Rank 0] step:6821/10000 train_time:1602181ms step_avg:234.89ms +[2025-07-17 19:26:08] [Rank 0] step:6841/10000 train_time:1607126ms step_avg:234.93ms +[2025-07-17 19:26:08] [Rank 0] step:6841/10000 train_time:1607126ms step_avg:234.93ms +[2025-07-17 19:26:13] [Rank 0] step:6861/10000 train_time:1612068ms step_avg:234.96ms +[2025-07-17 19:26:13] [Rank 0] step:6861/10000 train_time:1612068ms step_avg:234.96ms +[2025-07-17 19:26:21] [Rank 0] PRINT: step:6875/10000 val_loss:3.5661 train_time:1616017ms step_avg:235.06ms +[2025-07-17 19:26:21] [Rank 0] PRINT: step:6875/10000 val_loss:3.5661 train_time:1616017ms step_avg:235.06ms +[2025-07-17 19:26:23] [Rank 0] step:6881/10000 
train_time:1617003ms step_avg:235.00ms +[2025-07-17 19:26:23] [Rank 0] step:6881/10000 train_time:1617003ms step_avg:235.00ms +[2025-07-17 19:26:28] [Rank 0] step:6901/10000 train_time:1621937ms step_avg:235.03ms +[2025-07-17 19:26:28] [Rank 0] step:6901/10000 train_time:1621937ms step_avg:235.03ms +[2025-07-17 19:26:33] [Rank 0] step:6921/10000 train_time:1626877ms step_avg:235.06ms +[2025-07-17 19:26:33] [Rank 0] step:6921/10000 train_time:1626877ms step_avg:235.06ms +[2025-07-17 19:26:37] [Rank 0] step:6941/10000 train_time:1631831ms step_avg:235.10ms +[2025-07-17 19:26:37] [Rank 0] step:6941/10000 train_time:1631831ms step_avg:235.10ms +[2025-07-17 19:26:42] [Rank 0] step:6961/10000 train_time:1636774ms step_avg:235.13ms +[2025-07-17 19:26:42] [Rank 0] step:6961/10000 train_time:1636774ms step_avg:235.13ms +[2025-07-17 19:26:47] [Rank 0] step:6981/10000 train_time:1641720ms step_avg:235.17ms +[2025-07-17 19:26:47] [Rank 0] step:6981/10000 train_time:1641720ms step_avg:235.17ms +[2025-07-17 19:26:57] [Rank 0] PRINT: step:7000/10000 val_loss:3.5771 train_time:1646908ms step_avg:235.27ms +[2025-07-17 19:26:57] [Rank 0] PRINT: step:7000/10000 val_loss:3.5771 train_time:1646908ms step_avg:235.27ms +[2025-07-17 19:26:57] [Rank 0] step:7001/10000 train_time:1646922ms step_avg:235.24ms +[2025-07-17 19:26:57] [Rank 0] step:7001/10000 train_time:1646922ms step_avg:235.24ms +[2025-07-17 19:27:02] [Rank 0] step:7021/10000 train_time:1651603ms step_avg:235.24ms +[2025-07-17 19:27:02] [Rank 0] step:7021/10000 train_time:1651603ms step_avg:235.24ms +[2025-07-17 19:27:07] [Rank 0] step:7041/10000 train_time:1656539ms step_avg:235.27ms +[2025-07-17 19:27:07] [Rank 0] step:7041/10000 train_time:1656539ms step_avg:235.27ms +[2025-07-17 19:27:12] [Rank 0] step:7061/10000 train_time:1661470ms step_avg:235.30ms +[2025-07-17 19:27:12] [Rank 0] step:7061/10000 train_time:1661470ms step_avg:235.30ms +[2025-07-17 19:27:17] [Rank 0] step:7081/10000 train_time:1666412ms step_avg:235.34ms 
+[2025-07-17 19:27:17] [Rank 0] step:7081/10000 train_time:1666412ms step_avg:235.34ms +[2025-07-17 19:27:22] [Rank 0] step:7101/10000 train_time:1671341ms step_avg:235.37ms +[2025-07-17 19:27:22] [Rank 0] step:7101/10000 train_time:1671341ms step_avg:235.37ms +[2025-07-17 19:27:27] [Rank 0] step:7121/10000 train_time:1676284ms step_avg:235.40ms +[2025-07-17 19:27:27] [Rank 0] step:7121/10000 train_time:1676284ms step_avg:235.40ms +[2025-07-17 19:27:32] [Rank 0] PRINT: step:7125/10000 val_loss:3.5744 train_time:1677766ms step_avg:235.48ms +[2025-07-17 19:27:32] [Rank 0] PRINT: step:7125/10000 val_loss:3.5744 train_time:1677766ms step_avg:235.48ms +[2025-07-17 19:27:36] [Rank 0] step:7141/10000 train_time:1681229ms step_avg:235.43ms +[2025-07-17 19:27:36] [Rank 0] step:7141/10000 train_time:1681229ms step_avg:235.43ms +[2025-07-17 19:27:41] [Rank 0] step:7161/10000 train_time:1686172ms step_avg:235.47ms +[2025-07-17 19:27:41] [Rank 0] step:7161/10000 train_time:1686172ms step_avg:235.47ms +[2025-07-17 19:27:46] [Rank 0] step:7181/10000 train_time:1691106ms step_avg:235.50ms +[2025-07-17 19:27:46] [Rank 0] step:7181/10000 train_time:1691106ms step_avg:235.50ms +[2025-07-17 19:27:51] [Rank 0] step:7201/10000 train_time:1696056ms step_avg:235.53ms +[2025-07-17 19:27:51] [Rank 0] step:7201/10000 train_time:1696056ms step_avg:235.53ms +[2025-07-17 19:27:56] [Rank 0] step:7221/10000 train_time:1700997ms step_avg:235.56ms +[2025-07-17 19:27:56] [Rank 0] step:7221/10000 train_time:1700997ms step_avg:235.56ms +[2025-07-17 19:28:01] [Rank 0] step:7241/10000 train_time:1705934ms step_avg:235.59ms +[2025-07-17 19:28:01] [Rank 0] step:7241/10000 train_time:1705934ms step_avg:235.59ms +[2025-07-17 19:28:08] [Rank 0] PRINT: step:7250/10000 val_loss:3.5719 train_time:1708657ms step_avg:235.68ms +[2025-07-17 19:28:08] [Rank 0] PRINT: step:7250/10000 val_loss:3.5719 train_time:1708657ms step_avg:235.68ms +[2025-07-17 19:28:11] [Rank 0] step:7261/10000 train_time:1710872ms 
step_avg:235.62ms +[2025-07-17 19:28:11] [Rank 0] step:7261/10000 train_time:1710872ms step_avg:235.62ms +[2025-07-17 19:28:16] [Rank 0] step:7281/10000 train_time:1715810ms step_avg:235.66ms +[2025-07-17 19:28:16] [Rank 0] step:7281/10000 train_time:1715810ms step_avg:235.66ms +[2025-07-17 19:28:21] [Rank 0] step:7301/10000 train_time:1720746ms step_avg:235.69ms +[2025-07-17 19:28:21] [Rank 0] step:7301/10000 train_time:1720746ms step_avg:235.69ms +[2025-07-17 19:28:26] [Rank 0] step:7321/10000 train_time:1725701ms step_avg:235.72ms +[2025-07-17 19:28:26] [Rank 0] step:7321/10000 train_time:1725701ms step_avg:235.72ms +[2025-07-17 19:28:31] [Rank 0] step:7341/10000 train_time:1730642ms step_avg:235.75ms +[2025-07-17 19:28:31] [Rank 0] step:7341/10000 train_time:1730642ms step_avg:235.75ms +[2025-07-17 19:28:35] [Rank 0] step:7361/10000 train_time:1735587ms step_avg:235.78ms +[2025-07-17 19:28:35] [Rank 0] step:7361/10000 train_time:1735587ms step_avg:235.78ms +[2025-07-17 19:28:44] [Rank 0] PRINT: step:7375/10000 val_loss:3.5833 train_time:1739546ms step_avg:235.87ms +[2025-07-17 19:28:44] [Rank 0] PRINT: step:7375/10000 val_loss:3.5833 train_time:1739546ms step_avg:235.87ms +[2025-07-17 19:28:45] [Rank 0] step:7381/10000 train_time:1740530ms step_avg:235.81ms +[2025-07-17 19:28:45] [Rank 0] step:7381/10000 train_time:1740530ms step_avg:235.81ms +[2025-07-17 19:28:50] [Rank 0] step:7401/10000 train_time:1745474ms step_avg:235.84ms +[2025-07-17 19:28:50] [Rank 0] step:7401/10000 train_time:1745474ms step_avg:235.84ms +[2025-07-17 19:28:55] [Rank 0] step:7421/10000 train_time:1750416ms step_avg:235.87ms +[2025-07-17 19:28:55] [Rank 0] step:7421/10000 train_time:1750416ms step_avg:235.87ms +[2025-07-17 19:29:00] [Rank 0] step:7441/10000 train_time:1755373ms step_avg:235.91ms +[2025-07-17 19:29:00] [Rank 0] step:7441/10000 train_time:1755373ms step_avg:235.91ms +[2025-07-17 19:29:05] [Rank 0] step:7461/10000 train_time:1760314ms step_avg:235.94ms +[2025-07-17 
19:29:05] [Rank 0] step:7461/10000 train_time:1760314ms step_avg:235.94ms +[2025-07-17 19:29:10] [Rank 0] step:7481/10000 train_time:1765270ms step_avg:235.97ms +[2025-07-17 19:29:10] [Rank 0] step:7481/10000 train_time:1765270ms step_avg:235.97ms +[2025-07-17 19:29:19] [Rank 0] PRINT: step:7500/10000 val_loss:3.5798 train_time:1770481ms step_avg:236.06ms +[2025-07-17 19:29:19] [Rank 0] PRINT: step:7500/10000 val_loss:3.5798 train_time:1770481ms step_avg:236.06ms +[2025-07-17 19:29:20] [Rank 0] step:7501/10000 train_time:1770495ms step_avg:236.03ms +[2025-07-17 19:29:20] [Rank 0] step:7501/10000 train_time:1770495ms step_avg:236.03ms +[2025-07-17 19:29:25] [Rank 0] step:7521/10000 train_time:1775187ms step_avg:236.03ms +[2025-07-17 19:29:25] [Rank 0] step:7521/10000 train_time:1775187ms step_avg:236.03ms +[2025-07-17 19:29:30] [Rank 0] step:7541/10000 train_time:1780138ms step_avg:236.06ms +[2025-07-17 19:29:30] [Rank 0] step:7541/10000 train_time:1780138ms step_avg:236.06ms +[2025-07-17 19:29:35] [Rank 0] step:7561/10000 train_time:1785086ms step_avg:236.09ms +[2025-07-17 19:29:35] [Rank 0] step:7561/10000 train_time:1785086ms step_avg:236.09ms +[2025-07-17 19:29:39] [Rank 0] step:7581/10000 train_time:1790047ms step_avg:236.12ms +[2025-07-17 19:29:39] [Rank 0] step:7581/10000 train_time:1790047ms step_avg:236.12ms +[2025-07-17 19:29:44] [Rank 0] step:7601/10000 train_time:1795009ms step_avg:236.15ms +[2025-07-17 19:29:44] [Rank 0] step:7601/10000 train_time:1795009ms step_avg:236.15ms +[2025-07-17 19:29:49] [Rank 0] step:7621/10000 train_time:1799982ms step_avg:236.19ms +[2025-07-17 19:29:49] [Rank 0] step:7621/10000 train_time:1799982ms step_avg:236.19ms +[2025-07-17 19:29:55] [Rank 0] PRINT: step:7625/10000 val_loss:3.5797 train_time:1801471ms step_avg:236.26ms +[2025-07-17 19:29:55] [Rank 0] PRINT: step:7625/10000 val_loss:3.5797 train_time:1801471ms step_avg:236.26ms +[2025-07-17 19:29:59] [Rank 0] step:7641/10000 train_time:1804936ms step_avg:236.22ms 
+[2025-07-17 19:29:59] [Rank 0] step:7641/10000 train_time:1804936ms step_avg:236.22ms +[2025-07-17 19:30:04] [Rank 0] step:7661/10000 train_time:1809903ms step_avg:236.25ms +[2025-07-17 19:30:04] [Rank 0] step:7661/10000 train_time:1809903ms step_avg:236.25ms +[2025-07-17 19:30:09] [Rank 0] step:7681/10000 train_time:1814880ms step_avg:236.28ms +[2025-07-17 19:30:09] [Rank 0] step:7681/10000 train_time:1814880ms step_avg:236.28ms +[2025-07-17 19:30:14] [Rank 0] step:7701/10000 train_time:1819839ms step_avg:236.31ms +[2025-07-17 19:30:14] [Rank 0] step:7701/10000 train_time:1819839ms step_avg:236.31ms +[2025-07-17 19:30:19] [Rank 0] step:7721/10000 train_time:1824803ms step_avg:236.34ms +[2025-07-17 19:30:19] [Rank 0] step:7721/10000 train_time:1824803ms step_avg:236.34ms +[2025-07-17 19:30:24] [Rank 0] step:7741/10000 train_time:1829760ms step_avg:236.37ms +[2025-07-17 19:30:24] [Rank 0] step:7741/10000 train_time:1829760ms step_avg:236.37ms +[2025-07-17 19:30:31] [Rank 0] PRINT: step:7750/10000 val_loss:3.5864 train_time:1832505ms step_avg:236.45ms +[2025-07-17 19:30:31] [Rank 0] PRINT: step:7750/10000 val_loss:3.5864 train_time:1832505ms step_avg:236.45ms +[2025-07-17 19:30:34] [Rank 0] step:7761/10000 train_time:1834733ms step_avg:236.40ms +[2025-07-17 19:30:34] [Rank 0] step:7761/10000 train_time:1834733ms step_avg:236.40ms +[2025-07-17 19:30:39] [Rank 0] step:7781/10000 train_time:1839693ms step_avg:236.43ms +[2025-07-17 19:30:39] [Rank 0] step:7781/10000 train_time:1839693ms step_avg:236.43ms +[2025-07-17 19:30:44] [Rank 0] step:7801/10000 train_time:1844653ms step_avg:236.46ms +[2025-07-17 19:30:44] [Rank 0] step:7801/10000 train_time:1844653ms step_avg:236.46ms +[2025-07-17 19:30:49] [Rank 0] step:7821/10000 train_time:1849614ms step_avg:236.49ms +[2025-07-17 19:30:49] [Rank 0] step:7821/10000 train_time:1849614ms step_avg:236.49ms +[2025-07-17 19:30:54] [Rank 0] step:7841/10000 train_time:1854576ms step_avg:236.52ms +[2025-07-17 19:30:54] [Rank 0] 
step:7841/10000 train_time:1854576ms step_avg:236.52ms +[2025-07-17 19:30:59] [Rank 0] step:7861/10000 train_time:1859521ms step_avg:236.55ms +[2025-07-17 19:30:59] [Rank 0] step:7861/10000 train_time:1859521ms step_avg:236.55ms +[2025-07-17 19:31:07] [Rank 0] PRINT: step:7875/10000 val_loss:3.5715 train_time:1863481ms step_avg:236.63ms +[2025-07-17 19:31:07] [Rank 0] PRINT: step:7875/10000 val_loss:3.5715 train_time:1863481ms step_avg:236.63ms +[2025-07-17 19:31:08] [Rank 0] step:7881/10000 train_time:1864466ms step_avg:236.58ms +[2025-07-17 19:31:08] [Rank 0] step:7881/10000 train_time:1864466ms step_avg:236.58ms +[2025-07-17 19:31:13] [Rank 0] step:7901/10000 train_time:1869417ms step_avg:236.61ms +[2025-07-17 19:31:13] [Rank 0] step:7901/10000 train_time:1869417ms step_avg:236.61ms +[2025-07-17 19:31:18] [Rank 0] step:7921/10000 train_time:1874374ms step_avg:236.63ms +[2025-07-17 19:31:18] [Rank 0] step:7921/10000 train_time:1874374ms step_avg:236.63ms +[2025-07-17 19:31:23] [Rank 0] step:7941/10000 train_time:1879336ms step_avg:236.66ms +[2025-07-17 19:31:23] [Rank 0] step:7941/10000 train_time:1879336ms step_avg:236.66ms +[2025-07-17 19:31:28] [Rank 0] step:7961/10000 train_time:1884308ms step_avg:236.69ms +[2025-07-17 19:31:28] [Rank 0] step:7961/10000 train_time:1884308ms step_avg:236.69ms +[2025-07-17 19:31:33] [Rank 0] step:7981/10000 train_time:1889258ms step_avg:236.72ms +[2025-07-17 19:31:33] [Rank 0] step:7981/10000 train_time:1889258ms step_avg:236.72ms +[2025-07-17 19:31:42] [Rank 0] PRINT: step:8000/10000 val_loss:3.5745 train_time:1894473ms step_avg:236.81ms +[2025-07-17 19:31:42] [Rank 0] PRINT: step:8000/10000 val_loss:3.5745 train_time:1894473ms step_avg:236.81ms +[2025-07-17 19:31:42] [Rank 0] step:8001/10000 train_time:1894486ms step_avg:236.78ms +[2025-07-17 19:31:42] [Rank 0] step:8001/10000 train_time:1894486ms step_avg:236.78ms +[2025-07-17 19:31:47] [Rank 0] step:8021/10000 train_time:1899172ms step_avg:236.77ms +[2025-07-17 19:31:47] 
[Rank 0] step:8021/10000 train_time:1899172ms step_avg:236.77ms +[2025-07-17 19:31:52] [Rank 0] step:8041/10000 train_time:1904148ms step_avg:236.80ms +[2025-07-17 19:31:52] [Rank 0] step:8041/10000 train_time:1904148ms step_avg:236.80ms +[2025-07-17 19:31:57] [Rank 0] step:8061/10000 train_time:1909097ms step_avg:236.83ms +[2025-07-17 19:31:57] [Rank 0] step:8061/10000 train_time:1909097ms step_avg:236.83ms +[2025-07-17 19:32:02] [Rank 0] step:8081/10000 train_time:1914057ms step_avg:236.86ms +[2025-07-17 19:32:02] [Rank 0] step:8081/10000 train_time:1914057ms step_avg:236.86ms +[2025-07-17 19:32:07] [Rank 0] step:8101/10000 train_time:1919008ms step_avg:236.89ms +[2025-07-17 19:32:07] [Rank 0] step:8101/10000 train_time:1919008ms step_avg:236.89ms +[2025-07-17 19:32:12] [Rank 0] step:8121/10000 train_time:1923963ms step_avg:236.91ms +[2025-07-17 19:32:12] [Rank 0] step:8121/10000 train_time:1923963ms step_avg:236.91ms +[2025-07-17 19:32:18] [Rank 0] PRINT: step:8125/10000 val_loss:3.5762 train_time:1925452ms step_avg:236.98ms +[2025-07-17 19:32:18] [Rank 0] PRINT: step:8125/10000 val_loss:3.5762 train_time:1925452ms step_avg:236.98ms +[2025-07-17 19:32:22] [Rank 0] step:8141/10000 train_time:1928924ms step_avg:236.94ms +[2025-07-17 19:32:22] [Rank 0] step:8141/10000 train_time:1928924ms step_avg:236.94ms +[2025-07-17 19:32:27] [Rank 0] step:8161/10000 train_time:1933916ms step_avg:236.97ms +[2025-07-17 19:32:27] [Rank 0] step:8161/10000 train_time:1933916ms step_avg:236.97ms +[2025-07-17 19:32:32] [Rank 0] step:8181/10000 train_time:1938937ms step_avg:237.00ms +[2025-07-17 19:32:32] [Rank 0] step:8181/10000 train_time:1938937ms step_avg:237.00ms +[2025-07-17 19:32:37] [Rank 0] step:8201/10000 train_time:1943941ms step_avg:237.04ms +[2025-07-17 19:32:37] [Rank 0] step:8201/10000 train_time:1943941ms step_avg:237.04ms +[2025-07-17 19:32:42] [Rank 0] step:8221/10000 train_time:1948956ms step_avg:237.07ms +[2025-07-17 19:32:42] [Rank 0] step:8221/10000 
train_time:1948956ms step_avg:237.07ms +[2025-07-17 19:32:47] [Rank 0] step:8241/10000 train_time:1953974ms step_avg:237.10ms +[2025-07-17 19:32:47] [Rank 0] step:8241/10000 train_time:1953974ms step_avg:237.10ms +[2025-07-17 19:32:54] [Rank 0] PRINT: step:8250/10000 val_loss:3.5431 train_time:1956747ms step_avg:237.18ms +[2025-07-17 19:32:54] [Rank 0] PRINT: step:8250/10000 val_loss:3.5431 train_time:1956747ms step_avg:237.18ms +[2025-07-17 19:32:57] [Rank 0] step:8261/10000 train_time:1959008ms step_avg:237.14ms +[2025-07-17 19:32:57] [Rank 0] step:8261/10000 train_time:1959008ms step_avg:237.14ms +[2025-07-17 19:33:02] [Rank 0] step:8281/10000 train_time:1964055ms step_avg:237.18ms +[2025-07-17 19:33:02] [Rank 0] step:8281/10000 train_time:1964055ms step_avg:237.18ms +[2025-07-17 19:33:07] [Rank 0] step:8301/10000 train_time:1969071ms step_avg:237.21ms +[2025-07-17 19:33:07] [Rank 0] step:8301/10000 train_time:1969071ms step_avg:237.21ms +[2025-07-17 19:33:12] [Rank 0] step:8321/10000 train_time:1974098ms step_avg:237.24ms +[2025-07-17 19:33:12] [Rank 0] step:8321/10000 train_time:1974098ms step_avg:237.24ms +[2025-07-17 19:33:17] [Rank 0] step:8341/10000 train_time:1979130ms step_avg:237.28ms +[2025-07-17 19:33:17] [Rank 0] step:8341/10000 train_time:1979130ms step_avg:237.28ms +[2025-07-17 19:33:22] [Rank 0] step:8361/10000 train_time:1984147ms step_avg:237.31ms +[2025-07-17 19:33:22] [Rank 0] step:8361/10000 train_time:1984147ms step_avg:237.31ms +[2025-07-17 19:33:30] [Rank 0] PRINT: step:8375/10000 val_loss:3.5403 train_time:1988166ms step_avg:237.39ms +[2025-07-17 19:33:30] [Rank 0] PRINT: step:8375/10000 val_loss:3.5403 train_time:1988166ms step_avg:237.39ms +[2025-07-17 19:33:32] [Rank 0] step:8381/10000 train_time:1989160ms step_avg:237.34ms +[2025-07-17 19:33:32] [Rank 0] step:8381/10000 train_time:1989160ms step_avg:237.34ms +[2025-07-17 19:33:37] [Rank 0] step:8401/10000 train_time:1994162ms step_avg:237.37ms +[2025-07-17 19:33:37] [Rank 0] 
step:8401/10000 train_time:1994162ms step_avg:237.37ms +[2025-07-17 19:33:42] [Rank 0] step:8421/10000 train_time:1999195ms step_avg:237.41ms +[2025-07-17 19:33:42] [Rank 0] step:8421/10000 train_time:1999195ms step_avg:237.41ms +[2025-07-17 19:33:47] [Rank 0] step:8441/10000 train_time:2004220ms step_avg:237.44ms +[2025-07-17 19:33:47] [Rank 0] step:8441/10000 train_time:2004220ms step_avg:237.44ms +[2025-07-17 19:33:52] [Rank 0] step:8461/10000 train_time:2009268ms step_avg:237.47ms +[2025-07-17 19:33:52] [Rank 0] step:8461/10000 train_time:2009268ms step_avg:237.47ms +[2025-07-17 19:33:57] [Rank 0] step:8481/10000 train_time:2014284ms step_avg:237.51ms +[2025-07-17 19:33:57] [Rank 0] step:8481/10000 train_time:2014284ms step_avg:237.51ms +[2025-07-17 19:34:06] [Rank 0] PRINT: step:8500/10000 val_loss:3.5306 train_time:2019567ms step_avg:237.60ms +[2025-07-17 19:34:06] [Rank 0] PRINT: step:8500/10000 val_loss:3.5306 train_time:2019567ms step_avg:237.60ms +[2025-07-17 19:34:06] [Rank 0] step:8501/10000 train_time:2019580ms step_avg:237.57ms +[2025-07-17 19:34:06] [Rank 0] step:8501/10000 train_time:2019580ms step_avg:237.57ms +[2025-07-17 19:34:11] [Rank 0] step:8521/10000 train_time:2024334ms step_avg:237.57ms +[2025-07-17 19:34:11] [Rank 0] step:8521/10000 train_time:2024334ms step_avg:237.57ms +[2025-07-17 19:34:17] [Rank 0] step:8541/10000 train_time:2029365ms step_avg:237.60ms +[2025-07-17 19:34:17] [Rank 0] step:8541/10000 train_time:2029365ms step_avg:237.60ms +[2025-07-17 19:34:22] [Rank 0] step:8561/10000 train_time:2034373ms step_avg:237.63ms +[2025-07-17 19:34:22] [Rank 0] step:8561/10000 train_time:2034373ms step_avg:237.63ms +[2025-07-17 19:34:27] [Rank 0] step:8581/10000 train_time:2039383ms step_avg:237.66ms +[2025-07-17 19:34:27] [Rank 0] step:8581/10000 train_time:2039383ms step_avg:237.66ms +[2025-07-17 19:34:32] [Rank 0] step:8601/10000 train_time:2044383ms step_avg:237.69ms +[2025-07-17 19:34:32] [Rank 0] step:8601/10000 train_time:2044383ms 
step_avg:237.69ms +[2025-07-17 19:34:37] [Rank 0] step:8621/10000 train_time:2049383ms step_avg:237.72ms +[2025-07-17 19:34:37] [Rank 0] step:8621/10000 train_time:2049383ms step_avg:237.72ms +[2025-07-17 19:34:42] [Rank 0] PRINT: step:8625/10000 val_loss:3.5231 train_time:2050887ms step_avg:237.78ms +[2025-07-17 19:34:42] [Rank 0] PRINT: step:8625/10000 val_loss:3.5231 train_time:2050887ms step_avg:237.78ms +[2025-07-17 19:34:46] [Rank 0] step:8641/10000 train_time:2054411ms step_avg:237.75ms +[2025-07-17 19:34:46] [Rank 0] step:8641/10000 train_time:2054411ms step_avg:237.75ms +[2025-07-17 19:34:51] [Rank 0] step:8661/10000 train_time:2059417ms step_avg:237.78ms +[2025-07-17 19:34:51] [Rank 0] step:8661/10000 train_time:2059417ms step_avg:237.78ms +[2025-07-17 19:34:56] [Rank 0] step:8681/10000 train_time:2064428ms step_avg:237.81ms +[2025-07-17 19:34:56] [Rank 0] step:8681/10000 train_time:2064428ms step_avg:237.81ms +[2025-07-17 19:35:01] [Rank 0] step:8701/10000 train_time:2069450ms step_avg:237.84ms +[2025-07-17 19:35:01] [Rank 0] step:8701/10000 train_time:2069450ms step_avg:237.84ms +[2025-07-17 19:35:06] [Rank 0] step:8721/10000 train_time:2074470ms step_avg:237.87ms +[2025-07-17 19:35:06] [Rank 0] step:8721/10000 train_time:2074470ms step_avg:237.87ms +[2025-07-17 19:35:11] [Rank 0] step:8741/10000 train_time:2079485ms step_avg:237.90ms +[2025-07-17 19:35:11] [Rank 0] step:8741/10000 train_time:2079485ms step_avg:237.90ms +[2025-07-17 19:35:18] [Rank 0] PRINT: step:8750/10000 val_loss:3.5365 train_time:2082237ms step_avg:237.97ms +[2025-07-17 19:35:18] [Rank 0] PRINT: step:8750/10000 val_loss:3.5365 train_time:2082237ms step_avg:237.97ms +[2025-07-17 19:35:21] [Rank 0] step:8761/10000 train_time:2084489ms step_avg:237.93ms +[2025-07-17 19:35:21] [Rank 0] step:8761/10000 train_time:2084489ms step_avg:237.93ms +[2025-07-17 19:35:26] [Rank 0] step:8781/10000 train_time:2089496ms step_avg:237.96ms +[2025-07-17 19:35:26] [Rank 0] step:8781/10000 
train_time:2089496ms step_avg:237.96ms +[2025-07-17 19:35:31] [Rank 0] step:8801/10000 train_time:2094500ms step_avg:237.98ms +[2025-07-17 19:35:31] [Rank 0] step:8801/10000 train_time:2094500ms step_avg:237.98ms +[2025-07-17 19:35:36] [Rank 0] step:8821/10000 train_time:2099517ms step_avg:238.01ms +[2025-07-17 19:35:36] [Rank 0] step:8821/10000 train_time:2099517ms step_avg:238.01ms +[2025-07-17 19:35:41] [Rank 0] step:8841/10000 train_time:2104548ms step_avg:238.04ms +[2025-07-17 19:35:41] [Rank 0] step:8841/10000 train_time:2104548ms step_avg:238.04ms +[2025-07-17 19:35:46] [Rank 0] step:8861/10000 train_time:2109563ms step_avg:238.07ms +[2025-07-17 19:35:46] [Rank 0] step:8861/10000 train_time:2109563ms step_avg:238.07ms +[2025-07-17 19:35:54] [Rank 0] PRINT: step:8875/10000 val_loss:3.5330 train_time:2113571ms step_avg:238.15ms +[2025-07-17 19:35:54] [Rank 0] PRINT: step:8875/10000 val_loss:3.5330 train_time:2113571ms step_avg:238.15ms +[2025-07-17 19:35:56] [Rank 0] step:8881/10000 train_time:2114570ms step_avg:238.10ms +[2025-07-17 19:35:56] [Rank 0] step:8881/10000 train_time:2114570ms step_avg:238.10ms +[2025-07-17 19:36:01] [Rank 0] step:8901/10000 train_time:2119574ms step_avg:238.13ms +[2025-07-17 19:36:01] [Rank 0] step:8901/10000 train_time:2119574ms step_avg:238.13ms +[2025-07-17 19:36:06] [Rank 0] step:8921/10000 train_time:2124588ms step_avg:238.16ms +[2025-07-17 19:36:06] [Rank 0] step:8921/10000 train_time:2124588ms step_avg:238.16ms +[2025-07-17 19:36:11] [Rank 0] step:8941/10000 train_time:2129604ms step_avg:238.18ms +[2025-07-17 19:36:11] [Rank 0] step:8941/10000 train_time:2129604ms step_avg:238.18ms +[2025-07-17 19:36:16] [Rank 0] step:8961/10000 train_time:2134622ms step_avg:238.21ms +[2025-07-17 19:36:16] [Rank 0] step:8961/10000 train_time:2134622ms step_avg:238.21ms +[2025-07-17 19:36:21] [Rank 0] step:8981/10000 train_time:2139642ms step_avg:238.24ms +[2025-07-17 19:36:21] [Rank 0] step:8981/10000 train_time:2139642ms step_avg:238.24ms 
+[2025-07-17 19:36:30] [Rank 0] PRINT: step:9000/10000 val_loss:3.5205 train_time:2144914ms step_avg:238.32ms +[2025-07-17 19:36:30] [Rank 0] PRINT: step:9000/10000 val_loss:3.5205 train_time:2144914ms step_avg:238.32ms +[2025-07-17 19:36:30] [Rank 0] step:9001/10000 train_time:2144928ms step_avg:238.30ms +[2025-07-17 19:36:30] [Rank 0] step:9001/10000 train_time:2144928ms step_avg:238.30ms +[2025-07-17 19:36:35] [Rank 0] step:9021/10000 train_time:2149668ms step_avg:238.30ms +[2025-07-17 19:36:35] [Rank 0] step:9021/10000 train_time:2149668ms step_avg:238.30ms +[2025-07-17 19:36:40] [Rank 0] step:9041/10000 train_time:2154711ms step_avg:238.33ms +[2025-07-17 19:36:40] [Rank 0] step:9041/10000 train_time:2154711ms step_avg:238.33ms +[2025-07-17 19:36:45] [Rank 0] step:9061/10000 train_time:2159727ms step_avg:238.35ms +[2025-07-17 19:36:45] [Rank 0] step:9061/10000 train_time:2159727ms step_avg:238.35ms +[2025-07-17 19:36:50] [Rank 0] step:9081/10000 train_time:2164770ms step_avg:238.38ms +[2025-07-17 19:36:50] [Rank 0] step:9081/10000 train_time:2164770ms step_avg:238.38ms +[2025-07-17 19:36:55] [Rank 0] step:9101/10000 train_time:2169816ms step_avg:238.42ms +[2025-07-17 19:36:55] [Rank 0] step:9101/10000 train_time:2169816ms step_avg:238.42ms +[2025-07-17 19:37:00] [Rank 0] step:9121/10000 train_time:2174848ms step_avg:238.44ms +[2025-07-17 19:37:00] [Rank 0] step:9121/10000 train_time:2174848ms step_avg:238.44ms +[2025-07-17 19:37:06] [Rank 0] PRINT: step:9125/10000 val_loss:3.5030 train_time:2176355ms step_avg:238.50ms +[2025-07-17 19:37:06] [Rank 0] PRINT: step:9125/10000 val_loss:3.5030 train_time:2176355ms step_avg:238.50ms +[2025-07-17 19:37:10] [Rank 0] step:9141/10000 train_time:2179859ms step_avg:238.47ms +[2025-07-17 19:37:10] [Rank 0] step:9141/10000 train_time:2179859ms step_avg:238.47ms +[2025-07-17 19:37:15] [Rank 0] step:9161/10000 train_time:2184915ms step_avg:238.50ms +[2025-07-17 19:37:15] [Rank 0] step:9161/10000 train_time:2184915ms 
step_avg:238.50ms +[2025-07-17 19:37:20] [Rank 0] step:9181/10000 train_time:2189943ms step_avg:238.53ms +[2025-07-17 19:37:20] [Rank 0] step:9181/10000 train_time:2189943ms step_avg:238.53ms +[2025-07-17 19:37:25] [Rank 0] step:9201/10000 train_time:2194966ms step_avg:238.56ms +[2025-07-17 19:37:25] [Rank 0] step:9201/10000 train_time:2194966ms step_avg:238.56ms +[2025-07-17 19:37:30] [Rank 0] step:9221/10000 train_time:2200024ms step_avg:238.59ms +[2025-07-17 19:37:30] [Rank 0] step:9221/10000 train_time:2200024ms step_avg:238.59ms +[2025-07-17 19:37:35] [Rank 0] step:9241/10000 train_time:2205060ms step_avg:238.62ms +[2025-07-17 19:37:35] [Rank 0] step:9241/10000 train_time:2205060ms step_avg:238.62ms +[2025-07-17 19:37:42] [Rank 0] PRINT: step:9250/10000 val_loss:3.5187 train_time:2207828ms step_avg:238.68ms +[2025-07-17 19:37:42] [Rank 0] PRINT: step:9250/10000 val_loss:3.5187 train_time:2207828ms step_avg:238.68ms +[2025-07-17 19:37:45] [Rank 0] step:9261/10000 train_time:2210089ms step_avg:238.64ms +[2025-07-17 19:37:45] [Rank 0] step:9261/10000 train_time:2210089ms step_avg:238.64ms +[2025-07-17 19:37:50] [Rank 0] step:9281/10000 train_time:2215091ms step_avg:238.67ms +[2025-07-17 19:37:50] [Rank 0] step:9281/10000 train_time:2215091ms step_avg:238.67ms +[2025-07-17 19:37:55] [Rank 0] step:9301/10000 train_time:2220120ms step_avg:238.70ms +[2025-07-17 19:37:55] [Rank 0] step:9301/10000 train_time:2220120ms step_avg:238.70ms +[2025-07-17 19:38:00] [Rank 0] step:9321/10000 train_time:2225166ms step_avg:238.73ms +[2025-07-17 19:38:00] [Rank 0] step:9321/10000 train_time:2225166ms step_avg:238.73ms +[2025-07-17 19:38:05] [Rank 0] step:9341/10000 train_time:2230188ms step_avg:238.75ms +[2025-07-17 19:38:05] [Rank 0] step:9341/10000 train_time:2230188ms step_avg:238.75ms +[2025-07-17 19:38:10] [Rank 0] step:9361/10000 train_time:2235217ms step_avg:238.78ms +[2025-07-17 19:38:10] [Rank 0] step:9361/10000 train_time:2235217ms step_avg:238.78ms +[2025-07-17 
19:38:18] [Rank 0] PRINT: step:9375/10000 val_loss:3.5152 train_time:2239242ms step_avg:238.85ms +[2025-07-17 19:38:18] [Rank 0] PRINT: step:9375/10000 val_loss:3.5152 train_time:2239242ms step_avg:238.85ms +[2025-07-17 19:38:20] [Rank 0] step:9381/10000 train_time:2240242ms step_avg:238.81ms +[2025-07-17 19:38:20] [Rank 0] step:9381/10000 train_time:2240242ms step_avg:238.81ms +[2025-07-17 19:38:25] [Rank 0] step:9401/10000 train_time:2245250ms step_avg:238.83ms +[2025-07-17 19:38:25] [Rank 0] step:9401/10000 train_time:2245250ms step_avg:238.83ms +[2025-07-17 19:38:30] [Rank 0] step:9421/10000 train_time:2250279ms step_avg:238.86ms +[2025-07-17 19:38:30] [Rank 0] step:9421/10000 train_time:2250279ms step_avg:238.86ms +[2025-07-17 19:38:35] [Rank 0] step:9441/10000 train_time:2255305ms step_avg:238.88ms +[2025-07-17 19:38:35] [Rank 0] step:9441/10000 train_time:2255305ms step_avg:238.88ms +[2025-07-17 19:38:40] [Rank 0] step:9461/10000 train_time:2260348ms step_avg:238.91ms +[2025-07-17 19:38:40] [Rank 0] step:9461/10000 train_time:2260348ms step_avg:238.91ms +[2025-07-17 19:38:45] [Rank 0] step:9481/10000 train_time:2265386ms step_avg:238.94ms +[2025-07-17 19:38:45] [Rank 0] step:9481/10000 train_time:2265386ms step_avg:238.94ms +[2025-07-17 19:38:54] [Rank 0] PRINT: step:9500/10000 val_loss:3.5170 train_time:2270697ms step_avg:239.02ms +[2025-07-17 19:38:54] [Rank 0] PRINT: step:9500/10000 val_loss:3.5170 train_time:2270697ms step_avg:239.02ms +[2025-07-17 19:38:55] [Rank 0] step:9501/10000 train_time:2270710ms step_avg:239.00ms +[2025-07-17 19:38:55] [Rank 0] step:9501/10000 train_time:2270710ms step_avg:239.00ms +[2025-07-17 19:39:00] [Rank 0] step:9521/10000 train_time:2275467ms step_avg:238.99ms +[2025-07-17 19:39:00] [Rank 0] step:9521/10000 train_time:2275467ms step_avg:238.99ms +[2025-07-17 19:39:05] [Rank 0] step:9541/10000 train_time:2280517ms step_avg:239.02ms +[2025-07-17 19:39:05] [Rank 0] step:9541/10000 train_time:2280517ms step_avg:239.02ms 
+[2025-07-17 19:39:10] [Rank 0] step:9561/10000 train_time:2285531ms step_avg:239.05ms +[2025-07-17 19:39:10] [Rank 0] step:9561/10000 train_time:2285531ms step_avg:239.05ms +[2025-07-17 19:39:15] [Rank 0] step:9581/10000 train_time:2290552ms step_avg:239.07ms +[2025-07-17 19:39:15] [Rank 0] step:9581/10000 train_time:2290552ms step_avg:239.07ms +[2025-07-17 19:39:20] [Rank 0] step:9601/10000 train_time:2295581ms step_avg:239.10ms +[2025-07-17 19:39:20] [Rank 0] step:9601/10000 train_time:2295581ms step_avg:239.10ms +[2025-07-17 19:39:25] [Rank 0] step:9621/10000 train_time:2300638ms step_avg:239.13ms +[2025-07-17 19:39:25] [Rank 0] step:9621/10000 train_time:2300638ms step_avg:239.13ms +[2025-07-17 19:39:31] [Rank 0] PRINT: step:9625/10000 val_loss:3.5110 train_time:2302144ms step_avg:239.18ms +[2025-07-17 19:39:31] [Rank 0] PRINT: step:9625/10000 val_loss:3.5110 train_time:2302144ms step_avg:239.18ms +[2025-07-17 19:39:35] [Rank 0] step:9641/10000 train_time:2305682ms step_avg:239.15ms +[2025-07-17 19:39:35] [Rank 0] step:9641/10000 train_time:2305682ms step_avg:239.15ms +[2025-07-17 19:39:40] [Rank 0] step:9661/10000 train_time:2310777ms step_avg:239.19ms +[2025-07-17 19:39:40] [Rank 0] step:9661/10000 train_time:2310777ms step_avg:239.19ms +[2025-07-17 19:39:45] [Rank 0] step:9681/10000 train_time:2315857ms step_avg:239.22ms +[2025-07-17 19:39:45] [Rank 0] step:9681/10000 train_time:2315857ms step_avg:239.22ms +[2025-07-17 19:39:50] [Rank 0] step:9701/10000 train_time:2320950ms step_avg:239.25ms +[2025-07-17 19:39:50] [Rank 0] step:9701/10000 train_time:2320950ms step_avg:239.25ms +[2025-07-17 19:39:55] [Rank 0] step:9721/10000 train_time:2326021ms step_avg:239.28ms +[2025-07-17 19:39:55] [Rank 0] step:9721/10000 train_time:2326021ms step_avg:239.28ms +[2025-07-17 19:40:00] [Rank 0] step:9741/10000 train_time:2331113ms step_avg:239.31ms +[2025-07-17 19:40:00] [Rank 0] step:9741/10000 train_time:2331113ms step_avg:239.31ms +[2025-07-17 19:40:07] [Rank 0] PRINT: 
step:9750/10000 val_loss:3.5016 train_time:2333903ms step_avg:239.37ms +[2025-07-17 19:40:07] [Rank 0] PRINT: step:9750/10000 val_loss:3.5016 train_time:2333903ms step_avg:239.37ms +[2025-07-17 19:40:10] [Rank 0] step:9761/10000 train_time:2336186ms step_avg:239.34ms +[2025-07-17 19:40:10] [Rank 0] step:9761/10000 train_time:2336186ms step_avg:239.34ms +[2025-07-17 19:40:15] [Rank 0] step:9781/10000 train_time:2341260ms step_avg:239.37ms +[2025-07-17 19:40:15] [Rank 0] step:9781/10000 train_time:2341260ms step_avg:239.37ms +[2025-07-17 19:40:20] [Rank 0] step:9801/10000 train_time:2346327ms step_avg:239.40ms +[2025-07-17 19:40:20] [Rank 0] step:9801/10000 train_time:2346327ms step_avg:239.40ms +[2025-07-17 19:40:25] [Rank 0] step:9821/10000 train_time:2351397ms step_avg:239.43ms +[2025-07-17 19:40:25] [Rank 0] step:9821/10000 train_time:2351397ms step_avg:239.43ms +[2025-07-17 19:40:30] [Rank 0] step:9841/10000 train_time:2356461ms step_avg:239.45ms +[2025-07-17 19:40:30] [Rank 0] step:9841/10000 train_time:2356461ms step_avg:239.45ms +[2025-07-17 19:40:35] [Rank 0] step:9861/10000 train_time:2361528ms step_avg:239.48ms +[2025-07-17 19:40:35] [Rank 0] step:9861/10000 train_time:2361528ms step_avg:239.48ms +[2025-07-17 19:40:43] [Rank 0] PRINT: step:9875/10000 val_loss:3.4993 train_time:2365579ms step_avg:239.55ms +[2025-07-17 19:40:43] [Rank 0] PRINT: step:9875/10000 val_loss:3.4993 train_time:2365579ms step_avg:239.55ms +[2025-07-17 19:40:44] [Rank 0] step:9881/10000 train_time:2366590ms step_avg:239.51ms +[2025-07-17 19:40:44] [Rank 0] step:9881/10000 train_time:2366590ms step_avg:239.51ms +[2025-07-17 19:40:49] [Rank 0] step:9901/10000 train_time:2371664ms step_avg:239.54ms +[2025-07-17 19:40:49] [Rank 0] step:9901/10000 train_time:2371664ms step_avg:239.54ms +[2025-07-17 19:40:54] [Rank 0] step:9921/10000 train_time:2376749ms step_avg:239.57ms +[2025-07-17 19:40:54] [Rank 0] step:9921/10000 train_time:2376749ms step_avg:239.57ms +[2025-07-17 19:41:00] [Rank 0] 
step:9941/10000 train_time:2381857ms step_avg:239.60ms +[2025-07-17 19:41:00] [Rank 0] step:9941/10000 train_time:2381857ms step_avg:239.60ms +[2025-07-17 19:41:05] [Rank 0] step:9961/10000 train_time:2386954ms step_avg:239.63ms +[2025-07-17 19:41:05] [Rank 0] step:9961/10000 train_time:2386954ms step_avg:239.63ms +[2025-07-17 19:41:10] [Rank 0] step:9981/10000 train_time:2392067ms step_avg:239.66ms +[2025-07-17 19:41:10] [Rank 0] step:9981/10000 train_time:2392067ms step_avg:239.66ms +[2025-07-17 19:41:15] [Rank 0] step:10000/10000 train_time:2396880ms step_avg:239.69ms +[2025-07-17 19:41:15] [Rank 0] step:10000/10000 train_time:2396880ms step_avg:239.69ms +[2025-07-17 19:41:19] [Rank 0] PRINT: step:10000/10000 val_loss:3.4964 train_time:2397392ms step_avg:239.74ms +[2025-07-17 19:41:19] [Rank 0] PRINT: step:10000/10000 val_loss:3.4964 train_time:2397392ms step_avg:239.74ms +[2025-07-17 19:41:19] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 19:41:19 2025 --- +[2025-07-17 19:41:19] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 19:41:19 2025 --- +[2025-07-17 19:41:19] [Rank 0] PRINT: Peak memory allocated: 31117 MiB reserved: 31436 MiB +[2025-07-17 19:41:19] [Rank 0] PRINT: Peak memory allocated: 31117 MiB reserved: 31436 MiB diff --git a/logs_norope/diff_modes/mode_3_param_norope_seed_42/config.json b/logs_norope/diff_modes/mode_3_param_norope_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..5204666625688e41b7dee5aa75c5b1adb639c019 --- /dev/null +++ b/logs_norope/diff_modes/mode_3_param_norope_seed_42/config.json @@ -0,0 +1,22 @@ +{ + "cli_args": { + "unet": false, + "seed": 42, + "optimizer_mode": 3, + "model_parameterization": "norope" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 10485760, + "train_seq_len": 49152, + 
"val_seq_len": 262144, + "num_iterations": 10000, + "cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 125, + "save_checkpoint": false + }, + "run_uuid_for_log": "5dc923f2-6b2d-4156-9ad4-ab2d49ed13bc", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_norope/diff_modes/mode_3_param_norope_seed_42/training_log_5dc923f2-6b2d-4156-9ad4-ab2d49ed13bc.txt b/logs_norope/diff_modes/mode_3_param_norope_seed_42/training_log_5dc923f2-6b2d-4156-9ad4-ab2d49ed13bc.txt new file mode 100644 index 0000000000000000000000000000000000000000..3a17ee675e2a6911603b374a53b25fab63e136db --- /dev/null +++ b/logs_norope/diff_modes/mode_3_param_norope_seed_42/training_log_5dc923f2-6b2d-4156-9ad4-ab2d49ed13bc.txt @@ -0,0 +1,2360 @@ +[2025-07-17 12:29:36] [Rank 0] PRINT: --- Script Start: Thu Jul 17 12:29:36 2025 --- +[2025-07-17 12:29:36] [Rank 0] PRINT: --- Script Start: Thu Jul 17 12:29:36 2025 --- +[2025-07-17 12:29:36] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=3, model_parameterization='norope') +[2025-07-17 12:29:36] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=3, model_parameterization='norope') +[2025-07-17 12:29:36] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 12:29:36] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 12:29:36] [Rank 0] PRINT: Using fixed seed: 42 +[2025-07-17 12:29:36] [Rank 0] PRINT: Using fixed seed: 42 +[2025-07-17 12:29:36] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_3_param_norope_seed_42 +[2025-07-17 12:29:36] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_3_param_norope_seed_42 +[2025-07-17 12:29:36] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path 
+import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * 
num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used 
for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + 
config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 12:29:36] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + 
num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + 
run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 12:29:36] [Rank 0] PRINT: Constructing model... +[2025-07-17 12:29:36] [Rank 0] PRINT: Constructing model... +[2025-07-17 12:29:39] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 12:29:39] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 12:29:39] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 12:29:39] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 12:29:39] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 12:29:39] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 12:29:39] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 12:29:39] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 12:29:39] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 3 +[2025-07-17 12:29:39] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 3 +[2025-07-17 12:29:39] [Rank 0] PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: 0.001). +[2025-07-17 12:29:39] [Rank 0] PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: 0.001). +[2025-07-17 12:29:39] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-07-17 12:29:39] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-07-17 12:29:39] [Rank 0] PRINT: Muon optimizer is active with 44 parameters. +[2025-07-17 12:29:39] [Rank 0] PRINT: Muon optimizer is active with 44 parameters. +[2025-07-17 12:29:39] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 12:29:39] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 12:29:39] [Rank 0] PRINT: Model compilation complete. +[2025-07-17 12:29:39] [Rank 0] PRINT: Model compilation complete. +[2025-07-17 12:29:39] [Rank 0] PRINT: Starting warmup... 
+[2025-07-17 12:29:39] [Rank 0] PRINT: Starting warmup... +[2025-07-17 12:30:44] [Rank 0] PRINT: Warmup complete. +[2025-07-17 12:30:44] [Rank 0] PRINT: Warmup complete. +[2025-07-17 12:30:45] [Rank 0] PRINT: Starting training... +[2025-07-17 12:30:45] [Rank 0] PRINT: Starting training... +[2025-07-17 12:30:54] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 12:30:54] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 12:30:59] [Rank 0] step:21/10000 train_time:4740ms step_avg:225.73ms +[2025-07-17 12:30:59] [Rank 0] step:21/10000 train_time:4740ms step_avg:225.73ms +[2025-07-17 12:31:03] [Rank 0] step:41/10000 train_time:9192ms step_avg:224.19ms +[2025-07-17 12:31:03] [Rank 0] step:41/10000 train_time:9192ms step_avg:224.19ms +[2025-07-17 12:31:08] [Rank 0] step:61/10000 train_time:13647ms step_avg:223.71ms +[2025-07-17 12:31:08] [Rank 0] step:61/10000 train_time:13647ms step_avg:223.71ms +[2025-07-17 12:31:12] [Rank 0] step:81/10000 train_time:18112ms step_avg:223.60ms +[2025-07-17 12:31:12] [Rank 0] step:81/10000 train_time:18112ms step_avg:223.60ms +[2025-07-17 12:31:17] [Rank 0] step:101/10000 train_time:22581ms step_avg:223.57ms +[2025-07-17 12:31:17] [Rank 0] step:101/10000 train_time:22581ms step_avg:223.57ms +[2025-07-17 12:31:21] [Rank 0] step:121/10000 train_time:27058ms step_avg:223.62ms +[2025-07-17 12:31:21] [Rank 0] step:121/10000 train_time:27058ms step_avg:223.62ms +[2025-07-17 12:31:26] [Rank 0] PRINT: step:125/10000 val_loss:5.1663 train_time:28185ms step_avg:225.48ms +[2025-07-17 12:31:26] [Rank 0] PRINT: step:125/10000 val_loss:5.1663 train_time:28185ms step_avg:225.48ms +[2025-07-17 12:31:30] [Rank 0] step:141/10000 train_time:31535ms step_avg:223.65ms +[2025-07-17 12:31:30] [Rank 0] step:141/10000 train_time:31535ms step_avg:223.65ms +[2025-07-17 12:31:35] [Rank 0] step:161/10000 train_time:36015ms step_avg:223.69ms +[2025-07-17 12:31:35] [Rank 0] step:161/10000 
train_time:36015ms step_avg:223.69ms +[2025-07-17 12:31:39] [Rank 0] step:181/10000 train_time:40502ms step_avg:223.77ms +[2025-07-17 12:31:39] [Rank 0] step:181/10000 train_time:40502ms step_avg:223.77ms +[2025-07-17 12:31:44] [Rank 0] step:201/10000 train_time:44987ms step_avg:223.82ms +[2025-07-17 12:31:44] [Rank 0] step:201/10000 train_time:44987ms step_avg:223.82ms +[2025-07-17 12:31:48] [Rank 0] step:221/10000 train_time:49474ms step_avg:223.86ms +[2025-07-17 12:31:48] [Rank 0] step:221/10000 train_time:49474ms step_avg:223.86ms +[2025-07-17 12:31:53] [Rank 0] step:241/10000 train_time:53960ms step_avg:223.90ms +[2025-07-17 12:31:53] [Rank 0] step:241/10000 train_time:53960ms step_avg:223.90ms +[2025-07-17 12:31:59] [Rank 0] PRINT: step:250/10000 val_loss:4.7664 train_time:56213ms step_avg:224.85ms +[2025-07-17 12:31:59] [Rank 0] PRINT: step:250/10000 val_loss:4.7664 train_time:56213ms step_avg:224.85ms +[2025-07-17 12:32:01] [Rank 0] step:261/10000 train_time:58451ms step_avg:223.95ms +[2025-07-17 12:32:01] [Rank 0] step:261/10000 train_time:58451ms step_avg:223.95ms +[2025-07-17 12:32:06] [Rank 0] step:281/10000 train_time:62937ms step_avg:223.97ms +[2025-07-17 12:32:06] [Rank 0] step:281/10000 train_time:62937ms step_avg:223.97ms +[2025-07-17 12:32:10] [Rank 0] step:301/10000 train_time:67426ms step_avg:224.01ms +[2025-07-17 12:32:10] [Rank 0] step:301/10000 train_time:67426ms step_avg:224.01ms +[2025-07-17 12:32:15] [Rank 0] step:321/10000 train_time:71914ms step_avg:224.03ms +[2025-07-17 12:32:15] [Rank 0] step:321/10000 train_time:71914ms step_avg:224.03ms +[2025-07-17 12:32:19] [Rank 0] step:341/10000 train_time:76406ms step_avg:224.06ms +[2025-07-17 12:32:19] [Rank 0] step:341/10000 train_time:76406ms step_avg:224.06ms +[2025-07-17 12:32:24] [Rank 0] step:361/10000 train_time:80897ms step_avg:224.09ms +[2025-07-17 12:32:24] [Rank 0] step:361/10000 train_time:80897ms step_avg:224.09ms +[2025-07-17 12:32:31] [Rank 0] PRINT: step:375/10000 
val_loss:4.5854 train_time:84268ms step_avg:224.71ms +[2025-07-17 12:32:31] [Rank 0] PRINT: step:375/10000 val_loss:4.5854 train_time:84268ms step_avg:224.71ms +[2025-07-17 12:32:33] [Rank 0] step:381/10000 train_time:85386ms step_avg:224.11ms +[2025-07-17 12:32:33] [Rank 0] step:381/10000 train_time:85386ms step_avg:224.11ms +[2025-07-17 12:32:37] [Rank 0] step:401/10000 train_time:89874ms step_avg:224.12ms +[2025-07-17 12:32:37] [Rank 0] step:401/10000 train_time:89874ms step_avg:224.12ms +[2025-07-17 12:32:42] [Rank 0] step:421/10000 train_time:94363ms step_avg:224.14ms +[2025-07-17 12:32:42] [Rank 0] step:421/10000 train_time:94363ms step_avg:224.14ms +[2025-07-17 12:32:46] [Rank 0] step:441/10000 train_time:98852ms step_avg:224.16ms +[2025-07-17 12:32:46] [Rank 0] step:441/10000 train_time:98852ms step_avg:224.16ms +[2025-07-17 12:32:51] [Rank 0] step:461/10000 train_time:103344ms step_avg:224.17ms +[2025-07-17 12:32:51] [Rank 0] step:461/10000 train_time:103344ms step_avg:224.17ms +[2025-07-17 12:32:55] [Rank 0] step:481/10000 train_time:107837ms step_avg:224.19ms +[2025-07-17 12:32:55] [Rank 0] step:481/10000 train_time:107837ms step_avg:224.19ms +[2025-07-17 12:33:04] [Rank 0] PRINT: step:500/10000 val_loss:4.4621 train_time:112333ms step_avg:224.67ms +[2025-07-17 12:33:04] [Rank 0] PRINT: step:500/10000 val_loss:4.4621 train_time:112333ms step_avg:224.67ms +[2025-07-17 12:33:04] [Rank 0] step:501/10000 train_time:112348ms step_avg:224.25ms +[2025-07-17 12:33:04] [Rank 0] step:501/10000 train_time:112348ms step_avg:224.25ms +[2025-07-17 12:33:09] [Rank 0] step:521/10000 train_time:117351ms step_avg:225.24ms +[2025-07-17 12:33:09] [Rank 0] step:521/10000 train_time:117351ms step_avg:225.24ms +[2025-07-17 12:33:14] [Rank 0] step:541/10000 train_time:121841ms step_avg:225.22ms +[2025-07-17 12:33:14] [Rank 0] step:541/10000 train_time:121841ms step_avg:225.22ms +[2025-07-17 12:33:18] [Rank 0] step:561/10000 train_time:126333ms step_avg:225.19ms +[2025-07-17 
12:33:18] [Rank 0] step:561/10000 train_time:126333ms step_avg:225.19ms +[2025-07-17 12:33:23] [Rank 0] step:581/10000 train_time:130827ms step_avg:225.18ms +[2025-07-17 12:33:23] [Rank 0] step:581/10000 train_time:130827ms step_avg:225.18ms +[2025-07-17 12:33:27] [Rank 0] step:601/10000 train_time:135324ms step_avg:225.16ms +[2025-07-17 12:33:27] [Rank 0] step:601/10000 train_time:135324ms step_avg:225.16ms +[2025-07-17 12:33:32] [Rank 0] step:621/10000 train_time:139819ms step_avg:225.15ms +[2025-07-17 12:33:32] [Rank 0] step:621/10000 train_time:139819ms step_avg:225.15ms +[2025-07-17 12:33:37] [Rank 0] PRINT: step:625/10000 val_loss:4.3856 train_time:140950ms step_avg:225.52ms +[2025-07-17 12:33:37] [Rank 0] PRINT: step:625/10000 val_loss:4.3856 train_time:140950ms step_avg:225.52ms +[2025-07-17 12:33:40] [Rank 0] step:641/10000 train_time:144315ms step_avg:225.14ms +[2025-07-17 12:33:40] [Rank 0] step:641/10000 train_time:144315ms step_avg:225.14ms +[2025-07-17 12:33:45] [Rank 0] step:661/10000 train_time:148812ms step_avg:225.13ms +[2025-07-17 12:33:45] [Rank 0] step:661/10000 train_time:148812ms step_avg:225.13ms +[2025-07-17 12:33:49] [Rank 0] step:681/10000 train_time:153311ms step_avg:225.13ms +[2025-07-17 12:33:49] [Rank 0] step:681/10000 train_time:153311ms step_avg:225.13ms +[2025-07-17 12:33:54] [Rank 0] step:701/10000 train_time:157812ms step_avg:225.12ms +[2025-07-17 12:33:54] [Rank 0] step:701/10000 train_time:157812ms step_avg:225.12ms +[2025-07-17 12:33:58] [Rank 0] step:721/10000 train_time:162313ms step_avg:225.12ms +[2025-07-17 12:33:58] [Rank 0] step:721/10000 train_time:162313ms step_avg:225.12ms +[2025-07-17 12:34:03] [Rank 0] step:741/10000 train_time:166810ms step_avg:225.11ms +[2025-07-17 12:34:03] [Rank 0] step:741/10000 train_time:166810ms step_avg:225.11ms +[2025-07-17 12:34:09] [Rank 0] PRINT: step:750/10000 val_loss:4.3390 train_time:169081ms step_avg:225.44ms +[2025-07-17 12:34:09] [Rank 0] PRINT: step:750/10000 val_loss:4.3390 
train_time:169081ms step_avg:225.44ms +[2025-07-17 12:34:12] [Rank 0] step:761/10000 train_time:171343ms step_avg:225.15ms +[2025-07-17 12:34:12] [Rank 0] step:761/10000 train_time:171343ms step_avg:225.15ms +[2025-07-17 12:34:16] [Rank 0] step:781/10000 train_time:175877ms step_avg:225.19ms +[2025-07-17 12:34:16] [Rank 0] step:781/10000 train_time:175877ms step_avg:225.19ms +[2025-07-17 12:34:21] [Rank 0] step:801/10000 train_time:180416ms step_avg:225.24ms +[2025-07-17 12:34:21] [Rank 0] step:801/10000 train_time:180416ms step_avg:225.24ms +[2025-07-17 12:34:26] [Rank 0] step:821/10000 train_time:184951ms step_avg:225.28ms +[2025-07-17 12:34:26] [Rank 0] step:821/10000 train_time:184951ms step_avg:225.28ms +[2025-07-17 12:34:30] [Rank 0] step:841/10000 train_time:189486ms step_avg:225.31ms +[2025-07-17 12:34:30] [Rank 0] step:841/10000 train_time:189486ms step_avg:225.31ms +[2025-07-17 12:34:35] [Rank 0] step:861/10000 train_time:194023ms step_avg:225.35ms +[2025-07-17 12:34:35] [Rank 0] step:861/10000 train_time:194023ms step_avg:225.35ms +[2025-07-17 12:34:42] [Rank 0] PRINT: step:875/10000 val_loss:4.2081 train_time:197429ms step_avg:225.63ms +[2025-07-17 12:34:42] [Rank 0] PRINT: step:875/10000 val_loss:4.2081 train_time:197429ms step_avg:225.63ms +[2025-07-17 12:34:44] [Rank 0] step:881/10000 train_time:198557ms step_avg:225.38ms +[2025-07-17 12:34:44] [Rank 0] step:881/10000 train_time:198557ms step_avg:225.38ms +[2025-07-17 12:34:48] [Rank 0] step:901/10000 train_time:203095ms step_avg:225.41ms +[2025-07-17 12:34:48] [Rank 0] step:901/10000 train_time:203095ms step_avg:225.41ms +[2025-07-17 12:34:53] [Rank 0] step:921/10000 train_time:207634ms step_avg:225.44ms +[2025-07-17 12:34:53] [Rank 0] step:921/10000 train_time:207634ms step_avg:225.44ms +[2025-07-17 12:34:57] [Rank 0] step:941/10000 train_time:212176ms step_avg:225.48ms +[2025-07-17 12:34:57] [Rank 0] step:941/10000 train_time:212176ms step_avg:225.48ms +[2025-07-17 12:35:02] [Rank 0] 
step:961/10000 train_time:216717ms step_avg:225.51ms +[2025-07-17 12:35:02] [Rank 0] step:961/10000 train_time:216717ms step_avg:225.51ms +[2025-07-17 12:35:06] [Rank 0] step:981/10000 train_time:221257ms step_avg:225.54ms +[2025-07-17 12:35:06] [Rank 0] step:981/10000 train_time:221257ms step_avg:225.54ms +[2025-07-17 12:35:15] [Rank 0] PRINT: step:1000/10000 val_loss:4.2817 train_time:225798ms step_avg:225.80ms +[2025-07-17 12:35:15] [Rank 0] PRINT: step:1000/10000 val_loss:4.2817 train_time:225798ms step_avg:225.80ms +[2025-07-17 12:35:15] [Rank 0] step:1001/10000 train_time:225814ms step_avg:225.59ms +[2025-07-17 12:35:15] [Rank 0] step:1001/10000 train_time:225814ms step_avg:225.59ms +[2025-07-17 12:35:20] [Rank 0] step:1021/10000 train_time:230850ms step_avg:226.10ms +[2025-07-17 12:35:20] [Rank 0] step:1021/10000 train_time:230850ms step_avg:226.10ms +[2025-07-17 12:35:25] [Rank 0] step:1041/10000 train_time:235391ms step_avg:226.12ms +[2025-07-17 12:35:25] [Rank 0] step:1041/10000 train_time:235391ms step_avg:226.12ms +[2025-07-17 12:35:29] [Rank 0] step:1061/10000 train_time:239933ms step_avg:226.14ms +[2025-07-17 12:35:29] [Rank 0] step:1061/10000 train_time:239933ms step_avg:226.14ms +[2025-07-17 12:35:34] [Rank 0] step:1081/10000 train_time:244478ms step_avg:226.16ms +[2025-07-17 12:35:34] [Rank 0] step:1081/10000 train_time:244478ms step_avg:226.16ms +[2025-07-17 12:35:38] [Rank 0] step:1101/10000 train_time:249023ms step_avg:226.18ms +[2025-07-17 12:35:38] [Rank 0] step:1101/10000 train_time:249023ms step_avg:226.18ms +[2025-07-17 12:35:43] [Rank 0] step:1121/10000 train_time:253575ms step_avg:226.20ms +[2025-07-17 12:35:43] [Rank 0] step:1121/10000 train_time:253575ms step_avg:226.20ms +[2025-07-17 12:35:48] [Rank 0] PRINT: step:1125/10000 val_loss:4.2266 train_time:254719ms step_avg:226.42ms +[2025-07-17 12:35:48] [Rank 0] PRINT: step:1125/10000 val_loss:4.2266 train_time:254719ms step_avg:226.42ms +[2025-07-17 12:35:52] [Rank 0] step:1141/10000 
train_time:258118ms step_avg:226.22ms +[2025-07-17 12:35:52] [Rank 0] step:1141/10000 train_time:258118ms step_avg:226.22ms +[2025-07-17 12:35:57] [Rank 0] step:1161/10000 train_time:262664ms step_avg:226.24ms +[2025-07-17 12:35:57] [Rank 0] step:1161/10000 train_time:262664ms step_avg:226.24ms +[2025-07-17 12:36:01] [Rank 0] step:1181/10000 train_time:267209ms step_avg:226.26ms +[2025-07-17 12:36:01] [Rank 0] step:1181/10000 train_time:267209ms step_avg:226.26ms +[2025-07-17 12:36:06] [Rank 0] step:1201/10000 train_time:271757ms step_avg:226.28ms +[2025-07-17 12:36:06] [Rank 0] step:1201/10000 train_time:271757ms step_avg:226.28ms +[2025-07-17 12:36:10] [Rank 0] step:1221/10000 train_time:276301ms step_avg:226.29ms +[2025-07-17 12:36:10] [Rank 0] step:1221/10000 train_time:276301ms step_avg:226.29ms +[2025-07-17 12:36:15] [Rank 0] step:1241/10000 train_time:280849ms step_avg:226.31ms +[2025-07-17 12:36:15] [Rank 0] step:1241/10000 train_time:280849ms step_avg:226.31ms +[2025-07-17 12:36:21] [Rank 0] PRINT: step:1250/10000 val_loss:4.3232 train_time:283126ms step_avg:226.50ms +[2025-07-17 12:36:21] [Rank 0] PRINT: step:1250/10000 val_loss:4.3232 train_time:283126ms step_avg:226.50ms +[2025-07-17 12:36:24] [Rank 0] step:1261/10000 train_time:285395ms step_avg:226.32ms +[2025-07-17 12:36:24] [Rank 0] step:1261/10000 train_time:285395ms step_avg:226.32ms +[2025-07-17 12:36:28] [Rank 0] step:1281/10000 train_time:289943ms step_avg:226.34ms +[2025-07-17 12:36:28] [Rank 0] step:1281/10000 train_time:289943ms step_avg:226.34ms +[2025-07-17 12:36:33] [Rank 0] step:1301/10000 train_time:294493ms step_avg:226.36ms +[2025-07-17 12:36:33] [Rank 0] step:1301/10000 train_time:294493ms step_avg:226.36ms +[2025-07-17 12:36:37] [Rank 0] step:1321/10000 train_time:299044ms step_avg:226.38ms +[2025-07-17 12:36:37] [Rank 0] step:1321/10000 train_time:299044ms step_avg:226.38ms +[2025-07-17 12:36:42] [Rank 0] step:1341/10000 train_time:303593ms step_avg:226.39ms +[2025-07-17 12:36:42] 
[Rank 0] step:1341/10000 train_time:303593ms step_avg:226.39ms +[2025-07-17 12:36:47] [Rank 0] step:1361/10000 train_time:308147ms step_avg:226.41ms +[2025-07-17 12:36:47] [Rank 0] step:1361/10000 train_time:308147ms step_avg:226.41ms +[2025-07-17 12:36:54] [Rank 0] PRINT: step:1375/10000 val_loss:4.3455 train_time:311562ms step_avg:226.59ms +[2025-07-17 12:36:54] [Rank 0] PRINT: step:1375/10000 val_loss:4.3455 train_time:311562ms step_avg:226.59ms +[2025-07-17 12:36:56] [Rank 0] step:1381/10000 train_time:312695ms step_avg:226.43ms +[2025-07-17 12:36:56] [Rank 0] step:1381/10000 train_time:312695ms step_avg:226.43ms +[2025-07-17 12:37:00] [Rank 0] step:1401/10000 train_time:317243ms step_avg:226.44ms +[2025-07-17 12:37:00] [Rank 0] step:1401/10000 train_time:317243ms step_avg:226.44ms +[2025-07-17 12:37:05] [Rank 0] step:1421/10000 train_time:321789ms step_avg:226.45ms +[2025-07-17 12:37:05] [Rank 0] step:1421/10000 train_time:321789ms step_avg:226.45ms +[2025-07-17 12:37:09] [Rank 0] step:1441/10000 train_time:326338ms step_avg:226.47ms +[2025-07-17 12:37:09] [Rank 0] step:1441/10000 train_time:326338ms step_avg:226.47ms +[2025-07-17 12:37:14] [Rank 0] step:1461/10000 train_time:330883ms step_avg:226.48ms +[2025-07-17 12:37:14] [Rank 0] step:1461/10000 train_time:330883ms step_avg:226.48ms +[2025-07-17 12:37:18] [Rank 0] step:1481/10000 train_time:335433ms step_avg:226.49ms +[2025-07-17 12:37:18] [Rank 0] step:1481/10000 train_time:335433ms step_avg:226.49ms +[2025-07-17 12:37:27] [Rank 0] PRINT: step:1500/10000 val_loss:4.2972 train_time:340007ms step_avg:226.67ms +[2025-07-17 12:37:27] [Rank 0] PRINT: step:1500/10000 val_loss:4.2972 train_time:340007ms step_avg:226.67ms +[2025-07-17 12:37:27] [Rank 0] step:1501/10000 train_time:340022ms step_avg:226.53ms +[2025-07-17 12:37:27] [Rank 0] step:1501/10000 train_time:340022ms step_avg:226.53ms +[2025-07-17 12:37:32] [Rank 0] step:1521/10000 train_time:344577ms step_avg:226.55ms +[2025-07-17 12:37:32] [Rank 0] 
step:1521/10000 train_time:344577ms step_avg:226.55ms +[2025-07-17 12:37:37] [Rank 0] step:1541/10000 train_time:349677ms step_avg:226.92ms +[2025-07-17 12:37:37] [Rank 0] step:1541/10000 train_time:349677ms step_avg:226.92ms +[2025-07-17 12:37:42] [Rank 0] step:1561/10000 train_time:354259ms step_avg:226.94ms +[2025-07-17 12:37:42] [Rank 0] step:1561/10000 train_time:354259ms step_avg:226.94ms +[2025-07-17 12:37:46] [Rank 0] step:1581/10000 train_time:358839ms step_avg:226.97ms +[2025-07-17 12:37:46] [Rank 0] step:1581/10000 train_time:358839ms step_avg:226.97ms +[2025-07-17 12:37:51] [Rank 0] step:1601/10000 train_time:363424ms step_avg:227.00ms +[2025-07-17 12:37:51] [Rank 0] step:1601/10000 train_time:363424ms step_avg:227.00ms +[2025-07-17 12:37:55] [Rank 0] step:1621/10000 train_time:368005ms step_avg:227.02ms +[2025-07-17 12:37:55] [Rank 0] step:1621/10000 train_time:368005ms step_avg:227.02ms +[2025-07-17 12:38:01] [Rank 0] PRINT: step:1625/10000 val_loss:4.2990 train_time:369158ms step_avg:227.17ms +[2025-07-17 12:38:01] [Rank 0] PRINT: step:1625/10000 val_loss:4.2990 train_time:369158ms step_avg:227.17ms +[2025-07-17 12:38:04] [Rank 0] step:1641/10000 train_time:372583ms step_avg:227.05ms +[2025-07-17 12:38:04] [Rank 0] step:1641/10000 train_time:372583ms step_avg:227.05ms +[2025-07-17 12:38:09] [Rank 0] step:1661/10000 train_time:377162ms step_avg:227.07ms +[2025-07-17 12:38:09] [Rank 0] step:1661/10000 train_time:377162ms step_avg:227.07ms +[2025-07-17 12:38:14] [Rank 0] step:1681/10000 train_time:381741ms step_avg:227.09ms +[2025-07-17 12:38:14] [Rank 0] step:1681/10000 train_time:381741ms step_avg:227.09ms +[2025-07-17 12:38:18] [Rank 0] step:1701/10000 train_time:386323ms step_avg:227.12ms +[2025-07-17 12:38:18] [Rank 0] step:1701/10000 train_time:386323ms step_avg:227.12ms +[2025-07-17 12:38:23] [Rank 0] step:1721/10000 train_time:390904ms step_avg:227.14ms +[2025-07-17 12:38:23] [Rank 0] step:1721/10000 train_time:390904ms step_avg:227.14ms 
+[2025-07-17 12:38:27] [Rank 0] step:1741/10000 train_time:395484ms step_avg:227.16ms +[2025-07-17 12:38:27] [Rank 0] step:1741/10000 train_time:395484ms step_avg:227.16ms +[2025-07-17 12:38:34] [Rank 0] PRINT: step:1750/10000 val_loss:4.4696 train_time:397780ms step_avg:227.30ms +[2025-07-17 12:38:34] [Rank 0] PRINT: step:1750/10000 val_loss:4.4696 train_time:397780ms step_avg:227.30ms +[2025-07-17 12:38:36] [Rank 0] step:1761/10000 train_time:400062ms step_avg:227.18ms +[2025-07-17 12:38:36] [Rank 0] step:1761/10000 train_time:400062ms step_avg:227.18ms +[2025-07-17 12:38:41] [Rank 0] step:1781/10000 train_time:404644ms step_avg:227.20ms +[2025-07-17 12:38:41] [Rank 0] step:1781/10000 train_time:404644ms step_avg:227.20ms +[2025-07-17 12:38:46] [Rank 0] step:1801/10000 train_time:409227ms step_avg:227.22ms +[2025-07-17 12:38:46] [Rank 0] step:1801/10000 train_time:409227ms step_avg:227.22ms +[2025-07-17 12:38:50] [Rank 0] step:1821/10000 train_time:413814ms step_avg:227.25ms +[2025-07-17 12:38:50] [Rank 0] step:1821/10000 train_time:413814ms step_avg:227.25ms +[2025-07-17 12:38:55] [Rank 0] step:1841/10000 train_time:418401ms step_avg:227.27ms +[2025-07-17 12:38:55] [Rank 0] step:1841/10000 train_time:418401ms step_avg:227.27ms +[2025-07-17 12:38:59] [Rank 0] step:1861/10000 train_time:422989ms step_avg:227.29ms +[2025-07-17 12:38:59] [Rank 0] step:1861/10000 train_time:422989ms step_avg:227.29ms +[2025-07-17 12:39:07] [Rank 0] PRINT: step:1875/10000 val_loss:4.3156 train_time:426431ms step_avg:227.43ms +[2025-07-17 12:39:07] [Rank 0] PRINT: step:1875/10000 val_loss:4.3156 train_time:426431ms step_avg:227.43ms +[2025-07-17 12:39:08] [Rank 0] step:1881/10000 train_time:427571ms step_avg:227.31ms +[2025-07-17 12:39:08] [Rank 0] step:1881/10000 train_time:427571ms step_avg:227.31ms +[2025-07-17 12:39:13] [Rank 0] step:1901/10000 train_time:432154ms step_avg:227.33ms +[2025-07-17 12:39:13] [Rank 0] step:1901/10000 train_time:432154ms step_avg:227.33ms +[2025-07-17 
12:39:18] [Rank 0] step:1921/10000 train_time:436736ms step_avg:227.35ms +[2025-07-17 12:39:18] [Rank 0] step:1921/10000 train_time:436736ms step_avg:227.35ms +[2025-07-17 12:39:22] [Rank 0] step:1941/10000 train_time:441319ms step_avg:227.37ms +[2025-07-17 12:39:22] [Rank 0] step:1941/10000 train_time:441319ms step_avg:227.37ms +[2025-07-17 12:39:27] [Rank 0] step:1961/10000 train_time:445903ms step_avg:227.39ms +[2025-07-17 12:39:27] [Rank 0] step:1961/10000 train_time:445903ms step_avg:227.39ms +[2025-07-17 12:39:31] [Rank 0] step:1981/10000 train_time:450484ms step_avg:227.40ms +[2025-07-17 12:39:31] [Rank 0] step:1981/10000 train_time:450484ms step_avg:227.40ms +[2025-07-17 12:39:40] [Rank 0] PRINT: step:2000/10000 val_loss:4.4232 train_time:455069ms step_avg:227.53ms +[2025-07-17 12:39:40] [Rank 0] PRINT: step:2000/10000 val_loss:4.4232 train_time:455069ms step_avg:227.53ms +[2025-07-17 12:39:40] [Rank 0] step:2001/10000 train_time:455085ms step_avg:227.43ms +[2025-07-17 12:39:40] [Rank 0] step:2001/10000 train_time:455085ms step_avg:227.43ms +[2025-07-17 12:39:45] [Rank 0] step:2021/10000 train_time:459644ms step_avg:227.43ms +[2025-07-17 12:39:45] [Rank 0] step:2021/10000 train_time:459644ms step_avg:227.43ms +[2025-07-17 12:39:50] [Rank 0] step:2041/10000 train_time:464759ms step_avg:227.71ms +[2025-07-17 12:39:50] [Rank 0] step:2041/10000 train_time:464759ms step_avg:227.71ms +[2025-07-17 12:39:55] [Rank 0] step:2061/10000 train_time:469340ms step_avg:227.72ms +[2025-07-17 12:39:55] [Rank 0] step:2061/10000 train_time:469340ms step_avg:227.72ms +[2025-07-17 12:39:59] [Rank 0] step:2081/10000 train_time:473920ms step_avg:227.74ms +[2025-07-17 12:39:59] [Rank 0] step:2081/10000 train_time:473920ms step_avg:227.74ms +[2025-07-17 12:40:04] [Rank 0] step:2101/10000 train_time:478501ms step_avg:227.75ms +[2025-07-17 12:40:04] [Rank 0] step:2101/10000 train_time:478501ms step_avg:227.75ms +[2025-07-17 12:40:08] [Rank 0] step:2121/10000 train_time:483080ms 
step_avg:227.76ms +[2025-07-17 12:40:08] [Rank 0] step:2121/10000 train_time:483080ms step_avg:227.76ms +[2025-07-17 12:40:14] [Rank 0] PRINT: step:2125/10000 val_loss:4.3844 train_time:484232ms step_avg:227.87ms +[2025-07-17 12:40:14] [Rank 0] PRINT: step:2125/10000 val_loss:4.3844 train_time:484232ms step_avg:227.87ms +[2025-07-17 12:40:17] [Rank 0] step:2141/10000 train_time:487659ms step_avg:227.77ms +[2025-07-17 12:40:17] [Rank 0] step:2141/10000 train_time:487659ms step_avg:227.77ms +[2025-07-17 12:40:22] [Rank 0] step:2161/10000 train_time:492240ms step_avg:227.78ms +[2025-07-17 12:40:22] [Rank 0] step:2161/10000 train_time:492240ms step_avg:227.78ms +[2025-07-17 12:40:27] [Rank 0] step:2181/10000 train_time:496818ms step_avg:227.79ms +[2025-07-17 12:40:27] [Rank 0] step:2181/10000 train_time:496818ms step_avg:227.79ms +[2025-07-17 12:40:31] [Rank 0] step:2201/10000 train_time:501401ms step_avg:227.81ms +[2025-07-17 12:40:31] [Rank 0] step:2201/10000 train_time:501401ms step_avg:227.81ms +[2025-07-17 12:40:36] [Rank 0] step:2221/10000 train_time:505983ms step_avg:227.82ms +[2025-07-17 12:40:36] [Rank 0] step:2221/10000 train_time:505983ms step_avg:227.82ms +[2025-07-17 12:40:40] [Rank 0] step:2241/10000 train_time:510663ms step_avg:227.87ms +[2025-07-17 12:40:40] [Rank 0] step:2241/10000 train_time:510663ms step_avg:227.87ms +[2025-07-17 12:40:47] [Rank 0] PRINT: step:2250/10000 val_loss:4.0444 train_time:513019ms step_avg:228.01ms +[2025-07-17 12:40:47] [Rank 0] PRINT: step:2250/10000 val_loss:4.0444 train_time:513019ms step_avg:228.01ms +[2025-07-17 12:40:49] [Rank 0] step:2261/10000 train_time:515359ms step_avg:227.93ms +[2025-07-17 12:40:49] [Rank 0] step:2261/10000 train_time:515359ms step_avg:227.93ms +[2025-07-17 12:40:54] [Rank 0] step:2281/10000 train_time:520057ms step_avg:228.00ms +[2025-07-17 12:40:54] [Rank 0] step:2281/10000 train_time:520057ms step_avg:228.00ms +[2025-07-17 12:40:59] [Rank 0] step:2301/10000 train_time:524756ms 
step_avg:228.06ms +[2025-07-17 12:40:59] [Rank 0] step:2301/10000 train_time:524756ms step_avg:228.06ms +[2025-07-17 12:41:03] [Rank 0] step:2321/10000 train_time:529451ms step_avg:228.11ms +[2025-07-17 12:41:03] [Rank 0] step:2321/10000 train_time:529451ms step_avg:228.11ms +[2025-07-17 12:41:08] [Rank 0] step:2341/10000 train_time:534146ms step_avg:228.17ms +[2025-07-17 12:41:08] [Rank 0] step:2341/10000 train_time:534146ms step_avg:228.17ms +[2025-07-17 12:41:13] [Rank 0] step:2361/10000 train_time:538842ms step_avg:228.23ms +[2025-07-17 12:41:13] [Rank 0] step:2361/10000 train_time:538842ms step_avg:228.23ms +[2025-07-17 12:41:21] [Rank 0] PRINT: step:2375/10000 val_loss:3.8036 train_time:542365ms step_avg:228.36ms +[2025-07-17 12:41:21] [Rank 0] PRINT: step:2375/10000 val_loss:3.8036 train_time:542365ms step_avg:228.36ms +[2025-07-17 12:41:22] [Rank 0] step:2381/10000 train_time:543537ms step_avg:228.28ms +[2025-07-17 12:41:22] [Rank 0] step:2381/10000 train_time:543537ms step_avg:228.28ms +[2025-07-17 12:41:27] [Rank 0] step:2401/10000 train_time:548233ms step_avg:228.34ms +[2025-07-17 12:41:27] [Rank 0] step:2401/10000 train_time:548233ms step_avg:228.34ms +[2025-07-17 12:41:31] [Rank 0] step:2421/10000 train_time:552932ms step_avg:228.39ms +[2025-07-17 12:41:31] [Rank 0] step:2421/10000 train_time:552932ms step_avg:228.39ms +[2025-07-17 12:41:36] [Rank 0] step:2441/10000 train_time:557629ms step_avg:228.44ms +[2025-07-17 12:41:36] [Rank 0] step:2441/10000 train_time:557629ms step_avg:228.44ms +[2025-07-17 12:41:41] [Rank 0] step:2461/10000 train_time:562327ms step_avg:228.50ms +[2025-07-17 12:41:41] [Rank 0] step:2461/10000 train_time:562327ms step_avg:228.50ms +[2025-07-17 12:41:45] [Rank 0] step:2481/10000 train_time:567026ms step_avg:228.55ms +[2025-07-17 12:41:45] [Rank 0] step:2481/10000 train_time:567026ms step_avg:228.55ms +[2025-07-17 12:41:55] [Rank 0] PRINT: step:2500/10000 val_loss:3.8687 train_time:571726ms step_avg:228.69ms +[2025-07-17 
12:41:55] [Rank 0] PRINT: step:2500/10000 val_loss:3.8687 train_time:571726ms step_avg:228.69ms +[2025-07-17 12:41:55] [Rank 0] step:2501/10000 train_time:571742ms step_avg:228.61ms +[2025-07-17 12:41:55] [Rank 0] step:2501/10000 train_time:571742ms step_avg:228.61ms +[2025-07-17 12:41:59] [Rank 0] step:2521/10000 train_time:576425ms step_avg:228.65ms +[2025-07-17 12:41:59] [Rank 0] step:2521/10000 train_time:576425ms step_avg:228.65ms +[2025-07-17 12:42:05] [Rank 0] step:2541/10000 train_time:581237ms step_avg:228.74ms +[2025-07-17 12:42:05] [Rank 0] step:2541/10000 train_time:581237ms step_avg:228.74ms +[2025-07-17 12:42:09] [Rank 0] step:2561/10000 train_time:586341ms step_avg:228.95ms +[2025-07-17 12:42:09] [Rank 0] step:2561/10000 train_time:586341ms step_avg:228.95ms +[2025-07-17 12:42:14] [Rank 0] step:2581/10000 train_time:591036ms step_avg:228.99ms +[2025-07-17 12:42:14] [Rank 0] step:2581/10000 train_time:591036ms step_avg:228.99ms +[2025-07-17 12:42:19] [Rank 0] step:2601/10000 train_time:595730ms step_avg:229.04ms +[2025-07-17 12:42:19] [Rank 0] step:2601/10000 train_time:595730ms step_avg:229.04ms +[2025-07-17 12:42:23] [Rank 0] step:2621/10000 train_time:600425ms step_avg:229.08ms +[2025-07-17 12:42:23] [Rank 0] step:2621/10000 train_time:600425ms step_avg:229.08ms +[2025-07-17 12:42:29] [Rank 0] PRINT: step:2625/10000 val_loss:3.9661 train_time:601605ms step_avg:229.18ms +[2025-07-17 12:42:29] [Rank 0] PRINT: step:2625/10000 val_loss:3.9661 train_time:601605ms step_avg:229.18ms +[2025-07-17 12:42:33] [Rank 0] step:2641/10000 train_time:605119ms step_avg:229.12ms +[2025-07-17 12:42:33] [Rank 0] step:2641/10000 train_time:605119ms step_avg:229.12ms +[2025-07-17 12:42:37] [Rank 0] step:2661/10000 train_time:609819ms step_avg:229.17ms +[2025-07-17 12:42:37] [Rank 0] step:2661/10000 train_time:609819ms step_avg:229.17ms +[2025-07-17 12:42:42] [Rank 0] step:2681/10000 train_time:614517ms step_avg:229.21ms +[2025-07-17 12:42:42] [Rank 0] step:2681/10000 
train_time:614517ms step_avg:229.21ms +[2025-07-17 12:42:47] [Rank 0] step:2701/10000 train_time:619217ms step_avg:229.25ms +[2025-07-17 12:42:47] [Rank 0] step:2701/10000 train_time:619217ms step_avg:229.25ms +[2025-07-17 12:42:52] [Rank 0] step:2721/10000 train_time:623917ms step_avg:229.30ms +[2025-07-17 12:42:52] [Rank 0] step:2721/10000 train_time:623917ms step_avg:229.30ms +[2025-07-17 12:42:56] [Rank 0] step:2741/10000 train_time:628616ms step_avg:229.34ms +[2025-07-17 12:42:56] [Rank 0] step:2741/10000 train_time:628616ms step_avg:229.34ms +[2025-07-17 12:43:03] [Rank 0] PRINT: step:2750/10000 val_loss:4.0039 train_time:630970ms step_avg:229.44ms +[2025-07-17 12:43:03] [Rank 0] PRINT: step:2750/10000 val_loss:4.0039 train_time:630970ms step_avg:229.44ms +[2025-07-17 12:43:05] [Rank 0] step:2761/10000 train_time:633311ms step_avg:229.38ms +[2025-07-17 12:43:05] [Rank 0] step:2761/10000 train_time:633311ms step_avg:229.38ms +[2025-07-17 12:43:10] [Rank 0] step:2781/10000 train_time:638013ms step_avg:229.42ms +[2025-07-17 12:43:10] [Rank 0] step:2781/10000 train_time:638013ms step_avg:229.42ms +[2025-07-17 12:43:15] [Rank 0] step:2801/10000 train_time:642712ms step_avg:229.46ms +[2025-07-17 12:43:15] [Rank 0] step:2801/10000 train_time:642712ms step_avg:229.46ms +[2025-07-17 12:43:20] [Rank 0] step:2821/10000 train_time:647416ms step_avg:229.50ms +[2025-07-17 12:43:20] [Rank 0] step:2821/10000 train_time:647416ms step_avg:229.50ms +[2025-07-17 12:43:24] [Rank 0] step:2841/10000 train_time:652119ms step_avg:229.54ms +[2025-07-17 12:43:24] [Rank 0] step:2841/10000 train_time:652119ms step_avg:229.54ms +[2025-07-17 12:43:29] [Rank 0] step:2861/10000 train_time:656820ms step_avg:229.58ms +[2025-07-17 12:43:29] [Rank 0] step:2861/10000 train_time:656820ms step_avg:229.58ms +[2025-07-17 12:43:37] [Rank 0] PRINT: step:2875/10000 val_loss:3.9723 train_time:660351ms step_avg:229.69ms +[2025-07-17 12:43:37] [Rank 0] PRINT: step:2875/10000 val_loss:3.9723 
train_time:660351ms step_avg:229.69ms +[2025-07-17 12:43:38] [Rank 0] step:2881/10000 train_time:661523ms step_avg:229.62ms +[2025-07-17 12:43:38] [Rank 0] step:2881/10000 train_time:661523ms step_avg:229.62ms +[2025-07-17 12:43:43] [Rank 0] step:2901/10000 train_time:666221ms step_avg:229.65ms +[2025-07-17 12:43:43] [Rank 0] step:2901/10000 train_time:666221ms step_avg:229.65ms +[2025-07-17 12:43:48] [Rank 0] step:2921/10000 train_time:670920ms step_avg:229.69ms +[2025-07-17 12:43:48] [Rank 0] step:2921/10000 train_time:670920ms step_avg:229.69ms +[2025-07-17 12:43:52] [Rank 0] step:2941/10000 train_time:675620ms step_avg:229.72ms +[2025-07-17 12:43:52] [Rank 0] step:2941/10000 train_time:675620ms step_avg:229.72ms +[2025-07-17 12:43:57] [Rank 0] step:2961/10000 train_time:680321ms step_avg:229.76ms +[2025-07-17 12:43:57] [Rank 0] step:2961/10000 train_time:680321ms step_avg:229.76ms +[2025-07-17 12:44:02] [Rank 0] step:2981/10000 train_time:685034ms step_avg:229.80ms +[2025-07-17 12:44:02] [Rank 0] step:2981/10000 train_time:685034ms step_avg:229.80ms +[2025-07-17 12:44:10] [Rank 0] PRINT: step:3000/10000 val_loss:4.0027 train_time:689755ms step_avg:229.92ms +[2025-07-17 12:44:10] [Rank 0] PRINT: step:3000/10000 val_loss:4.0027 train_time:689755ms step_avg:229.92ms +[2025-07-17 12:44:11] [Rank 0] step:3001/10000 train_time:689770ms step_avg:229.85ms +[2025-07-17 12:44:11] [Rank 0] step:3001/10000 train_time:689770ms step_avg:229.85ms +[2025-07-17 12:44:15] [Rank 0] step:3021/10000 train_time:694466ms step_avg:229.88ms +[2025-07-17 12:44:15] [Rank 0] step:3021/10000 train_time:694466ms step_avg:229.88ms +[2025-07-17 12:44:20] [Rank 0] step:3041/10000 train_time:699179ms step_avg:229.92ms +[2025-07-17 12:44:20] [Rank 0] step:3041/10000 train_time:699179ms step_avg:229.92ms +[2025-07-17 12:44:25] [Rank 0] step:3061/10000 train_time:704420ms step_avg:230.13ms +[2025-07-17 12:44:25] [Rank 0] step:3061/10000 train_time:704420ms step_avg:230.13ms +[2025-07-17 12:44:30] 
[Rank 0] step:3081/10000 train_time:709135ms step_avg:230.16ms +[2025-07-17 12:44:30] [Rank 0] step:3081/10000 train_time:709135ms step_avg:230.16ms +[2025-07-17 12:44:35] [Rank 0] step:3101/10000 train_time:713847ms step_avg:230.20ms +[2025-07-17 12:44:35] [Rank 0] step:3101/10000 train_time:713847ms step_avg:230.20ms +[2025-07-17 12:44:39] [Rank 0] step:3121/10000 train_time:718561ms step_avg:230.23ms +[2025-07-17 12:44:39] [Rank 0] step:3121/10000 train_time:718561ms step_avg:230.23ms +[2025-07-17 12:44:45] [Rank 0] PRINT: step:3125/10000 val_loss:3.9171 train_time:719747ms step_avg:230.32ms +[2025-07-17 12:44:45] [Rank 0] PRINT: step:3125/10000 val_loss:3.9171 train_time:719747ms step_avg:230.32ms +[2025-07-17 12:44:49] [Rank 0] step:3141/10000 train_time:723276ms step_avg:230.27ms +[2025-07-17 12:44:49] [Rank 0] step:3141/10000 train_time:723276ms step_avg:230.27ms +[2025-07-17 12:44:53] [Rank 0] step:3161/10000 train_time:727994ms step_avg:230.30ms +[2025-07-17 12:44:53] [Rank 0] step:3161/10000 train_time:727994ms step_avg:230.30ms +[2025-07-17 12:44:58] [Rank 0] step:3181/10000 train_time:732810ms step_avg:230.37ms +[2025-07-17 12:44:58] [Rank 0] step:3181/10000 train_time:732810ms step_avg:230.37ms +[2025-07-17 12:45:03] [Rank 0] step:3201/10000 train_time:737529ms step_avg:230.41ms +[2025-07-17 12:45:03] [Rank 0] step:3201/10000 train_time:737529ms step_avg:230.41ms +[2025-07-17 12:45:08] [Rank 0] step:3221/10000 train_time:742247ms step_avg:230.44ms +[2025-07-17 12:45:08] [Rank 0] step:3221/10000 train_time:742247ms step_avg:230.44ms +[2025-07-17 12:45:12] [Rank 0] step:3241/10000 train_time:746967ms step_avg:230.47ms +[2025-07-17 12:45:12] [Rank 0] step:3241/10000 train_time:746967ms step_avg:230.47ms +[2025-07-17 12:45:19] [Rank 0] PRINT: step:3250/10000 val_loss:3.9333 train_time:749333ms step_avg:230.56ms +[2025-07-17 12:45:19] [Rank 0] PRINT: step:3250/10000 val_loss:3.9333 train_time:749333ms step_avg:230.56ms +[2025-07-17 12:45:22] [Rank 0] 
step:3261/10000 train_time:751682ms step_avg:230.51ms +[2025-07-17 12:45:22] [Rank 0] step:3261/10000 train_time:751682ms step_avg:230.51ms +[2025-07-17 12:45:26] [Rank 0] step:3281/10000 train_time:756399ms step_avg:230.54ms +[2025-07-17 12:45:26] [Rank 0] step:3281/10000 train_time:756399ms step_avg:230.54ms +[2025-07-17 12:45:31] [Rank 0] step:3301/10000 train_time:761117ms step_avg:230.57ms +[2025-07-17 12:45:31] [Rank 0] step:3301/10000 train_time:761117ms step_avg:230.57ms +[2025-07-17 12:45:36] [Rank 0] step:3321/10000 train_time:765837ms step_avg:230.60ms +[2025-07-17 12:45:36] [Rank 0] step:3321/10000 train_time:765837ms step_avg:230.60ms +[2025-07-17 12:45:41] [Rank 0] step:3341/10000 train_time:770553ms step_avg:230.64ms +[2025-07-17 12:45:41] [Rank 0] step:3341/10000 train_time:770553ms step_avg:230.64ms +[2025-07-17 12:45:45] [Rank 0] step:3361/10000 train_time:775275ms step_avg:230.67ms +[2025-07-17 12:45:45] [Rank 0] step:3361/10000 train_time:775275ms step_avg:230.67ms +[2025-07-17 12:45:53] [Rank 0] PRINT: step:3375/10000 val_loss:3.9514 train_time:778819ms step_avg:230.76ms +[2025-07-17 12:45:53] [Rank 0] PRINT: step:3375/10000 val_loss:3.9514 train_time:778819ms step_avg:230.76ms +[2025-07-17 12:45:55] [Rank 0] step:3381/10000 train_time:779994ms step_avg:230.70ms +[2025-07-17 12:45:55] [Rank 0] step:3381/10000 train_time:779994ms step_avg:230.70ms +[2025-07-17 12:45:59] [Rank 0] step:3401/10000 train_time:784716ms step_avg:230.73ms +[2025-07-17 12:45:59] [Rank 0] step:3401/10000 train_time:784716ms step_avg:230.73ms +[2025-07-17 12:46:04] [Rank 0] step:3421/10000 train_time:789442ms step_avg:230.76ms +[2025-07-17 12:46:04] [Rank 0] step:3421/10000 train_time:789442ms step_avg:230.76ms +[2025-07-17 12:46:09] [Rank 0] step:3441/10000 train_time:794164ms step_avg:230.79ms +[2025-07-17 12:46:09] [Rank 0] step:3441/10000 train_time:794164ms step_avg:230.79ms +[2025-07-17 12:46:14] [Rank 0] step:3461/10000 train_time:798884ms step_avg:230.82ms 
+[2025-07-17 12:46:14] [Rank 0] step:3461/10000 train_time:798884ms step_avg:230.82ms +[2025-07-17 12:46:18] [Rank 0] step:3481/10000 train_time:803606ms step_avg:230.85ms +[2025-07-17 12:46:18] [Rank 0] step:3481/10000 train_time:803606ms step_avg:230.85ms +[2025-07-17 12:46:27] [Rank 0] PRINT: step:3500/10000 val_loss:3.9476 train_time:808334ms step_avg:230.95ms +[2025-07-17 12:46:27] [Rank 0] PRINT: step:3500/10000 val_loss:3.9476 train_time:808334ms step_avg:230.95ms +[2025-07-17 12:46:28] [Rank 0] step:3501/10000 train_time:808351ms step_avg:230.89ms +[2025-07-17 12:46:28] [Rank 0] step:3501/10000 train_time:808351ms step_avg:230.89ms +[2025-07-17 12:46:32] [Rank 0] step:3521/10000 train_time:813040ms step_avg:230.91ms +[2025-07-17 12:46:32] [Rank 0] step:3521/10000 train_time:813040ms step_avg:230.91ms +[2025-07-17 12:46:37] [Rank 0] step:3541/10000 train_time:817749ms step_avg:230.94ms +[2025-07-17 12:46:37] [Rank 0] step:3541/10000 train_time:817749ms step_avg:230.94ms +[2025-07-17 12:46:42] [Rank 0] step:3561/10000 train_time:822976ms step_avg:231.11ms +[2025-07-17 12:46:42] [Rank 0] step:3561/10000 train_time:822976ms step_avg:231.11ms +[2025-07-17 12:46:47] [Rank 0] step:3581/10000 train_time:827680ms step_avg:231.13ms +[2025-07-17 12:46:47] [Rank 0] step:3581/10000 train_time:827680ms step_avg:231.13ms +[2025-07-17 12:46:52] [Rank 0] step:3601/10000 train_time:832383ms step_avg:231.15ms +[2025-07-17 12:46:52] [Rank 0] step:3601/10000 train_time:832383ms step_avg:231.15ms +[2025-07-17 12:46:56] [Rank 0] step:3621/10000 train_time:837081ms step_avg:231.17ms +[2025-07-17 12:46:56] [Rank 0] step:3621/10000 train_time:837081ms step_avg:231.17ms +[2025-07-17 12:47:02] [Rank 0] PRINT: step:3625/10000 val_loss:3.9720 train_time:838262ms step_avg:231.24ms +[2025-07-17 12:47:02] [Rank 0] PRINT: step:3625/10000 val_loss:3.9720 train_time:838262ms step_avg:231.24ms +[2025-07-17 12:47:06] [Rank 0] step:3641/10000 train_time:841778ms step_avg:231.19ms +[2025-07-17 
12:47:06] [Rank 0] step:3641/10000 train_time:841778ms step_avg:231.19ms +[2025-07-17 12:47:10] [Rank 0] step:3661/10000 train_time:846481ms step_avg:231.22ms +[2025-07-17 12:47:10] [Rank 0] step:3661/10000 train_time:846481ms step_avg:231.22ms +[2025-07-17 12:47:15] [Rank 0] step:3681/10000 train_time:851179ms step_avg:231.24ms +[2025-07-17 12:47:15] [Rank 0] step:3681/10000 train_time:851179ms step_avg:231.24ms +[2025-07-17 12:47:20] [Rank 0] step:3701/10000 train_time:855882ms step_avg:231.26ms +[2025-07-17 12:47:20] [Rank 0] step:3701/10000 train_time:855882ms step_avg:231.26ms +[2025-07-17 12:47:25] [Rank 0] step:3721/10000 train_time:860662ms step_avg:231.30ms +[2025-07-17 12:47:25] [Rank 0] step:3721/10000 train_time:860662ms step_avg:231.30ms +[2025-07-17 12:47:29] [Rank 0] step:3741/10000 train_time:865453ms step_avg:231.34ms +[2025-07-17 12:47:29] [Rank 0] step:3741/10000 train_time:865453ms step_avg:231.34ms +[2025-07-17 12:47:36] [Rank 0] PRINT: step:3750/10000 val_loss:3.8786 train_time:867854ms step_avg:231.43ms +[2025-07-17 12:47:36] [Rank 0] PRINT: step:3750/10000 val_loss:3.8786 train_time:867854ms step_avg:231.43ms +[2025-07-17 12:47:39] [Rank 0] step:3761/10000 train_time:870240ms step_avg:231.39ms +[2025-07-17 12:47:39] [Rank 0] step:3761/10000 train_time:870240ms step_avg:231.39ms +[2025-07-17 12:47:43] [Rank 0] step:3781/10000 train_time:875033ms step_avg:231.43ms +[2025-07-17 12:47:43] [Rank 0] step:3781/10000 train_time:875033ms step_avg:231.43ms +[2025-07-17 12:47:48] [Rank 0] step:3801/10000 train_time:879827ms step_avg:231.47ms +[2025-07-17 12:47:48] [Rank 0] step:3801/10000 train_time:879827ms step_avg:231.47ms +[2025-07-17 12:47:53] [Rank 0] step:3821/10000 train_time:884622ms step_avg:231.52ms +[2025-07-17 12:47:53] [Rank 0] step:3821/10000 train_time:884622ms step_avg:231.52ms +[2025-07-17 12:47:58] [Rank 0] step:3841/10000 train_time:889421ms step_avg:231.56ms +[2025-07-17 12:47:58] [Rank 0] step:3841/10000 train_time:889421ms 
step_avg:231.56ms +[2025-07-17 12:48:03] [Rank 0] step:3861/10000 train_time:894219ms step_avg:231.60ms +[2025-07-17 12:48:03] [Rank 0] step:3861/10000 train_time:894219ms step_avg:231.60ms +[2025-07-17 12:48:11] [Rank 0] PRINT: step:3875/10000 val_loss:3.9456 train_time:897824ms step_avg:231.70ms +[2025-07-17 12:48:11] [Rank 0] PRINT: step:3875/10000 val_loss:3.9456 train_time:897824ms step_avg:231.70ms +[2025-07-17 12:48:12] [Rank 0] step:3881/10000 train_time:899018ms step_avg:231.65ms +[2025-07-17 12:48:12] [Rank 0] step:3881/10000 train_time:899018ms step_avg:231.65ms +[2025-07-17 12:48:17] [Rank 0] step:3901/10000 train_time:903817ms step_avg:231.69ms +[2025-07-17 12:48:17] [Rank 0] step:3901/10000 train_time:903817ms step_avg:231.69ms +[2025-07-17 12:48:22] [Rank 0] step:3921/10000 train_time:908614ms step_avg:231.73ms +[2025-07-17 12:48:22] [Rank 0] step:3921/10000 train_time:908614ms step_avg:231.73ms +[2025-07-17 12:48:27] [Rank 0] step:3941/10000 train_time:913516ms step_avg:231.80ms +[2025-07-17 12:48:27] [Rank 0] step:3941/10000 train_time:913516ms step_avg:231.80ms +[2025-07-17 12:48:31] [Rank 0] step:3961/10000 train_time:918313ms step_avg:231.84ms +[2025-07-17 12:48:31] [Rank 0] step:3961/10000 train_time:918313ms step_avg:231.84ms +[2025-07-17 12:48:36] [Rank 0] step:3981/10000 train_time:923111ms step_avg:231.88ms +[2025-07-17 12:48:36] [Rank 0] step:3981/10000 train_time:923111ms step_avg:231.88ms +[2025-07-17 12:48:45] [Rank 0] PRINT: step:4000/10000 val_loss:3.9370 train_time:927909ms step_avg:231.98ms +[2025-07-17 12:48:45] [Rank 0] PRINT: step:4000/10000 val_loss:3.9370 train_time:927909ms step_avg:231.98ms +[2025-07-17 12:48:46] [Rank 0] step:4001/10000 train_time:927926ms step_avg:231.92ms +[2025-07-17 12:48:46] [Rank 0] step:4001/10000 train_time:927926ms step_avg:231.92ms +[2025-07-17 12:48:50] [Rank 0] step:4021/10000 train_time:932708ms step_avg:231.96ms +[2025-07-17 12:48:50] [Rank 0] step:4021/10000 train_time:932708ms 
step_avg:231.96ms +[2025-07-17 12:48:55] [Rank 0] step:4041/10000 train_time:937509ms step_avg:232.00ms +[2025-07-17 12:48:55] [Rank 0] step:4041/10000 train_time:937509ms step_avg:232.00ms +[2025-07-17 12:49:00] [Rank 0] step:4061/10000 train_time:942312ms step_avg:232.04ms +[2025-07-17 12:49:00] [Rank 0] step:4061/10000 train_time:942312ms step_avg:232.04ms +[2025-07-17 12:49:05] [Rank 0] step:4081/10000 train_time:947623ms step_avg:232.20ms +[2025-07-17 12:49:05] [Rank 0] step:4081/10000 train_time:947623ms step_avg:232.20ms +[2025-07-17 12:49:10] [Rank 0] step:4101/10000 train_time:952424ms step_avg:232.24ms +[2025-07-17 12:49:10] [Rank 0] step:4101/10000 train_time:952424ms step_avg:232.24ms +[2025-07-17 12:49:15] [Rank 0] step:4121/10000 train_time:957225ms step_avg:232.28ms +[2025-07-17 12:49:15] [Rank 0] step:4121/10000 train_time:957225ms step_avg:232.28ms +[2025-07-17 12:49:21] [Rank 0] PRINT: step:4125/10000 val_loss:3.9686 train_time:958433ms step_avg:232.35ms +[2025-07-17 12:49:21] [Rank 0] PRINT: step:4125/10000 val_loss:3.9686 train_time:958433ms step_avg:232.35ms +[2025-07-17 12:49:24] [Rank 0] step:4141/10000 train_time:962026ms step_avg:232.32ms +[2025-07-17 12:49:24] [Rank 0] step:4141/10000 train_time:962026ms step_avg:232.32ms +[2025-07-17 12:49:29] [Rank 0] step:4161/10000 train_time:966827ms step_avg:232.35ms +[2025-07-17 12:49:29] [Rank 0] step:4161/10000 train_time:966827ms step_avg:232.35ms +[2025-07-17 12:49:34] [Rank 0] step:4181/10000 train_time:971629ms step_avg:232.39ms +[2025-07-17 12:49:34] [Rank 0] step:4181/10000 train_time:971629ms step_avg:232.39ms +[2025-07-17 12:49:39] [Rank 0] step:4201/10000 train_time:976435ms step_avg:232.43ms +[2025-07-17 12:49:39] [Rank 0] step:4201/10000 train_time:976435ms step_avg:232.43ms +[2025-07-17 12:49:44] [Rank 0] step:4221/10000 train_time:981238ms step_avg:232.47ms +[2025-07-17 12:49:44] [Rank 0] step:4221/10000 train_time:981238ms step_avg:232.47ms +[2025-07-17 12:49:48] [Rank 0] 
step:4241/10000 train_time:986044ms step_avg:232.50ms +[2025-07-17 12:49:48] [Rank 0] step:4241/10000 train_time:986044ms step_avg:232.50ms +[2025-07-17 12:49:55] [Rank 0] PRINT: step:4250/10000 val_loss:3.9504 train_time:988451ms step_avg:232.58ms +[2025-07-17 12:49:55] [Rank 0] PRINT: step:4250/10000 val_loss:3.9504 train_time:988451ms step_avg:232.58ms +[2025-07-17 12:49:58] [Rank 0] step:4261/10000 train_time:990846ms step_avg:232.54ms +[2025-07-17 12:49:58] [Rank 0] step:4261/10000 train_time:990846ms step_avg:232.54ms +[2025-07-17 12:50:03] [Rank 0] step:4281/10000 train_time:995653ms step_avg:232.57ms +[2025-07-17 12:50:03] [Rank 0] step:4281/10000 train_time:995653ms step_avg:232.57ms +[2025-07-17 12:50:07] [Rank 0] step:4301/10000 train_time:1000458ms step_avg:232.61ms +[2025-07-17 12:50:07] [Rank 0] step:4301/10000 train_time:1000458ms step_avg:232.61ms +[2025-07-17 12:50:12] [Rank 0] step:4321/10000 train_time:1005268ms step_avg:232.65ms +[2025-07-17 12:50:12] [Rank 0] step:4321/10000 train_time:1005268ms step_avg:232.65ms +[2025-07-17 12:50:17] [Rank 0] step:4341/10000 train_time:1010076ms step_avg:232.68ms +[2025-07-17 12:50:17] [Rank 0] step:4341/10000 train_time:1010076ms step_avg:232.68ms +[2025-07-17 12:50:22] [Rank 0] step:4361/10000 train_time:1014885ms step_avg:232.72ms +[2025-07-17 12:50:22] [Rank 0] step:4361/10000 train_time:1014885ms step_avg:232.72ms +[2025-07-17 12:50:30] [Rank 0] PRINT: step:4375/10000 val_loss:3.9648 train_time:1018493ms step_avg:232.80ms +[2025-07-17 12:50:30] [Rank 0] PRINT: step:4375/10000 val_loss:3.9648 train_time:1018493ms step_avg:232.80ms +[2025-07-17 12:50:31] [Rank 0] step:4381/10000 train_time:1019690ms step_avg:232.75ms +[2025-07-17 12:50:31] [Rank 0] step:4381/10000 train_time:1019690ms step_avg:232.75ms +[2025-07-17 12:50:36] [Rank 0] step:4401/10000 train_time:1024490ms step_avg:232.79ms +[2025-07-17 12:50:36] [Rank 0] step:4401/10000 train_time:1024490ms step_avg:232.79ms +[2025-07-17 12:50:41] [Rank 0] 
step:4421/10000 train_time:1029291ms step_avg:232.82ms +[2025-07-17 12:50:41] [Rank 0] step:4421/10000 train_time:1029291ms step_avg:232.82ms +[2025-07-17 12:50:46] [Rank 0] step:4441/10000 train_time:1034092ms step_avg:232.85ms +[2025-07-17 12:50:46] [Rank 0] step:4441/10000 train_time:1034092ms step_avg:232.85ms +[2025-07-17 12:50:51] [Rank 0] step:4461/10000 train_time:1038908ms step_avg:232.89ms +[2025-07-17 12:50:51] [Rank 0] step:4461/10000 train_time:1038908ms step_avg:232.89ms +[2025-07-17 12:50:55] [Rank 0] step:4481/10000 train_time:1043726ms step_avg:232.92ms +[2025-07-17 12:50:55] [Rank 0] step:4481/10000 train_time:1043726ms step_avg:232.92ms +[2025-07-17 12:51:04] [Rank 0] PRINT: step:4500/10000 val_loss:3.9648 train_time:1048548ms step_avg:233.01ms +[2025-07-17 12:51:04] [Rank 0] PRINT: step:4500/10000 val_loss:3.9648 train_time:1048548ms step_avg:233.01ms +[2025-07-17 12:51:04] [Rank 0] step:4501/10000 train_time:1048566ms step_avg:232.96ms +[2025-07-17 12:51:04] [Rank 0] step:4501/10000 train_time:1048566ms step_avg:232.96ms +[2025-07-17 12:51:09] [Rank 0] step:4521/10000 train_time:1053363ms step_avg:232.99ms +[2025-07-17 12:51:09] [Rank 0] step:4521/10000 train_time:1053363ms step_avg:232.99ms +[2025-07-17 12:51:14] [Rank 0] step:4541/10000 train_time:1058183ms step_avg:233.03ms +[2025-07-17 12:51:14] [Rank 0] step:4541/10000 train_time:1058183ms step_avg:233.03ms +[2025-07-17 12:51:19] [Rank 0] step:4561/10000 train_time:1062998ms step_avg:233.06ms +[2025-07-17 12:51:19] [Rank 0] step:4561/10000 train_time:1062998ms step_avg:233.06ms +[2025-07-17 12:51:24] [Rank 0] step:4581/10000 train_time:1068332ms step_avg:233.21ms +[2025-07-17 12:51:24] [Rank 0] step:4581/10000 train_time:1068332ms step_avg:233.21ms +[2025-07-17 12:51:29] [Rank 0] step:4601/10000 train_time:1073154ms step_avg:233.24ms +[2025-07-17 12:51:29] [Rank 0] step:4601/10000 train_time:1073154ms step_avg:233.24ms +[2025-07-17 12:51:34] [Rank 0] step:4621/10000 train_time:1077969ms 
step_avg:233.28ms +[2025-07-17 12:51:34] [Rank 0] step:4621/10000 train_time:1077969ms step_avg:233.28ms +[2025-07-17 12:51:39] [Rank 0] PRINT: step:4625/10000 val_loss:4.0132 train_time:1079181ms step_avg:233.34ms +[2025-07-17 12:51:39] [Rank 0] PRINT: step:4625/10000 val_loss:4.0132 train_time:1079181ms step_avg:233.34ms +[2025-07-17 12:51:43] [Rank 0] step:4641/10000 train_time:1082786ms step_avg:233.31ms +[2025-07-17 12:51:43] [Rank 0] step:4641/10000 train_time:1082786ms step_avg:233.31ms +[2025-07-17 12:51:48] [Rank 0] step:4661/10000 train_time:1087608ms step_avg:233.34ms +[2025-07-17 12:51:48] [Rank 0] step:4661/10000 train_time:1087608ms step_avg:233.34ms +[2025-07-17 12:51:53] [Rank 0] step:4681/10000 train_time:1092429ms step_avg:233.38ms +[2025-07-17 12:51:53] [Rank 0] step:4681/10000 train_time:1092429ms step_avg:233.38ms +[2025-07-17 12:51:58] [Rank 0] step:4701/10000 train_time:1097254ms step_avg:233.41ms +[2025-07-17 12:51:58] [Rank 0] step:4701/10000 train_time:1097254ms step_avg:233.41ms +[2025-07-17 12:52:03] [Rank 0] step:4721/10000 train_time:1102071ms step_avg:233.44ms +[2025-07-17 12:52:03] [Rank 0] step:4721/10000 train_time:1102071ms step_avg:233.44ms +[2025-07-17 12:52:07] [Rank 0] step:4741/10000 train_time:1106893ms step_avg:233.47ms +[2025-07-17 12:52:07] [Rank 0] step:4741/10000 train_time:1106893ms step_avg:233.47ms +[2025-07-17 12:52:14] [Rank 0] PRINT: step:4750/10000 val_loss:4.0017 train_time:1109313ms step_avg:233.54ms +[2025-07-17 12:52:14] [Rank 0] PRINT: step:4750/10000 val_loss:4.0017 train_time:1109313ms step_avg:233.54ms +[2025-07-17 12:52:17] [Rank 0] step:4761/10000 train_time:1111712ms step_avg:233.50ms +[2025-07-17 12:52:17] [Rank 0] step:4761/10000 train_time:1111712ms step_avg:233.50ms +[2025-07-17 12:52:22] [Rank 0] step:4781/10000 train_time:1116529ms step_avg:233.53ms +[2025-07-17 12:52:22] [Rank 0] step:4781/10000 train_time:1116529ms step_avg:233.53ms +[2025-07-17 12:52:26] [Rank 0] step:4801/10000 
train_time:1121345ms step_avg:233.56ms +[2025-07-17 12:52:26] [Rank 0] step:4801/10000 train_time:1121345ms step_avg:233.56ms +[2025-07-17 12:52:31] [Rank 0] step:4821/10000 train_time:1126164ms step_avg:233.60ms +[2025-07-17 12:52:31] [Rank 0] step:4821/10000 train_time:1126164ms step_avg:233.60ms +[2025-07-17 12:52:36] [Rank 0] step:4841/10000 train_time:1130981ms step_avg:233.63ms +[2025-07-17 12:52:36] [Rank 0] step:4841/10000 train_time:1130981ms step_avg:233.63ms +[2025-07-17 12:52:41] [Rank 0] step:4861/10000 train_time:1135799ms step_avg:233.66ms +[2025-07-17 12:52:41] [Rank 0] step:4861/10000 train_time:1135799ms step_avg:233.66ms +[2025-07-17 12:52:49] [Rank 0] PRINT: step:4875/10000 val_loss:4.0363 train_time:1139417ms step_avg:233.73ms +[2025-07-17 12:52:49] [Rank 0] PRINT: step:4875/10000 val_loss:4.0363 train_time:1139417ms step_avg:233.73ms +[2025-07-17 12:52:50] [Rank 0] step:4881/10000 train_time:1140618ms step_avg:233.69ms +[2025-07-17 12:52:50] [Rank 0] step:4881/10000 train_time:1140618ms step_avg:233.69ms +[2025-07-17 12:52:55] [Rank 0] step:4901/10000 train_time:1145436ms step_avg:233.71ms +[2025-07-17 12:52:55] [Rank 0] step:4901/10000 train_time:1145436ms step_avg:233.71ms +[2025-07-17 12:53:00] [Rank 0] step:4921/10000 train_time:1150248ms step_avg:233.74ms +[2025-07-17 12:53:00] [Rank 0] step:4921/10000 train_time:1150248ms step_avg:233.74ms +[2025-07-17 12:53:05] [Rank 0] step:4941/10000 train_time:1155065ms step_avg:233.77ms +[2025-07-17 12:53:05] [Rank 0] step:4941/10000 train_time:1155065ms step_avg:233.77ms +[2025-07-17 12:53:10] [Rank 0] step:4961/10000 train_time:1159881ms step_avg:233.80ms +[2025-07-17 12:53:10] [Rank 0] step:4961/10000 train_time:1159881ms step_avg:233.80ms +[2025-07-17 12:53:14] [Rank 0] step:4981/10000 train_time:1164696ms step_avg:233.83ms +[2025-07-17 12:53:14] [Rank 0] step:4981/10000 train_time:1164696ms step_avg:233.83ms +[2025-07-17 12:53:24] [Rank 0] PRINT: step:5000/10000 val_loss:4.0419 
train_time:1169516ms step_avg:233.90ms +[2025-07-17 12:53:24] [Rank 0] PRINT: step:5000/10000 val_loss:4.0419 train_time:1169516ms step_avg:233.90ms +[2025-07-17 12:53:24] [Rank 0] step:5001/10000 train_time:1169533ms step_avg:233.86ms +[2025-07-17 12:53:24] [Rank 0] step:5001/10000 train_time:1169533ms step_avg:233.86ms +[2025-07-17 12:53:29] [Rank 0] step:5021/10000 train_time:1174334ms step_avg:233.88ms +[2025-07-17 12:53:29] [Rank 0] step:5021/10000 train_time:1174334ms step_avg:233.88ms +[2025-07-17 12:53:34] [Rank 0] step:5041/10000 train_time:1179154ms step_avg:233.91ms +[2025-07-17 12:53:34] [Rank 0] step:5041/10000 train_time:1179154ms step_avg:233.91ms +[2025-07-17 12:53:38] [Rank 0] step:5061/10000 train_time:1183970ms step_avg:233.94ms +[2025-07-17 12:53:38] [Rank 0] step:5061/10000 train_time:1183970ms step_avg:233.94ms +[2025-07-17 12:53:44] [Rank 0] step:5081/10000 train_time:1189322ms step_avg:234.07ms +[2025-07-17 12:53:44] [Rank 0] step:5081/10000 train_time:1189322ms step_avg:234.07ms +[2025-07-17 12:53:49] [Rank 0] step:5101/10000 train_time:1194119ms step_avg:234.10ms +[2025-07-17 12:53:49] [Rank 0] step:5101/10000 train_time:1194119ms step_avg:234.10ms +[2025-07-17 12:53:53] [Rank 0] step:5121/10000 train_time:1198940ms step_avg:234.12ms +[2025-07-17 12:53:53] [Rank 0] step:5121/10000 train_time:1198940ms step_avg:234.12ms +[2025-07-17 12:53:59] [Rank 0] PRINT: step:5125/10000 val_loss:4.0291 train_time:1200152ms step_avg:234.18ms +[2025-07-17 12:53:59] [Rank 0] PRINT: step:5125/10000 val_loss:4.0291 train_time:1200152ms step_avg:234.18ms +[2025-07-17 12:54:03] [Rank 0] step:5141/10000 train_time:1203760ms step_avg:234.15ms +[2025-07-17 12:54:03] [Rank 0] step:5141/10000 train_time:1203760ms step_avg:234.15ms +[2025-07-17 12:54:08] [Rank 0] step:5161/10000 train_time:1208579ms step_avg:234.18ms +[2025-07-17 12:54:08] [Rank 0] step:5161/10000 train_time:1208579ms step_avg:234.18ms +[2025-07-17 12:54:12] [Rank 0] step:5181/10000 
train_time:1213404ms step_avg:234.20ms +[2025-07-17 12:54:12] [Rank 0] step:5181/10000 train_time:1213404ms step_avg:234.20ms +[2025-07-17 12:54:17] [Rank 0] step:5201/10000 train_time:1218270ms step_avg:234.24ms +[2025-07-17 12:54:17] [Rank 0] step:5201/10000 train_time:1218270ms step_avg:234.24ms +[2025-07-17 12:54:22] [Rank 0] step:5221/10000 train_time:1223167ms step_avg:234.28ms +[2025-07-17 12:54:22] [Rank 0] step:5221/10000 train_time:1223167ms step_avg:234.28ms +[2025-07-17 12:54:27] [Rank 0] step:5241/10000 train_time:1228061ms step_avg:234.32ms +[2025-07-17 12:54:27] [Rank 0] step:5241/10000 train_time:1228061ms step_avg:234.32ms +[2025-07-17 12:54:34] [Rank 0] PRINT: step:5250/10000 val_loss:3.9033 train_time:1230508ms step_avg:234.38ms +[2025-07-17 12:54:34] [Rank 0] PRINT: step:5250/10000 val_loss:3.9033 train_time:1230508ms step_avg:234.38ms +[2025-07-17 12:54:37] [Rank 0] step:5261/10000 train_time:1232946ms step_avg:234.36ms +[2025-07-17 12:54:37] [Rank 0] step:5261/10000 train_time:1232946ms step_avg:234.36ms +[2025-07-17 12:54:42] [Rank 0] step:5281/10000 train_time:1237840ms step_avg:234.39ms +[2025-07-17 12:54:42] [Rank 0] step:5281/10000 train_time:1237840ms step_avg:234.39ms +[2025-07-17 12:54:47] [Rank 0] step:5301/10000 train_time:1242726ms step_avg:234.43ms +[2025-07-17 12:54:47] [Rank 0] step:5301/10000 train_time:1242726ms step_avg:234.43ms +[2025-07-17 12:54:51] [Rank 0] step:5321/10000 train_time:1247611ms step_avg:234.47ms +[2025-07-17 12:54:51] [Rank 0] step:5321/10000 train_time:1247611ms step_avg:234.47ms +[2025-07-17 12:54:56] [Rank 0] step:5341/10000 train_time:1252502ms step_avg:234.51ms +[2025-07-17 12:54:56] [Rank 0] step:5341/10000 train_time:1252502ms step_avg:234.51ms +[2025-07-17 12:55:01] [Rank 0] step:5361/10000 train_time:1257387ms step_avg:234.54ms +[2025-07-17 12:55:01] [Rank 0] step:5361/10000 train_time:1257387ms step_avg:234.54ms +[2025-07-17 12:55:09] [Rank 0] PRINT: step:5375/10000 val_loss:3.9206 
train_time:1261060ms step_avg:234.62ms +[2025-07-17 12:55:09] [Rank 0] PRINT: step:5375/10000 val_loss:3.9206 train_time:1261060ms step_avg:234.62ms +[2025-07-17 12:55:11] [Rank 0] step:5381/10000 train_time:1262279ms step_avg:234.58ms +[2025-07-17 12:55:11] [Rank 0] step:5381/10000 train_time:1262279ms step_avg:234.58ms +[2025-07-17 12:55:16] [Rank 0] step:5401/10000 train_time:1267164ms step_avg:234.62ms +[2025-07-17 12:55:16] [Rank 0] step:5401/10000 train_time:1267164ms step_avg:234.62ms +[2025-07-17 12:55:21] [Rank 0] step:5421/10000 train_time:1272060ms step_avg:234.65ms +[2025-07-17 12:55:21] [Rank 0] step:5421/10000 train_time:1272060ms step_avg:234.65ms +[2025-07-17 12:55:25] [Rank 0] step:5441/10000 train_time:1276944ms step_avg:234.69ms +[2025-07-17 12:55:25] [Rank 0] step:5441/10000 train_time:1276944ms step_avg:234.69ms +[2025-07-17 12:55:30] [Rank 0] step:5461/10000 train_time:1281834ms step_avg:234.73ms +[2025-07-17 12:55:30] [Rank 0] step:5461/10000 train_time:1281834ms step_avg:234.73ms +[2025-07-17 12:55:35] [Rank 0] step:5481/10000 train_time:1286726ms step_avg:234.76ms +[2025-07-17 12:55:35] [Rank 0] step:5481/10000 train_time:1286726ms step_avg:234.76ms +[2025-07-17 12:55:45] [Rank 0] PRINT: step:5500/10000 val_loss:3.9043 train_time:1291614ms step_avg:234.84ms +[2025-07-17 12:55:45] [Rank 0] PRINT: step:5500/10000 val_loss:3.9043 train_time:1291614ms step_avg:234.84ms +[2025-07-17 12:55:45] [Rank 0] step:5501/10000 train_time:1291632ms step_avg:234.80ms +[2025-07-17 12:55:45] [Rank 0] step:5501/10000 train_time:1291632ms step_avg:234.80ms +[2025-07-17 12:55:50] [Rank 0] step:5521/10000 train_time:1296494ms step_avg:234.83ms +[2025-07-17 12:55:50] [Rank 0] step:5521/10000 train_time:1296494ms step_avg:234.83ms +[2025-07-17 12:55:55] [Rank 0] step:5541/10000 train_time:1301388ms step_avg:234.87ms +[2025-07-17 12:55:55] [Rank 0] step:5541/10000 train_time:1301388ms step_avg:234.87ms +[2025-07-17 12:55:59] [Rank 0] step:5561/10000 
train_time:1306279ms step_avg:234.90ms +[2025-07-17 12:55:59] [Rank 0] step:5561/10000 train_time:1306279ms step_avg:234.90ms +[2025-07-17 12:56:04] [Rank 0] step:5581/10000 train_time:1311169ms step_avg:234.93ms +[2025-07-17 12:56:04] [Rank 0] step:5581/10000 train_time:1311169ms step_avg:234.93ms +[2025-07-17 12:56:10] [Rank 0] step:5601/10000 train_time:1316567ms step_avg:235.06ms +[2025-07-17 12:56:10] [Rank 0] step:5601/10000 train_time:1316567ms step_avg:235.06ms +[2025-07-17 12:56:15] [Rank 0] step:5621/10000 train_time:1321459ms step_avg:235.09ms +[2025-07-17 12:56:15] [Rank 0] step:5621/10000 train_time:1321459ms step_avg:235.09ms +[2025-07-17 12:56:20] [Rank 0] PRINT: step:5625/10000 val_loss:3.9243 train_time:1322687ms step_avg:235.14ms +[2025-07-17 12:56:20] [Rank 0] PRINT: step:5625/10000 val_loss:3.9243 train_time:1322687ms step_avg:235.14ms +[2025-07-17 12:56:24] [Rank 0] step:5641/10000 train_time:1326346ms step_avg:235.13ms +[2025-07-17 12:56:24] [Rank 0] step:5641/10000 train_time:1326346ms step_avg:235.13ms +[2025-07-17 12:56:29] [Rank 0] step:5661/10000 train_time:1331240ms step_avg:235.16ms +[2025-07-17 12:56:29] [Rank 0] step:5661/10000 train_time:1331240ms step_avg:235.16ms +[2025-07-17 12:56:34] [Rank 0] step:5681/10000 train_time:1336137ms step_avg:235.19ms +[2025-07-17 12:56:34] [Rank 0] step:5681/10000 train_time:1336137ms step_avg:235.19ms +[2025-07-17 12:56:39] [Rank 0] step:5701/10000 train_time:1341023ms step_avg:235.23ms +[2025-07-17 12:56:39] [Rank 0] step:5701/10000 train_time:1341023ms step_avg:235.23ms +[2025-07-17 12:56:44] [Rank 0] step:5721/10000 train_time:1345905ms step_avg:235.26ms +[2025-07-17 12:56:44] [Rank 0] step:5721/10000 train_time:1345905ms step_avg:235.26ms +[2025-07-17 12:56:49] [Rank 0] step:5741/10000 train_time:1350797ms step_avg:235.29ms +[2025-07-17 12:56:49] [Rank 0] step:5741/10000 train_time:1350797ms step_avg:235.29ms +[2025-07-17 12:56:55] [Rank 0] PRINT: step:5750/10000 val_loss:3.9112 
train_time:1353245ms step_avg:235.35ms +[2025-07-17 12:56:55] [Rank 0] PRINT: step:5750/10000 val_loss:3.9112 train_time:1353245ms step_avg:235.35ms +[2025-07-17 12:56:58] [Rank 0] step:5761/10000 train_time:1355682ms step_avg:235.32ms +[2025-07-17 12:56:58] [Rank 0] step:5761/10000 train_time:1355682ms step_avg:235.32ms +[2025-07-17 12:57:03] [Rank 0] step:5781/10000 train_time:1360562ms step_avg:235.35ms +[2025-07-17 12:57:03] [Rank 0] step:5781/10000 train_time:1360562ms step_avg:235.35ms +[2025-07-17 12:57:08] [Rank 0] step:5801/10000 train_time:1365440ms step_avg:235.38ms +[2025-07-17 12:57:08] [Rank 0] step:5801/10000 train_time:1365440ms step_avg:235.38ms +[2025-07-17 12:57:13] [Rank 0] step:5821/10000 train_time:1370324ms step_avg:235.41ms +[2025-07-17 12:57:13] [Rank 0] step:5821/10000 train_time:1370324ms step_avg:235.41ms +[2025-07-17 12:57:18] [Rank 0] step:5841/10000 train_time:1375213ms step_avg:235.44ms +[2025-07-17 12:57:18] [Rank 0] step:5841/10000 train_time:1375213ms step_avg:235.44ms +[2025-07-17 12:57:23] [Rank 0] step:5861/10000 train_time:1380095ms step_avg:235.47ms +[2025-07-17 12:57:23] [Rank 0] step:5861/10000 train_time:1380095ms step_avg:235.47ms +[2025-07-17 12:57:31] [Rank 0] PRINT: step:5875/10000 val_loss:3.8872 train_time:1383761ms step_avg:235.53ms +[2025-07-17 12:57:31] [Rank 0] PRINT: step:5875/10000 val_loss:3.8872 train_time:1383761ms step_avg:235.53ms +[2025-07-17 12:57:32] [Rank 0] step:5881/10000 train_time:1384979ms step_avg:235.50ms +[2025-07-17 12:57:32] [Rank 0] step:5881/10000 train_time:1384979ms step_avg:235.50ms +[2025-07-17 12:57:37] [Rank 0] step:5901/10000 train_time:1389869ms step_avg:235.53ms +[2025-07-17 12:57:37] [Rank 0] step:5901/10000 train_time:1389869ms step_avg:235.53ms +[2025-07-17 12:57:42] [Rank 0] step:5921/10000 train_time:1394757ms step_avg:235.56ms +[2025-07-17 12:57:42] [Rank 0] step:5921/10000 train_time:1394757ms step_avg:235.56ms +[2025-07-17 12:57:47] [Rank 0] step:5941/10000 
train_time:1399657ms step_avg:235.59ms +[2025-07-17 12:57:47] [Rank 0] step:5941/10000 train_time:1399657ms step_avg:235.59ms +[2025-07-17 12:57:52] [Rank 0] step:5961/10000 train_time:1404558ms step_avg:235.62ms +[2025-07-17 12:57:52] [Rank 0] step:5961/10000 train_time:1404558ms step_avg:235.62ms +[2025-07-17 12:57:57] [Rank 0] step:5981/10000 train_time:1409455ms step_avg:235.66ms +[2025-07-17 12:57:57] [Rank 0] step:5981/10000 train_time:1409455ms step_avg:235.66ms +[2025-07-17 12:58:06] [Rank 0] PRINT: step:6000/10000 val_loss:3.9681 train_time:1414359ms step_avg:235.73ms +[2025-07-17 12:58:06] [Rank 0] PRINT: step:6000/10000 val_loss:3.9681 train_time:1414359ms step_avg:235.73ms +[2025-07-17 12:58:06] [Rank 0] step:6001/10000 train_time:1414376ms step_avg:235.69ms +[2025-07-17 12:58:06] [Rank 0] step:6001/10000 train_time:1414376ms step_avg:235.69ms +[2025-07-17 12:58:11] [Rank 0] step:6021/10000 train_time:1419255ms step_avg:235.72ms +[2025-07-17 12:58:11] [Rank 0] step:6021/10000 train_time:1419255ms step_avg:235.72ms +[2025-07-17 12:58:16] [Rank 0] step:6041/10000 train_time:1424155ms step_avg:235.75ms +[2025-07-17 12:58:16] [Rank 0] step:6041/10000 train_time:1424155ms step_avg:235.75ms +[2025-07-17 12:58:21] [Rank 0] step:6061/10000 train_time:1429052ms step_avg:235.78ms +[2025-07-17 12:58:21] [Rank 0] step:6061/10000 train_time:1429052ms step_avg:235.78ms +[2025-07-17 12:58:26] [Rank 0] step:6081/10000 train_time:1433952ms step_avg:235.81ms +[2025-07-17 12:58:26] [Rank 0] step:6081/10000 train_time:1433952ms step_avg:235.81ms +[2025-07-17 12:58:31] [Rank 0] step:6101/10000 train_time:1439338ms step_avg:235.92ms +[2025-07-17 12:58:31] [Rank 0] step:6101/10000 train_time:1439338ms step_avg:235.92ms +[2025-07-17 12:58:36] [Rank 0] step:6121/10000 train_time:1444246ms step_avg:235.95ms +[2025-07-17 12:58:36] [Rank 0] step:6121/10000 train_time:1444246ms step_avg:235.95ms +[2025-07-17 12:58:42] [Rank 0] PRINT: step:6125/10000 val_loss:3.9338 
train_time:1445477ms step_avg:236.00ms +[2025-07-17 12:58:42] [Rank 0] PRINT: step:6125/10000 val_loss:3.9338 train_time:1445477ms step_avg:236.00ms +[2025-07-17 12:58:46] [Rank 0] step:6141/10000 train_time:1449145ms step_avg:235.98ms +[2025-07-17 12:58:46] [Rank 0] step:6141/10000 train_time:1449145ms step_avg:235.98ms +[2025-07-17 12:58:51] [Rank 0] step:6161/10000 train_time:1454042ms step_avg:236.01ms +[2025-07-17 12:58:51] [Rank 0] step:6161/10000 train_time:1454042ms step_avg:236.01ms +[2025-07-17 12:58:56] [Rank 0] step:6181/10000 train_time:1458952ms step_avg:236.04ms +[2025-07-17 12:58:56] [Rank 0] step:6181/10000 train_time:1458952ms step_avg:236.04ms +[2025-07-17 12:59:00] [Rank 0] step:6201/10000 train_time:1463864ms step_avg:236.07ms +[2025-07-17 12:59:00] [Rank 0] step:6201/10000 train_time:1463864ms step_avg:236.07ms +[2025-07-17 12:59:05] [Rank 0] step:6221/10000 train_time:1468773ms step_avg:236.10ms +[2025-07-17 12:59:05] [Rank 0] step:6221/10000 train_time:1468773ms step_avg:236.10ms +[2025-07-17 12:59:10] [Rank 0] step:6241/10000 train_time:1473681ms step_avg:236.13ms +[2025-07-17 12:59:10] [Rank 0] step:6241/10000 train_time:1473681ms step_avg:236.13ms +[2025-07-17 12:59:17] [Rank 0] PRINT: step:6250/10000 val_loss:3.9379 train_time:1476139ms step_avg:236.18ms +[2025-07-17 12:59:17] [Rank 0] PRINT: step:6250/10000 val_loss:3.9379 train_time:1476139ms step_avg:236.18ms +[2025-07-17 12:59:20] [Rank 0] step:6261/10000 train_time:1478584ms step_avg:236.16ms +[2025-07-17 12:59:20] [Rank 0] step:6261/10000 train_time:1478584ms step_avg:236.16ms +[2025-07-17 12:59:25] [Rank 0] step:6281/10000 train_time:1483498ms step_avg:236.19ms +[2025-07-17 12:59:25] [Rank 0] step:6281/10000 train_time:1483498ms step_avg:236.19ms +[2025-07-17 12:59:30] [Rank 0] step:6301/10000 train_time:1488403ms step_avg:236.22ms +[2025-07-17 12:59:30] [Rank 0] step:6301/10000 train_time:1488403ms step_avg:236.22ms +[2025-07-17 12:59:35] [Rank 0] step:6321/10000 
train_time:1493313ms step_avg:236.25ms +[2025-07-17 12:59:35] [Rank 0] step:6321/10000 train_time:1493313ms step_avg:236.25ms +[2025-07-17 12:59:40] [Rank 0] step:6341/10000 train_time:1498229ms step_avg:236.28ms +[2025-07-17 12:59:40] [Rank 0] step:6341/10000 train_time:1498229ms step_avg:236.28ms +[2025-07-17 12:59:44] [Rank 0] step:6361/10000 train_time:1503133ms step_avg:236.30ms +[2025-07-17 12:59:44] [Rank 0] step:6361/10000 train_time:1503133ms step_avg:236.30ms +[2025-07-17 12:59:53] [Rank 0] PRINT: step:6375/10000 val_loss:3.9361 train_time:1506814ms step_avg:236.36ms +[2025-07-17 12:59:53] [Rank 0] PRINT: step:6375/10000 val_loss:3.9361 train_time:1506814ms step_avg:236.36ms +[2025-07-17 12:59:54] [Rank 0] step:6381/10000 train_time:1508036ms step_avg:236.33ms +[2025-07-17 12:59:54] [Rank 0] step:6381/10000 train_time:1508036ms step_avg:236.33ms +[2025-07-17 12:59:59] [Rank 0] step:6401/10000 train_time:1512936ms step_avg:236.36ms +[2025-07-17 12:59:59] [Rank 0] step:6401/10000 train_time:1512936ms step_avg:236.36ms +[2025-07-17 13:00:04] [Rank 0] step:6421/10000 train_time:1517840ms step_avg:236.39ms +[2025-07-17 13:00:04] [Rank 0] step:6421/10000 train_time:1517840ms step_avg:236.39ms +[2025-07-17 13:00:09] [Rank 0] step:6441/10000 train_time:1522743ms step_avg:236.41ms +[2025-07-17 13:00:09] [Rank 0] step:6441/10000 train_time:1522743ms step_avg:236.41ms +[2025-07-17 13:00:14] [Rank 0] step:6461/10000 train_time:1527659ms step_avg:236.44ms +[2025-07-17 13:00:14] [Rank 0] step:6461/10000 train_time:1527659ms step_avg:236.44ms +[2025-07-17 13:00:19] [Rank 0] step:6481/10000 train_time:1532566ms step_avg:236.47ms +[2025-07-17 13:00:19] [Rank 0] step:6481/10000 train_time:1532566ms step_avg:236.47ms +[2025-07-17 13:00:28] [Rank 0] PRINT: step:6500/10000 val_loss:3.9914 train_time:1537480ms step_avg:236.54ms +[2025-07-17 13:00:28] [Rank 0] PRINT: step:6500/10000 val_loss:3.9914 train_time:1537480ms step_avg:236.54ms +[2025-07-17 13:00:28] [Rank 0] 
step:6501/10000 train_time:1537495ms step_avg:236.50ms +[2025-07-17 13:00:28] [Rank 0] step:6501/10000 train_time:1537495ms step_avg:236.50ms +[2025-07-17 13:00:33] [Rank 0] step:6521/10000 train_time:1542382ms step_avg:236.53ms +[2025-07-17 13:00:33] [Rank 0] step:6521/10000 train_time:1542382ms step_avg:236.53ms +[2025-07-17 13:00:38] [Rank 0] step:6541/10000 train_time:1547286ms step_avg:236.55ms +[2025-07-17 13:00:38] [Rank 0] step:6541/10000 train_time:1547286ms step_avg:236.55ms +[2025-07-17 13:00:43] [Rank 0] step:6561/10000 train_time:1552203ms step_avg:236.58ms +[2025-07-17 13:00:43] [Rank 0] step:6561/10000 train_time:1552203ms step_avg:236.58ms +[2025-07-17 13:00:48] [Rank 0] step:6581/10000 train_time:1557115ms step_avg:236.61ms +[2025-07-17 13:00:48] [Rank 0] step:6581/10000 train_time:1557115ms step_avg:236.61ms +[2025-07-17 13:00:53] [Rank 0] step:6601/10000 train_time:1562033ms step_avg:236.64ms +[2025-07-17 13:00:53] [Rank 0] step:6601/10000 train_time:1562033ms step_avg:236.64ms +[2025-07-17 13:00:58] [Rank 0] step:6621/10000 train_time:1567449ms step_avg:236.74ms +[2025-07-17 13:00:58] [Rank 0] step:6621/10000 train_time:1567449ms step_avg:236.74ms +[2025-07-17 13:01:04] [Rank 0] PRINT: step:6625/10000 val_loss:3.9398 train_time:1568683ms step_avg:236.78ms +[2025-07-17 13:01:04] [Rank 0] PRINT: step:6625/10000 val_loss:3.9398 train_time:1568683ms step_avg:236.78ms +[2025-07-17 13:01:08] [Rank 0] step:6641/10000 train_time:1572347ms step_avg:236.76ms +[2025-07-17 13:01:08] [Rank 0] step:6641/10000 train_time:1572347ms step_avg:236.76ms +[2025-07-17 13:01:13] [Rank 0] step:6661/10000 train_time:1577250ms step_avg:236.79ms +[2025-07-17 13:01:13] [Rank 0] step:6661/10000 train_time:1577250ms step_avg:236.79ms +[2025-07-17 13:01:18] [Rank 0] step:6681/10000 train_time:1582205ms step_avg:236.82ms +[2025-07-17 13:01:18] [Rank 0] step:6681/10000 train_time:1582205ms step_avg:236.82ms +[2025-07-17 13:01:23] [Rank 0] step:6701/10000 train_time:1587174ms 
step_avg:236.86ms +[2025-07-17 13:01:23] [Rank 0] step:6701/10000 train_time:1587174ms step_avg:236.86ms +[2025-07-17 13:01:28] [Rank 0] step:6721/10000 train_time:1592160ms step_avg:236.89ms +[2025-07-17 13:01:28] [Rank 0] step:6721/10000 train_time:1592160ms step_avg:236.89ms +[2025-07-17 13:01:33] [Rank 0] step:6741/10000 train_time:1597149ms step_avg:236.93ms +[2025-07-17 13:01:33] [Rank 0] step:6741/10000 train_time:1597149ms step_avg:236.93ms +[2025-07-17 13:01:39] [Rank 0] PRINT: step:6750/10000 val_loss:3.6986 train_time:1599636ms step_avg:236.98ms +[2025-07-17 13:01:39] [Rank 0] PRINT: step:6750/10000 val_loss:3.6986 train_time:1599636ms step_avg:236.98ms +[2025-07-17 13:01:42] [Rank 0] step:6761/10000 train_time:1602117ms step_avg:236.96ms +[2025-07-17 13:01:42] [Rank 0] step:6761/10000 train_time:1602117ms step_avg:236.96ms +[2025-07-17 13:01:47] [Rank 0] step:6781/10000 train_time:1607092ms step_avg:237.00ms +[2025-07-17 13:01:47] [Rank 0] step:6781/10000 train_time:1607092ms step_avg:237.00ms +[2025-07-17 13:01:52] [Rank 0] step:6801/10000 train_time:1612069ms step_avg:237.03ms +[2025-07-17 13:01:52] [Rank 0] step:6801/10000 train_time:1612069ms step_avg:237.03ms +[2025-07-17 13:01:57] [Rank 0] step:6821/10000 train_time:1617043ms step_avg:237.07ms +[2025-07-17 13:01:57] [Rank 0] step:6821/10000 train_time:1617043ms step_avg:237.07ms +[2025-07-17 13:02:02] [Rank 0] step:6841/10000 train_time:1622017ms step_avg:237.10ms +[2025-07-17 13:02:02] [Rank 0] step:6841/10000 train_time:1622017ms step_avg:237.10ms +[2025-07-17 13:02:07] [Rank 0] step:6861/10000 train_time:1626984ms step_avg:237.14ms +[2025-07-17 13:02:07] [Rank 0] step:6861/10000 train_time:1626984ms step_avg:237.14ms +[2025-07-17 13:02:15] [Rank 0] PRINT: step:6875/10000 val_loss:3.7184 train_time:1630708ms step_avg:237.19ms +[2025-07-17 13:02:15] [Rank 0] PRINT: step:6875/10000 val_loss:3.7184 train_time:1630708ms step_avg:237.19ms +[2025-07-17 13:02:17] [Rank 0] step:6881/10000 
train_time:1631948ms step_avg:237.17ms +[2025-07-17 13:02:17] [Rank 0] step:6881/10000 train_time:1631948ms step_avg:237.17ms +[2025-07-17 13:02:22] [Rank 0] step:6901/10000 train_time:1636907ms step_avg:237.20ms +[2025-07-17 13:02:22] [Rank 0] step:6901/10000 train_time:1636907ms step_avg:237.20ms +[2025-07-17 13:02:27] [Rank 0] step:6921/10000 train_time:1641871ms step_avg:237.23ms +[2025-07-17 13:02:27] [Rank 0] step:6921/10000 train_time:1641871ms step_avg:237.23ms +[2025-07-17 13:02:32] [Rank 0] step:6941/10000 train_time:1646849ms step_avg:237.26ms +[2025-07-17 13:02:32] [Rank 0] step:6941/10000 train_time:1646849ms step_avg:237.26ms +[2025-07-17 13:02:37] [Rank 0] step:6961/10000 train_time:1651822ms step_avg:237.30ms +[2025-07-17 13:02:37] [Rank 0] step:6961/10000 train_time:1651822ms step_avg:237.30ms +[2025-07-17 13:02:42] [Rank 0] step:6981/10000 train_time:1656795ms step_avg:237.33ms +[2025-07-17 13:02:42] [Rank 0] step:6981/10000 train_time:1656795ms step_avg:237.33ms +[2025-07-17 13:02:51] [Rank 0] PRINT: step:7000/10000 val_loss:3.6762 train_time:1661767ms step_avg:237.40ms +[2025-07-17 13:02:51] [Rank 0] PRINT: step:7000/10000 val_loss:3.6762 train_time:1661767ms step_avg:237.40ms +[2025-07-17 13:02:51] [Rank 0] step:7001/10000 train_time:1661782ms step_avg:237.36ms +[2025-07-17 13:02:51] [Rank 0] step:7001/10000 train_time:1661782ms step_avg:237.36ms +[2025-07-17 13:02:56] [Rank 0] step:7021/10000 train_time:1666731ms step_avg:237.39ms +[2025-07-17 13:02:56] [Rank 0] step:7021/10000 train_time:1666731ms step_avg:237.39ms +[2025-07-17 13:03:01] [Rank 0] step:7041/10000 train_time:1671700ms step_avg:237.42ms +[2025-07-17 13:03:01] [Rank 0] step:7041/10000 train_time:1671700ms step_avg:237.42ms +[2025-07-17 13:03:06] [Rank 0] step:7061/10000 train_time:1676665ms step_avg:237.45ms +[2025-07-17 13:03:06] [Rank 0] step:7061/10000 train_time:1676665ms step_avg:237.45ms +[2025-07-17 13:03:11] [Rank 0] step:7081/10000 train_time:1681635ms step_avg:237.49ms 
+[2025-07-17 13:03:11] [Rank 0] step:7081/10000 train_time:1681635ms step_avg:237.49ms +[2025-07-17 13:03:16] [Rank 0] step:7101/10000 train_time:1686596ms step_avg:237.52ms +[2025-07-17 13:03:16] [Rank 0] step:7101/10000 train_time:1686596ms step_avg:237.52ms +[2025-07-17 13:03:22] [Rank 0] step:7121/10000 train_time:1692075ms step_avg:237.62ms +[2025-07-17 13:03:22] [Rank 0] step:7121/10000 train_time:1692075ms step_avg:237.62ms +[2025-07-17 13:03:27] [Rank 0] PRINT: step:7125/10000 val_loss:3.7301 train_time:1693319ms step_avg:237.66ms +[2025-07-17 13:03:27] [Rank 0] PRINT: step:7125/10000 val_loss:3.7301 train_time:1693319ms step_avg:237.66ms +[2025-07-17 13:03:31] [Rank 0] step:7141/10000 train_time:1697043ms step_avg:237.65ms +[2025-07-17 13:03:31] [Rank 0] step:7141/10000 train_time:1697043ms step_avg:237.65ms +[2025-07-17 13:03:36] [Rank 0] step:7161/10000 train_time:1702013ms step_avg:237.68ms +[2025-07-17 13:03:36] [Rank 0] step:7161/10000 train_time:1702013ms step_avg:237.68ms +[2025-07-17 13:03:41] [Rank 0] step:7181/10000 train_time:1706975ms step_avg:237.71ms +[2025-07-17 13:03:41] [Rank 0] step:7181/10000 train_time:1706975ms step_avg:237.71ms +[2025-07-17 13:03:46] [Rank 0] step:7201/10000 train_time:1711950ms step_avg:237.74ms +[2025-07-17 13:03:46] [Rank 0] step:7201/10000 train_time:1711950ms step_avg:237.74ms +[2025-07-17 13:03:51] [Rank 0] step:7221/10000 train_time:1716914ms step_avg:237.77ms +[2025-07-17 13:03:51] [Rank 0] step:7221/10000 train_time:1716914ms step_avg:237.77ms +[2025-07-17 13:03:56] [Rank 0] step:7241/10000 train_time:1721883ms step_avg:237.80ms +[2025-07-17 13:03:56] [Rank 0] step:7241/10000 train_time:1721883ms step_avg:237.80ms +[2025-07-17 13:04:03] [Rank 0] PRINT: step:7250/10000 val_loss:3.7367 train_time:1724369ms step_avg:237.84ms +[2025-07-17 13:04:03] [Rank 0] PRINT: step:7250/10000 val_loss:3.7367 train_time:1724369ms step_avg:237.84ms +[2025-07-17 13:04:06] [Rank 0] step:7261/10000 train_time:1726842ms 
step_avg:237.82ms +[2025-07-17 13:04:06] [Rank 0] step:7261/10000 train_time:1726842ms step_avg:237.82ms +[2025-07-17 13:04:11] [Rank 0] step:7281/10000 train_time:1731809ms step_avg:237.85ms +[2025-07-17 13:04:11] [Rank 0] step:7281/10000 train_time:1731809ms step_avg:237.85ms +[2025-07-17 13:04:16] [Rank 0] step:7301/10000 train_time:1736772ms step_avg:237.88ms +[2025-07-17 13:04:16] [Rank 0] step:7301/10000 train_time:1736772ms step_avg:237.88ms +[2025-07-17 13:04:21] [Rank 0] step:7321/10000 train_time:1741753ms step_avg:237.91ms +[2025-07-17 13:04:21] [Rank 0] step:7321/10000 train_time:1741753ms step_avg:237.91ms +[2025-07-17 13:04:26] [Rank 0] step:7341/10000 train_time:1746719ms step_avg:237.94ms +[2025-07-17 13:04:26] [Rank 0] step:7341/10000 train_time:1746719ms step_avg:237.94ms +[2025-07-17 13:04:31] [Rank 0] step:7361/10000 train_time:1751692ms step_avg:237.97ms +[2025-07-17 13:04:31] [Rank 0] step:7361/10000 train_time:1751692ms step_avg:237.97ms +[2025-07-17 13:04:39] [Rank 0] PRINT: step:7375/10000 val_loss:3.7612 train_time:1755426ms step_avg:238.02ms +[2025-07-17 13:04:39] [Rank 0] PRINT: step:7375/10000 val_loss:3.7612 train_time:1755426ms step_avg:238.02ms +[2025-07-17 13:04:40] [Rank 0] step:7381/10000 train_time:1756662ms step_avg:238.00ms +[2025-07-17 13:04:40] [Rank 0] step:7381/10000 train_time:1756662ms step_avg:238.00ms +[2025-07-17 13:04:45] [Rank 0] step:7401/10000 train_time:1761629ms step_avg:238.03ms +[2025-07-17 13:04:45] [Rank 0] step:7401/10000 train_time:1761629ms step_avg:238.03ms +[2025-07-17 13:04:50] [Rank 0] step:7421/10000 train_time:1766594ms step_avg:238.05ms +[2025-07-17 13:04:50] [Rank 0] step:7421/10000 train_time:1766594ms step_avg:238.05ms +[2025-07-17 13:04:55] [Rank 0] step:7441/10000 train_time:1771570ms step_avg:238.08ms +[2025-07-17 13:04:55] [Rank 0] step:7441/10000 train_time:1771570ms step_avg:238.08ms +[2025-07-17 13:05:00] [Rank 0] step:7461/10000 train_time:1776534ms step_avg:238.11ms +[2025-07-17 
13:05:00] [Rank 0] step:7461/10000 train_time:1776534ms step_avg:238.11ms +[2025-07-17 13:05:05] [Rank 0] step:7481/10000 train_time:1781507ms step_avg:238.14ms +[2025-07-17 13:05:05] [Rank 0] step:7481/10000 train_time:1781507ms step_avg:238.14ms +[2025-07-17 13:05:15] [Rank 0] PRINT: step:7500/10000 val_loss:3.7855 train_time:1786485ms step_avg:238.20ms +[2025-07-17 13:05:15] [Rank 0] PRINT: step:7500/10000 val_loss:3.7855 train_time:1786485ms step_avg:238.20ms +[2025-07-17 13:05:15] [Rank 0] step:7501/10000 train_time:1786500ms step_avg:238.17ms +[2025-07-17 13:05:15] [Rank 0] step:7501/10000 train_time:1786500ms step_avg:238.17ms +[2025-07-17 13:05:20] [Rank 0] step:7521/10000 train_time:1791452ms step_avg:238.19ms +[2025-07-17 13:05:20] [Rank 0] step:7521/10000 train_time:1791452ms step_avg:238.19ms +[2025-07-17 13:05:25] [Rank 0] step:7541/10000 train_time:1796412ms step_avg:238.22ms +[2025-07-17 13:05:25] [Rank 0] step:7541/10000 train_time:1796412ms step_avg:238.22ms +[2025-07-17 13:05:30] [Rank 0] step:7561/10000 train_time:1801374ms step_avg:238.25ms +[2025-07-17 13:05:30] [Rank 0] step:7561/10000 train_time:1801374ms step_avg:238.25ms +[2025-07-17 13:05:35] [Rank 0] step:7581/10000 train_time:1806342ms step_avg:238.27ms +[2025-07-17 13:05:35] [Rank 0] step:7581/10000 train_time:1806342ms step_avg:238.27ms +[2025-07-17 13:05:40] [Rank 0] step:7601/10000 train_time:1811323ms step_avg:238.30ms +[2025-07-17 13:05:40] [Rank 0] step:7601/10000 train_time:1811323ms step_avg:238.30ms +[2025-07-17 13:05:45] [Rank 0] step:7621/10000 train_time:1816414ms step_avg:238.34ms +[2025-07-17 13:05:45] [Rank 0] step:7621/10000 train_time:1816414ms step_avg:238.34ms +[2025-07-17 13:05:51] [Rank 0] PRINT: step:7625/10000 val_loss:3.7639 train_time:1818055ms step_avg:238.43ms +[2025-07-17 13:05:51] [Rank 0] PRINT: step:7625/10000 val_loss:3.7639 train_time:1818055ms step_avg:238.43ms +[2025-07-17 13:05:55] [Rank 0] step:7641/10000 train_time:1821775ms step_avg:238.42ms 
+[2025-07-17 13:05:55] [Rank 0] step:7641/10000 train_time:1821775ms step_avg:238.42ms +[2025-07-17 13:06:00] [Rank 0] step:7661/10000 train_time:1826759ms step_avg:238.45ms +[2025-07-17 13:06:00] [Rank 0] step:7661/10000 train_time:1826759ms step_avg:238.45ms +[2025-07-17 13:06:05] [Rank 0] step:7681/10000 train_time:1831755ms step_avg:238.48ms +[2025-07-17 13:06:05] [Rank 0] step:7681/10000 train_time:1831755ms step_avg:238.48ms +[2025-07-17 13:06:10] [Rank 0] step:7701/10000 train_time:1836731ms step_avg:238.51ms +[2025-07-17 13:06:10] [Rank 0] step:7701/10000 train_time:1836731ms step_avg:238.51ms +[2025-07-17 13:06:15] [Rank 0] step:7721/10000 train_time:1841709ms step_avg:238.53ms +[2025-07-17 13:06:15] [Rank 0] step:7721/10000 train_time:1841709ms step_avg:238.53ms +[2025-07-17 13:06:20] [Rank 0] step:7741/10000 train_time:1846688ms step_avg:238.56ms +[2025-07-17 13:06:20] [Rank 0] step:7741/10000 train_time:1846688ms step_avg:238.56ms +[2025-07-17 13:06:27] [Rank 0] PRINT: step:7750/10000 val_loss:3.8448 train_time:1849193ms step_avg:238.61ms +[2025-07-17 13:06:27] [Rank 0] PRINT: step:7750/10000 val_loss:3.8448 train_time:1849193ms step_avg:238.61ms +[2025-07-17 13:06:30] [Rank 0] step:7761/10000 train_time:1851677ms step_avg:238.59ms +[2025-07-17 13:06:30] [Rank 0] step:7761/10000 train_time:1851677ms step_avg:238.59ms +[2025-07-17 13:06:35] [Rank 0] step:7781/10000 train_time:1856659ms step_avg:238.61ms +[2025-07-17 13:06:35] [Rank 0] step:7781/10000 train_time:1856659ms step_avg:238.61ms +[2025-07-17 13:06:40] [Rank 0] step:7801/10000 train_time:1861640ms step_avg:238.64ms +[2025-07-17 13:06:40] [Rank 0] step:7801/10000 train_time:1861640ms step_avg:238.64ms +[2025-07-17 13:06:45] [Rank 0] step:7821/10000 train_time:1866618ms step_avg:238.67ms +[2025-07-17 13:06:45] [Rank 0] step:7821/10000 train_time:1866618ms step_avg:238.67ms +[2025-07-17 13:06:50] [Rank 0] step:7841/10000 train_time:1871595ms step_avg:238.69ms +[2025-07-17 13:06:50] [Rank 0] 
step:7841/10000 train_time:1871595ms step_avg:238.69ms +[2025-07-17 13:06:55] [Rank 0] step:7861/10000 train_time:1876560ms step_avg:238.72ms +[2025-07-17 13:06:55] [Rank 0] step:7861/10000 train_time:1876560ms step_avg:238.72ms +[2025-07-17 13:07:03] [Rank 0] PRINT: step:7875/10000 val_loss:3.7616 train_time:1880292ms step_avg:238.77ms +[2025-07-17 13:07:03] [Rank 0] PRINT: step:7875/10000 val_loss:3.7616 train_time:1880292ms step_avg:238.77ms +[2025-07-17 13:07:04] [Rank 0] step:7881/10000 train_time:1881527ms step_avg:238.74ms +[2025-07-17 13:07:04] [Rank 0] step:7881/10000 train_time:1881527ms step_avg:238.74ms +[2025-07-17 13:07:09] [Rank 0] step:7901/10000 train_time:1886497ms step_avg:238.77ms +[2025-07-17 13:07:09] [Rank 0] step:7901/10000 train_time:1886497ms step_avg:238.77ms +[2025-07-17 13:07:14] [Rank 0] step:7921/10000 train_time:1891470ms step_avg:238.79ms +[2025-07-17 13:07:14] [Rank 0] step:7921/10000 train_time:1891470ms step_avg:238.79ms +[2025-07-17 13:07:19] [Rank 0] step:7941/10000 train_time:1896453ms step_avg:238.82ms +[2025-07-17 13:07:19] [Rank 0] step:7941/10000 train_time:1896453ms step_avg:238.82ms +[2025-07-17 13:07:24] [Rank 0] step:7961/10000 train_time:1901437ms step_avg:238.84ms +[2025-07-17 13:07:24] [Rank 0] step:7961/10000 train_time:1901437ms step_avg:238.84ms +[2025-07-17 13:07:29] [Rank 0] step:7981/10000 train_time:1906406ms step_avg:238.87ms +[2025-07-17 13:07:29] [Rank 0] step:7981/10000 train_time:1906406ms step_avg:238.87ms +[2025-07-17 13:07:39] [Rank 0] PRINT: step:8000/10000 val_loss:3.7476 train_time:1911395ms step_avg:238.92ms +[2025-07-17 13:07:39] [Rank 0] PRINT: step:8000/10000 val_loss:3.7476 train_time:1911395ms step_avg:238.92ms +[2025-07-17 13:07:39] [Rank 0] step:8001/10000 train_time:1911410ms step_avg:238.90ms +[2025-07-17 13:07:39] [Rank 0] step:8001/10000 train_time:1911410ms step_avg:238.90ms +[2025-07-17 13:07:44] [Rank 0] step:8021/10000 train_time:1916358ms step_avg:238.92ms +[2025-07-17 13:07:44] 
[Rank 0] step:8021/10000 train_time:1916358ms step_avg:238.92ms +[2025-07-17 13:07:49] [Rank 0] step:8041/10000 train_time:1921350ms step_avg:238.94ms +[2025-07-17 13:07:49] [Rank 0] step:8041/10000 train_time:1921350ms step_avg:238.94ms +[2025-07-17 13:07:54] [Rank 0] step:8061/10000 train_time:1926317ms step_avg:238.97ms +[2025-07-17 13:07:54] [Rank 0] step:8061/10000 train_time:1926317ms step_avg:238.97ms +[2025-07-17 13:07:59] [Rank 0] step:8081/10000 train_time:1931297ms step_avg:238.99ms +[2025-07-17 13:07:59] [Rank 0] step:8081/10000 train_time:1931297ms step_avg:238.99ms +[2025-07-17 13:08:04] [Rank 0] step:8101/10000 train_time:1936266ms step_avg:239.02ms +[2025-07-17 13:08:04] [Rank 0] step:8101/10000 train_time:1936266ms step_avg:239.02ms +[2025-07-17 13:08:09] [Rank 0] step:8121/10000 train_time:1941235ms step_avg:239.04ms +[2025-07-17 13:08:09] [Rank 0] step:8121/10000 train_time:1941235ms step_avg:239.04ms +[2025-07-17 13:08:15] [Rank 0] PRINT: step:8125/10000 val_loss:3.7362 train_time:1942484ms step_avg:239.07ms +[2025-07-17 13:08:15] [Rank 0] PRINT: step:8125/10000 val_loss:3.7362 train_time:1942484ms step_avg:239.07ms +[2025-07-17 13:08:19] [Rank 0] step:8141/10000 train_time:1946697ms step_avg:239.12ms +[2025-07-17 13:08:19] [Rank 0] step:8141/10000 train_time:1946697ms step_avg:239.12ms +[2025-07-17 13:08:24] [Rank 0] step:8161/10000 train_time:1951711ms step_avg:239.15ms +[2025-07-17 13:08:24] [Rank 0] step:8161/10000 train_time:1951711ms step_avg:239.15ms +[2025-07-17 13:08:29] [Rank 0] step:8181/10000 train_time:1956755ms step_avg:239.18ms +[2025-07-17 13:08:29] [Rank 0] step:8181/10000 train_time:1956755ms step_avg:239.18ms +[2025-07-17 13:08:34] [Rank 0] step:8201/10000 train_time:1961779ms step_avg:239.21ms +[2025-07-17 13:08:34] [Rank 0] step:8201/10000 train_time:1961779ms step_avg:239.21ms +[2025-07-17 13:08:39] [Rank 0] step:8221/10000 train_time:1966818ms step_avg:239.24ms +[2025-07-17 13:08:39] [Rank 0] step:8221/10000 
train_time:1966818ms step_avg:239.24ms +[2025-07-17 13:08:44] [Rank 0] step:8241/10000 train_time:1971856ms step_avg:239.27ms +[2025-07-17 13:08:44] [Rank 0] step:8241/10000 train_time:1971856ms step_avg:239.27ms +[2025-07-17 13:08:51] [Rank 0] PRINT: step:8250/10000 val_loss:3.6639 train_time:1974386ms step_avg:239.32ms +[2025-07-17 13:08:51] [Rank 0] PRINT: step:8250/10000 val_loss:3.6639 train_time:1974386ms step_avg:239.32ms +[2025-07-17 13:08:54] [Rank 0] step:8261/10000 train_time:1976905ms step_avg:239.31ms +[2025-07-17 13:08:54] [Rank 0] step:8261/10000 train_time:1976905ms step_avg:239.31ms +[2025-07-17 13:08:59] [Rank 0] step:8281/10000 train_time:1981970ms step_avg:239.34ms +[2025-07-17 13:08:59] [Rank 0] step:8281/10000 train_time:1981970ms step_avg:239.34ms +[2025-07-17 13:09:04] [Rank 0] step:8301/10000 train_time:1987005ms step_avg:239.37ms +[2025-07-17 13:09:04] [Rank 0] step:8301/10000 train_time:1987005ms step_avg:239.37ms +[2025-07-17 13:09:09] [Rank 0] step:8321/10000 train_time:1992054ms step_avg:239.40ms +[2025-07-17 13:09:09] [Rank 0] step:8321/10000 train_time:1992054ms step_avg:239.40ms +[2025-07-17 13:09:14] [Rank 0] step:8341/10000 train_time:1997108ms step_avg:239.43ms +[2025-07-17 13:09:14] [Rank 0] step:8341/10000 train_time:1997108ms step_avg:239.43ms +[2025-07-17 13:09:19] [Rank 0] step:8361/10000 train_time:2002148ms step_avg:239.46ms +[2025-07-17 13:09:19] [Rank 0] step:8361/10000 train_time:2002148ms step_avg:239.46ms +[2025-07-17 13:09:27] [Rank 0] PRINT: step:8375/10000 val_loss:3.6817 train_time:2005934ms step_avg:239.51ms +[2025-07-17 13:09:27] [Rank 0] PRINT: step:8375/10000 val_loss:3.6817 train_time:2005934ms step_avg:239.51ms +[2025-07-17 13:09:29] [Rank 0] step:8381/10000 train_time:2007179ms step_avg:239.49ms +[2025-07-17 13:09:29] [Rank 0] step:8381/10000 train_time:2007179ms step_avg:239.49ms +[2025-07-17 13:09:34] [Rank 0] step:8401/10000 train_time:2012203ms step_avg:239.52ms +[2025-07-17 13:09:34] [Rank 0] 
step:8401/10000 train_time:2012203ms step_avg:239.52ms +[2025-07-17 13:09:39] [Rank 0] step:8421/10000 train_time:2017249ms step_avg:239.55ms +[2025-07-17 13:09:39] [Rank 0] step:8421/10000 train_time:2017249ms step_avg:239.55ms +[2025-07-17 13:09:44] [Rank 0] step:8441/10000 train_time:2022297ms step_avg:239.58ms +[2025-07-17 13:09:44] [Rank 0] step:8441/10000 train_time:2022297ms step_avg:239.58ms +[2025-07-17 13:09:49] [Rank 0] step:8461/10000 train_time:2027353ms step_avg:239.61ms +[2025-07-17 13:09:49] [Rank 0] step:8461/10000 train_time:2027353ms step_avg:239.61ms +[2025-07-17 13:09:54] [Rank 0] step:8481/10000 train_time:2032385ms step_avg:239.64ms +[2025-07-17 13:09:54] [Rank 0] step:8481/10000 train_time:2032385ms step_avg:239.64ms +[2025-07-17 13:10:03] [Rank 0] PRINT: step:8500/10000 val_loss:3.7089 train_time:2037433ms step_avg:239.70ms +[2025-07-17 13:10:03] [Rank 0] PRINT: step:8500/10000 val_loss:3.7089 train_time:2037433ms step_avg:239.70ms +[2025-07-17 13:10:04] [Rank 0] step:8501/10000 train_time:2037449ms step_avg:239.67ms +[2025-07-17 13:10:04] [Rank 0] step:8501/10000 train_time:2037449ms step_avg:239.67ms +[2025-07-17 13:10:09] [Rank 0] step:8521/10000 train_time:2042465ms step_avg:239.70ms +[2025-07-17 13:10:09] [Rank 0] step:8521/10000 train_time:2042465ms step_avg:239.70ms +[2025-07-17 13:10:14] [Rank 0] step:8541/10000 train_time:2047517ms step_avg:239.73ms +[2025-07-17 13:10:14] [Rank 0] step:8541/10000 train_time:2047517ms step_avg:239.73ms +[2025-07-17 13:10:19] [Rank 0] step:8561/10000 train_time:2052543ms step_avg:239.76ms +[2025-07-17 13:10:19] [Rank 0] step:8561/10000 train_time:2052543ms step_avg:239.76ms +[2025-07-17 13:10:24] [Rank 0] step:8581/10000 train_time:2057587ms step_avg:239.78ms +[2025-07-17 13:10:24] [Rank 0] step:8581/10000 train_time:2057587ms step_avg:239.78ms +[2025-07-17 13:10:29] [Rank 0] step:8601/10000 train_time:2062597ms step_avg:239.81ms +[2025-07-17 13:10:29] [Rank 0] step:8601/10000 train_time:2062597ms 
step_avg:239.81ms +[2025-07-17 13:10:34] [Rank 0] step:8621/10000 train_time:2067623ms step_avg:239.84ms +[2025-07-17 13:10:34] [Rank 0] step:8621/10000 train_time:2067623ms step_avg:239.84ms +[2025-07-17 13:10:40] [Rank 0] PRINT: step:8625/10000 val_loss:3.6955 train_time:2068886ms step_avg:239.87ms +[2025-07-17 13:10:40] [Rank 0] PRINT: step:8625/10000 val_loss:3.6955 train_time:2068886ms step_avg:239.87ms +[2025-07-17 13:10:44] [Rank 0] step:8641/10000 train_time:2073167ms step_avg:239.92ms +[2025-07-17 13:10:44] [Rank 0] step:8641/10000 train_time:2073167ms step_avg:239.92ms +[2025-07-17 13:10:49] [Rank 0] step:8661/10000 train_time:2078199ms step_avg:239.95ms +[2025-07-17 13:10:49] [Rank 0] step:8661/10000 train_time:2078199ms step_avg:239.95ms +[2025-07-17 13:10:54] [Rank 0] step:8681/10000 train_time:2083229ms step_avg:239.98ms +[2025-07-17 13:10:54] [Rank 0] step:8681/10000 train_time:2083229ms step_avg:239.98ms +[2025-07-17 13:10:59] [Rank 0] step:8701/10000 train_time:2088271ms step_avg:240.00ms +[2025-07-17 13:10:59] [Rank 0] step:8701/10000 train_time:2088271ms step_avg:240.00ms +[2025-07-17 13:11:04] [Rank 0] step:8721/10000 train_time:2093315ms step_avg:240.03ms +[2025-07-17 13:11:04] [Rank 0] step:8721/10000 train_time:2093315ms step_avg:240.03ms +[2025-07-17 13:11:09] [Rank 0] step:8741/10000 train_time:2098351ms step_avg:240.06ms +[2025-07-17 13:11:09] [Rank 0] step:8741/10000 train_time:2098351ms step_avg:240.06ms +[2025-07-17 13:11:16] [Rank 0] PRINT: step:8750/10000 val_loss:3.7561 train_time:2100865ms step_avg:240.10ms +[2025-07-17 13:11:16] [Rank 0] PRINT: step:8750/10000 val_loss:3.7561 train_time:2100865ms step_avg:240.10ms +[2025-07-17 13:11:19] [Rank 0] step:8761/10000 train_time:2103370ms step_avg:240.08ms +[2025-07-17 13:11:19] [Rank 0] step:8761/10000 train_time:2103370ms step_avg:240.08ms +[2025-07-17 13:11:24] [Rank 0] step:8781/10000 train_time:2108394ms step_avg:240.11ms +[2025-07-17 13:11:24] [Rank 0] step:8781/10000 
train_time:2108394ms step_avg:240.11ms +[2025-07-17 13:11:29] [Rank 0] step:8801/10000 train_time:2113431ms step_avg:240.14ms +[2025-07-17 13:11:29] [Rank 0] step:8801/10000 train_time:2113431ms step_avg:240.14ms +[2025-07-17 13:11:34] [Rank 0] step:8821/10000 train_time:2118460ms step_avg:240.16ms +[2025-07-17 13:11:34] [Rank 0] step:8821/10000 train_time:2118460ms step_avg:240.16ms +[2025-07-17 13:11:39] [Rank 0] step:8841/10000 train_time:2123511ms step_avg:240.19ms +[2025-07-17 13:11:39] [Rank 0] step:8841/10000 train_time:2123511ms step_avg:240.19ms +[2025-07-17 13:11:44] [Rank 0] step:8861/10000 train_time:2128556ms step_avg:240.22ms +[2025-07-17 13:11:44] [Rank 0] step:8861/10000 train_time:2128556ms step_avg:240.22ms +[2025-07-17 13:11:53] [Rank 0] PRINT: step:8875/10000 val_loss:3.6506 train_time:2132323ms step_avg:240.26ms +[2025-07-17 13:11:53] [Rank 0] PRINT: step:8875/10000 val_loss:3.6506 train_time:2132323ms step_avg:240.26ms +[2025-07-17 13:11:54] [Rank 0] step:8881/10000 train_time:2133573ms step_avg:240.24ms +[2025-07-17 13:11:54] [Rank 0] step:8881/10000 train_time:2133573ms step_avg:240.24ms +[2025-07-17 13:11:59] [Rank 0] step:8901/10000 train_time:2138594ms step_avg:240.26ms +[2025-07-17 13:11:59] [Rank 0] step:8901/10000 train_time:2138594ms step_avg:240.26ms +[2025-07-17 13:12:04] [Rank 0] step:8921/10000 train_time:2143618ms step_avg:240.29ms +[2025-07-17 13:12:04] [Rank 0] step:8921/10000 train_time:2143618ms step_avg:240.29ms +[2025-07-17 13:12:09] [Rank 0] step:8941/10000 train_time:2148651ms step_avg:240.31ms +[2025-07-17 13:12:09] [Rank 0] step:8941/10000 train_time:2148651ms step_avg:240.31ms +[2025-07-17 13:12:14] [Rank 0] step:8961/10000 train_time:2153683ms step_avg:240.34ms +[2025-07-17 13:12:14] [Rank 0] step:8961/10000 train_time:2153683ms step_avg:240.34ms +[2025-07-17 13:12:19] [Rank 0] step:8981/10000 train_time:2158719ms step_avg:240.37ms +[2025-07-17 13:12:19] [Rank 0] step:8981/10000 train_time:2158719ms step_avg:240.37ms 
+[2025-07-17 13:12:29] [Rank 0] PRINT: step:9000/10000 val_loss:3.7332 train_time:2163756ms step_avg:240.42ms +[2025-07-17 13:12:29] [Rank 0] PRINT: step:9000/10000 val_loss:3.7332 train_time:2163756ms step_avg:240.42ms +[2025-07-17 13:12:29] [Rank 0] step:9001/10000 train_time:2163771ms step_avg:240.39ms +[2025-07-17 13:12:29] [Rank 0] step:9001/10000 train_time:2163771ms step_avg:240.39ms +[2025-07-17 13:12:34] [Rank 0] step:9021/10000 train_time:2168780ms step_avg:240.41ms +[2025-07-17 13:12:34] [Rank 0] step:9021/10000 train_time:2168780ms step_avg:240.41ms +[2025-07-17 13:12:39] [Rank 0] step:9041/10000 train_time:2173833ms step_avg:240.44ms +[2025-07-17 13:12:39] [Rank 0] step:9041/10000 train_time:2173833ms step_avg:240.44ms +[2025-07-17 13:12:44] [Rank 0] step:9061/10000 train_time:2178871ms step_avg:240.47ms +[2025-07-17 13:12:44] [Rank 0] step:9061/10000 train_time:2178871ms step_avg:240.47ms +[2025-07-17 13:12:49] [Rank 0] step:9081/10000 train_time:2183935ms step_avg:240.50ms +[2025-07-17 13:12:49] [Rank 0] step:9081/10000 train_time:2183935ms step_avg:240.50ms +[2025-07-17 13:12:54] [Rank 0] step:9101/10000 train_time:2188995ms step_avg:240.52ms +[2025-07-17 13:12:54] [Rank 0] step:9101/10000 train_time:2188995ms step_avg:240.52ms +[2025-07-17 13:12:59] [Rank 0] step:9121/10000 train_time:2194048ms step_avg:240.55ms +[2025-07-17 13:12:59] [Rank 0] step:9121/10000 train_time:2194048ms step_avg:240.55ms +[2025-07-17 13:13:05] [Rank 0] PRINT: step:9125/10000 val_loss:3.7127 train_time:2195308ms step_avg:240.58ms +[2025-07-17 13:13:05] [Rank 0] PRINT: step:9125/10000 val_loss:3.7127 train_time:2195308ms step_avg:240.58ms +[2025-07-17 13:13:09] [Rank 0] step:9141/10000 train_time:2199073ms step_avg:240.57ms +[2025-07-17 13:13:09] [Rank 0] step:9141/10000 train_time:2199073ms step_avg:240.57ms +[2025-07-17 13:13:15] [Rank 0] step:9161/10000 train_time:2204638ms step_avg:240.65ms +[2025-07-17 13:13:15] [Rank 0] step:9161/10000 train_time:2204638ms 
step_avg:240.65ms +[2025-07-17 13:13:20] [Rank 0] step:9181/10000 train_time:2209682ms step_avg:240.68ms +[2025-07-17 13:13:20] [Rank 0] step:9181/10000 train_time:2209682ms step_avg:240.68ms +[2025-07-17 13:13:25] [Rank 0] step:9201/10000 train_time:2214725ms step_avg:240.70ms +[2025-07-17 13:13:25] [Rank 0] step:9201/10000 train_time:2214725ms step_avg:240.70ms +[2025-07-17 13:13:30] [Rank 0] step:9221/10000 train_time:2219799ms step_avg:240.73ms +[2025-07-17 13:13:30] [Rank 0] step:9221/10000 train_time:2219799ms step_avg:240.73ms +[2025-07-17 13:13:35] [Rank 0] step:9241/10000 train_time:2224852ms step_avg:240.76ms +[2025-07-17 13:13:35] [Rank 0] step:9241/10000 train_time:2224852ms step_avg:240.76ms +[2025-07-17 13:13:42] [Rank 0] PRINT: step:9250/10000 val_loss:3.6732 train_time:2227382ms step_avg:240.80ms +[2025-07-17 13:13:42] [Rank 0] PRINT: step:9250/10000 val_loss:3.6732 train_time:2227382ms step_avg:240.80ms +[2025-07-17 13:13:45] [Rank 0] step:9261/10000 train_time:2229903ms step_avg:240.78ms +[2025-07-17 13:13:45] [Rank 0] step:9261/10000 train_time:2229903ms step_avg:240.78ms +[2025-07-17 13:13:50] [Rank 0] step:9281/10000 train_time:2234923ms step_avg:240.81ms +[2025-07-17 13:13:50] [Rank 0] step:9281/10000 train_time:2234923ms step_avg:240.81ms +[2025-07-17 13:13:55] [Rank 0] step:9301/10000 train_time:2239972ms step_avg:240.83ms +[2025-07-17 13:13:55] [Rank 0] step:9301/10000 train_time:2239972ms step_avg:240.83ms +[2025-07-17 13:14:00] [Rank 0] step:9321/10000 train_time:2245035ms step_avg:240.86ms +[2025-07-17 13:14:00] [Rank 0] step:9321/10000 train_time:2245035ms step_avg:240.86ms +[2025-07-17 13:14:05] [Rank 0] step:9341/10000 train_time:2250078ms step_avg:240.88ms +[2025-07-17 13:14:05] [Rank 0] step:9341/10000 train_time:2250078ms step_avg:240.88ms +[2025-07-17 13:14:10] [Rank 0] step:9361/10000 train_time:2255126ms step_avg:240.91ms +[2025-07-17 13:14:10] [Rank 0] step:9361/10000 train_time:2255126ms step_avg:240.91ms +[2025-07-17 
13:14:18] [Rank 0] PRINT: step:9375/10000 val_loss:3.7083 train_time:2258919ms step_avg:240.95ms +[2025-07-17 13:14:18] [Rank 0] PRINT: step:9375/10000 val_loss:3.7083 train_time:2258919ms step_avg:240.95ms +[2025-07-17 13:14:20] [Rank 0] step:9381/10000 train_time:2260171ms step_avg:240.93ms +[2025-07-17 13:14:20] [Rank 0] step:9381/10000 train_time:2260171ms step_avg:240.93ms +[2025-07-17 13:14:25] [Rank 0] step:9401/10000 train_time:2265197ms step_avg:240.95ms +[2025-07-17 13:14:25] [Rank 0] step:9401/10000 train_time:2265197ms step_avg:240.95ms +[2025-07-17 13:14:30] [Rank 0] step:9421/10000 train_time:2270243ms step_avg:240.98ms +[2025-07-17 13:14:30] [Rank 0] step:9421/10000 train_time:2270243ms step_avg:240.98ms +[2025-07-17 13:14:35] [Rank 0] step:9441/10000 train_time:2275289ms step_avg:241.00ms +[2025-07-17 13:14:35] [Rank 0] step:9441/10000 train_time:2275289ms step_avg:241.00ms +[2025-07-17 13:14:40] [Rank 0] step:9461/10000 train_time:2280347ms step_avg:241.03ms +[2025-07-17 13:14:40] [Rank 0] step:9461/10000 train_time:2280347ms step_avg:241.03ms +[2025-07-17 13:14:45] [Rank 0] step:9481/10000 train_time:2285398ms step_avg:241.05ms +[2025-07-17 13:14:45] [Rank 0] step:9481/10000 train_time:2285398ms step_avg:241.05ms +[2025-07-17 13:14:55] [Rank 0] PRINT: step:9500/10000 val_loss:3.7221 train_time:2290481ms step_avg:241.10ms +[2025-07-17 13:14:55] [Rank 0] PRINT: step:9500/10000 val_loss:3.7221 train_time:2290481ms step_avg:241.10ms +[2025-07-17 13:14:55] [Rank 0] step:9501/10000 train_time:2290497ms step_avg:241.08ms +[2025-07-17 13:14:55] [Rank 0] step:9501/10000 train_time:2290497ms step_avg:241.08ms +[2025-07-17 13:15:00] [Rank 0] step:9521/10000 train_time:2295528ms step_avg:241.10ms +[2025-07-17 13:15:00] [Rank 0] step:9521/10000 train_time:2295528ms step_avg:241.10ms +[2025-07-17 13:15:05] [Rank 0] step:9541/10000 train_time:2300587ms step_avg:241.13ms +[2025-07-17 13:15:05] [Rank 0] step:9541/10000 train_time:2300587ms step_avg:241.13ms 
+[2025-07-17 13:15:10] [Rank 0] step:9561/10000 train_time:2305632ms step_avg:241.15ms +[2025-07-17 13:15:10] [Rank 0] step:9561/10000 train_time:2305632ms step_avg:241.15ms +[2025-07-17 13:15:15] [Rank 0] step:9581/10000 train_time:2310674ms step_avg:241.17ms +[2025-07-17 13:15:15] [Rank 0] step:9581/10000 train_time:2310674ms step_avg:241.17ms +[2025-07-17 13:15:20] [Rank 0] step:9601/10000 train_time:2315719ms step_avg:241.20ms +[2025-07-17 13:15:20] [Rank 0] step:9601/10000 train_time:2315719ms step_avg:241.20ms +[2025-07-17 13:15:25] [Rank 0] step:9621/10000 train_time:2320805ms step_avg:241.22ms +[2025-07-17 13:15:25] [Rank 0] step:9621/10000 train_time:2320805ms step_avg:241.22ms +[2025-07-17 13:15:31] [Rank 0] PRINT: step:9625/10000 val_loss:3.7125 train_time:2322069ms step_avg:241.25ms +[2025-07-17 13:15:31] [Rank 0] PRINT: step:9625/10000 val_loss:3.7125 train_time:2322069ms step_avg:241.25ms +[2025-07-17 13:15:35] [Rank 0] step:9641/10000 train_time:2325882ms step_avg:241.25ms +[2025-07-17 13:15:35] [Rank 0] step:9641/10000 train_time:2325882ms step_avg:241.25ms +[2025-07-17 13:15:41] [Rank 0] step:9661/10000 train_time:2331498ms step_avg:241.33ms +[2025-07-17 13:15:41] [Rank 0] step:9661/10000 train_time:2331498ms step_avg:241.33ms +[2025-07-17 13:15:46] [Rank 0] step:9681/10000 train_time:2336612ms step_avg:241.36ms +[2025-07-17 13:15:46] [Rank 0] step:9681/10000 train_time:2336612ms step_avg:241.36ms +[2025-07-17 13:15:51] [Rank 0] step:9701/10000 train_time:2341732ms step_avg:241.39ms +[2025-07-17 13:15:51] [Rank 0] step:9701/10000 train_time:2341732ms step_avg:241.39ms +[2025-07-17 13:15:56] [Rank 0] step:9721/10000 train_time:2346832ms step_avg:241.42ms +[2025-07-17 13:15:56] [Rank 0] step:9721/10000 train_time:2346832ms step_avg:241.42ms +[2025-07-17 13:16:01] [Rank 0] step:9741/10000 train_time:2351955ms step_avg:241.45ms +[2025-07-17 13:16:01] [Rank 0] step:9741/10000 train_time:2351955ms step_avg:241.45ms +[2025-07-17 13:16:08] [Rank 0] PRINT: 
step:9750/10000 val_loss:3.7931 train_time:2354508ms step_avg:241.49ms +[2025-07-17 13:16:08] [Rank 0] PRINT: step:9750/10000 val_loss:3.7931 train_time:2354508ms step_avg:241.49ms +[2025-07-17 13:16:11] [Rank 0] step:9761/10000 train_time:2357050ms step_avg:241.48ms +[2025-07-17 13:16:11] [Rank 0] step:9761/10000 train_time:2357050ms step_avg:241.48ms +[2025-07-17 13:16:16] [Rank 0] step:9781/10000 train_time:2362157ms step_avg:241.50ms +[2025-07-17 13:16:16] [Rank 0] step:9781/10000 train_time:2362157ms step_avg:241.50ms +[2025-07-17 13:16:21] [Rank 0] step:9801/10000 train_time:2367254ms step_avg:241.53ms +[2025-07-17 13:16:21] [Rank 0] step:9801/10000 train_time:2367254ms step_avg:241.53ms +[2025-07-17 13:16:26] [Rank 0] step:9821/10000 train_time:2372360ms step_avg:241.56ms +[2025-07-17 13:16:26] [Rank 0] step:9821/10000 train_time:2372360ms step_avg:241.56ms +[2025-07-17 13:16:31] [Rank 0] step:9841/10000 train_time:2377454ms step_avg:241.59ms +[2025-07-17 13:16:31] [Rank 0] step:9841/10000 train_time:2377454ms step_avg:241.59ms +[2025-07-17 13:16:36] [Rank 0] step:9861/10000 train_time:2382542ms step_avg:241.61ms +[2025-07-17 13:16:36] [Rank 0] step:9861/10000 train_time:2382542ms step_avg:241.61ms +[2025-07-17 13:16:44] [Rank 0] PRINT: step:9875/10000 val_loss:3.8000 train_time:2386366ms step_avg:241.66ms +[2025-07-17 13:16:44] [Rank 0] PRINT: step:9875/10000 val_loss:3.8000 train_time:2386366ms step_avg:241.66ms +[2025-07-17 13:16:46] [Rank 0] step:9881/10000 train_time:2387642ms step_avg:241.64ms +[2025-07-17 13:16:46] [Rank 0] step:9881/10000 train_time:2387642ms step_avg:241.64ms +[2025-07-17 13:16:51] [Rank 0] step:9901/10000 train_time:2392742ms step_avg:241.67ms +[2025-07-17 13:16:51] [Rank 0] step:9901/10000 train_time:2392742ms step_avg:241.67ms +[2025-07-17 13:16:56] [Rank 0] step:9921/10000 train_time:2397851ms step_avg:241.69ms +[2025-07-17 13:16:56] [Rank 0] step:9921/10000 train_time:2397851ms step_avg:241.69ms +[2025-07-17 13:17:01] [Rank 0] 
step:9941/10000 train_time:2402988ms step_avg:241.73ms +[2025-07-17 13:17:01] [Rank 0] step:9941/10000 train_time:2402988ms step_avg:241.73ms +[2025-07-17 13:17:06] [Rank 0] step:9961/10000 train_time:2408103ms step_avg:241.75ms +[2025-07-17 13:17:06] [Rank 0] step:9961/10000 train_time:2408103ms step_avg:241.75ms +[2025-07-17 13:17:11] [Rank 0] step:9981/10000 train_time:2413241ms step_avg:241.78ms +[2025-07-17 13:17:11] [Rank 0] step:9981/10000 train_time:2413241ms step_avg:241.78ms +[2025-07-17 13:17:16] [Rank 0] step:10000/10000 train_time:2418073ms step_avg:241.81ms +[2025-07-17 13:17:16] [Rank 0] step:10000/10000 train_time:2418073ms step_avg:241.81ms +[2025-07-17 13:17:21] [Rank 0] PRINT: step:10000/10000 val_loss:3.7505 train_time:2418335ms step_avg:241.83ms +[2025-07-17 13:17:21] [Rank 0] PRINT: step:10000/10000 val_loss:3.7505 train_time:2418335ms step_avg:241.83ms +[2025-07-17 13:17:21] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 13:17:21 2025 --- +[2025-07-17 13:17:21] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 13:17:21 2025 --- +[2025-07-17 13:17:21] [Rank 0] PRINT: Peak memory allocated: 31029 MiB reserved: 31336 MiB +[2025-07-17 13:17:21] [Rank 0] PRINT: Peak memory allocated: 31029 MiB reserved: 31336 MiB diff --git a/logs_norope/diff_modes/mode_3_param_norope_seed_43/config.json b/logs_norope/diff_modes/mode_3_param_norope_seed_43/config.json new file mode 100644 index 0000000000000000000000000000000000000000..39e1923a25acf769e7638dca3328ff16a8b0abdb --- /dev/null +++ b/logs_norope/diff_modes/mode_3_param_norope_seed_43/config.json @@ -0,0 +1,22 @@ +{ + "cli_args": { + "unet": false, + "seed": 43, + "optimizer_mode": 3, + "model_parameterization": "norope" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 10485760, + "train_seq_len": 49152, + 
"val_seq_len": 262144, + "num_iterations": 10000, + "cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 125, + "save_checkpoint": false + }, + "run_uuid_for_log": "72dce7d6-b409-46e7-8eee-b6a6be15159f", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_norope/diff_modes/mode_3_param_norope_seed_43/training_log_72dce7d6-b409-46e7-8eee-b6a6be15159f.txt b/logs_norope/diff_modes/mode_3_param_norope_seed_43/training_log_72dce7d6-b409-46e7-8eee-b6a6be15159f.txt new file mode 100644 index 0000000000000000000000000000000000000000..266cc8d0118ecf662afae6a55b8f8d80ec3cf3bd --- /dev/null +++ b/logs_norope/diff_modes/mode_3_param_norope_seed_43/training_log_72dce7d6-b409-46e7-8eee-b6a6be15159f.txt @@ -0,0 +1,2360 @@ +[2025-07-17 19:41:38] [Rank 0] PRINT: --- Script Start: Thu Jul 17 19:41:38 2025 --- +[2025-07-17 19:41:38] [Rank 0] PRINT: --- Script Start: Thu Jul 17 19:41:38 2025 --- +[2025-07-17 19:41:38] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=3, model_parameterization='norope') +[2025-07-17 19:41:38] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=3, model_parameterization='norope') +[2025-07-17 19:41:38] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 19:41:38] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 19:41:38] [Rank 0] PRINT: Using fixed seed: 43 +[2025-07-17 19:41:38] [Rank 0] PRINT: Using fixed seed: 43 +[2025-07-17 19:41:38] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_3_param_norope_seed_43 +[2025-07-17 19:41:38] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_3_param_norope_seed_43 +[2025-07-17 19:41:38] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path 
+import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * 
num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used 
for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + 
config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 19:41:38] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + 
num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + 
run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 19:41:38] [Rank 0] PRINT: Constructing model... +[2025-07-17 19:41:38] [Rank 0] PRINT: Constructing model... +[2025-07-17 19:41:41] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 19:41:41] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 19:41:41] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 19:41:41] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 19:41:41] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 19:41:41] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 19:41:41] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 19:41:41] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 19:41:41] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 3 +[2025-07-17 19:41:41] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 3 +[2025-07-17 19:41:41] [Rank 0] PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: 0.001). +[2025-07-17 19:41:41] [Rank 0] PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: 0.001). +[2025-07-17 19:41:41] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-07-17 19:41:41] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-07-17 19:41:41] [Rank 0] PRINT: Muon optimizer is active with 44 parameters. +[2025-07-17 19:41:41] [Rank 0] PRINT: Muon optimizer is active with 44 parameters. +[2025-07-17 19:41:41] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 19:41:41] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 19:41:41] [Rank 0] PRINT: Model compilation complete. +[2025-07-17 19:41:41] [Rank 0] PRINT: Model compilation complete. +[2025-07-17 19:41:41] [Rank 0] PRINT: Starting warmup... 
+[2025-07-17 19:41:41] [Rank 0] PRINT: Starting warmup... +[2025-07-17 19:42:48] [Rank 0] PRINT: Warmup complete. +[2025-07-17 19:42:48] [Rank 0] PRINT: Warmup complete. +[2025-07-17 19:42:48] [Rank 0] PRINT: Starting training... +[2025-07-17 19:42:48] [Rank 0] PRINT: Starting training... +[2025-07-17 19:42:58] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 19:42:58] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 19:43:03] [Rank 0] step:21/10000 train_time:4746ms step_avg:225.99ms +[2025-07-17 19:43:03] [Rank 0] step:21/10000 train_time:4746ms step_avg:225.99ms +[2025-07-17 19:43:08] [Rank 0] step:41/10000 train_time:9204ms step_avg:224.49ms +[2025-07-17 19:43:08] [Rank 0] step:41/10000 train_time:9204ms step_avg:224.49ms +[2025-07-17 19:43:12] [Rank 0] step:61/10000 train_time:13678ms step_avg:224.22ms +[2025-07-17 19:43:12] [Rank 0] step:61/10000 train_time:13678ms step_avg:224.22ms +[2025-07-17 19:43:17] [Rank 0] step:81/10000 train_time:18156ms step_avg:224.15ms +[2025-07-17 19:43:17] [Rank 0] step:81/10000 train_time:18156ms step_avg:224.15ms +[2025-07-17 19:43:21] [Rank 0] step:101/10000 train_time:22634ms step_avg:224.10ms +[2025-07-17 19:43:21] [Rank 0] step:101/10000 train_time:22634ms step_avg:224.10ms +[2025-07-17 19:43:26] [Rank 0] step:121/10000 train_time:27112ms step_avg:224.06ms +[2025-07-17 19:43:26] [Rank 0] step:121/10000 train_time:27112ms step_avg:224.06ms +[2025-07-17 19:43:31] [Rank 0] PRINT: step:125/10000 val_loss:5.1818 train_time:28241ms step_avg:225.93ms +[2025-07-17 19:43:31] [Rank 0] PRINT: step:125/10000 val_loss:5.1818 train_time:28241ms step_avg:225.93ms +[2025-07-17 19:43:35] [Rank 0] step:141/10000 train_time:31596ms step_avg:224.09ms +[2025-07-17 19:43:35] [Rank 0] step:141/10000 train_time:31596ms step_avg:224.09ms +[2025-07-17 19:43:39] [Rank 0] step:161/10000 train_time:36084ms step_avg:224.12ms +[2025-07-17 19:43:39] [Rank 0] step:161/10000 
train_time:36084ms step_avg:224.12ms +[2025-07-17 19:43:44] [Rank 0] step:181/10000 train_time:40578ms step_avg:224.19ms +[2025-07-17 19:43:44] [Rank 0] step:181/10000 train_time:40578ms step_avg:224.19ms +[2025-07-17 19:43:48] [Rank 0] step:201/10000 train_time:45071ms step_avg:224.23ms +[2025-07-17 19:43:48] [Rank 0] step:201/10000 train_time:45071ms step_avg:224.23ms +[2025-07-17 19:43:53] [Rank 0] step:221/10000 train_time:49565ms step_avg:224.27ms +[2025-07-17 19:43:53] [Rank 0] step:221/10000 train_time:49565ms step_avg:224.27ms +[2025-07-17 19:43:57] [Rank 0] step:241/10000 train_time:54056ms step_avg:224.30ms +[2025-07-17 19:43:57] [Rank 0] step:241/10000 train_time:54056ms step_avg:224.30ms +[2025-07-17 19:44:04] [Rank 0] PRINT: step:250/10000 val_loss:4.7839 train_time:56311ms step_avg:225.24ms +[2025-07-17 19:44:04] [Rank 0] PRINT: step:250/10000 val_loss:4.7839 train_time:56311ms step_avg:225.24ms +[2025-07-17 19:44:06] [Rank 0] step:261/10000 train_time:58551ms step_avg:224.33ms +[2025-07-17 19:44:06] [Rank 0] step:261/10000 train_time:58551ms step_avg:224.33ms +[2025-07-17 19:44:11] [Rank 0] step:281/10000 train_time:63046ms step_avg:224.36ms +[2025-07-17 19:44:11] [Rank 0] step:281/10000 train_time:63046ms step_avg:224.36ms +[2025-07-17 19:44:15] [Rank 0] step:301/10000 train_time:67537ms step_avg:224.38ms +[2025-07-17 19:44:15] [Rank 0] step:301/10000 train_time:67537ms step_avg:224.38ms +[2025-07-17 19:44:20] [Rank 0] step:321/10000 train_time:72025ms step_avg:224.38ms +[2025-07-17 19:44:20] [Rank 0] step:321/10000 train_time:72025ms step_avg:224.38ms +[2025-07-17 19:44:24] [Rank 0] step:341/10000 train_time:76514ms step_avg:224.38ms +[2025-07-17 19:44:24] [Rank 0] step:341/10000 train_time:76514ms step_avg:224.38ms +[2025-07-17 19:44:29] [Rank 0] step:361/10000 train_time:81009ms step_avg:224.40ms +[2025-07-17 19:44:29] [Rank 0] step:361/10000 train_time:81009ms step_avg:224.40ms +[2025-07-17 19:44:36] [Rank 0] PRINT: step:375/10000 
val_loss:4.5746 train_time:84381ms step_avg:225.02ms +[2025-07-17 19:44:36] [Rank 0] PRINT: step:375/10000 val_loss:4.5746 train_time:84381ms step_avg:225.02ms +[2025-07-17 19:44:37] [Rank 0] step:381/10000 train_time:85498ms step_avg:224.40ms +[2025-07-17 19:44:37] [Rank 0] step:381/10000 train_time:85498ms step_avg:224.40ms +[2025-07-17 19:44:42] [Rank 0] step:401/10000 train_time:89987ms step_avg:224.41ms +[2025-07-17 19:44:42] [Rank 0] step:401/10000 train_time:89987ms step_avg:224.41ms +[2025-07-17 19:44:46] [Rank 0] step:421/10000 train_time:94479ms step_avg:224.41ms +[2025-07-17 19:44:46] [Rank 0] step:421/10000 train_time:94479ms step_avg:224.41ms +[2025-07-17 19:44:51] [Rank 0] step:441/10000 train_time:98970ms step_avg:224.42ms +[2025-07-17 19:44:51] [Rank 0] step:441/10000 train_time:98970ms step_avg:224.42ms +[2025-07-17 19:44:55] [Rank 0] step:461/10000 train_time:103468ms step_avg:224.44ms +[2025-07-17 19:44:55] [Rank 0] step:461/10000 train_time:103468ms step_avg:224.44ms +[2025-07-17 19:45:00] [Rank 0] step:481/10000 train_time:107964ms step_avg:224.46ms +[2025-07-17 19:45:00] [Rank 0] step:481/10000 train_time:107964ms step_avg:224.46ms +[2025-07-17 19:45:09] [Rank 0] PRINT: step:500/10000 val_loss:4.4653 train_time:112462ms step_avg:224.92ms +[2025-07-17 19:45:09] [Rank 0] PRINT: step:500/10000 val_loss:4.4653 train_time:112462ms step_avg:224.92ms +[2025-07-17 19:45:09] [Rank 0] step:501/10000 train_time:112477ms step_avg:224.51ms +[2025-07-17 19:45:09] [Rank 0] step:501/10000 train_time:112477ms step_avg:224.51ms +[2025-07-17 19:45:14] [Rank 0] step:521/10000 train_time:117473ms step_avg:225.48ms +[2025-07-17 19:45:14] [Rank 0] step:521/10000 train_time:117473ms step_avg:225.48ms +[2025-07-17 19:45:18] [Rank 0] step:541/10000 train_time:121967ms step_avg:225.45ms +[2025-07-17 19:45:18] [Rank 0] step:541/10000 train_time:121967ms step_avg:225.45ms +[2025-07-17 19:45:23] [Rank 0] step:561/10000 train_time:126462ms step_avg:225.42ms +[2025-07-17 
19:45:23] [Rank 0] step:561/10000 train_time:126462ms step_avg:225.42ms +[2025-07-17 19:45:27] [Rank 0] step:581/10000 train_time:130962ms step_avg:225.41ms +[2025-07-17 19:45:27] [Rank 0] step:581/10000 train_time:130962ms step_avg:225.41ms +[2025-07-17 19:45:32] [Rank 0] step:601/10000 train_time:135459ms step_avg:225.39ms +[2025-07-17 19:45:32] [Rank 0] step:601/10000 train_time:135459ms step_avg:225.39ms +[2025-07-17 19:45:36] [Rank 0] step:621/10000 train_time:139957ms step_avg:225.37ms +[2025-07-17 19:45:36] [Rank 0] step:621/10000 train_time:139957ms step_avg:225.37ms +[2025-07-17 19:45:42] [Rank 0] PRINT: step:625/10000 val_loss:4.3788 train_time:141088ms step_avg:225.74ms +[2025-07-17 19:45:42] [Rank 0] PRINT: step:625/10000 val_loss:4.3788 train_time:141088ms step_avg:225.74ms +[2025-07-17 19:45:45] [Rank 0] step:641/10000 train_time:144453ms step_avg:225.36ms +[2025-07-17 19:45:45] [Rank 0] step:641/10000 train_time:144453ms step_avg:225.36ms +[2025-07-17 19:45:50] [Rank 0] step:661/10000 train_time:148952ms step_avg:225.34ms +[2025-07-17 19:45:50] [Rank 0] step:661/10000 train_time:148952ms step_avg:225.34ms +[2025-07-17 19:45:54] [Rank 0] step:681/10000 train_time:153452ms step_avg:225.33ms +[2025-07-17 19:45:54] [Rank 0] step:681/10000 train_time:153452ms step_avg:225.33ms +[2025-07-17 19:45:59] [Rank 0] step:701/10000 train_time:157953ms step_avg:225.33ms +[2025-07-17 19:45:59] [Rank 0] step:701/10000 train_time:157953ms step_avg:225.33ms +[2025-07-17 19:46:03] [Rank 0] step:721/10000 train_time:162454ms step_avg:225.32ms +[2025-07-17 19:46:03] [Rank 0] step:721/10000 train_time:162454ms step_avg:225.32ms +[2025-07-17 19:46:08] [Rank 0] step:741/10000 train_time:166958ms step_avg:225.31ms +[2025-07-17 19:46:08] [Rank 0] step:741/10000 train_time:166958ms step_avg:225.31ms +[2025-07-17 19:46:14] [Rank 0] PRINT: step:750/10000 val_loss:4.3440 train_time:169228ms step_avg:225.64ms +[2025-07-17 19:46:14] [Rank 0] PRINT: step:750/10000 val_loss:4.3440 
train_time:169228ms step_avg:225.64ms +[2025-07-17 19:46:17] [Rank 0] step:761/10000 train_time:171488ms step_avg:225.35ms +[2025-07-17 19:46:17] [Rank 0] step:761/10000 train_time:171488ms step_avg:225.35ms +[2025-07-17 19:46:21] [Rank 0] step:781/10000 train_time:176030ms step_avg:225.39ms +[2025-07-17 19:46:21] [Rank 0] step:781/10000 train_time:176030ms step_avg:225.39ms +[2025-07-17 19:46:26] [Rank 0] step:801/10000 train_time:180571ms step_avg:225.43ms +[2025-07-17 19:46:26] [Rank 0] step:801/10000 train_time:180571ms step_avg:225.43ms +[2025-07-17 19:46:30] [Rank 0] step:821/10000 train_time:185109ms step_avg:225.47ms +[2025-07-17 19:46:30] [Rank 0] step:821/10000 train_time:185109ms step_avg:225.47ms +[2025-07-17 19:46:35] [Rank 0] step:841/10000 train_time:189648ms step_avg:225.50ms +[2025-07-17 19:46:35] [Rank 0] step:841/10000 train_time:189648ms step_avg:225.50ms +[2025-07-17 19:46:39] [Rank 0] step:861/10000 train_time:194187ms step_avg:225.54ms +[2025-07-17 19:46:39] [Rank 0] step:861/10000 train_time:194187ms step_avg:225.54ms +[2025-07-17 19:46:47] [Rank 0] PRINT: step:875/10000 val_loss:4.1648 train_time:197596ms step_avg:225.82ms +[2025-07-17 19:46:47] [Rank 0] PRINT: step:875/10000 val_loss:4.1648 train_time:197596ms step_avg:225.82ms +[2025-07-17 19:46:48] [Rank 0] step:881/10000 train_time:198726ms step_avg:225.57ms +[2025-07-17 19:46:48] [Rank 0] step:881/10000 train_time:198726ms step_avg:225.57ms +[2025-07-17 19:46:53] [Rank 0] step:901/10000 train_time:203266ms step_avg:225.60ms +[2025-07-17 19:46:53] [Rank 0] step:901/10000 train_time:203266ms step_avg:225.60ms +[2025-07-17 19:46:57] [Rank 0] step:921/10000 train_time:207809ms step_avg:225.63ms +[2025-07-17 19:46:57] [Rank 0] step:921/10000 train_time:207809ms step_avg:225.63ms +[2025-07-17 19:47:02] [Rank 0] step:941/10000 train_time:212354ms step_avg:225.67ms +[2025-07-17 19:47:02] [Rank 0] step:941/10000 train_time:212354ms step_avg:225.67ms +[2025-07-17 19:47:07] [Rank 0] 
step:961/10000 train_time:216898ms step_avg:225.70ms +[2025-07-17 19:47:07] [Rank 0] step:961/10000 train_time:216898ms step_avg:225.70ms +[2025-07-17 19:47:11] [Rank 0] step:981/10000 train_time:221445ms step_avg:225.73ms +[2025-07-17 19:47:11] [Rank 0] step:981/10000 train_time:221445ms step_avg:225.73ms +[2025-07-17 19:47:19] [Rank 0] PRINT: step:1000/10000 val_loss:4.2120 train_time:225996ms step_avg:226.00ms +[2025-07-17 19:47:19] [Rank 0] PRINT: step:1000/10000 val_loss:4.2120 train_time:225996ms step_avg:226.00ms +[2025-07-17 19:47:20] [Rank 0] step:1001/10000 train_time:226011ms step_avg:225.79ms +[2025-07-17 19:47:20] [Rank 0] step:1001/10000 train_time:226011ms step_avg:225.79ms +[2025-07-17 19:47:25] [Rank 0] step:1021/10000 train_time:231072ms step_avg:226.32ms +[2025-07-17 19:47:25] [Rank 0] step:1021/10000 train_time:231072ms step_avg:226.32ms +[2025-07-17 19:47:29] [Rank 0] step:1041/10000 train_time:235616ms step_avg:226.34ms +[2025-07-17 19:47:29] [Rank 0] step:1041/10000 train_time:235616ms step_avg:226.34ms +[2025-07-17 19:47:34] [Rank 0] step:1061/10000 train_time:240162ms step_avg:226.35ms +[2025-07-17 19:47:34] [Rank 0] step:1061/10000 train_time:240162ms step_avg:226.35ms +[2025-07-17 19:47:38] [Rank 0] step:1081/10000 train_time:244710ms step_avg:226.37ms +[2025-07-17 19:47:38] [Rank 0] step:1081/10000 train_time:244710ms step_avg:226.37ms +[2025-07-17 19:47:43] [Rank 0] step:1101/10000 train_time:249259ms step_avg:226.39ms +[2025-07-17 19:47:43] [Rank 0] step:1101/10000 train_time:249259ms step_avg:226.39ms +[2025-07-17 19:47:47] [Rank 0] step:1121/10000 train_time:253809ms step_avg:226.41ms +[2025-07-17 19:47:47] [Rank 0] step:1121/10000 train_time:253809ms step_avg:226.41ms +[2025-07-17 19:47:52] [Rank 0] PRINT: step:1125/10000 val_loss:4.2012 train_time:254952ms step_avg:226.62ms +[2025-07-17 19:47:52] [Rank 0] PRINT: step:1125/10000 val_loss:4.2012 train_time:254952ms step_avg:226.62ms +[2025-07-17 19:47:56] [Rank 0] step:1141/10000 
train_time:258359ms step_avg:226.43ms +[2025-07-17 19:47:56] [Rank 0] step:1141/10000 train_time:258359ms step_avg:226.43ms +[2025-07-17 19:48:01] [Rank 0] step:1161/10000 train_time:262911ms step_avg:226.45ms +[2025-07-17 19:48:01] [Rank 0] step:1161/10000 train_time:262911ms step_avg:226.45ms +[2025-07-17 19:48:05] [Rank 0] step:1181/10000 train_time:267468ms step_avg:226.48ms +[2025-07-17 19:48:05] [Rank 0] step:1181/10000 train_time:267468ms step_avg:226.48ms +[2025-07-17 19:48:10] [Rank 0] step:1201/10000 train_time:272019ms step_avg:226.49ms +[2025-07-17 19:48:10] [Rank 0] step:1201/10000 train_time:272019ms step_avg:226.49ms +[2025-07-17 19:48:14] [Rank 0] step:1221/10000 train_time:276576ms step_avg:226.52ms +[2025-07-17 19:48:14] [Rank 0] step:1221/10000 train_time:276576ms step_avg:226.52ms +[2025-07-17 19:48:19] [Rank 0] step:1241/10000 train_time:281129ms step_avg:226.53ms +[2025-07-17 19:48:19] [Rank 0] step:1241/10000 train_time:281129ms step_avg:226.53ms +[2025-07-17 19:48:25] [Rank 0] PRINT: step:1250/10000 val_loss:4.3606 train_time:283411ms step_avg:226.73ms +[2025-07-17 19:48:25] [Rank 0] PRINT: step:1250/10000 val_loss:4.3606 train_time:283411ms step_avg:226.73ms +[2025-07-17 19:48:28] [Rank 0] step:1261/10000 train_time:285681ms step_avg:226.55ms +[2025-07-17 19:48:28] [Rank 0] step:1261/10000 train_time:285681ms step_avg:226.55ms +[2025-07-17 19:48:32] [Rank 0] step:1281/10000 train_time:290232ms step_avg:226.57ms +[2025-07-17 19:48:32] [Rank 0] step:1281/10000 train_time:290232ms step_avg:226.57ms +[2025-07-17 19:48:37] [Rank 0] step:1301/10000 train_time:294783ms step_avg:226.58ms +[2025-07-17 19:48:37] [Rank 0] step:1301/10000 train_time:294783ms step_avg:226.58ms +[2025-07-17 19:48:42] [Rank 0] step:1321/10000 train_time:299335ms step_avg:226.60ms +[2025-07-17 19:48:42] [Rank 0] step:1321/10000 train_time:299335ms step_avg:226.60ms +[2025-07-17 19:48:46] [Rank 0] step:1341/10000 train_time:303887ms step_avg:226.61ms +[2025-07-17 19:48:46] 
[Rank 0] step:1341/10000 train_time:303887ms step_avg:226.61ms +[2025-07-17 19:48:51] [Rank 0] step:1361/10000 train_time:308437ms step_avg:226.63ms +[2025-07-17 19:48:51] [Rank 0] step:1361/10000 train_time:308437ms step_avg:226.63ms +[2025-07-17 19:48:58] [Rank 0] PRINT: step:1375/10000 val_loss:4.3626 train_time:311857ms step_avg:226.80ms +[2025-07-17 19:48:58] [Rank 0] PRINT: step:1375/10000 val_loss:4.3626 train_time:311857ms step_avg:226.80ms +[2025-07-17 19:49:00] [Rank 0] step:1381/10000 train_time:312993ms step_avg:226.64ms +[2025-07-17 19:49:00] [Rank 0] step:1381/10000 train_time:312993ms step_avg:226.64ms +[2025-07-17 19:49:04] [Rank 0] step:1401/10000 train_time:317550ms step_avg:226.66ms +[2025-07-17 19:49:04] [Rank 0] step:1401/10000 train_time:317550ms step_avg:226.66ms +[2025-07-17 19:49:09] [Rank 0] step:1421/10000 train_time:322111ms step_avg:226.68ms +[2025-07-17 19:49:09] [Rank 0] step:1421/10000 train_time:322111ms step_avg:226.68ms +[2025-07-17 19:49:13] [Rank 0] step:1441/10000 train_time:326668ms step_avg:226.70ms +[2025-07-17 19:49:13] [Rank 0] step:1441/10000 train_time:326668ms step_avg:226.70ms +[2025-07-17 19:49:18] [Rank 0] step:1461/10000 train_time:331228ms step_avg:226.71ms +[2025-07-17 19:49:18] [Rank 0] step:1461/10000 train_time:331228ms step_avg:226.71ms +[2025-07-17 19:49:22] [Rank 0] step:1481/10000 train_time:335787ms step_avg:226.73ms +[2025-07-17 19:49:22] [Rank 0] step:1481/10000 train_time:335787ms step_avg:226.73ms +[2025-07-17 19:49:31] [Rank 0] PRINT: step:1500/10000 val_loss:4.2580 train_time:340370ms step_avg:226.91ms +[2025-07-17 19:49:31] [Rank 0] PRINT: step:1500/10000 val_loss:4.2580 train_time:340370ms step_avg:226.91ms +[2025-07-17 19:49:32] [Rank 0] step:1501/10000 train_time:340386ms step_avg:226.77ms +[2025-07-17 19:49:32] [Rank 0] step:1501/10000 train_time:340386ms step_avg:226.77ms +[2025-07-17 19:49:36] [Rank 0] step:1521/10000 train_time:344956ms step_avg:226.80ms +[2025-07-17 19:49:36] [Rank 0] 
step:1521/10000 train_time:344956ms step_avg:226.80ms +[2025-07-17 19:49:41] [Rank 0] step:1541/10000 train_time:350074ms step_avg:227.17ms +[2025-07-17 19:49:41] [Rank 0] step:1541/10000 train_time:350074ms step_avg:227.17ms +[2025-07-17 19:49:46] [Rank 0] step:1561/10000 train_time:354660ms step_avg:227.20ms +[2025-07-17 19:49:46] [Rank 0] step:1561/10000 train_time:354660ms step_avg:227.20ms +[2025-07-17 19:49:50] [Rank 0] step:1581/10000 train_time:359247ms step_avg:227.23ms +[2025-07-17 19:49:50] [Rank 0] step:1581/10000 train_time:359247ms step_avg:227.23ms +[2025-07-17 19:49:55] [Rank 0] step:1601/10000 train_time:363837ms step_avg:227.26ms +[2025-07-17 19:49:55] [Rank 0] step:1601/10000 train_time:363837ms step_avg:227.26ms +[2025-07-17 19:50:00] [Rank 0] step:1621/10000 train_time:368426ms step_avg:227.28ms +[2025-07-17 19:50:00] [Rank 0] step:1621/10000 train_time:368426ms step_avg:227.28ms +[2025-07-17 19:50:05] [Rank 0] PRINT: step:1625/10000 val_loss:4.3035 train_time:369581ms step_avg:227.43ms +[2025-07-17 19:50:05] [Rank 0] PRINT: step:1625/10000 val_loss:4.3035 train_time:369581ms step_avg:227.43ms +[2025-07-17 19:50:09] [Rank 0] step:1641/10000 train_time:373012ms step_avg:227.31ms +[2025-07-17 19:50:09] [Rank 0] step:1641/10000 train_time:373012ms step_avg:227.31ms +[2025-07-17 19:50:13] [Rank 0] step:1661/10000 train_time:377600ms step_avg:227.33ms +[2025-07-17 19:50:13] [Rank 0] step:1661/10000 train_time:377600ms step_avg:227.33ms +[2025-07-17 19:50:18] [Rank 0] step:1681/10000 train_time:382185ms step_avg:227.36ms +[2025-07-17 19:50:18] [Rank 0] step:1681/10000 train_time:382185ms step_avg:227.36ms +[2025-07-17 19:50:22] [Rank 0] step:1701/10000 train_time:386768ms step_avg:227.38ms +[2025-07-17 19:50:22] [Rank 0] step:1701/10000 train_time:386768ms step_avg:227.38ms +[2025-07-17 19:50:27] [Rank 0] step:1721/10000 train_time:391351ms step_avg:227.40ms +[2025-07-17 19:50:27] [Rank 0] step:1721/10000 train_time:391351ms step_avg:227.40ms 
+[2025-07-17 19:50:32] [Rank 0] step:1741/10000 train_time:395934ms step_avg:227.42ms +[2025-07-17 19:50:32] [Rank 0] step:1741/10000 train_time:395934ms step_avg:227.42ms +[2025-07-17 19:50:38] [Rank 0] PRINT: step:1750/10000 val_loss:4.3863 train_time:398230ms step_avg:227.56ms +[2025-07-17 19:50:38] [Rank 0] PRINT: step:1750/10000 val_loss:4.3863 train_time:398230ms step_avg:227.56ms +[2025-07-17 19:50:41] [Rank 0] step:1761/10000 train_time:400516ms step_avg:227.44ms +[2025-07-17 19:50:41] [Rank 0] step:1761/10000 train_time:400516ms step_avg:227.44ms +[2025-07-17 19:50:45] [Rank 0] step:1781/10000 train_time:405101ms step_avg:227.46ms +[2025-07-17 19:50:45] [Rank 0] step:1781/10000 train_time:405101ms step_avg:227.46ms +[2025-07-17 19:50:50] [Rank 0] step:1801/10000 train_time:409690ms step_avg:227.48ms +[2025-07-17 19:50:50] [Rank 0] step:1801/10000 train_time:409690ms step_avg:227.48ms +[2025-07-17 19:50:54] [Rank 0] step:1821/10000 train_time:414275ms step_avg:227.50ms +[2025-07-17 19:50:54] [Rank 0] step:1821/10000 train_time:414275ms step_avg:227.50ms +[2025-07-17 19:50:59] [Rank 0] step:1841/10000 train_time:418861ms step_avg:227.52ms +[2025-07-17 19:50:59] [Rank 0] step:1841/10000 train_time:418861ms step_avg:227.52ms +[2025-07-17 19:51:04] [Rank 0] step:1861/10000 train_time:423447ms step_avg:227.54ms +[2025-07-17 19:51:04] [Rank 0] step:1861/10000 train_time:423447ms step_avg:227.54ms +[2025-07-17 19:51:11] [Rank 0] PRINT: step:1875/10000 val_loss:4.3938 train_time:426890ms step_avg:227.67ms +[2025-07-17 19:51:11] [Rank 0] PRINT: step:1875/10000 val_loss:4.3938 train_time:426890ms step_avg:227.67ms +[2025-07-17 19:51:13] [Rank 0] step:1881/10000 train_time:428032ms step_avg:227.56ms +[2025-07-17 19:51:13] [Rank 0] step:1881/10000 train_time:428032ms step_avg:227.56ms +[2025-07-17 19:51:17] [Rank 0] step:1901/10000 train_time:432617ms step_avg:227.57ms +[2025-07-17 19:51:17] [Rank 0] step:1901/10000 train_time:432617ms step_avg:227.57ms +[2025-07-17 
19:51:22] [Rank 0] step:1921/10000 train_time:437202ms step_avg:227.59ms +[2025-07-17 19:51:22] [Rank 0] step:1921/10000 train_time:437202ms step_avg:227.59ms +[2025-07-17 19:51:26] [Rank 0] step:1941/10000 train_time:441785ms step_avg:227.61ms +[2025-07-17 19:51:26] [Rank 0] step:1941/10000 train_time:441785ms step_avg:227.61ms +[2025-07-17 19:51:31] [Rank 0] step:1961/10000 train_time:446370ms step_avg:227.62ms +[2025-07-17 19:51:31] [Rank 0] step:1961/10000 train_time:446370ms step_avg:227.62ms +[2025-07-17 19:51:36] [Rank 0] step:1981/10000 train_time:450952ms step_avg:227.64ms +[2025-07-17 19:51:36] [Rank 0] step:1981/10000 train_time:450952ms step_avg:227.64ms +[2025-07-17 19:51:44] [Rank 0] PRINT: step:2000/10000 val_loss:4.3778 train_time:455539ms step_avg:227.77ms +[2025-07-17 19:51:44] [Rank 0] PRINT: step:2000/10000 val_loss:4.3778 train_time:455539ms step_avg:227.77ms +[2025-07-17 19:51:45] [Rank 0] step:2001/10000 train_time:455554ms step_avg:227.66ms +[2025-07-17 19:51:45] [Rank 0] step:2001/10000 train_time:455554ms step_avg:227.66ms +[2025-07-17 19:51:49] [Rank 0] step:2021/10000 train_time:460115ms step_avg:227.67ms +[2025-07-17 19:51:49] [Rank 0] step:2021/10000 train_time:460115ms step_avg:227.67ms +[2025-07-17 19:51:54] [Rank 0] step:2041/10000 train_time:465205ms step_avg:227.93ms +[2025-07-17 19:51:54] [Rank 0] step:2041/10000 train_time:465205ms step_avg:227.93ms +[2025-07-17 19:51:59] [Rank 0] step:2061/10000 train_time:469784ms step_avg:227.94ms +[2025-07-17 19:51:59] [Rank 0] step:2061/10000 train_time:469784ms step_avg:227.94ms +[2025-07-17 19:52:03] [Rank 0] step:2081/10000 train_time:474363ms step_avg:227.95ms +[2025-07-17 19:52:03] [Rank 0] step:2081/10000 train_time:474363ms step_avg:227.95ms +[2025-07-17 19:52:08] [Rank 0] step:2101/10000 train_time:478947ms step_avg:227.96ms +[2025-07-17 19:52:08] [Rank 0] step:2101/10000 train_time:478947ms step_avg:227.96ms +[2025-07-17 19:52:13] [Rank 0] step:2121/10000 train_time:483532ms 
step_avg:227.97ms +[2025-07-17 19:52:13] [Rank 0] step:2121/10000 train_time:483532ms step_avg:227.97ms +[2025-07-17 19:52:18] [Rank 0] PRINT: step:2125/10000 val_loss:4.3511 train_time:484686ms step_avg:228.09ms +[2025-07-17 19:52:18] [Rank 0] PRINT: step:2125/10000 val_loss:4.3511 train_time:484686ms step_avg:228.09ms +[2025-07-17 19:52:22] [Rank 0] step:2141/10000 train_time:488116ms step_avg:227.99ms +[2025-07-17 19:52:22] [Rank 0] step:2141/10000 train_time:488116ms step_avg:227.99ms +[2025-07-17 19:52:26] [Rank 0] step:2161/10000 train_time:492704ms step_avg:228.00ms +[2025-07-17 19:52:26] [Rank 0] step:2161/10000 train_time:492704ms step_avg:228.00ms +[2025-07-17 19:52:31] [Rank 0] step:2181/10000 train_time:497294ms step_avg:228.01ms +[2025-07-17 19:52:31] [Rank 0] step:2181/10000 train_time:497294ms step_avg:228.01ms +[2025-07-17 19:52:35] [Rank 0] step:2201/10000 train_time:501883ms step_avg:228.03ms +[2025-07-17 19:52:35] [Rank 0] step:2201/10000 train_time:501883ms step_avg:228.03ms +[2025-07-17 19:52:40] [Rank 0] step:2221/10000 train_time:506470ms step_avg:228.04ms +[2025-07-17 19:52:40] [Rank 0] step:2221/10000 train_time:506470ms step_avg:228.04ms +[2025-07-17 19:52:45] [Rank 0] step:2241/10000 train_time:511153ms step_avg:228.09ms +[2025-07-17 19:52:45] [Rank 0] step:2241/10000 train_time:511153ms step_avg:228.09ms +[2025-07-17 19:52:51] [Rank 0] PRINT: step:2250/10000 val_loss:3.9703 train_time:513509ms step_avg:228.23ms +[2025-07-17 19:52:51] [Rank 0] PRINT: step:2250/10000 val_loss:3.9703 train_time:513509ms step_avg:228.23ms +[2025-07-17 19:52:54] [Rank 0] step:2261/10000 train_time:515853ms step_avg:228.15ms +[2025-07-17 19:52:54] [Rank 0] step:2261/10000 train_time:515853ms step_avg:228.15ms +[2025-07-17 19:52:59] [Rank 0] step:2281/10000 train_time:520557ms step_avg:228.21ms +[2025-07-17 19:52:59] [Rank 0] step:2281/10000 train_time:520557ms step_avg:228.21ms +[2025-07-17 19:53:03] [Rank 0] step:2301/10000 train_time:525261ms 
step_avg:228.27ms +[2025-07-17 19:53:03] [Rank 0] step:2301/10000 train_time:525261ms step_avg:228.27ms +[2025-07-17 19:53:08] [Rank 0] step:2321/10000 train_time:529962ms step_avg:228.33ms +[2025-07-17 19:53:08] [Rank 0] step:2321/10000 train_time:529962ms step_avg:228.33ms +[2025-07-17 19:53:13] [Rank 0] step:2341/10000 train_time:534664ms step_avg:228.39ms +[2025-07-17 19:53:13] [Rank 0] step:2341/10000 train_time:534664ms step_avg:228.39ms +[2025-07-17 19:53:17] [Rank 0] step:2361/10000 train_time:539370ms step_avg:228.45ms +[2025-07-17 19:53:17] [Rank 0] step:2361/10000 train_time:539370ms step_avg:228.45ms +[2025-07-17 19:53:25] [Rank 0] PRINT: step:2375/10000 val_loss:3.7562 train_time:542902ms step_avg:228.59ms +[2025-07-17 19:53:25] [Rank 0] PRINT: step:2375/10000 val_loss:3.7562 train_time:542902ms step_avg:228.59ms +[2025-07-17 19:53:27] [Rank 0] step:2381/10000 train_time:544071ms step_avg:228.51ms +[2025-07-17 19:53:27] [Rank 0] step:2381/10000 train_time:544071ms step_avg:228.51ms +[2025-07-17 19:53:31] [Rank 0] step:2401/10000 train_time:548772ms step_avg:228.56ms +[2025-07-17 19:53:31] [Rank 0] step:2401/10000 train_time:548772ms step_avg:228.56ms +[2025-07-17 19:53:36] [Rank 0] step:2421/10000 train_time:553474ms step_avg:228.61ms +[2025-07-17 19:53:36] [Rank 0] step:2421/10000 train_time:553474ms step_avg:228.61ms +[2025-07-17 19:53:41] [Rank 0] step:2441/10000 train_time:558177ms step_avg:228.67ms +[2025-07-17 19:53:41] [Rank 0] step:2441/10000 train_time:558177ms step_avg:228.67ms +[2025-07-17 19:53:46] [Rank 0] step:2461/10000 train_time:562885ms step_avg:228.72ms +[2025-07-17 19:53:46] [Rank 0] step:2461/10000 train_time:562885ms step_avg:228.72ms +[2025-07-17 19:53:50] [Rank 0] step:2481/10000 train_time:567590ms step_avg:228.77ms +[2025-07-17 19:53:50] [Rank 0] step:2481/10000 train_time:567590ms step_avg:228.77ms +[2025-07-17 19:53:59] [Rank 0] PRINT: step:2500/10000 val_loss:3.7373 train_time:572298ms step_avg:228.92ms +[2025-07-17 
19:53:59] [Rank 0] PRINT: step:2500/10000 val_loss:3.7373 train_time:572298ms step_avg:228.92ms +[2025-07-17 19:53:59] [Rank 0] step:2501/10000 train_time:572314ms step_avg:228.83ms +[2025-07-17 19:53:59] [Rank 0] step:2501/10000 train_time:572314ms step_avg:228.83ms +[2025-07-17 19:54:04] [Rank 0] step:2521/10000 train_time:576999ms step_avg:228.88ms +[2025-07-17 19:54:04] [Rank 0] step:2521/10000 train_time:576999ms step_avg:228.88ms +[2025-07-17 19:54:09] [Rank 0] step:2541/10000 train_time:581711ms step_avg:228.93ms +[2025-07-17 19:54:09] [Rank 0] step:2541/10000 train_time:581711ms step_avg:228.93ms +[2025-07-17 19:54:14] [Rank 0] step:2561/10000 train_time:586922ms step_avg:229.18ms +[2025-07-17 19:54:14] [Rank 0] step:2561/10000 train_time:586922ms step_avg:229.18ms +[2025-07-17 19:54:19] [Rank 0] step:2581/10000 train_time:591632ms step_avg:229.23ms +[2025-07-17 19:54:19] [Rank 0] step:2581/10000 train_time:591632ms step_avg:229.23ms +[2025-07-17 19:54:23] [Rank 0] step:2601/10000 train_time:596339ms step_avg:229.27ms +[2025-07-17 19:54:23] [Rank 0] step:2601/10000 train_time:596339ms step_avg:229.27ms +[2025-07-17 19:54:28] [Rank 0] step:2621/10000 train_time:601046ms step_avg:229.32ms +[2025-07-17 19:54:28] [Rank 0] step:2621/10000 train_time:601046ms step_avg:229.32ms +[2025-07-17 19:54:34] [Rank 0] PRINT: step:2625/10000 val_loss:3.7541 train_time:602230ms step_avg:229.42ms +[2025-07-17 19:54:34] [Rank 0] PRINT: step:2625/10000 val_loss:3.7541 train_time:602230ms step_avg:229.42ms +[2025-07-17 19:54:37] [Rank 0] step:2641/10000 train_time:605755ms step_avg:229.37ms +[2025-07-17 19:54:37] [Rank 0] step:2641/10000 train_time:605755ms step_avg:229.37ms +[2025-07-17 19:54:42] [Rank 0] step:2661/10000 train_time:610465ms step_avg:229.41ms +[2025-07-17 19:54:42] [Rank 0] step:2661/10000 train_time:610465ms step_avg:229.41ms +[2025-07-17 19:54:47] [Rank 0] step:2681/10000 train_time:615173ms step_avg:229.46ms +[2025-07-17 19:54:47] [Rank 0] step:2681/10000 
train_time:615173ms step_avg:229.46ms +[2025-07-17 19:54:52] [Rank 0] step:2701/10000 train_time:619882ms step_avg:229.50ms +[2025-07-17 19:54:52] [Rank 0] step:2701/10000 train_time:619882ms step_avg:229.50ms +[2025-07-17 19:54:56] [Rank 0] step:2721/10000 train_time:624589ms step_avg:229.54ms +[2025-07-17 19:54:56] [Rank 0] step:2721/10000 train_time:624589ms step_avg:229.54ms +[2025-07-17 19:55:01] [Rank 0] step:2741/10000 train_time:629295ms step_avg:229.59ms +[2025-07-17 19:55:01] [Rank 0] step:2741/10000 train_time:629295ms step_avg:229.59ms +[2025-07-17 19:55:08] [Rank 0] PRINT: step:2750/10000 val_loss:3.7590 train_time:631656ms step_avg:229.69ms +[2025-07-17 19:55:08] [Rank 0] PRINT: step:2750/10000 val_loss:3.7590 train_time:631656ms step_avg:229.69ms +[2025-07-17 19:55:10] [Rank 0] step:2761/10000 train_time:634000ms step_avg:229.63ms +[2025-07-17 19:55:10] [Rank 0] step:2761/10000 train_time:634000ms step_avg:229.63ms +[2025-07-17 19:55:15] [Rank 0] step:2781/10000 train_time:638703ms step_avg:229.67ms +[2025-07-17 19:55:15] [Rank 0] step:2781/10000 train_time:638703ms step_avg:229.67ms +[2025-07-17 19:55:20] [Rank 0] step:2801/10000 train_time:643405ms step_avg:229.71ms +[2025-07-17 19:55:20] [Rank 0] step:2801/10000 train_time:643405ms step_avg:229.71ms +[2025-07-17 19:55:24] [Rank 0] step:2821/10000 train_time:648108ms step_avg:229.74ms +[2025-07-17 19:55:24] [Rank 0] step:2821/10000 train_time:648108ms step_avg:229.74ms +[2025-07-17 19:55:29] [Rank 0] step:2841/10000 train_time:652808ms step_avg:229.78ms +[2025-07-17 19:55:29] [Rank 0] step:2841/10000 train_time:652808ms step_avg:229.78ms +[2025-07-17 19:55:34] [Rank 0] step:2861/10000 train_time:657511ms step_avg:229.82ms +[2025-07-17 19:55:34] [Rank 0] step:2861/10000 train_time:657511ms step_avg:229.82ms +[2025-07-17 19:55:42] [Rank 0] PRINT: step:2875/10000 val_loss:3.8000 train_time:661044ms step_avg:229.93ms +[2025-07-17 19:55:42] [Rank 0] PRINT: step:2875/10000 val_loss:3.8000 
train_time:661044ms step_avg:229.93ms +[2025-07-17 19:55:43] [Rank 0] step:2881/10000 train_time:662213ms step_avg:229.86ms +[2025-07-17 19:55:43] [Rank 0] step:2881/10000 train_time:662213ms step_avg:229.86ms +[2025-07-17 19:55:48] [Rank 0] step:2901/10000 train_time:666911ms step_avg:229.89ms +[2025-07-17 19:55:48] [Rank 0] step:2901/10000 train_time:666911ms step_avg:229.89ms +[2025-07-17 19:55:52] [Rank 0] step:2921/10000 train_time:671610ms step_avg:229.92ms +[2025-07-17 19:55:52] [Rank 0] step:2921/10000 train_time:671610ms step_avg:229.92ms +[2025-07-17 19:55:57] [Rank 0] step:2941/10000 train_time:676311ms step_avg:229.96ms +[2025-07-17 19:55:57] [Rank 0] step:2941/10000 train_time:676311ms step_avg:229.96ms +[2025-07-17 19:56:02] [Rank 0] step:2961/10000 train_time:681012ms step_avg:229.99ms +[2025-07-17 19:56:02] [Rank 0] step:2961/10000 train_time:681012ms step_avg:229.99ms +[2025-07-17 19:56:07] [Rank 0] step:2981/10000 train_time:685731ms step_avg:230.03ms +[2025-07-17 19:56:07] [Rank 0] step:2981/10000 train_time:685731ms step_avg:230.03ms +[2025-07-17 19:56:16] [Rank 0] PRINT: step:3000/10000 val_loss:3.7771 train_time:690456ms step_avg:230.15ms +[2025-07-17 19:56:16] [Rank 0] PRINT: step:3000/10000 val_loss:3.7771 train_time:690456ms step_avg:230.15ms +[2025-07-17 19:56:16] [Rank 0] step:3001/10000 train_time:690472ms step_avg:230.08ms +[2025-07-17 19:56:16] [Rank 0] step:3001/10000 train_time:690472ms step_avg:230.08ms +[2025-07-17 19:56:21] [Rank 0] step:3021/10000 train_time:695178ms step_avg:230.12ms +[2025-07-17 19:56:21] [Rank 0] step:3021/10000 train_time:695178ms step_avg:230.12ms +[2025-07-17 19:56:25] [Rank 0] step:3041/10000 train_time:699899ms step_avg:230.15ms +[2025-07-17 19:56:25] [Rank 0] step:3041/10000 train_time:699899ms step_avg:230.15ms +[2025-07-17 19:56:31] [Rank 0] step:3061/10000 train_time:705134ms step_avg:230.36ms +[2025-07-17 19:56:31] [Rank 0] step:3061/10000 train_time:705134ms step_avg:230.36ms +[2025-07-17 19:56:35] 
[Rank 0] step:3081/10000 train_time:709869ms step_avg:230.40ms +[2025-07-17 19:56:35] [Rank 0] step:3081/10000 train_time:709869ms step_avg:230.40ms +[2025-07-17 19:56:40] [Rank 0] step:3101/10000 train_time:714600ms step_avg:230.44ms +[2025-07-17 19:56:40] [Rank 0] step:3101/10000 train_time:714600ms step_avg:230.44ms +[2025-07-17 19:56:45] [Rank 0] step:3121/10000 train_time:719327ms step_avg:230.48ms +[2025-07-17 19:56:45] [Rank 0] step:3121/10000 train_time:719327ms step_avg:230.48ms +[2025-07-17 19:56:50] [Rank 0] PRINT: step:3125/10000 val_loss:3.8013 train_time:720515ms step_avg:230.56ms +[2025-07-17 19:56:50] [Rank 0] PRINT: step:3125/10000 val_loss:3.8013 train_time:720515ms step_avg:230.56ms +[2025-07-17 19:56:54] [Rank 0] step:3141/10000 train_time:724052ms step_avg:230.52ms +[2025-07-17 19:56:54] [Rank 0] step:3141/10000 train_time:724052ms step_avg:230.52ms +[2025-07-17 19:56:59] [Rank 0] step:3161/10000 train_time:728782ms step_avg:230.55ms +[2025-07-17 19:56:59] [Rank 0] step:3161/10000 train_time:728782ms step_avg:230.55ms +[2025-07-17 19:57:04] [Rank 0] step:3181/10000 train_time:733509ms step_avg:230.59ms +[2025-07-17 19:57:04] [Rank 0] step:3181/10000 train_time:733509ms step_avg:230.59ms +[2025-07-17 19:57:08] [Rank 0] step:3201/10000 train_time:738238ms step_avg:230.63ms +[2025-07-17 19:57:08] [Rank 0] step:3201/10000 train_time:738238ms step_avg:230.63ms +[2025-07-17 19:57:13] [Rank 0] step:3221/10000 train_time:742965ms step_avg:230.66ms +[2025-07-17 19:57:13] [Rank 0] step:3221/10000 train_time:742965ms step_avg:230.66ms +[2025-07-17 19:57:18] [Rank 0] step:3241/10000 train_time:747691ms step_avg:230.70ms +[2025-07-17 19:57:18] [Rank 0] step:3241/10000 train_time:747691ms step_avg:230.70ms +[2025-07-17 19:57:24] [Rank 0] PRINT: step:3250/10000 val_loss:3.8071 train_time:750062ms step_avg:230.79ms +[2025-07-17 19:57:24] [Rank 0] PRINT: step:3250/10000 val_loss:3.8071 train_time:750062ms step_avg:230.79ms +[2025-07-17 19:57:27] [Rank 0] 
step:3261/10000 train_time:752414ms step_avg:230.73ms +[2025-07-17 19:57:27] [Rank 0] step:3261/10000 train_time:752414ms step_avg:230.73ms +[2025-07-17 19:57:32] [Rank 0] step:3281/10000 train_time:757137ms step_avg:230.76ms +[2025-07-17 19:57:32] [Rank 0] step:3281/10000 train_time:757137ms step_avg:230.76ms +[2025-07-17 19:57:36] [Rank 0] step:3301/10000 train_time:761854ms step_avg:230.79ms +[2025-07-17 19:57:36] [Rank 0] step:3301/10000 train_time:761854ms step_avg:230.79ms +[2025-07-17 19:57:41] [Rank 0] step:3321/10000 train_time:766578ms step_avg:230.83ms +[2025-07-17 19:57:41] [Rank 0] step:3321/10000 train_time:766578ms step_avg:230.83ms +[2025-07-17 19:57:46] [Rank 0] step:3341/10000 train_time:771300ms step_avg:230.86ms +[2025-07-17 19:57:46] [Rank 0] step:3341/10000 train_time:771300ms step_avg:230.86ms +[2025-07-17 19:57:51] [Rank 0] step:3361/10000 train_time:776024ms step_avg:230.89ms +[2025-07-17 19:57:51] [Rank 0] step:3361/10000 train_time:776024ms step_avg:230.89ms +[2025-07-17 19:57:58] [Rank 0] PRINT: step:3375/10000 val_loss:3.7936 train_time:779569ms step_avg:230.98ms +[2025-07-17 19:57:58] [Rank 0] PRINT: step:3375/10000 val_loss:3.7936 train_time:779569ms step_avg:230.98ms +[2025-07-17 19:58:00] [Rank 0] step:3381/10000 train_time:780746ms step_avg:230.92ms +[2025-07-17 19:58:00] [Rank 0] step:3381/10000 train_time:780746ms step_avg:230.92ms +[2025-07-17 19:58:05] [Rank 0] step:3401/10000 train_time:785468ms step_avg:230.95ms +[2025-07-17 19:58:05] [Rank 0] step:3401/10000 train_time:785468ms step_avg:230.95ms +[2025-07-17 19:58:09] [Rank 0] step:3421/10000 train_time:790189ms step_avg:230.98ms +[2025-07-17 19:58:09] [Rank 0] step:3421/10000 train_time:790189ms step_avg:230.98ms +[2025-07-17 19:58:14] [Rank 0] step:3441/10000 train_time:794909ms step_avg:231.01ms +[2025-07-17 19:58:14] [Rank 0] step:3441/10000 train_time:794909ms step_avg:231.01ms +[2025-07-17 19:58:19] [Rank 0] step:3461/10000 train_time:799634ms step_avg:231.04ms 
+[2025-07-17 19:58:19] [Rank 0] step:3461/10000 train_time:799634ms step_avg:231.04ms +[2025-07-17 19:58:24] [Rank 0] step:3481/10000 train_time:804359ms step_avg:231.07ms +[2025-07-17 19:58:24] [Rank 0] step:3481/10000 train_time:804359ms step_avg:231.07ms +[2025-07-17 19:58:33] [Rank 0] PRINT: step:3500/10000 val_loss:3.8517 train_time:809081ms step_avg:231.17ms +[2025-07-17 19:58:33] [Rank 0] PRINT: step:3500/10000 val_loss:3.8517 train_time:809081ms step_avg:231.17ms +[2025-07-17 19:58:33] [Rank 0] step:3501/10000 train_time:809096ms step_avg:231.10ms +[2025-07-17 19:58:33] [Rank 0] step:3501/10000 train_time:809096ms step_avg:231.10ms +[2025-07-17 19:58:38] [Rank 0] step:3521/10000 train_time:813804ms step_avg:231.13ms +[2025-07-17 19:58:38] [Rank 0] step:3521/10000 train_time:813804ms step_avg:231.13ms +[2025-07-17 19:58:42] [Rank 0] step:3541/10000 train_time:818528ms step_avg:231.16ms +[2025-07-17 19:58:42] [Rank 0] step:3541/10000 train_time:818528ms step_avg:231.16ms +[2025-07-17 19:58:48] [Rank 0] step:3561/10000 train_time:823766ms step_avg:231.33ms +[2025-07-17 19:58:48] [Rank 0] step:3561/10000 train_time:823766ms step_avg:231.33ms +[2025-07-17 19:58:52] [Rank 0] step:3581/10000 train_time:828487ms step_avg:231.36ms +[2025-07-17 19:58:52] [Rank 0] step:3581/10000 train_time:828487ms step_avg:231.36ms +[2025-07-17 19:58:57] [Rank 0] step:3601/10000 train_time:833206ms step_avg:231.38ms +[2025-07-17 19:58:57] [Rank 0] step:3601/10000 train_time:833206ms step_avg:231.38ms +[2025-07-17 19:59:02] [Rank 0] step:3621/10000 train_time:837925ms step_avg:231.41ms +[2025-07-17 19:59:02] [Rank 0] step:3621/10000 train_time:837925ms step_avg:231.41ms +[2025-07-17 19:59:07] [Rank 0] PRINT: step:3625/10000 val_loss:3.7641 train_time:839112ms step_avg:231.48ms +[2025-07-17 19:59:07] [Rank 0] PRINT: step:3625/10000 val_loss:3.7641 train_time:839112ms step_avg:231.48ms +[2025-07-17 19:59:11] [Rank 0] step:3641/10000 train_time:842645ms step_avg:231.43ms +[2025-07-17 
19:59:11] [Rank 0] step:3641/10000 train_time:842645ms step_avg:231.43ms +[2025-07-17 19:59:15] [Rank 0] step:3661/10000 train_time:847365ms step_avg:231.46ms +[2025-07-17 19:59:15] [Rank 0] step:3661/10000 train_time:847365ms step_avg:231.46ms +[2025-07-17 19:59:20] [Rank 0] step:3681/10000 train_time:852085ms step_avg:231.48ms +[2025-07-17 19:59:20] [Rank 0] step:3681/10000 train_time:852085ms step_avg:231.48ms +[2025-07-17 19:59:25] [Rank 0] step:3701/10000 train_time:856810ms step_avg:231.51ms +[2025-07-17 19:59:25] [Rank 0] step:3701/10000 train_time:856810ms step_avg:231.51ms +[2025-07-17 19:59:30] [Rank 0] step:3721/10000 train_time:861600ms step_avg:231.55ms +[2025-07-17 19:59:30] [Rank 0] step:3721/10000 train_time:861600ms step_avg:231.55ms +[2025-07-17 19:59:34] [Rank 0] step:3741/10000 train_time:866408ms step_avg:231.60ms +[2025-07-17 19:59:34] [Rank 0] step:3741/10000 train_time:866408ms step_avg:231.60ms +[2025-07-17 19:59:41] [Rank 0] PRINT: step:3750/10000 val_loss:3.8514 train_time:868816ms step_avg:231.68ms +[2025-07-17 19:59:41] [Rank 0] PRINT: step:3750/10000 val_loss:3.8514 train_time:868816ms step_avg:231.68ms +[2025-07-17 19:59:44] [Rank 0] step:3761/10000 train_time:871214ms step_avg:231.64ms +[2025-07-17 19:59:44] [Rank 0] step:3761/10000 train_time:871214ms step_avg:231.64ms +[2025-07-17 19:59:49] [Rank 0] step:3781/10000 train_time:876020ms step_avg:231.69ms +[2025-07-17 19:59:49] [Rank 0] step:3781/10000 train_time:876020ms step_avg:231.69ms +[2025-07-17 19:59:53] [Rank 0] step:3801/10000 train_time:880826ms step_avg:231.74ms +[2025-07-17 19:59:53] [Rank 0] step:3801/10000 train_time:880826ms step_avg:231.74ms +[2025-07-17 19:59:58] [Rank 0] step:3821/10000 train_time:885633ms step_avg:231.78ms +[2025-07-17 19:59:58] [Rank 0] step:3821/10000 train_time:885633ms step_avg:231.78ms +[2025-07-17 20:00:03] [Rank 0] step:3841/10000 train_time:890441ms step_avg:231.83ms +[2025-07-17 20:00:03] [Rank 0] step:3841/10000 train_time:890441ms 
step_avg:231.83ms +[2025-07-17 20:00:08] [Rank 0] step:3861/10000 train_time:895246ms step_avg:231.87ms +[2025-07-17 20:00:08] [Rank 0] step:3861/10000 train_time:895246ms step_avg:231.87ms +[2025-07-17 20:00:16] [Rank 0] PRINT: step:3875/10000 val_loss:3.8174 train_time:898861ms step_avg:231.96ms +[2025-07-17 20:00:16] [Rank 0] PRINT: step:3875/10000 val_loss:3.8174 train_time:898861ms step_avg:231.96ms +[2025-07-17 20:00:17] [Rank 0] step:3881/10000 train_time:900060ms step_avg:231.91ms +[2025-07-17 20:00:17] [Rank 0] step:3881/10000 train_time:900060ms step_avg:231.91ms +[2025-07-17 20:00:22] [Rank 0] step:3901/10000 train_time:904869ms step_avg:231.96ms +[2025-07-17 20:00:22] [Rank 0] step:3901/10000 train_time:904869ms step_avg:231.96ms +[2025-07-17 20:00:27] [Rank 0] step:3921/10000 train_time:909679ms step_avg:232.00ms +[2025-07-17 20:00:27] [Rank 0] step:3921/10000 train_time:909679ms step_avg:232.00ms +[2025-07-17 20:00:32] [Rank 0] step:3941/10000 train_time:914490ms step_avg:232.05ms +[2025-07-17 20:00:32] [Rank 0] step:3941/10000 train_time:914490ms step_avg:232.05ms +[2025-07-17 20:00:37] [Rank 0] step:3961/10000 train_time:919303ms step_avg:232.09ms +[2025-07-17 20:00:37] [Rank 0] step:3961/10000 train_time:919303ms step_avg:232.09ms +[2025-07-17 20:00:41] [Rank 0] step:3981/10000 train_time:924117ms step_avg:232.13ms +[2025-07-17 20:00:41] [Rank 0] step:3981/10000 train_time:924117ms step_avg:232.13ms +[2025-07-17 20:00:51] [Rank 0] PRINT: step:4000/10000 val_loss:3.7593 train_time:928929ms step_avg:232.23ms +[2025-07-17 20:00:51] [Rank 0] PRINT: step:4000/10000 val_loss:3.7593 train_time:928929ms step_avg:232.23ms +[2025-07-17 20:00:51] [Rank 0] step:4001/10000 train_time:928947ms step_avg:232.18ms +[2025-07-17 20:00:51] [Rank 0] step:4001/10000 train_time:928947ms step_avg:232.18ms +[2025-07-17 20:00:56] [Rank 0] step:4021/10000 train_time:933734ms step_avg:232.21ms +[2025-07-17 20:00:56] [Rank 0] step:4021/10000 train_time:933734ms 
step_avg:232.21ms +[2025-07-17 20:01:00] [Rank 0] step:4041/10000 train_time:938541ms step_avg:232.25ms +[2025-07-17 20:01:00] [Rank 0] step:4041/10000 train_time:938541ms step_avg:232.25ms +[2025-07-17 20:01:05] [Rank 0] step:4061/10000 train_time:943350ms step_avg:232.30ms +[2025-07-17 20:01:05] [Rank 0] step:4061/10000 train_time:943350ms step_avg:232.30ms +[2025-07-17 20:01:11] [Rank 0] step:4081/10000 train_time:948661ms step_avg:232.46ms +[2025-07-17 20:01:11] [Rank 0] step:4081/10000 train_time:948661ms step_avg:232.46ms +[2025-07-17 20:01:15] [Rank 0] step:4101/10000 train_time:953464ms step_avg:232.50ms +[2025-07-17 20:01:15] [Rank 0] step:4101/10000 train_time:953464ms step_avg:232.50ms +[2025-07-17 20:01:20] [Rank 0] step:4121/10000 train_time:958266ms step_avg:232.53ms +[2025-07-17 20:01:20] [Rank 0] step:4121/10000 train_time:958266ms step_avg:232.53ms +[2025-07-17 20:01:26] [Rank 0] PRINT: step:4125/10000 val_loss:3.7888 train_time:959474ms step_avg:232.60ms +[2025-07-17 20:01:26] [Rank 0] PRINT: step:4125/10000 val_loss:3.7888 train_time:959474ms step_avg:232.60ms +[2025-07-17 20:01:30] [Rank 0] step:4141/10000 train_time:963067ms step_avg:232.57ms +[2025-07-17 20:01:30] [Rank 0] step:4141/10000 train_time:963067ms step_avg:232.57ms +[2025-07-17 20:01:34] [Rank 0] step:4161/10000 train_time:967870ms step_avg:232.61ms +[2025-07-17 20:01:34] [Rank 0] step:4161/10000 train_time:967870ms step_avg:232.61ms +[2025-07-17 20:01:39] [Rank 0] step:4181/10000 train_time:972670ms step_avg:232.64ms +[2025-07-17 20:01:39] [Rank 0] step:4181/10000 train_time:972670ms step_avg:232.64ms +[2025-07-17 20:01:44] [Rank 0] step:4201/10000 train_time:977472ms step_avg:232.68ms +[2025-07-17 20:01:44] [Rank 0] step:4201/10000 train_time:977472ms step_avg:232.68ms +[2025-07-17 20:01:49] [Rank 0] step:4221/10000 train_time:982274ms step_avg:232.71ms +[2025-07-17 20:01:49] [Rank 0] step:4221/10000 train_time:982274ms step_avg:232.71ms +[2025-07-17 20:01:54] [Rank 0] 
step:4241/10000 train_time:987077ms step_avg:232.75ms +[2025-07-17 20:01:54] [Rank 0] step:4241/10000 train_time:987077ms step_avg:232.75ms +[2025-07-17 20:02:00] [Rank 0] PRINT: step:4250/10000 val_loss:3.7462 train_time:989484ms step_avg:232.82ms +[2025-07-17 20:02:00] [Rank 0] PRINT: step:4250/10000 val_loss:3.7462 train_time:989484ms step_avg:232.82ms +[2025-07-17 20:02:03] [Rank 0] step:4261/10000 train_time:991878ms step_avg:232.78ms +[2025-07-17 20:02:03] [Rank 0] step:4261/10000 train_time:991878ms step_avg:232.78ms +[2025-07-17 20:02:08] [Rank 0] step:4281/10000 train_time:996683ms step_avg:232.82ms +[2025-07-17 20:02:08] [Rank 0] step:4281/10000 train_time:996683ms step_avg:232.82ms +[2025-07-17 20:02:13] [Rank 0] step:4301/10000 train_time:1001488ms step_avg:232.85ms +[2025-07-17 20:02:13] [Rank 0] step:4301/10000 train_time:1001488ms step_avg:232.85ms +[2025-07-17 20:02:17] [Rank 0] step:4321/10000 train_time:1006302ms step_avg:232.89ms +[2025-07-17 20:02:17] [Rank 0] step:4321/10000 train_time:1006302ms step_avg:232.89ms +[2025-07-17 20:02:22] [Rank 0] step:4341/10000 train_time:1011111ms step_avg:232.92ms +[2025-07-17 20:02:22] [Rank 0] step:4341/10000 train_time:1011111ms step_avg:232.92ms +[2025-07-17 20:02:27] [Rank 0] step:4361/10000 train_time:1015921ms step_avg:232.96ms +[2025-07-17 20:02:27] [Rank 0] step:4361/10000 train_time:1015921ms step_avg:232.96ms +[2025-07-17 20:02:35] [Rank 0] PRINT: step:4375/10000 val_loss:3.8176 train_time:1019532ms step_avg:233.04ms +[2025-07-17 20:02:35] [Rank 0] PRINT: step:4375/10000 val_loss:3.8176 train_time:1019532ms step_avg:233.04ms +[2025-07-17 20:02:37] [Rank 0] step:4381/10000 train_time:1020729ms step_avg:232.99ms +[2025-07-17 20:02:37] [Rank 0] step:4381/10000 train_time:1020729ms step_avg:232.99ms +[2025-07-17 20:02:41] [Rank 0] step:4401/10000 train_time:1025538ms step_avg:233.02ms +[2025-07-17 20:02:41] [Rank 0] step:4401/10000 train_time:1025538ms step_avg:233.02ms +[2025-07-17 20:02:46] [Rank 0] 
step:4421/10000 train_time:1030346ms step_avg:233.06ms +[2025-07-17 20:02:46] [Rank 0] step:4421/10000 train_time:1030346ms step_avg:233.06ms +[2025-07-17 20:02:51] [Rank 0] step:4441/10000 train_time:1035158ms step_avg:233.09ms +[2025-07-17 20:02:51] [Rank 0] step:4441/10000 train_time:1035158ms step_avg:233.09ms +[2025-07-17 20:02:56] [Rank 0] step:4461/10000 train_time:1039982ms step_avg:233.13ms +[2025-07-17 20:02:56] [Rank 0] step:4461/10000 train_time:1039982ms step_avg:233.13ms +[2025-07-17 20:03:01] [Rank 0] step:4481/10000 train_time:1044814ms step_avg:233.17ms +[2025-07-17 20:03:01] [Rank 0] step:4481/10000 train_time:1044814ms step_avg:233.17ms +[2025-07-17 20:03:10] [Rank 0] PRINT: step:4500/10000 val_loss:3.8108 train_time:1049647ms step_avg:233.25ms +[2025-07-17 20:03:10] [Rank 0] PRINT: step:4500/10000 val_loss:3.8108 train_time:1049647ms step_avg:233.25ms +[2025-07-17 20:03:10] [Rank 0] step:4501/10000 train_time:1049664ms step_avg:233.21ms +[2025-07-17 20:03:10] [Rank 0] step:4501/10000 train_time:1049664ms step_avg:233.21ms +[2025-07-17 20:03:15] [Rank 0] step:4521/10000 train_time:1054475ms step_avg:233.24ms +[2025-07-17 20:03:15] [Rank 0] step:4521/10000 train_time:1054475ms step_avg:233.24ms +[2025-07-17 20:03:20] [Rank 0] step:4541/10000 train_time:1059302ms step_avg:233.28ms +[2025-07-17 20:03:20] [Rank 0] step:4541/10000 train_time:1059302ms step_avg:233.28ms +[2025-07-17 20:03:25] [Rank 0] step:4561/10000 train_time:1064128ms step_avg:233.31ms +[2025-07-17 20:03:25] [Rank 0] step:4561/10000 train_time:1064128ms step_avg:233.31ms +[2025-07-17 20:03:30] [Rank 0] step:4581/10000 train_time:1069459ms step_avg:233.46ms +[2025-07-17 20:03:30] [Rank 0] step:4581/10000 train_time:1069459ms step_avg:233.46ms +[2025-07-17 20:03:35] [Rank 0] step:4601/10000 train_time:1074289ms step_avg:233.49ms +[2025-07-17 20:03:35] [Rank 0] step:4601/10000 train_time:1074289ms step_avg:233.49ms +[2025-07-17 20:03:40] [Rank 0] step:4621/10000 train_time:1079116ms 
step_avg:233.52ms +[2025-07-17 20:03:40] [Rank 0] step:4621/10000 train_time:1079116ms step_avg:233.52ms +[2025-07-17 20:03:45] [Rank 0] PRINT: step:4625/10000 val_loss:3.7688 train_time:1080332ms step_avg:233.59ms +[2025-07-17 20:03:45] [Rank 0] PRINT: step:4625/10000 val_loss:3.7688 train_time:1080332ms step_avg:233.59ms +[2025-07-17 20:03:49] [Rank 0] step:4641/10000 train_time:1083947ms step_avg:233.56ms +[2025-07-17 20:03:49] [Rank 0] step:4641/10000 train_time:1083947ms step_avg:233.56ms +[2025-07-17 20:03:54] [Rank 0] step:4661/10000 train_time:1088779ms step_avg:233.59ms +[2025-07-17 20:03:54] [Rank 0] step:4661/10000 train_time:1088779ms step_avg:233.59ms +[2025-07-17 20:03:59] [Rank 0] step:4681/10000 train_time:1093610ms step_avg:233.63ms +[2025-07-17 20:03:59] [Rank 0] step:4681/10000 train_time:1093610ms step_avg:233.63ms +[2025-07-17 20:04:04] [Rank 0] step:4701/10000 train_time:1098446ms step_avg:233.66ms +[2025-07-17 20:04:04] [Rank 0] step:4701/10000 train_time:1098446ms step_avg:233.66ms +[2025-07-17 20:04:08] [Rank 0] step:4721/10000 train_time:1103276ms step_avg:233.70ms +[2025-07-17 20:04:08] [Rank 0] step:4721/10000 train_time:1103276ms step_avg:233.70ms +[2025-07-17 20:04:13] [Rank 0] step:4741/10000 train_time:1108110ms step_avg:233.73ms +[2025-07-17 20:04:13] [Rank 0] step:4741/10000 train_time:1108110ms step_avg:233.73ms +[2025-07-17 20:04:20] [Rank 0] PRINT: step:4750/10000 val_loss:3.7849 train_time:1110535ms step_avg:233.80ms +[2025-07-17 20:04:20] [Rank 0] PRINT: step:4750/10000 val_loss:3.7849 train_time:1110535ms step_avg:233.80ms +[2025-07-17 20:04:23] [Rank 0] step:4761/10000 train_time:1112944ms step_avg:233.76ms +[2025-07-17 20:04:23] [Rank 0] step:4761/10000 train_time:1112944ms step_avg:233.76ms +[2025-07-17 20:04:28] [Rank 0] step:4781/10000 train_time:1117771ms step_avg:233.79ms +[2025-07-17 20:04:28] [Rank 0] step:4781/10000 train_time:1117771ms step_avg:233.79ms +[2025-07-17 20:04:32] [Rank 0] step:4801/10000 
train_time:1122602ms step_avg:233.83ms +[2025-07-17 20:04:32] [Rank 0] step:4801/10000 train_time:1122602ms step_avg:233.83ms +[2025-07-17 20:04:37] [Rank 0] step:4821/10000 train_time:1127441ms step_avg:233.86ms +[2025-07-17 20:04:37] [Rank 0] step:4821/10000 train_time:1127441ms step_avg:233.86ms +[2025-07-17 20:04:42] [Rank 0] step:4841/10000 train_time:1132279ms step_avg:233.89ms +[2025-07-17 20:04:42] [Rank 0] step:4841/10000 train_time:1132279ms step_avg:233.89ms +[2025-07-17 20:04:47] [Rank 0] step:4861/10000 train_time:1137114ms step_avg:233.93ms +[2025-07-17 20:04:47] [Rank 0] step:4861/10000 train_time:1137114ms step_avg:233.93ms +[2025-07-17 20:04:55] [Rank 0] PRINT: step:4875/10000 val_loss:3.8393 train_time:1140747ms step_avg:234.00ms +[2025-07-17 20:04:55] [Rank 0] PRINT: step:4875/10000 val_loss:3.8393 train_time:1140747ms step_avg:234.00ms +[2025-07-17 20:04:56] [Rank 0] step:4881/10000 train_time:1141952ms step_avg:233.96ms +[2025-07-17 20:04:56] [Rank 0] step:4881/10000 train_time:1141952ms step_avg:233.96ms +[2025-07-17 20:05:01] [Rank 0] step:4901/10000 train_time:1146787ms step_avg:233.99ms +[2025-07-17 20:05:01] [Rank 0] step:4901/10000 train_time:1146787ms step_avg:233.99ms +[2025-07-17 20:05:06] [Rank 0] step:4921/10000 train_time:1151620ms step_avg:234.02ms +[2025-07-17 20:05:06] [Rank 0] step:4921/10000 train_time:1151620ms step_avg:234.02ms +[2025-07-17 20:05:11] [Rank 0] step:4941/10000 train_time:1156459ms step_avg:234.05ms +[2025-07-17 20:05:11] [Rank 0] step:4941/10000 train_time:1156459ms step_avg:234.05ms +[2025-07-17 20:05:16] [Rank 0] step:4961/10000 train_time:1161290ms step_avg:234.08ms +[2025-07-17 20:05:16] [Rank 0] step:4961/10000 train_time:1161290ms step_avg:234.08ms +[2025-07-17 20:05:21] [Rank 0] step:4981/10000 train_time:1166124ms step_avg:234.11ms +[2025-07-17 20:05:21] [Rank 0] step:4981/10000 train_time:1166124ms step_avg:234.11ms +[2025-07-17 20:05:30] [Rank 0] PRINT: step:5000/10000 val_loss:3.7884 
train_time:1170965ms step_avg:234.19ms +[2025-07-17 20:05:30] [Rank 0] PRINT: step:5000/10000 val_loss:3.7884 train_time:1170965ms step_avg:234.19ms +[2025-07-17 20:05:30] [Rank 0] step:5001/10000 train_time:1170983ms step_avg:234.15ms +[2025-07-17 20:05:30] [Rank 0] step:5001/10000 train_time:1170983ms step_avg:234.15ms +[2025-07-17 20:05:35] [Rank 0] step:5021/10000 train_time:1175783ms step_avg:234.17ms +[2025-07-17 20:05:35] [Rank 0] step:5021/10000 train_time:1175783ms step_avg:234.17ms +[2025-07-17 20:05:40] [Rank 0] step:5041/10000 train_time:1180614ms step_avg:234.20ms +[2025-07-17 20:05:40] [Rank 0] step:5041/10000 train_time:1180614ms step_avg:234.20ms +[2025-07-17 20:05:45] [Rank 0] step:5061/10000 train_time:1185438ms step_avg:234.23ms +[2025-07-17 20:05:45] [Rank 0] step:5061/10000 train_time:1185438ms step_avg:234.23ms +[2025-07-17 20:05:49] [Rank 0] step:5081/10000 train_time:1190262ms step_avg:234.26ms +[2025-07-17 20:05:49] [Rank 0] step:5081/10000 train_time:1190262ms step_avg:234.26ms +[2025-07-17 20:05:54] [Rank 0] step:5101/10000 train_time:1195172ms step_avg:234.30ms +[2025-07-17 20:05:54] [Rank 0] step:5101/10000 train_time:1195172ms step_avg:234.30ms +[2025-07-17 20:05:59] [Rank 0] step:5121/10000 train_time:1199995ms step_avg:234.33ms +[2025-07-17 20:05:59] [Rank 0] step:5121/10000 train_time:1199995ms step_avg:234.33ms +[2025-07-17 20:06:05] [Rank 0] PRINT: step:5125/10000 val_loss:3.8427 train_time:1201207ms step_avg:234.38ms +[2025-07-17 20:06:05] [Rank 0] PRINT: step:5125/10000 val_loss:3.8427 train_time:1201207ms step_avg:234.38ms +[2025-07-17 20:06:09] [Rank 0] step:5141/10000 train_time:1204815ms step_avg:234.35ms +[2025-07-17 20:06:09] [Rank 0] step:5141/10000 train_time:1204815ms step_avg:234.35ms +[2025-07-17 20:06:13] [Rank 0] step:5161/10000 train_time:1209637ms step_avg:234.38ms +[2025-07-17 20:06:13] [Rank 0] step:5161/10000 train_time:1209637ms step_avg:234.38ms +[2025-07-17 20:06:18] [Rank 0] step:5181/10000 
train_time:1214467ms step_avg:234.41ms +[2025-07-17 20:06:18] [Rank 0] step:5181/10000 train_time:1214467ms step_avg:234.41ms +[2025-07-17 20:06:23] [Rank 0] step:5201/10000 train_time:1219339ms step_avg:234.44ms +[2025-07-17 20:06:23] [Rank 0] step:5201/10000 train_time:1219339ms step_avg:234.44ms +[2025-07-17 20:06:28] [Rank 0] step:5221/10000 train_time:1224241ms step_avg:234.48ms +[2025-07-17 20:06:28] [Rank 0] step:5221/10000 train_time:1224241ms step_avg:234.48ms +[2025-07-17 20:06:33] [Rank 0] step:5241/10000 train_time:1229137ms step_avg:234.52ms +[2025-07-17 20:06:33] [Rank 0] step:5241/10000 train_time:1229137ms step_avg:234.52ms +[2025-07-17 20:06:40] [Rank 0] PRINT: step:5250/10000 val_loss:3.7133 train_time:1231585ms step_avg:234.59ms +[2025-07-17 20:06:40] [Rank 0] PRINT: step:5250/10000 val_loss:3.7133 train_time:1231585ms step_avg:234.59ms +[2025-07-17 20:06:43] [Rank 0] step:5261/10000 train_time:1234023ms step_avg:234.56ms +[2025-07-17 20:06:43] [Rank 0] step:5261/10000 train_time:1234023ms step_avg:234.56ms +[2025-07-17 20:06:47] [Rank 0] step:5281/10000 train_time:1238921ms step_avg:234.60ms +[2025-07-17 20:06:47] [Rank 0] step:5281/10000 train_time:1238921ms step_avg:234.60ms +[2025-07-17 20:06:52] [Rank 0] step:5301/10000 train_time:1243816ms step_avg:234.64ms +[2025-07-17 20:06:52] [Rank 0] step:5301/10000 train_time:1243816ms step_avg:234.64ms +[2025-07-17 20:06:57] [Rank 0] step:5321/10000 train_time:1248711ms step_avg:234.68ms +[2025-07-17 20:06:57] [Rank 0] step:5321/10000 train_time:1248711ms step_avg:234.68ms +[2025-07-17 20:07:02] [Rank 0] step:5341/10000 train_time:1253613ms step_avg:234.71ms +[2025-07-17 20:07:02] [Rank 0] step:5341/10000 train_time:1253613ms step_avg:234.71ms +[2025-07-17 20:07:07] [Rank 0] step:5361/10000 train_time:1258510ms step_avg:234.75ms +[2025-07-17 20:07:07] [Rank 0] step:5361/10000 train_time:1258510ms step_avg:234.75ms +[2025-07-17 20:07:15] [Rank 0] PRINT: step:5375/10000 val_loss:3.7320 
train_time:1262192ms step_avg:234.83ms +[2025-07-17 20:07:15] [Rank 0] PRINT: step:5375/10000 val_loss:3.7320 train_time:1262192ms step_avg:234.83ms +[2025-07-17 20:07:17] [Rank 0] step:5381/10000 train_time:1263411ms step_avg:234.79ms +[2025-07-17 20:07:17] [Rank 0] step:5381/10000 train_time:1263411ms step_avg:234.79ms +[2025-07-17 20:07:22] [Rank 0] step:5401/10000 train_time:1268310ms step_avg:234.83ms +[2025-07-17 20:07:22] [Rank 0] step:5401/10000 train_time:1268310ms step_avg:234.83ms +[2025-07-17 20:07:26] [Rank 0] step:5421/10000 train_time:1273218ms step_avg:234.87ms +[2025-07-17 20:07:26] [Rank 0] step:5421/10000 train_time:1273218ms step_avg:234.87ms +[2025-07-17 20:07:31] [Rank 0] step:5441/10000 train_time:1278116ms step_avg:234.90ms +[2025-07-17 20:07:31] [Rank 0] step:5441/10000 train_time:1278116ms step_avg:234.90ms +[2025-07-17 20:07:36] [Rank 0] step:5461/10000 train_time:1283019ms step_avg:234.94ms +[2025-07-17 20:07:36] [Rank 0] step:5461/10000 train_time:1283019ms step_avg:234.94ms +[2025-07-17 20:07:41] [Rank 0] step:5481/10000 train_time:1287925ms step_avg:234.98ms +[2025-07-17 20:07:41] [Rank 0] step:5481/10000 train_time:1287925ms step_avg:234.98ms +[2025-07-17 20:07:50] [Rank 0] PRINT: step:5500/10000 val_loss:3.6954 train_time:1292824ms step_avg:235.06ms +[2025-07-17 20:07:50] [Rank 0] PRINT: step:5500/10000 val_loss:3.6954 train_time:1292824ms step_avg:235.06ms +[2025-07-17 20:07:51] [Rank 0] step:5501/10000 train_time:1292841ms step_avg:235.02ms +[2025-07-17 20:07:51] [Rank 0] step:5501/10000 train_time:1292841ms step_avg:235.02ms +[2025-07-17 20:07:56] [Rank 0] step:5521/10000 train_time:1297713ms step_avg:235.05ms +[2025-07-17 20:07:56] [Rank 0] step:5521/10000 train_time:1297713ms step_avg:235.05ms +[2025-07-17 20:08:01] [Rank 0] step:5541/10000 train_time:1302608ms step_avg:235.09ms +[2025-07-17 20:08:01] [Rank 0] step:5541/10000 train_time:1302608ms step_avg:235.09ms +[2025-07-17 20:08:05] [Rank 0] step:5561/10000 
train_time:1307505ms step_avg:235.12ms +[2025-07-17 20:08:05] [Rank 0] step:5561/10000 train_time:1307505ms step_avg:235.12ms +[2025-07-17 20:08:10] [Rank 0] step:5581/10000 train_time:1312401ms step_avg:235.16ms +[2025-07-17 20:08:10] [Rank 0] step:5581/10000 train_time:1312401ms step_avg:235.16ms +[2025-07-17 20:08:16] [Rank 0] step:5601/10000 train_time:1317818ms step_avg:235.28ms +[2025-07-17 20:08:16] [Rank 0] step:5601/10000 train_time:1317818ms step_avg:235.28ms +[2025-07-17 20:08:21] [Rank 0] step:5621/10000 train_time:1322716ms step_avg:235.32ms +[2025-07-17 20:08:21] [Rank 0] step:5621/10000 train_time:1322716ms step_avg:235.32ms +[2025-07-17 20:08:26] [Rank 0] PRINT: step:5625/10000 val_loss:3.7791 train_time:1323945ms step_avg:235.37ms +[2025-07-17 20:08:26] [Rank 0] PRINT: step:5625/10000 val_loss:3.7791 train_time:1323945ms step_avg:235.37ms +[2025-07-17 20:08:30] [Rank 0] step:5641/10000 train_time:1327607ms step_avg:235.35ms +[2025-07-17 20:08:30] [Rank 0] step:5641/10000 train_time:1327607ms step_avg:235.35ms +[2025-07-17 20:08:35] [Rank 0] step:5661/10000 train_time:1332502ms step_avg:235.38ms +[2025-07-17 20:08:35] [Rank 0] step:5661/10000 train_time:1332502ms step_avg:235.38ms +[2025-07-17 20:08:40] [Rank 0] step:5681/10000 train_time:1337399ms step_avg:235.42ms +[2025-07-17 20:08:40] [Rank 0] step:5681/10000 train_time:1337399ms step_avg:235.42ms +[2025-07-17 20:08:45] [Rank 0] step:5701/10000 train_time:1342294ms step_avg:235.45ms +[2025-07-17 20:08:45] [Rank 0] step:5701/10000 train_time:1342294ms step_avg:235.45ms +[2025-07-17 20:08:50] [Rank 0] step:5721/10000 train_time:1347185ms step_avg:235.48ms +[2025-07-17 20:08:50] [Rank 0] step:5721/10000 train_time:1347185ms step_avg:235.48ms +[2025-07-17 20:08:55] [Rank 0] step:5741/10000 train_time:1352086ms step_avg:235.51ms +[2025-07-17 20:08:55] [Rank 0] step:5741/10000 train_time:1352086ms step_avg:235.51ms +[2025-07-17 20:09:02] [Rank 0] PRINT: step:5750/10000 val_loss:3.7027 
train_time:1354539ms step_avg:235.57ms +[2025-07-17 20:09:02] [Rank 0] PRINT: step:5750/10000 val_loss:3.7027 train_time:1354539ms step_avg:235.57ms +[2025-07-17 20:09:04] [Rank 0] step:5761/10000 train_time:1356981ms step_avg:235.55ms +[2025-07-17 20:09:04] [Rank 0] step:5761/10000 train_time:1356981ms step_avg:235.55ms +[2025-07-17 20:09:09] [Rank 0] step:5781/10000 train_time:1361875ms step_avg:235.58ms +[2025-07-17 20:09:09] [Rank 0] step:5781/10000 train_time:1361875ms step_avg:235.58ms +[2025-07-17 20:09:14] [Rank 0] step:5801/10000 train_time:1366764ms step_avg:235.61ms +[2025-07-17 20:09:14] [Rank 0] step:5801/10000 train_time:1366764ms step_avg:235.61ms +[2025-07-17 20:09:19] [Rank 0] step:5821/10000 train_time:1371653ms step_avg:235.64ms +[2025-07-17 20:09:19] [Rank 0] step:5821/10000 train_time:1371653ms step_avg:235.64ms +[2025-07-17 20:09:24] [Rank 0] step:5841/10000 train_time:1376550ms step_avg:235.67ms +[2025-07-17 20:09:24] [Rank 0] step:5841/10000 train_time:1376550ms step_avg:235.67ms +[2025-07-17 20:09:29] [Rank 0] step:5861/10000 train_time:1381441ms step_avg:235.70ms +[2025-07-17 20:09:29] [Rank 0] step:5861/10000 train_time:1381441ms step_avg:235.70ms +[2025-07-17 20:09:37] [Rank 0] PRINT: step:5875/10000 val_loss:3.6964 train_time:1385108ms step_avg:235.76ms +[2025-07-17 20:09:37] [Rank 0] PRINT: step:5875/10000 val_loss:3.6964 train_time:1385108ms step_avg:235.76ms +[2025-07-17 20:09:38] [Rank 0] step:5881/10000 train_time:1386330ms step_avg:235.73ms +[2025-07-17 20:09:38] [Rank 0] step:5881/10000 train_time:1386330ms step_avg:235.73ms +[2025-07-17 20:09:43] [Rank 0] step:5901/10000 train_time:1391229ms step_avg:235.76ms +[2025-07-17 20:09:43] [Rank 0] step:5901/10000 train_time:1391229ms step_avg:235.76ms +[2025-07-17 20:09:48] [Rank 0] step:5921/10000 train_time:1396125ms step_avg:235.79ms +[2025-07-17 20:09:48] [Rank 0] step:5921/10000 train_time:1396125ms step_avg:235.79ms +[2025-07-17 20:09:53] [Rank 0] step:5941/10000 
train_time:1401033ms step_avg:235.82ms +[2025-07-17 20:09:53] [Rank 0] step:5941/10000 train_time:1401033ms step_avg:235.82ms +[2025-07-17 20:09:58] [Rank 0] step:5961/10000 train_time:1405943ms step_avg:235.86ms +[2025-07-17 20:09:58] [Rank 0] step:5961/10000 train_time:1405943ms step_avg:235.86ms +[2025-07-17 20:10:03] [Rank 0] step:5981/10000 train_time:1410848ms step_avg:235.89ms +[2025-07-17 20:10:03] [Rank 0] step:5981/10000 train_time:1410848ms step_avg:235.89ms +[2025-07-17 20:10:12] [Rank 0] PRINT: step:6000/10000 val_loss:3.8560 train_time:1415766ms step_avg:235.96ms +[2025-07-17 20:10:12] [Rank 0] PRINT: step:6000/10000 val_loss:3.8560 train_time:1415766ms step_avg:235.96ms +[2025-07-17 20:10:12] [Rank 0] step:6001/10000 train_time:1415782ms step_avg:235.92ms +[2025-07-17 20:10:12] [Rank 0] step:6001/10000 train_time:1415782ms step_avg:235.92ms +[2025-07-17 20:10:17] [Rank 0] step:6021/10000 train_time:1420671ms step_avg:235.95ms +[2025-07-17 20:10:17] [Rank 0] step:6021/10000 train_time:1420671ms step_avg:235.95ms +[2025-07-17 20:10:22] [Rank 0] step:6041/10000 train_time:1425580ms step_avg:235.98ms +[2025-07-17 20:10:22] [Rank 0] step:6041/10000 train_time:1425580ms step_avg:235.98ms +[2025-07-17 20:10:27] [Rank 0] step:6061/10000 train_time:1430483ms step_avg:236.01ms +[2025-07-17 20:10:27] [Rank 0] step:6061/10000 train_time:1430483ms step_avg:236.01ms +[2025-07-17 20:10:32] [Rank 0] step:6081/10000 train_time:1435392ms step_avg:236.05ms +[2025-07-17 20:10:32] [Rank 0] step:6081/10000 train_time:1435392ms step_avg:236.05ms +[2025-07-17 20:10:37] [Rank 0] step:6101/10000 train_time:1440780ms step_avg:236.15ms +[2025-07-17 20:10:37] [Rank 0] step:6101/10000 train_time:1440780ms step_avg:236.15ms +[2025-07-17 20:10:42] [Rank 0] step:6121/10000 train_time:1445691ms step_avg:236.19ms +[2025-07-17 20:10:42] [Rank 0] step:6121/10000 train_time:1445691ms step_avg:236.19ms +[2025-07-17 20:10:48] [Rank 0] PRINT: step:6125/10000 val_loss:3.6844 
train_time:1446923ms step_avg:236.23ms +[2025-07-17 20:10:48] [Rank 0] PRINT: step:6125/10000 val_loss:3.6844 train_time:1446923ms step_avg:236.23ms +[2025-07-17 20:10:52] [Rank 0] step:6141/10000 train_time:1450598ms step_avg:236.22ms +[2025-07-17 20:10:52] [Rank 0] step:6141/10000 train_time:1450598ms step_avg:236.22ms +[2025-07-17 20:10:56] [Rank 0] step:6161/10000 train_time:1455500ms step_avg:236.24ms +[2025-07-17 20:10:56] [Rank 0] step:6161/10000 train_time:1455500ms step_avg:236.24ms +[2025-07-17 20:11:01] [Rank 0] step:6181/10000 train_time:1460415ms step_avg:236.27ms +[2025-07-17 20:11:01] [Rank 0] step:6181/10000 train_time:1460415ms step_avg:236.27ms +[2025-07-17 20:11:06] [Rank 0] step:6201/10000 train_time:1465330ms step_avg:236.31ms +[2025-07-17 20:11:06] [Rank 0] step:6201/10000 train_time:1465330ms step_avg:236.31ms +[2025-07-17 20:11:11] [Rank 0] step:6221/10000 train_time:1470237ms step_avg:236.33ms +[2025-07-17 20:11:11] [Rank 0] step:6221/10000 train_time:1470237ms step_avg:236.33ms +[2025-07-17 20:11:16] [Rank 0] step:6241/10000 train_time:1475148ms step_avg:236.36ms +[2025-07-17 20:11:16] [Rank 0] step:6241/10000 train_time:1475148ms step_avg:236.36ms +[2025-07-17 20:11:23] [Rank 0] PRINT: step:6250/10000 val_loss:3.7407 train_time:1477607ms step_avg:236.42ms +[2025-07-17 20:11:23] [Rank 0] PRINT: step:6250/10000 val_loss:3.7407 train_time:1477607ms step_avg:236.42ms +[2025-07-17 20:11:25] [Rank 0] step:6261/10000 train_time:1480054ms step_avg:236.39ms +[2025-07-17 20:11:25] [Rank 0] step:6261/10000 train_time:1480054ms step_avg:236.39ms +[2025-07-17 20:11:30] [Rank 0] step:6281/10000 train_time:1484971ms step_avg:236.42ms +[2025-07-17 20:11:30] [Rank 0] step:6281/10000 train_time:1484971ms step_avg:236.42ms +[2025-07-17 20:11:35] [Rank 0] step:6301/10000 train_time:1489878ms step_avg:236.45ms +[2025-07-17 20:11:35] [Rank 0] step:6301/10000 train_time:1489878ms step_avg:236.45ms +[2025-07-17 20:11:40] [Rank 0] step:6321/10000 
train_time:1494786ms step_avg:236.48ms +[2025-07-17 20:11:40] [Rank 0] step:6321/10000 train_time:1494786ms step_avg:236.48ms +[2025-07-17 20:11:45] [Rank 0] step:6341/10000 train_time:1499703ms step_avg:236.51ms +[2025-07-17 20:11:45] [Rank 0] step:6341/10000 train_time:1499703ms step_avg:236.51ms +[2025-07-17 20:11:50] [Rank 0] step:6361/10000 train_time:1504609ms step_avg:236.54ms +[2025-07-17 20:11:50] [Rank 0] step:6361/10000 train_time:1504609ms step_avg:236.54ms +[2025-07-17 20:11:58] [Rank 0] PRINT: step:6375/10000 val_loss:3.6491 train_time:1508291ms step_avg:236.59ms +[2025-07-17 20:11:58] [Rank 0] PRINT: step:6375/10000 val_loss:3.6491 train_time:1508291ms step_avg:236.59ms +[2025-07-17 20:11:59] [Rank 0] step:6381/10000 train_time:1509514ms step_avg:236.56ms +[2025-07-17 20:11:59] [Rank 0] step:6381/10000 train_time:1509514ms step_avg:236.56ms +[2025-07-17 20:12:04] [Rank 0] step:6401/10000 train_time:1514415ms step_avg:236.59ms +[2025-07-17 20:12:04] [Rank 0] step:6401/10000 train_time:1514415ms step_avg:236.59ms +[2025-07-17 20:12:09] [Rank 0] step:6421/10000 train_time:1519322ms step_avg:236.62ms +[2025-07-17 20:12:09] [Rank 0] step:6421/10000 train_time:1519322ms step_avg:236.62ms +[2025-07-17 20:12:14] [Rank 0] step:6441/10000 train_time:1524238ms step_avg:236.65ms +[2025-07-17 20:12:14] [Rank 0] step:6441/10000 train_time:1524238ms step_avg:236.65ms +[2025-07-17 20:12:19] [Rank 0] step:6461/10000 train_time:1529155ms step_avg:236.67ms +[2025-07-17 20:12:19] [Rank 0] step:6461/10000 train_time:1529155ms step_avg:236.67ms +[2025-07-17 20:12:24] [Rank 0] step:6481/10000 train_time:1534064ms step_avg:236.70ms +[2025-07-17 20:12:24] [Rank 0] step:6481/10000 train_time:1534064ms step_avg:236.70ms +[2025-07-17 20:12:33] [Rank 0] PRINT: step:6500/10000 val_loss:3.7803 train_time:1538979ms step_avg:236.77ms +[2025-07-17 20:12:33] [Rank 0] PRINT: step:6500/10000 val_loss:3.7803 train_time:1538979ms step_avg:236.77ms +[2025-07-17 20:12:33] [Rank 0] 
step:6501/10000 train_time:1538997ms step_avg:236.73ms +[2025-07-17 20:12:33] [Rank 0] step:6501/10000 train_time:1538997ms step_avg:236.73ms +[2025-07-17 20:12:38] [Rank 0] step:6521/10000 train_time:1543892ms step_avg:236.76ms +[2025-07-17 20:12:38] [Rank 0] step:6521/10000 train_time:1543892ms step_avg:236.76ms +[2025-07-17 20:12:43] [Rank 0] step:6541/10000 train_time:1548800ms step_avg:236.78ms +[2025-07-17 20:12:43] [Rank 0] step:6541/10000 train_time:1548800ms step_avg:236.78ms +[2025-07-17 20:12:48] [Rank 0] step:6561/10000 train_time:1553727ms step_avg:236.81ms +[2025-07-17 20:12:48] [Rank 0] step:6561/10000 train_time:1553727ms step_avg:236.81ms +[2025-07-17 20:12:52] [Rank 0] step:6581/10000 train_time:1558643ms step_avg:236.84ms +[2025-07-17 20:12:52] [Rank 0] step:6581/10000 train_time:1558643ms step_avg:236.84ms +[2025-07-17 20:12:57] [Rank 0] step:6601/10000 train_time:1563570ms step_avg:236.87ms +[2025-07-17 20:12:57] [Rank 0] step:6601/10000 train_time:1563570ms step_avg:236.87ms +[2025-07-17 20:13:03] [Rank 0] step:6621/10000 train_time:1568979ms step_avg:236.97ms +[2025-07-17 20:13:03] [Rank 0] step:6621/10000 train_time:1568979ms step_avg:236.97ms +[2025-07-17 20:13:08] [Rank 0] PRINT: step:6625/10000 val_loss:3.7032 train_time:1570215ms step_avg:237.01ms +[2025-07-17 20:13:08] [Rank 0] PRINT: step:6625/10000 val_loss:3.7032 train_time:1570215ms step_avg:237.01ms +[2025-07-17 20:13:12] [Rank 0] step:6641/10000 train_time:1573884ms step_avg:237.00ms +[2025-07-17 20:13:12] [Rank 0] step:6641/10000 train_time:1573884ms step_avg:237.00ms +[2025-07-17 20:13:17] [Rank 0] step:6661/10000 train_time:1578789ms step_avg:237.02ms +[2025-07-17 20:13:17] [Rank 0] step:6661/10000 train_time:1578789ms step_avg:237.02ms +[2025-07-17 20:13:22] [Rank 0] step:6681/10000 train_time:1583745ms step_avg:237.05ms +[2025-07-17 20:13:22] [Rank 0] step:6681/10000 train_time:1583745ms step_avg:237.05ms +[2025-07-17 20:13:27] [Rank 0] step:6701/10000 train_time:1588713ms 
step_avg:237.09ms +[2025-07-17 20:13:27] [Rank 0] step:6701/10000 train_time:1588713ms step_avg:237.09ms +[2025-07-17 20:13:32] [Rank 0] step:6721/10000 train_time:1593697ms step_avg:237.12ms +[2025-07-17 20:13:32] [Rank 0] step:6721/10000 train_time:1593697ms step_avg:237.12ms +[2025-07-17 20:13:37] [Rank 0] step:6741/10000 train_time:1598684ms step_avg:237.16ms +[2025-07-17 20:13:37] [Rank 0] step:6741/10000 train_time:1598684ms step_avg:237.16ms +[2025-07-17 20:13:44] [Rank 0] PRINT: step:6750/10000 val_loss:3.8028 train_time:1601168ms step_avg:237.21ms +[2025-07-17 20:13:44] [Rank 0] PRINT: step:6750/10000 val_loss:3.8028 train_time:1601168ms step_avg:237.21ms +[2025-07-17 20:13:46] [Rank 0] step:6761/10000 train_time:1603651ms step_avg:237.19ms +[2025-07-17 20:13:46] [Rank 0] step:6761/10000 train_time:1603651ms step_avg:237.19ms +[2025-07-17 20:13:51] [Rank 0] step:6781/10000 train_time:1608626ms step_avg:237.23ms +[2025-07-17 20:13:51] [Rank 0] step:6781/10000 train_time:1608626ms step_avg:237.23ms +[2025-07-17 20:13:56] [Rank 0] step:6801/10000 train_time:1613609ms step_avg:237.26ms +[2025-07-17 20:13:56] [Rank 0] step:6801/10000 train_time:1613609ms step_avg:237.26ms +[2025-07-17 20:14:01] [Rank 0] step:6821/10000 train_time:1618584ms step_avg:237.29ms +[2025-07-17 20:14:01] [Rank 0] step:6821/10000 train_time:1618584ms step_avg:237.29ms +[2025-07-17 20:14:06] [Rank 0] step:6841/10000 train_time:1623559ms step_avg:237.33ms +[2025-07-17 20:14:06] [Rank 0] step:6841/10000 train_time:1623559ms step_avg:237.33ms +[2025-07-17 20:14:11] [Rank 0] step:6861/10000 train_time:1628524ms step_avg:237.36ms +[2025-07-17 20:14:11] [Rank 0] step:6861/10000 train_time:1628524ms step_avg:237.36ms +[2025-07-17 20:14:20] [Rank 0] PRINT: step:6875/10000 val_loss:3.7693 train_time:1632250ms step_avg:237.42ms +[2025-07-17 20:14:20] [Rank 0] PRINT: step:6875/10000 val_loss:3.7693 train_time:1632250ms step_avg:237.42ms +[2025-07-17 20:14:21] [Rank 0] step:6881/10000 
train_time:1633495ms step_avg:237.39ms +[2025-07-17 20:14:21] [Rank 0] step:6881/10000 train_time:1633495ms step_avg:237.39ms +[2025-07-17 20:14:26] [Rank 0] step:6901/10000 train_time:1638455ms step_avg:237.42ms +[2025-07-17 20:14:26] [Rank 0] step:6901/10000 train_time:1638455ms step_avg:237.42ms +[2025-07-17 20:14:31] [Rank 0] step:6921/10000 train_time:1643419ms step_avg:237.45ms +[2025-07-17 20:14:31] [Rank 0] step:6921/10000 train_time:1643419ms step_avg:237.45ms +[2025-07-17 20:14:36] [Rank 0] step:6941/10000 train_time:1648396ms step_avg:237.49ms +[2025-07-17 20:14:36] [Rank 0] step:6941/10000 train_time:1648396ms step_avg:237.49ms +[2025-07-17 20:14:41] [Rank 0] step:6961/10000 train_time:1653369ms step_avg:237.52ms +[2025-07-17 20:14:41] [Rank 0] step:6961/10000 train_time:1653369ms step_avg:237.52ms +[2025-07-17 20:14:46] [Rank 0] step:6981/10000 train_time:1658341ms step_avg:237.55ms +[2025-07-17 20:14:46] [Rank 0] step:6981/10000 train_time:1658341ms step_avg:237.55ms +[2025-07-17 20:14:55] [Rank 0] PRINT: step:7000/10000 val_loss:3.7704 train_time:1663311ms step_avg:237.62ms +[2025-07-17 20:14:55] [Rank 0] PRINT: step:7000/10000 val_loss:3.7704 train_time:1663311ms step_avg:237.62ms +[2025-07-17 20:14:55] [Rank 0] step:7001/10000 train_time:1663328ms step_avg:237.58ms +[2025-07-17 20:14:55] [Rank 0] step:7001/10000 train_time:1663328ms step_avg:237.58ms +[2025-07-17 20:15:00] [Rank 0] step:7021/10000 train_time:1668280ms step_avg:237.61ms +[2025-07-17 20:15:00] [Rank 0] step:7021/10000 train_time:1668280ms step_avg:237.61ms +[2025-07-17 20:15:05] [Rank 0] step:7041/10000 train_time:1673251ms step_avg:237.64ms +[2025-07-17 20:15:05] [Rank 0] step:7041/10000 train_time:1673251ms step_avg:237.64ms +[2025-07-17 20:15:10] [Rank 0] step:7061/10000 train_time:1678217ms step_avg:237.67ms +[2025-07-17 20:15:10] [Rank 0] step:7061/10000 train_time:1678217ms step_avg:237.67ms +[2025-07-17 20:15:15] [Rank 0] step:7081/10000 train_time:1683190ms step_avg:237.71ms 
+[2025-07-17 20:15:15] [Rank 0] step:7081/10000 train_time:1683190ms step_avg:237.71ms +[2025-07-17 20:15:20] [Rank 0] step:7101/10000 train_time:1688155ms step_avg:237.73ms +[2025-07-17 20:15:20] [Rank 0] step:7101/10000 train_time:1688155ms step_avg:237.73ms +[2025-07-17 20:15:25] [Rank 0] step:7121/10000 train_time:1693617ms step_avg:237.83ms +[2025-07-17 20:15:25] [Rank 0] step:7121/10000 train_time:1693617ms step_avg:237.83ms +[2025-07-17 20:15:31] [Rank 0] PRINT: step:7125/10000 val_loss:3.7638 train_time:1694864ms step_avg:237.88ms +[2025-07-17 20:15:31] [Rank 0] PRINT: step:7125/10000 val_loss:3.7638 train_time:1694864ms step_avg:237.88ms +[2025-07-17 20:15:34] [Rank 0] step:7141/10000 train_time:1698592ms step_avg:237.86ms +[2025-07-17 20:15:34] [Rank 0] step:7141/10000 train_time:1698592ms step_avg:237.86ms +[2025-07-17 20:15:39] [Rank 0] step:7161/10000 train_time:1703568ms step_avg:237.90ms +[2025-07-17 20:15:39] [Rank 0] step:7161/10000 train_time:1703568ms step_avg:237.90ms +[2025-07-17 20:15:44] [Rank 0] step:7181/10000 train_time:1708536ms step_avg:237.92ms +[2025-07-17 20:15:44] [Rank 0] step:7181/10000 train_time:1708536ms step_avg:237.92ms +[2025-07-17 20:15:49] [Rank 0] step:7201/10000 train_time:1713522ms step_avg:237.96ms +[2025-07-17 20:15:49] [Rank 0] step:7201/10000 train_time:1713522ms step_avg:237.96ms +[2025-07-17 20:15:54] [Rank 0] step:7221/10000 train_time:1718492ms step_avg:237.99ms +[2025-07-17 20:15:54] [Rank 0] step:7221/10000 train_time:1718492ms step_avg:237.99ms +[2025-07-17 20:15:59] [Rank 0] step:7241/10000 train_time:1723464ms step_avg:238.01ms +[2025-07-17 20:15:59] [Rank 0] step:7241/10000 train_time:1723464ms step_avg:238.01ms +[2025-07-17 20:16:06] [Rank 0] PRINT: step:7250/10000 val_loss:3.6698 train_time:1725956ms step_avg:238.06ms +[2025-07-17 20:16:06] [Rank 0] PRINT: step:7250/10000 val_loss:3.6698 train_time:1725956ms step_avg:238.06ms +[2025-07-17 20:16:09] [Rank 0] step:7261/10000 train_time:1728429ms 
step_avg:238.04ms +[2025-07-17 20:16:09] [Rank 0] step:7261/10000 train_time:1728429ms step_avg:238.04ms +[2025-07-17 20:16:14] [Rank 0] step:7281/10000 train_time:1733398ms step_avg:238.07ms +[2025-07-17 20:16:14] [Rank 0] step:7281/10000 train_time:1733398ms step_avg:238.07ms +[2025-07-17 20:16:19] [Rank 0] step:7301/10000 train_time:1738363ms step_avg:238.10ms +[2025-07-17 20:16:19] [Rank 0] step:7301/10000 train_time:1738363ms step_avg:238.10ms +[2025-07-17 20:16:24] [Rank 0] step:7321/10000 train_time:1743349ms step_avg:238.13ms +[2025-07-17 20:16:24] [Rank 0] step:7321/10000 train_time:1743349ms step_avg:238.13ms +[2025-07-17 20:16:29] [Rank 0] step:7341/10000 train_time:1748317ms step_avg:238.16ms +[2025-07-17 20:16:29] [Rank 0] step:7341/10000 train_time:1748317ms step_avg:238.16ms +[2025-07-17 20:16:34] [Rank 0] step:7361/10000 train_time:1753296ms step_avg:238.19ms +[2025-07-17 20:16:34] [Rank 0] step:7361/10000 train_time:1753296ms step_avg:238.19ms +[2025-07-17 20:16:42] [Rank 0] PRINT: step:7375/10000 val_loss:3.6116 train_time:1757033ms step_avg:238.24ms +[2025-07-17 20:16:42] [Rank 0] PRINT: step:7375/10000 val_loss:3.6116 train_time:1757033ms step_avg:238.24ms +[2025-07-17 20:16:44] [Rank 0] step:7381/10000 train_time:1758273ms step_avg:238.22ms +[2025-07-17 20:16:44] [Rank 0] step:7381/10000 train_time:1758273ms step_avg:238.22ms +[2025-07-17 20:16:49] [Rank 0] step:7401/10000 train_time:1763246ms step_avg:238.24ms +[2025-07-17 20:16:49] [Rank 0] step:7401/10000 train_time:1763246ms step_avg:238.24ms +[2025-07-17 20:16:54] [Rank 0] step:7421/10000 train_time:1768217ms step_avg:238.27ms +[2025-07-17 20:16:54] [Rank 0] step:7421/10000 train_time:1768217ms step_avg:238.27ms +[2025-07-17 20:16:59] [Rank 0] step:7441/10000 train_time:1773204ms step_avg:238.30ms +[2025-07-17 20:16:59] [Rank 0] step:7441/10000 train_time:1773204ms step_avg:238.30ms +[2025-07-17 20:17:04] [Rank 0] step:7461/10000 train_time:1778174ms step_avg:238.33ms +[2025-07-17 
20:17:04] [Rank 0] step:7461/10000 train_time:1778174ms step_avg:238.33ms +[2025-07-17 20:17:09] [Rank 0] step:7481/10000 train_time:1783158ms step_avg:238.36ms +[2025-07-17 20:17:09] [Rank 0] step:7481/10000 train_time:1783158ms step_avg:238.36ms +[2025-07-17 20:17:18] [Rank 0] PRINT: step:7500/10000 val_loss:3.7318 train_time:1788152ms step_avg:238.42ms +[2025-07-17 20:17:18] [Rank 0] PRINT: step:7500/10000 val_loss:3.7318 train_time:1788152ms step_avg:238.42ms +[2025-07-17 20:17:18] [Rank 0] step:7501/10000 train_time:1788168ms step_avg:238.39ms +[2025-07-17 20:17:18] [Rank 0] step:7501/10000 train_time:1788168ms step_avg:238.39ms +[2025-07-17 20:17:23] [Rank 0] step:7521/10000 train_time:1793134ms step_avg:238.42ms +[2025-07-17 20:17:23] [Rank 0] step:7521/10000 train_time:1793134ms step_avg:238.42ms +[2025-07-17 20:17:28] [Rank 0] step:7541/10000 train_time:1798114ms step_avg:238.45ms +[2025-07-17 20:17:28] [Rank 0] step:7541/10000 train_time:1798114ms step_avg:238.45ms +[2025-07-17 20:17:33] [Rank 0] step:7561/10000 train_time:1803096ms step_avg:238.47ms +[2025-07-17 20:17:33] [Rank 0] step:7561/10000 train_time:1803096ms step_avg:238.47ms +[2025-07-17 20:17:38] [Rank 0] step:7581/10000 train_time:1808083ms step_avg:238.50ms +[2025-07-17 20:17:38] [Rank 0] step:7581/10000 train_time:1808083ms step_avg:238.50ms +[2025-07-17 20:17:43] [Rank 0] step:7601/10000 train_time:1813080ms step_avg:238.53ms +[2025-07-17 20:17:43] [Rank 0] step:7601/10000 train_time:1813080ms step_avg:238.53ms +[2025-07-17 20:17:49] [Rank 0] step:7621/10000 train_time:1818085ms step_avg:238.56ms +[2025-07-17 20:17:49] [Rank 0] step:7621/10000 train_time:1818085ms step_avg:238.56ms +[2025-07-17 20:17:54] [Rank 0] PRINT: step:7625/10000 val_loss:3.6757 train_time:1819841ms step_avg:238.67ms +[2025-07-17 20:17:54] [Rank 0] PRINT: step:7625/10000 val_loss:3.6757 train_time:1819841ms step_avg:238.67ms +[2025-07-17 20:17:58] [Rank 0] step:7641/10000 train_time:1823572ms step_avg:238.66ms 
+[2025-07-17 20:17:58] [Rank 0] step:7641/10000 train_time:1823572ms step_avg:238.66ms +[2025-07-17 20:18:03] [Rank 0] step:7661/10000 train_time:1828567ms step_avg:238.69ms +[2025-07-17 20:18:03] [Rank 0] step:7661/10000 train_time:1828567ms step_avg:238.69ms +[2025-07-17 20:18:08] [Rank 0] step:7681/10000 train_time:1833575ms step_avg:238.72ms +[2025-07-17 20:18:08] [Rank 0] step:7681/10000 train_time:1833575ms step_avg:238.72ms +[2025-07-17 20:18:13] [Rank 0] step:7701/10000 train_time:1838567ms step_avg:238.74ms +[2025-07-17 20:18:13] [Rank 0] step:7701/10000 train_time:1838567ms step_avg:238.74ms +[2025-07-17 20:18:18] [Rank 0] step:7721/10000 train_time:1843558ms step_avg:238.77ms +[2025-07-17 20:18:18] [Rank 0] step:7721/10000 train_time:1843558ms step_avg:238.77ms +[2025-07-17 20:18:23] [Rank 0] step:7741/10000 train_time:1848549ms step_avg:238.80ms +[2025-07-17 20:18:23] [Rank 0] step:7741/10000 train_time:1848549ms step_avg:238.80ms +[2025-07-17 20:18:30] [Rank 0] PRINT: step:7750/10000 val_loss:3.6742 train_time:1851060ms step_avg:238.85ms +[2025-07-17 20:18:30] [Rank 0] PRINT: step:7750/10000 val_loss:3.6742 train_time:1851060ms step_avg:238.85ms +[2025-07-17 20:18:33] [Rank 0] step:7761/10000 train_time:1853552ms step_avg:238.83ms +[2025-07-17 20:18:33] [Rank 0] step:7761/10000 train_time:1853552ms step_avg:238.83ms +[2025-07-17 20:18:38] [Rank 0] step:7781/10000 train_time:1858544ms step_avg:238.86ms +[2025-07-17 20:18:38] [Rank 0] step:7781/10000 train_time:1858544ms step_avg:238.86ms +[2025-07-17 20:18:43] [Rank 0] step:7801/10000 train_time:1863543ms step_avg:238.89ms +[2025-07-17 20:18:43] [Rank 0] step:7801/10000 train_time:1863543ms step_avg:238.89ms +[2025-07-17 20:18:48] [Rank 0] step:7821/10000 train_time:1868535ms step_avg:238.91ms +[2025-07-17 20:18:48] [Rank 0] step:7821/10000 train_time:1868535ms step_avg:238.91ms +[2025-07-17 20:18:53] [Rank 0] step:7841/10000 train_time:1873527ms step_avg:238.94ms +[2025-07-17 20:18:53] [Rank 0] 
step:7841/10000 train_time:1873527ms step_avg:238.94ms +[2025-07-17 20:18:58] [Rank 0] step:7861/10000 train_time:1878511ms step_avg:238.97ms +[2025-07-17 20:18:58] [Rank 0] step:7861/10000 train_time:1878511ms step_avg:238.97ms +[2025-07-17 20:19:06] [Rank 0] PRINT: step:7875/10000 val_loss:3.6762 train_time:1882253ms step_avg:239.02ms +[2025-07-17 20:19:06] [Rank 0] PRINT: step:7875/10000 val_loss:3.6762 train_time:1882253ms step_avg:239.02ms +[2025-07-17 20:19:08] [Rank 0] step:7881/10000 train_time:1883496ms step_avg:238.99ms +[2025-07-17 20:19:08] [Rank 0] step:7881/10000 train_time:1883496ms step_avg:238.99ms +[2025-07-17 20:19:13] [Rank 0] step:7901/10000 train_time:1888484ms step_avg:239.02ms +[2025-07-17 20:19:13] [Rank 0] step:7901/10000 train_time:1888484ms step_avg:239.02ms +[2025-07-17 20:19:18] [Rank 0] step:7921/10000 train_time:1893473ms step_avg:239.04ms +[2025-07-17 20:19:18] [Rank 0] step:7921/10000 train_time:1893473ms step_avg:239.04ms +[2025-07-17 20:19:23] [Rank 0] step:7941/10000 train_time:1898473ms step_avg:239.07ms +[2025-07-17 20:19:23] [Rank 0] step:7941/10000 train_time:1898473ms step_avg:239.07ms +[2025-07-17 20:19:28] [Rank 0] step:7961/10000 train_time:1903474ms step_avg:239.10ms +[2025-07-17 20:19:28] [Rank 0] step:7961/10000 train_time:1903474ms step_avg:239.10ms +[2025-07-17 20:19:33] [Rank 0] step:7981/10000 train_time:1908455ms step_avg:239.12ms +[2025-07-17 20:19:33] [Rank 0] step:7981/10000 train_time:1908455ms step_avg:239.12ms +[2025-07-17 20:19:42] [Rank 0] PRINT: step:8000/10000 val_loss:3.7033 train_time:1913460ms step_avg:239.18ms +[2025-07-17 20:19:42] [Rank 0] PRINT: step:8000/10000 val_loss:3.7033 train_time:1913460ms step_avg:239.18ms +[2025-07-17 20:19:43] [Rank 0] step:8001/10000 train_time:1913476ms step_avg:239.15ms +[2025-07-17 20:19:43] [Rank 0] step:8001/10000 train_time:1913476ms step_avg:239.15ms +[2025-07-17 20:19:48] [Rank 0] step:8021/10000 train_time:1918440ms step_avg:239.18ms +[2025-07-17 20:19:48] 
[Rank 0] step:8021/10000 train_time:1918440ms step_avg:239.18ms +[2025-07-17 20:19:53] [Rank 0] step:8041/10000 train_time:1923449ms step_avg:239.21ms +[2025-07-17 20:19:53] [Rank 0] step:8041/10000 train_time:1923449ms step_avg:239.21ms +[2025-07-17 20:19:58] [Rank 0] step:8061/10000 train_time:1928428ms step_avg:239.23ms +[2025-07-17 20:19:58] [Rank 0] step:8061/10000 train_time:1928428ms step_avg:239.23ms +[2025-07-17 20:20:03] [Rank 0] step:8081/10000 train_time:1933421ms step_avg:239.26ms +[2025-07-17 20:20:03] [Rank 0] step:8081/10000 train_time:1933421ms step_avg:239.26ms +[2025-07-17 20:20:08] [Rank 0] step:8101/10000 train_time:1938403ms step_avg:239.28ms +[2025-07-17 20:20:08] [Rank 0] step:8101/10000 train_time:1938403ms step_avg:239.28ms +[2025-07-17 20:20:13] [Rank 0] step:8121/10000 train_time:1943392ms step_avg:239.30ms +[2025-07-17 20:20:13] [Rank 0] step:8121/10000 train_time:1943392ms step_avg:239.30ms +[2025-07-17 20:20:18] [Rank 0] PRINT: step:8125/10000 val_loss:3.6939 train_time:1944645ms step_avg:239.34ms +[2025-07-17 20:20:18] [Rank 0] PRINT: step:8125/10000 val_loss:3.6939 train_time:1944645ms step_avg:239.34ms +[2025-07-17 20:20:23] [Rank 0] step:8141/10000 train_time:1948870ms step_avg:239.39ms +[2025-07-17 20:20:23] [Rank 0] step:8141/10000 train_time:1948870ms step_avg:239.39ms +[2025-07-17 20:20:28] [Rank 0] step:8161/10000 train_time:1953896ms step_avg:239.42ms +[2025-07-17 20:20:28] [Rank 0] step:8161/10000 train_time:1953896ms step_avg:239.42ms +[2025-07-17 20:20:33] [Rank 0] step:8181/10000 train_time:1958959ms step_avg:239.45ms +[2025-07-17 20:20:33] [Rank 0] step:8181/10000 train_time:1958959ms step_avg:239.45ms +[2025-07-17 20:20:38] [Rank 0] step:8201/10000 train_time:1963995ms step_avg:239.48ms +[2025-07-17 20:20:38] [Rank 0] step:8201/10000 train_time:1963995ms step_avg:239.48ms +[2025-07-17 20:20:43] [Rank 0] step:8221/10000 train_time:1969046ms step_avg:239.51ms +[2025-07-17 20:20:43] [Rank 0] step:8221/10000 
train_time:1969046ms step_avg:239.51ms +[2025-07-17 20:20:48] [Rank 0] step:8241/10000 train_time:1974096ms step_avg:239.55ms +[2025-07-17 20:20:48] [Rank 0] step:8241/10000 train_time:1974096ms step_avg:239.55ms +[2025-07-17 20:20:55] [Rank 0] PRINT: step:8250/10000 val_loss:3.7658 train_time:1976629ms step_avg:239.59ms +[2025-07-17 20:20:55] [Rank 0] PRINT: step:8250/10000 val_loss:3.7658 train_time:1976629ms step_avg:239.59ms +[2025-07-17 20:20:58] [Rank 0] step:8261/10000 train_time:1979148ms step_avg:239.58ms +[2025-07-17 20:20:58] [Rank 0] step:8261/10000 train_time:1979148ms step_avg:239.58ms +[2025-07-17 20:21:03] [Rank 0] step:8281/10000 train_time:1984220ms step_avg:239.61ms +[2025-07-17 20:21:03] [Rank 0] step:8281/10000 train_time:1984220ms step_avg:239.61ms +[2025-07-17 20:21:08] [Rank 0] step:8301/10000 train_time:1989258ms step_avg:239.64ms +[2025-07-17 20:21:08] [Rank 0] step:8301/10000 train_time:1989258ms step_avg:239.64ms +[2025-07-17 20:21:13] [Rank 0] step:8321/10000 train_time:1994311ms step_avg:239.67ms +[2025-07-17 20:21:13] [Rank 0] step:8321/10000 train_time:1994311ms step_avg:239.67ms +[2025-07-17 20:21:18] [Rank 0] step:8341/10000 train_time:1999373ms step_avg:239.70ms +[2025-07-17 20:21:18] [Rank 0] step:8341/10000 train_time:1999373ms step_avg:239.70ms +[2025-07-17 20:21:23] [Rank 0] step:8361/10000 train_time:2004409ms step_avg:239.73ms +[2025-07-17 20:21:23] [Rank 0] step:8361/10000 train_time:2004409ms step_avg:239.73ms +[2025-07-17 20:21:31] [Rank 0] PRINT: step:8375/10000 val_loss:3.6485 train_time:2008197ms step_avg:239.78ms +[2025-07-17 20:21:31] [Rank 0] PRINT: step:8375/10000 val_loss:3.6485 train_time:2008197ms step_avg:239.78ms +[2025-07-17 20:21:33] [Rank 0] step:8381/10000 train_time:2009445ms step_avg:239.76ms +[2025-07-17 20:21:33] [Rank 0] step:8381/10000 train_time:2009445ms step_avg:239.76ms +[2025-07-17 20:21:38] [Rank 0] step:8401/10000 train_time:2014476ms step_avg:239.79ms +[2025-07-17 20:21:38] [Rank 0] 
step:8401/10000 train_time:2014476ms step_avg:239.79ms +[2025-07-17 20:21:43] [Rank 0] step:8421/10000 train_time:2019522ms step_avg:239.82ms +[2025-07-17 20:21:43] [Rank 0] step:8421/10000 train_time:2019522ms step_avg:239.82ms +[2025-07-17 20:21:48] [Rank 0] step:8441/10000 train_time:2024574ms step_avg:239.85ms +[2025-07-17 20:21:48] [Rank 0] step:8441/10000 train_time:2024574ms step_avg:239.85ms +[2025-07-17 20:21:53] [Rank 0] step:8461/10000 train_time:2029634ms step_avg:239.88ms +[2025-07-17 20:21:53] [Rank 0] step:8461/10000 train_time:2029634ms step_avg:239.88ms +[2025-07-17 20:21:58] [Rank 0] step:8481/10000 train_time:2034674ms step_avg:239.91ms +[2025-07-17 20:21:58] [Rank 0] step:8481/10000 train_time:2034674ms step_avg:239.91ms +[2025-07-17 20:22:08] [Rank 0] PRINT: step:8500/10000 val_loss:3.6060 train_time:2039736ms step_avg:239.97ms +[2025-07-17 20:22:08] [Rank 0] PRINT: step:8500/10000 val_loss:3.6060 train_time:2039736ms step_avg:239.97ms +[2025-07-17 20:22:08] [Rank 0] step:8501/10000 train_time:2039752ms step_avg:239.94ms +[2025-07-17 20:22:08] [Rank 0] step:8501/10000 train_time:2039752ms step_avg:239.94ms +[2025-07-17 20:22:13] [Rank 0] step:8521/10000 train_time:2044775ms step_avg:239.97ms +[2025-07-17 20:22:13] [Rank 0] step:8521/10000 train_time:2044775ms step_avg:239.97ms +[2025-07-17 20:22:18] [Rank 0] step:8541/10000 train_time:2049843ms step_avg:240.00ms +[2025-07-17 20:22:18] [Rank 0] step:8541/10000 train_time:2049843ms step_avg:240.00ms +[2025-07-17 20:22:23] [Rank 0] step:8561/10000 train_time:2054888ms step_avg:240.03ms +[2025-07-17 20:22:23] [Rank 0] step:8561/10000 train_time:2054888ms step_avg:240.03ms +[2025-07-17 20:22:28] [Rank 0] step:8581/10000 train_time:2059939ms step_avg:240.06ms +[2025-07-17 20:22:28] [Rank 0] step:8581/10000 train_time:2059939ms step_avg:240.06ms +[2025-07-17 20:22:33] [Rank 0] step:8601/10000 train_time:2064971ms step_avg:240.09ms +[2025-07-17 20:22:33] [Rank 0] step:8601/10000 train_time:2064971ms 
step_avg:240.09ms +[2025-07-17 20:22:38] [Rank 0] step:8621/10000 train_time:2070018ms step_avg:240.11ms +[2025-07-17 20:22:38] [Rank 0] step:8621/10000 train_time:2070018ms step_avg:240.11ms +[2025-07-17 20:22:44] [Rank 0] PRINT: step:8625/10000 val_loss:3.6214 train_time:2071286ms step_avg:240.15ms +[2025-07-17 20:22:44] [Rank 0] PRINT: step:8625/10000 val_loss:3.6214 train_time:2071286ms step_avg:240.15ms +[2025-07-17 20:22:49] [Rank 0] step:8641/10000 train_time:2075573ms step_avg:240.20ms +[2025-07-17 20:22:49] [Rank 0] step:8641/10000 train_time:2075573ms step_avg:240.20ms +[2025-07-17 20:22:54] [Rank 0] step:8661/10000 train_time:2080628ms step_avg:240.23ms +[2025-07-17 20:22:54] [Rank 0] step:8661/10000 train_time:2080628ms step_avg:240.23ms +[2025-07-17 20:22:59] [Rank 0] step:8681/10000 train_time:2085679ms step_avg:240.26ms +[2025-07-17 20:22:59] [Rank 0] step:8681/10000 train_time:2085679ms step_avg:240.26ms +[2025-07-17 20:23:04] [Rank 0] step:8701/10000 train_time:2090738ms step_avg:240.29ms +[2025-07-17 20:23:04] [Rank 0] step:8701/10000 train_time:2090738ms step_avg:240.29ms +[2025-07-17 20:23:09] [Rank 0] step:8721/10000 train_time:2095799ms step_avg:240.32ms +[2025-07-17 20:23:09] [Rank 0] step:8721/10000 train_time:2095799ms step_avg:240.32ms +[2025-07-17 20:23:14] [Rank 0] step:8741/10000 train_time:2100850ms step_avg:240.34ms +[2025-07-17 20:23:14] [Rank 0] step:8741/10000 train_time:2100850ms step_avg:240.34ms +[2025-07-17 20:23:21] [Rank 0] PRINT: step:8750/10000 val_loss:3.6374 train_time:2103378ms step_avg:240.39ms +[2025-07-17 20:23:21] [Rank 0] PRINT: step:8750/10000 val_loss:3.6374 train_time:2103378ms step_avg:240.39ms +[2025-07-17 20:23:23] [Rank 0] step:8761/10000 train_time:2105904ms step_avg:240.37ms +[2025-07-17 20:23:23] [Rank 0] step:8761/10000 train_time:2105904ms step_avg:240.37ms +[2025-07-17 20:23:28] [Rank 0] step:8781/10000 train_time:2110956ms step_avg:240.40ms +[2025-07-17 20:23:28] [Rank 0] step:8781/10000 
train_time:2110956ms step_avg:240.40ms +[2025-07-17 20:23:33] [Rank 0] step:8801/10000 train_time:2116019ms step_avg:240.43ms +[2025-07-17 20:23:33] [Rank 0] step:8801/10000 train_time:2116019ms step_avg:240.43ms +[2025-07-17 20:23:38] [Rank 0] step:8821/10000 train_time:2121070ms step_avg:240.46ms +[2025-07-17 20:23:38] [Rank 0] step:8821/10000 train_time:2121070ms step_avg:240.46ms +[2025-07-17 20:23:44] [Rank 0] step:8841/10000 train_time:2126139ms step_avg:240.49ms +[2025-07-17 20:23:44] [Rank 0] step:8841/10000 train_time:2126139ms step_avg:240.49ms +[2025-07-17 20:23:49] [Rank 0] step:8861/10000 train_time:2131208ms step_avg:240.52ms +[2025-07-17 20:23:49] [Rank 0] step:8861/10000 train_time:2131208ms step_avg:240.52ms +[2025-07-17 20:23:57] [Rank 0] PRINT: step:8875/10000 val_loss:3.6361 train_time:2134988ms step_avg:240.56ms +[2025-07-17 20:23:57] [Rank 0] PRINT: step:8875/10000 val_loss:3.6361 train_time:2134988ms step_avg:240.56ms +[2025-07-17 20:23:58] [Rank 0] step:8881/10000 train_time:2136241ms step_avg:240.54ms +[2025-07-17 20:23:58] [Rank 0] step:8881/10000 train_time:2136241ms step_avg:240.54ms +[2025-07-17 20:24:03] [Rank 0] step:8901/10000 train_time:2141287ms step_avg:240.57ms +[2025-07-17 20:24:03] [Rank 0] step:8901/10000 train_time:2141287ms step_avg:240.57ms +[2025-07-17 20:24:09] [Rank 0] step:8921/10000 train_time:2146335ms step_avg:240.59ms +[2025-07-17 20:24:09] [Rank 0] step:8921/10000 train_time:2146335ms step_avg:240.59ms +[2025-07-17 20:24:14] [Rank 0] step:8941/10000 train_time:2151390ms step_avg:240.62ms +[2025-07-17 20:24:14] [Rank 0] step:8941/10000 train_time:2151390ms step_avg:240.62ms +[2025-07-17 20:24:19] [Rank 0] step:8961/10000 train_time:2156450ms step_avg:240.65ms +[2025-07-17 20:24:19] [Rank 0] step:8961/10000 train_time:2156450ms step_avg:240.65ms +[2025-07-17 20:24:24] [Rank 0] step:8981/10000 train_time:2161508ms step_avg:240.68ms +[2025-07-17 20:24:24] [Rank 0] step:8981/10000 train_time:2161508ms step_avg:240.68ms 
+[2025-07-17 20:24:33] [Rank 0] PRINT: step:9000/10000 val_loss:3.5588 train_time:2166569ms step_avg:240.73ms +[2025-07-17 20:24:33] [Rank 0] PRINT: step:9000/10000 val_loss:3.5588 train_time:2166569ms step_avg:240.73ms +[2025-07-17 20:24:34] [Rank 0] step:9001/10000 train_time:2166584ms step_avg:240.70ms +[2025-07-17 20:24:34] [Rank 0] step:9001/10000 train_time:2166584ms step_avg:240.70ms +[2025-07-17 20:24:39] [Rank 0] step:9021/10000 train_time:2171610ms step_avg:240.73ms +[2025-07-17 20:24:39] [Rank 0] step:9021/10000 train_time:2171610ms step_avg:240.73ms +[2025-07-17 20:24:44] [Rank 0] step:9041/10000 train_time:2176685ms step_avg:240.76ms +[2025-07-17 20:24:44] [Rank 0] step:9041/10000 train_time:2176685ms step_avg:240.76ms +[2025-07-17 20:24:49] [Rank 0] step:9061/10000 train_time:2181745ms step_avg:240.78ms +[2025-07-17 20:24:49] [Rank 0] step:9061/10000 train_time:2181745ms step_avg:240.78ms +[2025-07-17 20:24:54] [Rank 0] step:9081/10000 train_time:2186828ms step_avg:240.81ms +[2025-07-17 20:24:54] [Rank 0] step:9081/10000 train_time:2186828ms step_avg:240.81ms +[2025-07-17 20:24:59] [Rank 0] step:9101/10000 train_time:2191898ms step_avg:240.84ms +[2025-07-17 20:24:59] [Rank 0] step:9101/10000 train_time:2191898ms step_avg:240.84ms +[2025-07-17 20:25:04] [Rank 0] step:9121/10000 train_time:2196973ms step_avg:240.87ms +[2025-07-17 20:25:04] [Rank 0] step:9121/10000 train_time:2196973ms step_avg:240.87ms +[2025-07-17 20:25:10] [Rank 0] PRINT: step:9125/10000 val_loss:3.6342 train_time:2198238ms step_avg:240.90ms +[2025-07-17 20:25:10] [Rank 0] PRINT: step:9125/10000 val_loss:3.6342 train_time:2198238ms step_avg:240.90ms +[2025-07-17 20:25:14] [Rank 0] step:9141/10000 train_time:2202018ms step_avg:240.89ms +[2025-07-17 20:25:14] [Rank 0] step:9141/10000 train_time:2202018ms step_avg:240.89ms +[2025-07-17 20:25:19] [Rank 0] step:9161/10000 train_time:2207593ms step_avg:240.98ms +[2025-07-17 20:25:19] [Rank 0] step:9161/10000 train_time:2207593ms 
step_avg:240.98ms +[2025-07-17 20:25:24] [Rank 0] step:9181/10000 train_time:2212650ms step_avg:241.00ms +[2025-07-17 20:25:24] [Rank 0] step:9181/10000 train_time:2212650ms step_avg:241.00ms +[2025-07-17 20:25:29] [Rank 0] step:9201/10000 train_time:2217708ms step_avg:241.03ms +[2025-07-17 20:25:29] [Rank 0] step:9201/10000 train_time:2217708ms step_avg:241.03ms +[2025-07-17 20:25:35] [Rank 0] step:9221/10000 train_time:2222797ms step_avg:241.06ms +[2025-07-17 20:25:35] [Rank 0] step:9221/10000 train_time:2222797ms step_avg:241.06ms +[2025-07-17 20:25:40] [Rank 0] step:9241/10000 train_time:2227862ms step_avg:241.08ms +[2025-07-17 20:25:40] [Rank 0] step:9241/10000 train_time:2227862ms step_avg:241.08ms +[2025-07-17 20:25:46] [Rank 0] PRINT: step:9250/10000 val_loss:3.5834 train_time:2230399ms step_avg:241.12ms +[2025-07-17 20:25:46] [Rank 0] PRINT: step:9250/10000 val_loss:3.5834 train_time:2230399ms step_avg:241.12ms +[2025-07-17 20:25:49] [Rank 0] step:9261/10000 train_time:2232927ms step_avg:241.11ms +[2025-07-17 20:25:49] [Rank 0] step:9261/10000 train_time:2232927ms step_avg:241.11ms +[2025-07-17 20:25:54] [Rank 0] step:9281/10000 train_time:2237969ms step_avg:241.13ms +[2025-07-17 20:25:54] [Rank 0] step:9281/10000 train_time:2237969ms step_avg:241.13ms +[2025-07-17 20:25:59] [Rank 0] step:9301/10000 train_time:2243036ms step_avg:241.16ms +[2025-07-17 20:25:59] [Rank 0] step:9301/10000 train_time:2243036ms step_avg:241.16ms +[2025-07-17 20:26:04] [Rank 0] step:9321/10000 train_time:2248114ms step_avg:241.19ms +[2025-07-17 20:26:04] [Rank 0] step:9321/10000 train_time:2248114ms step_avg:241.19ms +[2025-07-17 20:26:09] [Rank 0] step:9341/10000 train_time:2253175ms step_avg:241.21ms +[2025-07-17 20:26:09] [Rank 0] step:9341/10000 train_time:2253175ms step_avg:241.21ms +[2025-07-17 20:26:14] [Rank 0] step:9361/10000 train_time:2258238ms step_avg:241.24ms +[2025-07-17 20:26:14] [Rank 0] step:9361/10000 train_time:2258238ms step_avg:241.24ms +[2025-07-17 
20:26:23] [Rank 0] PRINT: step:9375/10000 val_loss:3.6202 train_time:2262038ms step_avg:241.28ms +[2025-07-17 20:26:23] [Rank 0] PRINT: step:9375/10000 val_loss:3.6202 train_time:2262038ms step_avg:241.28ms +[2025-07-17 20:26:24] [Rank 0] step:9381/10000 train_time:2263296ms step_avg:241.26ms +[2025-07-17 20:26:24] [Rank 0] step:9381/10000 train_time:2263296ms step_avg:241.26ms +[2025-07-17 20:26:29] [Rank 0] step:9401/10000 train_time:2268343ms step_avg:241.29ms +[2025-07-17 20:26:29] [Rank 0] step:9401/10000 train_time:2268343ms step_avg:241.29ms +[2025-07-17 20:26:34] [Rank 0] step:9421/10000 train_time:2273405ms step_avg:241.31ms +[2025-07-17 20:26:34] [Rank 0] step:9421/10000 train_time:2273405ms step_avg:241.31ms +[2025-07-17 20:26:39] [Rank 0] step:9441/10000 train_time:2278472ms step_avg:241.34ms +[2025-07-17 20:26:39] [Rank 0] step:9441/10000 train_time:2278472ms step_avg:241.34ms +[2025-07-17 20:26:44] [Rank 0] step:9461/10000 train_time:2283552ms step_avg:241.36ms +[2025-07-17 20:26:44] [Rank 0] step:9461/10000 train_time:2283552ms step_avg:241.36ms +[2025-07-17 20:26:49] [Rank 0] step:9481/10000 train_time:2288619ms step_avg:241.39ms +[2025-07-17 20:26:49] [Rank 0] step:9481/10000 train_time:2288619ms step_avg:241.39ms +[2025-07-17 20:26:59] [Rank 0] PRINT: step:9500/10000 val_loss:3.5754 train_time:2293713ms step_avg:241.44ms +[2025-07-17 20:26:59] [Rank 0] PRINT: step:9500/10000 val_loss:3.5754 train_time:2293713ms step_avg:241.44ms +[2025-07-17 20:26:59] [Rank 0] step:9501/10000 train_time:2293728ms step_avg:241.42ms +[2025-07-17 20:26:59] [Rank 0] step:9501/10000 train_time:2293728ms step_avg:241.42ms +[2025-07-17 20:27:04] [Rank 0] step:9521/10000 train_time:2298767ms step_avg:241.44ms +[2025-07-17 20:27:04] [Rank 0] step:9521/10000 train_time:2298767ms step_avg:241.44ms +[2025-07-17 20:27:10] [Rank 0] step:9541/10000 train_time:2303830ms step_avg:241.47ms +[2025-07-17 20:27:10] [Rank 0] step:9541/10000 train_time:2303830ms step_avg:241.47ms 
+[2025-07-17 20:27:15] [Rank 0] step:9561/10000 train_time:2308888ms step_avg:241.49ms +[2025-07-17 20:27:15] [Rank 0] step:9561/10000 train_time:2308888ms step_avg:241.49ms +[2025-07-17 20:27:20] [Rank 0] step:9581/10000 train_time:2313934ms step_avg:241.51ms +[2025-07-17 20:27:20] [Rank 0] step:9581/10000 train_time:2313934ms step_avg:241.51ms +[2025-07-17 20:27:25] [Rank 0] step:9601/10000 train_time:2318981ms step_avg:241.54ms +[2025-07-17 20:27:25] [Rank 0] step:9601/10000 train_time:2318981ms step_avg:241.54ms +[2025-07-17 20:27:30] [Rank 0] step:9621/10000 train_time:2324063ms step_avg:241.56ms +[2025-07-17 20:27:30] [Rank 0] step:9621/10000 train_time:2324063ms step_avg:241.56ms +[2025-07-17 20:27:35] [Rank 0] PRINT: step:9625/10000 val_loss:3.5298 train_time:2325325ms step_avg:241.59ms +[2025-07-17 20:27:35] [Rank 0] PRINT: step:9625/10000 val_loss:3.5298 train_time:2325325ms step_avg:241.59ms +[2025-07-17 20:27:39] [Rank 0] step:9641/10000 train_time:2329139ms step_avg:241.59ms +[2025-07-17 20:27:39] [Rank 0] step:9641/10000 train_time:2329139ms step_avg:241.59ms +[2025-07-17 20:27:45] [Rank 0] step:9661/10000 train_time:2334761ms step_avg:241.67ms +[2025-07-17 20:27:45] [Rank 0] step:9661/10000 train_time:2334761ms step_avg:241.67ms +[2025-07-17 20:27:50] [Rank 0] step:9681/10000 train_time:2339883ms step_avg:241.70ms +[2025-07-17 20:27:50] [Rank 0] step:9681/10000 train_time:2339883ms step_avg:241.70ms +[2025-07-17 20:27:55] [Rank 0] step:9701/10000 train_time:2345006ms step_avg:241.73ms +[2025-07-17 20:27:55] [Rank 0] step:9701/10000 train_time:2345006ms step_avg:241.73ms +[2025-07-17 20:28:00] [Rank 0] step:9721/10000 train_time:2350111ms step_avg:241.76ms +[2025-07-17 20:28:00] [Rank 0] step:9721/10000 train_time:2350111ms step_avg:241.76ms +[2025-07-17 20:28:05] [Rank 0] step:9741/10000 train_time:2355234ms step_avg:241.79ms +[2025-07-17 20:28:05] [Rank 0] step:9741/10000 train_time:2355234ms step_avg:241.79ms +[2025-07-17 20:28:12] [Rank 0] PRINT: 
step:9750/10000 val_loss:3.5737 train_time:2357789ms step_avg:241.82ms +[2025-07-17 20:28:12] [Rank 0] PRINT: step:9750/10000 val_loss:3.5737 train_time:2357789ms step_avg:241.82ms +[2025-07-17 20:28:15] [Rank 0] step:9761/10000 train_time:2360333ms step_avg:241.81ms +[2025-07-17 20:28:15] [Rank 0] step:9761/10000 train_time:2360333ms step_avg:241.81ms +[2025-07-17 20:28:20] [Rank 0] step:9781/10000 train_time:2365444ms step_avg:241.84ms +[2025-07-17 20:28:20] [Rank 0] step:9781/10000 train_time:2365444ms step_avg:241.84ms +[2025-07-17 20:28:25] [Rank 0] step:9801/10000 train_time:2370544ms step_avg:241.87ms +[2025-07-17 20:28:25] [Rank 0] step:9801/10000 train_time:2370544ms step_avg:241.87ms +[2025-07-17 20:28:31] [Rank 0] step:9821/10000 train_time:2375653ms step_avg:241.90ms +[2025-07-17 20:28:31] [Rank 0] step:9821/10000 train_time:2375653ms step_avg:241.90ms +[2025-07-17 20:28:36] [Rank 0] step:9841/10000 train_time:2380751ms step_avg:241.92ms +[2025-07-17 20:28:36] [Rank 0] step:9841/10000 train_time:2380751ms step_avg:241.92ms +[2025-07-17 20:28:41] [Rank 0] step:9861/10000 train_time:2385844ms step_avg:241.95ms +[2025-07-17 20:28:41] [Rank 0] step:9861/10000 train_time:2385844ms step_avg:241.95ms +[2025-07-17 20:28:49] [Rank 0] PRINT: step:9875/10000 val_loss:3.5444 train_time:2389672ms step_avg:241.99ms +[2025-07-17 20:28:49] [Rank 0] PRINT: step:9875/10000 val_loss:3.5444 train_time:2389672ms step_avg:241.99ms +[2025-07-17 20:28:51] [Rank 0] step:9881/10000 train_time:2390947ms step_avg:241.97ms +[2025-07-17 20:28:51] [Rank 0] step:9881/10000 train_time:2390947ms step_avg:241.97ms +[2025-07-17 20:28:56] [Rank 0] step:9901/10000 train_time:2396046ms step_avg:242.00ms +[2025-07-17 20:28:56] [Rank 0] step:9901/10000 train_time:2396046ms step_avg:242.00ms +[2025-07-17 20:29:01] [Rank 0] step:9921/10000 train_time:2401158ms step_avg:242.03ms +[2025-07-17 20:29:01] [Rank 0] step:9921/10000 train_time:2401158ms step_avg:242.03ms +[2025-07-17 20:29:06] [Rank 0] 
step:9941/10000 train_time:2406299ms step_avg:242.06ms +[2025-07-17 20:29:06] [Rank 0] step:9941/10000 train_time:2406299ms step_avg:242.06ms +[2025-07-17 20:29:11] [Rank 0] step:9961/10000 train_time:2411423ms step_avg:242.09ms +[2025-07-17 20:29:11] [Rank 0] step:9961/10000 train_time:2411423ms step_avg:242.09ms +[2025-07-17 20:29:16] [Rank 0] step:9981/10000 train_time:2416562ms step_avg:242.12ms +[2025-07-17 20:29:16] [Rank 0] step:9981/10000 train_time:2416562ms step_avg:242.12ms +[2025-07-17 20:29:21] [Rank 0] step:10000/10000 train_time:2421394ms step_avg:242.14ms +[2025-07-17 20:29:21] [Rank 0] step:10000/10000 train_time:2421394ms step_avg:242.14ms +[2025-07-17 20:29:25] [Rank 0] PRINT: step:10000/10000 val_loss:3.5512 train_time:2421657ms step_avg:242.17ms +[2025-07-17 20:29:25] [Rank 0] PRINT: step:10000/10000 val_loss:3.5512 train_time:2421657ms step_avg:242.17ms +[2025-07-17 20:29:25] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 20:29:25 2025 --- +[2025-07-17 20:29:25] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 20:29:25 2025 --- +[2025-07-17 20:29:25] [Rank 0] PRINT: Peak memory allocated: 31029 MiB reserved: 31336 MiB +[2025-07-17 20:29:25] [Rank 0] PRINT: Peak memory allocated: 31029 MiB reserved: 31336 MiB diff --git a/logs_norope/diff_modes/mode_4_param_norope_seed_42/config.json b/logs_norope/diff_modes/mode_4_param_norope_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..f15bfcac2fda2c491177d7e0215bcb5fd6674d66 --- /dev/null +++ b/logs_norope/diff_modes/mode_4_param_norope_seed_42/config.json @@ -0,0 +1,22 @@ +{ + "cli_args": { + "unet": false, + "seed": 42, + "optimizer_mode": 4, + "model_parameterization": "norope" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 10485760, + "train_seq_len": 49152, + 
"val_seq_len": 262144, + "num_iterations": 10000, + "cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 125, + "save_checkpoint": false + }, + "run_uuid_for_log": "00dd603a-2cd7-4812-a3a4-32e81f8dc072", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_norope/diff_modes/mode_4_param_norope_seed_42/training_log_00dd603a-2cd7-4812-a3a4-32e81f8dc072.txt b/logs_norope/diff_modes/mode_4_param_norope_seed_42/training_log_00dd603a-2cd7-4812-a3a4-32e81f8dc072.txt new file mode 100644 index 0000000000000000000000000000000000000000..9c1f7026bd8478381416a8a096285f2b63784d77 --- /dev/null +++ b/logs_norope/diff_modes/mode_4_param_norope_seed_42/training_log_00dd603a-2cd7-4812-a3a4-32e81f8dc072.txt @@ -0,0 +1,2360 @@ +[2025-07-17 13:17:42] [Rank 0] PRINT: --- Script Start: Thu Jul 17 13:17:42 2025 --- +[2025-07-17 13:17:42] [Rank 0] PRINT: --- Script Start: Thu Jul 17 13:17:42 2025 --- +[2025-07-17 13:17:42] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=4, model_parameterization='norope') +[2025-07-17 13:17:42] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=4, model_parameterization='norope') +[2025-07-17 13:17:42] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 13:17:42] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 13:17:42] [Rank 0] PRINT: Using fixed seed: 42 +[2025-07-17 13:17:42] [Rank 0] PRINT: Using fixed seed: 42 +[2025-07-17 13:17:42] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_4_param_norope_seed_42 +[2025-07-17 13:17:42] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_4_param_norope_seed_42 +[2025-07-17 13:17:42] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path 
+import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * 
num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used 
for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + 
config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 13:17:42] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + 
num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + 
run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 13:17:42] [Rank 0] PRINT: Constructing model... +[2025-07-17 13:17:42] [Rank 0] PRINT: Constructing model... +[2025-07-17 13:17:45] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 13:17:45] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 13:17:45] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 13:17:45] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 13:17:45] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 13:17:45] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 13:17:45] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 13:17:45] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 13:17:45] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 4 +[2025-07-17 13:17:45] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 4 +[2025-07-17 13:17:45] [Rank 0] PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: 0.001). +[2025-07-17 13:17:45] [Rank 0] PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: 0.001). +[2025-07-17 13:17:45] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-07-17 13:17:45] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-07-17 13:17:45] [Rank 0] PRINT: Muon optimizer is active with 24 parameters. +[2025-07-17 13:17:45] [Rank 0] PRINT: Muon optimizer is active with 24 parameters. +[2025-07-17 13:17:45] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 13:17:45] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 13:17:45] [Rank 0] PRINT: Model compilation complete. +[2025-07-17 13:17:45] [Rank 0] PRINT: Model compilation complete. +[2025-07-17 13:17:45] [Rank 0] PRINT: Starting warmup... 
+[2025-07-17 13:17:45] [Rank 0] PRINT: Starting warmup... +[2025-07-17 13:18:57] [Rank 0] PRINT: Warmup complete. +[2025-07-17 13:18:57] [Rank 0] PRINT: Warmup complete. +[2025-07-17 13:18:58] [Rank 0] PRINT: Starting training... +[2025-07-17 13:18:58] [Rank 0] PRINT: Starting training... +[2025-07-17 13:19:08] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 13:19:08] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 13:19:13] [Rank 0] step:21/10000 train_time:4559ms step_avg:217.10ms +[2025-07-17 13:19:13] [Rank 0] step:21/10000 train_time:4559ms step_avg:217.10ms +[2025-07-17 13:19:17] [Rank 0] step:41/10000 train_time:9056ms step_avg:220.87ms +[2025-07-17 13:19:17] [Rank 0] step:41/10000 train_time:9056ms step_avg:220.87ms +[2025-07-17 13:19:22] [Rank 0] step:61/10000 train_time:13553ms step_avg:222.17ms +[2025-07-17 13:19:22] [Rank 0] step:61/10000 train_time:13553ms step_avg:222.17ms +[2025-07-17 13:19:26] [Rank 0] step:81/10000 train_time:18057ms step_avg:222.93ms +[2025-07-17 13:19:26] [Rank 0] step:81/10000 train_time:18057ms step_avg:222.93ms +[2025-07-17 13:19:31] [Rank 0] step:101/10000 train_time:22566ms step_avg:223.42ms +[2025-07-17 13:19:31] [Rank 0] step:101/10000 train_time:22566ms step_avg:223.42ms +[2025-07-17 13:19:35] [Rank 0] step:121/10000 train_time:27078ms step_avg:223.78ms +[2025-07-17 13:19:35] [Rank 0] step:121/10000 train_time:27078ms step_avg:223.78ms +[2025-07-17 13:19:41] [Rank 0] PRINT: step:125/10000 val_loss:5.5209 train_time:28436ms step_avg:227.49ms +[2025-07-17 13:19:41] [Rank 0] PRINT: step:125/10000 val_loss:5.5209 train_time:28436ms step_avg:227.49ms +[2025-07-17 13:19:44] [Rank 0] step:141/10000 train_time:31589ms step_avg:224.04ms +[2025-07-17 13:19:44] [Rank 0] step:141/10000 train_time:31589ms step_avg:224.04ms +[2025-07-17 13:19:49] [Rank 0] step:161/10000 train_time:36103ms step_avg:224.24ms +[2025-07-17 13:19:49] [Rank 0] step:161/10000 
train_time:36103ms step_avg:224.24ms +[2025-07-17 13:19:53] [Rank 0] step:181/10000 train_time:40616ms step_avg:224.40ms +[2025-07-17 13:19:53] [Rank 0] step:181/10000 train_time:40616ms step_avg:224.40ms +[2025-07-17 13:19:58] [Rank 0] step:201/10000 train_time:45130ms step_avg:224.53ms +[2025-07-17 13:19:58] [Rank 0] step:201/10000 train_time:45130ms step_avg:224.53ms +[2025-07-17 13:20:02] [Rank 0] step:221/10000 train_time:49646ms step_avg:224.64ms +[2025-07-17 13:20:02] [Rank 0] step:221/10000 train_time:49646ms step_avg:224.64ms +[2025-07-17 13:20:07] [Rank 0] step:241/10000 train_time:54166ms step_avg:224.76ms +[2025-07-17 13:20:07] [Rank 0] step:241/10000 train_time:54166ms step_avg:224.76ms +[2025-07-17 13:20:13] [Rank 0] PRINT: step:250/10000 val_loss:5.1390 train_time:56655ms step_avg:226.62ms +[2025-07-17 13:20:13] [Rank 0] PRINT: step:250/10000 val_loss:5.1390 train_time:56655ms step_avg:226.62ms +[2025-07-17 13:20:16] [Rank 0] step:261/10000 train_time:58681ms step_avg:224.83ms +[2025-07-17 13:20:16] [Rank 0] step:261/10000 train_time:58681ms step_avg:224.83ms +[2025-07-17 13:20:20] [Rank 0] step:281/10000 train_time:63196ms step_avg:224.90ms +[2025-07-17 13:20:20] [Rank 0] step:281/10000 train_time:63196ms step_avg:224.90ms +[2025-07-17 13:20:25] [Rank 0] step:301/10000 train_time:67711ms step_avg:224.95ms +[2025-07-17 13:20:25] [Rank 0] step:301/10000 train_time:67711ms step_avg:224.95ms +[2025-07-17 13:20:29] [Rank 0] step:321/10000 train_time:72227ms step_avg:225.01ms +[2025-07-17 13:20:29] [Rank 0] step:321/10000 train_time:72227ms step_avg:225.01ms +[2025-07-17 13:20:34] [Rank 0] step:341/10000 train_time:76738ms step_avg:225.04ms +[2025-07-17 13:20:34] [Rank 0] step:341/10000 train_time:76738ms step_avg:225.04ms +[2025-07-17 13:20:39] [Rank 0] step:361/10000 train_time:81253ms step_avg:225.08ms +[2025-07-17 13:20:39] [Rank 0] step:361/10000 train_time:81253ms step_avg:225.08ms +[2025-07-17 13:20:46] [Rank 0] PRINT: step:375/10000 
val_loss:5.1128 train_time:84867ms step_avg:226.31ms +[2025-07-17 13:20:46] [Rank 0] PRINT: step:375/10000 val_loss:5.1128 train_time:84867ms step_avg:226.31ms +[2025-07-17 13:20:47] [Rank 0] step:381/10000 train_time:85765ms step_avg:225.11ms +[2025-07-17 13:20:47] [Rank 0] step:381/10000 train_time:85765ms step_avg:225.11ms +[2025-07-17 13:20:52] [Rank 0] step:401/10000 train_time:90276ms step_avg:225.13ms +[2025-07-17 13:20:52] [Rank 0] step:401/10000 train_time:90276ms step_avg:225.13ms +[2025-07-17 13:20:56] [Rank 0] step:421/10000 train_time:94785ms step_avg:225.14ms +[2025-07-17 13:20:56] [Rank 0] step:421/10000 train_time:94785ms step_avg:225.14ms +[2025-07-17 13:21:01] [Rank 0] step:441/10000 train_time:99296ms step_avg:225.16ms +[2025-07-17 13:21:01] [Rank 0] step:441/10000 train_time:99296ms step_avg:225.16ms +[2025-07-17 13:21:05] [Rank 0] step:461/10000 train_time:103811ms step_avg:225.19ms +[2025-07-17 13:21:05] [Rank 0] step:461/10000 train_time:103811ms step_avg:225.19ms +[2025-07-17 13:21:10] [Rank 0] step:481/10000 train_time:108324ms step_avg:225.21ms +[2025-07-17 13:21:10] [Rank 0] step:481/10000 train_time:108324ms step_avg:225.21ms +[2025-07-17 13:21:19] [Rank 0] PRINT: step:500/10000 val_loss:5.0798 train_time:113067ms step_avg:226.13ms +[2025-07-17 13:21:19] [Rank 0] PRINT: step:500/10000 val_loss:5.0798 train_time:113067ms step_avg:226.13ms +[2025-07-17 13:21:19] [Rank 0] step:501/10000 train_time:113080ms step_avg:225.71ms +[2025-07-17 13:21:19] [Rank 0] step:501/10000 train_time:113080ms step_avg:225.71ms +[2025-07-17 13:21:24] [Rank 0] step:521/10000 train_time:117647ms step_avg:225.81ms +[2025-07-17 13:21:24] [Rank 0] step:521/10000 train_time:117647ms step_avg:225.81ms +[2025-07-17 13:21:28] [Rank 0] step:541/10000 train_time:122165ms step_avg:225.81ms +[2025-07-17 13:21:28] [Rank 0] step:541/10000 train_time:122165ms step_avg:225.81ms +[2025-07-17 13:21:33] [Rank 0] step:561/10000 train_time:126681ms step_avg:225.81ms +[2025-07-17 
13:21:33] [Rank 0] step:561/10000 train_time:126681ms step_avg:225.81ms +[2025-07-17 13:21:37] [Rank 0] step:581/10000 train_time:131198ms step_avg:225.81ms +[2025-07-17 13:21:37] [Rank 0] step:581/10000 train_time:131198ms step_avg:225.81ms +[2025-07-17 13:21:42] [Rank 0] step:601/10000 train_time:135717ms step_avg:225.82ms +[2025-07-17 13:21:42] [Rank 0] step:601/10000 train_time:135717ms step_avg:225.82ms +[2025-07-17 13:21:46] [Rank 0] step:621/10000 train_time:140233ms step_avg:225.82ms +[2025-07-17 13:21:46] [Rank 0] step:621/10000 train_time:140233ms step_avg:225.82ms +[2025-07-17 13:21:52] [Rank 0] PRINT: step:625/10000 val_loss:5.0523 train_time:141596ms step_avg:226.55ms +[2025-07-17 13:21:52] [Rank 0] PRINT: step:625/10000 val_loss:5.0523 train_time:141596ms step_avg:226.55ms +[2025-07-17 13:21:55] [Rank 0] step:641/10000 train_time:144752ms step_avg:225.82ms +[2025-07-17 13:21:55] [Rank 0] step:641/10000 train_time:144752ms step_avg:225.82ms +[2025-07-17 13:22:00] [Rank 0] step:661/10000 train_time:149273ms step_avg:225.83ms +[2025-07-17 13:22:00] [Rank 0] step:661/10000 train_time:149273ms step_avg:225.83ms +[2025-07-17 13:22:04] [Rank 0] step:681/10000 train_time:153793ms step_avg:225.83ms +[2025-07-17 13:22:04] [Rank 0] step:681/10000 train_time:153793ms step_avg:225.83ms +[2025-07-17 13:22:09] [Rank 0] step:701/10000 train_time:158314ms step_avg:225.84ms +[2025-07-17 13:22:09] [Rank 0] step:701/10000 train_time:158314ms step_avg:225.84ms +[2025-07-17 13:22:13] [Rank 0] step:721/10000 train_time:162836ms step_avg:225.85ms +[2025-07-17 13:22:13] [Rank 0] step:721/10000 train_time:162836ms step_avg:225.85ms +[2025-07-17 13:22:18] [Rank 0] step:741/10000 train_time:167361ms step_avg:225.86ms +[2025-07-17 13:22:18] [Rank 0] step:741/10000 train_time:167361ms step_avg:225.86ms +[2025-07-17 13:22:24] [Rank 0] PRINT: step:750/10000 val_loss:4.7857 train_time:169867ms step_avg:226.49ms +[2025-07-17 13:22:24] [Rank 0] PRINT: step:750/10000 val_loss:4.7857 
train_time:169867ms step_avg:226.49ms +[2025-07-17 13:22:27] [Rank 0] step:761/10000 train_time:171910ms step_avg:225.90ms +[2025-07-17 13:22:27] [Rank 0] step:761/10000 train_time:171910ms step_avg:225.90ms +[2025-07-17 13:22:31] [Rank 0] step:781/10000 train_time:176466ms step_avg:225.95ms +[2025-07-17 13:22:31] [Rank 0] step:781/10000 train_time:176466ms step_avg:225.95ms +[2025-07-17 13:22:36] [Rank 0] step:801/10000 train_time:181018ms step_avg:225.99ms +[2025-07-17 13:22:36] [Rank 0] step:801/10000 train_time:181018ms step_avg:225.99ms +[2025-07-17 13:22:41] [Rank 0] step:821/10000 train_time:185575ms step_avg:226.03ms +[2025-07-17 13:22:41] [Rank 0] step:821/10000 train_time:185575ms step_avg:226.03ms +[2025-07-17 13:22:45] [Rank 0] step:841/10000 train_time:190132ms step_avg:226.08ms +[2025-07-17 13:22:45] [Rank 0] step:841/10000 train_time:190132ms step_avg:226.08ms +[2025-07-17 13:22:50] [Rank 0] step:861/10000 train_time:194691ms step_avg:226.12ms +[2025-07-17 13:22:50] [Rank 0] step:861/10000 train_time:194691ms step_avg:226.12ms +[2025-07-17 13:22:57] [Rank 0] PRINT: step:875/10000 val_loss:4.7052 train_time:198341ms step_avg:226.68ms +[2025-07-17 13:22:57] [Rank 0] PRINT: step:875/10000 val_loss:4.7052 train_time:198341ms step_avg:226.68ms +[2025-07-17 13:22:59] [Rank 0] step:881/10000 train_time:199248ms step_avg:226.16ms +[2025-07-17 13:22:59] [Rank 0] step:881/10000 train_time:199248ms step_avg:226.16ms +[2025-07-17 13:23:03] [Rank 0] step:901/10000 train_time:203807ms step_avg:226.20ms +[2025-07-17 13:23:03] [Rank 0] step:901/10000 train_time:203807ms step_avg:226.20ms +[2025-07-17 13:23:08] [Rank 0] step:921/10000 train_time:208365ms step_avg:226.24ms +[2025-07-17 13:23:08] [Rank 0] step:921/10000 train_time:208365ms step_avg:226.24ms +[2025-07-17 13:23:12] [Rank 0] step:941/10000 train_time:212924ms step_avg:226.27ms +[2025-07-17 13:23:12] [Rank 0] step:941/10000 train_time:212924ms step_avg:226.27ms +[2025-07-17 13:23:17] [Rank 0] 
step:961/10000 train_time:217483ms step_avg:226.31ms +[2025-07-17 13:23:17] [Rank 0] step:961/10000 train_time:217483ms step_avg:226.31ms +[2025-07-17 13:23:22] [Rank 0] step:981/10000 train_time:222043ms step_avg:226.34ms +[2025-07-17 13:23:22] [Rank 0] step:981/10000 train_time:222043ms step_avg:226.34ms +[2025-07-17 13:23:30] [Rank 0] PRINT: step:1000/10000 val_loss:4.7304 train_time:226835ms step_avg:226.83ms +[2025-07-17 13:23:30] [Rank 0] PRINT: step:1000/10000 val_loss:4.7304 train_time:226835ms step_avg:226.83ms +[2025-07-17 13:23:31] [Rank 0] step:1001/10000 train_time:226848ms step_avg:226.62ms +[2025-07-17 13:23:31] [Rank 0] step:1001/10000 train_time:226848ms step_avg:226.62ms +[2025-07-17 13:23:35] [Rank 0] step:1021/10000 train_time:231449ms step_avg:226.69ms +[2025-07-17 13:23:35] [Rank 0] step:1021/10000 train_time:231449ms step_avg:226.69ms +[2025-07-17 13:23:40] [Rank 0] step:1041/10000 train_time:236011ms step_avg:226.72ms +[2025-07-17 13:23:40] [Rank 0] step:1041/10000 train_time:236011ms step_avg:226.72ms +[2025-07-17 13:23:44] [Rank 0] step:1061/10000 train_time:240576ms step_avg:226.74ms +[2025-07-17 13:23:44] [Rank 0] step:1061/10000 train_time:240576ms step_avg:226.74ms +[2025-07-17 13:23:49] [Rank 0] step:1081/10000 train_time:245177ms step_avg:226.81ms +[2025-07-17 13:23:49] [Rank 0] step:1081/10000 train_time:245177ms step_avg:226.81ms +[2025-07-17 13:23:54] [Rank 0] step:1101/10000 train_time:249742ms step_avg:226.83ms +[2025-07-17 13:23:54] [Rank 0] step:1101/10000 train_time:249742ms step_avg:226.83ms +[2025-07-17 13:23:58] [Rank 0] step:1121/10000 train_time:254306ms step_avg:226.86ms +[2025-07-17 13:23:58] [Rank 0] step:1121/10000 train_time:254306ms step_avg:226.86ms +[2025-07-17 13:24:04] [Rank 0] PRINT: step:1125/10000 val_loss:4.7475 train_time:255681ms step_avg:227.27ms +[2025-07-17 13:24:04] [Rank 0] PRINT: step:1125/10000 val_loss:4.7475 train_time:255681ms step_avg:227.27ms +[2025-07-17 13:24:07] [Rank 0] step:1141/10000 
train_time:258869ms step_avg:226.88ms +[2025-07-17 13:24:07] [Rank 0] step:1141/10000 train_time:258869ms step_avg:226.88ms +[2025-07-17 13:24:12] [Rank 0] step:1161/10000 train_time:263436ms step_avg:226.90ms +[2025-07-17 13:24:12] [Rank 0] step:1161/10000 train_time:263436ms step_avg:226.90ms +[2025-07-17 13:24:16] [Rank 0] step:1181/10000 train_time:268004ms step_avg:226.93ms +[2025-07-17 13:24:16] [Rank 0] step:1181/10000 train_time:268004ms step_avg:226.93ms +[2025-07-17 13:24:21] [Rank 0] step:1201/10000 train_time:272573ms step_avg:226.95ms +[2025-07-17 13:24:21] [Rank 0] step:1201/10000 train_time:272573ms step_avg:226.95ms +[2025-07-17 13:24:26] [Rank 0] step:1221/10000 train_time:277139ms step_avg:226.98ms +[2025-07-17 13:24:26] [Rank 0] step:1221/10000 train_time:277139ms step_avg:226.98ms +[2025-07-17 13:24:30] [Rank 0] step:1241/10000 train_time:281704ms step_avg:227.00ms +[2025-07-17 13:24:30] [Rank 0] step:1241/10000 train_time:281704ms step_avg:227.00ms +[2025-07-17 13:24:37] [Rank 0] PRINT: step:1250/10000 val_loss:4.7332 train_time:284219ms step_avg:227.38ms +[2025-07-17 13:24:37] [Rank 0] PRINT: step:1250/10000 val_loss:4.7332 train_time:284219ms step_avg:227.38ms +[2025-07-17 13:24:39] [Rank 0] step:1261/10000 train_time:286267ms step_avg:227.02ms +[2025-07-17 13:24:39] [Rank 0] step:1261/10000 train_time:286267ms step_avg:227.02ms +[2025-07-17 13:24:44] [Rank 0] step:1281/10000 train_time:290828ms step_avg:227.03ms +[2025-07-17 13:24:44] [Rank 0] step:1281/10000 train_time:290828ms step_avg:227.03ms +[2025-07-17 13:24:48] [Rank 0] step:1301/10000 train_time:295390ms step_avg:227.05ms +[2025-07-17 13:24:48] [Rank 0] step:1301/10000 train_time:295390ms step_avg:227.05ms +[2025-07-17 13:24:53] [Rank 0] step:1321/10000 train_time:299952ms step_avg:227.06ms +[2025-07-17 13:24:53] [Rank 0] step:1321/10000 train_time:299952ms step_avg:227.06ms +[2025-07-17 13:24:57] [Rank 0] step:1341/10000 train_time:304517ms step_avg:227.08ms +[2025-07-17 13:24:57] 
[Rank 0] step:1341/10000 train_time:304517ms step_avg:227.08ms +[2025-07-17 13:25:02] [Rank 0] step:1361/10000 train_time:309082ms step_avg:227.10ms +[2025-07-17 13:25:02] [Rank 0] step:1361/10000 train_time:309082ms step_avg:227.10ms +[2025-07-17 13:25:10] [Rank 0] PRINT: step:1375/10000 val_loss:4.7660 train_time:312738ms step_avg:227.45ms +[2025-07-17 13:25:10] [Rank 0] PRINT: step:1375/10000 val_loss:4.7660 train_time:312738ms step_avg:227.45ms +[2025-07-17 13:25:11] [Rank 0] step:1381/10000 train_time:313647ms step_avg:227.12ms +[2025-07-17 13:25:11] [Rank 0] step:1381/10000 train_time:313647ms step_avg:227.12ms +[2025-07-17 13:25:16] [Rank 0] step:1401/10000 train_time:318215ms step_avg:227.13ms +[2025-07-17 13:25:16] [Rank 0] step:1401/10000 train_time:318215ms step_avg:227.13ms +[2025-07-17 13:25:20] [Rank 0] step:1421/10000 train_time:322786ms step_avg:227.15ms +[2025-07-17 13:25:20] [Rank 0] step:1421/10000 train_time:322786ms step_avg:227.15ms +[2025-07-17 13:25:25] [Rank 0] step:1441/10000 train_time:327358ms step_avg:227.17ms +[2025-07-17 13:25:25] [Rank 0] step:1441/10000 train_time:327358ms step_avg:227.17ms +[2025-07-17 13:25:29] [Rank 0] step:1461/10000 train_time:331930ms step_avg:227.19ms +[2025-07-17 13:25:29] [Rank 0] step:1461/10000 train_time:331930ms step_avg:227.19ms +[2025-07-17 13:25:34] [Rank 0] step:1481/10000 train_time:336503ms step_avg:227.21ms +[2025-07-17 13:25:34] [Rank 0] step:1481/10000 train_time:336503ms step_avg:227.21ms +[2025-07-17 13:25:43] [Rank 0] PRINT: step:1500/10000 val_loss:4.6533 train_time:341330ms step_avg:227.55ms +[2025-07-17 13:25:43] [Rank 0] PRINT: step:1500/10000 val_loss:4.6533 train_time:341330ms step_avg:227.55ms +[2025-07-17 13:25:43] [Rank 0] step:1501/10000 train_time:341343ms step_avg:227.41ms +[2025-07-17 13:25:43] [Rank 0] step:1501/10000 train_time:341343ms step_avg:227.41ms +[2025-07-17 13:25:48] [Rank 0] step:1521/10000 train_time:345696ms step_avg:227.28ms +[2025-07-17 13:25:48] [Rank 0] 
step:1521/10000 train_time:345696ms step_avg:227.28ms +[2025-07-17 13:25:52] [Rank 0] step:1541/10000 train_time:350576ms step_avg:227.50ms +[2025-07-17 13:25:52] [Rank 0] step:1541/10000 train_time:350576ms step_avg:227.50ms +[2025-07-17 13:25:57] [Rank 0] step:1561/10000 train_time:355174ms step_avg:227.53ms +[2025-07-17 13:25:57] [Rank 0] step:1561/10000 train_time:355174ms step_avg:227.53ms +[2025-07-17 13:26:02] [Rank 0] step:1581/10000 train_time:359772ms step_avg:227.56ms +[2025-07-17 13:26:02] [Rank 0] step:1581/10000 train_time:359772ms step_avg:227.56ms +[2025-07-17 13:26:06] [Rank 0] step:1601/10000 train_time:364372ms step_avg:227.59ms +[2025-07-17 13:26:06] [Rank 0] step:1601/10000 train_time:364372ms step_avg:227.59ms +[2025-07-17 13:26:11] [Rank 0] step:1621/10000 train_time:368970ms step_avg:227.62ms +[2025-07-17 13:26:11] [Rank 0] step:1621/10000 train_time:368970ms step_avg:227.62ms +[2025-07-17 13:26:16] [Rank 0] PRINT: step:1625/10000 val_loss:4.7175 train_time:370356ms step_avg:227.91ms +[2025-07-17 13:26:16] [Rank 0] PRINT: step:1625/10000 val_loss:4.7175 train_time:370356ms step_avg:227.91ms +[2025-07-17 13:26:20] [Rank 0] step:1641/10000 train_time:373566ms step_avg:227.65ms +[2025-07-17 13:26:20] [Rank 0] step:1641/10000 train_time:373566ms step_avg:227.65ms +[2025-07-17 13:26:25] [Rank 0] step:1661/10000 train_time:378166ms step_avg:227.67ms +[2025-07-17 13:26:25] [Rank 0] step:1661/10000 train_time:378166ms step_avg:227.67ms +[2025-07-17 13:26:29] [Rank 0] step:1681/10000 train_time:382765ms step_avg:227.70ms +[2025-07-17 13:26:29] [Rank 0] step:1681/10000 train_time:382765ms step_avg:227.70ms +[2025-07-17 13:26:34] [Rank 0] step:1701/10000 train_time:387361ms step_avg:227.73ms +[2025-07-17 13:26:34] [Rank 0] step:1701/10000 train_time:387361ms step_avg:227.73ms +[2025-07-17 13:26:38] [Rank 0] step:1721/10000 train_time:391962ms step_avg:227.75ms +[2025-07-17 13:26:38] [Rank 0] step:1721/10000 train_time:391962ms step_avg:227.75ms 
+[2025-07-17 13:26:43] [Rank 0] step:1741/10000 train_time:396562ms step_avg:227.78ms +[2025-07-17 13:26:43] [Rank 0] step:1741/10000 train_time:396562ms step_avg:227.78ms +[2025-07-17 13:26:49] [Rank 0] PRINT: step:1750/10000 val_loss:4.7135 train_time:399096ms step_avg:228.05ms +[2025-07-17 13:26:49] [Rank 0] PRINT: step:1750/10000 val_loss:4.7135 train_time:399096ms step_avg:228.05ms +[2025-07-17 13:26:52] [Rank 0] step:1761/10000 train_time:401159ms step_avg:227.80ms +[2025-07-17 13:26:52] [Rank 0] step:1761/10000 train_time:401159ms step_avg:227.80ms +[2025-07-17 13:26:56] [Rank 0] step:1781/10000 train_time:405759ms step_avg:227.83ms +[2025-07-17 13:26:56] [Rank 0] step:1781/10000 train_time:405759ms step_avg:227.83ms +[2025-07-17 13:27:01] [Rank 0] step:1801/10000 train_time:410361ms step_avg:227.85ms +[2025-07-17 13:27:01] [Rank 0] step:1801/10000 train_time:410361ms step_avg:227.85ms +[2025-07-17 13:27:05] [Rank 0] step:1821/10000 train_time:414957ms step_avg:227.87ms +[2025-07-17 13:27:05] [Rank 0] step:1821/10000 train_time:414957ms step_avg:227.87ms +[2025-07-17 13:27:10] [Rank 0] step:1841/10000 train_time:419552ms step_avg:227.89ms +[2025-07-17 13:27:10] [Rank 0] step:1841/10000 train_time:419552ms step_avg:227.89ms +[2025-07-17 13:27:15] [Rank 0] step:1861/10000 train_time:424153ms step_avg:227.92ms +[2025-07-17 13:27:15] [Rank 0] step:1861/10000 train_time:424153ms step_avg:227.92ms +[2025-07-17 13:27:22] [Rank 0] PRINT: step:1875/10000 val_loss:4.5899 train_time:427841ms step_avg:228.18ms +[2025-07-17 13:27:22] [Rank 0] PRINT: step:1875/10000 val_loss:4.5899 train_time:427841ms step_avg:228.18ms +[2025-07-17 13:27:24] [Rank 0] step:1881/10000 train_time:428754ms step_avg:227.94ms +[2025-07-17 13:27:24] [Rank 0] step:1881/10000 train_time:428754ms step_avg:227.94ms +[2025-07-17 13:27:28] [Rank 0] step:1901/10000 train_time:433344ms step_avg:227.96ms +[2025-07-17 13:27:28] [Rank 0] step:1901/10000 train_time:433344ms step_avg:227.96ms +[2025-07-17 
13:27:33] [Rank 0] step:1921/10000 train_time:437933ms step_avg:227.97ms +[2025-07-17 13:27:33] [Rank 0] step:1921/10000 train_time:437933ms step_avg:227.97ms +[2025-07-17 13:27:37] [Rank 0] step:1941/10000 train_time:442525ms step_avg:227.99ms +[2025-07-17 13:27:37] [Rank 0] step:1941/10000 train_time:442525ms step_avg:227.99ms +[2025-07-17 13:27:42] [Rank 0] step:1961/10000 train_time:447117ms step_avg:228.00ms +[2025-07-17 13:27:42] [Rank 0] step:1961/10000 train_time:447117ms step_avg:228.00ms +[2025-07-17 13:27:47] [Rank 0] step:1981/10000 train_time:451709ms step_avg:228.02ms +[2025-07-17 13:27:47] [Rank 0] step:1981/10000 train_time:451709ms step_avg:228.02ms +[2025-07-17 13:27:55] [Rank 0] PRINT: step:2000/10000 val_loss:4.7113 train_time:456534ms step_avg:228.27ms +[2025-07-17 13:27:55] [Rank 0] PRINT: step:2000/10000 val_loss:4.7113 train_time:456534ms step_avg:228.27ms +[2025-07-17 13:27:56] [Rank 0] step:2001/10000 train_time:456547ms step_avg:228.16ms +[2025-07-17 13:27:56] [Rank 0] step:2001/10000 train_time:456547ms step_avg:228.16ms +[2025-07-17 13:28:00] [Rank 0] step:2021/10000 train_time:460893ms step_avg:228.05ms +[2025-07-17 13:28:00] [Rank 0] step:2021/10000 train_time:460893ms step_avg:228.05ms +[2025-07-17 13:28:05] [Rank 0] step:2041/10000 train_time:465770ms step_avg:228.21ms +[2025-07-17 13:28:05] [Rank 0] step:2041/10000 train_time:465770ms step_avg:228.21ms +[2025-07-17 13:28:10] [Rank 0] step:2061/10000 train_time:470363ms step_avg:228.22ms +[2025-07-17 13:28:10] [Rank 0] step:2061/10000 train_time:470363ms step_avg:228.22ms +[2025-07-17 13:28:14] [Rank 0] step:2081/10000 train_time:474956ms step_avg:228.23ms +[2025-07-17 13:28:14] [Rank 0] step:2081/10000 train_time:474956ms step_avg:228.23ms +[2025-07-17 13:28:19] [Rank 0] step:2101/10000 train_time:479549ms step_avg:228.25ms +[2025-07-17 13:28:19] [Rank 0] step:2101/10000 train_time:479549ms step_avg:228.25ms +[2025-07-17 13:28:24] [Rank 0] step:2121/10000 train_time:484141ms 
step_avg:228.26ms +[2025-07-17 13:28:24] [Rank 0] step:2121/10000 train_time:484141ms step_avg:228.26ms +[2025-07-17 13:28:29] [Rank 0] PRINT: step:2125/10000 val_loss:4.5751 train_time:485525ms step_avg:228.48ms +[2025-07-17 13:28:29] [Rank 0] PRINT: step:2125/10000 val_loss:4.5751 train_time:485525ms step_avg:228.48ms +[2025-07-17 13:28:33] [Rank 0] step:2141/10000 train_time:488730ms step_avg:228.27ms +[2025-07-17 13:28:33] [Rank 0] step:2141/10000 train_time:488730ms step_avg:228.27ms +[2025-07-17 13:28:37] [Rank 0] step:2161/10000 train_time:493323ms step_avg:228.28ms +[2025-07-17 13:28:37] [Rank 0] step:2161/10000 train_time:493323ms step_avg:228.28ms +[2025-07-17 13:28:42] [Rank 0] step:2181/10000 train_time:497915ms step_avg:228.30ms +[2025-07-17 13:28:42] [Rank 0] step:2181/10000 train_time:497915ms step_avg:228.30ms +[2025-07-17 13:28:46] [Rank 0] step:2201/10000 train_time:502509ms step_avg:228.31ms +[2025-07-17 13:28:46] [Rank 0] step:2201/10000 train_time:502509ms step_avg:228.31ms +[2025-07-17 13:28:51] [Rank 0] step:2221/10000 train_time:507102ms step_avg:228.32ms +[2025-07-17 13:28:51] [Rank 0] step:2221/10000 train_time:507102ms step_avg:228.32ms +[2025-07-17 13:28:56] [Rank 0] step:2241/10000 train_time:511785ms step_avg:228.37ms +[2025-07-17 13:28:56] [Rank 0] step:2241/10000 train_time:511785ms step_avg:228.37ms +[2025-07-17 13:29:02] [Rank 0] PRINT: step:2250/10000 val_loss:4.3027 train_time:514380ms step_avg:228.61ms +[2025-07-17 13:29:02] [Rank 0] PRINT: step:2250/10000 val_loss:4.3027 train_time:514380ms step_avg:228.61ms +[2025-07-17 13:29:05] [Rank 0] step:2261/10000 train_time:516492ms step_avg:228.44ms +[2025-07-17 13:29:05] [Rank 0] step:2261/10000 train_time:516492ms step_avg:228.44ms +[2025-07-17 13:29:10] [Rank 0] step:2281/10000 train_time:521197ms step_avg:228.50ms +[2025-07-17 13:29:10] [Rank 0] step:2281/10000 train_time:521197ms step_avg:228.50ms +[2025-07-17 13:29:14] [Rank 0] step:2301/10000 train_time:525901ms 
step_avg:228.55ms +[2025-07-17 13:29:14] [Rank 0] step:2301/10000 train_time:525901ms step_avg:228.55ms +[2025-07-17 13:29:19] [Rank 0] step:2321/10000 train_time:530607ms step_avg:228.61ms +[2025-07-17 13:29:19] [Rank 0] step:2321/10000 train_time:530607ms step_avg:228.61ms +[2025-07-17 13:29:24] [Rank 0] step:2341/10000 train_time:535313ms step_avg:228.67ms +[2025-07-17 13:29:24] [Rank 0] step:2341/10000 train_time:535313ms step_avg:228.67ms +[2025-07-17 13:29:29] [Rank 0] step:2361/10000 train_time:540021ms step_avg:228.73ms +[2025-07-17 13:29:29] [Rank 0] step:2361/10000 train_time:540021ms step_avg:228.73ms +[2025-07-17 13:29:36] [Rank 0] PRINT: step:2375/10000 val_loss:4.5143 train_time:543793ms step_avg:228.97ms +[2025-07-17 13:29:36] [Rank 0] PRINT: step:2375/10000 val_loss:4.5143 train_time:543793ms step_avg:228.97ms +[2025-07-17 13:29:38] [Rank 0] step:2381/10000 train_time:544728ms step_avg:228.78ms +[2025-07-17 13:29:38] [Rank 0] step:2381/10000 train_time:544728ms step_avg:228.78ms +[2025-07-17 13:29:43] [Rank 0] step:2401/10000 train_time:549434ms step_avg:228.84ms +[2025-07-17 13:29:43] [Rank 0] step:2401/10000 train_time:549434ms step_avg:228.84ms +[2025-07-17 13:29:47] [Rank 0] step:2421/10000 train_time:554142ms step_avg:228.89ms +[2025-07-17 13:29:47] [Rank 0] step:2421/10000 train_time:554142ms step_avg:228.89ms +[2025-07-17 13:29:52] [Rank 0] step:2441/10000 train_time:558848ms step_avg:228.94ms +[2025-07-17 13:29:52] [Rank 0] step:2441/10000 train_time:558848ms step_avg:228.94ms +[2025-07-17 13:29:57] [Rank 0] step:2461/10000 train_time:563561ms step_avg:229.00ms +[2025-07-17 13:29:57] [Rank 0] step:2461/10000 train_time:563561ms step_avg:229.00ms +[2025-07-17 13:30:01] [Rank 0] step:2481/10000 train_time:568271ms step_avg:229.05ms +[2025-07-17 13:30:01] [Rank 0] step:2481/10000 train_time:568271ms step_avg:229.05ms +[2025-07-17 13:30:10] [Rank 0] PRINT: step:2500/10000 val_loss:4.3721 train_time:573218ms step_avg:229.29ms +[2025-07-17 
13:30:10] [Rank 0] PRINT: step:2500/10000 val_loss:4.3721 train_time:573218ms step_avg:229.29ms +[2025-07-17 13:30:11] [Rank 0] step:2501/10000 train_time:573232ms step_avg:229.20ms +[2025-07-17 13:30:11] [Rank 0] step:2501/10000 train_time:573232ms step_avg:229.20ms +[2025-07-17 13:30:15] [Rank 0] step:2521/10000 train_time:577688ms step_avg:229.15ms +[2025-07-17 13:30:15] [Rank 0] step:2521/10000 train_time:577688ms step_avg:229.15ms +[2025-07-17 13:30:20] [Rank 0] step:2541/10000 train_time:582938ms step_avg:229.41ms +[2025-07-17 13:30:20] [Rank 0] step:2541/10000 train_time:582938ms step_avg:229.41ms +[2025-07-17 13:30:25] [Rank 0] step:2561/10000 train_time:587388ms step_avg:229.36ms +[2025-07-17 13:30:25] [Rank 0] step:2561/10000 train_time:587388ms step_avg:229.36ms +[2025-07-17 13:30:30] [Rank 0] step:2581/10000 train_time:592097ms step_avg:229.41ms +[2025-07-17 13:30:30] [Rank 0] step:2581/10000 train_time:592097ms step_avg:229.41ms +[2025-07-17 13:30:35] [Rank 0] step:2601/10000 train_time:596805ms step_avg:229.45ms +[2025-07-17 13:30:35] [Rank 0] step:2601/10000 train_time:596805ms step_avg:229.45ms +[2025-07-17 13:30:39] [Rank 0] step:2621/10000 train_time:601516ms step_avg:229.50ms +[2025-07-17 13:30:39] [Rank 0] step:2621/10000 train_time:601516ms step_avg:229.50ms +[2025-07-17 13:30:45] [Rank 0] PRINT: step:2625/10000 val_loss:4.5680 train_time:602935ms step_avg:229.69ms +[2025-07-17 13:30:45] [Rank 0] PRINT: step:2625/10000 val_loss:4.5680 train_time:602935ms step_avg:229.69ms +[2025-07-17 13:30:48] [Rank 0] step:2641/10000 train_time:606222ms step_avg:229.54ms +[2025-07-17 13:30:48] [Rank 0] step:2641/10000 train_time:606222ms step_avg:229.54ms +[2025-07-17 13:30:53] [Rank 0] step:2661/10000 train_time:610932ms step_avg:229.59ms +[2025-07-17 13:30:53] [Rank 0] step:2661/10000 train_time:610932ms step_avg:229.59ms +[2025-07-17 13:30:58] [Rank 0] step:2681/10000 train_time:615641ms step_avg:229.63ms +[2025-07-17 13:30:58] [Rank 0] step:2681/10000 
train_time:615641ms step_avg:229.63ms +[2025-07-17 13:31:03] [Rank 0] step:2701/10000 train_time:620352ms step_avg:229.68ms +[2025-07-17 13:31:03] [Rank 0] step:2701/10000 train_time:620352ms step_avg:229.68ms +[2025-07-17 13:31:07] [Rank 0] step:2721/10000 train_time:625065ms step_avg:229.72ms +[2025-07-17 13:31:07] [Rank 0] step:2721/10000 train_time:625065ms step_avg:229.72ms +[2025-07-17 13:31:12] [Rank 0] step:2741/10000 train_time:629774ms step_avg:229.76ms +[2025-07-17 13:31:12] [Rank 0] step:2741/10000 train_time:629774ms step_avg:229.76ms +[2025-07-17 13:31:19] [Rank 0] PRINT: step:2750/10000 val_loss:4.4183 train_time:632370ms step_avg:229.95ms +[2025-07-17 13:31:19] [Rank 0] PRINT: step:2750/10000 val_loss:4.4183 train_time:632370ms step_avg:229.95ms +[2025-07-17 13:31:21] [Rank 0] step:2761/10000 train_time:634481ms step_avg:229.80ms +[2025-07-17 13:31:21] [Rank 0] step:2761/10000 train_time:634481ms step_avg:229.80ms +[2025-07-17 13:31:26] [Rank 0] step:2781/10000 train_time:639190ms step_avg:229.84ms +[2025-07-17 13:31:26] [Rank 0] step:2781/10000 train_time:639190ms step_avg:229.84ms +[2025-07-17 13:31:31] [Rank 0] step:2801/10000 train_time:643898ms step_avg:229.88ms +[2025-07-17 13:31:31] [Rank 0] step:2801/10000 train_time:643898ms step_avg:229.88ms +[2025-07-17 13:31:35] [Rank 0] step:2821/10000 train_time:648607ms step_avg:229.92ms +[2025-07-17 13:31:35] [Rank 0] step:2821/10000 train_time:648607ms step_avg:229.92ms +[2025-07-17 13:31:40] [Rank 0] step:2841/10000 train_time:653322ms step_avg:229.96ms +[2025-07-17 13:31:40] [Rank 0] step:2841/10000 train_time:653322ms step_avg:229.96ms +[2025-07-17 13:31:45] [Rank 0] step:2861/10000 train_time:658035ms step_avg:230.00ms +[2025-07-17 13:31:45] [Rank 0] step:2861/10000 train_time:658035ms step_avg:230.00ms +[2025-07-17 13:31:53] [Rank 0] PRINT: step:2875/10000 val_loss:4.4271 train_time:661805ms step_avg:230.19ms +[2025-07-17 13:31:53] [Rank 0] PRINT: step:2875/10000 val_loss:4.4271 
train_time:661805ms step_avg:230.19ms +[2025-07-17 13:31:54] [Rank 0] step:2881/10000 train_time:662742ms step_avg:230.04ms +[2025-07-17 13:31:54] [Rank 0] step:2881/10000 train_time:662742ms step_avg:230.04ms +[2025-07-17 13:31:59] [Rank 0] step:2901/10000 train_time:667451ms step_avg:230.08ms +[2025-07-17 13:31:59] [Rank 0] step:2901/10000 train_time:667451ms step_avg:230.08ms +[2025-07-17 13:32:03] [Rank 0] step:2921/10000 train_time:672161ms step_avg:230.11ms +[2025-07-17 13:32:03] [Rank 0] step:2921/10000 train_time:672161ms step_avg:230.11ms +[2025-07-17 13:32:08] [Rank 0] step:2941/10000 train_time:676871ms step_avg:230.15ms +[2025-07-17 13:32:08] [Rank 0] step:2941/10000 train_time:676871ms step_avg:230.15ms +[2025-07-17 13:32:13] [Rank 0] step:2961/10000 train_time:681583ms step_avg:230.19ms +[2025-07-17 13:32:13] [Rank 0] step:2961/10000 train_time:681583ms step_avg:230.19ms +[2025-07-17 13:32:18] [Rank 0] step:2981/10000 train_time:686309ms step_avg:230.23ms +[2025-07-17 13:32:18] [Rank 0] step:2981/10000 train_time:686309ms step_avg:230.23ms +[2025-07-17 13:32:27] [Rank 0] PRINT: step:3000/10000 val_loss:4.3848 train_time:691276ms step_avg:230.43ms +[2025-07-17 13:32:27] [Rank 0] PRINT: step:3000/10000 val_loss:4.3848 train_time:691276ms step_avg:230.43ms +[2025-07-17 13:32:27] [Rank 0] step:3001/10000 train_time:691289ms step_avg:230.35ms +[2025-07-17 13:32:27] [Rank 0] step:3001/10000 train_time:691289ms step_avg:230.35ms +[2025-07-17 13:32:32] [Rank 0] step:3021/10000 train_time:695768ms step_avg:230.31ms +[2025-07-17 13:32:32] [Rank 0] step:3021/10000 train_time:695768ms step_avg:230.31ms +[2025-07-17 13:32:36] [Rank 0] step:3041/10000 train_time:700497ms step_avg:230.35ms +[2025-07-17 13:32:36] [Rank 0] step:3041/10000 train_time:700497ms step_avg:230.35ms +[2025-07-17 13:32:41] [Rank 0] step:3061/10000 train_time:705509ms step_avg:230.48ms +[2025-07-17 13:32:41] [Rank 0] step:3061/10000 train_time:705509ms step_avg:230.48ms +[2025-07-17 13:32:46] 
[Rank 0] step:3081/10000 train_time:710235ms step_avg:230.52ms +[2025-07-17 13:32:46] [Rank 0] step:3081/10000 train_time:710235ms step_avg:230.52ms +[2025-07-17 13:32:51] [Rank 0] step:3101/10000 train_time:714961ms step_avg:230.56ms +[2025-07-17 13:32:51] [Rank 0] step:3101/10000 train_time:714961ms step_avg:230.56ms +[2025-07-17 13:32:56] [Rank 0] step:3121/10000 train_time:719688ms step_avg:230.60ms +[2025-07-17 13:32:56] [Rank 0] step:3121/10000 train_time:719688ms step_avg:230.60ms +[2025-07-17 13:33:01] [Rank 0] PRINT: step:3125/10000 val_loss:4.2597 train_time:721114ms step_avg:230.76ms +[2025-07-17 13:33:01] [Rank 0] PRINT: step:3125/10000 val_loss:4.2597 train_time:721114ms step_avg:230.76ms +[2025-07-17 13:33:05] [Rank 0] step:3141/10000 train_time:724417ms step_avg:230.63ms +[2025-07-17 13:33:05] [Rank 0] step:3141/10000 train_time:724417ms step_avg:230.63ms +[2025-07-17 13:33:10] [Rank 0] step:3161/10000 train_time:729146ms step_avg:230.67ms +[2025-07-17 13:33:10] [Rank 0] step:3161/10000 train_time:729146ms step_avg:230.67ms +[2025-07-17 13:33:14] [Rank 0] step:3181/10000 train_time:733873ms step_avg:230.71ms +[2025-07-17 13:33:14] [Rank 0] step:3181/10000 train_time:733873ms step_avg:230.71ms +[2025-07-17 13:33:19] [Rank 0] step:3201/10000 train_time:738606ms step_avg:230.74ms +[2025-07-17 13:33:19] [Rank 0] step:3201/10000 train_time:738606ms step_avg:230.74ms +[2025-07-17 13:33:24] [Rank 0] step:3221/10000 train_time:743334ms step_avg:230.78ms +[2025-07-17 13:33:24] [Rank 0] step:3221/10000 train_time:743334ms step_avg:230.78ms +[2025-07-17 13:33:29] [Rank 0] step:3241/10000 train_time:748063ms step_avg:230.81ms +[2025-07-17 13:33:29] [Rank 0] step:3241/10000 train_time:748063ms step_avg:230.81ms +[2025-07-17 13:33:35] [Rank 0] PRINT: step:3250/10000 val_loss:4.2624 train_time:750668ms step_avg:230.97ms +[2025-07-17 13:33:35] [Rank 0] PRINT: step:3250/10000 val_loss:4.2624 train_time:750668ms step_avg:230.97ms +[2025-07-17 13:33:38] [Rank 0] 
step:3261/10000 train_time:752792ms step_avg:230.85ms +[2025-07-17 13:33:38] [Rank 0] step:3261/10000 train_time:752792ms step_avg:230.85ms +[2025-07-17 13:33:43] [Rank 0] step:3281/10000 train_time:757525ms step_avg:230.88ms +[2025-07-17 13:33:43] [Rank 0] step:3281/10000 train_time:757525ms step_avg:230.88ms +[2025-07-17 13:33:47] [Rank 0] step:3301/10000 train_time:762256ms step_avg:230.92ms +[2025-07-17 13:33:47] [Rank 0] step:3301/10000 train_time:762256ms step_avg:230.92ms +[2025-07-17 13:33:52] [Rank 0] step:3321/10000 train_time:766985ms step_avg:230.95ms +[2025-07-17 13:33:52] [Rank 0] step:3321/10000 train_time:766985ms step_avg:230.95ms +[2025-07-17 13:33:57] [Rank 0] step:3341/10000 train_time:771711ms step_avg:230.98ms +[2025-07-17 13:33:57] [Rank 0] step:3341/10000 train_time:771711ms step_avg:230.98ms +[2025-07-17 13:34:02] [Rank 0] step:3361/10000 train_time:776443ms step_avg:231.02ms +[2025-07-17 13:34:02] [Rank 0] step:3361/10000 train_time:776443ms step_avg:231.02ms +[2025-07-17 13:34:09] [Rank 0] PRINT: step:3375/10000 val_loss:4.2594 train_time:780226ms step_avg:231.18ms +[2025-07-17 13:34:09] [Rank 0] PRINT: step:3375/10000 val_loss:4.2594 train_time:780226ms step_avg:231.18ms +[2025-07-17 13:34:11] [Rank 0] step:3381/10000 train_time:781165ms step_avg:231.05ms +[2025-07-17 13:34:11] [Rank 0] step:3381/10000 train_time:781165ms step_avg:231.05ms +[2025-07-17 13:34:16] [Rank 0] step:3401/10000 train_time:785886ms step_avg:231.08ms +[2025-07-17 13:34:16] [Rank 0] step:3401/10000 train_time:785886ms step_avg:231.08ms +[2025-07-17 13:34:20] [Rank 0] step:3421/10000 train_time:790610ms step_avg:231.10ms +[2025-07-17 13:34:20] [Rank 0] step:3421/10000 train_time:790610ms step_avg:231.10ms +[2025-07-17 13:34:25] [Rank 0] step:3441/10000 train_time:795330ms step_avg:231.13ms +[2025-07-17 13:34:25] [Rank 0] step:3441/10000 train_time:795330ms step_avg:231.13ms +[2025-07-17 13:34:30] [Rank 0] step:3461/10000 train_time:800052ms step_avg:231.16ms 
+[2025-07-17 13:34:30] [Rank 0] step:3461/10000 train_time:800052ms step_avg:231.16ms +[2025-07-17 13:34:35] [Rank 0] step:3481/10000 train_time:804779ms step_avg:231.19ms +[2025-07-17 13:34:35] [Rank 0] step:3481/10000 train_time:804779ms step_avg:231.19ms +[2025-07-17 13:34:43] [Rank 0] PRINT: step:3500/10000 val_loss:4.3180 train_time:809743ms step_avg:231.36ms +[2025-07-17 13:34:43] [Rank 0] PRINT: step:3500/10000 val_loss:4.3180 train_time:809743ms step_avg:231.36ms +[2025-07-17 13:34:43] [Rank 0] step:3501/10000 train_time:809755ms step_avg:231.29ms +[2025-07-17 13:34:43] [Rank 0] step:3501/10000 train_time:809755ms step_avg:231.29ms +[2025-07-17 13:34:48] [Rank 0] step:3521/10000 train_time:814224ms step_avg:231.25ms +[2025-07-17 13:34:48] [Rank 0] step:3521/10000 train_time:814224ms step_avg:231.25ms +[2025-07-17 13:34:53] [Rank 0] step:3541/10000 train_time:818951ms step_avg:231.28ms +[2025-07-17 13:34:53] [Rank 0] step:3541/10000 train_time:818951ms step_avg:231.28ms +[2025-07-17 13:34:58] [Rank 0] step:3561/10000 train_time:823943ms step_avg:231.38ms +[2025-07-17 13:34:58] [Rank 0] step:3561/10000 train_time:823943ms step_avg:231.38ms +[2025-07-17 13:35:03] [Rank 0] step:3581/10000 train_time:828669ms step_avg:231.41ms +[2025-07-17 13:35:03] [Rank 0] step:3581/10000 train_time:828669ms step_avg:231.41ms +[2025-07-17 13:35:07] [Rank 0] step:3601/10000 train_time:833396ms step_avg:231.43ms +[2025-07-17 13:35:07] [Rank 0] step:3601/10000 train_time:833396ms step_avg:231.43ms +[2025-07-17 13:35:12] [Rank 0] step:3621/10000 train_time:838123ms step_avg:231.46ms +[2025-07-17 13:35:12] [Rank 0] step:3621/10000 train_time:838123ms step_avg:231.46ms +[2025-07-17 13:35:18] [Rank 0] PRINT: step:3625/10000 val_loss:4.3295 train_time:839544ms step_avg:231.60ms +[2025-07-17 13:35:18] [Rank 0] PRINT: step:3625/10000 val_loss:4.3295 train_time:839544ms step_avg:231.60ms +[2025-07-17 13:35:21] [Rank 0] step:3641/10000 train_time:842841ms step_avg:231.49ms +[2025-07-17 
13:35:21] [Rank 0] step:3641/10000 train_time:842841ms step_avg:231.49ms +[2025-07-17 13:35:26] [Rank 0] step:3661/10000 train_time:847563ms step_avg:231.51ms +[2025-07-17 13:35:26] [Rank 0] step:3661/10000 train_time:847563ms step_avg:231.51ms +[2025-07-17 13:35:31] [Rank 0] step:3681/10000 train_time:852290ms step_avg:231.54ms +[2025-07-17 13:35:31] [Rank 0] step:3681/10000 train_time:852290ms step_avg:231.54ms +[2025-07-17 13:35:36] [Rank 0] step:3701/10000 train_time:857017ms step_avg:231.56ms +[2025-07-17 13:35:36] [Rank 0] step:3701/10000 train_time:857017ms step_avg:231.56ms +[2025-07-17 13:35:40] [Rank 0] step:3721/10000 train_time:861806ms step_avg:231.61ms +[2025-07-17 13:35:40] [Rank 0] step:3721/10000 train_time:861806ms step_avg:231.61ms +[2025-07-17 13:35:45] [Rank 0] step:3741/10000 train_time:866622ms step_avg:231.66ms +[2025-07-17 13:35:45] [Rank 0] step:3741/10000 train_time:866622ms step_avg:231.66ms +[2025-07-17 13:35:52] [Rank 0] PRINT: step:3750/10000 val_loss:4.2336 train_time:869274ms step_avg:231.81ms +[2025-07-17 13:35:52] [Rank 0] PRINT: step:3750/10000 val_loss:4.2336 train_time:869274ms step_avg:231.81ms +[2025-07-17 13:35:55] [Rank 0] step:3761/10000 train_time:871431ms step_avg:231.70ms +[2025-07-17 13:35:55] [Rank 0] step:3761/10000 train_time:871431ms step_avg:231.70ms +[2025-07-17 13:35:59] [Rank 0] step:3781/10000 train_time:876241ms step_avg:231.75ms +[2025-07-17 13:35:59] [Rank 0] step:3781/10000 train_time:876241ms step_avg:231.75ms +[2025-07-17 13:36:04] [Rank 0] step:3801/10000 train_time:881055ms step_avg:231.80ms +[2025-07-17 13:36:04] [Rank 0] step:3801/10000 train_time:881055ms step_avg:231.80ms +[2025-07-17 13:36:09] [Rank 0] step:3821/10000 train_time:885870ms step_avg:231.84ms +[2025-07-17 13:36:09] [Rank 0] step:3821/10000 train_time:885870ms step_avg:231.84ms +[2025-07-17 13:36:14] [Rank 0] step:3841/10000 train_time:890687ms step_avg:231.89ms +[2025-07-17 13:36:14] [Rank 0] step:3841/10000 train_time:890687ms 
step_avg:231.89ms +[2025-07-17 13:36:19] [Rank 0] step:3861/10000 train_time:895503ms step_avg:231.94ms +[2025-07-17 13:36:19] [Rank 0] step:3861/10000 train_time:895503ms step_avg:231.94ms +[2025-07-17 13:36:27] [Rank 0] PRINT: step:3875/10000 val_loss:4.4179 train_time:899360ms step_avg:232.09ms +[2025-07-17 13:36:27] [Rank 0] PRINT: step:3875/10000 val_loss:4.4179 train_time:899360ms step_avg:232.09ms +[2025-07-17 13:36:28] [Rank 0] step:3881/10000 train_time:900320ms step_avg:231.98ms +[2025-07-17 13:36:28] [Rank 0] step:3881/10000 train_time:900320ms step_avg:231.98ms +[2025-07-17 13:36:33] [Rank 0] step:3901/10000 train_time:905138ms step_avg:232.03ms +[2025-07-17 13:36:33] [Rank 0] step:3901/10000 train_time:905138ms step_avg:232.03ms +[2025-07-17 13:36:38] [Rank 0] step:3921/10000 train_time:909952ms step_avg:232.07ms +[2025-07-17 13:36:38] [Rank 0] step:3921/10000 train_time:909952ms step_avg:232.07ms +[2025-07-17 13:36:43] [Rank 0] step:3941/10000 train_time:914772ms step_avg:232.12ms +[2025-07-17 13:36:43] [Rank 0] step:3941/10000 train_time:914772ms step_avg:232.12ms +[2025-07-17 13:36:47] [Rank 0] step:3961/10000 train_time:919590ms step_avg:232.16ms +[2025-07-17 13:36:47] [Rank 0] step:3961/10000 train_time:919590ms step_avg:232.16ms +[2025-07-17 13:36:52] [Rank 0] step:3981/10000 train_time:924405ms step_avg:232.20ms +[2025-07-17 13:36:52] [Rank 0] step:3981/10000 train_time:924405ms step_avg:232.20ms +[2025-07-17 13:37:01] [Rank 0] PRINT: step:4000/10000 val_loss:4.9668 train_time:929462ms step_avg:232.37ms +[2025-07-17 13:37:01] [Rank 0] PRINT: step:4000/10000 val_loss:4.9668 train_time:929462ms step_avg:232.37ms +[2025-07-17 13:37:02] [Rank 0] step:4001/10000 train_time:929475ms step_avg:232.31ms +[2025-07-17 13:37:02] [Rank 0] step:4001/10000 train_time:929475ms step_avg:232.31ms +[2025-07-17 13:37:07] [Rank 0] step:4021/10000 train_time:934030ms step_avg:232.29ms +[2025-07-17 13:37:07] [Rank 0] step:4021/10000 train_time:934030ms 
step_avg:232.29ms +[2025-07-17 13:37:11] [Rank 0] step:4041/10000 train_time:938835ms step_avg:232.33ms +[2025-07-17 13:37:11] [Rank 0] step:4041/10000 train_time:938835ms step_avg:232.33ms +[2025-07-17 13:37:16] [Rank 0] step:4061/10000 train_time:943640ms step_avg:232.37ms +[2025-07-17 13:37:16] [Rank 0] step:4061/10000 train_time:943640ms step_avg:232.37ms +[2025-07-17 13:37:21] [Rank 0] step:4081/10000 train_time:948696ms step_avg:232.47ms +[2025-07-17 13:37:21] [Rank 0] step:4081/10000 train_time:948696ms step_avg:232.47ms +[2025-07-17 13:37:26] [Rank 0] step:4101/10000 train_time:953505ms step_avg:232.51ms +[2025-07-17 13:37:26] [Rank 0] step:4101/10000 train_time:953505ms step_avg:232.51ms +[2025-07-17 13:37:31] [Rank 0] step:4121/10000 train_time:958311ms step_avg:232.54ms +[2025-07-17 13:37:31] [Rank 0] step:4121/10000 train_time:958311ms step_avg:232.54ms +[2025-07-17 13:37:36] [Rank 0] PRINT: step:4125/10000 val_loss:5.2604 train_time:959761ms step_avg:232.67ms +[2025-07-17 13:37:36] [Rank 0] PRINT: step:4125/10000 val_loss:5.2604 train_time:959761ms step_avg:232.67ms +[2025-07-17 13:37:40] [Rank 0] step:4141/10000 train_time:963120ms step_avg:232.58ms +[2025-07-17 13:37:40] [Rank 0] step:4141/10000 train_time:963120ms step_avg:232.58ms +[2025-07-17 13:37:45] [Rank 0] step:4161/10000 train_time:967928ms step_avg:232.62ms +[2025-07-17 13:37:45] [Rank 0] step:4161/10000 train_time:967928ms step_avg:232.62ms +[2025-07-17 13:37:50] [Rank 0] step:4181/10000 train_time:972737ms step_avg:232.66ms +[2025-07-17 13:37:50] [Rank 0] step:4181/10000 train_time:972737ms step_avg:232.66ms +[2025-07-17 13:37:55] [Rank 0] step:4201/10000 train_time:977549ms step_avg:232.69ms +[2025-07-17 13:37:55] [Rank 0] step:4201/10000 train_time:977549ms step_avg:232.69ms +[2025-07-17 13:37:59] [Rank 0] step:4221/10000 train_time:982360ms step_avg:232.73ms +[2025-07-17 13:37:59] [Rank 0] step:4221/10000 train_time:982360ms step_avg:232.73ms +[2025-07-17 13:38:04] [Rank 0] 
step:4241/10000 train_time:987172ms step_avg:232.77ms +[2025-07-17 13:38:04] [Rank 0] step:4241/10000 train_time:987172ms step_avg:232.77ms +[2025-07-17 13:38:11] [Rank 0] PRINT: step:4250/10000 val_loss:4.8059 train_time:989827ms step_avg:232.90ms +[2025-07-17 13:38:11] [Rank 0] PRINT: step:4250/10000 val_loss:4.8059 train_time:989827ms step_avg:232.90ms +[2025-07-17 13:38:13] [Rank 0] step:4261/10000 train_time:991986ms step_avg:232.81ms +[2025-07-17 13:38:13] [Rank 0] step:4261/10000 train_time:991986ms step_avg:232.81ms +[2025-07-17 13:38:18] [Rank 0] step:4281/10000 train_time:996802ms step_avg:232.84ms +[2025-07-17 13:38:18] [Rank 0] step:4281/10000 train_time:996802ms step_avg:232.84ms +[2025-07-17 13:38:23] [Rank 0] step:4301/10000 train_time:1001615ms step_avg:232.88ms +[2025-07-17 13:38:23] [Rank 0] step:4301/10000 train_time:1001615ms step_avg:232.88ms +[2025-07-17 13:38:28] [Rank 0] step:4321/10000 train_time:1006436ms step_avg:232.92ms +[2025-07-17 13:38:28] [Rank 0] step:4321/10000 train_time:1006436ms step_avg:232.92ms +[2025-07-17 13:38:33] [Rank 0] step:4341/10000 train_time:1011252ms step_avg:232.95ms +[2025-07-17 13:38:33] [Rank 0] step:4341/10000 train_time:1011252ms step_avg:232.95ms +[2025-07-17 13:38:37] [Rank 0] step:4361/10000 train_time:1016069ms step_avg:232.99ms +[2025-07-17 13:38:37] [Rank 0] step:4361/10000 train_time:1016069ms step_avg:232.99ms +[2025-07-17 13:38:45] [Rank 0] PRINT: step:4375/10000 val_loss:4.7842 train_time:1019926ms step_avg:233.13ms +[2025-07-17 13:38:45] [Rank 0] PRINT: step:4375/10000 val_loss:4.7842 train_time:1019926ms step_avg:233.13ms +[2025-07-17 13:38:47] [Rank 0] step:4381/10000 train_time:1020885ms step_avg:233.03ms +[2025-07-17 13:38:47] [Rank 0] step:4381/10000 train_time:1020885ms step_avg:233.03ms +[2025-07-17 13:38:52] [Rank 0] step:4401/10000 train_time:1025700ms step_avg:233.06ms +[2025-07-17 13:38:52] [Rank 0] step:4401/10000 train_time:1025700ms step_avg:233.06ms +[2025-07-17 13:38:57] [Rank 0] 
step:4421/10000 train_time:1030512ms step_avg:233.09ms +[2025-07-17 13:38:57] [Rank 0] step:4421/10000 train_time:1030512ms step_avg:233.09ms +[2025-07-17 13:39:01] [Rank 0] step:4441/10000 train_time:1035327ms step_avg:233.13ms +[2025-07-17 13:39:01] [Rank 0] step:4441/10000 train_time:1035327ms step_avg:233.13ms +[2025-07-17 13:39:06] [Rank 0] step:4461/10000 train_time:1040157ms step_avg:233.17ms +[2025-07-17 13:39:06] [Rank 0] step:4461/10000 train_time:1040157ms step_avg:233.17ms +[2025-07-17 13:39:11] [Rank 0] step:4481/10000 train_time:1044991ms step_avg:233.20ms +[2025-07-17 13:39:11] [Rank 0] step:4481/10000 train_time:1044991ms step_avg:233.20ms +[2025-07-17 13:39:20] [Rank 0] PRINT: step:4500/10000 val_loss:4.6896 train_time:1050069ms step_avg:233.35ms +[2025-07-17 13:39:20] [Rank 0] PRINT: step:4500/10000 val_loss:4.6896 train_time:1050069ms step_avg:233.35ms +[2025-07-17 13:39:20] [Rank 0] step:4501/10000 train_time:1050084ms step_avg:233.30ms +[2025-07-17 13:39:20] [Rank 0] step:4501/10000 train_time:1050084ms step_avg:233.30ms +[2025-07-17 13:39:25] [Rank 0] step:4521/10000 train_time:1054658ms step_avg:233.28ms +[2025-07-17 13:39:25] [Rank 0] step:4521/10000 train_time:1054658ms step_avg:233.28ms +[2025-07-17 13:39:30] [Rank 0] step:4541/10000 train_time:1059493ms step_avg:233.32ms +[2025-07-17 13:39:30] [Rank 0] step:4541/10000 train_time:1059493ms step_avg:233.32ms +[2025-07-17 13:39:35] [Rank 0] step:4561/10000 train_time:1064328ms step_avg:233.35ms +[2025-07-17 13:39:35] [Rank 0] step:4561/10000 train_time:1064328ms step_avg:233.35ms +[2025-07-17 13:39:40] [Rank 0] step:4581/10000 train_time:1069429ms step_avg:233.45ms +[2025-07-17 13:39:40] [Rank 0] step:4581/10000 train_time:1069429ms step_avg:233.45ms +[2025-07-17 13:39:45] [Rank 0] step:4601/10000 train_time:1074273ms step_avg:233.49ms +[2025-07-17 13:39:45] [Rank 0] step:4601/10000 train_time:1074273ms step_avg:233.49ms +[2025-07-17 13:39:50] [Rank 0] step:4621/10000 train_time:1079104ms 
step_avg:233.52ms +[2025-07-17 13:39:50] [Rank 0] step:4621/10000 train_time:1079104ms step_avg:233.52ms +[2025-07-17 13:39:55] [Rank 0] PRINT: step:4625/10000 val_loss:4.5560 train_time:1080559ms step_avg:233.63ms +[2025-07-17 13:39:55] [Rank 0] PRINT: step:4625/10000 val_loss:4.5560 train_time:1080559ms step_avg:233.63ms +[2025-07-17 13:39:59] [Rank 0] step:4641/10000 train_time:1083934ms step_avg:233.56ms +[2025-07-17 13:39:59] [Rank 0] step:4641/10000 train_time:1083934ms step_avg:233.56ms +[2025-07-17 13:40:04] [Rank 0] step:4661/10000 train_time:1088772ms step_avg:233.59ms +[2025-07-17 13:40:04] [Rank 0] step:4661/10000 train_time:1088772ms step_avg:233.59ms +[2025-07-17 13:40:09] [Rank 0] step:4681/10000 train_time:1093607ms step_avg:233.63ms +[2025-07-17 13:40:09] [Rank 0] step:4681/10000 train_time:1093607ms step_avg:233.63ms +[2025-07-17 13:40:14] [Rank 0] step:4701/10000 train_time:1098443ms step_avg:233.66ms +[2025-07-17 13:40:14] [Rank 0] step:4701/10000 train_time:1098443ms step_avg:233.66ms +[2025-07-17 13:40:19] [Rank 0] step:4721/10000 train_time:1103273ms step_avg:233.69ms +[2025-07-17 13:40:19] [Rank 0] step:4721/10000 train_time:1103273ms step_avg:233.69ms +[2025-07-17 13:40:23] [Rank 0] step:4741/10000 train_time:1108104ms step_avg:233.73ms +[2025-07-17 13:40:23] [Rank 0] step:4741/10000 train_time:1108104ms step_avg:233.73ms +[2025-07-17 13:40:30] [Rank 0] PRINT: step:4750/10000 val_loss:4.7428 train_time:1110767ms step_avg:233.85ms +[2025-07-17 13:40:30] [Rank 0] PRINT: step:4750/10000 val_loss:4.7428 train_time:1110767ms step_avg:233.85ms +[2025-07-17 13:40:33] [Rank 0] step:4761/10000 train_time:1112932ms step_avg:233.76ms +[2025-07-17 13:40:33] [Rank 0] step:4761/10000 train_time:1112932ms step_avg:233.76ms +[2025-07-17 13:40:38] [Rank 0] step:4781/10000 train_time:1117753ms step_avg:233.79ms +[2025-07-17 13:40:38] [Rank 0] step:4781/10000 train_time:1117753ms step_avg:233.79ms +[2025-07-17 13:40:43] [Rank 0] step:4801/10000 
train_time:1122572ms step_avg:233.82ms +[2025-07-17 13:40:43] [Rank 0] step:4801/10000 train_time:1122572ms step_avg:233.82ms +[2025-07-17 13:40:47] [Rank 0] step:4821/10000 train_time:1127398ms step_avg:233.85ms +[2025-07-17 13:40:47] [Rank 0] step:4821/10000 train_time:1127398ms step_avg:233.85ms +[2025-07-17 13:40:52] [Rank 0] step:4841/10000 train_time:1132230ms step_avg:233.88ms +[2025-07-17 13:40:52] [Rank 0] step:4841/10000 train_time:1132230ms step_avg:233.88ms +[2025-07-17 13:40:57] [Rank 0] step:4861/10000 train_time:1137055ms step_avg:233.91ms +[2025-07-17 13:40:57] [Rank 0] step:4861/10000 train_time:1137055ms step_avg:233.91ms +[2025-07-17 13:41:05] [Rank 0] PRINT: step:4875/10000 val_loss:4.5413 train_time:1140918ms step_avg:234.03ms +[2025-07-17 13:41:05] [Rank 0] PRINT: step:4875/10000 val_loss:4.5413 train_time:1140918ms step_avg:234.03ms +[2025-07-17 13:41:06] [Rank 0] step:4881/10000 train_time:1141875ms step_avg:233.94ms +[2025-07-17 13:41:06] [Rank 0] step:4881/10000 train_time:1141875ms step_avg:233.94ms +[2025-07-17 13:41:11] [Rank 0] step:4901/10000 train_time:1146693ms step_avg:233.97ms +[2025-07-17 13:41:11] [Rank 0] step:4901/10000 train_time:1146693ms step_avg:233.97ms +[2025-07-17 13:41:16] [Rank 0] step:4921/10000 train_time:1151507ms step_avg:234.00ms +[2025-07-17 13:41:16] [Rank 0] step:4921/10000 train_time:1151507ms step_avg:234.00ms +[2025-07-17 13:41:21] [Rank 0] step:4941/10000 train_time:1156325ms step_avg:234.03ms +[2025-07-17 13:41:21] [Rank 0] step:4941/10000 train_time:1156325ms step_avg:234.03ms +[2025-07-17 13:41:26] [Rank 0] step:4961/10000 train_time:1161132ms step_avg:234.05ms +[2025-07-17 13:41:26] [Rank 0] step:4961/10000 train_time:1161132ms step_avg:234.05ms +[2025-07-17 13:41:30] [Rank 0] step:4981/10000 train_time:1165935ms step_avg:234.08ms +[2025-07-17 13:41:30] [Rank 0] step:4981/10000 train_time:1165935ms step_avg:234.08ms +[2025-07-17 13:41:40] [Rank 0] PRINT: step:5000/10000 val_loss:4.6503 
train_time:1170988ms step_avg:234.20ms +[2025-07-17 13:41:40] [Rank 0] PRINT: step:5000/10000 val_loss:4.6503 train_time:1170988ms step_avg:234.20ms +[2025-07-17 13:41:40] [Rank 0] step:5001/10000 train_time:1171001ms step_avg:234.15ms +[2025-07-17 13:41:40] [Rank 0] step:5001/10000 train_time:1171001ms step_avg:234.15ms +[2025-07-17 13:41:45] [Rank 0] step:5021/10000 train_time:1175585ms step_avg:234.13ms +[2025-07-17 13:41:45] [Rank 0] step:5021/10000 train_time:1175585ms step_avg:234.13ms +[2025-07-17 13:41:50] [Rank 0] step:5041/10000 train_time:1180398ms step_avg:234.16ms +[2025-07-17 13:41:50] [Rank 0] step:5041/10000 train_time:1180398ms step_avg:234.16ms +[2025-07-17 13:41:54] [Rank 0] step:5061/10000 train_time:1185215ms step_avg:234.19ms +[2025-07-17 13:41:54] [Rank 0] step:5061/10000 train_time:1185215ms step_avg:234.19ms +[2025-07-17 13:41:59] [Rank 0] step:5081/10000 train_time:1190144ms step_avg:234.23ms +[2025-07-17 13:41:59] [Rank 0] step:5081/10000 train_time:1190144ms step_avg:234.23ms +[2025-07-17 13:42:04] [Rank 0] step:5101/10000 train_time:1195133ms step_avg:234.29ms +[2025-07-17 13:42:04] [Rank 0] step:5101/10000 train_time:1195133ms step_avg:234.29ms +[2025-07-17 13:42:09] [Rank 0] step:5121/10000 train_time:1199954ms step_avg:234.32ms +[2025-07-17 13:42:09] [Rank 0] step:5121/10000 train_time:1199954ms step_avg:234.32ms +[2025-07-17 13:42:15] [Rank 0] PRINT: step:5125/10000 val_loss:4.2800 train_time:1201408ms step_avg:234.42ms +[2025-07-17 13:42:15] [Rank 0] PRINT: step:5125/10000 val_loss:4.2800 train_time:1201408ms step_avg:234.42ms +[2025-07-17 13:42:19] [Rank 0] step:5141/10000 train_time:1204771ms step_avg:234.35ms +[2025-07-17 13:42:19] [Rank 0] step:5141/10000 train_time:1204771ms step_avg:234.35ms +[2025-07-17 13:42:23] [Rank 0] step:5161/10000 train_time:1209594ms step_avg:234.37ms +[2025-07-17 13:42:23] [Rank 0] step:5161/10000 train_time:1209594ms step_avg:234.37ms +[2025-07-17 13:42:28] [Rank 0] step:5181/10000 
train_time:1214420ms step_avg:234.40ms +[2025-07-17 13:42:28] [Rank 0] step:5181/10000 train_time:1214420ms step_avg:234.40ms +[2025-07-17 13:42:33] [Rank 0] step:5201/10000 train_time:1219286ms step_avg:234.43ms +[2025-07-17 13:42:33] [Rank 0] step:5201/10000 train_time:1219286ms step_avg:234.43ms +[2025-07-17 13:42:38] [Rank 0] step:5221/10000 train_time:1224185ms step_avg:234.47ms +[2025-07-17 13:42:38] [Rank 0] step:5221/10000 train_time:1224185ms step_avg:234.47ms +[2025-07-17 13:42:43] [Rank 0] step:5241/10000 train_time:1229080ms step_avg:234.51ms +[2025-07-17 13:42:43] [Rank 0] step:5241/10000 train_time:1229080ms step_avg:234.51ms +[2025-07-17 13:42:50] [Rank 0] PRINT: step:5250/10000 val_loss:4.3038 train_time:1231778ms step_avg:234.62ms +[2025-07-17 13:42:50] [Rank 0] PRINT: step:5250/10000 val_loss:4.3038 train_time:1231778ms step_avg:234.62ms +[2025-07-17 13:42:52] [Rank 0] step:5261/10000 train_time:1233970ms step_avg:234.55ms +[2025-07-17 13:42:52] [Rank 0] step:5261/10000 train_time:1233970ms step_avg:234.55ms +[2025-07-17 13:42:57] [Rank 0] step:5281/10000 train_time:1238867ms step_avg:234.59ms +[2025-07-17 13:42:57] [Rank 0] step:5281/10000 train_time:1238867ms step_avg:234.59ms +[2025-07-17 13:43:02] [Rank 0] step:5301/10000 train_time:1243764ms step_avg:234.63ms +[2025-07-17 13:43:02] [Rank 0] step:5301/10000 train_time:1243764ms step_avg:234.63ms +[2025-07-17 13:43:07] [Rank 0] step:5321/10000 train_time:1248653ms step_avg:234.67ms +[2025-07-17 13:43:07] [Rank 0] step:5321/10000 train_time:1248653ms step_avg:234.67ms +[2025-07-17 13:43:12] [Rank 0] step:5341/10000 train_time:1253556ms step_avg:234.70ms +[2025-07-17 13:43:12] [Rank 0] step:5341/10000 train_time:1253556ms step_avg:234.70ms +[2025-07-17 13:43:17] [Rank 0] step:5361/10000 train_time:1258449ms step_avg:234.74ms +[2025-07-17 13:43:17] [Rank 0] step:5361/10000 train_time:1258449ms step_avg:234.74ms +[2025-07-17 13:43:25] [Rank 0] PRINT: step:5375/10000 val_loss:4.3637 
train_time:1262375ms step_avg:234.86ms +[2025-07-17 13:43:25] [Rank 0] PRINT: step:5375/10000 val_loss:4.3637 train_time:1262375ms step_avg:234.86ms +[2025-07-17 13:43:27] [Rank 0] step:5381/10000 train_time:1263351ms step_avg:234.78ms +[2025-07-17 13:43:27] [Rank 0] step:5381/10000 train_time:1263351ms step_avg:234.78ms +[2025-07-17 13:43:31] [Rank 0] step:5401/10000 train_time:1268250ms step_avg:234.82ms +[2025-07-17 13:43:31] [Rank 0] step:5401/10000 train_time:1268250ms step_avg:234.82ms +[2025-07-17 13:43:36] [Rank 0] step:5421/10000 train_time:1273161ms step_avg:234.86ms +[2025-07-17 13:43:36] [Rank 0] step:5421/10000 train_time:1273161ms step_avg:234.86ms +[2025-07-17 13:43:41] [Rank 0] step:5441/10000 train_time:1278058ms step_avg:234.89ms +[2025-07-17 13:43:41] [Rank 0] step:5441/10000 train_time:1278058ms step_avg:234.89ms +[2025-07-17 13:43:46] [Rank 0] step:5461/10000 train_time:1282975ms step_avg:234.93ms +[2025-07-17 13:43:46] [Rank 0] step:5461/10000 train_time:1282975ms step_avg:234.93ms +[2025-07-17 13:43:51] [Rank 0] step:5481/10000 train_time:1287887ms step_avg:234.97ms +[2025-07-17 13:43:51] [Rank 0] step:5481/10000 train_time:1287887ms step_avg:234.97ms +[2025-07-17 13:44:00] [Rank 0] PRINT: step:5500/10000 val_loss:4.7942 train_time:1293037ms step_avg:235.10ms +[2025-07-17 13:44:00] [Rank 0] PRINT: step:5500/10000 val_loss:4.7942 train_time:1293037ms step_avg:235.10ms +[2025-07-17 13:44:01] [Rank 0] step:5501/10000 train_time:1293050ms step_avg:235.06ms +[2025-07-17 13:44:01] [Rank 0] step:5501/10000 train_time:1293050ms step_avg:235.06ms +[2025-07-17 13:44:06] [Rank 0] step:5521/10000 train_time:1297683ms step_avg:235.04ms +[2025-07-17 13:44:06] [Rank 0] step:5521/10000 train_time:1297683ms step_avg:235.04ms +[2025-07-17 13:44:11] [Rank 0] step:5541/10000 train_time:1302590ms step_avg:235.08ms +[2025-07-17 13:44:11] [Rank 0] step:5541/10000 train_time:1302590ms step_avg:235.08ms +[2025-07-17 13:44:15] [Rank 0] step:5561/10000 
train_time:1307498ms step_avg:235.12ms +[2025-07-17 13:44:15] [Rank 0] step:5561/10000 train_time:1307498ms step_avg:235.12ms +[2025-07-17 13:44:20] [Rank 0] step:5581/10000 train_time:1312399ms step_avg:235.15ms +[2025-07-17 13:44:20] [Rank 0] step:5581/10000 train_time:1312399ms step_avg:235.15ms +[2025-07-17 13:44:25] [Rank 0] step:5601/10000 train_time:1317562ms step_avg:235.24ms +[2025-07-17 13:44:25] [Rank 0] step:5601/10000 train_time:1317562ms step_avg:235.24ms +[2025-07-17 13:44:30] [Rank 0] step:5621/10000 train_time:1322470ms step_avg:235.27ms +[2025-07-17 13:44:30] [Rank 0] step:5621/10000 train_time:1322470ms step_avg:235.27ms +[2025-07-17 13:44:36] [Rank 0] PRINT: step:5625/10000 val_loss:5.0263 train_time:1324046ms step_avg:235.39ms +[2025-07-17 13:44:36] [Rank 0] PRINT: step:5625/10000 val_loss:5.0263 train_time:1324046ms step_avg:235.39ms +[2025-07-17 13:44:40] [Rank 0] step:5641/10000 train_time:1327476ms step_avg:235.33ms +[2025-07-17 13:44:40] [Rank 0] step:5641/10000 train_time:1327476ms step_avg:235.33ms +[2025-07-17 13:44:45] [Rank 0] step:5661/10000 train_time:1332487ms step_avg:235.38ms +[2025-07-17 13:44:45] [Rank 0] step:5661/10000 train_time:1332487ms step_avg:235.38ms +[2025-07-17 13:44:50] [Rank 0] step:5681/10000 train_time:1337404ms step_avg:235.42ms +[2025-07-17 13:44:50] [Rank 0] step:5681/10000 train_time:1337404ms step_avg:235.42ms +[2025-07-17 13:44:55] [Rank 0] step:5701/10000 train_time:1342311ms step_avg:235.45ms +[2025-07-17 13:44:55] [Rank 0] step:5701/10000 train_time:1342311ms step_avg:235.45ms +[2025-07-17 13:45:00] [Rank 0] step:5721/10000 train_time:1347222ms step_avg:235.49ms +[2025-07-17 13:45:00] [Rank 0] step:5721/10000 train_time:1347222ms step_avg:235.49ms +[2025-07-17 13:45:05] [Rank 0] step:5741/10000 train_time:1352140ms step_avg:235.52ms +[2025-07-17 13:45:05] [Rank 0] step:5741/10000 train_time:1352140ms step_avg:235.52ms +[2025-07-17 13:45:12] [Rank 0] PRINT: step:5750/10000 val_loss:4.9634 
train_time:1354844ms step_avg:235.63ms +[2025-07-17 13:45:12] [Rank 0] PRINT: step:5750/10000 val_loss:4.9634 train_time:1354844ms step_avg:235.63ms +[2025-07-17 13:45:14] [Rank 0] step:5761/10000 train_time:1357049ms step_avg:235.56ms +[2025-07-17 13:45:14] [Rank 0] step:5761/10000 train_time:1357049ms step_avg:235.56ms +[2025-07-17 13:45:19] [Rank 0] step:5781/10000 train_time:1361961ms step_avg:235.59ms +[2025-07-17 13:45:19] [Rank 0] step:5781/10000 train_time:1361961ms step_avg:235.59ms +[2025-07-17 13:45:24] [Rank 0] step:5801/10000 train_time:1366874ms step_avg:235.63ms +[2025-07-17 13:45:24] [Rank 0] step:5801/10000 train_time:1366874ms step_avg:235.63ms +[2025-07-17 13:45:29] [Rank 0] step:5821/10000 train_time:1371788ms step_avg:235.66ms +[2025-07-17 13:45:29] [Rank 0] step:5821/10000 train_time:1371788ms step_avg:235.66ms +[2025-07-17 13:45:34] [Rank 0] step:5841/10000 train_time:1376702ms step_avg:235.70ms +[2025-07-17 13:45:34] [Rank 0] step:5841/10000 train_time:1376702ms step_avg:235.70ms +[2025-07-17 13:45:39] [Rank 0] step:5861/10000 train_time:1381611ms step_avg:235.73ms +[2025-07-17 13:45:39] [Rank 0] step:5861/10000 train_time:1381611ms step_avg:235.73ms +[2025-07-17 13:45:47] [Rank 0] PRINT: step:5875/10000 val_loss:4.8478 train_time:1385541ms step_avg:235.84ms +[2025-07-17 13:45:47] [Rank 0] PRINT: step:5875/10000 val_loss:4.8478 train_time:1385541ms step_avg:235.84ms +[2025-07-17 13:45:49] [Rank 0] step:5881/10000 train_time:1386521ms step_avg:235.76ms +[2025-07-17 13:45:49] [Rank 0] step:5881/10000 train_time:1386521ms step_avg:235.76ms +[2025-07-17 13:45:53] [Rank 0] step:5901/10000 train_time:1391441ms step_avg:235.80ms +[2025-07-17 13:45:53] [Rank 0] step:5901/10000 train_time:1391441ms step_avg:235.80ms +[2025-07-17 13:45:58] [Rank 0] step:5921/10000 train_time:1396352ms step_avg:235.83ms +[2025-07-17 13:45:58] [Rank 0] step:5921/10000 train_time:1396352ms step_avg:235.83ms +[2025-07-17 13:46:03] [Rank 0] step:5941/10000 
train_time:1401376ms step_avg:235.88ms +[2025-07-17 13:46:03] [Rank 0] step:5941/10000 train_time:1401376ms step_avg:235.88ms +[2025-07-17 13:46:08] [Rank 0] step:5961/10000 train_time:1406301ms step_avg:235.92ms +[2025-07-17 13:46:08] [Rank 0] step:5961/10000 train_time:1406301ms step_avg:235.92ms +[2025-07-17 13:46:13] [Rank 0] step:5981/10000 train_time:1411221ms step_avg:235.95ms +[2025-07-17 13:46:13] [Rank 0] step:5981/10000 train_time:1411221ms step_avg:235.95ms +[2025-07-17 13:46:23] [Rank 0] PRINT: step:6000/10000 val_loss:4.8189 train_time:1416396ms step_avg:236.07ms +[2025-07-17 13:46:23] [Rank 0] PRINT: step:6000/10000 val_loss:4.8189 train_time:1416396ms step_avg:236.07ms +[2025-07-17 13:46:23] [Rank 0] step:6001/10000 train_time:1416411ms step_avg:236.03ms +[2025-07-17 13:46:23] [Rank 0] step:6001/10000 train_time:1416411ms step_avg:236.03ms +[2025-07-17 13:46:28] [Rank 0] step:6021/10000 train_time:1421065ms step_avg:236.02ms +[2025-07-17 13:46:28] [Rank 0] step:6021/10000 train_time:1421065ms step_avg:236.02ms +[2025-07-17 13:46:33] [Rank 0] step:6041/10000 train_time:1425983ms step_avg:236.05ms +[2025-07-17 13:46:33] [Rank 0] step:6041/10000 train_time:1425983ms step_avg:236.05ms +[2025-07-17 13:46:38] [Rank 0] step:6061/10000 train_time:1430894ms step_avg:236.08ms +[2025-07-17 13:46:38] [Rank 0] step:6061/10000 train_time:1430894ms step_avg:236.08ms +[2025-07-17 13:46:43] [Rank 0] step:6081/10000 train_time:1435811ms step_avg:236.11ms +[2025-07-17 13:46:43] [Rank 0] step:6081/10000 train_time:1435811ms step_avg:236.11ms +[2025-07-17 13:46:48] [Rank 0] step:6101/10000 train_time:1440969ms step_avg:236.19ms +[2025-07-17 13:46:48] [Rank 0] step:6101/10000 train_time:1440969ms step_avg:236.19ms +[2025-07-17 13:46:53] [Rank 0] step:6121/10000 train_time:1445893ms step_avg:236.22ms +[2025-07-17 13:46:53] [Rank 0] step:6121/10000 train_time:1445893ms step_avg:236.22ms +[2025-07-17 13:46:58] [Rank 0] PRINT: step:6125/10000 val_loss:5.0392 
train_time:1447372ms step_avg:236.31ms +[2025-07-17 13:46:58] [Rank 0] PRINT: step:6125/10000 val_loss:5.0392 train_time:1447372ms step_avg:236.31ms +[2025-07-17 13:47:02] [Rank 0] step:6141/10000 train_time:1450805ms step_avg:236.25ms +[2025-07-17 13:47:02] [Rank 0] step:6141/10000 train_time:1450805ms step_avg:236.25ms +[2025-07-17 13:47:07] [Rank 0] step:6161/10000 train_time:1455716ms step_avg:236.28ms +[2025-07-17 13:47:07] [Rank 0] step:6161/10000 train_time:1455716ms step_avg:236.28ms +[2025-07-17 13:47:12] [Rank 0] step:6181/10000 train_time:1460637ms step_avg:236.31ms +[2025-07-17 13:47:12] [Rank 0] step:6181/10000 train_time:1460637ms step_avg:236.31ms +[2025-07-17 13:47:17] [Rank 0] step:6201/10000 train_time:1465561ms step_avg:236.34ms +[2025-07-17 13:47:17] [Rank 0] step:6201/10000 train_time:1465561ms step_avg:236.34ms +[2025-07-17 13:47:22] [Rank 0] step:6221/10000 train_time:1470480ms step_avg:236.37ms +[2025-07-17 13:47:22] [Rank 0] step:6221/10000 train_time:1470480ms step_avg:236.37ms +[2025-07-17 13:47:27] [Rank 0] step:6241/10000 train_time:1475398ms step_avg:236.40ms +[2025-07-17 13:47:27] [Rank 0] step:6241/10000 train_time:1475398ms step_avg:236.40ms +[2025-07-17 13:47:33] [Rank 0] PRINT: step:6250/10000 val_loss:5.1444 train_time:1478104ms step_avg:236.50ms +[2025-07-17 13:47:33] [Rank 0] PRINT: step:6250/10000 val_loss:5.1444 train_time:1478104ms step_avg:236.50ms +[2025-07-17 13:47:36] [Rank 0] step:6261/10000 train_time:1480309ms step_avg:236.43ms +[2025-07-17 13:47:36] [Rank 0] step:6261/10000 train_time:1480309ms step_avg:236.43ms +[2025-07-17 13:47:41] [Rank 0] step:6281/10000 train_time:1485231ms step_avg:236.46ms +[2025-07-17 13:47:41] [Rank 0] step:6281/10000 train_time:1485231ms step_avg:236.46ms +[2025-07-17 13:47:46] [Rank 0] step:6301/10000 train_time:1490144ms step_avg:236.49ms +[2025-07-17 13:47:46] [Rank 0] step:6301/10000 train_time:1490144ms step_avg:236.49ms +[2025-07-17 13:47:51] [Rank 0] step:6321/10000 
train_time:1495062ms step_avg:236.52ms +[2025-07-17 13:47:51] [Rank 0] step:6321/10000 train_time:1495062ms step_avg:236.52ms +[2025-07-17 13:47:56] [Rank 0] step:6341/10000 train_time:1500077ms step_avg:236.57ms +[2025-07-17 13:47:56] [Rank 0] step:6341/10000 train_time:1500077ms step_avg:236.57ms +[2025-07-17 13:48:01] [Rank 0] step:6361/10000 train_time:1504993ms step_avg:236.60ms +[2025-07-17 13:48:01] [Rank 0] step:6361/10000 train_time:1504993ms step_avg:236.60ms +[2025-07-17 13:48:08] [Rank 0] PRINT: step:6375/10000 val_loss:5.0746 train_time:1508922ms step_avg:236.69ms +[2025-07-17 13:48:08] [Rank 0] PRINT: step:6375/10000 val_loss:5.0746 train_time:1508922ms step_avg:236.69ms +[2025-07-17 13:48:10] [Rank 0] step:6381/10000 train_time:1509901ms step_avg:236.62ms +[2025-07-17 13:48:10] [Rank 0] step:6381/10000 train_time:1509901ms step_avg:236.62ms +[2025-07-17 13:48:15] [Rank 0] step:6401/10000 train_time:1514809ms step_avg:236.65ms +[2025-07-17 13:48:15] [Rank 0] step:6401/10000 train_time:1514809ms step_avg:236.65ms +[2025-07-17 13:48:20] [Rank 0] step:6421/10000 train_time:1519720ms step_avg:236.68ms +[2025-07-17 13:48:20] [Rank 0] step:6421/10000 train_time:1519720ms step_avg:236.68ms +[2025-07-17 13:48:25] [Rank 0] step:6441/10000 train_time:1524635ms step_avg:236.71ms +[2025-07-17 13:48:25] [Rank 0] step:6441/10000 train_time:1524635ms step_avg:236.71ms +[2025-07-17 13:48:30] [Rank 0] step:6461/10000 train_time:1529557ms step_avg:236.74ms +[2025-07-17 13:48:30] [Rank 0] step:6461/10000 train_time:1529557ms step_avg:236.74ms +[2025-07-17 13:48:34] [Rank 0] step:6481/10000 train_time:1534474ms step_avg:236.77ms +[2025-07-17 13:48:34] [Rank 0] step:6481/10000 train_time:1534474ms step_avg:236.77ms +[2025-07-17 13:48:44] [Rank 0] PRINT: step:6500/10000 val_loss:5.1015 train_time:1539638ms step_avg:236.87ms +[2025-07-17 13:48:44] [Rank 0] PRINT: step:6500/10000 val_loss:5.1015 train_time:1539638ms step_avg:236.87ms +[2025-07-17 13:48:44] [Rank 0] 
step:6501/10000 train_time:1539652ms step_avg:236.83ms +[2025-07-17 13:48:44] [Rank 0] step:6501/10000 train_time:1539652ms step_avg:236.83ms +[2025-07-17 13:48:49] [Rank 0] step:6521/10000 train_time:1544305ms step_avg:236.82ms +[2025-07-17 13:48:49] [Rank 0] step:6521/10000 train_time:1544305ms step_avg:236.82ms +[2025-07-17 13:48:54] [Rank 0] step:6541/10000 train_time:1549220ms step_avg:236.85ms +[2025-07-17 13:48:54] [Rank 0] step:6541/10000 train_time:1549220ms step_avg:236.85ms +[2025-07-17 13:48:59] [Rank 0] step:6561/10000 train_time:1554144ms step_avg:236.88ms +[2025-07-17 13:48:59] [Rank 0] step:6561/10000 train_time:1554144ms step_avg:236.88ms +[2025-07-17 13:49:04] [Rank 0] step:6581/10000 train_time:1559063ms step_avg:236.90ms +[2025-07-17 13:49:04] [Rank 0] step:6581/10000 train_time:1559063ms step_avg:236.90ms +[2025-07-17 13:49:09] [Rank 0] step:6601/10000 train_time:1563989ms step_avg:236.93ms +[2025-07-17 13:49:09] [Rank 0] step:6601/10000 train_time:1563989ms step_avg:236.93ms +[2025-07-17 13:49:14] [Rank 0] step:6621/10000 train_time:1569175ms step_avg:237.00ms +[2025-07-17 13:49:14] [Rank 0] step:6621/10000 train_time:1569175ms step_avg:237.00ms +[2025-07-17 13:49:20] [Rank 0] PRINT: step:6625/10000 val_loss:4.9705 train_time:1570653ms step_avg:237.08ms +[2025-07-17 13:49:20] [Rank 0] PRINT: step:6625/10000 val_loss:4.9705 train_time:1570653ms step_avg:237.08ms +[2025-07-17 13:49:23] [Rank 0] step:6641/10000 train_time:1574085ms step_avg:237.03ms +[2025-07-17 13:49:23] [Rank 0] step:6641/10000 train_time:1574085ms step_avg:237.03ms +[2025-07-17 13:49:28] [Rank 0] step:6661/10000 train_time:1579001ms step_avg:237.05ms +[2025-07-17 13:49:28] [Rank 0] step:6661/10000 train_time:1579001ms step_avg:237.05ms +[2025-07-17 13:49:33] [Rank 0] step:6681/10000 train_time:1583964ms step_avg:237.08ms +[2025-07-17 13:49:33] [Rank 0] step:6681/10000 train_time:1583964ms step_avg:237.08ms +[2025-07-17 13:49:38] [Rank 0] step:6701/10000 train_time:1588941ms 
step_avg:237.12ms +[2025-07-17 13:49:38] [Rank 0] step:6701/10000 train_time:1588941ms step_avg:237.12ms +[2025-07-17 13:49:43] [Rank 0] step:6721/10000 train_time:1593934ms step_avg:237.16ms +[2025-07-17 13:49:43] [Rank 0] step:6721/10000 train_time:1593934ms step_avg:237.16ms +[2025-07-17 13:49:48] [Rank 0] step:6741/10000 train_time:1598923ms step_avg:237.19ms +[2025-07-17 13:49:48] [Rank 0] step:6741/10000 train_time:1598923ms step_avg:237.19ms +[2025-07-17 13:49:55] [Rank 0] PRINT: step:6750/10000 val_loss:4.9325 train_time:1601663ms step_avg:237.28ms +[2025-07-17 13:49:55] [Rank 0] PRINT: step:6750/10000 val_loss:4.9325 train_time:1601663ms step_avg:237.28ms +[2025-07-17 13:49:58] [Rank 0] step:6761/10000 train_time:1603898ms step_avg:237.23ms +[2025-07-17 13:49:58] [Rank 0] step:6761/10000 train_time:1603898ms step_avg:237.23ms +[2025-07-17 13:50:03] [Rank 0] step:6781/10000 train_time:1608882ms step_avg:237.26ms +[2025-07-17 13:50:03] [Rank 0] step:6781/10000 train_time:1608882ms step_avg:237.26ms +[2025-07-17 13:50:08] [Rank 0] step:6801/10000 train_time:1613869ms step_avg:237.30ms +[2025-07-17 13:50:08] [Rank 0] step:6801/10000 train_time:1613869ms step_avg:237.30ms +[2025-07-17 13:50:13] [Rank 0] step:6821/10000 train_time:1618852ms step_avg:237.33ms +[2025-07-17 13:50:13] [Rank 0] step:6821/10000 train_time:1618852ms step_avg:237.33ms +[2025-07-17 13:50:18] [Rank 0] step:6841/10000 train_time:1623835ms step_avg:237.37ms +[2025-07-17 13:50:18] [Rank 0] step:6841/10000 train_time:1623835ms step_avg:237.37ms +[2025-07-17 13:50:23] [Rank 0] step:6861/10000 train_time:1628814ms step_avg:237.40ms +[2025-07-17 13:50:23] [Rank 0] step:6861/10000 train_time:1628814ms step_avg:237.40ms +[2025-07-17 13:50:31] [Rank 0] PRINT: step:6875/10000 val_loss:5.5200 train_time:1632795ms step_avg:237.50ms +[2025-07-17 13:50:31] [Rank 0] PRINT: step:6875/10000 val_loss:5.5200 train_time:1632795ms step_avg:237.50ms +[2025-07-17 13:50:33] [Rank 0] step:6881/10000 
train_time:1633786ms step_avg:237.43ms +[2025-07-17 13:50:33] [Rank 0] step:6881/10000 train_time:1633786ms step_avg:237.43ms +[2025-07-17 13:50:38] [Rank 0] step:6901/10000 train_time:1638762ms step_avg:237.47ms +[2025-07-17 13:50:38] [Rank 0] step:6901/10000 train_time:1638762ms step_avg:237.47ms +[2025-07-17 13:50:43] [Rank 0] step:6921/10000 train_time:1643741ms step_avg:237.50ms +[2025-07-17 13:50:43] [Rank 0] step:6921/10000 train_time:1643741ms step_avg:237.50ms +[2025-07-17 13:50:48] [Rank 0] step:6941/10000 train_time:1648727ms step_avg:237.53ms +[2025-07-17 13:50:48] [Rank 0] step:6941/10000 train_time:1648727ms step_avg:237.53ms +[2025-07-17 13:50:53] [Rank 0] step:6961/10000 train_time:1653712ms step_avg:237.57ms +[2025-07-17 13:50:53] [Rank 0] step:6961/10000 train_time:1653712ms step_avg:237.57ms +[2025-07-17 13:50:58] [Rank 0] step:6981/10000 train_time:1658697ms step_avg:237.60ms +[2025-07-17 13:50:58] [Rank 0] step:6981/10000 train_time:1658697ms step_avg:237.60ms +[2025-07-17 13:51:07] [Rank 0] PRINT: step:7000/10000 val_loss:5.7219 train_time:1663928ms step_avg:237.70ms +[2025-07-17 13:51:07] [Rank 0] PRINT: step:7000/10000 val_loss:5.7219 train_time:1663928ms step_avg:237.70ms +[2025-07-17 13:51:07] [Rank 0] step:7001/10000 train_time:1663942ms step_avg:237.67ms +[2025-07-17 13:51:07] [Rank 0] step:7001/10000 train_time:1663942ms step_avg:237.67ms +[2025-07-17 13:51:12] [Rank 0] step:7021/10000 train_time:1668656ms step_avg:237.67ms +[2025-07-17 13:51:12] [Rank 0] step:7021/10000 train_time:1668656ms step_avg:237.67ms +[2025-07-17 13:51:17] [Rank 0] step:7041/10000 train_time:1673634ms step_avg:237.70ms +[2025-07-17 13:51:17] [Rank 0] step:7041/10000 train_time:1673634ms step_avg:237.70ms +[2025-07-17 13:51:22] [Rank 0] step:7061/10000 train_time:1678609ms step_avg:237.73ms +[2025-07-17 13:51:22] [Rank 0] step:7061/10000 train_time:1678609ms step_avg:237.73ms +[2025-07-17 13:51:27] [Rank 0] step:7081/10000 train_time:1683587ms step_avg:237.76ms 
+[2025-07-17 13:51:27] [Rank 0] step:7081/10000 train_time:1683587ms step_avg:237.76ms +[2025-07-17 13:51:32] [Rank 0] step:7101/10000 train_time:1688558ms step_avg:237.79ms +[2025-07-17 13:51:32] [Rank 0] step:7101/10000 train_time:1688558ms step_avg:237.79ms +[2025-07-17 13:51:37] [Rank 0] step:7121/10000 train_time:1693781ms step_avg:237.86ms +[2025-07-17 13:51:37] [Rank 0] step:7121/10000 train_time:1693781ms step_avg:237.86ms +[2025-07-17 13:51:43] [Rank 0] PRINT: step:7125/10000 val_loss:5.1607 train_time:1695276ms step_avg:237.93ms +[2025-07-17 13:51:43] [Rank 0] PRINT: step:7125/10000 val_loss:5.1607 train_time:1695276ms step_avg:237.93ms +[2025-07-17 13:51:47] [Rank 0] step:7141/10000 train_time:1698763ms step_avg:237.89ms +[2025-07-17 13:51:47] [Rank 0] step:7141/10000 train_time:1698763ms step_avg:237.89ms +[2025-07-17 13:51:52] [Rank 0] step:7161/10000 train_time:1703746ms step_avg:237.92ms +[2025-07-17 13:51:52] [Rank 0] step:7161/10000 train_time:1703746ms step_avg:237.92ms +[2025-07-17 13:51:57] [Rank 0] step:7181/10000 train_time:1708722ms step_avg:237.95ms +[2025-07-17 13:51:57] [Rank 0] step:7181/10000 train_time:1708722ms step_avg:237.95ms +[2025-07-17 13:52:02] [Rank 0] step:7201/10000 train_time:1713718ms step_avg:237.98ms +[2025-07-17 13:52:02] [Rank 0] step:7201/10000 train_time:1713718ms step_avg:237.98ms +[2025-07-17 13:52:07] [Rank 0] step:7221/10000 train_time:1718696ms step_avg:238.01ms +[2025-07-17 13:52:07] [Rank 0] step:7221/10000 train_time:1718696ms step_avg:238.01ms +[2025-07-17 13:52:12] [Rank 0] step:7241/10000 train_time:1723669ms step_avg:238.04ms +[2025-07-17 13:52:12] [Rank 0] step:7241/10000 train_time:1723669ms step_avg:238.04ms +[2025-07-17 13:52:19] [Rank 0] PRINT: step:7250/10000 val_loss:5.1864 train_time:1726415ms step_avg:238.13ms +[2025-07-17 13:52:19] [Rank 0] PRINT: step:7250/10000 val_loss:5.1864 train_time:1726415ms step_avg:238.13ms +[2025-07-17 13:52:22] [Rank 0] step:7261/10000 train_time:1728645ms 
step_avg:238.07ms +[2025-07-17 13:52:22] [Rank 0] step:7261/10000 train_time:1728645ms step_avg:238.07ms +[2025-07-17 13:52:27] [Rank 0] step:7281/10000 train_time:1733623ms step_avg:238.10ms +[2025-07-17 13:52:27] [Rank 0] step:7281/10000 train_time:1733623ms step_avg:238.10ms +[2025-07-17 13:52:32] [Rank 0] step:7301/10000 train_time:1738598ms step_avg:238.13ms +[2025-07-17 13:52:32] [Rank 0] step:7301/10000 train_time:1738598ms step_avg:238.13ms +[2025-07-17 13:52:37] [Rank 0] step:7321/10000 train_time:1743591ms step_avg:238.16ms +[2025-07-17 13:52:37] [Rank 0] step:7321/10000 train_time:1743591ms step_avg:238.16ms +[2025-07-17 13:52:42] [Rank 0] step:7341/10000 train_time:1748569ms step_avg:238.19ms +[2025-07-17 13:52:42] [Rank 0] step:7341/10000 train_time:1748569ms step_avg:238.19ms +[2025-07-17 13:52:47] [Rank 0] step:7361/10000 train_time:1753554ms step_avg:238.22ms +[2025-07-17 13:52:47] [Rank 0] step:7361/10000 train_time:1753554ms step_avg:238.22ms +[2025-07-17 13:52:55] [Rank 0] PRINT: step:7375/10000 val_loss:5.2579 train_time:1757543ms step_avg:238.31ms +[2025-07-17 13:52:55] [Rank 0] PRINT: step:7375/10000 val_loss:5.2579 train_time:1757543ms step_avg:238.31ms +[2025-07-17 13:52:56] [Rank 0] step:7381/10000 train_time:1758534ms step_avg:238.25ms +[2025-07-17 13:52:56] [Rank 0] step:7381/10000 train_time:1758534ms step_avg:238.25ms +[2025-07-17 13:53:01] [Rank 0] step:7401/10000 train_time:1763517ms step_avg:238.28ms +[2025-07-17 13:53:01] [Rank 0] step:7401/10000 train_time:1763517ms step_avg:238.28ms +[2025-07-17 13:53:06] [Rank 0] step:7421/10000 train_time:1768495ms step_avg:238.31ms +[2025-07-17 13:53:06] [Rank 0] step:7421/10000 train_time:1768495ms step_avg:238.31ms +[2025-07-17 13:53:11] [Rank 0] step:7441/10000 train_time:1773493ms step_avg:238.34ms +[2025-07-17 13:53:11] [Rank 0] step:7441/10000 train_time:1773493ms step_avg:238.34ms +[2025-07-17 13:53:16] [Rank 0] step:7461/10000 train_time:1778470ms step_avg:238.37ms +[2025-07-17 
13:53:16] [Rank 0] step:7461/10000 train_time:1778470ms step_avg:238.37ms +[2025-07-17 13:53:21] [Rank 0] step:7481/10000 train_time:1783461ms step_avg:238.40ms +[2025-07-17 13:53:21] [Rank 0] step:7481/10000 train_time:1783461ms step_avg:238.40ms +[2025-07-17 13:53:31] [Rank 0] PRINT: step:7500/10000 val_loss:5.3064 train_time:1788713ms step_avg:238.50ms +[2025-07-17 13:53:31] [Rank 0] PRINT: step:7500/10000 val_loss:5.3064 train_time:1788713ms step_avg:238.50ms +[2025-07-17 13:53:31] [Rank 0] step:7501/10000 train_time:1788728ms step_avg:238.47ms +[2025-07-17 13:53:31] [Rank 0] step:7501/10000 train_time:1788728ms step_avg:238.47ms +[2025-07-17 13:53:36] [Rank 0] step:7521/10000 train_time:1793462ms step_avg:238.46ms +[2025-07-17 13:53:36] [Rank 0] step:7521/10000 train_time:1793462ms step_avg:238.46ms +[2025-07-17 13:53:41] [Rank 0] step:7541/10000 train_time:1798454ms step_avg:238.49ms +[2025-07-17 13:53:41] [Rank 0] step:7541/10000 train_time:1798454ms step_avg:238.49ms +[2025-07-17 13:53:46] [Rank 0] step:7561/10000 train_time:1803440ms step_avg:238.52ms +[2025-07-17 13:53:46] [Rank 0] step:7561/10000 train_time:1803440ms step_avg:238.52ms +[2025-07-17 13:53:51] [Rank 0] step:7581/10000 train_time:1808437ms step_avg:238.55ms +[2025-07-17 13:53:51] [Rank 0] step:7581/10000 train_time:1808437ms step_avg:238.55ms +[2025-07-17 13:53:56] [Rank 0] step:7601/10000 train_time:1813437ms step_avg:238.58ms +[2025-07-17 13:53:56] [Rank 0] step:7601/10000 train_time:1813437ms step_avg:238.58ms +[2025-07-17 13:54:01] [Rank 0] step:7621/10000 train_time:1818980ms step_avg:238.68ms +[2025-07-17 13:54:01] [Rank 0] step:7621/10000 train_time:1818980ms step_avg:238.68ms +[2025-07-17 13:54:07] [Rank 0] PRINT: step:7625/10000 val_loss:5.6638 train_time:1820214ms step_avg:238.72ms +[2025-07-17 13:54:07] [Rank 0] PRINT: step:7625/10000 val_loss:5.6638 train_time:1820214ms step_avg:238.72ms +[2025-07-17 13:54:11] [Rank 0] step:7641/10000 train_time:1823700ms step_avg:238.67ms 
+[2025-07-17 13:54:11] [Rank 0] step:7641/10000 train_time:1823700ms step_avg:238.67ms +[2025-07-17 13:54:16] [Rank 0] step:7661/10000 train_time:1828703ms step_avg:238.70ms +[2025-07-17 13:54:16] [Rank 0] step:7661/10000 train_time:1828703ms step_avg:238.70ms +[2025-07-17 13:54:21] [Rank 0] step:7681/10000 train_time:1833815ms step_avg:238.75ms +[2025-07-17 13:54:21] [Rank 0] step:7681/10000 train_time:1833815ms step_avg:238.75ms +[2025-07-17 13:54:26] [Rank 0] step:7701/10000 train_time:1838808ms step_avg:238.78ms +[2025-07-17 13:54:26] [Rank 0] step:7701/10000 train_time:1838808ms step_avg:238.78ms +[2025-07-17 13:54:31] [Rank 0] step:7721/10000 train_time:1843805ms step_avg:238.80ms +[2025-07-17 13:54:31] [Rank 0] step:7721/10000 train_time:1843805ms step_avg:238.80ms +[2025-07-17 13:54:36] [Rank 0] step:7741/10000 train_time:1848800ms step_avg:238.83ms +[2025-07-17 13:54:36] [Rank 0] step:7741/10000 train_time:1848800ms step_avg:238.83ms +[2025-07-17 13:54:43] [Rank 0] PRINT: step:7750/10000 val_loss:5.7540 train_time:1851565ms step_avg:238.91ms +[2025-07-17 13:54:43] [Rank 0] PRINT: step:7750/10000 val_loss:5.7540 train_time:1851565ms step_avg:238.91ms +[2025-07-17 13:54:45] [Rank 0] step:7761/10000 train_time:1853806ms step_avg:238.86ms +[2025-07-17 13:54:45] [Rank 0] step:7761/10000 train_time:1853806ms step_avg:238.86ms +[2025-07-17 13:54:50] [Rank 0] step:7781/10000 train_time:1858806ms step_avg:238.89ms +[2025-07-17 13:54:50] [Rank 0] step:7781/10000 train_time:1858806ms step_avg:238.89ms +[2025-07-17 13:54:55] [Rank 0] step:7801/10000 train_time:1863804ms step_avg:238.92ms +[2025-07-17 13:54:55] [Rank 0] step:7801/10000 train_time:1863804ms step_avg:238.92ms +[2025-07-17 13:55:00] [Rank 0] step:7821/10000 train_time:1868799ms step_avg:238.95ms +[2025-07-17 13:55:00] [Rank 0] step:7821/10000 train_time:1868799ms step_avg:238.95ms +[2025-07-17 13:55:05] [Rank 0] step:7841/10000 train_time:1873799ms step_avg:238.97ms +[2025-07-17 13:55:05] [Rank 0] 
step:7841/10000 train_time:1873799ms step_avg:238.97ms +[2025-07-17 13:55:10] [Rank 0] step:7861/10000 train_time:1878782ms step_avg:239.00ms +[2025-07-17 13:55:10] [Rank 0] step:7861/10000 train_time:1878782ms step_avg:239.00ms +[2025-07-17 13:55:19] [Rank 0] PRINT: step:7875/10000 val_loss:5.9621 train_time:1882773ms step_avg:239.08ms +[2025-07-17 13:55:19] [Rank 0] PRINT: step:7875/10000 val_loss:5.9621 train_time:1882773ms step_avg:239.08ms +[2025-07-17 13:55:20] [Rank 0] step:7881/10000 train_time:1883761ms step_avg:239.03ms +[2025-07-17 13:55:20] [Rank 0] step:7881/10000 train_time:1883761ms step_avg:239.03ms +[2025-07-17 13:55:25] [Rank 0] step:7901/10000 train_time:1888749ms step_avg:239.05ms +[2025-07-17 13:55:25] [Rank 0] step:7901/10000 train_time:1888749ms step_avg:239.05ms +[2025-07-17 13:55:30] [Rank 0] step:7921/10000 train_time:1893741ms step_avg:239.08ms +[2025-07-17 13:55:30] [Rank 0] step:7921/10000 train_time:1893741ms step_avg:239.08ms +[2025-07-17 13:55:35] [Rank 0] step:7941/10000 train_time:1898835ms step_avg:239.12ms +[2025-07-17 13:55:35] [Rank 0] step:7941/10000 train_time:1898835ms step_avg:239.12ms +[2025-07-17 13:55:40] [Rank 0] step:7961/10000 train_time:1903840ms step_avg:239.15ms +[2025-07-17 13:55:40] [Rank 0] step:7961/10000 train_time:1903840ms step_avg:239.15ms +[2025-07-17 13:55:45] [Rank 0] step:7981/10000 train_time:1908825ms step_avg:239.17ms +[2025-07-17 13:55:45] [Rank 0] step:7981/10000 train_time:1908825ms step_avg:239.17ms +[2025-07-17 13:55:54] [Rank 0] PRINT: step:8000/10000 val_loss:6.0364 train_time:1914081ms step_avg:239.26ms +[2025-07-17 13:55:54] [Rank 0] PRINT: step:8000/10000 val_loss:6.0364 train_time:1914081ms step_avg:239.26ms +[2025-07-17 13:55:54] [Rank 0] step:8001/10000 train_time:1914095ms step_avg:239.23ms +[2025-07-17 13:55:54] [Rank 0] step:8001/10000 train_time:1914095ms step_avg:239.23ms +[2025-07-17 13:55:59] [Rank 0] step:8021/10000 train_time:1918817ms step_avg:239.22ms +[2025-07-17 13:55:59] 
[Rank 0] step:8021/10000 train_time:1918817ms step_avg:239.22ms +[2025-07-17 13:56:04] [Rank 0] step:8041/10000 train_time:1923830ms step_avg:239.25ms +[2025-07-17 13:56:04] [Rank 0] step:8041/10000 train_time:1923830ms step_avg:239.25ms +[2025-07-17 13:56:09] [Rank 0] step:8061/10000 train_time:1928818ms step_avg:239.28ms +[2025-07-17 13:56:09] [Rank 0] step:8061/10000 train_time:1928818ms step_avg:239.28ms +[2025-07-17 13:56:14] [Rank 0] step:8081/10000 train_time:1933816ms step_avg:239.30ms +[2025-07-17 13:56:14] [Rank 0] step:8081/10000 train_time:1933816ms step_avg:239.30ms +[2025-07-17 13:56:19] [Rank 0] step:8101/10000 train_time:1938806ms step_avg:239.33ms +[2025-07-17 13:56:19] [Rank 0] step:8101/10000 train_time:1938806ms step_avg:239.33ms +[2025-07-17 13:56:24] [Rank 0] step:8121/10000 train_time:1943798ms step_avg:239.35ms +[2025-07-17 13:56:24] [Rank 0] step:8121/10000 train_time:1943798ms step_avg:239.35ms +[2025-07-17 13:56:30] [Rank 0] PRINT: step:8125/10000 val_loss:6.0307 train_time:1945299ms step_avg:239.42ms +[2025-07-17 13:56:30] [Rank 0] PRINT: step:8125/10000 val_loss:6.0307 train_time:1945299ms step_avg:239.42ms +[2025-07-17 13:56:34] [Rank 0] step:8141/10000 train_time:1949021ms step_avg:239.41ms +[2025-07-17 13:56:34] [Rank 0] step:8141/10000 train_time:1949021ms step_avg:239.41ms +[2025-07-17 13:56:39] [Rank 0] step:8161/10000 train_time:1954050ms step_avg:239.44ms +[2025-07-17 13:56:39] [Rank 0] step:8161/10000 train_time:1954050ms step_avg:239.44ms +[2025-07-17 13:56:44] [Rank 0] step:8181/10000 train_time:1959110ms step_avg:239.47ms +[2025-07-17 13:56:44] [Rank 0] step:8181/10000 train_time:1959110ms step_avg:239.47ms +[2025-07-17 13:56:49] [Rank 0] step:8201/10000 train_time:1964153ms step_avg:239.50ms +[2025-07-17 13:56:49] [Rank 0] step:8201/10000 train_time:1964153ms step_avg:239.50ms +[2025-07-17 13:56:54] [Rank 0] step:8221/10000 train_time:1969210ms step_avg:239.53ms +[2025-07-17 13:56:54] [Rank 0] step:8221/10000 
train_time:1969210ms step_avg:239.53ms +[2025-07-17 13:56:59] [Rank 0] step:8241/10000 train_time:1974264ms step_avg:239.57ms +[2025-07-17 13:56:59] [Rank 0] step:8241/10000 train_time:1974264ms step_avg:239.57ms +[2025-07-17 13:57:06] [Rank 0] PRINT: step:8250/10000 val_loss:5.9105 train_time:1977052ms step_avg:239.64ms +[2025-07-17 13:57:06] [Rank 0] PRINT: step:8250/10000 val_loss:5.9105 train_time:1977052ms step_avg:239.64ms +[2025-07-17 13:57:09] [Rank 0] step:8261/10000 train_time:1979323ms step_avg:239.60ms +[2025-07-17 13:57:09] [Rank 0] step:8261/10000 train_time:1979323ms step_avg:239.60ms +[2025-07-17 13:57:14] [Rank 0] step:8281/10000 train_time:1984396ms step_avg:239.63ms +[2025-07-17 13:57:14] [Rank 0] step:8281/10000 train_time:1984396ms step_avg:239.63ms +[2025-07-17 13:57:19] [Rank 0] step:8301/10000 train_time:1989444ms step_avg:239.66ms +[2025-07-17 13:57:19] [Rank 0] step:8301/10000 train_time:1989444ms step_avg:239.66ms +[2025-07-17 13:57:24] [Rank 0] step:8321/10000 train_time:1994504ms step_avg:239.70ms +[2025-07-17 13:57:24] [Rank 0] step:8321/10000 train_time:1994504ms step_avg:239.70ms +[2025-07-17 13:57:29] [Rank 0] step:8341/10000 train_time:1999571ms step_avg:239.73ms +[2025-07-17 13:57:29] [Rank 0] step:8341/10000 train_time:1999571ms step_avg:239.73ms +[2025-07-17 13:57:34] [Rank 0] step:8361/10000 train_time:2004620ms step_avg:239.76ms +[2025-07-17 13:57:34] [Rank 0] step:8361/10000 train_time:2004620ms step_avg:239.76ms +[2025-07-17 13:57:42] [Rank 0] PRINT: step:8375/10000 val_loss:5.7187 train_time:2008666ms step_avg:239.84ms +[2025-07-17 13:57:42] [Rank 0] PRINT: step:8375/10000 val_loss:5.7187 train_time:2008666ms step_avg:239.84ms +[2025-07-17 13:57:44] [Rank 0] step:8381/10000 train_time:2009666ms step_avg:239.79ms +[2025-07-17 13:57:44] [Rank 0] step:8381/10000 train_time:2009666ms step_avg:239.79ms +[2025-07-17 13:57:49] [Rank 0] step:8401/10000 train_time:2014700ms step_avg:239.82ms +[2025-07-17 13:57:49] [Rank 0] 
step:8401/10000 train_time:2014700ms step_avg:239.82ms +[2025-07-17 13:57:54] [Rank 0] step:8421/10000 train_time:2019758ms step_avg:239.85ms +[2025-07-17 13:57:54] [Rank 0] step:8421/10000 train_time:2019758ms step_avg:239.85ms +[2025-07-17 13:57:59] [Rank 0] step:8441/10000 train_time:2024811ms step_avg:239.88ms +[2025-07-17 13:57:59] [Rank 0] step:8441/10000 train_time:2024811ms step_avg:239.88ms +[2025-07-17 13:58:04] [Rank 0] step:8461/10000 train_time:2029882ms step_avg:239.91ms +[2025-07-17 13:58:04] [Rank 0] step:8461/10000 train_time:2029882ms step_avg:239.91ms +[2025-07-17 13:58:09] [Rank 0] step:8481/10000 train_time:2034924ms step_avg:239.94ms +[2025-07-17 13:58:09] [Rank 0] step:8481/10000 train_time:2034924ms step_avg:239.94ms +[2025-07-17 13:58:19] [Rank 0] PRINT: step:8500/10000 val_loss:5.9110 train_time:2040239ms step_avg:240.03ms +[2025-07-17 13:58:19] [Rank 0] PRINT: step:8500/10000 val_loss:5.9110 train_time:2040239ms step_avg:240.03ms +[2025-07-17 13:58:19] [Rank 0] step:8501/10000 train_time:2040254ms step_avg:240.00ms +[2025-07-17 13:58:19] [Rank 0] step:8501/10000 train_time:2040254ms step_avg:240.00ms +[2025-07-17 13:58:24] [Rank 0] step:8521/10000 train_time:2045035ms step_avg:240.00ms +[2025-07-17 13:58:24] [Rank 0] step:8521/10000 train_time:2045035ms step_avg:240.00ms +[2025-07-17 13:58:29] [Rank 0] step:8541/10000 train_time:2050108ms step_avg:240.03ms +[2025-07-17 13:58:29] [Rank 0] step:8541/10000 train_time:2050108ms step_avg:240.03ms +[2025-07-17 13:58:34] [Rank 0] step:8561/10000 train_time:2055158ms step_avg:240.06ms +[2025-07-17 13:58:34] [Rank 0] step:8561/10000 train_time:2055158ms step_avg:240.06ms +[2025-07-17 13:58:39] [Rank 0] step:8581/10000 train_time:2060220ms step_avg:240.09ms +[2025-07-17 13:58:39] [Rank 0] step:8581/10000 train_time:2060220ms step_avg:240.09ms +[2025-07-17 13:58:44] [Rank 0] step:8601/10000 train_time:2065265ms step_avg:240.12ms +[2025-07-17 13:58:44] [Rank 0] step:8601/10000 train_time:2065265ms 
step_avg:240.12ms +[2025-07-17 13:58:49] [Rank 0] step:8621/10000 train_time:2070319ms step_avg:240.15ms +[2025-07-17 13:58:49] [Rank 0] step:8621/10000 train_time:2070319ms step_avg:240.15ms +[2025-07-17 13:58:55] [Rank 0] PRINT: step:8625/10000 val_loss:5.8209 train_time:2071839ms step_avg:240.21ms +[2025-07-17 13:58:55] [Rank 0] PRINT: step:8625/10000 val_loss:5.8209 train_time:2071839ms step_avg:240.21ms +[2025-07-17 13:59:00] [Rank 0] step:8641/10000 train_time:2075645ms step_avg:240.21ms +[2025-07-17 13:59:00] [Rank 0] step:8641/10000 train_time:2075645ms step_avg:240.21ms +[2025-07-17 13:59:05] [Rank 0] step:8661/10000 train_time:2080696ms step_avg:240.24ms +[2025-07-17 13:59:05] [Rank 0] step:8661/10000 train_time:2080696ms step_avg:240.24ms +[2025-07-17 13:59:10] [Rank 0] step:8681/10000 train_time:2085751ms step_avg:240.27ms +[2025-07-17 13:59:10] [Rank 0] step:8681/10000 train_time:2085751ms step_avg:240.27ms +[2025-07-17 13:59:15] [Rank 0] step:8701/10000 train_time:2090820ms step_avg:240.30ms +[2025-07-17 13:59:15] [Rank 0] step:8701/10000 train_time:2090820ms step_avg:240.30ms +[2025-07-17 13:59:20] [Rank 0] step:8721/10000 train_time:2095884ms step_avg:240.33ms +[2025-07-17 13:59:20] [Rank 0] step:8721/10000 train_time:2095884ms step_avg:240.33ms +[2025-07-17 13:59:25] [Rank 0] step:8741/10000 train_time:2100950ms step_avg:240.36ms +[2025-07-17 13:59:25] [Rank 0] step:8741/10000 train_time:2100950ms step_avg:240.36ms +[2025-07-17 13:59:32] [Rank 0] PRINT: step:8750/10000 val_loss:5.6676 train_time:2103728ms step_avg:240.43ms +[2025-07-17 13:59:32] [Rank 0] PRINT: step:8750/10000 val_loss:5.6676 train_time:2103728ms step_avg:240.43ms +[2025-07-17 13:59:35] [Rank 0] step:8761/10000 train_time:2105994ms step_avg:240.38ms +[2025-07-17 13:59:35] [Rank 0] step:8761/10000 train_time:2105994ms step_avg:240.38ms +[2025-07-17 13:59:40] [Rank 0] step:8781/10000 train_time:2111047ms step_avg:240.41ms +[2025-07-17 13:59:40] [Rank 0] step:8781/10000 
train_time:2111047ms step_avg:240.41ms +[2025-07-17 13:59:45] [Rank 0] step:8801/10000 train_time:2116096ms step_avg:240.44ms +[2025-07-17 13:59:45] [Rank 0] step:8801/10000 train_time:2116096ms step_avg:240.44ms +[2025-07-17 13:59:50] [Rank 0] step:8821/10000 train_time:2121157ms step_avg:240.47ms +[2025-07-17 13:59:50] [Rank 0] step:8821/10000 train_time:2121157ms step_avg:240.47ms +[2025-07-17 13:59:55] [Rank 0] step:8841/10000 train_time:2126234ms step_avg:240.50ms +[2025-07-17 13:59:55] [Rank 0] step:8841/10000 train_time:2126234ms step_avg:240.50ms +[2025-07-17 14:00:00] [Rank 0] step:8861/10000 train_time:2131292ms step_avg:240.53ms +[2025-07-17 14:00:00] [Rank 0] step:8861/10000 train_time:2131292ms step_avg:240.53ms +[2025-07-17 14:00:08] [Rank 0] PRINT: step:8875/10000 val_loss:5.7026 train_time:2135334ms step_avg:240.60ms +[2025-07-17 14:00:08] [Rank 0] PRINT: step:8875/10000 val_loss:5.7026 train_time:2135334ms step_avg:240.60ms +[2025-07-17 14:00:10] [Rank 0] step:8881/10000 train_time:2136339ms step_avg:240.55ms +[2025-07-17 14:00:10] [Rank 0] step:8881/10000 train_time:2136339ms step_avg:240.55ms +[2025-07-17 14:00:15] [Rank 0] step:8901/10000 train_time:2141382ms step_avg:240.58ms +[2025-07-17 14:00:15] [Rank 0] step:8901/10000 train_time:2141382ms step_avg:240.58ms +[2025-07-17 14:00:20] [Rank 0] step:8921/10000 train_time:2146426ms step_avg:240.60ms +[2025-07-17 14:00:20] [Rank 0] step:8921/10000 train_time:2146426ms step_avg:240.60ms +[2025-07-17 14:00:25] [Rank 0] step:8941/10000 train_time:2151476ms step_avg:240.63ms +[2025-07-17 14:00:25] [Rank 0] step:8941/10000 train_time:2151476ms step_avg:240.63ms +[2025-07-17 14:00:30] [Rank 0] step:8961/10000 train_time:2156530ms step_avg:240.66ms +[2025-07-17 14:00:30] [Rank 0] step:8961/10000 train_time:2156530ms step_avg:240.66ms +[2025-07-17 14:00:35] [Rank 0] step:8981/10000 train_time:2161586ms step_avg:240.68ms +[2025-07-17 14:00:35] [Rank 0] step:8981/10000 train_time:2161586ms step_avg:240.68ms 
+[2025-07-17 14:00:44] [Rank 0] PRINT: step:9000/10000 val_loss:5.7070 train_time:2166901ms step_avg:240.77ms +[2025-07-17 14:00:44] [Rank 0] PRINT: step:9000/10000 val_loss:5.7070 train_time:2166901ms step_avg:240.77ms +[2025-07-17 14:00:44] [Rank 0] step:9001/10000 train_time:2166915ms step_avg:240.74ms +[2025-07-17 14:00:44] [Rank 0] step:9001/10000 train_time:2166915ms step_avg:240.74ms +[2025-07-17 14:00:49] [Rank 0] step:9021/10000 train_time:2171688ms step_avg:240.74ms +[2025-07-17 14:00:49] [Rank 0] step:9021/10000 train_time:2171688ms step_avg:240.74ms +[2025-07-17 14:00:55] [Rank 0] step:9041/10000 train_time:2176767ms step_avg:240.77ms +[2025-07-17 14:00:55] [Rank 0] step:9041/10000 train_time:2176767ms step_avg:240.77ms +[2025-07-17 14:01:00] [Rank 0] step:9061/10000 train_time:2181822ms step_avg:240.79ms +[2025-07-17 14:01:00] [Rank 0] step:9061/10000 train_time:2181822ms step_avg:240.79ms +[2025-07-17 14:01:05] [Rank 0] step:9081/10000 train_time:2186901ms step_avg:240.82ms +[2025-07-17 14:01:05] [Rank 0] step:9081/10000 train_time:2186901ms step_avg:240.82ms +[2025-07-17 14:01:10] [Rank 0] step:9101/10000 train_time:2191981ms step_avg:240.85ms +[2025-07-17 14:01:10] [Rank 0] step:9101/10000 train_time:2191981ms step_avg:240.85ms +[2025-07-17 14:01:15] [Rank 0] step:9121/10000 train_time:2197147ms step_avg:240.89ms +[2025-07-17 14:01:15] [Rank 0] step:9121/10000 train_time:2197147ms step_avg:240.89ms +[2025-07-17 14:01:21] [Rank 0] PRINT: step:9125/10000 val_loss:5.9363 train_time:2198665ms step_avg:240.95ms +[2025-07-17 14:01:21] [Rank 0] PRINT: step:9125/10000 val_loss:5.9363 train_time:2198665ms step_avg:240.95ms +[2025-07-17 14:01:25] [Rank 0] step:9141/10000 train_time:2202194ms step_avg:240.91ms +[2025-07-17 14:01:25] [Rank 0] step:9141/10000 train_time:2202194ms step_avg:240.91ms +[2025-07-17 14:01:30] [Rank 0] step:9161/10000 train_time:2207525ms step_avg:240.97ms +[2025-07-17 14:01:30] [Rank 0] step:9161/10000 train_time:2207525ms 
step_avg:240.97ms +[2025-07-17 14:01:35] [Rank 0] step:9181/10000 train_time:2212585ms step_avg:241.00ms +[2025-07-17 14:01:35] [Rank 0] step:9181/10000 train_time:2212585ms step_avg:241.00ms +[2025-07-17 14:01:40] [Rank 0] step:9201/10000 train_time:2217646ms step_avg:241.02ms +[2025-07-17 14:01:40] [Rank 0] step:9201/10000 train_time:2217646ms step_avg:241.02ms +[2025-07-17 14:01:45] [Rank 0] step:9221/10000 train_time:2222738ms step_avg:241.05ms +[2025-07-17 14:01:45] [Rank 0] step:9221/10000 train_time:2222738ms step_avg:241.05ms +[2025-07-17 14:01:50] [Rank 0] step:9241/10000 train_time:2227814ms step_avg:241.08ms +[2025-07-17 14:01:50] [Rank 0] step:9241/10000 train_time:2227814ms step_avg:241.08ms +[2025-07-17 14:01:58] [Rank 0] PRINT: step:9250/10000 val_loss:6.3194 train_time:2230605ms step_avg:241.15ms +[2025-07-17 14:01:58] [Rank 0] PRINT: step:9250/10000 val_loss:6.3194 train_time:2230605ms step_avg:241.15ms +[2025-07-17 14:02:00] [Rank 0] step:9261/10000 train_time:2232884ms step_avg:241.11ms +[2025-07-17 14:02:00] [Rank 0] step:9261/10000 train_time:2232884ms step_avg:241.11ms +[2025-07-17 14:02:05] [Rank 0] step:9281/10000 train_time:2237927ms step_avg:241.13ms +[2025-07-17 14:02:05] [Rank 0] step:9281/10000 train_time:2237927ms step_avg:241.13ms +[2025-07-17 14:02:10] [Rank 0] step:9301/10000 train_time:2242991ms step_avg:241.16ms +[2025-07-17 14:02:10] [Rank 0] step:9301/10000 train_time:2242991ms step_avg:241.16ms +[2025-07-17 14:02:15] [Rank 0] step:9321/10000 train_time:2248077ms step_avg:241.18ms +[2025-07-17 14:02:15] [Rank 0] step:9321/10000 train_time:2248077ms step_avg:241.18ms +[2025-07-17 14:02:21] [Rank 0] step:9341/10000 train_time:2253138ms step_avg:241.21ms +[2025-07-17 14:02:21] [Rank 0] step:9341/10000 train_time:2253138ms step_avg:241.21ms +[2025-07-17 14:02:26] [Rank 0] step:9361/10000 train_time:2258207ms step_avg:241.24ms +[2025-07-17 14:02:26] [Rank 0] step:9361/10000 train_time:2258207ms step_avg:241.24ms +[2025-07-17 
14:02:34] [Rank 0] PRINT: step:9375/10000 val_loss:6.3660 train_time:2262261ms step_avg:241.31ms +[2025-07-17 14:02:34] [Rank 0] PRINT: step:9375/10000 val_loss:6.3660 train_time:2262261ms step_avg:241.31ms +[2025-07-17 14:02:35] [Rank 0] step:9381/10000 train_time:2263268ms step_avg:241.26ms +[2025-07-17 14:02:35] [Rank 0] step:9381/10000 train_time:2263268ms step_avg:241.26ms +[2025-07-17 14:02:40] [Rank 0] step:9401/10000 train_time:2268311ms step_avg:241.28ms +[2025-07-17 14:02:40] [Rank 0] step:9401/10000 train_time:2268311ms step_avg:241.28ms +[2025-07-17 14:02:46] [Rank 0] step:9421/10000 train_time:2273373ms step_avg:241.31ms +[2025-07-17 14:02:46] [Rank 0] step:9421/10000 train_time:2273373ms step_avg:241.31ms +[2025-07-17 14:02:51] [Rank 0] step:9441/10000 train_time:2278437ms step_avg:241.33ms +[2025-07-17 14:02:51] [Rank 0] step:9441/10000 train_time:2278437ms step_avg:241.33ms +[2025-07-17 14:02:56] [Rank 0] step:9461/10000 train_time:2283613ms step_avg:241.37ms +[2025-07-17 14:02:56] [Rank 0] step:9461/10000 train_time:2283613ms step_avg:241.37ms +[2025-07-17 14:03:01] [Rank 0] step:9481/10000 train_time:2288683ms step_avg:241.40ms +[2025-07-17 14:03:01] [Rank 0] step:9481/10000 train_time:2288683ms step_avg:241.40ms +[2025-07-17 14:03:10] [Rank 0] PRINT: step:9500/10000 val_loss:6.1694 train_time:2294027ms step_avg:241.48ms +[2025-07-17 14:03:10] [Rank 0] PRINT: step:9500/10000 val_loss:6.1694 train_time:2294027ms step_avg:241.48ms +[2025-07-17 14:03:10] [Rank 0] step:9501/10000 train_time:2294041ms step_avg:241.45ms +[2025-07-17 14:03:10] [Rank 0] step:9501/10000 train_time:2294041ms step_avg:241.45ms +[2025-07-17 14:03:15] [Rank 0] step:9521/10000 train_time:2298827ms step_avg:241.45ms +[2025-07-17 14:03:15] [Rank 0] step:9521/10000 train_time:2298827ms step_avg:241.45ms +[2025-07-17 14:03:20] [Rank 0] step:9541/10000 train_time:2303909ms step_avg:241.47ms +[2025-07-17 14:03:20] [Rank 0] step:9541/10000 train_time:2303909ms step_avg:241.47ms 
+[2025-07-17 14:03:25] [Rank 0] step:9561/10000 train_time:2308957ms step_avg:241.50ms +[2025-07-17 14:03:25] [Rank 0] step:9561/10000 train_time:2308957ms step_avg:241.50ms +[2025-07-17 14:03:31] [Rank 0] step:9581/10000 train_time:2314010ms step_avg:241.52ms +[2025-07-17 14:03:31] [Rank 0] step:9581/10000 train_time:2314010ms step_avg:241.52ms +[2025-07-17 14:03:36] [Rank 0] step:9601/10000 train_time:2319062ms step_avg:241.54ms +[2025-07-17 14:03:36] [Rank 0] step:9601/10000 train_time:2319062ms step_avg:241.54ms +[2025-07-17 14:03:41] [Rank 0] step:9621/10000 train_time:2324148ms step_avg:241.57ms +[2025-07-17 14:03:41] [Rank 0] step:9621/10000 train_time:2324148ms step_avg:241.57ms +[2025-07-17 14:03:46] [Rank 0] PRINT: step:9625/10000 val_loss:6.2089 train_time:2325661ms step_avg:241.63ms +[2025-07-17 14:03:46] [Rank 0] PRINT: step:9625/10000 val_loss:6.2089 train_time:2325661ms step_avg:241.63ms +[2025-07-17 14:03:50] [Rank 0] step:9641/10000 train_time:2329217ms step_avg:241.60ms +[2025-07-17 14:03:50] [Rank 0] step:9641/10000 train_time:2329217ms step_avg:241.60ms +[2025-07-17 14:03:56] [Rank 0] step:9661/10000 train_time:2334588ms step_avg:241.65ms +[2025-07-17 14:03:56] [Rank 0] step:9661/10000 train_time:2334588ms step_avg:241.65ms +[2025-07-17 14:04:01] [Rank 0] step:9681/10000 train_time:2339697ms step_avg:241.68ms +[2025-07-17 14:04:01] [Rank 0] step:9681/10000 train_time:2339697ms step_avg:241.68ms +[2025-07-17 14:04:06] [Rank 0] step:9701/10000 train_time:2344817ms step_avg:241.71ms +[2025-07-17 14:04:06] [Rank 0] step:9701/10000 train_time:2344817ms step_avg:241.71ms +[2025-07-17 14:04:11] [Rank 0] step:9721/10000 train_time:2349916ms step_avg:241.74ms +[2025-07-17 14:04:11] [Rank 0] step:9721/10000 train_time:2349916ms step_avg:241.74ms +[2025-07-17 14:04:16] [Rank 0] step:9741/10000 train_time:2355034ms step_avg:241.77ms +[2025-07-17 14:04:16] [Rank 0] step:9741/10000 train_time:2355034ms step_avg:241.77ms +[2025-07-17 14:04:23] [Rank 0] PRINT: 
step:9750/10000 val_loss:5.7758 train_time:2357842ms step_avg:241.83ms +[2025-07-17 14:04:23] [Rank 0] PRINT: step:9750/10000 val_loss:5.7758 train_time:2357842ms step_avg:241.83ms +[2025-07-17 14:04:26] [Rank 0] step:9761/10000 train_time:2360140ms step_avg:241.79ms +[2025-07-17 14:04:26] [Rank 0] step:9761/10000 train_time:2360140ms step_avg:241.79ms +[2025-07-17 14:04:31] [Rank 0] step:9781/10000 train_time:2365254ms step_avg:241.82ms +[2025-07-17 14:04:31] [Rank 0] step:9781/10000 train_time:2365254ms step_avg:241.82ms +[2025-07-17 14:04:36] [Rank 0] step:9801/10000 train_time:2370362ms step_avg:241.85ms +[2025-07-17 14:04:36] [Rank 0] step:9801/10000 train_time:2370362ms step_avg:241.85ms +[2025-07-17 14:04:42] [Rank 0] step:9821/10000 train_time:2375473ms step_avg:241.88ms +[2025-07-17 14:04:42] [Rank 0] step:9821/10000 train_time:2375473ms step_avg:241.88ms +[2025-07-17 14:04:47] [Rank 0] step:9841/10000 train_time:2380585ms step_avg:241.90ms +[2025-07-17 14:04:47] [Rank 0] step:9841/10000 train_time:2380585ms step_avg:241.90ms +[2025-07-17 14:04:52] [Rank 0] step:9861/10000 train_time:2385699ms step_avg:241.93ms +[2025-07-17 14:04:52] [Rank 0] step:9861/10000 train_time:2385699ms step_avg:241.93ms +[2025-07-17 14:05:00] [Rank 0] PRINT: step:9875/10000 val_loss:5.7419 train_time:2389785ms step_avg:242.00ms +[2025-07-17 14:05:00] [Rank 0] PRINT: step:9875/10000 val_loss:5.7419 train_time:2389785ms step_avg:242.00ms +[2025-07-17 14:05:02] [Rank 0] step:9881/10000 train_time:2390801ms step_avg:241.96ms +[2025-07-17 14:05:02] [Rank 0] step:9881/10000 train_time:2390801ms step_avg:241.96ms +[2025-07-17 14:05:07] [Rank 0] step:9901/10000 train_time:2395920ms step_avg:241.99ms +[2025-07-17 14:05:07] [Rank 0] step:9901/10000 train_time:2395920ms step_avg:241.99ms +[2025-07-17 14:05:12] [Rank 0] step:9921/10000 train_time:2401047ms step_avg:242.02ms +[2025-07-17 14:05:12] [Rank 0] step:9921/10000 train_time:2401047ms step_avg:242.02ms +[2025-07-17 14:05:17] [Rank 0] 
step:9941/10000 train_time:2406196ms step_avg:242.05ms +[2025-07-17 14:05:17] [Rank 0] step:9941/10000 train_time:2406196ms step_avg:242.05ms +[2025-07-17 14:05:22] [Rank 0] step:9961/10000 train_time:2411331ms step_avg:242.08ms +[2025-07-17 14:05:22] [Rank 0] step:9961/10000 train_time:2411331ms step_avg:242.08ms +[2025-07-17 14:05:27] [Rank 0] step:9981/10000 train_time:2416475ms step_avg:242.11ms +[2025-07-17 14:05:27] [Rank 0] step:9981/10000 train_time:2416475ms step_avg:242.11ms +[2025-07-17 14:05:32] [Rank 0] step:10000/10000 train_time:2421321ms step_avg:242.13ms +[2025-07-17 14:05:32] [Rank 0] step:10000/10000 train_time:2421321ms step_avg:242.13ms +[2025-07-17 14:05:37] [Rank 0] PRINT: step:10000/10000 val_loss:5.7189 train_time:2421838ms step_avg:242.18ms +[2025-07-17 14:05:37] [Rank 0] PRINT: step:10000/10000 val_loss:5.7189 train_time:2421838ms step_avg:242.18ms +[2025-07-17 14:05:37] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 14:05:37 2025 --- +[2025-07-17 14:05:37] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 14:05:37 2025 --- +[2025-07-17 14:05:37] [Rank 0] PRINT: Peak memory allocated: 30851 MiB reserved: 31354 MiB +[2025-07-17 14:05:37] [Rank 0] PRINT: Peak memory allocated: 30851 MiB reserved: 31354 MiB diff --git a/logs_norope/diff_modes/mode_4_param_norope_seed_43/config.json b/logs_norope/diff_modes/mode_4_param_norope_seed_43/config.json new file mode 100644 index 0000000000000000000000000000000000000000..92860b7ed6ee50fef48a903336368c5defaa067e --- /dev/null +++ b/logs_norope/diff_modes/mode_4_param_norope_seed_43/config.json @@ -0,0 +1,22 @@ +{ + "cli_args": { + "unet": false, + "seed": 43, + "optimizer_mode": 4, + "model_parameterization": "norope" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 10485760, + "train_seq_len": 49152, + 
"val_seq_len": 262144, + "num_iterations": 10000, + "cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 125, + "save_checkpoint": false + }, + "run_uuid_for_log": "072f1401-29b7-4170-bba4-d6c7847cf87c", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_norope/diff_modes/mode_4_param_norope_seed_43/training_log_072f1401-29b7-4170-bba4-d6c7847cf87c.txt b/logs_norope/diff_modes/mode_4_param_norope_seed_43/training_log_072f1401-29b7-4170-bba4-d6c7847cf87c.txt new file mode 100644 index 0000000000000000000000000000000000000000..9c7af6e8ce0ca76545dd83f25c337ee99a4ac4c0 --- /dev/null +++ b/logs_norope/diff_modes/mode_4_param_norope_seed_43/training_log_072f1401-29b7-4170-bba4-d6c7847cf87c.txt @@ -0,0 +1,2360 @@ +[2025-07-17 20:29:45] [Rank 0] PRINT: --- Script Start: Thu Jul 17 20:29:45 2025 --- +[2025-07-17 20:29:45] [Rank 0] PRINT: --- Script Start: Thu Jul 17 20:29:45 2025 --- +[2025-07-17 20:29:46] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=4, model_parameterization='norope') +[2025-07-17 20:29:46] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=4, model_parameterization='norope') +[2025-07-17 20:29:46] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 20:29:46] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 20:29:46] [Rank 0] PRINT: Using fixed seed: 43 +[2025-07-17 20:29:46] [Rank 0] PRINT: Using fixed seed: 43 +[2025-07-17 20:29:46] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_4_param_norope_seed_43 +[2025-07-17 20:29:46] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_4_param_norope_seed_43 +[2025-07-17 20:29:46] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path 
+import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * 
num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used 
for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + 
config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 20:29:46] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + 
num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + 
run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 20:29:46] [Rank 0] PRINT: Constructing model... +[2025-07-17 20:29:46] [Rank 0] PRINT: Constructing model... +[2025-07-17 20:29:48] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 20:29:48] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 20:29:48] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 20:29:48] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 20:29:48] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 20:29:48] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 20:29:48] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 20:29:48] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 20:29:48] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 4 +[2025-07-17 20:29:48] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 4 +[2025-07-17 20:29:48] [Rank 0] PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: 0.001). +[2025-07-17 20:29:48] [Rank 0] PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: 0.001). +[2025-07-17 20:29:48] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-07-17 20:29:48] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-07-17 20:29:48] [Rank 0] PRINT: Muon optimizer is active with 24 parameters. +[2025-07-17 20:29:48] [Rank 0] PRINT: Muon optimizer is active with 24 parameters. +[2025-07-17 20:29:48] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 20:29:48] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 20:29:49] [Rank 0] PRINT: Model compilation complete. +[2025-07-17 20:29:49] [Rank 0] PRINT: Model compilation complete. +[2025-07-17 20:29:49] [Rank 0] PRINT: Starting warmup... 
+[2025-07-17 20:29:49] [Rank 0] PRINT: Starting warmup... +[2025-07-17 20:30:53] [Rank 0] PRINT: Warmup complete. +[2025-07-17 20:30:53] [Rank 0] PRINT: Warmup complete. +[2025-07-17 20:30:53] [Rank 0] PRINT: Starting training... +[2025-07-17 20:30:53] [Rank 0] PRINT: Starting training... +[2025-07-17 20:31:03] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 20:31:03] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 20:31:08] [Rank 0] step:21/10000 train_time:4556ms step_avg:216.95ms +[2025-07-17 20:31:08] [Rank 0] step:21/10000 train_time:4556ms step_avg:216.95ms +[2025-07-17 20:31:12] [Rank 0] step:41/10000 train_time:9058ms step_avg:220.92ms +[2025-07-17 20:31:12] [Rank 0] step:41/10000 train_time:9058ms step_avg:220.92ms +[2025-07-17 20:31:17] [Rank 0] step:61/10000 train_time:13567ms step_avg:222.41ms +[2025-07-17 20:31:17] [Rank 0] step:61/10000 train_time:13567ms step_avg:222.41ms +[2025-07-17 20:31:21] [Rank 0] step:81/10000 train_time:18077ms step_avg:223.17ms +[2025-07-17 20:31:21] [Rank 0] step:81/10000 train_time:18077ms step_avg:223.17ms +[2025-07-17 20:31:26] [Rank 0] step:101/10000 train_time:22595ms step_avg:223.71ms +[2025-07-17 20:31:26] [Rank 0] step:101/10000 train_time:22595ms step_avg:223.71ms +[2025-07-17 20:31:30] [Rank 0] step:121/10000 train_time:27115ms step_avg:224.09ms +[2025-07-17 20:31:30] [Rank 0] step:121/10000 train_time:27115ms step_avg:224.09ms +[2025-07-17 20:31:36] [Rank 0] PRINT: step:125/10000 val_loss:5.5921 train_time:28477ms step_avg:227.82ms +[2025-07-17 20:31:36] [Rank 0] PRINT: step:125/10000 val_loss:5.5921 train_time:28477ms step_avg:227.82ms +[2025-07-17 20:31:40] [Rank 0] step:141/10000 train_time:31636ms step_avg:224.37ms +[2025-07-17 20:31:40] [Rank 0] step:141/10000 train_time:31636ms step_avg:224.37ms +[2025-07-17 20:31:44] [Rank 0] step:161/10000 train_time:36159ms step_avg:224.59ms +[2025-07-17 20:31:44] [Rank 0] step:161/10000 
train_time:36159ms step_avg:224.59ms +[2025-07-17 20:31:49] [Rank 0] step:181/10000 train_time:40685ms step_avg:224.78ms +[2025-07-17 20:31:49] [Rank 0] step:181/10000 train_time:40685ms step_avg:224.78ms +[2025-07-17 20:31:53] [Rank 0] step:201/10000 train_time:45210ms step_avg:224.92ms +[2025-07-17 20:31:53] [Rank 0] step:201/10000 train_time:45210ms step_avg:224.92ms +[2025-07-17 20:31:58] [Rank 0] step:221/10000 train_time:49738ms step_avg:225.06ms +[2025-07-17 20:31:58] [Rank 0] step:221/10000 train_time:49738ms step_avg:225.06ms +[2025-07-17 20:32:02] [Rank 0] step:241/10000 train_time:54268ms step_avg:225.18ms +[2025-07-17 20:32:02] [Rank 0] step:241/10000 train_time:54268ms step_avg:225.18ms +[2025-07-17 20:32:09] [Rank 0] PRINT: step:250/10000 val_loss:5.1605 train_time:56769ms step_avg:227.07ms +[2025-07-17 20:32:09] [Rank 0] PRINT: step:250/10000 val_loss:5.1605 train_time:56769ms step_avg:227.07ms +[2025-07-17 20:32:11] [Rank 0] step:261/10000 train_time:58796ms step_avg:225.27ms +[2025-07-17 20:32:11] [Rank 0] step:261/10000 train_time:58796ms step_avg:225.27ms +[2025-07-17 20:32:16] [Rank 0] step:281/10000 train_time:63324ms step_avg:225.35ms +[2025-07-17 20:32:16] [Rank 0] step:281/10000 train_time:63324ms step_avg:225.35ms +[2025-07-17 20:32:20] [Rank 0] step:301/10000 train_time:67849ms step_avg:225.41ms +[2025-07-17 20:32:20] [Rank 0] step:301/10000 train_time:67849ms step_avg:225.41ms +[2025-07-17 20:32:25] [Rank 0] step:321/10000 train_time:72374ms step_avg:225.46ms +[2025-07-17 20:32:25] [Rank 0] step:321/10000 train_time:72374ms step_avg:225.46ms +[2025-07-17 20:32:29] [Rank 0] step:341/10000 train_time:76899ms step_avg:225.51ms +[2025-07-17 20:32:29] [Rank 0] step:341/10000 train_time:76899ms step_avg:225.51ms +[2025-07-17 20:32:34] [Rank 0] step:361/10000 train_time:81425ms step_avg:225.55ms +[2025-07-17 20:32:34] [Rank 0] step:361/10000 train_time:81425ms step_avg:225.55ms +[2025-07-17 20:32:41] [Rank 0] PRINT: step:375/10000 
val_loss:5.0650 train_time:85048ms step_avg:226.79ms +[2025-07-17 20:32:41] [Rank 0] PRINT: step:375/10000 val_loss:5.0650 train_time:85048ms step_avg:226.79ms +[2025-07-17 20:32:43] [Rank 0] step:381/10000 train_time:85947ms step_avg:225.58ms +[2025-07-17 20:32:43] [Rank 0] step:381/10000 train_time:85947ms step_avg:225.58ms +[2025-07-17 20:32:47] [Rank 0] step:401/10000 train_time:90466ms step_avg:225.60ms +[2025-07-17 20:32:47] [Rank 0] step:401/10000 train_time:90466ms step_avg:225.60ms +[2025-07-17 20:32:52] [Rank 0] step:421/10000 train_time:94989ms step_avg:225.63ms +[2025-07-17 20:32:52] [Rank 0] step:421/10000 train_time:94989ms step_avg:225.63ms +[2025-07-17 20:32:56] [Rank 0] step:441/10000 train_time:99513ms step_avg:225.65ms +[2025-07-17 20:32:56] [Rank 0] step:441/10000 train_time:99513ms step_avg:225.65ms +[2025-07-17 20:33:01] [Rank 0] step:461/10000 train_time:104037ms step_avg:225.68ms +[2025-07-17 20:33:01] [Rank 0] step:461/10000 train_time:104037ms step_avg:225.68ms +[2025-07-17 20:33:05] [Rank 0] step:481/10000 train_time:108562ms step_avg:225.70ms +[2025-07-17 20:33:05] [Rank 0] step:481/10000 train_time:108562ms step_avg:225.70ms +[2025-07-17 20:33:14] [Rank 0] PRINT: step:500/10000 val_loss:5.0775 train_time:113313ms step_avg:226.63ms +[2025-07-17 20:33:14] [Rank 0] PRINT: step:500/10000 val_loss:5.0775 train_time:113313ms step_avg:226.63ms +[2025-07-17 20:33:14] [Rank 0] step:501/10000 train_time:113327ms step_avg:226.20ms +[2025-07-17 20:33:14] [Rank 0] step:501/10000 train_time:113327ms step_avg:226.20ms +[2025-07-17 20:33:19] [Rank 0] step:521/10000 train_time:117611ms step_avg:225.74ms +[2025-07-17 20:33:19] [Rank 0] step:521/10000 train_time:117611ms step_avg:225.74ms +[2025-07-17 20:33:23] [Rank 0] step:541/10000 train_time:122138ms step_avg:225.76ms +[2025-07-17 20:33:23] [Rank 0] step:541/10000 train_time:122138ms step_avg:225.76ms +[2025-07-17 20:33:28] [Rank 0] step:561/10000 train_time:126665ms step_avg:225.78ms +[2025-07-17 
20:33:28] [Rank 0] step:561/10000 train_time:126665ms step_avg:225.78ms +[2025-07-17 20:33:32] [Rank 0] step:581/10000 train_time:131191ms step_avg:225.80ms +[2025-07-17 20:33:32] [Rank 0] step:581/10000 train_time:131191ms step_avg:225.80ms +[2025-07-17 20:33:37] [Rank 0] step:601/10000 train_time:135717ms step_avg:225.82ms +[2025-07-17 20:33:37] [Rank 0] step:601/10000 train_time:135717ms step_avg:225.82ms +[2025-07-17 20:33:41] [Rank 0] step:621/10000 train_time:140242ms step_avg:225.83ms +[2025-07-17 20:33:41] [Rank 0] step:621/10000 train_time:140242ms step_avg:225.83ms +[2025-07-17 20:33:47] [Rank 0] PRINT: step:625/10000 val_loss:4.9964 train_time:141606ms step_avg:226.57ms +[2025-07-17 20:33:47] [Rank 0] PRINT: step:625/10000 val_loss:4.9964 train_time:141606ms step_avg:226.57ms +[2025-07-17 20:33:50] [Rank 0] step:641/10000 train_time:144765ms step_avg:225.84ms +[2025-07-17 20:33:50] [Rank 0] step:641/10000 train_time:144765ms step_avg:225.84ms +[2025-07-17 20:33:55] [Rank 0] step:661/10000 train_time:149291ms step_avg:225.86ms +[2025-07-17 20:33:55] [Rank 0] step:661/10000 train_time:149291ms step_avg:225.86ms +[2025-07-17 20:33:59] [Rank 0] step:681/10000 train_time:153820ms step_avg:225.87ms +[2025-07-17 20:33:59] [Rank 0] step:681/10000 train_time:153820ms step_avg:225.87ms +[2025-07-17 20:34:04] [Rank 0] step:701/10000 train_time:158348ms step_avg:225.89ms +[2025-07-17 20:34:04] [Rank 0] step:701/10000 train_time:158348ms step_avg:225.89ms +[2025-07-17 20:34:08] [Rank 0] step:721/10000 train_time:162876ms step_avg:225.90ms +[2025-07-17 20:34:08] [Rank 0] step:721/10000 train_time:162876ms step_avg:225.90ms +[2025-07-17 20:34:13] [Rank 0] step:741/10000 train_time:167405ms step_avg:225.92ms +[2025-07-17 20:34:13] [Rank 0] step:741/10000 train_time:167405ms step_avg:225.92ms +[2025-07-17 20:34:20] [Rank 0] PRINT: step:750/10000 val_loss:4.8845 train_time:169916ms step_avg:226.55ms +[2025-07-17 20:34:20] [Rank 0] PRINT: step:750/10000 val_loss:4.8845 
train_time:169916ms step_avg:226.55ms +[2025-07-17 20:34:22] [Rank 0] step:761/10000 train_time:171963ms step_avg:225.97ms +[2025-07-17 20:34:22] [Rank 0] step:761/10000 train_time:171963ms step_avg:225.97ms +[2025-07-17 20:34:27] [Rank 0] step:781/10000 train_time:176527ms step_avg:226.03ms +[2025-07-17 20:34:27] [Rank 0] step:781/10000 train_time:176527ms step_avg:226.03ms +[2025-07-17 20:34:31] [Rank 0] step:801/10000 train_time:181090ms step_avg:226.08ms +[2025-07-17 20:34:31] [Rank 0] step:801/10000 train_time:181090ms step_avg:226.08ms +[2025-07-17 20:34:36] [Rank 0] step:821/10000 train_time:185655ms step_avg:226.13ms +[2025-07-17 20:34:36] [Rank 0] step:821/10000 train_time:185655ms step_avg:226.13ms +[2025-07-17 20:34:40] [Rank 0] step:841/10000 train_time:190217ms step_avg:226.18ms +[2025-07-17 20:34:40] [Rank 0] step:841/10000 train_time:190217ms step_avg:226.18ms +[2025-07-17 20:34:45] [Rank 0] step:861/10000 train_time:194782ms step_avg:226.23ms +[2025-07-17 20:34:45] [Rank 0] step:861/10000 train_time:194782ms step_avg:226.23ms +[2025-07-17 20:34:52] [Rank 0] PRINT: step:875/10000 val_loss:4.7107 train_time:198438ms step_avg:226.79ms +[2025-07-17 20:34:52] [Rank 0] PRINT: step:875/10000 val_loss:4.7107 train_time:198438ms step_avg:226.79ms +[2025-07-17 20:34:54] [Rank 0] step:881/10000 train_time:199346ms step_avg:226.27ms +[2025-07-17 20:34:54] [Rank 0] step:881/10000 train_time:199346ms step_avg:226.27ms +[2025-07-17 20:34:58] [Rank 0] step:901/10000 train_time:203912ms step_avg:226.32ms +[2025-07-17 20:34:58] [Rank 0] step:901/10000 train_time:203912ms step_avg:226.32ms +[2025-07-17 20:35:03] [Rank 0] step:921/10000 train_time:208478ms step_avg:226.36ms +[2025-07-17 20:35:03] [Rank 0] step:921/10000 train_time:208478ms step_avg:226.36ms +[2025-07-17 20:35:08] [Rank 0] step:941/10000 train_time:213049ms step_avg:226.41ms +[2025-07-17 20:35:08] [Rank 0] step:941/10000 train_time:213049ms step_avg:226.41ms +[2025-07-17 20:35:12] [Rank 0] 
step:961/10000 train_time:217616ms step_avg:226.45ms +[2025-07-17 20:35:12] [Rank 0] step:961/10000 train_time:217616ms step_avg:226.45ms +[2025-07-17 20:35:17] [Rank 0] step:981/10000 train_time:222183ms step_avg:226.49ms +[2025-07-17 20:35:17] [Rank 0] step:981/10000 train_time:222183ms step_avg:226.49ms +[2025-07-17 20:35:25] [Rank 0] PRINT: step:1000/10000 val_loss:4.7031 train_time:226981ms step_avg:226.98ms +[2025-07-17 20:35:25] [Rank 0] PRINT: step:1000/10000 val_loss:4.7031 train_time:226981ms step_avg:226.98ms +[2025-07-17 20:35:25] [Rank 0] step:1001/10000 train_time:226995ms step_avg:226.77ms +[2025-07-17 20:35:25] [Rank 0] step:1001/10000 train_time:226995ms step_avg:226.77ms +[2025-07-17 20:35:30] [Rank 0] step:1021/10000 train_time:231600ms step_avg:226.84ms +[2025-07-17 20:35:30] [Rank 0] step:1021/10000 train_time:231600ms step_avg:226.84ms +[2025-07-17 20:35:35] [Rank 0] step:1041/10000 train_time:236170ms step_avg:226.87ms +[2025-07-17 20:35:35] [Rank 0] step:1041/10000 train_time:236170ms step_avg:226.87ms +[2025-07-17 20:35:39] [Rank 0] step:1061/10000 train_time:240737ms step_avg:226.90ms +[2025-07-17 20:35:39] [Rank 0] step:1061/10000 train_time:240737ms step_avg:226.90ms +[2025-07-17 20:35:44] [Rank 0] step:1081/10000 train_time:245308ms step_avg:226.93ms +[2025-07-17 20:35:44] [Rank 0] step:1081/10000 train_time:245308ms step_avg:226.93ms +[2025-07-17 20:35:48] [Rank 0] step:1101/10000 train_time:249877ms step_avg:226.95ms +[2025-07-17 20:35:48] [Rank 0] step:1101/10000 train_time:249877ms step_avg:226.95ms +[2025-07-17 20:35:53] [Rank 0] step:1121/10000 train_time:254448ms step_avg:226.98ms +[2025-07-17 20:35:53] [Rank 0] step:1121/10000 train_time:254448ms step_avg:226.98ms +[2025-07-17 20:35:58] [Rank 0] PRINT: step:1125/10000 val_loss:4.7282 train_time:255825ms step_avg:227.40ms +[2025-07-17 20:35:58] [Rank 0] PRINT: step:1125/10000 val_loss:4.7282 train_time:255825ms step_avg:227.40ms +[2025-07-17 20:36:02] [Rank 0] step:1141/10000 
train_time:259018ms step_avg:227.01ms +[2025-07-17 20:36:02] [Rank 0] step:1141/10000 train_time:259018ms step_avg:227.01ms +[2025-07-17 20:36:06] [Rank 0] step:1161/10000 train_time:263588ms step_avg:227.04ms +[2025-07-17 20:36:06] [Rank 0] step:1161/10000 train_time:263588ms step_avg:227.04ms +[2025-07-17 20:36:11] [Rank 0] step:1181/10000 train_time:268162ms step_avg:227.06ms +[2025-07-17 20:36:11] [Rank 0] step:1181/10000 train_time:268162ms step_avg:227.06ms +[2025-07-17 20:36:15] [Rank 0] step:1201/10000 train_time:272735ms step_avg:227.09ms +[2025-07-17 20:36:15] [Rank 0] step:1201/10000 train_time:272735ms step_avg:227.09ms +[2025-07-17 20:36:20] [Rank 0] step:1221/10000 train_time:277306ms step_avg:227.11ms +[2025-07-17 20:36:20] [Rank 0] step:1221/10000 train_time:277306ms step_avg:227.11ms +[2025-07-17 20:36:25] [Rank 0] step:1241/10000 train_time:281883ms step_avg:227.14ms +[2025-07-17 20:36:25] [Rank 0] step:1241/10000 train_time:281883ms step_avg:227.14ms +[2025-07-17 20:36:31] [Rank 0] PRINT: step:1250/10000 val_loss:4.7278 train_time:284404ms step_avg:227.52ms +[2025-07-17 20:36:31] [Rank 0] PRINT: step:1250/10000 val_loss:4.7278 train_time:284404ms step_avg:227.52ms +[2025-07-17 20:36:34] [Rank 0] step:1261/10000 train_time:286458ms step_avg:227.17ms +[2025-07-17 20:36:34] [Rank 0] step:1261/10000 train_time:286458ms step_avg:227.17ms +[2025-07-17 20:36:38] [Rank 0] step:1281/10000 train_time:291034ms step_avg:227.19ms +[2025-07-17 20:36:38] [Rank 0] step:1281/10000 train_time:291034ms step_avg:227.19ms +[2025-07-17 20:36:43] [Rank 0] step:1301/10000 train_time:295610ms step_avg:227.22ms +[2025-07-17 20:36:43] [Rank 0] step:1301/10000 train_time:295610ms step_avg:227.22ms +[2025-07-17 20:36:47] [Rank 0] step:1321/10000 train_time:300186ms step_avg:227.24ms +[2025-07-17 20:36:47] [Rank 0] step:1321/10000 train_time:300186ms step_avg:227.24ms +[2025-07-17 20:36:52] [Rank 0] step:1341/10000 train_time:304760ms step_avg:227.26ms +[2025-07-17 20:36:52] 
[Rank 0] step:1341/10000 train_time:304760ms step_avg:227.26ms +[2025-07-17 20:36:56] [Rank 0] step:1361/10000 train_time:309337ms step_avg:227.29ms +[2025-07-17 20:36:56] [Rank 0] step:1361/10000 train_time:309337ms step_avg:227.29ms +[2025-07-17 20:37:04] [Rank 0] PRINT: step:1375/10000 val_loss:4.8621 train_time:313001ms step_avg:227.64ms +[2025-07-17 20:37:04] [Rank 0] PRINT: step:1375/10000 val_loss:4.8621 train_time:313001ms step_avg:227.64ms +[2025-07-17 20:37:05] [Rank 0] step:1381/10000 train_time:313911ms step_avg:227.31ms +[2025-07-17 20:37:05] [Rank 0] step:1381/10000 train_time:313911ms step_avg:227.31ms +[2025-07-17 20:37:10] [Rank 0] step:1401/10000 train_time:318487ms step_avg:227.33ms +[2025-07-17 20:37:10] [Rank 0] step:1401/10000 train_time:318487ms step_avg:227.33ms +[2025-07-17 20:37:14] [Rank 0] step:1421/10000 train_time:323063ms step_avg:227.35ms +[2025-07-17 20:37:14] [Rank 0] step:1421/10000 train_time:323063ms step_avg:227.35ms +[2025-07-17 20:37:19] [Rank 0] step:1441/10000 train_time:327639ms step_avg:227.37ms +[2025-07-17 20:37:19] [Rank 0] step:1441/10000 train_time:327639ms step_avg:227.37ms +[2025-07-17 20:37:23] [Rank 0] step:1461/10000 train_time:332212ms step_avg:227.39ms +[2025-07-17 20:37:23] [Rank 0] step:1461/10000 train_time:332212ms step_avg:227.39ms +[2025-07-17 20:37:28] [Rank 0] step:1481/10000 train_time:336788ms step_avg:227.41ms +[2025-07-17 20:37:28] [Rank 0] step:1481/10000 train_time:336788ms step_avg:227.41ms +[2025-07-17 20:37:37] [Rank 0] PRINT: step:1500/10000 val_loss:4.7774 train_time:341620ms step_avg:227.75ms +[2025-07-17 20:37:37] [Rank 0] PRINT: step:1500/10000 val_loss:4.7774 train_time:341620ms step_avg:227.75ms +[2025-07-17 20:37:37] [Rank 0] step:1501/10000 train_time:341634ms step_avg:227.60ms +[2025-07-17 20:37:37] [Rank 0] step:1501/10000 train_time:341634ms step_avg:227.60ms +[2025-07-17 20:37:42] [Rank 0] step:1521/10000 train_time:345986ms step_avg:227.47ms +[2025-07-17 20:37:42] [Rank 0] 
step:1521/10000 train_time:345986ms step_avg:227.47ms +[2025-07-17 20:37:47] [Rank 0] step:1541/10000 train_time:350891ms step_avg:227.70ms +[2025-07-17 20:37:47] [Rank 0] step:1541/10000 train_time:350891ms step_avg:227.70ms +[2025-07-17 20:37:51] [Rank 0] step:1561/10000 train_time:355490ms step_avg:227.73ms +[2025-07-17 20:37:51] [Rank 0] step:1561/10000 train_time:355490ms step_avg:227.73ms +[2025-07-17 20:37:56] [Rank 0] step:1581/10000 train_time:360092ms step_avg:227.76ms +[2025-07-17 20:37:56] [Rank 0] step:1581/10000 train_time:360092ms step_avg:227.76ms +[2025-07-17 20:38:00] [Rank 0] step:1601/10000 train_time:364695ms step_avg:227.79ms +[2025-07-17 20:38:00] [Rank 0] step:1601/10000 train_time:364695ms step_avg:227.79ms +[2025-07-17 20:38:05] [Rank 0] step:1621/10000 train_time:369299ms step_avg:227.82ms +[2025-07-17 20:38:05] [Rank 0] step:1621/10000 train_time:369299ms step_avg:227.82ms +[2025-07-17 20:38:10] [Rank 0] PRINT: step:1625/10000 val_loss:4.8876 train_time:370686ms step_avg:228.11ms +[2025-07-17 20:38:10] [Rank 0] PRINT: step:1625/10000 val_loss:4.8876 train_time:370686ms step_avg:228.11ms +[2025-07-17 20:38:14] [Rank 0] step:1641/10000 train_time:373895ms step_avg:227.85ms +[2025-07-17 20:38:14] [Rank 0] step:1641/10000 train_time:373895ms step_avg:227.85ms +[2025-07-17 20:38:19] [Rank 0] step:1661/10000 train_time:378494ms step_avg:227.87ms +[2025-07-17 20:38:19] [Rank 0] step:1661/10000 train_time:378494ms step_avg:227.87ms +[2025-07-17 20:38:23] [Rank 0] step:1681/10000 train_time:383095ms step_avg:227.90ms +[2025-07-17 20:38:23] [Rank 0] step:1681/10000 train_time:383095ms step_avg:227.90ms +[2025-07-17 20:38:28] [Rank 0] step:1701/10000 train_time:387696ms step_avg:227.92ms +[2025-07-17 20:38:28] [Rank 0] step:1701/10000 train_time:387696ms step_avg:227.92ms +[2025-07-17 20:38:32] [Rank 0] step:1721/10000 train_time:392296ms step_avg:227.95ms +[2025-07-17 20:38:32] [Rank 0] step:1721/10000 train_time:392296ms step_avg:227.95ms 
+[2025-07-17 20:38:37] [Rank 0] step:1741/10000 train_time:396896ms step_avg:227.97ms +[2025-07-17 20:38:37] [Rank 0] step:1741/10000 train_time:396896ms step_avg:227.97ms +[2025-07-17 20:38:44] [Rank 0] PRINT: step:1750/10000 val_loss:4.8510 train_time:399433ms step_avg:228.25ms +[2025-07-17 20:38:44] [Rank 0] PRINT: step:1750/10000 val_loss:4.8510 train_time:399433ms step_avg:228.25ms +[2025-07-17 20:38:46] [Rank 0] step:1761/10000 train_time:401499ms step_avg:227.99ms +[2025-07-17 20:38:46] [Rank 0] step:1761/10000 train_time:401499ms step_avg:227.99ms +[2025-07-17 20:38:51] [Rank 0] step:1781/10000 train_time:406099ms step_avg:228.02ms +[2025-07-17 20:38:51] [Rank 0] step:1781/10000 train_time:406099ms step_avg:228.02ms +[2025-07-17 20:38:55] [Rank 0] step:1801/10000 train_time:410700ms step_avg:228.04ms +[2025-07-17 20:38:55] [Rank 0] step:1801/10000 train_time:410700ms step_avg:228.04ms +[2025-07-17 20:39:00] [Rank 0] step:1821/10000 train_time:415305ms step_avg:228.06ms +[2025-07-17 20:39:00] [Rank 0] step:1821/10000 train_time:415305ms step_avg:228.06ms +[2025-07-17 20:39:05] [Rank 0] step:1841/10000 train_time:419908ms step_avg:228.09ms +[2025-07-17 20:39:05] [Rank 0] step:1841/10000 train_time:419908ms step_avg:228.09ms +[2025-07-17 20:39:09] [Rank 0] step:1861/10000 train_time:424513ms step_avg:228.11ms +[2025-07-17 20:39:09] [Rank 0] step:1861/10000 train_time:424513ms step_avg:228.11ms +[2025-07-17 20:39:17] [Rank 0] PRINT: step:1875/10000 val_loss:4.8236 train_time:428202ms step_avg:228.37ms +[2025-07-17 20:39:17] [Rank 0] PRINT: step:1875/10000 val_loss:4.8236 train_time:428202ms step_avg:228.37ms +[2025-07-17 20:39:18] [Rank 0] step:1881/10000 train_time:429117ms step_avg:228.13ms +[2025-07-17 20:39:18] [Rank 0] step:1881/10000 train_time:429117ms step_avg:228.13ms +[2025-07-17 20:39:23] [Rank 0] step:1901/10000 train_time:433724ms step_avg:228.16ms +[2025-07-17 20:39:23] [Rank 0] step:1901/10000 train_time:433724ms step_avg:228.16ms +[2025-07-17 
20:39:27] [Rank 0] step:1921/10000 train_time:438331ms step_avg:228.18ms +[2025-07-17 20:39:27] [Rank 0] step:1921/10000 train_time:438331ms step_avg:228.18ms +[2025-07-17 20:39:32] [Rank 0] step:1941/10000 train_time:442941ms step_avg:228.20ms +[2025-07-17 20:39:32] [Rank 0] step:1941/10000 train_time:442941ms step_avg:228.20ms +[2025-07-17 20:39:37] [Rank 0] step:1961/10000 train_time:447560ms step_avg:228.23ms +[2025-07-17 20:39:37] [Rank 0] step:1961/10000 train_time:447560ms step_avg:228.23ms +[2025-07-17 20:39:41] [Rank 0] step:1981/10000 train_time:452167ms step_avg:228.25ms +[2025-07-17 20:39:41] [Rank 0] step:1981/10000 train_time:452167ms step_avg:228.25ms +[2025-07-17 20:39:50] [Rank 0] PRINT: step:2000/10000 val_loss:5.0169 train_time:457011ms step_avg:228.51ms +[2025-07-17 20:39:50] [Rank 0] PRINT: step:2000/10000 val_loss:5.0169 train_time:457011ms step_avg:228.51ms +[2025-07-17 20:39:50] [Rank 0] step:2001/10000 train_time:457025ms step_avg:228.40ms +[2025-07-17 20:39:50] [Rank 0] step:2001/10000 train_time:457025ms step_avg:228.40ms +[2025-07-17 20:39:55] [Rank 0] step:2021/10000 train_time:461386ms step_avg:228.30ms +[2025-07-17 20:39:55] [Rank 0] step:2021/10000 train_time:461386ms step_avg:228.30ms +[2025-07-17 20:40:00] [Rank 0] step:2041/10000 train_time:466271ms step_avg:228.45ms +[2025-07-17 20:40:00] [Rank 0] step:2041/10000 train_time:466271ms step_avg:228.45ms +[2025-07-17 20:40:05] [Rank 0] step:2061/10000 train_time:470881ms step_avg:228.47ms +[2025-07-17 20:40:05] [Rank 0] step:2061/10000 train_time:470881ms step_avg:228.47ms +[2025-07-17 20:40:09] [Rank 0] step:2081/10000 train_time:475491ms step_avg:228.49ms +[2025-07-17 20:40:09] [Rank 0] step:2081/10000 train_time:475491ms step_avg:228.49ms +[2025-07-17 20:40:14] [Rank 0] step:2101/10000 train_time:480101ms step_avg:228.51ms +[2025-07-17 20:40:14] [Rank 0] step:2101/10000 train_time:480101ms step_avg:228.51ms +[2025-07-17 20:40:18] [Rank 0] step:2121/10000 train_time:484714ms 
step_avg:228.53ms +[2025-07-17 20:40:18] [Rank 0] step:2121/10000 train_time:484714ms step_avg:228.53ms +[2025-07-17 20:40:24] [Rank 0] PRINT: step:2125/10000 val_loss:4.7987 train_time:486104ms step_avg:228.76ms +[2025-07-17 20:40:24] [Rank 0] PRINT: step:2125/10000 val_loss:4.7987 train_time:486104ms step_avg:228.76ms +[2025-07-17 20:40:27] [Rank 0] step:2141/10000 train_time:489323ms step_avg:228.55ms +[2025-07-17 20:40:27] [Rank 0] step:2141/10000 train_time:489323ms step_avg:228.55ms +[2025-07-17 20:40:32] [Rank 0] step:2161/10000 train_time:493936ms step_avg:228.57ms +[2025-07-17 20:40:32] [Rank 0] step:2161/10000 train_time:493936ms step_avg:228.57ms +[2025-07-17 20:40:37] [Rank 0] step:2181/10000 train_time:498544ms step_avg:228.59ms +[2025-07-17 20:40:37] [Rank 0] step:2181/10000 train_time:498544ms step_avg:228.59ms +[2025-07-17 20:40:41] [Rank 0] step:2201/10000 train_time:503158ms step_avg:228.60ms +[2025-07-17 20:40:41] [Rank 0] step:2201/10000 train_time:503158ms step_avg:228.60ms +[2025-07-17 20:40:46] [Rank 0] step:2221/10000 train_time:507769ms step_avg:228.62ms +[2025-07-17 20:40:46] [Rank 0] step:2221/10000 train_time:507769ms step_avg:228.62ms +[2025-07-17 20:40:51] [Rank 0] step:2241/10000 train_time:512469ms step_avg:228.68ms +[2025-07-17 20:40:51] [Rank 0] step:2241/10000 train_time:512469ms step_avg:228.68ms +[2025-07-17 20:40:57] [Rank 0] PRINT: step:2250/10000 val_loss:4.5565 train_time:515073ms step_avg:228.92ms +[2025-07-17 20:40:57] [Rank 0] PRINT: step:2250/10000 val_loss:4.5565 train_time:515073ms step_avg:228.92ms +[2025-07-17 20:41:00] [Rank 0] step:2261/10000 train_time:517191ms step_avg:228.74ms +[2025-07-17 20:41:00] [Rank 0] step:2261/10000 train_time:517191ms step_avg:228.74ms +[2025-07-17 20:41:05] [Rank 0] step:2281/10000 train_time:521910ms step_avg:228.81ms +[2025-07-17 20:41:05] [Rank 0] step:2281/10000 train_time:521910ms step_avg:228.81ms +[2025-07-17 20:41:09] [Rank 0] step:2301/10000 train_time:526628ms 
step_avg:228.87ms +[2025-07-17 20:41:09] [Rank 0] step:2301/10000 train_time:526628ms step_avg:228.87ms +[2025-07-17 20:41:14] [Rank 0] step:2321/10000 train_time:531349ms step_avg:228.93ms +[2025-07-17 20:41:14] [Rank 0] step:2321/10000 train_time:531349ms step_avg:228.93ms +[2025-07-17 20:41:19] [Rank 0] step:2341/10000 train_time:536070ms step_avg:228.99ms +[2025-07-17 20:41:19] [Rank 0] step:2341/10000 train_time:536070ms step_avg:228.99ms +[2025-07-17 20:41:24] [Rank 0] step:2361/10000 train_time:540790ms step_avg:229.05ms +[2025-07-17 20:41:24] [Rank 0] step:2361/10000 train_time:540790ms step_avg:229.05ms +[2025-07-17 20:41:31] [Rank 0] PRINT: step:2375/10000 val_loss:4.6604 train_time:544568ms step_avg:229.29ms +[2025-07-17 20:41:31] [Rank 0] PRINT: step:2375/10000 val_loss:4.6604 train_time:544568ms step_avg:229.29ms +[2025-07-17 20:41:33] [Rank 0] step:2381/10000 train_time:545512ms step_avg:229.11ms +[2025-07-17 20:41:33] [Rank 0] step:2381/10000 train_time:545512ms step_avg:229.11ms +[2025-07-17 20:41:38] [Rank 0] step:2401/10000 train_time:550238ms step_avg:229.17ms +[2025-07-17 20:41:38] [Rank 0] step:2401/10000 train_time:550238ms step_avg:229.17ms +[2025-07-17 20:41:42] [Rank 0] step:2421/10000 train_time:554958ms step_avg:229.23ms +[2025-07-17 20:41:42] [Rank 0] step:2421/10000 train_time:554958ms step_avg:229.23ms +[2025-07-17 20:41:47] [Rank 0] step:2441/10000 train_time:559680ms step_avg:229.28ms +[2025-07-17 20:41:47] [Rank 0] step:2441/10000 train_time:559680ms step_avg:229.28ms +[2025-07-17 20:41:52] [Rank 0] step:2461/10000 train_time:564403ms step_avg:229.34ms +[2025-07-17 20:41:52] [Rank 0] step:2461/10000 train_time:564403ms step_avg:229.34ms +[2025-07-17 20:41:56] [Rank 0] step:2481/10000 train_time:569126ms step_avg:229.39ms +[2025-07-17 20:41:56] [Rank 0] step:2481/10000 train_time:569126ms step_avg:229.39ms +[2025-07-17 20:42:06] [Rank 0] PRINT: step:2500/10000 val_loss:4.5565 train_time:574086ms step_avg:229.63ms +[2025-07-17 
20:42:06] [Rank 0] PRINT: step:2500/10000 val_loss:4.5565 train_time:574086ms step_avg:229.63ms +[2025-07-17 20:42:06] [Rank 0] step:2501/10000 train_time:574101ms step_avg:229.55ms +[2025-07-17 20:42:06] [Rank 0] step:2501/10000 train_time:574101ms step_avg:229.55ms +[2025-07-17 20:42:11] [Rank 0] step:2521/10000 train_time:578574ms step_avg:229.50ms +[2025-07-17 20:42:11] [Rank 0] step:2521/10000 train_time:578574ms step_avg:229.50ms +[2025-07-17 20:42:16] [Rank 0] step:2541/10000 train_time:583840ms step_avg:229.77ms +[2025-07-17 20:42:16] [Rank 0] step:2541/10000 train_time:583840ms step_avg:229.77ms +[2025-07-17 20:42:20] [Rank 0] step:2561/10000 train_time:588307ms step_avg:229.72ms +[2025-07-17 20:42:20] [Rank 0] step:2561/10000 train_time:588307ms step_avg:229.72ms +[2025-07-17 20:42:25] [Rank 0] step:2581/10000 train_time:593033ms step_avg:229.77ms +[2025-07-17 20:42:25] [Rank 0] step:2581/10000 train_time:593033ms step_avg:229.77ms +[2025-07-17 20:42:30] [Rank 0] step:2601/10000 train_time:597759ms step_avg:229.82ms +[2025-07-17 20:42:30] [Rank 0] step:2601/10000 train_time:597759ms step_avg:229.82ms +[2025-07-17 20:42:34] [Rank 0] step:2621/10000 train_time:602484ms step_avg:229.87ms +[2025-07-17 20:42:34] [Rank 0] step:2621/10000 train_time:602484ms step_avg:229.87ms +[2025-07-17 20:42:40] [Rank 0] PRINT: step:2625/10000 val_loss:5.3540 train_time:603907ms step_avg:230.06ms +[2025-07-17 20:42:40] [Rank 0] PRINT: step:2625/10000 val_loss:5.3540 train_time:603907ms step_avg:230.06ms +[2025-07-17 20:42:44] [Rank 0] step:2641/10000 train_time:607209ms step_avg:229.92ms +[2025-07-17 20:42:44] [Rank 0] step:2641/10000 train_time:607209ms step_avg:229.92ms +[2025-07-17 20:42:48] [Rank 0] step:2661/10000 train_time:611933ms step_avg:229.96ms +[2025-07-17 20:42:48] [Rank 0] step:2661/10000 train_time:611933ms step_avg:229.96ms +[2025-07-17 20:42:53] [Rank 0] step:2681/10000 train_time:616658ms step_avg:230.01ms +[2025-07-17 20:42:53] [Rank 0] step:2681/10000 
train_time:616658ms step_avg:230.01ms +[2025-07-17 20:42:58] [Rank 0] step:2701/10000 train_time:621386ms step_avg:230.06ms +[2025-07-17 20:42:58] [Rank 0] step:2701/10000 train_time:621386ms step_avg:230.06ms +[2025-07-17 20:43:03] [Rank 0] step:2721/10000 train_time:626110ms step_avg:230.10ms +[2025-07-17 20:43:03] [Rank 0] step:2721/10000 train_time:626110ms step_avg:230.10ms +[2025-07-17 20:43:07] [Rank 0] step:2741/10000 train_time:630835ms step_avg:230.15ms +[2025-07-17 20:43:07] [Rank 0] step:2741/10000 train_time:630835ms step_avg:230.15ms +[2025-07-17 20:43:14] [Rank 0] PRINT: step:2750/10000 val_loss:4.2570 train_time:633441ms step_avg:230.34ms +[2025-07-17 20:43:14] [Rank 0] PRINT: step:2750/10000 val_loss:4.2570 train_time:633441ms step_avg:230.34ms +[2025-07-17 20:43:16] [Rank 0] step:2761/10000 train_time:635561ms step_avg:230.19ms +[2025-07-17 20:43:16] [Rank 0] step:2761/10000 train_time:635561ms step_avg:230.19ms +[2025-07-17 20:43:21] [Rank 0] step:2781/10000 train_time:640287ms step_avg:230.24ms +[2025-07-17 20:43:21] [Rank 0] step:2781/10000 train_time:640287ms step_avg:230.24ms +[2025-07-17 20:43:26] [Rank 0] step:2801/10000 train_time:645014ms step_avg:230.28ms +[2025-07-17 20:43:26] [Rank 0] step:2801/10000 train_time:645014ms step_avg:230.28ms +[2025-07-17 20:43:30] [Rank 0] step:2821/10000 train_time:649738ms step_avg:230.32ms +[2025-07-17 20:43:30] [Rank 0] step:2821/10000 train_time:649738ms step_avg:230.32ms +[2025-07-17 20:43:35] [Rank 0] step:2841/10000 train_time:654464ms step_avg:230.36ms +[2025-07-17 20:43:35] [Rank 0] step:2841/10000 train_time:654464ms step_avg:230.36ms +[2025-07-17 20:43:40] [Rank 0] step:2861/10000 train_time:659189ms step_avg:230.41ms +[2025-07-17 20:43:40] [Rank 0] step:2861/10000 train_time:659189ms step_avg:230.41ms +[2025-07-17 20:43:48] [Rank 0] PRINT: step:2875/10000 val_loss:4.8137 train_time:662973ms step_avg:230.60ms +[2025-07-17 20:43:48] [Rank 0] PRINT: step:2875/10000 val_loss:4.8137 
train_time:662973ms step_avg:230.60ms +[2025-07-17 20:43:49] [Rank 0] step:2881/10000 train_time:663914ms step_avg:230.45ms +[2025-07-17 20:43:49] [Rank 0] step:2881/10000 train_time:663914ms step_avg:230.45ms +[2025-07-17 20:43:54] [Rank 0] step:2901/10000 train_time:668636ms step_avg:230.48ms +[2025-07-17 20:43:54] [Rank 0] step:2901/10000 train_time:668636ms step_avg:230.48ms +[2025-07-17 20:43:59] [Rank 0] step:2921/10000 train_time:673364ms step_avg:230.53ms +[2025-07-17 20:43:59] [Rank 0] step:2921/10000 train_time:673364ms step_avg:230.53ms +[2025-07-17 20:44:03] [Rank 0] step:2941/10000 train_time:678089ms step_avg:230.56ms +[2025-07-17 20:44:03] [Rank 0] step:2941/10000 train_time:678089ms step_avg:230.56ms +[2025-07-17 20:44:08] [Rank 0] step:2961/10000 train_time:682816ms step_avg:230.60ms +[2025-07-17 20:44:08] [Rank 0] step:2961/10000 train_time:682816ms step_avg:230.60ms +[2025-07-17 20:44:13] [Rank 0] step:2981/10000 train_time:687556ms step_avg:230.65ms +[2025-07-17 20:44:13] [Rank 0] step:2981/10000 train_time:687556ms step_avg:230.65ms +[2025-07-17 20:44:22] [Rank 0] PRINT: step:3000/10000 val_loss:4.8640 train_time:692543ms step_avg:230.85ms +[2025-07-17 20:44:22] [Rank 0] PRINT: step:3000/10000 val_loss:4.8640 train_time:692543ms step_avg:230.85ms +[2025-07-17 20:44:22] [Rank 0] step:3001/10000 train_time:692558ms step_avg:230.78ms +[2025-07-17 20:44:22] [Rank 0] step:3001/10000 train_time:692558ms step_avg:230.78ms +[2025-07-17 20:44:27] [Rank 0] step:3021/10000 train_time:697043ms step_avg:230.73ms +[2025-07-17 20:44:27] [Rank 0] step:3021/10000 train_time:697043ms step_avg:230.73ms +[2025-07-17 20:44:32] [Rank 0] step:3041/10000 train_time:701783ms step_avg:230.77ms +[2025-07-17 20:44:32] [Rank 0] step:3041/10000 train_time:701783ms step_avg:230.77ms +[2025-07-17 20:44:37] [Rank 0] step:3061/10000 train_time:706800ms step_avg:230.90ms +[2025-07-17 20:44:37] [Rank 0] step:3061/10000 train_time:706800ms step_avg:230.90ms +[2025-07-17 20:44:41] 
[Rank 0] step:3081/10000 train_time:711540ms step_avg:230.94ms +[2025-07-17 20:44:41] [Rank 0] step:3081/10000 train_time:711540ms step_avg:230.94ms +[2025-07-17 20:44:46] [Rank 0] step:3101/10000 train_time:716283ms step_avg:230.98ms +[2025-07-17 20:44:46] [Rank 0] step:3101/10000 train_time:716283ms step_avg:230.98ms +[2025-07-17 20:44:51] [Rank 0] step:3121/10000 train_time:721024ms step_avg:231.02ms +[2025-07-17 20:44:51] [Rank 0] step:3121/10000 train_time:721024ms step_avg:231.02ms +[2025-07-17 20:44:56] [Rank 0] PRINT: step:3125/10000 val_loss:5.0508 train_time:722454ms step_avg:231.19ms +[2025-07-17 20:44:56] [Rank 0] PRINT: step:3125/10000 val_loss:5.0508 train_time:722454ms step_avg:231.19ms +[2025-07-17 20:45:00] [Rank 0] step:3141/10000 train_time:725763ms step_avg:231.06ms +[2025-07-17 20:45:00] [Rank 0] step:3141/10000 train_time:725763ms step_avg:231.06ms +[2025-07-17 20:45:05] [Rank 0] step:3161/10000 train_time:730506ms step_avg:231.10ms +[2025-07-17 20:45:05] [Rank 0] step:3161/10000 train_time:730506ms step_avg:231.10ms +[2025-07-17 20:45:10] [Rank 0] step:3181/10000 train_time:735247ms step_avg:231.14ms +[2025-07-17 20:45:10] [Rank 0] step:3181/10000 train_time:735247ms step_avg:231.14ms +[2025-07-17 20:45:14] [Rank 0] step:3201/10000 train_time:739988ms step_avg:231.17ms +[2025-07-17 20:45:14] [Rank 0] step:3201/10000 train_time:739988ms step_avg:231.17ms +[2025-07-17 20:45:19] [Rank 0] step:3221/10000 train_time:744733ms step_avg:231.21ms +[2025-07-17 20:45:19] [Rank 0] step:3221/10000 train_time:744733ms step_avg:231.21ms +[2025-07-17 20:45:24] [Rank 0] step:3241/10000 train_time:749476ms step_avg:231.25ms +[2025-07-17 20:45:24] [Rank 0] step:3241/10000 train_time:749476ms step_avg:231.25ms +[2025-07-17 20:45:30] [Rank 0] PRINT: step:3250/10000 val_loss:4.8345 train_time:752089ms step_avg:231.41ms +[2025-07-17 20:45:30] [Rank 0] PRINT: step:3250/10000 val_loss:4.8345 train_time:752089ms step_avg:231.41ms +[2025-07-17 20:45:33] [Rank 0] 
step:3261/10000 train_time:754216ms step_avg:231.28ms +[2025-07-17 20:45:33] [Rank 0] step:3261/10000 train_time:754216ms step_avg:231.28ms +[2025-07-17 20:45:38] [Rank 0] step:3281/10000 train_time:758954ms step_avg:231.32ms +[2025-07-17 20:45:38] [Rank 0] step:3281/10000 train_time:758954ms step_avg:231.32ms +[2025-07-17 20:45:42] [Rank 0] step:3301/10000 train_time:763693ms step_avg:231.35ms +[2025-07-17 20:45:42] [Rank 0] step:3301/10000 train_time:763693ms step_avg:231.35ms +[2025-07-17 20:45:47] [Rank 0] step:3321/10000 train_time:768431ms step_avg:231.39ms +[2025-07-17 20:45:47] [Rank 0] step:3321/10000 train_time:768431ms step_avg:231.39ms +[2025-07-17 20:45:52] [Rank 0] step:3341/10000 train_time:773168ms step_avg:231.42ms +[2025-07-17 20:45:52] [Rank 0] step:3341/10000 train_time:773168ms step_avg:231.42ms +[2025-07-17 20:45:56] [Rank 0] step:3361/10000 train_time:777907ms step_avg:231.45ms +[2025-07-17 20:45:56] [Rank 0] step:3361/10000 train_time:777907ms step_avg:231.45ms +[2025-07-17 20:46:04] [Rank 0] PRINT: step:3375/10000 val_loss:4.9494 train_time:781702ms step_avg:231.62ms +[2025-07-17 20:46:04] [Rank 0] PRINT: step:3375/10000 val_loss:4.9494 train_time:781702ms step_avg:231.62ms +[2025-07-17 20:46:06] [Rank 0] step:3381/10000 train_time:782642ms step_avg:231.48ms +[2025-07-17 20:46:06] [Rank 0] step:3381/10000 train_time:782642ms step_avg:231.48ms +[2025-07-17 20:46:11] [Rank 0] step:3401/10000 train_time:787377ms step_avg:231.51ms +[2025-07-17 20:46:11] [Rank 0] step:3401/10000 train_time:787377ms step_avg:231.51ms +[2025-07-17 20:46:15] [Rank 0] step:3421/10000 train_time:792116ms step_avg:231.55ms +[2025-07-17 20:46:15] [Rank 0] step:3421/10000 train_time:792116ms step_avg:231.55ms +[2025-07-17 20:46:20] [Rank 0] step:3441/10000 train_time:796852ms step_avg:231.58ms +[2025-07-17 20:46:20] [Rank 0] step:3441/10000 train_time:796852ms step_avg:231.58ms +[2025-07-17 20:46:25] [Rank 0] step:3461/10000 train_time:801588ms step_avg:231.61ms 
+[2025-07-17 20:46:25] [Rank 0] step:3461/10000 train_time:801588ms step_avg:231.61ms +[2025-07-17 20:46:29] [Rank 0] step:3481/10000 train_time:806325ms step_avg:231.64ms +[2025-07-17 20:46:29] [Rank 0] step:3481/10000 train_time:806325ms step_avg:231.64ms +[2025-07-17 20:46:38] [Rank 0] PRINT: step:3500/10000 val_loss:5.2154 train_time:811298ms step_avg:231.80ms +[2025-07-17 20:46:38] [Rank 0] PRINT: step:3500/10000 val_loss:5.2154 train_time:811298ms step_avg:231.80ms +[2025-07-17 20:46:39] [Rank 0] step:3501/10000 train_time:811312ms step_avg:231.74ms +[2025-07-17 20:46:39] [Rank 0] step:3501/10000 train_time:811312ms step_avg:231.74ms +[2025-07-17 20:46:43] [Rank 0] step:3521/10000 train_time:815790ms step_avg:231.69ms +[2025-07-17 20:46:43] [Rank 0] step:3521/10000 train_time:815790ms step_avg:231.69ms +[2025-07-17 20:46:48] [Rank 0] step:3541/10000 train_time:820528ms step_avg:231.72ms +[2025-07-17 20:46:48] [Rank 0] step:3541/10000 train_time:820528ms step_avg:231.72ms +[2025-07-17 20:46:53] [Rank 0] step:3561/10000 train_time:825554ms step_avg:231.83ms +[2025-07-17 20:46:53] [Rank 0] step:3561/10000 train_time:825554ms step_avg:231.83ms +[2025-07-17 20:46:58] [Rank 0] step:3581/10000 train_time:830287ms step_avg:231.86ms +[2025-07-17 20:46:58] [Rank 0] step:3581/10000 train_time:830287ms step_avg:231.86ms +[2025-07-17 20:47:03] [Rank 0] step:3601/10000 train_time:835022ms step_avg:231.89ms +[2025-07-17 20:47:03] [Rank 0] step:3601/10000 train_time:835022ms step_avg:231.89ms +[2025-07-17 20:47:07] [Rank 0] step:3621/10000 train_time:839755ms step_avg:231.91ms +[2025-07-17 20:47:07] [Rank 0] step:3621/10000 train_time:839755ms step_avg:231.91ms +[2025-07-17 20:47:13] [Rank 0] PRINT: step:3625/10000 val_loss:5.2104 train_time:841181ms step_avg:232.05ms +[2025-07-17 20:47:13] [Rank 0] PRINT: step:3625/10000 val_loss:5.2104 train_time:841181ms step_avg:232.05ms +[2025-07-17 20:47:17] [Rank 0] step:3641/10000 train_time:844486ms step_avg:231.94ms +[2025-07-17 
20:47:17] [Rank 0] step:3641/10000 train_time:844486ms step_avg:231.94ms +[2025-07-17 20:47:22] [Rank 0] step:3661/10000 train_time:849219ms step_avg:231.96ms +[2025-07-17 20:47:22] [Rank 0] step:3661/10000 train_time:849219ms step_avg:231.96ms +[2025-07-17 20:47:26] [Rank 0] step:3681/10000 train_time:853953ms step_avg:231.99ms +[2025-07-17 20:47:26] [Rank 0] step:3681/10000 train_time:853953ms step_avg:231.99ms +[2025-07-17 20:47:31] [Rank 0] step:3701/10000 train_time:858688ms step_avg:232.02ms +[2025-07-17 20:47:31] [Rank 0] step:3701/10000 train_time:858688ms step_avg:232.02ms +[2025-07-17 20:47:36] [Rank 0] step:3721/10000 train_time:863488ms step_avg:232.06ms +[2025-07-17 20:47:36] [Rank 0] step:3721/10000 train_time:863488ms step_avg:232.06ms +[2025-07-17 20:47:41] [Rank 0] step:3741/10000 train_time:868313ms step_avg:232.11ms +[2025-07-17 20:47:41] [Rank 0] step:3741/10000 train_time:868313ms step_avg:232.11ms +[2025-07-17 20:47:47] [Rank 0] PRINT: step:3750/10000 val_loss:5.3094 train_time:870971ms step_avg:232.26ms +[2025-07-17 20:47:47] [Rank 0] PRINT: step:3750/10000 val_loss:5.3094 train_time:870971ms step_avg:232.26ms +[2025-07-17 20:47:50] [Rank 0] step:3761/10000 train_time:873135ms step_avg:232.16ms +[2025-07-17 20:47:50] [Rank 0] step:3761/10000 train_time:873135ms step_avg:232.16ms +[2025-07-17 20:47:55] [Rank 0] step:3781/10000 train_time:877963ms step_avg:232.20ms +[2025-07-17 20:47:55] [Rank 0] step:3781/10000 train_time:877963ms step_avg:232.20ms +[2025-07-17 20:47:59] [Rank 0] step:3801/10000 train_time:882786ms step_avg:232.25ms +[2025-07-17 20:47:59] [Rank 0] step:3801/10000 train_time:882786ms step_avg:232.25ms +[2025-07-17 20:48:04] [Rank 0] step:3821/10000 train_time:887614ms step_avg:232.30ms +[2025-07-17 20:48:04] [Rank 0] step:3821/10000 train_time:887614ms step_avg:232.30ms +[2025-07-17 20:48:09] [Rank 0] step:3841/10000 train_time:892437ms step_avg:232.34ms +[2025-07-17 20:48:09] [Rank 0] step:3841/10000 train_time:892437ms 
step_avg:232.34ms +[2025-07-17 20:48:14] [Rank 0] step:3861/10000 train_time:897258ms step_avg:232.39ms +[2025-07-17 20:48:14] [Rank 0] step:3861/10000 train_time:897258ms step_avg:232.39ms +[2025-07-17 20:48:22] [Rank 0] PRINT: step:3875/10000 val_loss:5.3170 train_time:901118ms step_avg:232.55ms +[2025-07-17 20:48:22] [Rank 0] PRINT: step:3875/10000 val_loss:5.3170 train_time:901118ms step_avg:232.55ms +[2025-07-17 20:48:23] [Rank 0] step:3881/10000 train_time:902075ms step_avg:232.43ms +[2025-07-17 20:48:23] [Rank 0] step:3881/10000 train_time:902075ms step_avg:232.43ms +[2025-07-17 20:48:28] [Rank 0] step:3901/10000 train_time:906890ms step_avg:232.48ms +[2025-07-17 20:48:28] [Rank 0] step:3901/10000 train_time:906890ms step_avg:232.48ms +[2025-07-17 20:48:33] [Rank 0] step:3921/10000 train_time:911705ms step_avg:232.52ms +[2025-07-17 20:48:33] [Rank 0] step:3921/10000 train_time:911705ms step_avg:232.52ms +[2025-07-17 20:48:38] [Rank 0] step:3941/10000 train_time:916522ms step_avg:232.56ms +[2025-07-17 20:48:38] [Rank 0] step:3941/10000 train_time:916522ms step_avg:232.56ms +[2025-07-17 20:48:43] [Rank 0] step:3961/10000 train_time:921337ms step_avg:232.60ms +[2025-07-17 20:48:43] [Rank 0] step:3961/10000 train_time:921337ms step_avg:232.60ms +[2025-07-17 20:48:47] [Rank 0] step:3981/10000 train_time:926153ms step_avg:232.64ms +[2025-07-17 20:48:47] [Rank 0] step:3981/10000 train_time:926153ms step_avg:232.64ms +[2025-07-17 20:48:57] [Rank 0] PRINT: step:4000/10000 val_loss:5.3815 train_time:931211ms step_avg:232.80ms +[2025-07-17 20:48:57] [Rank 0] PRINT: step:4000/10000 val_loss:5.3815 train_time:931211ms step_avg:232.80ms +[2025-07-17 20:48:57] [Rank 0] step:4001/10000 train_time:931225ms step_avg:232.75ms +[2025-07-17 20:48:57] [Rank 0] step:4001/10000 train_time:931225ms step_avg:232.75ms +[2025-07-17 20:49:02] [Rank 0] step:4021/10000 train_time:935780ms step_avg:232.72ms +[2025-07-17 20:49:02] [Rank 0] step:4021/10000 train_time:935780ms 
step_avg:232.72ms +[2025-07-17 20:49:06] [Rank 0] step:4041/10000 train_time:940594ms step_avg:232.76ms +[2025-07-17 20:49:06] [Rank 0] step:4041/10000 train_time:940594ms step_avg:232.76ms +[2025-07-17 20:49:11] [Rank 0] step:4061/10000 train_time:945410ms step_avg:232.80ms +[2025-07-17 20:49:11] [Rank 0] step:4061/10000 train_time:945410ms step_avg:232.80ms +[2025-07-17 20:49:16] [Rank 0] step:4081/10000 train_time:950497ms step_avg:232.91ms +[2025-07-17 20:49:16] [Rank 0] step:4081/10000 train_time:950497ms step_avg:232.91ms +[2025-07-17 20:49:21] [Rank 0] step:4101/10000 train_time:955322ms step_avg:232.95ms +[2025-07-17 20:49:21] [Rank 0] step:4101/10000 train_time:955322ms step_avg:232.95ms +[2025-07-17 20:49:26] [Rank 0] step:4121/10000 train_time:960141ms step_avg:232.99ms +[2025-07-17 20:49:26] [Rank 0] step:4121/10000 train_time:960141ms step_avg:232.99ms +[2025-07-17 20:49:31] [Rank 0] PRINT: step:4125/10000 val_loss:5.5527 train_time:961594ms step_avg:233.11ms +[2025-07-17 20:49:31] [Rank 0] PRINT: step:4125/10000 val_loss:5.5527 train_time:961594ms step_avg:233.11ms +[2025-07-17 20:49:35] [Rank 0] step:4141/10000 train_time:964960ms step_avg:233.03ms +[2025-07-17 20:49:35] [Rank 0] step:4141/10000 train_time:964960ms step_avg:233.03ms +[2025-07-17 20:49:40] [Rank 0] step:4161/10000 train_time:969778ms step_avg:233.06ms +[2025-07-17 20:49:40] [Rank 0] step:4161/10000 train_time:969778ms step_avg:233.06ms +[2025-07-17 20:49:45] [Rank 0] step:4181/10000 train_time:974594ms step_avg:233.10ms +[2025-07-17 20:49:45] [Rank 0] step:4181/10000 train_time:974594ms step_avg:233.10ms +[2025-07-17 20:49:50] [Rank 0] step:4201/10000 train_time:979414ms step_avg:233.14ms +[2025-07-17 20:49:50] [Rank 0] step:4201/10000 train_time:979414ms step_avg:233.14ms +[2025-07-17 20:49:54] [Rank 0] step:4221/10000 train_time:984233ms step_avg:233.18ms +[2025-07-17 20:49:54] [Rank 0] step:4221/10000 train_time:984233ms step_avg:233.18ms +[2025-07-17 20:49:59] [Rank 0] 
step:4241/10000 train_time:989054ms step_avg:233.21ms +[2025-07-17 20:49:59] [Rank 0] step:4241/10000 train_time:989054ms step_avg:233.21ms +[2025-07-17 20:50:06] [Rank 0] PRINT: step:4250/10000 val_loss:5.6774 train_time:991707ms step_avg:233.34ms +[2025-07-17 20:50:06] [Rank 0] PRINT: step:4250/10000 val_loss:5.6774 train_time:991707ms step_avg:233.34ms +[2025-07-17 20:50:09] [Rank 0] step:4261/10000 train_time:993867ms step_avg:233.25ms +[2025-07-17 20:50:09] [Rank 0] step:4261/10000 train_time:993867ms step_avg:233.25ms +[2025-07-17 20:50:13] [Rank 0] step:4281/10000 train_time:998689ms step_avg:233.28ms +[2025-07-17 20:50:13] [Rank 0] step:4281/10000 train_time:998689ms step_avg:233.28ms +[2025-07-17 20:50:18] [Rank 0] step:4301/10000 train_time:1003509ms step_avg:233.32ms +[2025-07-17 20:50:18] [Rank 0] step:4301/10000 train_time:1003509ms step_avg:233.32ms +[2025-07-17 20:50:23] [Rank 0] step:4321/10000 train_time:1008435ms step_avg:233.38ms +[2025-07-17 20:50:23] [Rank 0] step:4321/10000 train_time:1008435ms step_avg:233.38ms +[2025-07-17 20:50:28] [Rank 0] step:4341/10000 train_time:1013256ms step_avg:233.42ms +[2025-07-17 20:50:28] [Rank 0] step:4341/10000 train_time:1013256ms step_avg:233.42ms +[2025-07-17 20:50:33] [Rank 0] step:4361/10000 train_time:1018078ms step_avg:233.45ms +[2025-07-17 20:50:33] [Rank 0] step:4361/10000 train_time:1018078ms step_avg:233.45ms +[2025-07-17 20:50:41] [Rank 0] PRINT: step:4375/10000 val_loss:5.4936 train_time:1021941ms step_avg:233.59ms +[2025-07-17 20:50:41] [Rank 0] PRINT: step:4375/10000 val_loss:5.4936 train_time:1021941ms step_avg:233.59ms +[2025-07-17 20:50:42] [Rank 0] step:4381/10000 train_time:1022902ms step_avg:233.49ms +[2025-07-17 20:50:42] [Rank 0] step:4381/10000 train_time:1022902ms step_avg:233.49ms +[2025-07-17 20:50:47] [Rank 0] step:4401/10000 train_time:1027721ms step_avg:233.52ms +[2025-07-17 20:50:47] [Rank 0] step:4401/10000 train_time:1027721ms step_avg:233.52ms +[2025-07-17 20:50:52] [Rank 0] 
step:4421/10000 train_time:1032541ms step_avg:233.55ms +[2025-07-17 20:50:52] [Rank 0] step:4421/10000 train_time:1032541ms step_avg:233.55ms +[2025-07-17 20:50:57] [Rank 0] step:4441/10000 train_time:1037363ms step_avg:233.59ms +[2025-07-17 20:50:57] [Rank 0] step:4441/10000 train_time:1037363ms step_avg:233.59ms +[2025-07-17 20:51:02] [Rank 0] step:4461/10000 train_time:1042194ms step_avg:233.62ms +[2025-07-17 20:51:02] [Rank 0] step:4461/10000 train_time:1042194ms step_avg:233.62ms +[2025-07-17 20:51:06] [Rank 0] step:4481/10000 train_time:1047030ms step_avg:233.66ms +[2025-07-17 20:51:06] [Rank 0] step:4481/10000 train_time:1047030ms step_avg:233.66ms +[2025-07-17 20:51:16] [Rank 0] PRINT: step:4500/10000 val_loss:5.4119 train_time:1052111ms step_avg:233.80ms +[2025-07-17 20:51:16] [Rank 0] PRINT: step:4500/10000 val_loss:5.4119 train_time:1052111ms step_avg:233.80ms +[2025-07-17 20:51:16] [Rank 0] step:4501/10000 train_time:1052126ms step_avg:233.75ms +[2025-07-17 20:51:16] [Rank 0] step:4501/10000 train_time:1052126ms step_avg:233.75ms +[2025-07-17 20:51:21] [Rank 0] step:4521/10000 train_time:1056703ms step_avg:233.73ms +[2025-07-17 20:51:21] [Rank 0] step:4521/10000 train_time:1056703ms step_avg:233.73ms +[2025-07-17 20:51:26] [Rank 0] step:4541/10000 train_time:1061538ms step_avg:233.77ms +[2025-07-17 20:51:26] [Rank 0] step:4541/10000 train_time:1061538ms step_avg:233.77ms +[2025-07-17 20:51:30] [Rank 0] step:4561/10000 train_time:1066373ms step_avg:233.80ms +[2025-07-17 20:51:30] [Rank 0] step:4561/10000 train_time:1066373ms step_avg:233.80ms +[2025-07-17 20:51:36] [Rank 0] step:4581/10000 train_time:1071484ms step_avg:233.90ms +[2025-07-17 20:51:36] [Rank 0] step:4581/10000 train_time:1071484ms step_avg:233.90ms +[2025-07-17 20:51:40] [Rank 0] step:4601/10000 train_time:1076330ms step_avg:233.93ms +[2025-07-17 20:51:40] [Rank 0] step:4601/10000 train_time:1076330ms step_avg:233.93ms +[2025-07-17 20:51:45] [Rank 0] step:4621/10000 train_time:1081170ms 
step_avg:233.97ms +[2025-07-17 20:51:45] [Rank 0] step:4621/10000 train_time:1081170ms step_avg:233.97ms +[2025-07-17 20:51:51] [Rank 0] PRINT: step:4625/10000 val_loss:5.5534 train_time:1082633ms step_avg:234.08ms +[2025-07-17 20:51:51] [Rank 0] PRINT: step:4625/10000 val_loss:5.5534 train_time:1082633ms step_avg:234.08ms +[2025-07-17 20:51:55] [Rank 0] step:4641/10000 train_time:1086010ms step_avg:234.00ms +[2025-07-17 20:51:55] [Rank 0] step:4641/10000 train_time:1086010ms step_avg:234.00ms +[2025-07-17 20:52:00] [Rank 0] step:4661/10000 train_time:1090855ms step_avg:234.04ms +[2025-07-17 20:52:00] [Rank 0] step:4661/10000 train_time:1090855ms step_avg:234.04ms +[2025-07-17 20:52:04] [Rank 0] step:4681/10000 train_time:1095694ms step_avg:234.07ms +[2025-07-17 20:52:04] [Rank 0] step:4681/10000 train_time:1095694ms step_avg:234.07ms +[2025-07-17 20:52:09] [Rank 0] step:4701/10000 train_time:1100537ms step_avg:234.11ms +[2025-07-17 20:52:09] [Rank 0] step:4701/10000 train_time:1100537ms step_avg:234.11ms +[2025-07-17 20:52:14] [Rank 0] step:4721/10000 train_time:1105378ms step_avg:234.14ms +[2025-07-17 20:52:14] [Rank 0] step:4721/10000 train_time:1105378ms step_avg:234.14ms +[2025-07-17 20:52:19] [Rank 0] step:4741/10000 train_time:1110220ms step_avg:234.17ms +[2025-07-17 20:52:19] [Rank 0] step:4741/10000 train_time:1110220ms step_avg:234.17ms +[2025-07-17 20:52:25] [Rank 0] PRINT: step:4750/10000 val_loss:5.5575 train_time:1112891ms step_avg:234.29ms +[2025-07-17 20:52:25] [Rank 0] PRINT: step:4750/10000 val_loss:5.5575 train_time:1112891ms step_avg:234.29ms +[2025-07-17 20:52:28] [Rank 0] step:4761/10000 train_time:1115061ms step_avg:234.21ms +[2025-07-17 20:52:28] [Rank 0] step:4761/10000 train_time:1115061ms step_avg:234.21ms +[2025-07-17 20:52:33] [Rank 0] step:4781/10000 train_time:1119901ms step_avg:234.24ms +[2025-07-17 20:52:33] [Rank 0] step:4781/10000 train_time:1119901ms step_avg:234.24ms +[2025-07-17 20:52:38] [Rank 0] step:4801/10000 
train_time:1124736ms step_avg:234.27ms +[2025-07-17 20:52:38] [Rank 0] step:4801/10000 train_time:1124736ms step_avg:234.27ms +[2025-07-17 20:52:43] [Rank 0] step:4821/10000 train_time:1129672ms step_avg:234.32ms +[2025-07-17 20:52:43] [Rank 0] step:4821/10000 train_time:1129672ms step_avg:234.32ms +[2025-07-17 20:52:48] [Rank 0] step:4841/10000 train_time:1134615ms step_avg:234.38ms +[2025-07-17 20:52:48] [Rank 0] step:4841/10000 train_time:1134615ms step_avg:234.38ms +[2025-07-17 20:52:52] [Rank 0] step:4861/10000 train_time:1139457ms step_avg:234.41ms +[2025-07-17 20:52:52] [Rank 0] step:4861/10000 train_time:1139457ms step_avg:234.41ms +[2025-07-17 20:53:01] [Rank 0] PRINT: step:4875/10000 val_loss:5.3127 train_time:1143331ms step_avg:234.53ms +[2025-07-17 20:53:01] [Rank 0] PRINT: step:4875/10000 val_loss:5.3127 train_time:1143331ms step_avg:234.53ms +[2025-07-17 20:53:02] [Rank 0] step:4881/10000 train_time:1144293ms step_avg:234.44ms +[2025-07-17 20:53:02] [Rank 0] step:4881/10000 train_time:1144293ms step_avg:234.44ms +[2025-07-17 20:53:07] [Rank 0] step:4901/10000 train_time:1149133ms step_avg:234.47ms +[2025-07-17 20:53:07] [Rank 0] step:4901/10000 train_time:1149133ms step_avg:234.47ms +[2025-07-17 20:53:12] [Rank 0] step:4921/10000 train_time:1153968ms step_avg:234.50ms +[2025-07-17 20:53:12] [Rank 0] step:4921/10000 train_time:1153968ms step_avg:234.50ms +[2025-07-17 20:53:16] [Rank 0] step:4941/10000 train_time:1158809ms step_avg:234.53ms +[2025-07-17 20:53:16] [Rank 0] step:4941/10000 train_time:1158809ms step_avg:234.53ms +[2025-07-17 20:53:21] [Rank 0] step:4961/10000 train_time:1163645ms step_avg:234.56ms +[2025-07-17 20:53:21] [Rank 0] step:4961/10000 train_time:1163645ms step_avg:234.56ms +[2025-07-17 20:53:26] [Rank 0] step:4981/10000 train_time:1168483ms step_avg:234.59ms +[2025-07-17 20:53:26] [Rank 0] step:4981/10000 train_time:1168483ms step_avg:234.59ms +[2025-07-17 20:53:35] [Rank 0] PRINT: step:5000/10000 val_loss:5.2364 
train_time:1173567ms step_avg:234.71ms +[2025-07-17 20:53:35] [Rank 0] PRINT: step:5000/10000 val_loss:5.2364 train_time:1173567ms step_avg:234.71ms +[2025-07-17 20:53:35] [Rank 0] step:5001/10000 train_time:1173582ms step_avg:234.67ms +[2025-07-17 20:53:35] [Rank 0] step:5001/10000 train_time:1173582ms step_avg:234.67ms +[2025-07-17 20:53:40] [Rank 0] step:5021/10000 train_time:1178156ms step_avg:234.65ms +[2025-07-17 20:53:40] [Rank 0] step:5021/10000 train_time:1178156ms step_avg:234.65ms +[2025-07-17 20:53:45] [Rank 0] step:5041/10000 train_time:1183100ms step_avg:234.70ms +[2025-07-17 20:53:45] [Rank 0] step:5041/10000 train_time:1183100ms step_avg:234.70ms +[2025-07-17 20:53:50] [Rank 0] step:5061/10000 train_time:1187938ms step_avg:234.72ms +[2025-07-17 20:53:50] [Rank 0] step:5061/10000 train_time:1187938ms step_avg:234.72ms +[2025-07-17 20:53:55] [Rank 0] step:5081/10000 train_time:1192891ms step_avg:234.77ms +[2025-07-17 20:53:55] [Rank 0] step:5081/10000 train_time:1192891ms step_avg:234.77ms +[2025-07-17 20:54:00] [Rank 0] step:5101/10000 train_time:1197901ms step_avg:234.84ms +[2025-07-17 20:54:00] [Rank 0] step:5101/10000 train_time:1197901ms step_avg:234.84ms +[2025-07-17 20:54:05] [Rank 0] step:5121/10000 train_time:1202733ms step_avg:234.86ms +[2025-07-17 20:54:05] [Rank 0] step:5121/10000 train_time:1202733ms step_avg:234.86ms +[2025-07-17 20:54:10] [Rank 0] PRINT: step:5125/10000 val_loss:5.4893 train_time:1204189ms step_avg:234.96ms +[2025-07-17 20:54:10] [Rank 0] PRINT: step:5125/10000 val_loss:5.4893 train_time:1204189ms step_avg:234.96ms +[2025-07-17 20:54:14] [Rank 0] step:5141/10000 train_time:1207569ms step_avg:234.89ms +[2025-07-17 20:54:14] [Rank 0] step:5141/10000 train_time:1207569ms step_avg:234.89ms +[2025-07-17 20:54:19] [Rank 0] step:5161/10000 train_time:1212405ms step_avg:234.92ms +[2025-07-17 20:54:19] [Rank 0] step:5161/10000 train_time:1212405ms step_avg:234.92ms +[2025-07-17 20:54:24] [Rank 0] step:5181/10000 
train_time:1217244ms step_avg:234.94ms +[2025-07-17 20:54:24] [Rank 0] step:5181/10000 train_time:1217244ms step_avg:234.94ms +[2025-07-17 20:54:29] [Rank 0] step:5201/10000 train_time:1222127ms step_avg:234.98ms +[2025-07-17 20:54:29] [Rank 0] step:5201/10000 train_time:1222127ms step_avg:234.98ms +[2025-07-17 20:54:34] [Rank 0] step:5221/10000 train_time:1227040ms step_avg:235.02ms +[2025-07-17 20:54:34] [Rank 0] step:5221/10000 train_time:1227040ms step_avg:235.02ms +[2025-07-17 20:54:38] [Rank 0] step:5241/10000 train_time:1231946ms step_avg:235.06ms +[2025-07-17 20:54:38] [Rank 0] step:5241/10000 train_time:1231946ms step_avg:235.06ms +[2025-07-17 20:54:45] [Rank 0] PRINT: step:5250/10000 val_loss:5.5415 train_time:1234647ms step_avg:235.17ms +[2025-07-17 20:54:45] [Rank 0] PRINT: step:5250/10000 val_loss:5.5415 train_time:1234647ms step_avg:235.17ms +[2025-07-17 20:54:48] [Rank 0] step:5261/10000 train_time:1236846ms step_avg:235.10ms +[2025-07-17 20:54:48] [Rank 0] step:5261/10000 train_time:1236846ms step_avg:235.10ms +[2025-07-17 20:54:53] [Rank 0] step:5281/10000 train_time:1241747ms step_avg:235.13ms +[2025-07-17 20:54:53] [Rank 0] step:5281/10000 train_time:1241747ms step_avg:235.13ms +[2025-07-17 20:54:57] [Rank 0] step:5301/10000 train_time:1246644ms step_avg:235.17ms +[2025-07-17 20:54:57] [Rank 0] step:5301/10000 train_time:1246644ms step_avg:235.17ms +[2025-07-17 20:55:02] [Rank 0] step:5321/10000 train_time:1251539ms step_avg:235.21ms +[2025-07-17 20:55:02] [Rank 0] step:5321/10000 train_time:1251539ms step_avg:235.21ms +[2025-07-17 20:55:07] [Rank 0] step:5341/10000 train_time:1256443ms step_avg:235.24ms +[2025-07-17 20:55:07] [Rank 0] step:5341/10000 train_time:1256443ms step_avg:235.24ms +[2025-07-17 20:55:12] [Rank 0] step:5361/10000 train_time:1261341ms step_avg:235.28ms +[2025-07-17 20:55:12] [Rank 0] step:5361/10000 train_time:1261341ms step_avg:235.28ms +[2025-07-17 20:55:20] [Rank 0] PRINT: step:5375/10000 val_loss:5.1057 
train_time:1265266ms step_avg:235.40ms +[2025-07-17 20:55:20] [Rank 0] PRINT: step:5375/10000 val_loss:5.1057 train_time:1265266ms step_avg:235.40ms +[2025-07-17 20:55:22] [Rank 0] step:5381/10000 train_time:1266241ms step_avg:235.32ms +[2025-07-17 20:55:22] [Rank 0] step:5381/10000 train_time:1266241ms step_avg:235.32ms +[2025-07-17 20:55:27] [Rank 0] step:5401/10000 train_time:1271143ms step_avg:235.35ms +[2025-07-17 20:55:27] [Rank 0] step:5401/10000 train_time:1271143ms step_avg:235.35ms +[2025-07-17 20:55:32] [Rank 0] step:5421/10000 train_time:1276048ms step_avg:235.39ms +[2025-07-17 20:55:32] [Rank 0] step:5421/10000 train_time:1276048ms step_avg:235.39ms +[2025-07-17 20:55:36] [Rank 0] step:5441/10000 train_time:1280945ms step_avg:235.42ms +[2025-07-17 20:55:36] [Rank 0] step:5441/10000 train_time:1280945ms step_avg:235.42ms +[2025-07-17 20:55:41] [Rank 0] step:5461/10000 train_time:1285856ms step_avg:235.46ms +[2025-07-17 20:55:41] [Rank 0] step:5461/10000 train_time:1285856ms step_avg:235.46ms +[2025-07-17 20:55:46] [Rank 0] step:5481/10000 train_time:1290767ms step_avg:235.50ms +[2025-07-17 20:55:46] [Rank 0] step:5481/10000 train_time:1290767ms step_avg:235.50ms +[2025-07-17 20:55:56] [Rank 0] PRINT: step:5500/10000 val_loss:5.0338 train_time:1295920ms step_avg:235.62ms +[2025-07-17 20:55:56] [Rank 0] PRINT: step:5500/10000 val_loss:5.0338 train_time:1295920ms step_avg:235.62ms +[2025-07-17 20:55:56] [Rank 0] step:5501/10000 train_time:1295935ms step_avg:235.58ms +[2025-07-17 20:55:56] [Rank 0] step:5501/10000 train_time:1295935ms step_avg:235.58ms +[2025-07-17 20:56:01] [Rank 0] step:5521/10000 train_time:1300570ms step_avg:235.57ms +[2025-07-17 20:56:01] [Rank 0] step:5521/10000 train_time:1300570ms step_avg:235.57ms +[2025-07-17 20:56:06] [Rank 0] step:5541/10000 train_time:1305481ms step_avg:235.60ms +[2025-07-17 20:56:06] [Rank 0] step:5541/10000 train_time:1305481ms step_avg:235.60ms +[2025-07-17 20:56:11] [Rank 0] step:5561/10000 
train_time:1310391ms step_avg:235.64ms +[2025-07-17 20:56:11] [Rank 0] step:5561/10000 train_time:1310391ms step_avg:235.64ms +[2025-07-17 20:56:15] [Rank 0] step:5581/10000 train_time:1315295ms step_avg:235.67ms +[2025-07-17 20:56:15] [Rank 0] step:5581/10000 train_time:1315295ms step_avg:235.67ms +[2025-07-17 20:56:21] [Rank 0] step:5601/10000 train_time:1320477ms step_avg:235.76ms +[2025-07-17 20:56:21] [Rank 0] step:5601/10000 train_time:1320477ms step_avg:235.76ms +[2025-07-17 20:56:26] [Rank 0] step:5621/10000 train_time:1325386ms step_avg:235.79ms +[2025-07-17 20:56:26] [Rank 0] step:5621/10000 train_time:1325386ms step_avg:235.79ms +[2025-07-17 20:56:31] [Rank 0] PRINT: step:5625/10000 val_loss:5.3961 train_time:1326862ms step_avg:235.89ms +[2025-07-17 20:56:31] [Rank 0] PRINT: step:5625/10000 val_loss:5.3961 train_time:1326862ms step_avg:235.89ms +[2025-07-17 20:56:35] [Rank 0] step:5641/10000 train_time:1330289ms step_avg:235.83ms +[2025-07-17 20:56:35] [Rank 0] step:5641/10000 train_time:1330289ms step_avg:235.83ms +[2025-07-17 20:56:40] [Rank 0] step:5661/10000 train_time:1335198ms step_avg:235.86ms +[2025-07-17 20:56:40] [Rank 0] step:5661/10000 train_time:1335198ms step_avg:235.86ms +[2025-07-17 20:56:45] [Rank 0] step:5681/10000 train_time:1340111ms step_avg:235.89ms +[2025-07-17 20:56:45] [Rank 0] step:5681/10000 train_time:1340111ms step_avg:235.89ms +[2025-07-17 20:56:50] [Rank 0] step:5701/10000 train_time:1345015ms step_avg:235.93ms +[2025-07-17 20:56:50] [Rank 0] step:5701/10000 train_time:1345015ms step_avg:235.93ms +[2025-07-17 20:56:55] [Rank 0] step:5721/10000 train_time:1349918ms step_avg:235.96ms +[2025-07-17 20:56:55] [Rank 0] step:5721/10000 train_time:1349918ms step_avg:235.96ms +[2025-07-17 20:57:00] [Rank 0] step:5741/10000 train_time:1354831ms step_avg:235.99ms +[2025-07-17 20:57:00] [Rank 0] step:5741/10000 train_time:1354831ms step_avg:235.99ms +[2025-07-17 20:57:07] [Rank 0] PRINT: step:5750/10000 val_loss:6.4522 
train_time:1357535ms step_avg:236.09ms +[2025-07-17 20:57:07] [Rank 0] PRINT: step:5750/10000 val_loss:6.4522 train_time:1357535ms step_avg:236.09ms +[2025-07-17 20:57:09] [Rank 0] step:5761/10000 train_time:1359740ms step_avg:236.02ms +[2025-07-17 20:57:09] [Rank 0] step:5761/10000 train_time:1359740ms step_avg:236.02ms +[2025-07-17 20:57:14] [Rank 0] step:5781/10000 train_time:1364646ms step_avg:236.06ms +[2025-07-17 20:57:14] [Rank 0] step:5781/10000 train_time:1364646ms step_avg:236.06ms +[2025-07-17 20:57:19] [Rank 0] step:5801/10000 train_time:1369549ms step_avg:236.09ms +[2025-07-17 20:57:19] [Rank 0] step:5801/10000 train_time:1369549ms step_avg:236.09ms +[2025-07-17 20:57:24] [Rank 0] step:5821/10000 train_time:1374460ms step_avg:236.12ms +[2025-07-17 20:57:24] [Rank 0] step:5821/10000 train_time:1374460ms step_avg:236.12ms +[2025-07-17 20:57:29] [Rank 0] step:5841/10000 train_time:1379375ms step_avg:236.15ms +[2025-07-17 20:57:29] [Rank 0] step:5841/10000 train_time:1379375ms step_avg:236.15ms +[2025-07-17 20:57:34] [Rank 0] step:5861/10000 train_time:1384283ms step_avg:236.19ms +[2025-07-17 20:57:34] [Rank 0] step:5861/10000 train_time:1384283ms step_avg:236.19ms +[2025-07-17 20:57:42] [Rank 0] PRINT: step:5875/10000 val_loss:5.7700 train_time:1388207ms step_avg:236.29ms +[2025-07-17 20:57:42] [Rank 0] PRINT: step:5875/10000 val_loss:5.7700 train_time:1388207ms step_avg:236.29ms +[2025-07-17 20:57:43] [Rank 0] step:5881/10000 train_time:1389184ms step_avg:236.22ms +[2025-07-17 20:57:43] [Rank 0] step:5881/10000 train_time:1389184ms step_avg:236.22ms +[2025-07-17 20:57:48] [Rank 0] step:5901/10000 train_time:1394102ms step_avg:236.25ms +[2025-07-17 20:57:48] [Rank 0] step:5901/10000 train_time:1394102ms step_avg:236.25ms +[2025-07-17 20:57:53] [Rank 0] step:5921/10000 train_time:1399009ms step_avg:236.28ms +[2025-07-17 20:57:53] [Rank 0] step:5921/10000 train_time:1399009ms step_avg:236.28ms +[2025-07-17 20:57:58] [Rank 0] step:5941/10000 
train_time:1403926ms step_avg:236.31ms +[2025-07-17 20:57:58] [Rank 0] step:5941/10000 train_time:1403926ms step_avg:236.31ms +[2025-07-17 20:58:03] [Rank 0] step:5961/10000 train_time:1408840ms step_avg:236.34ms +[2025-07-17 20:58:03] [Rank 0] step:5961/10000 train_time:1408840ms step_avg:236.34ms +[2025-07-17 20:58:08] [Rank 0] step:5981/10000 train_time:1413751ms step_avg:236.37ms +[2025-07-17 20:58:08] [Rank 0] step:5981/10000 train_time:1413751ms step_avg:236.37ms +[2025-07-17 20:58:17] [Rank 0] PRINT: step:6000/10000 val_loss:5.4819 train_time:1418915ms step_avg:236.49ms +[2025-07-17 20:58:17] [Rank 0] PRINT: step:6000/10000 val_loss:5.4819 train_time:1418915ms step_avg:236.49ms +[2025-07-17 20:58:18] [Rank 0] step:6001/10000 train_time:1418930ms step_avg:236.45ms +[2025-07-17 20:58:18] [Rank 0] step:6001/10000 train_time:1418930ms step_avg:236.45ms +[2025-07-17 20:58:23] [Rank 0] step:6021/10000 train_time:1423573ms step_avg:236.43ms +[2025-07-17 20:58:23] [Rank 0] step:6021/10000 train_time:1423573ms step_avg:236.43ms +[2025-07-17 20:58:27] [Rank 0] step:6041/10000 train_time:1428475ms step_avg:236.46ms +[2025-07-17 20:58:27] [Rank 0] step:6041/10000 train_time:1428475ms step_avg:236.46ms +[2025-07-17 20:58:32] [Rank 0] step:6061/10000 train_time:1433376ms step_avg:236.49ms +[2025-07-17 20:58:32] [Rank 0] step:6061/10000 train_time:1433376ms step_avg:236.49ms +[2025-07-17 20:58:37] [Rank 0] step:6081/10000 train_time:1438283ms step_avg:236.52ms +[2025-07-17 20:58:37] [Rank 0] step:6081/10000 train_time:1438283ms step_avg:236.52ms +[2025-07-17 20:58:42] [Rank 0] step:6101/10000 train_time:1443456ms step_avg:236.59ms +[2025-07-17 20:58:42] [Rank 0] step:6101/10000 train_time:1443456ms step_avg:236.59ms +[2025-07-17 20:58:47] [Rank 0] step:6121/10000 train_time:1448369ms step_avg:236.62ms +[2025-07-17 20:58:47] [Rank 0] step:6121/10000 train_time:1448369ms step_avg:236.62ms +[2025-07-17 20:58:53] [Rank 0] PRINT: step:6125/10000 val_loss:5.4999 
train_time:1449847ms step_avg:236.71ms +[2025-07-17 20:58:53] [Rank 0] PRINT: step:6125/10000 val_loss:5.4999 train_time:1449847ms step_avg:236.71ms +[2025-07-17 20:58:57] [Rank 0] step:6141/10000 train_time:1453277ms step_avg:236.65ms +[2025-07-17 20:58:57] [Rank 0] step:6141/10000 train_time:1453277ms step_avg:236.65ms +[2025-07-17 20:59:02] [Rank 0] step:6161/10000 train_time:1458189ms step_avg:236.68ms +[2025-07-17 20:59:02] [Rank 0] step:6161/10000 train_time:1458189ms step_avg:236.68ms +[2025-07-17 20:59:07] [Rank 0] step:6181/10000 train_time:1463108ms step_avg:236.71ms +[2025-07-17 20:59:07] [Rank 0] step:6181/10000 train_time:1463108ms step_avg:236.71ms +[2025-07-17 20:59:12] [Rank 0] step:6201/10000 train_time:1468034ms step_avg:236.74ms +[2025-07-17 20:59:12] [Rank 0] step:6201/10000 train_time:1468034ms step_avg:236.74ms +[2025-07-17 20:59:17] [Rank 0] step:6221/10000 train_time:1472951ms step_avg:236.77ms +[2025-07-17 20:59:17] [Rank 0] step:6221/10000 train_time:1472951ms step_avg:236.77ms +[2025-07-17 20:59:21] [Rank 0] step:6241/10000 train_time:1477871ms step_avg:236.80ms +[2025-07-17 20:59:21] [Rank 0] step:6241/10000 train_time:1477871ms step_avg:236.80ms +[2025-07-17 20:59:28] [Rank 0] PRINT: step:6250/10000 val_loss:5.5464 train_time:1480582ms step_avg:236.89ms +[2025-07-17 20:59:28] [Rank 0] PRINT: step:6250/10000 val_loss:5.5464 train_time:1480582ms step_avg:236.89ms +[2025-07-17 20:59:31] [Rank 0] step:6261/10000 train_time:1482786ms step_avg:236.83ms +[2025-07-17 20:59:31] [Rank 0] step:6261/10000 train_time:1482786ms step_avg:236.83ms +[2025-07-17 20:59:36] [Rank 0] step:6281/10000 train_time:1487714ms step_avg:236.86ms +[2025-07-17 20:59:36] [Rank 0] step:6281/10000 train_time:1487714ms step_avg:236.86ms +[2025-07-17 20:59:41] [Rank 0] step:6301/10000 train_time:1492633ms step_avg:236.89ms +[2025-07-17 20:59:41] [Rank 0] step:6301/10000 train_time:1492633ms step_avg:236.89ms +[2025-07-17 20:59:46] [Rank 0] step:6321/10000 
train_time:1497558ms step_avg:236.92ms +[2025-07-17 20:59:46] [Rank 0] step:6321/10000 train_time:1497558ms step_avg:236.92ms +[2025-07-17 20:59:51] [Rank 0] step:6341/10000 train_time:1502490ms step_avg:236.95ms +[2025-07-17 20:59:51] [Rank 0] step:6341/10000 train_time:1502490ms step_avg:236.95ms +[2025-07-17 20:59:56] [Rank 0] step:6361/10000 train_time:1507409ms step_avg:236.98ms +[2025-07-17 20:59:56] [Rank 0] step:6361/10000 train_time:1507409ms step_avg:236.98ms +[2025-07-17 21:00:04] [Rank 0] PRINT: step:6375/10000 val_loss:5.8359 train_time:1511349ms step_avg:237.07ms +[2025-07-17 21:00:04] [Rank 0] PRINT: step:6375/10000 val_loss:5.8359 train_time:1511349ms step_avg:237.07ms +[2025-07-17 21:00:05] [Rank 0] step:6381/10000 train_time:1512330ms step_avg:237.01ms +[2025-07-17 21:00:05] [Rank 0] step:6381/10000 train_time:1512330ms step_avg:237.01ms +[2025-07-17 21:00:10] [Rank 0] step:6401/10000 train_time:1517245ms step_avg:237.03ms +[2025-07-17 21:00:10] [Rank 0] step:6401/10000 train_time:1517245ms step_avg:237.03ms +[2025-07-17 21:00:15] [Rank 0] step:6421/10000 train_time:1522165ms step_avg:237.06ms +[2025-07-17 21:00:15] [Rank 0] step:6421/10000 train_time:1522165ms step_avg:237.06ms +[2025-07-17 21:00:20] [Rank 0] step:6441/10000 train_time:1527088ms step_avg:237.09ms +[2025-07-17 21:00:20] [Rank 0] step:6441/10000 train_time:1527088ms step_avg:237.09ms +[2025-07-17 21:00:25] [Rank 0] step:6461/10000 train_time:1532021ms step_avg:237.12ms +[2025-07-17 21:00:25] [Rank 0] step:6461/10000 train_time:1532021ms step_avg:237.12ms +[2025-07-17 21:00:30] [Rank 0] step:6481/10000 train_time:1536943ms step_avg:237.15ms +[2025-07-17 21:00:30] [Rank 0] step:6481/10000 train_time:1536943ms step_avg:237.15ms +[2025-07-17 21:00:39] [Rank 0] PRINT: step:6500/10000 val_loss:5.5929 train_time:1542113ms step_avg:237.25ms +[2025-07-17 21:00:39] [Rank 0] PRINT: step:6500/10000 val_loss:5.5929 train_time:1542113ms step_avg:237.25ms +[2025-07-17 21:00:40] [Rank 0] 
step:6501/10000 train_time:1542127ms step_avg:237.21ms +[2025-07-17 21:00:40] [Rank 0] step:6501/10000 train_time:1542127ms step_avg:237.21ms +[2025-07-17 21:00:44] [Rank 0] step:6521/10000 train_time:1546780ms step_avg:237.20ms +[2025-07-17 21:00:44] [Rank 0] step:6521/10000 train_time:1546780ms step_avg:237.20ms +[2025-07-17 21:00:49] [Rank 0] step:6541/10000 train_time:1551705ms step_avg:237.23ms +[2025-07-17 21:00:49] [Rank 0] step:6541/10000 train_time:1551705ms step_avg:237.23ms +[2025-07-17 21:00:54] [Rank 0] step:6561/10000 train_time:1556637ms step_avg:237.26ms +[2025-07-17 21:00:54] [Rank 0] step:6561/10000 train_time:1556637ms step_avg:237.26ms +[2025-07-17 21:00:59] [Rank 0] step:6581/10000 train_time:1561564ms step_avg:237.28ms +[2025-07-17 21:00:59] [Rank 0] step:6581/10000 train_time:1561564ms step_avg:237.28ms +[2025-07-17 21:01:04] [Rank 0] step:6601/10000 train_time:1566496ms step_avg:237.31ms +[2025-07-17 21:01:04] [Rank 0] step:6601/10000 train_time:1566496ms step_avg:237.31ms +[2025-07-17 21:01:09] [Rank 0] step:6621/10000 train_time:1571681ms step_avg:237.38ms +[2025-07-17 21:01:09] [Rank 0] step:6621/10000 train_time:1571681ms step_avg:237.38ms +[2025-07-17 21:01:15] [Rank 0] PRINT: step:6625/10000 val_loss:5.5743 train_time:1573161ms step_avg:237.46ms +[2025-07-17 21:01:15] [Rank 0] PRINT: step:6625/10000 val_loss:5.5743 train_time:1573161ms step_avg:237.46ms +[2025-07-17 21:01:19] [Rank 0] step:6641/10000 train_time:1576591ms step_avg:237.40ms +[2025-07-17 21:01:19] [Rank 0] step:6641/10000 train_time:1576591ms step_avg:237.40ms +[2025-07-17 21:01:24] [Rank 0] step:6661/10000 train_time:1581507ms step_avg:237.43ms +[2025-07-17 21:01:24] [Rank 0] step:6661/10000 train_time:1581507ms step_avg:237.43ms +[2025-07-17 21:01:29] [Rank 0] step:6681/10000 train_time:1586470ms step_avg:237.46ms +[2025-07-17 21:01:29] [Rank 0] step:6681/10000 train_time:1586470ms step_avg:237.46ms +[2025-07-17 21:01:34] [Rank 0] step:6701/10000 train_time:1591451ms 
step_avg:237.49ms +[2025-07-17 21:01:34] [Rank 0] step:6701/10000 train_time:1591451ms step_avg:237.49ms +[2025-07-17 21:01:39] [Rank 0] step:6721/10000 train_time:1596450ms step_avg:237.53ms +[2025-07-17 21:01:39] [Rank 0] step:6721/10000 train_time:1596450ms step_avg:237.53ms +[2025-07-17 21:01:44] [Rank 0] step:6741/10000 train_time:1601449ms step_avg:237.57ms +[2025-07-17 21:01:44] [Rank 0] step:6741/10000 train_time:1601449ms step_avg:237.57ms +[2025-07-17 21:01:51] [Rank 0] PRINT: step:6750/10000 val_loss:5.5225 train_time:1604189ms step_avg:237.66ms +[2025-07-17 21:01:51] [Rank 0] PRINT: step:6750/10000 val_loss:5.5225 train_time:1604189ms step_avg:237.66ms +[2025-07-17 21:01:54] [Rank 0] step:6761/10000 train_time:1606427ms step_avg:237.60ms +[2025-07-17 21:01:54] [Rank 0] step:6761/10000 train_time:1606427ms step_avg:237.60ms +[2025-07-17 21:01:59] [Rank 0] step:6781/10000 train_time:1611410ms step_avg:237.64ms +[2025-07-17 21:01:59] [Rank 0] step:6781/10000 train_time:1611410ms step_avg:237.64ms +[2025-07-17 21:02:04] [Rank 0] step:6801/10000 train_time:1616399ms step_avg:237.67ms +[2025-07-17 21:02:04] [Rank 0] step:6801/10000 train_time:1616399ms step_avg:237.67ms +[2025-07-17 21:02:09] [Rank 0] step:6821/10000 train_time:1621381ms step_avg:237.70ms +[2025-07-17 21:02:09] [Rank 0] step:6821/10000 train_time:1621381ms step_avg:237.70ms +[2025-07-17 21:02:13] [Rank 0] step:6841/10000 train_time:1626363ms step_avg:237.74ms +[2025-07-17 21:02:13] [Rank 0] step:6841/10000 train_time:1626363ms step_avg:237.74ms +[2025-07-17 21:02:18] [Rank 0] step:6861/10000 train_time:1631340ms step_avg:237.77ms +[2025-07-17 21:02:18] [Rank 0] step:6861/10000 train_time:1631340ms step_avg:237.77ms +[2025-07-17 21:02:27] [Rank 0] PRINT: step:6875/10000 val_loss:5.5259 train_time:1635322ms step_avg:237.87ms +[2025-07-17 21:02:27] [Rank 0] PRINT: step:6875/10000 val_loss:5.5259 train_time:1635322ms step_avg:237.87ms +[2025-07-17 21:02:28] [Rank 0] step:6881/10000 
train_time:1636313ms step_avg:237.80ms +[2025-07-17 21:02:28] [Rank 0] step:6881/10000 train_time:1636313ms step_avg:237.80ms +[2025-07-17 21:02:33] [Rank 0] step:6901/10000 train_time:1641291ms step_avg:237.83ms +[2025-07-17 21:02:33] [Rank 0] step:6901/10000 train_time:1641291ms step_avg:237.83ms +[2025-07-17 21:02:38] [Rank 0] step:6921/10000 train_time:1646271ms step_avg:237.87ms +[2025-07-17 21:02:38] [Rank 0] step:6921/10000 train_time:1646271ms step_avg:237.87ms +[2025-07-17 21:02:43] [Rank 0] step:6941/10000 train_time:1651262ms step_avg:237.90ms +[2025-07-17 21:02:43] [Rank 0] step:6941/10000 train_time:1651262ms step_avg:237.90ms +[2025-07-17 21:02:48] [Rank 0] step:6961/10000 train_time:1656248ms step_avg:237.93ms +[2025-07-17 21:02:48] [Rank 0] step:6961/10000 train_time:1656248ms step_avg:237.93ms +[2025-07-17 21:02:53] [Rank 0] step:6981/10000 train_time:1661239ms step_avg:237.97ms +[2025-07-17 21:02:53] [Rank 0] step:6981/10000 train_time:1661239ms step_avg:237.97ms +[2025-07-17 21:03:03] [Rank 0] PRINT: step:7000/10000 val_loss:5.4346 train_time:1666476ms step_avg:238.07ms +[2025-07-17 21:03:03] [Rank 0] PRINT: step:7000/10000 val_loss:5.4346 train_time:1666476ms step_avg:238.07ms +[2025-07-17 21:03:03] [Rank 0] step:7001/10000 train_time:1666490ms step_avg:238.04ms +[2025-07-17 21:03:03] [Rank 0] step:7001/10000 train_time:1666490ms step_avg:238.04ms +[2025-07-17 21:03:08] [Rank 0] step:7021/10000 train_time:1671214ms step_avg:238.03ms +[2025-07-17 21:03:08] [Rank 0] step:7021/10000 train_time:1671214ms step_avg:238.03ms +[2025-07-17 21:03:13] [Rank 0] step:7041/10000 train_time:1676201ms step_avg:238.06ms +[2025-07-17 21:03:13] [Rank 0] step:7041/10000 train_time:1676201ms step_avg:238.06ms +[2025-07-17 21:03:18] [Rank 0] step:7061/10000 train_time:1681188ms step_avg:238.09ms +[2025-07-17 21:03:18] [Rank 0] step:7061/10000 train_time:1681188ms step_avg:238.09ms +[2025-07-17 21:03:23] [Rank 0] step:7081/10000 train_time:1686181ms step_avg:238.13ms 
+[2025-07-17 21:03:23] [Rank 0] step:7081/10000 train_time:1686181ms step_avg:238.13ms +[2025-07-17 21:03:28] [Rank 0] step:7101/10000 train_time:1691163ms step_avg:238.16ms +[2025-07-17 21:03:28] [Rank 0] step:7101/10000 train_time:1691163ms step_avg:238.16ms +[2025-07-17 21:03:33] [Rank 0] step:7121/10000 train_time:1696392ms step_avg:238.22ms +[2025-07-17 21:03:33] [Rank 0] step:7121/10000 train_time:1696392ms step_avg:238.22ms +[2025-07-17 21:03:39] [Rank 0] PRINT: step:7125/10000 val_loss:5.7076 train_time:1697889ms step_avg:238.30ms +[2025-07-17 21:03:39] [Rank 0] PRINT: step:7125/10000 val_loss:5.7076 train_time:1697889ms step_avg:238.30ms +[2025-07-17 21:03:43] [Rank 0] step:7141/10000 train_time:1701378ms step_avg:238.25ms +[2025-07-17 21:03:43] [Rank 0] step:7141/10000 train_time:1701378ms step_avg:238.25ms +[2025-07-17 21:03:48] [Rank 0] step:7161/10000 train_time:1706370ms step_avg:238.29ms +[2025-07-17 21:03:48] [Rank 0] step:7161/10000 train_time:1706370ms step_avg:238.29ms +[2025-07-17 21:03:53] [Rank 0] step:7181/10000 train_time:1711357ms step_avg:238.32ms +[2025-07-17 21:03:53] [Rank 0] step:7181/10000 train_time:1711357ms step_avg:238.32ms +[2025-07-17 21:03:58] [Rank 0] step:7201/10000 train_time:1716357ms step_avg:238.35ms +[2025-07-17 21:03:58] [Rank 0] step:7201/10000 train_time:1716357ms step_avg:238.35ms +[2025-07-17 21:04:03] [Rank 0] step:7221/10000 train_time:1721344ms step_avg:238.38ms +[2025-07-17 21:04:03] [Rank 0] step:7221/10000 train_time:1721344ms step_avg:238.38ms +[2025-07-17 21:04:08] [Rank 0] step:7241/10000 train_time:1726325ms step_avg:238.41ms +[2025-07-17 21:04:08] [Rank 0] step:7241/10000 train_time:1726325ms step_avg:238.41ms +[2025-07-17 21:04:15] [Rank 0] PRINT: step:7250/10000 val_loss:5.6275 train_time:1729075ms step_avg:238.49ms +[2025-07-17 21:04:15] [Rank 0] PRINT: step:7250/10000 val_loss:5.6275 train_time:1729075ms step_avg:238.49ms +[2025-07-17 21:04:17] [Rank 0] step:7261/10000 train_time:1731304ms 
step_avg:238.44ms +[2025-07-17 21:04:17] [Rank 0] step:7261/10000 train_time:1731304ms step_avg:238.44ms +[2025-07-17 21:04:22] [Rank 0] step:7281/10000 train_time:1736283ms step_avg:238.47ms +[2025-07-17 21:04:22] [Rank 0] step:7281/10000 train_time:1736283ms step_avg:238.47ms +[2025-07-17 21:04:27] [Rank 0] step:7301/10000 train_time:1741266ms step_avg:238.50ms +[2025-07-17 21:04:27] [Rank 0] step:7301/10000 train_time:1741266ms step_avg:238.50ms +[2025-07-17 21:04:32] [Rank 0] step:7321/10000 train_time:1746260ms step_avg:238.53ms +[2025-07-17 21:04:32] [Rank 0] step:7321/10000 train_time:1746260ms step_avg:238.53ms +[2025-07-17 21:04:37] [Rank 0] step:7341/10000 train_time:1751243ms step_avg:238.56ms +[2025-07-17 21:04:37] [Rank 0] step:7341/10000 train_time:1751243ms step_avg:238.56ms +[2025-07-17 21:04:42] [Rank 0] step:7361/10000 train_time:1756234ms step_avg:238.59ms +[2025-07-17 21:04:42] [Rank 0] step:7361/10000 train_time:1756234ms step_avg:238.59ms +[2025-07-17 21:04:51] [Rank 0] PRINT: step:7375/10000 val_loss:5.5797 train_time:1760229ms step_avg:238.68ms +[2025-07-17 21:04:51] [Rank 0] PRINT: step:7375/10000 val_loss:5.5797 train_time:1760229ms step_avg:238.68ms +[2025-07-17 21:04:52] [Rank 0] step:7381/10000 train_time:1761221ms step_avg:238.62ms +[2025-07-17 21:04:52] [Rank 0] step:7381/10000 train_time:1761221ms step_avg:238.62ms +[2025-07-17 21:04:57] [Rank 0] step:7401/10000 train_time:1766208ms step_avg:238.64ms +[2025-07-17 21:04:57] [Rank 0] step:7401/10000 train_time:1766208ms step_avg:238.64ms +[2025-07-17 21:05:02] [Rank 0] step:7421/10000 train_time:1771191ms step_avg:238.67ms +[2025-07-17 21:05:02] [Rank 0] step:7421/10000 train_time:1771191ms step_avg:238.67ms +[2025-07-17 21:05:07] [Rank 0] step:7441/10000 train_time:1776190ms step_avg:238.70ms +[2025-07-17 21:05:07] [Rank 0] step:7441/10000 train_time:1776190ms step_avg:238.70ms +[2025-07-17 21:05:12] [Rank 0] step:7461/10000 train_time:1781172ms step_avg:238.73ms +[2025-07-17 
21:05:12] [Rank 0] step:7461/10000 train_time:1781172ms step_avg:238.73ms +[2025-07-17 21:05:17] [Rank 0] step:7481/10000 train_time:1786168ms step_avg:238.76ms +[2025-07-17 21:05:17] [Rank 0] step:7481/10000 train_time:1786168ms step_avg:238.76ms +[2025-07-17 21:05:27] [Rank 0] PRINT: step:7500/10000 val_loss:5.5127 train_time:1791424ms step_avg:238.86ms +[2025-07-17 21:05:27] [Rank 0] PRINT: step:7500/10000 val_loss:5.5127 train_time:1791424ms step_avg:238.86ms +[2025-07-17 21:05:27] [Rank 0] step:7501/10000 train_time:1791439ms step_avg:238.83ms +[2025-07-17 21:05:27] [Rank 0] step:7501/10000 train_time:1791439ms step_avg:238.83ms +[2025-07-17 21:05:32] [Rank 0] step:7521/10000 train_time:1796167ms step_avg:238.82ms +[2025-07-17 21:05:32] [Rank 0] step:7521/10000 train_time:1796167ms step_avg:238.82ms +[2025-07-17 21:05:37] [Rank 0] step:7541/10000 train_time:1801158ms step_avg:238.85ms +[2025-07-17 21:05:37] [Rank 0] step:7541/10000 train_time:1801158ms step_avg:238.85ms +[2025-07-17 21:05:42] [Rank 0] step:7561/10000 train_time:1806146ms step_avg:238.88ms +[2025-07-17 21:05:42] [Rank 0] step:7561/10000 train_time:1806146ms step_avg:238.88ms +[2025-07-17 21:05:47] [Rank 0] step:7581/10000 train_time:1811148ms step_avg:238.91ms +[2025-07-17 21:05:47] [Rank 0] step:7581/10000 train_time:1811148ms step_avg:238.91ms +[2025-07-17 21:05:52] [Rank 0] step:7601/10000 train_time:1816149ms step_avg:238.94ms +[2025-07-17 21:05:52] [Rank 0] step:7601/10000 train_time:1816149ms step_avg:238.94ms +[2025-07-17 21:05:57] [Rank 0] step:7621/10000 train_time:1821169ms step_avg:238.97ms +[2025-07-17 21:05:57] [Rank 0] step:7621/10000 train_time:1821169ms step_avg:238.97ms +[2025-07-17 21:06:03] [Rank 0] PRINT: step:7625/10000 val_loss:5.4255 train_time:1822914ms step_avg:239.07ms +[2025-07-17 21:06:03] [Rank 0] PRINT: step:7625/10000 val_loss:5.4255 train_time:1822914ms step_avg:239.07ms +[2025-07-17 21:06:07] [Rank 0] step:7641/10000 train_time:1826409ms step_avg:239.03ms 
+[2025-07-17 21:06:07] [Rank 0] step:7641/10000 train_time:1826409ms step_avg:239.03ms +[2025-07-17 21:06:12] [Rank 0] step:7661/10000 train_time:1831419ms step_avg:239.06ms +[2025-07-17 21:06:12] [Rank 0] step:7661/10000 train_time:1831419ms step_avg:239.06ms +[2025-07-17 21:06:17] [Rank 0] step:7681/10000 train_time:1836440ms step_avg:239.09ms +[2025-07-17 21:06:17] [Rank 0] step:7681/10000 train_time:1836440ms step_avg:239.09ms +[2025-07-17 21:06:22] [Rank 0] step:7701/10000 train_time:1841441ms step_avg:239.12ms +[2025-07-17 21:06:22] [Rank 0] step:7701/10000 train_time:1841441ms step_avg:239.12ms +[2025-07-17 21:06:27] [Rank 0] step:7721/10000 train_time:1846449ms step_avg:239.15ms +[2025-07-17 21:06:27] [Rank 0] step:7721/10000 train_time:1846449ms step_avg:239.15ms +[2025-07-17 21:06:32] [Rank 0] step:7741/10000 train_time:1851447ms step_avg:239.17ms +[2025-07-17 21:06:32] [Rank 0] step:7741/10000 train_time:1851447ms step_avg:239.17ms +[2025-07-17 21:06:39] [Rank 0] PRINT: step:7750/10000 val_loss:5.4960 train_time:1854216ms step_avg:239.25ms +[2025-07-17 21:06:39] [Rank 0] PRINT: step:7750/10000 val_loss:5.4960 train_time:1854216ms step_avg:239.25ms +[2025-07-17 21:06:42] [Rank 0] step:7761/10000 train_time:1856460ms step_avg:239.20ms +[2025-07-17 21:06:42] [Rank 0] step:7761/10000 train_time:1856460ms step_avg:239.20ms +[2025-07-17 21:06:47] [Rank 0] step:7781/10000 train_time:1861461ms step_avg:239.23ms +[2025-07-17 21:06:47] [Rank 0] step:7781/10000 train_time:1861461ms step_avg:239.23ms +[2025-07-17 21:06:52] [Rank 0] step:7801/10000 train_time:1866462ms step_avg:239.26ms +[2025-07-17 21:06:52] [Rank 0] step:7801/10000 train_time:1866462ms step_avg:239.26ms +[2025-07-17 21:06:57] [Rank 0] step:7821/10000 train_time:1871465ms step_avg:239.29ms +[2025-07-17 21:06:57] [Rank 0] step:7821/10000 train_time:1871465ms step_avg:239.29ms +[2025-07-17 21:07:02] [Rank 0] step:7841/10000 train_time:1876470ms step_avg:239.32ms +[2025-07-17 21:07:02] [Rank 0] 
step:7841/10000 train_time:1876470ms step_avg:239.32ms +[2025-07-17 21:07:07] [Rank 0] step:7861/10000 train_time:1881457ms step_avg:239.34ms +[2025-07-17 21:07:07] [Rank 0] step:7861/10000 train_time:1881457ms step_avg:239.34ms +[2025-07-17 21:07:15] [Rank 0] PRINT: step:7875/10000 val_loss:5.5826 train_time:1885451ms step_avg:239.42ms +[2025-07-17 21:07:15] [Rank 0] PRINT: step:7875/10000 val_loss:5.5826 train_time:1885451ms step_avg:239.42ms +[2025-07-17 21:07:16] [Rank 0] step:7881/10000 train_time:1886442ms step_avg:239.37ms +[2025-07-17 21:07:16] [Rank 0] step:7881/10000 train_time:1886442ms step_avg:239.37ms +[2025-07-17 21:07:21] [Rank 0] step:7901/10000 train_time:1891435ms step_avg:239.39ms +[2025-07-17 21:07:21] [Rank 0] step:7901/10000 train_time:1891435ms step_avg:239.39ms +[2025-07-17 21:07:26] [Rank 0] step:7921/10000 train_time:1896434ms step_avg:239.42ms +[2025-07-17 21:07:26] [Rank 0] step:7921/10000 train_time:1896434ms step_avg:239.42ms +[2025-07-17 21:07:31] [Rank 0] step:7941/10000 train_time:1901438ms step_avg:239.45ms +[2025-07-17 21:07:31] [Rank 0] step:7941/10000 train_time:1901438ms step_avg:239.45ms +[2025-07-17 21:07:36] [Rank 0] step:7961/10000 train_time:1906453ms step_avg:239.47ms +[2025-07-17 21:07:36] [Rank 0] step:7961/10000 train_time:1906453ms step_avg:239.47ms +[2025-07-17 21:07:41] [Rank 0] step:7981/10000 train_time:1911447ms step_avg:239.50ms +[2025-07-17 21:07:41] [Rank 0] step:7981/10000 train_time:1911447ms step_avg:239.50ms +[2025-07-17 21:07:51] [Rank 0] PRINT: step:8000/10000 val_loss:5.4331 train_time:1916710ms step_avg:239.59ms +[2025-07-17 21:07:51] [Rank 0] PRINT: step:8000/10000 val_loss:5.4331 train_time:1916710ms step_avg:239.59ms +[2025-07-17 21:07:51] [Rank 0] step:8001/10000 train_time:1916724ms step_avg:239.56ms +[2025-07-17 21:07:51] [Rank 0] step:8001/10000 train_time:1916724ms step_avg:239.56ms +[2025-07-17 21:07:56] [Rank 0] step:8021/10000 train_time:1921456ms step_avg:239.55ms +[2025-07-17 21:07:56] 
[Rank 0] step:8021/10000 train_time:1921456ms step_avg:239.55ms +[2025-07-17 21:08:01] [Rank 0] step:8041/10000 train_time:1926481ms step_avg:239.58ms +[2025-07-17 21:08:01] [Rank 0] step:8041/10000 train_time:1926481ms step_avg:239.58ms +[2025-07-17 21:08:06] [Rank 0] step:8061/10000 train_time:1931479ms step_avg:239.61ms +[2025-07-17 21:08:06] [Rank 0] step:8061/10000 train_time:1931479ms step_avg:239.61ms +[2025-07-17 21:08:11] [Rank 0] step:8081/10000 train_time:1936485ms step_avg:239.63ms +[2025-07-17 21:08:11] [Rank 0] step:8081/10000 train_time:1936485ms step_avg:239.63ms +[2025-07-17 21:08:16] [Rank 0] step:8101/10000 train_time:1941482ms step_avg:239.66ms +[2025-07-17 21:08:16] [Rank 0] step:8101/10000 train_time:1941482ms step_avg:239.66ms +[2025-07-17 21:08:21] [Rank 0] step:8121/10000 train_time:1946483ms step_avg:239.69ms +[2025-07-17 21:08:21] [Rank 0] step:8121/10000 train_time:1946483ms step_avg:239.69ms +[2025-07-17 21:08:27] [Rank 0] PRINT: step:8125/10000 val_loss:5.4701 train_time:1947988ms step_avg:239.75ms +[2025-07-17 21:08:27] [Rank 0] PRINT: step:8125/10000 val_loss:5.4701 train_time:1947988ms step_avg:239.75ms +[2025-07-17 21:08:31] [Rank 0] step:8141/10000 train_time:1951735ms step_avg:239.74ms +[2025-07-17 21:08:31] [Rank 0] step:8141/10000 train_time:1951735ms step_avg:239.74ms +[2025-07-17 21:08:36] [Rank 0] step:8161/10000 train_time:1956777ms step_avg:239.77ms +[2025-07-17 21:08:36] [Rank 0] step:8161/10000 train_time:1956777ms step_avg:239.77ms +[2025-07-17 21:08:41] [Rank 0] step:8181/10000 train_time:1961849ms step_avg:239.81ms +[2025-07-17 21:08:41] [Rank 0] step:8181/10000 train_time:1961849ms step_avg:239.81ms +[2025-07-17 21:08:46] [Rank 0] step:8201/10000 train_time:1966905ms step_avg:239.84ms +[2025-07-17 21:08:46] [Rank 0] step:8201/10000 train_time:1966905ms step_avg:239.84ms +[2025-07-17 21:08:51] [Rank 0] step:8221/10000 train_time:1971974ms step_avg:239.87ms +[2025-07-17 21:08:51] [Rank 0] step:8221/10000 
train_time:1971974ms step_avg:239.87ms +[2025-07-17 21:08:57] [Rank 0] step:8241/10000 train_time:1977045ms step_avg:239.90ms +[2025-07-17 21:08:57] [Rank 0] step:8241/10000 train_time:1977045ms step_avg:239.90ms +[2025-07-17 21:09:04] [Rank 0] PRINT: step:8250/10000 val_loss:5.3616 train_time:1979842ms step_avg:239.98ms +[2025-07-17 21:09:04] [Rank 0] PRINT: step:8250/10000 val_loss:5.3616 train_time:1979842ms step_avg:239.98ms +[2025-07-17 21:09:06] [Rank 0] step:8261/10000 train_time:1982116ms step_avg:239.94ms +[2025-07-17 21:09:06] [Rank 0] step:8261/10000 train_time:1982116ms step_avg:239.94ms +[2025-07-17 21:09:11] [Rank 0] step:8281/10000 train_time:1987203ms step_avg:239.97ms +[2025-07-17 21:09:11] [Rank 0] step:8281/10000 train_time:1987203ms step_avg:239.97ms +[2025-07-17 21:09:17] [Rank 0] step:8301/10000 train_time:1992264ms step_avg:240.00ms +[2025-07-17 21:09:17] [Rank 0] step:8301/10000 train_time:1992264ms step_avg:240.00ms +[2025-07-17 21:09:22] [Rank 0] step:8321/10000 train_time:1997337ms step_avg:240.04ms +[2025-07-17 21:09:22] [Rank 0] step:8321/10000 train_time:1997337ms step_avg:240.04ms +[2025-07-17 21:09:27] [Rank 0] step:8341/10000 train_time:2002414ms step_avg:240.07ms +[2025-07-17 21:09:27] [Rank 0] step:8341/10000 train_time:2002414ms step_avg:240.07ms +[2025-07-17 21:09:32] [Rank 0] step:8361/10000 train_time:2007476ms step_avg:240.10ms +[2025-07-17 21:09:32] [Rank 0] step:8361/10000 train_time:2007476ms step_avg:240.10ms +[2025-07-17 21:09:40] [Rank 0] PRINT: step:8375/10000 val_loss:5.7939 train_time:2011534ms step_avg:240.18ms +[2025-07-17 21:09:40] [Rank 0] PRINT: step:8375/10000 val_loss:5.7939 train_time:2011534ms step_avg:240.18ms +[2025-07-17 21:09:42] [Rank 0] step:8381/10000 train_time:2012536ms step_avg:240.13ms +[2025-07-17 21:09:42] [Rank 0] step:8381/10000 train_time:2012536ms step_avg:240.13ms +[2025-07-17 21:09:47] [Rank 0] step:8401/10000 train_time:2017583ms step_avg:240.16ms +[2025-07-17 21:09:47] [Rank 0] 
step:8401/10000 train_time:2017583ms step_avg:240.16ms +[2025-07-17 21:09:52] [Rank 0] step:8421/10000 train_time:2022654ms step_avg:240.19ms +[2025-07-17 21:09:52] [Rank 0] step:8421/10000 train_time:2022654ms step_avg:240.19ms +[2025-07-17 21:09:57] [Rank 0] step:8441/10000 train_time:2027720ms step_avg:240.22ms +[2025-07-17 21:09:57] [Rank 0] step:8441/10000 train_time:2027720ms step_avg:240.22ms +[2025-07-17 21:10:02] [Rank 0] step:8461/10000 train_time:2032800ms step_avg:240.26ms +[2025-07-17 21:10:02] [Rank 0] step:8461/10000 train_time:2032800ms step_avg:240.26ms +[2025-07-17 21:10:07] [Rank 0] step:8481/10000 train_time:2037859ms step_avg:240.29ms +[2025-07-17 21:10:07] [Rank 0] step:8481/10000 train_time:2037859ms step_avg:240.29ms +[2025-07-17 21:10:17] [Rank 0] PRINT: step:8500/10000 val_loss:5.5817 train_time:2043191ms step_avg:240.38ms +[2025-07-17 21:10:17] [Rank 0] PRINT: step:8500/10000 val_loss:5.5817 train_time:2043191ms step_avg:240.38ms +[2025-07-17 21:10:17] [Rank 0] step:8501/10000 train_time:2043205ms step_avg:240.35ms +[2025-07-17 21:10:17] [Rank 0] step:8501/10000 train_time:2043205ms step_avg:240.35ms +[2025-07-17 21:10:22] [Rank 0] step:8521/10000 train_time:2047999ms step_avg:240.35ms +[2025-07-17 21:10:22] [Rank 0] step:8521/10000 train_time:2047999ms step_avg:240.35ms +[2025-07-17 21:10:27] [Rank 0] step:8541/10000 train_time:2053082ms step_avg:240.38ms +[2025-07-17 21:10:27] [Rank 0] step:8541/10000 train_time:2053082ms step_avg:240.38ms +[2025-07-17 21:10:32] [Rank 0] step:8561/10000 train_time:2058142ms step_avg:240.41ms +[2025-07-17 21:10:32] [Rank 0] step:8561/10000 train_time:2058142ms step_avg:240.41ms +[2025-07-17 21:10:37] [Rank 0] step:8581/10000 train_time:2063213ms step_avg:240.44ms +[2025-07-17 21:10:37] [Rank 0] step:8581/10000 train_time:2063213ms step_avg:240.44ms +[2025-07-17 21:10:42] [Rank 0] step:8601/10000 train_time:2068266ms step_avg:240.47ms +[2025-07-17 21:10:42] [Rank 0] step:8601/10000 train_time:2068266ms 
step_avg:240.47ms +[2025-07-17 21:10:47] [Rank 0] step:8621/10000 train_time:2073331ms step_avg:240.50ms +[2025-07-17 21:10:47] [Rank 0] step:8621/10000 train_time:2073331ms step_avg:240.50ms +[2025-07-17 21:10:53] [Rank 0] PRINT: step:8625/10000 val_loss:5.5628 train_time:2074853ms step_avg:240.56ms +[2025-07-17 21:10:53] [Rank 0] PRINT: step:8625/10000 val_loss:5.5628 train_time:2074853ms step_avg:240.56ms +[2025-07-17 21:10:57] [Rank 0] step:8641/10000 train_time:2078641ms step_avg:240.56ms +[2025-07-17 21:10:57] [Rank 0] step:8641/10000 train_time:2078641ms step_avg:240.56ms +[2025-07-17 21:11:02] [Rank 0] step:8661/10000 train_time:2083704ms step_avg:240.58ms +[2025-07-17 21:11:02] [Rank 0] step:8661/10000 train_time:2083704ms step_avg:240.58ms +[2025-07-17 21:11:07] [Rank 0] step:8681/10000 train_time:2088771ms step_avg:240.61ms +[2025-07-17 21:11:07] [Rank 0] step:8681/10000 train_time:2088771ms step_avg:240.61ms +[2025-07-17 21:11:13] [Rank 0] step:8701/10000 train_time:2093852ms step_avg:240.64ms +[2025-07-17 21:11:13] [Rank 0] step:8701/10000 train_time:2093852ms step_avg:240.64ms +[2025-07-17 21:11:18] [Rank 0] step:8721/10000 train_time:2098924ms step_avg:240.67ms +[2025-07-17 21:11:18] [Rank 0] step:8721/10000 train_time:2098924ms step_avg:240.67ms +[2025-07-17 21:11:23] [Rank 0] step:8741/10000 train_time:2103993ms step_avg:240.70ms +[2025-07-17 21:11:23] [Rank 0] step:8741/10000 train_time:2103993ms step_avg:240.70ms +[2025-07-17 21:11:29] [Rank 0] PRINT: step:8750/10000 val_loss:5.6446 train_time:2106776ms step_avg:240.77ms +[2025-07-17 21:11:29] [Rank 0] PRINT: step:8750/10000 val_loss:5.6446 train_time:2106776ms step_avg:240.77ms +[2025-07-17 21:11:32] [Rank 0] step:8761/10000 train_time:2109053ms step_avg:240.73ms +[2025-07-17 21:11:32] [Rank 0] step:8761/10000 train_time:2109053ms step_avg:240.73ms +[2025-07-17 21:11:37] [Rank 0] step:8781/10000 train_time:2114124ms step_avg:240.76ms +[2025-07-17 21:11:37] [Rank 0] step:8781/10000 
train_time:2114124ms step_avg:240.76ms +[2025-07-17 21:11:42] [Rank 0] step:8801/10000 train_time:2119187ms step_avg:240.79ms +[2025-07-17 21:11:42] [Rank 0] step:8801/10000 train_time:2119187ms step_avg:240.79ms +[2025-07-17 21:11:47] [Rank 0] step:8821/10000 train_time:2124262ms step_avg:240.82ms +[2025-07-17 21:11:47] [Rank 0] step:8821/10000 train_time:2124262ms step_avg:240.82ms +[2025-07-17 21:11:52] [Rank 0] step:8841/10000 train_time:2129352ms step_avg:240.85ms +[2025-07-17 21:11:52] [Rank 0] step:8841/10000 train_time:2129352ms step_avg:240.85ms +[2025-07-17 21:11:57] [Rank 0] step:8861/10000 train_time:2134425ms step_avg:240.88ms +[2025-07-17 21:11:57] [Rank 0] step:8861/10000 train_time:2134425ms step_avg:240.88ms +[2025-07-17 21:12:06] [Rank 0] PRINT: step:8875/10000 val_loss:5.4441 train_time:2138482ms step_avg:240.96ms +[2025-07-17 21:12:06] [Rank 0] PRINT: step:8875/10000 val_loss:5.4441 train_time:2138482ms step_avg:240.96ms +[2025-07-17 21:12:07] [Rank 0] step:8881/10000 train_time:2139489ms step_avg:240.91ms +[2025-07-17 21:12:07] [Rank 0] step:8881/10000 train_time:2139489ms step_avg:240.91ms +[2025-07-17 21:12:12] [Rank 0] step:8901/10000 train_time:2144551ms step_avg:240.93ms +[2025-07-17 21:12:12] [Rank 0] step:8901/10000 train_time:2144551ms step_avg:240.93ms +[2025-07-17 21:12:17] [Rank 0] step:8921/10000 train_time:2149616ms step_avg:240.96ms +[2025-07-17 21:12:17] [Rank 0] step:8921/10000 train_time:2149616ms step_avg:240.96ms +[2025-07-17 21:12:22] [Rank 0] step:8941/10000 train_time:2154690ms step_avg:240.99ms +[2025-07-17 21:12:22] [Rank 0] step:8941/10000 train_time:2154690ms step_avg:240.99ms +[2025-07-17 21:12:28] [Rank 0] step:8961/10000 train_time:2159765ms step_avg:241.02ms +[2025-07-17 21:12:28] [Rank 0] step:8961/10000 train_time:2159765ms step_avg:241.02ms +[2025-07-17 21:12:33] [Rank 0] step:8981/10000 train_time:2164839ms step_avg:241.05ms +[2025-07-17 21:12:33] [Rank 0] step:8981/10000 train_time:2164839ms step_avg:241.05ms 
+[2025-07-17 21:12:42] [Rank 0] PRINT: step:9000/10000 val_loss:5.5606 train_time:2170168ms step_avg:241.13ms +[2025-07-17 21:12:42] [Rank 0] PRINT: step:9000/10000 val_loss:5.5606 train_time:2170168ms step_avg:241.13ms +[2025-07-17 21:12:43] [Rank 0] step:9001/10000 train_time:2170181ms step_avg:241.10ms +[2025-07-17 21:12:43] [Rank 0] step:9001/10000 train_time:2170181ms step_avg:241.10ms +[2025-07-17 21:12:48] [Rank 0] step:9021/10000 train_time:2174975ms step_avg:241.10ms +[2025-07-17 21:12:48] [Rank 0] step:9021/10000 train_time:2174975ms step_avg:241.10ms +[2025-07-17 21:12:53] [Rank 0] step:9041/10000 train_time:2180068ms step_avg:241.13ms +[2025-07-17 21:12:53] [Rank 0] step:9041/10000 train_time:2180068ms step_avg:241.13ms +[2025-07-17 21:12:58] [Rank 0] step:9061/10000 train_time:2185136ms step_avg:241.16ms +[2025-07-17 21:12:58] [Rank 0] step:9061/10000 train_time:2185136ms step_avg:241.16ms +[2025-07-17 21:13:03] [Rank 0] step:9081/10000 train_time:2190232ms step_avg:241.19ms +[2025-07-17 21:13:03] [Rank 0] step:9081/10000 train_time:2190232ms step_avg:241.19ms +[2025-07-17 21:13:08] [Rank 0] step:9101/10000 train_time:2195326ms step_avg:241.22ms +[2025-07-17 21:13:08] [Rank 0] step:9101/10000 train_time:2195326ms step_avg:241.22ms +[2025-07-17 21:13:13] [Rank 0] step:9121/10000 train_time:2200409ms step_avg:241.25ms +[2025-07-17 21:13:13] [Rank 0] step:9121/10000 train_time:2200409ms step_avg:241.25ms +[2025-07-17 21:13:19] [Rank 0] PRINT: step:9125/10000 val_loss:5.5359 train_time:2201934ms step_avg:241.31ms +[2025-07-17 21:13:19] [Rank 0] PRINT: step:9125/10000 val_loss:5.5359 train_time:2201934ms step_avg:241.31ms +[2025-07-17 21:13:23] [Rank 0] step:9141/10000 train_time:2205470ms step_avg:241.27ms +[2025-07-17 21:13:23] [Rank 0] step:9141/10000 train_time:2205470ms step_avg:241.27ms +[2025-07-17 21:13:28] [Rank 0] step:9161/10000 train_time:2210820ms step_avg:241.33ms +[2025-07-17 21:13:28] [Rank 0] step:9161/10000 train_time:2210820ms 
step_avg:241.33ms +[2025-07-17 21:13:33] [Rank 0] step:9181/10000 train_time:2215897ms step_avg:241.36ms +[2025-07-17 21:13:33] [Rank 0] step:9181/10000 train_time:2215897ms step_avg:241.36ms +[2025-07-17 21:13:38] [Rank 0] step:9201/10000 train_time:2220971ms step_avg:241.38ms +[2025-07-17 21:13:38] [Rank 0] step:9201/10000 train_time:2220971ms step_avg:241.38ms +[2025-07-17 21:13:43] [Rank 0] step:9221/10000 train_time:2226079ms step_avg:241.41ms +[2025-07-17 21:13:43] [Rank 0] step:9221/10000 train_time:2226079ms step_avg:241.41ms +[2025-07-17 21:13:49] [Rank 0] step:9241/10000 train_time:2231166ms step_avg:241.44ms +[2025-07-17 21:13:49] [Rank 0] step:9241/10000 train_time:2231166ms step_avg:241.44ms +[2025-07-17 21:13:56] [Rank 0] PRINT: step:9250/10000 val_loss:5.5343 train_time:2233964ms step_avg:241.51ms +[2025-07-17 21:13:56] [Rank 0] PRINT: step:9250/10000 val_loss:5.5343 train_time:2233964ms step_avg:241.51ms +[2025-07-17 21:13:58] [Rank 0] step:9261/10000 train_time:2236246ms step_avg:241.47ms +[2025-07-17 21:13:58] [Rank 0] step:9261/10000 train_time:2236246ms step_avg:241.47ms +[2025-07-17 21:14:03] [Rank 0] step:9281/10000 train_time:2241302ms step_avg:241.49ms +[2025-07-17 21:14:03] [Rank 0] step:9281/10000 train_time:2241302ms step_avg:241.49ms +[2025-07-17 21:14:09] [Rank 0] step:9301/10000 train_time:2246384ms step_avg:241.52ms +[2025-07-17 21:14:09] [Rank 0] step:9301/10000 train_time:2246384ms step_avg:241.52ms +[2025-07-17 21:14:14] [Rank 0] step:9321/10000 train_time:2251480ms step_avg:241.55ms +[2025-07-17 21:14:14] [Rank 0] step:9321/10000 train_time:2251480ms step_avg:241.55ms +[2025-07-17 21:14:19] [Rank 0] step:9341/10000 train_time:2256561ms step_avg:241.58ms +[2025-07-17 21:14:19] [Rank 0] step:9341/10000 train_time:2256561ms step_avg:241.58ms +[2025-07-17 21:14:24] [Rank 0] step:9361/10000 train_time:2261641ms step_avg:241.60ms +[2025-07-17 21:14:24] [Rank 0] step:9361/10000 train_time:2261641ms step_avg:241.60ms +[2025-07-17 
21:14:32] [Rank 0] PRINT: step:9375/10000 val_loss:5.5012 train_time:2265701ms step_avg:241.67ms +[2025-07-17 21:14:32] [Rank 0] PRINT: step:9375/10000 val_loss:5.5012 train_time:2265701ms step_avg:241.67ms +[2025-07-17 21:14:34] [Rank 0] step:9381/10000 train_time:2266710ms step_avg:241.63ms +[2025-07-17 21:14:34] [Rank 0] step:9381/10000 train_time:2266710ms step_avg:241.63ms +[2025-07-17 21:14:39] [Rank 0] step:9401/10000 train_time:2271763ms step_avg:241.65ms +[2025-07-17 21:14:39] [Rank 0] step:9401/10000 train_time:2271763ms step_avg:241.65ms +[2025-07-17 21:14:44] [Rank 0] step:9421/10000 train_time:2276833ms step_avg:241.68ms +[2025-07-17 21:14:44] [Rank 0] step:9421/10000 train_time:2276833ms step_avg:241.68ms +[2025-07-17 21:14:49] [Rank 0] step:9441/10000 train_time:2281902ms step_avg:241.70ms +[2025-07-17 21:14:49] [Rank 0] step:9441/10000 train_time:2281902ms step_avg:241.70ms +[2025-07-17 21:14:54] [Rank 0] step:9461/10000 train_time:2286977ms step_avg:241.73ms +[2025-07-17 21:14:54] [Rank 0] step:9461/10000 train_time:2286977ms step_avg:241.73ms +[2025-07-17 21:14:59] [Rank 0] step:9481/10000 train_time:2292046ms step_avg:241.75ms +[2025-07-17 21:14:59] [Rank 0] step:9481/10000 train_time:2292046ms step_avg:241.75ms +[2025-07-17 21:15:09] [Rank 0] PRINT: step:9500/10000 val_loss:5.4250 train_time:2297388ms step_avg:241.83ms +[2025-07-17 21:15:09] [Rank 0] PRINT: step:9500/10000 val_loss:5.4250 train_time:2297388ms step_avg:241.83ms +[2025-07-17 21:15:09] [Rank 0] step:9501/10000 train_time:2297402ms step_avg:241.81ms +[2025-07-17 21:15:09] [Rank 0] step:9501/10000 train_time:2297402ms step_avg:241.81ms +[2025-07-17 21:15:14] [Rank 0] step:9521/10000 train_time:2302186ms step_avg:241.80ms +[2025-07-17 21:15:14] [Rank 0] step:9521/10000 train_time:2302186ms step_avg:241.80ms +[2025-07-17 21:15:19] [Rank 0] step:9541/10000 train_time:2307264ms step_avg:241.83ms +[2025-07-17 21:15:19] [Rank 0] step:9541/10000 train_time:2307264ms step_avg:241.83ms 
+[2025-07-17 21:15:24] [Rank 0] step:9561/10000 train_time:2312309ms step_avg:241.85ms +[2025-07-17 21:15:24] [Rank 0] step:9561/10000 train_time:2312309ms step_avg:241.85ms +[2025-07-17 21:15:29] [Rank 0] step:9581/10000 train_time:2317361ms step_avg:241.87ms +[2025-07-17 21:15:29] [Rank 0] step:9581/10000 train_time:2317361ms step_avg:241.87ms +[2025-07-17 21:15:34] [Rank 0] step:9601/10000 train_time:2322419ms step_avg:241.89ms +[2025-07-17 21:15:34] [Rank 0] step:9601/10000 train_time:2322419ms step_avg:241.89ms +[2025-07-17 21:15:39] [Rank 0] step:9621/10000 train_time:2327503ms step_avg:241.92ms +[2025-07-17 21:15:39] [Rank 0] step:9621/10000 train_time:2327503ms step_avg:241.92ms +[2025-07-17 21:15:45] [Rank 0] PRINT: step:9625/10000 val_loss:5.4334 train_time:2329020ms step_avg:241.98ms +[2025-07-17 21:15:45] [Rank 0] PRINT: step:9625/10000 val_loss:5.4334 train_time:2329020ms step_avg:241.98ms +[2025-07-17 21:15:49] [Rank 0] step:9641/10000 train_time:2332586ms step_avg:241.94ms +[2025-07-17 21:15:49] [Rank 0] step:9641/10000 train_time:2332586ms step_avg:241.94ms +[2025-07-17 21:15:54] [Rank 0] step:9661/10000 train_time:2337943ms step_avg:242.00ms +[2025-07-17 21:15:54] [Rank 0] step:9661/10000 train_time:2337943ms step_avg:242.00ms +[2025-07-17 21:16:00] [Rank 0] step:9681/10000 train_time:2343059ms step_avg:242.03ms +[2025-07-17 21:16:00] [Rank 0] step:9681/10000 train_time:2343059ms step_avg:242.03ms +[2025-07-17 21:16:05] [Rank 0] step:9701/10000 train_time:2348190ms step_avg:242.06ms +[2025-07-17 21:16:05] [Rank 0] step:9701/10000 train_time:2348190ms step_avg:242.06ms +[2025-07-17 21:16:10] [Rank 0] step:9721/10000 train_time:2353301ms step_avg:242.08ms +[2025-07-17 21:16:10] [Rank 0] step:9721/10000 train_time:2353301ms step_avg:242.08ms +[2025-07-17 21:16:15] [Rank 0] step:9741/10000 train_time:2358430ms step_avg:242.11ms +[2025-07-17 21:16:15] [Rank 0] step:9741/10000 train_time:2358430ms step_avg:242.11ms +[2025-07-17 21:16:22] [Rank 0] PRINT: 
step:9750/10000 val_loss:5.5443 train_time:2361243ms step_avg:242.18ms +[2025-07-17 21:16:22] [Rank 0] PRINT: step:9750/10000 val_loss:5.5443 train_time:2361243ms step_avg:242.18ms +[2025-07-17 21:16:25] [Rank 0] step:9761/10000 train_time:2363537ms step_avg:242.14ms +[2025-07-17 21:16:25] [Rank 0] step:9761/10000 train_time:2363537ms step_avg:242.14ms +[2025-07-17 21:16:30] [Rank 0] step:9781/10000 train_time:2368655ms step_avg:242.17ms +[2025-07-17 21:16:30] [Rank 0] step:9781/10000 train_time:2368655ms step_avg:242.17ms +[2025-07-17 21:16:35] [Rank 0] step:9801/10000 train_time:2373765ms step_avg:242.20ms +[2025-07-17 21:16:35] [Rank 0] step:9801/10000 train_time:2373765ms step_avg:242.20ms +[2025-07-17 21:16:40] [Rank 0] step:9821/10000 train_time:2378884ms step_avg:242.22ms +[2025-07-17 21:16:40] [Rank 0] step:9821/10000 train_time:2378884ms step_avg:242.22ms +[2025-07-17 21:16:45] [Rank 0] step:9841/10000 train_time:2383997ms step_avg:242.25ms +[2025-07-17 21:16:45] [Rank 0] step:9841/10000 train_time:2383997ms step_avg:242.25ms +[2025-07-17 21:16:50] [Rank 0] step:9861/10000 train_time:2389109ms step_avg:242.28ms +[2025-07-17 21:16:50] [Rank 0] step:9861/10000 train_time:2389109ms step_avg:242.28ms +[2025-07-17 21:16:59] [Rank 0] PRINT: step:9875/10000 val_loss:5.5653 train_time:2393200ms step_avg:242.35ms +[2025-07-17 21:16:59] [Rank 0] PRINT: step:9875/10000 val_loss:5.5653 train_time:2393200ms step_avg:242.35ms +[2025-07-17 21:17:00] [Rank 0] step:9881/10000 train_time:2394216ms step_avg:242.30ms +[2025-07-17 21:17:00] [Rank 0] step:9881/10000 train_time:2394216ms step_avg:242.30ms +[2025-07-17 21:17:05] [Rank 0] step:9901/10000 train_time:2399331ms step_avg:242.33ms +[2025-07-17 21:17:05] [Rank 0] step:9901/10000 train_time:2399331ms step_avg:242.33ms +[2025-07-17 21:17:11] [Rank 0] step:9921/10000 train_time:2404455ms step_avg:242.36ms +[2025-07-17 21:17:11] [Rank 0] step:9921/10000 train_time:2404455ms step_avg:242.36ms +[2025-07-17 21:17:16] [Rank 0] 
step:9941/10000 train_time:2409604ms step_avg:242.39ms +[2025-07-17 21:17:16] [Rank 0] step:9941/10000 train_time:2409604ms step_avg:242.39ms +[2025-07-17 21:17:21] [Rank 0] step:9961/10000 train_time:2414737ms step_avg:242.42ms +[2025-07-17 21:17:21] [Rank 0] step:9961/10000 train_time:2414737ms step_avg:242.42ms +[2025-07-17 21:17:26] [Rank 0] step:9981/10000 train_time:2419885ms step_avg:242.45ms +[2025-07-17 21:17:26] [Rank 0] step:9981/10000 train_time:2419885ms step_avg:242.45ms +[2025-07-17 21:17:31] [Rank 0] step:10000/10000 train_time:2424733ms step_avg:242.47ms +[2025-07-17 21:17:31] [Rank 0] step:10000/10000 train_time:2424733ms step_avg:242.47ms +[2025-07-17 21:17:35] [Rank 0] PRINT: step:10000/10000 val_loss:5.5914 train_time:2425250ms step_avg:242.53ms +[2025-07-17 21:17:35] [Rank 0] PRINT: step:10000/10000 val_loss:5.5914 train_time:2425250ms step_avg:242.53ms +[2025-07-17 21:17:35] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 21:17:35 2025 --- +[2025-07-17 21:17:35] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 21:17:35 2025 --- +[2025-07-17 21:17:35] [Rank 0] PRINT: Peak memory allocated: 30851 MiB reserved: 31236 MiB +[2025-07-17 21:17:35] [Rank 0] PRINT: Peak memory allocated: 30851 MiB reserved: 31236 MiB diff --git a/logs_norope/diff_modes/mode_5_param_norope_seed_42/config.json b/logs_norope/diff_modes/mode_5_param_norope_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..93ec5953e0e03cfd0ac8f4889259b1488583531b --- /dev/null +++ b/logs_norope/diff_modes/mode_5_param_norope_seed_42/config.json @@ -0,0 +1,22 @@ +{ + "cli_args": { + "unet": false, + "seed": 42, + "optimizer_mode": 5, + "model_parameterization": "norope" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 10485760, + "train_seq_len": 49152, + 
"val_seq_len": 262144, + "num_iterations": 10000, + "cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 125, + "save_checkpoint": false + }, + "run_uuid_for_log": "504f4b0e-bd9c-4604-b0ca-27ac231c5283", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_norope/diff_modes/mode_5_param_norope_seed_42/training_log_504f4b0e-bd9c-4604-b0ca-27ac231c5283.txt b/logs_norope/diff_modes/mode_5_param_norope_seed_42/training_log_504f4b0e-bd9c-4604-b0ca-27ac231c5283.txt new file mode 100644 index 0000000000000000000000000000000000000000..b455d6b58b75b4f8078281b9007b51981be116b4 --- /dev/null +++ b/logs_norope/diff_modes/mode_5_param_norope_seed_42/training_log_504f4b0e-bd9c-4604-b0ca-27ac231c5283.txt @@ -0,0 +1,2360 @@ +[2025-07-17 14:05:56] [Rank 0] PRINT: --- Script Start: Thu Jul 17 14:05:56 2025 --- +[2025-07-17 14:05:56] [Rank 0] PRINT: --- Script Start: Thu Jul 17 14:05:56 2025 --- +[2025-07-17 14:05:56] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=5, model_parameterization='norope') +[2025-07-17 14:05:56] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=5, model_parameterization='norope') +[2025-07-17 14:05:56] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 14:05:56] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 14:05:56] [Rank 0] PRINT: Using fixed seed: 42 +[2025-07-17 14:05:56] [Rank 0] PRINT: Using fixed seed: 42 +[2025-07-17 14:05:56] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_5_param_norope_seed_42 +[2025-07-17 14:05:56] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_5_param_norope_seed_42 +[2025-07-17 14:05:56] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path 
+import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * 
num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used 
for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + 
config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 14:05:56] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + 
num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + 
run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 14:05:56] [Rank 0] PRINT: Constructing model... +[2025-07-17 14:05:56] [Rank 0] PRINT: Constructing model... +[2025-07-17 14:05:59] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 14:05:59] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 14:05:59] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 14:05:59] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 14:05:59] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 14:05:59] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 14:05:59] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 14:05:59] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 14:05:59] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 5 +[2025-07-17 14:05:59] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 5 +[2025-07-17 14:05:59] [Rank 0] PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: 0.001). +[2025-07-17 14:05:59] [Rank 0] PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: 0.001). +[2025-07-17 14:05:59] [Rank 0] PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices). +[2025-07-17 14:05:59] [Rank 0] PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices). +[2025-07-17 14:05:59] [Rank 0] PRINT: Optimizers configured. Total optimizers: 1 +[2025-07-17 14:05:59] [Rank 0] PRINT: Optimizers configured. Total optimizers: 1 +[2025-07-17 14:05:59] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 14:05:59] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 14:05:59] [Rank 0] PRINT: Model compilation complete. 
+[2025-07-17 14:05:59] [Rank 0] PRINT: Model compilation complete. +[2025-07-17 14:05:59] [Rank 0] PRINT: Starting warmup... +[2025-07-17 14:05:59] [Rank 0] PRINT: Starting warmup... +[2025-07-17 14:07:04] [Rank 0] PRINT: Warmup complete. +[2025-07-17 14:07:04] [Rank 0] PRINT: Warmup complete. +[2025-07-17 14:07:05] [Rank 0] PRINT: Starting training... +[2025-07-17 14:07:05] [Rank 0] PRINT: Starting training... +[2025-07-17 14:07:14] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 14:07:14] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 14:07:19] [Rank 0] step:21/10000 train_time:3537ms step_avg:168.44ms +[2025-07-17 14:07:19] [Rank 0] step:21/10000 train_time:3537ms step_avg:168.44ms +[2025-07-17 14:07:23] [Rank 0] step:41/10000 train_time:7933ms step_avg:193.50ms +[2025-07-17 14:07:23] [Rank 0] step:41/10000 train_time:7933ms step_avg:193.50ms +[2025-07-17 14:07:28] [Rank 0] step:61/10000 train_time:12338ms step_avg:202.27ms +[2025-07-17 14:07:28] [Rank 0] step:61/10000 train_time:12338ms step_avg:202.27ms +[2025-07-17 14:07:32] [Rank 0] step:81/10000 train_time:16752ms step_avg:206.82ms +[2025-07-17 14:07:32] [Rank 0] step:81/10000 train_time:16752ms step_avg:206.82ms +[2025-07-17 14:07:37] [Rank 0] step:101/10000 train_time:21171ms step_avg:209.62ms +[2025-07-17 14:07:37] [Rank 0] step:101/10000 train_time:21171ms step_avg:209.62ms +[2025-07-17 14:07:41] [Rank 0] step:121/10000 train_time:25597ms step_avg:211.55ms +[2025-07-17 14:07:41] [Rank 0] step:121/10000 train_time:25597ms step_avg:211.55ms +[2025-07-17 14:07:46] [Rank 0] PRINT: step:125/10000 val_loss:5.5357 train_time:27678ms step_avg:221.42ms +[2025-07-17 14:07:46] [Rank 0] PRINT: step:125/10000 val_loss:5.5357 train_time:27678ms step_avg:221.42ms +[2025-07-17 14:07:50] [Rank 0] step:141/10000 train_time:30027ms step_avg:212.96ms +[2025-07-17 14:07:50] [Rank 0] step:141/10000 train_time:30027ms step_avg:212.96ms 
+[2025-07-17 14:07:54] [Rank 0] step:161/10000 train_time:34451ms step_avg:213.98ms +[2025-07-17 14:07:54] [Rank 0] step:161/10000 train_time:34451ms step_avg:213.98ms +[2025-07-17 14:07:59] [Rank 0] step:181/10000 train_time:38877ms step_avg:214.79ms +[2025-07-17 14:07:59] [Rank 0] step:181/10000 train_time:38877ms step_avg:214.79ms +[2025-07-17 14:08:03] [Rank 0] step:201/10000 train_time:43305ms step_avg:215.45ms +[2025-07-17 14:08:03] [Rank 0] step:201/10000 train_time:43305ms step_avg:215.45ms +[2025-07-17 14:08:08] [Rank 0] step:221/10000 train_time:47738ms step_avg:216.01ms +[2025-07-17 14:08:08] [Rank 0] step:221/10000 train_time:47738ms step_avg:216.01ms +[2025-07-17 14:08:12] [Rank 0] step:241/10000 train_time:52170ms step_avg:216.47ms +[2025-07-17 14:08:12] [Rank 0] step:241/10000 train_time:52170ms step_avg:216.47ms +[2025-07-17 14:08:18] [Rank 0] PRINT: step:250/10000 val_loss:5.0790 train_time:55355ms step_avg:221.42ms +[2025-07-17 14:08:18] [Rank 0] PRINT: step:250/10000 val_loss:5.0790 train_time:55355ms step_avg:221.42ms +[2025-07-17 14:08:21] [Rank 0] step:261/10000 train_time:56597ms step_avg:216.85ms +[2025-07-17 14:08:21] [Rank 0] step:261/10000 train_time:56597ms step_avg:216.85ms +[2025-07-17 14:08:25] [Rank 0] step:281/10000 train_time:61023ms step_avg:217.16ms +[2025-07-17 14:08:25] [Rank 0] step:281/10000 train_time:61023ms step_avg:217.16ms +[2025-07-17 14:08:30] [Rank 0] step:301/10000 train_time:65451ms step_avg:217.44ms +[2025-07-17 14:08:30] [Rank 0] step:301/10000 train_time:65451ms step_avg:217.44ms +[2025-07-17 14:08:34] [Rank 0] step:321/10000 train_time:69877ms step_avg:217.68ms +[2025-07-17 14:08:34] [Rank 0] step:321/10000 train_time:69877ms step_avg:217.68ms +[2025-07-17 14:08:39] [Rank 0] step:341/10000 train_time:74305ms step_avg:217.90ms +[2025-07-17 14:08:39] [Rank 0] step:341/10000 train_time:74305ms step_avg:217.90ms +[2025-07-17 14:08:43] [Rank 0] step:361/10000 train_time:78732ms step_avg:218.09ms +[2025-07-17 
14:08:43] [Rank 0] step:361/10000 train_time:78732ms step_avg:218.09ms +[2025-07-17 14:08:50] [Rank 0] PRINT: step:375/10000 val_loss:4.7816 train_time:83020ms step_avg:221.39ms +[2025-07-17 14:08:50] [Rank 0] PRINT: step:375/10000 val_loss:4.7816 train_time:83020ms step_avg:221.39ms +[2025-07-17 14:08:52] [Rank 0] step:381/10000 train_time:83159ms step_avg:218.26ms +[2025-07-17 14:08:52] [Rank 0] step:381/10000 train_time:83159ms step_avg:218.26ms +[2025-07-17 14:08:56] [Rank 0] step:401/10000 train_time:87587ms step_avg:218.42ms +[2025-07-17 14:08:56] [Rank 0] step:401/10000 train_time:87587ms step_avg:218.42ms +[2025-07-17 14:09:01] [Rank 0] step:421/10000 train_time:92016ms step_avg:218.56ms +[2025-07-17 14:09:01] [Rank 0] step:421/10000 train_time:92016ms step_avg:218.56ms +[2025-07-17 14:09:05] [Rank 0] step:441/10000 train_time:96447ms step_avg:218.70ms +[2025-07-17 14:09:05] [Rank 0] step:441/10000 train_time:96447ms step_avg:218.70ms +[2025-07-17 14:09:10] [Rank 0] step:461/10000 train_time:100880ms step_avg:218.83ms +[2025-07-17 14:09:10] [Rank 0] step:461/10000 train_time:100880ms step_avg:218.83ms +[2025-07-17 14:09:14] [Rank 0] step:481/10000 train_time:105312ms step_avg:218.94ms +[2025-07-17 14:09:14] [Rank 0] step:481/10000 train_time:105312ms step_avg:218.94ms +[2025-07-17 14:09:23] [Rank 0] PRINT: step:500/10000 val_loss:4.6560 train_time:110710ms step_avg:221.42ms +[2025-07-17 14:09:23] [Rank 0] PRINT: step:500/10000 val_loss:4.6560 train_time:110710ms step_avg:221.42ms +[2025-07-17 14:09:23] [Rank 0] step:501/10000 train_time:110720ms step_avg:221.00ms +[2025-07-17 14:09:23] [Rank 0] step:501/10000 train_time:110720ms step_avg:221.00ms +[2025-07-17 14:09:27] [Rank 0] step:521/10000 train_time:114172ms step_avg:219.14ms +[2025-07-17 14:09:27] [Rank 0] step:521/10000 train_time:114172ms step_avg:219.14ms +[2025-07-17 14:09:32] [Rank 0] step:541/10000 train_time:118604ms step_avg:219.23ms +[2025-07-17 14:09:32] [Rank 0] step:541/10000 
train_time:118604ms step_avg:219.23ms +[2025-07-17 14:09:36] [Rank 0] step:561/10000 train_time:123030ms step_avg:219.31ms +[2025-07-17 14:09:36] [Rank 0] step:561/10000 train_time:123030ms step_avg:219.31ms +[2025-07-17 14:09:41] [Rank 0] step:581/10000 train_time:127464ms step_avg:219.39ms +[2025-07-17 14:09:41] [Rank 0] step:581/10000 train_time:127464ms step_avg:219.39ms +[2025-07-17 14:09:45] [Rank 0] step:601/10000 train_time:131898ms step_avg:219.46ms +[2025-07-17 14:09:45] [Rank 0] step:601/10000 train_time:131898ms step_avg:219.46ms +[2025-07-17 14:09:49] [Rank 0] step:621/10000 train_time:136332ms step_avg:219.54ms +[2025-07-17 14:09:49] [Rank 0] step:621/10000 train_time:136332ms step_avg:219.54ms +[2025-07-17 14:09:55] [Rank 0] PRINT: step:625/10000 val_loss:4.5340 train_time:138413ms step_avg:221.46ms +[2025-07-17 14:09:55] [Rank 0] PRINT: step:625/10000 val_loss:4.5340 train_time:138413ms step_avg:221.46ms +[2025-07-17 14:09:58] [Rank 0] step:641/10000 train_time:140767ms step_avg:219.61ms +[2025-07-17 14:09:58] [Rank 0] step:641/10000 train_time:140767ms step_avg:219.61ms +[2025-07-17 14:10:03] [Rank 0] step:661/10000 train_time:145207ms step_avg:219.68ms +[2025-07-17 14:10:03] [Rank 0] step:661/10000 train_time:145207ms step_avg:219.68ms +[2025-07-17 14:10:07] [Rank 0] step:681/10000 train_time:149642ms step_avg:219.74ms +[2025-07-17 14:10:07] [Rank 0] step:681/10000 train_time:149642ms step_avg:219.74ms +[2025-07-17 14:10:12] [Rank 0] step:701/10000 train_time:154080ms step_avg:219.80ms +[2025-07-17 14:10:12] [Rank 0] step:701/10000 train_time:154080ms step_avg:219.80ms +[2025-07-17 14:10:16] [Rank 0] step:721/10000 train_time:158520ms step_avg:219.86ms +[2025-07-17 14:10:16] [Rank 0] step:721/10000 train_time:158520ms step_avg:219.86ms +[2025-07-17 14:10:20] [Rank 0] step:741/10000 train_time:162958ms step_avg:219.92ms +[2025-07-17 14:10:20] [Rank 0] step:741/10000 train_time:162958ms step_avg:219.92ms +[2025-07-17 14:10:27] [Rank 0] PRINT: 
step:750/10000 val_loss:4.7052 train_time:166162ms step_avg:221.55ms +[2025-07-17 14:10:27] [Rank 0] PRINT: step:750/10000 val_loss:4.7052 train_time:166162ms step_avg:221.55ms +[2025-07-17 14:10:29] [Rank 0] step:761/10000 train_time:167419ms step_avg:220.00ms +[2025-07-17 14:10:29] [Rank 0] step:761/10000 train_time:167419ms step_avg:220.00ms +[2025-07-17 14:10:34] [Rank 0] step:781/10000 train_time:171888ms step_avg:220.09ms +[2025-07-17 14:10:34] [Rank 0] step:781/10000 train_time:171888ms step_avg:220.09ms +[2025-07-17 14:10:38] [Rank 0] step:801/10000 train_time:176360ms step_avg:220.18ms +[2025-07-17 14:10:38] [Rank 0] step:801/10000 train_time:176360ms step_avg:220.18ms +[2025-07-17 14:10:43] [Rank 0] step:821/10000 train_time:180831ms step_avg:220.26ms +[2025-07-17 14:10:43] [Rank 0] step:821/10000 train_time:180831ms step_avg:220.26ms +[2025-07-17 14:10:47] [Rank 0] step:841/10000 train_time:185300ms step_avg:220.33ms +[2025-07-17 14:10:47] [Rank 0] step:841/10000 train_time:185300ms step_avg:220.33ms +[2025-07-17 14:10:52] [Rank 0] step:861/10000 train_time:189773ms step_avg:220.41ms +[2025-07-17 14:10:52] [Rank 0] step:861/10000 train_time:189773ms step_avg:220.41ms +[2025-07-17 14:10:59] [Rank 0] PRINT: step:875/10000 val_loss:4.7295 train_time:194107ms step_avg:221.84ms +[2025-07-17 14:10:59] [Rank 0] PRINT: step:875/10000 val_loss:4.7295 train_time:194107ms step_avg:221.84ms +[2025-07-17 14:11:01] [Rank 0] step:881/10000 train_time:194247ms step_avg:220.48ms +[2025-07-17 14:11:01] [Rank 0] step:881/10000 train_time:194247ms step_avg:220.48ms +[2025-07-17 14:11:05] [Rank 0] step:901/10000 train_time:198717ms step_avg:220.55ms +[2025-07-17 14:11:05] [Rank 0] step:901/10000 train_time:198717ms step_avg:220.55ms +[2025-07-17 14:11:10] [Rank 0] step:921/10000 train_time:203190ms step_avg:220.62ms +[2025-07-17 14:11:10] [Rank 0] step:921/10000 train_time:203190ms step_avg:220.62ms +[2025-07-17 14:11:14] [Rank 0] step:941/10000 train_time:207667ms 
step_avg:220.69ms +[2025-07-17 14:11:14] [Rank 0] step:941/10000 train_time:207667ms step_avg:220.69ms +[2025-07-17 14:11:19] [Rank 0] step:961/10000 train_time:212144ms step_avg:220.75ms +[2025-07-17 14:11:19] [Rank 0] step:961/10000 train_time:212144ms step_avg:220.75ms +[2025-07-17 14:11:23] [Rank 0] step:981/10000 train_time:216620ms step_avg:220.82ms +[2025-07-17 14:11:23] [Rank 0] step:981/10000 train_time:216620ms step_avg:220.82ms +[2025-07-17 14:11:32] [Rank 0] PRINT: step:1000/10000 val_loss:4.7029 train_time:222078ms step_avg:222.08ms +[2025-07-17 14:11:32] [Rank 0] PRINT: step:1000/10000 val_loss:4.7029 train_time:222078ms step_avg:222.08ms +[2025-07-17 14:11:32] [Rank 0] step:1001/10000 train_time:222087ms step_avg:221.87ms +[2025-07-17 14:11:32] [Rank 0] step:1001/10000 train_time:222087ms step_avg:221.87ms +[2025-07-17 14:11:36] [Rank 0] step:1021/10000 train_time:225577ms step_avg:220.94ms +[2025-07-17 14:11:36] [Rank 0] step:1021/10000 train_time:225577ms step_avg:220.94ms +[2025-07-17 14:11:41] [Rank 0] step:1041/10000 train_time:230055ms step_avg:220.99ms +[2025-07-17 14:11:41] [Rank 0] step:1041/10000 train_time:230055ms step_avg:220.99ms +[2025-07-17 14:11:45] [Rank 0] step:1061/10000 train_time:234534ms step_avg:221.05ms +[2025-07-17 14:11:45] [Rank 0] step:1061/10000 train_time:234534ms step_avg:221.05ms +[2025-07-17 14:11:50] [Rank 0] step:1081/10000 train_time:239016ms step_avg:221.11ms +[2025-07-17 14:11:50] [Rank 0] step:1081/10000 train_time:239016ms step_avg:221.11ms +[2025-07-17 14:11:54] [Rank 0] step:1101/10000 train_time:243494ms step_avg:221.16ms +[2025-07-17 14:11:54] [Rank 0] step:1101/10000 train_time:243494ms step_avg:221.16ms +[2025-07-17 14:11:59] [Rank 0] step:1121/10000 train_time:247975ms step_avg:221.21ms +[2025-07-17 14:11:59] [Rank 0] step:1121/10000 train_time:247975ms step_avg:221.21ms +[2025-07-17 14:12:04] [Rank 0] PRINT: step:1125/10000 val_loss:4.6720 train_time:250079ms step_avg:222.29ms +[2025-07-17 14:12:04] 
[Rank 0] PRINT: step:1125/10000 val_loss:4.6720 train_time:250079ms step_avg:222.29ms +[2025-07-17 14:12:08] [Rank 0] step:1141/10000 train_time:252456ms step_avg:221.26ms +[2025-07-17 14:12:08] [Rank 0] step:1141/10000 train_time:252456ms step_avg:221.26ms +[2025-07-17 14:12:12] [Rank 0] step:1161/10000 train_time:256940ms step_avg:221.31ms +[2025-07-17 14:12:12] [Rank 0] step:1161/10000 train_time:256940ms step_avg:221.31ms +[2025-07-17 14:12:17] [Rank 0] step:1181/10000 train_time:261423ms step_avg:221.36ms +[2025-07-17 14:12:17] [Rank 0] step:1181/10000 train_time:261423ms step_avg:221.36ms +[2025-07-17 14:12:21] [Rank 0] step:1201/10000 train_time:265904ms step_avg:221.40ms +[2025-07-17 14:12:21] [Rank 0] step:1201/10000 train_time:265904ms step_avg:221.40ms +[2025-07-17 14:12:26] [Rank 0] step:1221/10000 train_time:270387ms step_avg:221.45ms +[2025-07-17 14:12:26] [Rank 0] step:1221/10000 train_time:270387ms step_avg:221.45ms +[2025-07-17 14:12:30] [Rank 0] step:1241/10000 train_time:274872ms step_avg:221.49ms +[2025-07-17 14:12:30] [Rank 0] step:1241/10000 train_time:274872ms step_avg:221.49ms +[2025-07-17 14:12:37] [Rank 0] PRINT: step:1250/10000 val_loss:4.7586 train_time:278094ms step_avg:222.48ms +[2025-07-17 14:12:37] [Rank 0] PRINT: step:1250/10000 val_loss:4.7586 train_time:278094ms step_avg:222.48ms +[2025-07-17 14:12:39] [Rank 0] step:1261/10000 train_time:279354ms step_avg:221.53ms +[2025-07-17 14:12:39] [Rank 0] step:1261/10000 train_time:279354ms step_avg:221.53ms +[2025-07-17 14:12:44] [Rank 0] step:1281/10000 train_time:283835ms step_avg:221.57ms +[2025-07-17 14:12:44] [Rank 0] step:1281/10000 train_time:283835ms step_avg:221.57ms +[2025-07-17 14:12:48] [Rank 0] step:1301/10000 train_time:288321ms step_avg:221.61ms +[2025-07-17 14:12:48] [Rank 0] step:1301/10000 train_time:288321ms step_avg:221.61ms +[2025-07-17 14:12:53] [Rank 0] step:1321/10000 train_time:292808ms step_avg:221.66ms +[2025-07-17 14:12:53] [Rank 0] step:1321/10000 
train_time:292808ms step_avg:221.66ms +[2025-07-17 14:12:57] [Rank 0] step:1341/10000 train_time:297293ms step_avg:221.70ms +[2025-07-17 14:12:57] [Rank 0] step:1341/10000 train_time:297293ms step_avg:221.70ms +[2025-07-17 14:13:01] [Rank 0] step:1361/10000 train_time:301779ms step_avg:221.73ms +[2025-07-17 14:13:01] [Rank 0] step:1361/10000 train_time:301779ms step_avg:221.73ms +[2025-07-17 14:13:09] [Rank 0] PRINT: step:1375/10000 val_loss:4.6739 train_time:306122ms step_avg:222.63ms +[2025-07-17 14:13:09] [Rank 0] PRINT: step:1375/10000 val_loss:4.6739 train_time:306122ms step_avg:222.63ms +[2025-07-17 14:13:10] [Rank 0] step:1381/10000 train_time:306265ms step_avg:221.77ms +[2025-07-17 14:13:10] [Rank 0] step:1381/10000 train_time:306265ms step_avg:221.77ms +[2025-07-17 14:13:15] [Rank 0] step:1401/10000 train_time:310752ms step_avg:221.81ms +[2025-07-17 14:13:15] [Rank 0] step:1401/10000 train_time:310752ms step_avg:221.81ms +[2025-07-17 14:13:19] [Rank 0] step:1421/10000 train_time:315242ms step_avg:221.85ms +[2025-07-17 14:13:19] [Rank 0] step:1421/10000 train_time:315242ms step_avg:221.85ms +[2025-07-17 14:13:24] [Rank 0] step:1441/10000 train_time:319736ms step_avg:221.88ms +[2025-07-17 14:13:24] [Rank 0] step:1441/10000 train_time:319736ms step_avg:221.88ms +[2025-07-17 14:13:28] [Rank 0] step:1461/10000 train_time:324229ms step_avg:221.92ms +[2025-07-17 14:13:28] [Rank 0] step:1461/10000 train_time:324229ms step_avg:221.92ms +[2025-07-17 14:13:33] [Rank 0] step:1481/10000 train_time:328726ms step_avg:221.96ms +[2025-07-17 14:13:33] [Rank 0] step:1481/10000 train_time:328726ms step_avg:221.96ms +[2025-07-17 14:13:42] [Rank 0] PRINT: step:1500/10000 val_loss:4.6481 train_time:334222ms step_avg:222.81ms +[2025-07-17 14:13:42] [Rank 0] PRINT: step:1500/10000 val_loss:4.6481 train_time:334222ms step_avg:222.81ms +[2025-07-17 14:13:42] [Rank 0] step:1501/10000 train_time:334231ms step_avg:222.67ms +[2025-07-17 14:13:42] [Rank 0] step:1501/10000 
train_time:334231ms step_avg:222.67ms +[2025-07-17 14:13:46] [Rank 0] step:1521/10000 train_time:337753ms step_avg:222.06ms +[2025-07-17 14:13:46] [Rank 0] step:1521/10000 train_time:337753ms step_avg:222.06ms +[2025-07-17 14:13:51] [Rank 0] step:1541/10000 train_time:342268ms step_avg:222.11ms +[2025-07-17 14:13:51] [Rank 0] step:1541/10000 train_time:342268ms step_avg:222.11ms +[2025-07-17 14:13:55] [Rank 0] step:1561/10000 train_time:346788ms step_avg:222.16ms +[2025-07-17 14:13:55] [Rank 0] step:1561/10000 train_time:346788ms step_avg:222.16ms +[2025-07-17 14:14:00] [Rank 0] step:1581/10000 train_time:351305ms step_avg:222.20ms +[2025-07-17 14:14:00] [Rank 0] step:1581/10000 train_time:351305ms step_avg:222.20ms +[2025-07-17 14:14:04] [Rank 0] step:1601/10000 train_time:355823ms step_avg:222.25ms +[2025-07-17 14:14:04] [Rank 0] step:1601/10000 train_time:355823ms step_avg:222.25ms +[2025-07-17 14:14:09] [Rank 0] step:1621/10000 train_time:360341ms step_avg:222.30ms +[2025-07-17 14:14:09] [Rank 0] step:1621/10000 train_time:360341ms step_avg:222.30ms +[2025-07-17 14:14:14] [Rank 0] PRINT: step:1625/10000 val_loss:4.6648 train_time:362465ms step_avg:223.06ms +[2025-07-17 14:14:14] [Rank 0] PRINT: step:1625/10000 val_loss:4.6648 train_time:362465ms step_avg:223.06ms +[2025-07-17 14:14:18] [Rank 0] step:1641/10000 train_time:364862ms step_avg:222.34ms +[2025-07-17 14:14:18] [Rank 0] step:1641/10000 train_time:364862ms step_avg:222.34ms +[2025-07-17 14:14:23] [Rank 0] step:1661/10000 train_time:369381ms step_avg:222.38ms +[2025-07-17 14:14:23] [Rank 0] step:1661/10000 train_time:369381ms step_avg:222.38ms +[2025-07-17 14:14:27] [Rank 0] step:1681/10000 train_time:373900ms step_avg:222.43ms +[2025-07-17 14:14:27] [Rank 0] step:1681/10000 train_time:373900ms step_avg:222.43ms +[2025-07-17 14:14:32] [Rank 0] step:1701/10000 train_time:378422ms step_avg:222.47ms +[2025-07-17 14:14:32] [Rank 0] step:1701/10000 train_time:378422ms step_avg:222.47ms +[2025-07-17 14:14:36] 
[Rank 0] step:1721/10000 train_time:382943ms step_avg:222.51ms +[2025-07-17 14:14:36] [Rank 0] step:1721/10000 train_time:382943ms step_avg:222.51ms +[2025-07-17 14:14:41] [Rank 0] step:1741/10000 train_time:387465ms step_avg:222.55ms +[2025-07-17 14:14:41] [Rank 0] step:1741/10000 train_time:387465ms step_avg:222.55ms +[2025-07-17 14:14:47] [Rank 0] PRINT: step:1750/10000 val_loss:4.6029 train_time:390716ms step_avg:223.27ms +[2025-07-17 14:14:47] [Rank 0] PRINT: step:1750/10000 val_loss:4.6029 train_time:390716ms step_avg:223.27ms +[2025-07-17 14:14:49] [Rank 0] step:1761/10000 train_time:391987ms step_avg:222.59ms +[2025-07-17 14:14:49] [Rank 0] step:1761/10000 train_time:391987ms step_avg:222.59ms +[2025-07-17 14:14:54] [Rank 0] step:1781/10000 train_time:396510ms step_avg:222.63ms +[2025-07-17 14:14:54] [Rank 0] step:1781/10000 train_time:396510ms step_avg:222.63ms +[2025-07-17 14:14:58] [Rank 0] step:1801/10000 train_time:401034ms step_avg:222.67ms +[2025-07-17 14:14:58] [Rank 0] step:1801/10000 train_time:401034ms step_avg:222.67ms +[2025-07-17 14:15:03] [Rank 0] step:1821/10000 train_time:405555ms step_avg:222.71ms +[2025-07-17 14:15:03] [Rank 0] step:1821/10000 train_time:405555ms step_avg:222.71ms +[2025-07-17 14:15:07] [Rank 0] step:1841/10000 train_time:410082ms step_avg:222.75ms +[2025-07-17 14:15:07] [Rank 0] step:1841/10000 train_time:410082ms step_avg:222.75ms +[2025-07-17 14:15:12] [Rank 0] step:1861/10000 train_time:414605ms step_avg:222.79ms +[2025-07-17 14:15:12] [Rank 0] step:1861/10000 train_time:414605ms step_avg:222.79ms +[2025-07-17 14:15:19] [Rank 0] PRINT: step:1875/10000 val_loss:4.6020 train_time:418986ms step_avg:223.46ms +[2025-07-17 14:15:19] [Rank 0] PRINT: step:1875/10000 val_loss:4.6020 train_time:418986ms step_avg:223.46ms +[2025-07-17 14:15:21] [Rank 0] step:1881/10000 train_time:419127ms step_avg:222.82ms +[2025-07-17 14:15:21] [Rank 0] step:1881/10000 train_time:419127ms step_avg:222.82ms +[2025-07-17 14:15:25] [Rank 0] 
step:1901/10000 train_time:423645ms step_avg:222.85ms +[2025-07-17 14:15:25] [Rank 0] step:1901/10000 train_time:423645ms step_avg:222.85ms +[2025-07-17 14:15:30] [Rank 0] step:1921/10000 train_time:428163ms step_avg:222.89ms +[2025-07-17 14:15:30] [Rank 0] step:1921/10000 train_time:428163ms step_avg:222.89ms +[2025-07-17 14:15:34] [Rank 0] step:1941/10000 train_time:432683ms step_avg:222.92ms +[2025-07-17 14:15:34] [Rank 0] step:1941/10000 train_time:432683ms step_avg:222.92ms +[2025-07-17 14:15:39] [Rank 0] step:1961/10000 train_time:437203ms step_avg:222.95ms +[2025-07-17 14:15:39] [Rank 0] step:1961/10000 train_time:437203ms step_avg:222.95ms +[2025-07-17 14:15:43] [Rank 0] step:1981/10000 train_time:441721ms step_avg:222.98ms +[2025-07-17 14:15:43] [Rank 0] step:1981/10000 train_time:441721ms step_avg:222.98ms +[2025-07-17 14:15:52] [Rank 0] PRINT: step:2000/10000 val_loss:4.5965 train_time:447232ms step_avg:223.62ms +[2025-07-17 14:15:52] [Rank 0] PRINT: step:2000/10000 val_loss:4.5965 train_time:447232ms step_avg:223.62ms +[2025-07-17 14:15:52] [Rank 0] step:2001/10000 train_time:447242ms step_avg:223.51ms +[2025-07-17 14:15:52] [Rank 0] step:2001/10000 train_time:447242ms step_avg:223.51ms +[2025-07-17 14:15:57] [Rank 0] step:2021/10000 train_time:450760ms step_avg:223.04ms +[2025-07-17 14:15:57] [Rank 0] step:2021/10000 train_time:450760ms step_avg:223.04ms +[2025-07-17 14:16:01] [Rank 0] step:2041/10000 train_time:455282ms step_avg:223.07ms +[2025-07-17 14:16:01] [Rank 0] step:2041/10000 train_time:455282ms step_avg:223.07ms +[2025-07-17 14:16:06] [Rank 0] step:2061/10000 train_time:459802ms step_avg:223.10ms +[2025-07-17 14:16:06] [Rank 0] step:2061/10000 train_time:459802ms step_avg:223.10ms +[2025-07-17 14:16:11] [Rank 0] step:2081/10000 train_time:464322ms step_avg:223.12ms +[2025-07-17 14:16:11] [Rank 0] step:2081/10000 train_time:464322ms step_avg:223.12ms +[2025-07-17 14:16:15] [Rank 0] step:2101/10000 train_time:468840ms step_avg:223.15ms 
+[2025-07-17 14:16:15] [Rank 0] step:2101/10000 train_time:468840ms step_avg:223.15ms +[2025-07-17 14:16:20] [Rank 0] step:2121/10000 train_time:473366ms step_avg:223.18ms +[2025-07-17 14:16:20] [Rank 0] step:2121/10000 train_time:473366ms step_avg:223.18ms +[2025-07-17 14:16:25] [Rank 0] PRINT: step:2125/10000 val_loss:4.6174 train_time:475489ms step_avg:223.76ms +[2025-07-17 14:16:25] [Rank 0] PRINT: step:2125/10000 val_loss:4.6174 train_time:475489ms step_avg:223.76ms +[2025-07-17 14:16:29] [Rank 0] step:2141/10000 train_time:477886ms step_avg:223.21ms +[2025-07-17 14:16:29] [Rank 0] step:2141/10000 train_time:477886ms step_avg:223.21ms +[2025-07-17 14:16:33] [Rank 0] step:2161/10000 train_time:482412ms step_avg:223.24ms +[2025-07-17 14:16:33] [Rank 0] step:2161/10000 train_time:482412ms step_avg:223.24ms +[2025-07-17 14:16:38] [Rank 0] step:2181/10000 train_time:486935ms step_avg:223.26ms +[2025-07-17 14:16:38] [Rank 0] step:2181/10000 train_time:486935ms step_avg:223.26ms +[2025-07-17 14:16:42] [Rank 0] step:2201/10000 train_time:491454ms step_avg:223.29ms +[2025-07-17 14:16:42] [Rank 0] step:2201/10000 train_time:491454ms step_avg:223.29ms +[2025-07-17 14:16:47] [Rank 0] step:2221/10000 train_time:495975ms step_avg:223.31ms +[2025-07-17 14:16:47] [Rank 0] step:2221/10000 train_time:495975ms step_avg:223.31ms +[2025-07-17 14:16:51] [Rank 0] step:2241/10000 train_time:500570ms step_avg:223.37ms +[2025-07-17 14:16:51] [Rank 0] step:2241/10000 train_time:500570ms step_avg:223.37ms +[2025-07-17 14:16:58] [Rank 0] PRINT: step:2250/10000 val_loss:4.2113 train_time:503905ms step_avg:223.96ms +[2025-07-17 14:16:58] [Rank 0] PRINT: step:2250/10000 val_loss:4.2113 train_time:503905ms step_avg:223.96ms +[2025-07-17 14:17:01] [Rank 0] step:2261/10000 train_time:505207ms step_avg:223.44ms +[2025-07-17 14:17:01] [Rank 0] step:2261/10000 train_time:505207ms step_avg:223.44ms +[2025-07-17 14:17:05] [Rank 0] step:2281/10000 train_time:509843ms step_avg:223.52ms +[2025-07-17 
14:17:05] [Rank 0] step:2281/10000 train_time:509843ms step_avg:223.52ms +[2025-07-17 14:17:10] [Rank 0] step:2301/10000 train_time:514482ms step_avg:223.59ms +[2025-07-17 14:17:10] [Rank 0] step:2301/10000 train_time:514482ms step_avg:223.59ms +[2025-07-17 14:17:14] [Rank 0] step:2321/10000 train_time:519120ms step_avg:223.66ms +[2025-07-17 14:17:14] [Rank 0] step:2321/10000 train_time:519120ms step_avg:223.66ms +[2025-07-17 14:17:19] [Rank 0] step:2341/10000 train_time:523757ms step_avg:223.73ms +[2025-07-17 14:17:19] [Rank 0] step:2341/10000 train_time:523757ms step_avg:223.73ms +[2025-07-17 14:17:24] [Rank 0] step:2361/10000 train_time:528391ms step_avg:223.80ms +[2025-07-17 14:17:24] [Rank 0] step:2361/10000 train_time:528391ms step_avg:223.80ms +[2025-07-17 14:17:31] [Rank 0] PRINT: step:2375/10000 val_loss:4.2269 train_time:532881ms step_avg:224.37ms +[2025-07-17 14:17:31] [Rank 0] PRINT: step:2375/10000 val_loss:4.2269 train_time:532881ms step_avg:224.37ms +[2025-07-17 14:17:33] [Rank 0] step:2381/10000 train_time:533026ms step_avg:223.87ms +[2025-07-17 14:17:33] [Rank 0] step:2381/10000 train_time:533026ms step_avg:223.87ms +[2025-07-17 14:17:38] [Rank 0] step:2401/10000 train_time:537653ms step_avg:223.93ms +[2025-07-17 14:17:38] [Rank 0] step:2401/10000 train_time:537653ms step_avg:223.93ms +[2025-07-17 14:17:42] [Rank 0] step:2421/10000 train_time:542287ms step_avg:223.99ms +[2025-07-17 14:17:42] [Rank 0] step:2421/10000 train_time:542287ms step_avg:223.99ms +[2025-07-17 14:17:47] [Rank 0] step:2441/10000 train_time:546920ms step_avg:224.06ms +[2025-07-17 14:17:47] [Rank 0] step:2441/10000 train_time:546920ms step_avg:224.06ms +[2025-07-17 14:17:51] [Rank 0] step:2461/10000 train_time:551553ms step_avg:224.12ms +[2025-07-17 14:17:51] [Rank 0] step:2461/10000 train_time:551553ms step_avg:224.12ms +[2025-07-17 14:17:56] [Rank 0] step:2481/10000 train_time:556184ms step_avg:224.18ms +[2025-07-17 14:17:56] [Rank 0] step:2481/10000 train_time:556184ms 
step_avg:224.18ms +[2025-07-17 14:18:05] [Rank 0] PRINT: step:2500/10000 val_loss:4.1654 train_time:561825ms step_avg:224.73ms +[2025-07-17 14:18:05] [Rank 0] PRINT: step:2500/10000 val_loss:4.1654 train_time:561825ms step_avg:224.73ms +[2025-07-17 14:18:05] [Rank 0] step:2501/10000 train_time:561834ms step_avg:224.64ms +[2025-07-17 14:18:05] [Rank 0] step:2501/10000 train_time:561834ms step_avg:224.64ms +[2025-07-17 14:18:10] [Rank 0] step:2521/10000 train_time:565440ms step_avg:224.29ms +[2025-07-17 14:18:10] [Rank 0] step:2521/10000 train_time:565440ms step_avg:224.29ms +[2025-07-17 14:18:14] [Rank 0] step:2541/10000 train_time:570065ms step_avg:224.35ms +[2025-07-17 14:18:14] [Rank 0] step:2541/10000 train_time:570065ms step_avg:224.35ms +[2025-07-17 14:18:19] [Rank 0] step:2561/10000 train_time:574695ms step_avg:224.40ms +[2025-07-17 14:18:19] [Rank 0] step:2561/10000 train_time:574695ms step_avg:224.40ms +[2025-07-17 14:18:24] [Rank 0] step:2581/10000 train_time:579320ms step_avg:224.46ms +[2025-07-17 14:18:24] [Rank 0] step:2581/10000 train_time:579320ms step_avg:224.46ms +[2025-07-17 14:18:28] [Rank 0] step:2601/10000 train_time:583949ms step_avg:224.51ms +[2025-07-17 14:18:28] [Rank 0] step:2601/10000 train_time:583949ms step_avg:224.51ms +[2025-07-17 14:18:33] [Rank 0] step:2621/10000 train_time:588582ms step_avg:224.56ms +[2025-07-17 14:18:33] [Rank 0] step:2621/10000 train_time:588582ms step_avg:224.56ms +[2025-07-17 14:18:38] [Rank 0] PRINT: step:2625/10000 val_loss:4.1413 train_time:590757ms step_avg:225.05ms +[2025-07-17 14:18:38] [Rank 0] PRINT: step:2625/10000 val_loss:4.1413 train_time:590757ms step_avg:225.05ms +[2025-07-17 14:18:42] [Rank 0] step:2641/10000 train_time:593217ms step_avg:224.62ms +[2025-07-17 14:18:42] [Rank 0] step:2641/10000 train_time:593217ms step_avg:224.62ms +[2025-07-17 14:18:46] [Rank 0] step:2661/10000 train_time:597854ms step_avg:224.67ms +[2025-07-17 14:18:46] [Rank 0] step:2661/10000 train_time:597854ms 
step_avg:224.67ms +[2025-07-17 14:18:51] [Rank 0] step:2681/10000 train_time:602490ms step_avg:224.73ms +[2025-07-17 14:18:51] [Rank 0] step:2681/10000 train_time:602490ms step_avg:224.73ms +[2025-07-17 14:18:56] [Rank 0] step:2701/10000 train_time:607126ms step_avg:224.78ms +[2025-07-17 14:18:56] [Rank 0] step:2701/10000 train_time:607126ms step_avg:224.78ms +[2025-07-17 14:19:00] [Rank 0] step:2721/10000 train_time:611762ms step_avg:224.83ms +[2025-07-17 14:19:00] [Rank 0] step:2721/10000 train_time:611762ms step_avg:224.83ms +[2025-07-17 14:19:05] [Rank 0] step:2741/10000 train_time:616401ms step_avg:224.88ms +[2025-07-17 14:19:05] [Rank 0] step:2741/10000 train_time:616401ms step_avg:224.88ms +[2025-07-17 14:19:12] [Rank 0] PRINT: step:2750/10000 val_loss:4.1001 train_time:619740ms step_avg:225.36ms +[2025-07-17 14:19:12] [Rank 0] PRINT: step:2750/10000 val_loss:4.1001 train_time:619740ms step_avg:225.36ms +[2025-07-17 14:19:14] [Rank 0] step:2761/10000 train_time:621043ms step_avg:224.93ms +[2025-07-17 14:19:14] [Rank 0] step:2761/10000 train_time:621043ms step_avg:224.93ms +[2025-07-17 14:19:19] [Rank 0] step:2781/10000 train_time:625675ms step_avg:224.98ms +[2025-07-17 14:19:19] [Rank 0] step:2781/10000 train_time:625675ms step_avg:224.98ms +[2025-07-17 14:19:23] [Rank 0] step:2801/10000 train_time:630313ms step_avg:225.03ms +[2025-07-17 14:19:23] [Rank 0] step:2801/10000 train_time:630313ms step_avg:225.03ms +[2025-07-17 14:19:28] [Rank 0] step:2821/10000 train_time:634948ms step_avg:225.08ms +[2025-07-17 14:19:28] [Rank 0] step:2821/10000 train_time:634948ms step_avg:225.08ms +[2025-07-17 14:19:33] [Rank 0] step:2841/10000 train_time:639585ms step_avg:225.13ms +[2025-07-17 14:19:33] [Rank 0] step:2841/10000 train_time:639585ms step_avg:225.13ms +[2025-07-17 14:19:37] [Rank 0] step:2861/10000 train_time:644221ms step_avg:225.17ms +[2025-07-17 14:19:37] [Rank 0] step:2861/10000 train_time:644221ms step_avg:225.17ms +[2025-07-17 14:19:45] [Rank 0] PRINT: 
step:2875/10000 val_loss:4.1395 train_time:648717ms step_avg:225.64ms +[2025-07-17 14:19:45] [Rank 0] PRINT: step:2875/10000 val_loss:4.1395 train_time:648717ms step_avg:225.64ms +[2025-07-17 14:19:47] [Rank 0] step:2881/10000 train_time:648862ms step_avg:225.22ms +[2025-07-17 14:19:47] [Rank 0] step:2881/10000 train_time:648862ms step_avg:225.22ms +[2025-07-17 14:19:51] [Rank 0] step:2901/10000 train_time:653495ms step_avg:225.27ms +[2025-07-17 14:19:51] [Rank 0] step:2901/10000 train_time:653495ms step_avg:225.27ms +[2025-07-17 14:19:56] [Rank 0] step:2921/10000 train_time:658127ms step_avg:225.31ms +[2025-07-17 14:19:56] [Rank 0] step:2921/10000 train_time:658127ms step_avg:225.31ms +[2025-07-17 14:20:00] [Rank 0] step:2941/10000 train_time:662756ms step_avg:225.35ms +[2025-07-17 14:20:00] [Rank 0] step:2941/10000 train_time:662756ms step_avg:225.35ms +[2025-07-17 14:20:05] [Rank 0] step:2961/10000 train_time:667389ms step_avg:225.39ms +[2025-07-17 14:20:05] [Rank 0] step:2961/10000 train_time:667389ms step_avg:225.39ms +[2025-07-17 14:20:10] [Rank 0] step:2981/10000 train_time:672035ms step_avg:225.44ms +[2025-07-17 14:20:10] [Rank 0] step:2981/10000 train_time:672035ms step_avg:225.44ms +[2025-07-17 14:20:19] [Rank 0] PRINT: step:3000/10000 val_loss:4.1107 train_time:677706ms step_avg:225.90ms +[2025-07-17 14:20:19] [Rank 0] PRINT: step:3000/10000 val_loss:4.1107 train_time:677706ms step_avg:225.90ms +[2025-07-17 14:20:19] [Rank 0] step:3001/10000 train_time:677716ms step_avg:225.83ms +[2025-07-17 14:20:19] [Rank 0] step:3001/10000 train_time:677716ms step_avg:225.83ms +[2025-07-17 14:20:24] [Rank 0] step:3021/10000 train_time:681337ms step_avg:225.53ms +[2025-07-17 14:20:24] [Rank 0] step:3021/10000 train_time:681337ms step_avg:225.53ms +[2025-07-17 14:20:28] [Rank 0] step:3041/10000 train_time:685988ms step_avg:225.58ms +[2025-07-17 14:20:28] [Rank 0] step:3041/10000 train_time:685988ms step_avg:225.58ms +[2025-07-17 14:20:33] [Rank 0] step:3061/10000 
train_time:690640ms step_avg:225.63ms +[2025-07-17 14:20:33] [Rank 0] step:3061/10000 train_time:690640ms step_avg:225.63ms +[2025-07-17 14:20:38] [Rank 0] step:3081/10000 train_time:695291ms step_avg:225.67ms +[2025-07-17 14:20:38] [Rank 0] step:3081/10000 train_time:695291ms step_avg:225.67ms +[2025-07-17 14:20:42] [Rank 0] step:3101/10000 train_time:699942ms step_avg:225.72ms +[2025-07-17 14:20:42] [Rank 0] step:3101/10000 train_time:699942ms step_avg:225.72ms +[2025-07-17 14:20:47] [Rank 0] step:3121/10000 train_time:704598ms step_avg:225.76ms +[2025-07-17 14:20:47] [Rank 0] step:3121/10000 train_time:704598ms step_avg:225.76ms +[2025-07-17 14:20:52] [Rank 0] PRINT: step:3125/10000 val_loss:4.1288 train_time:706785ms step_avg:226.17ms +[2025-07-17 14:20:52] [Rank 0] PRINT: step:3125/10000 val_loss:4.1288 train_time:706785ms step_avg:226.17ms +[2025-07-17 14:20:56] [Rank 0] step:3141/10000 train_time:709254ms step_avg:225.81ms +[2025-07-17 14:20:56] [Rank 0] step:3141/10000 train_time:709254ms step_avg:225.81ms +[2025-07-17 14:21:01] [Rank 0] step:3161/10000 train_time:713909ms step_avg:225.85ms +[2025-07-17 14:21:01] [Rank 0] step:3161/10000 train_time:713909ms step_avg:225.85ms +[2025-07-17 14:21:05] [Rank 0] step:3181/10000 train_time:718564ms step_avg:225.89ms +[2025-07-17 14:21:05] [Rank 0] step:3181/10000 train_time:718564ms step_avg:225.89ms +[2025-07-17 14:21:10] [Rank 0] step:3201/10000 train_time:723221ms step_avg:225.94ms +[2025-07-17 14:21:10] [Rank 0] step:3201/10000 train_time:723221ms step_avg:225.94ms +[2025-07-17 14:21:15] [Rank 0] step:3221/10000 train_time:727877ms step_avg:225.98ms +[2025-07-17 14:21:15] [Rank 0] step:3221/10000 train_time:727877ms step_avg:225.98ms +[2025-07-17 14:21:19] [Rank 0] step:3241/10000 train_time:732534ms step_avg:226.02ms +[2025-07-17 14:21:19] [Rank 0] step:3241/10000 train_time:732534ms step_avg:226.02ms +[2025-07-17 14:21:26] [Rank 0] PRINT: step:3250/10000 val_loss:4.1425 train_time:735884ms step_avg:226.43ms 
+[2025-07-17 14:21:26] [Rank 0] PRINT: step:3250/10000 val_loss:4.1425 train_time:735884ms step_avg:226.43ms +[2025-07-17 14:21:29] [Rank 0] step:3261/10000 train_time:737190ms step_avg:226.06ms +[2025-07-17 14:21:29] [Rank 0] step:3261/10000 train_time:737190ms step_avg:226.06ms +[2025-07-17 14:21:33] [Rank 0] step:3281/10000 train_time:741844ms step_avg:226.10ms +[2025-07-17 14:21:33] [Rank 0] step:3281/10000 train_time:741844ms step_avg:226.10ms +[2025-07-17 14:21:38] [Rank 0] step:3301/10000 train_time:746500ms step_avg:226.14ms +[2025-07-17 14:21:38] [Rank 0] step:3301/10000 train_time:746500ms step_avg:226.14ms +[2025-07-17 14:21:43] [Rank 0] step:3321/10000 train_time:751158ms step_avg:226.18ms +[2025-07-17 14:21:43] [Rank 0] step:3321/10000 train_time:751158ms step_avg:226.18ms +[2025-07-17 14:21:47] [Rank 0] step:3341/10000 train_time:755812ms step_avg:226.22ms +[2025-07-17 14:21:47] [Rank 0] step:3341/10000 train_time:755812ms step_avg:226.22ms +[2025-07-17 14:21:52] [Rank 0] step:3361/10000 train_time:760468ms step_avg:226.26ms +[2025-07-17 14:21:52] [Rank 0] step:3361/10000 train_time:760468ms step_avg:226.26ms +[2025-07-17 14:22:00] [Rank 0] PRINT: step:3375/10000 val_loss:4.1217 train_time:764979ms step_avg:226.66ms +[2025-07-17 14:22:00] [Rank 0] PRINT: step:3375/10000 val_loss:4.1217 train_time:764979ms step_avg:226.66ms +[2025-07-17 14:22:01] [Rank 0] step:3381/10000 train_time:765126ms step_avg:226.30ms +[2025-07-17 14:22:01] [Rank 0] step:3381/10000 train_time:765126ms step_avg:226.30ms +[2025-07-17 14:22:06] [Rank 0] step:3401/10000 train_time:769779ms step_avg:226.34ms +[2025-07-17 14:22:06] [Rank 0] step:3401/10000 train_time:769779ms step_avg:226.34ms +[2025-07-17 14:22:10] [Rank 0] step:3421/10000 train_time:774435ms step_avg:226.38ms +[2025-07-17 14:22:10] [Rank 0] step:3421/10000 train_time:774435ms step_avg:226.38ms +[2025-07-17 14:22:15] [Rank 0] step:3441/10000 train_time:779090ms step_avg:226.41ms +[2025-07-17 14:22:15] [Rank 0] 
step:3441/10000 train_time:779090ms step_avg:226.41ms +[2025-07-17 14:22:20] [Rank 0] step:3461/10000 train_time:783745ms step_avg:226.45ms +[2025-07-17 14:22:20] [Rank 0] step:3461/10000 train_time:783745ms step_avg:226.45ms +[2025-07-17 14:22:24] [Rank 0] step:3481/10000 train_time:788402ms step_avg:226.49ms +[2025-07-17 14:22:24] [Rank 0] step:3481/10000 train_time:788402ms step_avg:226.49ms +[2025-07-17 14:22:33] [Rank 0] PRINT: step:3500/10000 val_loss:4.1919 train_time:794074ms step_avg:226.88ms +[2025-07-17 14:22:33] [Rank 0] PRINT: step:3500/10000 val_loss:4.1919 train_time:794074ms step_avg:226.88ms +[2025-07-17 14:22:34] [Rank 0] step:3501/10000 train_time:794083ms step_avg:226.82ms +[2025-07-17 14:22:34] [Rank 0] step:3501/10000 train_time:794083ms step_avg:226.82ms +[2025-07-17 14:22:38] [Rank 0] step:3521/10000 train_time:797712ms step_avg:226.56ms +[2025-07-17 14:22:38] [Rank 0] step:3521/10000 train_time:797712ms step_avg:226.56ms +[2025-07-17 14:22:43] [Rank 0] step:3541/10000 train_time:802368ms step_avg:226.59ms +[2025-07-17 14:22:43] [Rank 0] step:3541/10000 train_time:802368ms step_avg:226.59ms +[2025-07-17 14:22:48] [Rank 0] step:3561/10000 train_time:807026ms step_avg:226.63ms +[2025-07-17 14:22:48] [Rank 0] step:3561/10000 train_time:807026ms step_avg:226.63ms +[2025-07-17 14:22:52] [Rank 0] step:3581/10000 train_time:811686ms step_avg:226.66ms +[2025-07-17 14:22:52] [Rank 0] step:3581/10000 train_time:811686ms step_avg:226.66ms +[2025-07-17 14:22:57] [Rank 0] step:3601/10000 train_time:816346ms step_avg:226.70ms +[2025-07-17 14:22:57] [Rank 0] step:3601/10000 train_time:816346ms step_avg:226.70ms +[2025-07-17 14:23:02] [Rank 0] step:3621/10000 train_time:821005ms step_avg:226.73ms +[2025-07-17 14:23:02] [Rank 0] step:3621/10000 train_time:821005ms step_avg:226.73ms +[2025-07-17 14:23:07] [Rank 0] PRINT: step:3625/10000 val_loss:4.1694 train_time:823193ms step_avg:227.09ms +[2025-07-17 14:23:07] [Rank 0] PRINT: step:3625/10000 val_loss:4.1694 
train_time:823193ms step_avg:227.09ms +[2025-07-17 14:23:10] [Rank 0] step:3641/10000 train_time:825661ms step_avg:226.77ms +[2025-07-17 14:23:10] [Rank 0] step:3641/10000 train_time:825661ms step_avg:226.77ms +[2025-07-17 14:23:15] [Rank 0] step:3661/10000 train_time:830325ms step_avg:226.80ms +[2025-07-17 14:23:15] [Rank 0] step:3661/10000 train_time:830325ms step_avg:226.80ms +[2025-07-17 14:23:20] [Rank 0] step:3681/10000 train_time:834991ms step_avg:226.84ms +[2025-07-17 14:23:20] [Rank 0] step:3681/10000 train_time:834991ms step_avg:226.84ms +[2025-07-17 14:23:24] [Rank 0] step:3701/10000 train_time:839658ms step_avg:226.87ms +[2025-07-17 14:23:24] [Rank 0] step:3701/10000 train_time:839658ms step_avg:226.87ms +[2025-07-17 14:23:29] [Rank 0] step:3721/10000 train_time:844372ms step_avg:226.92ms +[2025-07-17 14:23:29] [Rank 0] step:3721/10000 train_time:844372ms step_avg:226.92ms +[2025-07-17 14:23:34] [Rank 0] step:3741/10000 train_time:849126ms step_avg:226.98ms +[2025-07-17 14:23:34] [Rank 0] step:3741/10000 train_time:849126ms step_avg:226.98ms +[2025-07-17 14:23:41] [Rank 0] PRINT: step:3750/10000 val_loss:3.9725 train_time:852541ms step_avg:227.34ms +[2025-07-17 14:23:41] [Rank 0] PRINT: step:3750/10000 val_loss:3.9725 train_time:852541ms step_avg:227.34ms +[2025-07-17 14:23:43] [Rank 0] step:3761/10000 train_time:853870ms step_avg:227.03ms +[2025-07-17 14:23:43] [Rank 0] step:3761/10000 train_time:853870ms step_avg:227.03ms +[2025-07-17 14:23:48] [Rank 0] step:3781/10000 train_time:858620ms step_avg:227.09ms +[2025-07-17 14:23:48] [Rank 0] step:3781/10000 train_time:858620ms step_avg:227.09ms +[2025-07-17 14:23:53] [Rank 0] step:3801/10000 train_time:863368ms step_avg:227.14ms +[2025-07-17 14:23:53] [Rank 0] step:3801/10000 train_time:863368ms step_avg:227.14ms +[2025-07-17 14:23:58] [Rank 0] step:3821/10000 train_time:868116ms step_avg:227.20ms +[2025-07-17 14:23:58] [Rank 0] step:3821/10000 train_time:868116ms step_avg:227.20ms +[2025-07-17 14:24:02] 
[Rank 0] step:3841/10000 train_time:872863ms step_avg:227.25ms +[2025-07-17 14:24:02] [Rank 0] step:3841/10000 train_time:872863ms step_avg:227.25ms +[2025-07-17 14:24:07] [Rank 0] step:3861/10000 train_time:877610ms step_avg:227.30ms +[2025-07-17 14:24:07] [Rank 0] step:3861/10000 train_time:877610ms step_avg:227.30ms +[2025-07-17 14:24:15] [Rank 0] PRINT: step:3875/10000 val_loss:4.1499 train_time:882211ms step_avg:227.67ms +[2025-07-17 14:24:15] [Rank 0] PRINT: step:3875/10000 val_loss:4.1499 train_time:882211ms step_avg:227.67ms +[2025-07-17 14:24:16] [Rank 0] step:3881/10000 train_time:882358ms step_avg:227.35ms +[2025-07-17 14:24:16] [Rank 0] step:3881/10000 train_time:882358ms step_avg:227.35ms +[2025-07-17 14:24:21] [Rank 0] step:3901/10000 train_time:887101ms step_avg:227.40ms +[2025-07-17 14:24:21] [Rank 0] step:3901/10000 train_time:887101ms step_avg:227.40ms +[2025-07-17 14:24:26] [Rank 0] step:3921/10000 train_time:891845ms step_avg:227.45ms +[2025-07-17 14:24:26] [Rank 0] step:3921/10000 train_time:891845ms step_avg:227.45ms +[2025-07-17 14:24:31] [Rank 0] step:3941/10000 train_time:896592ms step_avg:227.50ms +[2025-07-17 14:24:31] [Rank 0] step:3941/10000 train_time:896592ms step_avg:227.50ms +[2025-07-17 14:24:35] [Rank 0] step:3961/10000 train_time:901338ms step_avg:227.55ms +[2025-07-17 14:24:35] [Rank 0] step:3961/10000 train_time:901338ms step_avg:227.55ms +[2025-07-17 14:24:40] [Rank 0] step:3981/10000 train_time:906086ms step_avg:227.60ms +[2025-07-17 14:24:40] [Rank 0] step:3981/10000 train_time:906086ms step_avg:227.60ms +[2025-07-17 14:24:49] [Rank 0] PRINT: step:4000/10000 val_loss:4.1610 train_time:911867ms step_avg:227.97ms +[2025-07-17 14:24:49] [Rank 0] PRINT: step:4000/10000 val_loss:4.1610 train_time:911867ms step_avg:227.97ms +[2025-07-17 14:24:50] [Rank 0] step:4001/10000 train_time:911876ms step_avg:227.91ms +[2025-07-17 14:24:50] [Rank 0] step:4001/10000 train_time:911876ms step_avg:227.91ms +[2025-07-17 14:24:54] [Rank 0] 
step:4021/10000 train_time:915574ms step_avg:227.70ms +[2025-07-17 14:24:54] [Rank 0] step:4021/10000 train_time:915574ms step_avg:227.70ms +[2025-07-17 14:24:59] [Rank 0] step:4041/10000 train_time:920317ms step_avg:227.74ms +[2025-07-17 14:24:59] [Rank 0] step:4041/10000 train_time:920317ms step_avg:227.74ms +[2025-07-17 14:25:04] [Rank 0] step:4061/10000 train_time:925062ms step_avg:227.79ms +[2025-07-17 14:25:04] [Rank 0] step:4061/10000 train_time:925062ms step_avg:227.79ms +[2025-07-17 14:25:09] [Rank 0] step:4081/10000 train_time:929807ms step_avg:227.84ms +[2025-07-17 14:25:09] [Rank 0] step:4081/10000 train_time:929807ms step_avg:227.84ms +[2025-07-17 14:25:13] [Rank 0] step:4101/10000 train_time:934552ms step_avg:227.88ms +[2025-07-17 14:25:13] [Rank 0] step:4101/10000 train_time:934552ms step_avg:227.88ms +[2025-07-17 14:25:18] [Rank 0] step:4121/10000 train_time:939300ms step_avg:227.93ms +[2025-07-17 14:25:18] [Rank 0] step:4121/10000 train_time:939300ms step_avg:227.93ms +[2025-07-17 14:25:23] [Rank 0] PRINT: step:4125/10000 val_loss:4.2147 train_time:941528ms step_avg:228.25ms +[2025-07-17 14:25:23] [Rank 0] PRINT: step:4125/10000 val_loss:4.2147 train_time:941528ms step_avg:228.25ms +[2025-07-17 14:25:27] [Rank 0] step:4141/10000 train_time:944041ms step_avg:227.97ms +[2025-07-17 14:25:27] [Rank 0] step:4141/10000 train_time:944041ms step_avg:227.97ms +[2025-07-17 14:25:32] [Rank 0] step:4161/10000 train_time:948785ms step_avg:228.02ms +[2025-07-17 14:25:32] [Rank 0] step:4161/10000 train_time:948785ms step_avg:228.02ms +[2025-07-17 14:25:37] [Rank 0] step:4181/10000 train_time:953529ms step_avg:228.06ms +[2025-07-17 14:25:37] [Rank 0] step:4181/10000 train_time:953529ms step_avg:228.06ms +[2025-07-17 14:25:41] [Rank 0] step:4201/10000 train_time:958269ms step_avg:228.10ms +[2025-07-17 14:25:41] [Rank 0] step:4201/10000 train_time:958269ms step_avg:228.10ms +[2025-07-17 14:25:46] [Rank 0] step:4221/10000 train_time:963016ms step_avg:228.15ms 
+[2025-07-17 14:25:46] [Rank 0] step:4221/10000 train_time:963016ms step_avg:228.15ms +[2025-07-17 14:25:51] [Rank 0] step:4241/10000 train_time:967761ms step_avg:228.19ms +[2025-07-17 14:25:51] [Rank 0] step:4241/10000 train_time:967761ms step_avg:228.19ms +[2025-07-17 14:25:58] [Rank 0] PRINT: step:4250/10000 val_loss:4.2032 train_time:971173ms step_avg:228.51ms +[2025-07-17 14:25:58] [Rank 0] PRINT: step:4250/10000 val_loss:4.2032 train_time:971173ms step_avg:228.51ms +[2025-07-17 14:26:00] [Rank 0] step:4261/10000 train_time:972502ms step_avg:228.23ms +[2025-07-17 14:26:00] [Rank 0] step:4261/10000 train_time:972502ms step_avg:228.23ms +[2025-07-17 14:26:05] [Rank 0] step:4281/10000 train_time:977241ms step_avg:228.27ms +[2025-07-17 14:26:05] [Rank 0] step:4281/10000 train_time:977241ms step_avg:228.27ms +[2025-07-17 14:26:10] [Rank 0] step:4301/10000 train_time:981978ms step_avg:228.31ms +[2025-07-17 14:26:10] [Rank 0] step:4301/10000 train_time:981978ms step_avg:228.31ms +[2025-07-17 14:26:14] [Rank 0] step:4321/10000 train_time:986719ms step_avg:228.35ms +[2025-07-17 14:26:14] [Rank 0] step:4321/10000 train_time:986719ms step_avg:228.35ms +[2025-07-17 14:26:19] [Rank 0] step:4341/10000 train_time:991459ms step_avg:228.39ms +[2025-07-17 14:26:19] [Rank 0] step:4341/10000 train_time:991459ms step_avg:228.39ms +[2025-07-17 14:26:24] [Rank 0] step:4361/10000 train_time:996198ms step_avg:228.43ms +[2025-07-17 14:26:24] [Rank 0] step:4361/10000 train_time:996198ms step_avg:228.43ms +[2025-07-17 14:26:32] [Rank 0] PRINT: step:4375/10000 val_loss:4.1596 train_time:1000783ms step_avg:228.75ms +[2025-07-17 14:26:32] [Rank 0] PRINT: step:4375/10000 val_loss:4.1596 train_time:1000783ms step_avg:228.75ms +[2025-07-17 14:26:33] [Rank 0] step:4381/10000 train_time:1000930ms step_avg:228.47ms +[2025-07-17 14:26:33] [Rank 0] step:4381/10000 train_time:1000930ms step_avg:228.47ms +[2025-07-17 14:26:38] [Rank 0] step:4401/10000 train_time:1005665ms step_avg:228.51ms 
+[2025-07-17 14:26:38] [Rank 0] step:4401/10000 train_time:1005665ms step_avg:228.51ms +[2025-07-17 14:26:43] [Rank 0] step:4421/10000 train_time:1010396ms step_avg:228.54ms +[2025-07-17 14:26:43] [Rank 0] step:4421/10000 train_time:1010396ms step_avg:228.54ms +[2025-07-17 14:26:47] [Rank 0] step:4441/10000 train_time:1015127ms step_avg:228.58ms +[2025-07-17 14:26:47] [Rank 0] step:4441/10000 train_time:1015127ms step_avg:228.58ms +[2025-07-17 14:26:52] [Rank 0] step:4461/10000 train_time:1019871ms step_avg:228.62ms +[2025-07-17 14:26:52] [Rank 0] step:4461/10000 train_time:1019871ms step_avg:228.62ms +[2025-07-17 14:26:57] [Rank 0] step:4481/10000 train_time:1024628ms step_avg:228.66ms +[2025-07-17 14:26:57] [Rank 0] step:4481/10000 train_time:1024628ms step_avg:228.66ms +[2025-07-17 14:27:06] [Rank 0] PRINT: step:4500/10000 val_loss:4.2209 train_time:1030424ms step_avg:228.98ms +[2025-07-17 14:27:06] [Rank 0] PRINT: step:4500/10000 val_loss:4.2209 train_time:1030424ms step_avg:228.98ms +[2025-07-17 14:27:06] [Rank 0] step:4501/10000 train_time:1030433ms step_avg:228.93ms +[2025-07-17 14:27:06] [Rank 0] step:4501/10000 train_time:1030433ms step_avg:228.93ms +[2025-07-17 14:27:11] [Rank 0] step:4521/10000 train_time:1034141ms step_avg:228.74ms +[2025-07-17 14:27:11] [Rank 0] step:4521/10000 train_time:1034141ms step_avg:228.74ms +[2025-07-17 14:27:16] [Rank 0] step:4541/10000 train_time:1038899ms step_avg:228.78ms +[2025-07-17 14:27:16] [Rank 0] step:4541/10000 train_time:1038899ms step_avg:228.78ms +[2025-07-17 14:27:21] [Rank 0] step:4561/10000 train_time:1043650ms step_avg:228.82ms +[2025-07-17 14:27:21] [Rank 0] step:4561/10000 train_time:1043650ms step_avg:228.82ms +[2025-07-17 14:27:25] [Rank 0] step:4581/10000 train_time:1048405ms step_avg:228.86ms +[2025-07-17 14:27:25] [Rank 0] step:4581/10000 train_time:1048405ms step_avg:228.86ms +[2025-07-17 14:27:30] [Rank 0] step:4601/10000 train_time:1053161ms step_avg:228.90ms +[2025-07-17 14:27:30] [Rank 0] 
step:4601/10000 train_time:1053161ms step_avg:228.90ms +[2025-07-17 14:27:35] [Rank 0] step:4621/10000 train_time:1057919ms step_avg:228.94ms +[2025-07-17 14:27:35] [Rank 0] step:4621/10000 train_time:1057919ms step_avg:228.94ms +[2025-07-17 14:27:40] [Rank 0] PRINT: step:4625/10000 val_loss:4.2304 train_time:1060152ms step_avg:229.22ms +[2025-07-17 14:27:40] [Rank 0] PRINT: step:4625/10000 val_loss:4.2304 train_time:1060152ms step_avg:229.22ms +[2025-07-17 14:27:44] [Rank 0] step:4641/10000 train_time:1062675ms step_avg:228.98ms +[2025-07-17 14:27:44] [Rank 0] step:4641/10000 train_time:1062675ms step_avg:228.98ms +[2025-07-17 14:27:49] [Rank 0] step:4661/10000 train_time:1067432ms step_avg:229.01ms +[2025-07-17 14:27:49] [Rank 0] step:4661/10000 train_time:1067432ms step_avg:229.01ms +[2025-07-17 14:27:54] [Rank 0] step:4681/10000 train_time:1072193ms step_avg:229.05ms +[2025-07-17 14:27:54] [Rank 0] step:4681/10000 train_time:1072193ms step_avg:229.05ms +[2025-07-17 14:27:58] [Rank 0] step:4701/10000 train_time:1076956ms step_avg:229.09ms +[2025-07-17 14:27:58] [Rank 0] step:4701/10000 train_time:1076956ms step_avg:229.09ms +[2025-07-17 14:28:03] [Rank 0] step:4721/10000 train_time:1081708ms step_avg:229.13ms +[2025-07-17 14:28:03] [Rank 0] step:4721/10000 train_time:1081708ms step_avg:229.13ms +[2025-07-17 14:28:08] [Rank 0] step:4741/10000 train_time:1086467ms step_avg:229.16ms +[2025-07-17 14:28:08] [Rank 0] step:4741/10000 train_time:1086467ms step_avg:229.16ms +[2025-07-17 14:28:15] [Rank 0] PRINT: step:4750/10000 val_loss:4.1792 train_time:1089891ms step_avg:229.45ms +[2025-07-17 14:28:15] [Rank 0] PRINT: step:4750/10000 val_loss:4.1792 train_time:1089891ms step_avg:229.45ms +[2025-07-17 14:28:17] [Rank 0] step:4761/10000 train_time:1091223ms step_avg:229.20ms +[2025-07-17 14:28:17] [Rank 0] step:4761/10000 train_time:1091223ms step_avg:229.20ms +[2025-07-17 14:28:22] [Rank 0] step:4781/10000 train_time:1096073ms step_avg:229.26ms +[2025-07-17 14:28:22] 
[Rank 0] step:4781/10000 train_time:1096073ms step_avg:229.26ms +[2025-07-17 14:28:27] [Rank 0] step:4801/10000 train_time:1100820ms step_avg:229.29ms +[2025-07-17 14:28:27] [Rank 0] step:4801/10000 train_time:1100820ms step_avg:229.29ms +[2025-07-17 14:28:32] [Rank 0] step:4821/10000 train_time:1105573ms step_avg:229.32ms +[2025-07-17 14:28:32] [Rank 0] step:4821/10000 train_time:1105573ms step_avg:229.32ms +[2025-07-17 14:28:36] [Rank 0] step:4841/10000 train_time:1110329ms step_avg:229.36ms +[2025-07-17 14:28:36] [Rank 0] step:4841/10000 train_time:1110329ms step_avg:229.36ms +[2025-07-17 14:28:41] [Rank 0] step:4861/10000 train_time:1115081ms step_avg:229.39ms +[2025-07-17 14:28:41] [Rank 0] step:4861/10000 train_time:1115081ms step_avg:229.39ms +[2025-07-17 14:28:49] [Rank 0] PRINT: step:4875/10000 val_loss:4.1737 train_time:1119687ms step_avg:229.68ms +[2025-07-17 14:28:49] [Rank 0] PRINT: step:4875/10000 val_loss:4.1737 train_time:1119687ms step_avg:229.68ms +[2025-07-17 14:28:51] [Rank 0] step:4881/10000 train_time:1119834ms step_avg:229.43ms +[2025-07-17 14:28:51] [Rank 0] step:4881/10000 train_time:1119834ms step_avg:229.43ms +[2025-07-17 14:28:55] [Rank 0] step:4901/10000 train_time:1124585ms step_avg:229.46ms +[2025-07-17 14:28:55] [Rank 0] step:4901/10000 train_time:1124585ms step_avg:229.46ms +[2025-07-17 14:29:00] [Rank 0] step:4921/10000 train_time:1129334ms step_avg:229.49ms +[2025-07-17 14:29:00] [Rank 0] step:4921/10000 train_time:1129334ms step_avg:229.49ms +[2025-07-17 14:29:05] [Rank 0] step:4941/10000 train_time:1134088ms step_avg:229.53ms +[2025-07-17 14:29:05] [Rank 0] step:4941/10000 train_time:1134088ms step_avg:229.53ms +[2025-07-17 14:29:10] [Rank 0] step:4961/10000 train_time:1138842ms step_avg:229.56ms +[2025-07-17 14:29:10] [Rank 0] step:4961/10000 train_time:1138842ms step_avg:229.56ms +[2025-07-17 14:29:14] [Rank 0] step:4981/10000 train_time:1143592ms step_avg:229.59ms +[2025-07-17 14:29:14] [Rank 0] step:4981/10000 
train_time:1143592ms step_avg:229.59ms +[2025-07-17 14:29:24] [Rank 0] PRINT: step:5000/10000 val_loss:4.2200 train_time:1149384ms step_avg:229.88ms +[2025-07-17 14:29:24] [Rank 0] PRINT: step:5000/10000 val_loss:4.2200 train_time:1149384ms step_avg:229.88ms +[2025-07-17 14:29:24] [Rank 0] step:5001/10000 train_time:1149393ms step_avg:229.83ms +[2025-07-17 14:29:24] [Rank 0] step:5001/10000 train_time:1149393ms step_avg:229.83ms +[2025-07-17 14:29:29] [Rank 0] step:5021/10000 train_time:1153099ms step_avg:229.66ms +[2025-07-17 14:29:29] [Rank 0] step:5021/10000 train_time:1153099ms step_avg:229.66ms +[2025-07-17 14:29:33] [Rank 0] step:5041/10000 train_time:1157858ms step_avg:229.69ms +[2025-07-17 14:29:33] [Rank 0] step:5041/10000 train_time:1157858ms step_avg:229.69ms +[2025-07-17 14:29:38] [Rank 0] step:5061/10000 train_time:1162617ms step_avg:229.72ms +[2025-07-17 14:29:38] [Rank 0] step:5061/10000 train_time:1162617ms step_avg:229.72ms +[2025-07-17 14:29:43] [Rank 0] step:5081/10000 train_time:1167374ms step_avg:229.75ms +[2025-07-17 14:29:43] [Rank 0] step:5081/10000 train_time:1167374ms step_avg:229.75ms +[2025-07-17 14:29:48] [Rank 0] step:5101/10000 train_time:1172128ms step_avg:229.78ms +[2025-07-17 14:29:48] [Rank 0] step:5101/10000 train_time:1172128ms step_avg:229.78ms +[2025-07-17 14:29:52] [Rank 0] step:5121/10000 train_time:1176882ms step_avg:229.81ms +[2025-07-17 14:29:52] [Rank 0] step:5121/10000 train_time:1176882ms step_avg:229.81ms +[2025-07-17 14:29:57] [Rank 0] PRINT: step:5125/10000 val_loss:4.2640 train_time:1179118ms step_avg:230.07ms +[2025-07-17 14:29:57] [Rank 0] PRINT: step:5125/10000 val_loss:4.2640 train_time:1179118ms step_avg:230.07ms +[2025-07-17 14:30:01] [Rank 0] step:5141/10000 train_time:1181639ms step_avg:229.85ms +[2025-07-17 14:30:01] [Rank 0] step:5141/10000 train_time:1181639ms step_avg:229.85ms +[2025-07-17 14:30:06] [Rank 0] step:5161/10000 train_time:1186399ms step_avg:229.88ms +[2025-07-17 14:30:06] [Rank 0] 
step:5161/10000 train_time:1186399ms step_avg:229.88ms +[2025-07-17 14:30:11] [Rank 0] step:5181/10000 train_time:1191163ms step_avg:229.91ms +[2025-07-17 14:30:11] [Rank 0] step:5181/10000 train_time:1191163ms step_avg:229.91ms +[2025-07-17 14:30:16] [Rank 0] step:5201/10000 train_time:1195956ms step_avg:229.95ms +[2025-07-17 14:30:16] [Rank 0] step:5201/10000 train_time:1195956ms step_avg:229.95ms +[2025-07-17 14:30:20] [Rank 0] step:5221/10000 train_time:1200789ms step_avg:229.99ms +[2025-07-17 14:30:20] [Rank 0] step:5221/10000 train_time:1200789ms step_avg:229.99ms +[2025-07-17 14:30:25] [Rank 0] step:5241/10000 train_time:1205620ms step_avg:230.04ms +[2025-07-17 14:30:25] [Rank 0] step:5241/10000 train_time:1205620ms step_avg:230.04ms +[2025-07-17 14:30:32] [Rank 0] PRINT: step:5250/10000 val_loss:3.9574 train_time:1209099ms step_avg:230.30ms +[2025-07-17 14:30:32] [Rank 0] PRINT: step:5250/10000 val_loss:3.9574 train_time:1209099ms step_avg:230.30ms +[2025-07-17 14:30:35] [Rank 0] step:5261/10000 train_time:1210453ms step_avg:230.08ms +[2025-07-17 14:30:35] [Rank 0] step:5261/10000 train_time:1210453ms step_avg:230.08ms +[2025-07-17 14:30:40] [Rank 0] step:5281/10000 train_time:1215280ms step_avg:230.12ms +[2025-07-17 14:30:40] [Rank 0] step:5281/10000 train_time:1215280ms step_avg:230.12ms +[2025-07-17 14:30:44] [Rank 0] step:5301/10000 train_time:1220103ms step_avg:230.16ms +[2025-07-17 14:30:44] [Rank 0] step:5301/10000 train_time:1220103ms step_avg:230.16ms +[2025-07-17 14:30:49] [Rank 0] step:5321/10000 train_time:1224927ms step_avg:230.21ms +[2025-07-17 14:30:49] [Rank 0] step:5321/10000 train_time:1224927ms step_avg:230.21ms +[2025-07-17 14:30:54] [Rank 0] step:5341/10000 train_time:1229756ms step_avg:230.25ms +[2025-07-17 14:30:54] [Rank 0] step:5341/10000 train_time:1229756ms step_avg:230.25ms +[2025-07-17 14:30:59] [Rank 0] step:5361/10000 train_time:1234582ms step_avg:230.29ms +[2025-07-17 14:30:59] [Rank 0] step:5361/10000 train_time:1234582ms 
step_avg:230.29ms +[2025-07-17 14:31:07] [Rank 0] PRINT: step:5375/10000 val_loss:3.9896 train_time:1239267ms step_avg:230.56ms +[2025-07-17 14:31:07] [Rank 0] PRINT: step:5375/10000 val_loss:3.9896 train_time:1239267ms step_avg:230.56ms +[2025-07-17 14:31:09] [Rank 0] step:5381/10000 train_time:1239417ms step_avg:230.33ms +[2025-07-17 14:31:09] [Rank 0] step:5381/10000 train_time:1239417ms step_avg:230.33ms +[2025-07-17 14:31:13] [Rank 0] step:5401/10000 train_time:1244256ms step_avg:230.38ms +[2025-07-17 14:31:13] [Rank 0] step:5401/10000 train_time:1244256ms step_avg:230.38ms +[2025-07-17 14:31:18] [Rank 0] step:5421/10000 train_time:1249094ms step_avg:230.42ms +[2025-07-17 14:31:18] [Rank 0] step:5421/10000 train_time:1249094ms step_avg:230.42ms +[2025-07-17 14:31:23] [Rank 0] step:5441/10000 train_time:1253923ms step_avg:230.46ms +[2025-07-17 14:31:23] [Rank 0] step:5441/10000 train_time:1253923ms step_avg:230.46ms +[2025-07-17 14:31:28] [Rank 0] step:5461/10000 train_time:1258758ms step_avg:230.50ms +[2025-07-17 14:31:28] [Rank 0] step:5461/10000 train_time:1258758ms step_avg:230.50ms +[2025-07-17 14:31:33] [Rank 0] step:5481/10000 train_time:1263607ms step_avg:230.54ms +[2025-07-17 14:31:33] [Rank 0] step:5481/10000 train_time:1263607ms step_avg:230.54ms +[2025-07-17 14:31:42] [Rank 0] PRINT: step:5500/10000 val_loss:4.1107 train_time:1269492ms step_avg:230.82ms +[2025-07-17 14:31:42] [Rank 0] PRINT: step:5500/10000 val_loss:4.1107 train_time:1269492ms step_avg:230.82ms +[2025-07-17 14:31:42] [Rank 0] step:5501/10000 train_time:1269500ms step_avg:230.78ms +[2025-07-17 14:31:42] [Rank 0] step:5501/10000 train_time:1269500ms step_avg:230.78ms +[2025-07-17 14:31:47] [Rank 0] step:5521/10000 train_time:1273263ms step_avg:230.62ms +[2025-07-17 14:31:47] [Rank 0] step:5521/10000 train_time:1273263ms step_avg:230.62ms +[2025-07-17 14:31:52] [Rank 0] step:5541/10000 train_time:1278100ms step_avg:230.66ms +[2025-07-17 14:31:52] [Rank 0] step:5541/10000 
train_time:1278100ms step_avg:230.66ms +[2025-07-17 14:31:57] [Rank 0] step:5561/10000 train_time:1282936ms step_avg:230.70ms +[2025-07-17 14:31:57] [Rank 0] step:5561/10000 train_time:1282936ms step_avg:230.70ms +[2025-07-17 14:32:02] [Rank 0] step:5581/10000 train_time:1287764ms step_avg:230.74ms +[2025-07-17 14:32:02] [Rank 0] step:5581/10000 train_time:1287764ms step_avg:230.74ms +[2025-07-17 14:32:06] [Rank 0] step:5601/10000 train_time:1292602ms step_avg:230.78ms +[2025-07-17 14:32:06] [Rank 0] step:5601/10000 train_time:1292602ms step_avg:230.78ms +[2025-07-17 14:32:11] [Rank 0] step:5621/10000 train_time:1297537ms step_avg:230.84ms +[2025-07-17 14:32:11] [Rank 0] step:5621/10000 train_time:1297537ms step_avg:230.84ms +[2025-07-17 14:32:17] [Rank 0] PRINT: step:5625/10000 val_loss:4.0797 train_time:1299806ms step_avg:231.08ms +[2025-07-17 14:32:17] [Rank 0] PRINT: step:5625/10000 val_loss:4.0797 train_time:1299806ms step_avg:231.08ms +[2025-07-17 14:32:21] [Rank 0] step:5641/10000 train_time:1302365ms step_avg:230.87ms +[2025-07-17 14:32:21] [Rank 0] step:5641/10000 train_time:1302365ms step_avg:230.87ms +[2025-07-17 14:32:26] [Rank 0] step:5661/10000 train_time:1307202ms step_avg:230.91ms +[2025-07-17 14:32:26] [Rank 0] step:5661/10000 train_time:1307202ms step_avg:230.91ms +[2025-07-17 14:32:31] [Rank 0] step:5681/10000 train_time:1312038ms step_avg:230.95ms +[2025-07-17 14:32:31] [Rank 0] step:5681/10000 train_time:1312038ms step_avg:230.95ms +[2025-07-17 14:32:35] [Rank 0] step:5701/10000 train_time:1316871ms step_avg:230.99ms +[2025-07-17 14:32:35] [Rank 0] step:5701/10000 train_time:1316871ms step_avg:230.99ms +[2025-07-17 14:32:40] [Rank 0] step:5721/10000 train_time:1321704ms step_avg:231.03ms +[2025-07-17 14:32:40] [Rank 0] step:5721/10000 train_time:1321704ms step_avg:231.03ms +[2025-07-17 14:32:45] [Rank 0] step:5741/10000 train_time:1326542ms step_avg:231.06ms +[2025-07-17 14:32:45] [Rank 0] step:5741/10000 train_time:1326542ms step_avg:231.06ms 
+[2025-07-17 14:32:52] [Rank 0] PRINT: step:5750/10000 val_loss:4.1364 train_time:1330018ms step_avg:231.31ms +[2025-07-17 14:32:52] [Rank 0] PRINT: step:5750/10000 val_loss:4.1364 train_time:1330018ms step_avg:231.31ms +[2025-07-17 14:32:55] [Rank 0] step:5761/10000 train_time:1331376ms step_avg:231.10ms +[2025-07-17 14:32:55] [Rank 0] step:5761/10000 train_time:1331376ms step_avg:231.10ms +[2025-07-17 14:32:59] [Rank 0] step:5781/10000 train_time:1336209ms step_avg:231.14ms +[2025-07-17 14:32:59] [Rank 0] step:5781/10000 train_time:1336209ms step_avg:231.14ms +[2025-07-17 14:33:04] [Rank 0] step:5801/10000 train_time:1341040ms step_avg:231.17ms +[2025-07-17 14:33:04] [Rank 0] step:5801/10000 train_time:1341040ms step_avg:231.17ms +[2025-07-17 14:33:09] [Rank 0] step:5821/10000 train_time:1345876ms step_avg:231.21ms +[2025-07-17 14:33:09] [Rank 0] step:5821/10000 train_time:1345876ms step_avg:231.21ms +[2025-07-17 14:33:14] [Rank 0] step:5841/10000 train_time:1350713ms step_avg:231.25ms +[2025-07-17 14:33:14] [Rank 0] step:5841/10000 train_time:1350713ms step_avg:231.25ms +[2025-07-17 14:33:19] [Rank 0] step:5861/10000 train_time:1355541ms step_avg:231.28ms +[2025-07-17 14:33:19] [Rank 0] step:5861/10000 train_time:1355541ms step_avg:231.28ms +[2025-07-17 14:33:27] [Rank 0] PRINT: step:5875/10000 val_loss:3.8920 train_time:1360220ms step_avg:231.53ms +[2025-07-17 14:33:27] [Rank 0] PRINT: step:5875/10000 val_loss:3.8920 train_time:1360220ms step_avg:231.53ms +[2025-07-17 14:33:28] [Rank 0] step:5881/10000 train_time:1360370ms step_avg:231.32ms +[2025-07-17 14:33:28] [Rank 0] step:5881/10000 train_time:1360370ms step_avg:231.32ms +[2025-07-17 14:33:33] [Rank 0] step:5901/10000 train_time:1365213ms step_avg:231.35ms +[2025-07-17 14:33:33] [Rank 0] step:5901/10000 train_time:1365213ms step_avg:231.35ms +[2025-07-17 14:33:38] [Rank 0] step:5921/10000 train_time:1370047ms step_avg:231.39ms +[2025-07-17 14:33:38] [Rank 0] step:5921/10000 train_time:1370047ms 
step_avg:231.39ms +[2025-07-17 14:33:43] [Rank 0] step:5941/10000 train_time:1374888ms step_avg:231.42ms +[2025-07-17 14:33:43] [Rank 0] step:5941/10000 train_time:1374888ms step_avg:231.42ms +[2025-07-17 14:33:48] [Rank 0] step:5961/10000 train_time:1379740ms step_avg:231.46ms +[2025-07-17 14:33:48] [Rank 0] step:5961/10000 train_time:1379740ms step_avg:231.46ms +[2025-07-17 14:33:53] [Rank 0] step:5981/10000 train_time:1384686ms step_avg:231.51ms +[2025-07-17 14:33:53] [Rank 0] step:5981/10000 train_time:1384686ms step_avg:231.51ms +[2025-07-17 14:34:01] [Rank 0] PRINT: step:6000/10000 val_loss:4.0516 train_time:1390603ms step_avg:231.77ms +[2025-07-17 14:34:01] [Rank 0] PRINT: step:6000/10000 val_loss:4.0516 train_time:1390603ms step_avg:231.77ms +[2025-07-17 14:34:02] [Rank 0] step:6001/10000 train_time:1390612ms step_avg:231.73ms +[2025-07-17 14:34:02] [Rank 0] step:6001/10000 train_time:1390612ms step_avg:231.73ms +[2025-07-17 14:34:07] [Rank 0] step:6021/10000 train_time:1394393ms step_avg:231.59ms +[2025-07-17 14:34:07] [Rank 0] step:6021/10000 train_time:1394393ms step_avg:231.59ms +[2025-07-17 14:34:11] [Rank 0] step:6041/10000 train_time:1399247ms step_avg:231.63ms +[2025-07-17 14:34:11] [Rank 0] step:6041/10000 train_time:1399247ms step_avg:231.63ms +[2025-07-17 14:34:16] [Rank 0] step:6061/10000 train_time:1404101ms step_avg:231.66ms +[2025-07-17 14:34:16] [Rank 0] step:6061/10000 train_time:1404101ms step_avg:231.66ms +[2025-07-17 14:34:21] [Rank 0] step:6081/10000 train_time:1408959ms step_avg:231.70ms +[2025-07-17 14:34:21] [Rank 0] step:6081/10000 train_time:1408959ms step_avg:231.70ms +[2025-07-17 14:34:26] [Rank 0] step:6101/10000 train_time:1413808ms step_avg:231.73ms +[2025-07-17 14:34:26] [Rank 0] step:6101/10000 train_time:1413808ms step_avg:231.73ms +[2025-07-17 14:34:31] [Rank 0] step:6121/10000 train_time:1418674ms step_avg:231.77ms +[2025-07-17 14:34:31] [Rank 0] step:6121/10000 train_time:1418674ms step_avg:231.77ms +[2025-07-17 
14:34:37] [Rank 0] PRINT: step:6125/10000 val_loss:4.0572 train_time:1420951ms step_avg:231.99ms +[2025-07-17 14:34:37] [Rank 0] PRINT: step:6125/10000 val_loss:4.0572 train_time:1420951ms step_avg:231.99ms +[2025-07-17 14:34:41] [Rank 0] step:6141/10000 train_time:1423514ms step_avg:231.80ms +[2025-07-17 14:34:41] [Rank 0] step:6141/10000 train_time:1423514ms step_avg:231.80ms +[2025-07-17 14:34:45] [Rank 0] step:6161/10000 train_time:1428362ms step_avg:231.84ms +[2025-07-17 14:34:45] [Rank 0] step:6161/10000 train_time:1428362ms step_avg:231.84ms +[2025-07-17 14:34:50] [Rank 0] step:6181/10000 train_time:1433218ms step_avg:231.87ms +[2025-07-17 14:34:50] [Rank 0] step:6181/10000 train_time:1433218ms step_avg:231.87ms +[2025-07-17 14:34:55] [Rank 0] step:6201/10000 train_time:1438070ms step_avg:231.91ms +[2025-07-17 14:34:55] [Rank 0] step:6201/10000 train_time:1438070ms step_avg:231.91ms +[2025-07-17 14:35:00] [Rank 0] step:6221/10000 train_time:1442932ms step_avg:231.95ms +[2025-07-17 14:35:00] [Rank 0] step:6221/10000 train_time:1442932ms step_avg:231.95ms +[2025-07-17 14:35:05] [Rank 0] step:6241/10000 train_time:1447792ms step_avg:231.98ms +[2025-07-17 14:35:05] [Rank 0] step:6241/10000 train_time:1447792ms step_avg:231.98ms +[2025-07-17 14:35:11] [Rank 0] PRINT: step:6250/10000 val_loss:4.7226 train_time:1451281ms step_avg:232.20ms +[2025-07-17 14:35:11] [Rank 0] PRINT: step:6250/10000 val_loss:4.7226 train_time:1451281ms step_avg:232.20ms +[2025-07-17 14:35:14] [Rank 0] step:6261/10000 train_time:1452636ms step_avg:232.01ms +[2025-07-17 14:35:14] [Rank 0] step:6261/10000 train_time:1452636ms step_avg:232.01ms +[2025-07-17 14:35:19] [Rank 0] step:6281/10000 train_time:1457488ms step_avg:232.05ms +[2025-07-17 14:35:19] [Rank 0] step:6281/10000 train_time:1457488ms step_avg:232.05ms +[2025-07-17 14:35:24] [Rank 0] step:6301/10000 train_time:1462338ms step_avg:232.08ms +[2025-07-17 14:35:24] [Rank 0] step:6301/10000 train_time:1462338ms step_avg:232.08ms 
+[2025-07-17 14:35:29] [Rank 0] step:6321/10000 train_time:1467189ms step_avg:232.11ms +[2025-07-17 14:35:29] [Rank 0] step:6321/10000 train_time:1467189ms step_avg:232.11ms +[2025-07-17 14:35:33] [Rank 0] step:6341/10000 train_time:1472048ms step_avg:232.15ms +[2025-07-17 14:35:33] [Rank 0] step:6341/10000 train_time:1472048ms step_avg:232.15ms +[2025-07-17 14:35:38] [Rank 0] step:6361/10000 train_time:1476896ms step_avg:232.18ms +[2025-07-17 14:35:38] [Rank 0] step:6361/10000 train_time:1476896ms step_avg:232.18ms +[2025-07-17 14:35:46] [Rank 0] PRINT: step:6375/10000 val_loss:4.1850 train_time:1481596ms step_avg:232.41ms +[2025-07-17 14:35:46] [Rank 0] PRINT: step:6375/10000 val_loss:4.1850 train_time:1481596ms step_avg:232.41ms +[2025-07-17 14:35:48] [Rank 0] step:6381/10000 train_time:1481746ms step_avg:232.21ms +[2025-07-17 14:35:48] [Rank 0] step:6381/10000 train_time:1481746ms step_avg:232.21ms +[2025-07-17 14:35:53] [Rank 0] step:6401/10000 train_time:1486673ms step_avg:232.26ms +[2025-07-17 14:35:53] [Rank 0] step:6401/10000 train_time:1486673ms step_avg:232.26ms +[2025-07-17 14:35:58] [Rank 0] step:6421/10000 train_time:1491518ms step_avg:232.29ms +[2025-07-17 14:35:58] [Rank 0] step:6421/10000 train_time:1491518ms step_avg:232.29ms +[2025-07-17 14:36:02] [Rank 0] step:6441/10000 train_time:1496356ms step_avg:232.32ms +[2025-07-17 14:36:02] [Rank 0] step:6441/10000 train_time:1496356ms step_avg:232.32ms +[2025-07-17 14:36:07] [Rank 0] step:6461/10000 train_time:1501210ms step_avg:232.35ms +[2025-07-17 14:36:07] [Rank 0] step:6461/10000 train_time:1501210ms step_avg:232.35ms +[2025-07-17 14:36:12] [Rank 0] step:6481/10000 train_time:1506058ms step_avg:232.38ms +[2025-07-17 14:36:12] [Rank 0] step:6481/10000 train_time:1506058ms step_avg:232.38ms +[2025-07-17 14:36:21] [Rank 0] PRINT: step:6500/10000 val_loss:4.1358 train_time:1511961ms step_avg:232.61ms +[2025-07-17 14:36:21] [Rank 0] PRINT: step:6500/10000 val_loss:4.1358 train_time:1511961ms 
step_avg:232.61ms +[2025-07-17 14:36:22] [Rank 0] step:6501/10000 train_time:1511971ms step_avg:232.58ms +[2025-07-17 14:36:22] [Rank 0] step:6501/10000 train_time:1511971ms step_avg:232.58ms +[2025-07-17 14:36:27] [Rank 0] step:6521/10000 train_time:1515745ms step_avg:232.44ms +[2025-07-17 14:36:27] [Rank 0] step:6521/10000 train_time:1515745ms step_avg:232.44ms +[2025-07-17 14:36:31] [Rank 0] step:6541/10000 train_time:1520688ms step_avg:232.49ms +[2025-07-17 14:36:31] [Rank 0] step:6541/10000 train_time:1520688ms step_avg:232.49ms +[2025-07-17 14:36:36] [Rank 0] step:6561/10000 train_time:1525537ms step_avg:232.52ms +[2025-07-17 14:36:36] [Rank 0] step:6561/10000 train_time:1525537ms step_avg:232.52ms +[2025-07-17 14:36:41] [Rank 0] step:6581/10000 train_time:1530392ms step_avg:232.55ms +[2025-07-17 14:36:41] [Rank 0] step:6581/10000 train_time:1530392ms step_avg:232.55ms +[2025-07-17 14:36:46] [Rank 0] step:6601/10000 train_time:1535245ms step_avg:232.58ms +[2025-07-17 14:36:46] [Rank 0] step:6601/10000 train_time:1535245ms step_avg:232.58ms +[2025-07-17 14:36:51] [Rank 0] step:6621/10000 train_time:1540094ms step_avg:232.61ms +[2025-07-17 14:36:51] [Rank 0] step:6621/10000 train_time:1540094ms step_avg:232.61ms +[2025-07-17 14:36:56] [Rank 0] PRINT: step:6625/10000 val_loss:4.1324 train_time:1542364ms step_avg:232.81ms +[2025-07-17 14:36:56] [Rank 0] PRINT: step:6625/10000 val_loss:4.1324 train_time:1542364ms step_avg:232.81ms +[2025-07-17 14:37:00] [Rank 0] step:6641/10000 train_time:1544933ms step_avg:232.64ms +[2025-07-17 14:37:00] [Rank 0] step:6641/10000 train_time:1544933ms step_avg:232.64ms +[2025-07-17 14:37:05] [Rank 0] step:6661/10000 train_time:1549776ms step_avg:232.66ms +[2025-07-17 14:37:05] [Rank 0] step:6661/10000 train_time:1549776ms step_avg:232.66ms +[2025-07-17 14:37:10] [Rank 0] step:6681/10000 train_time:1554663ms step_avg:232.70ms +[2025-07-17 14:37:10] [Rank 0] step:6681/10000 train_time:1554663ms step_avg:232.70ms +[2025-07-17 
14:37:15] [Rank 0] step:6701/10000 train_time:1559571ms step_avg:232.74ms +[2025-07-17 14:37:15] [Rank 0] step:6701/10000 train_time:1559571ms step_avg:232.74ms +[2025-07-17 14:37:20] [Rank 0] step:6721/10000 train_time:1564601ms step_avg:232.79ms +[2025-07-17 14:37:20] [Rank 0] step:6721/10000 train_time:1564601ms step_avg:232.79ms +[2025-07-17 14:37:25] [Rank 0] step:6741/10000 train_time:1569519ms step_avg:232.83ms +[2025-07-17 14:37:25] [Rank 0] step:6741/10000 train_time:1569519ms step_avg:232.83ms +[2025-07-17 14:37:32] [Rank 0] PRINT: step:6750/10000 val_loss:4.1421 train_time:1573060ms step_avg:233.05ms +[2025-07-17 14:37:32] [Rank 0] PRINT: step:6750/10000 val_loss:4.1421 train_time:1573060ms step_avg:233.05ms +[2025-07-17 14:37:34] [Rank 0] step:6761/10000 train_time:1574436ms step_avg:232.87ms +[2025-07-17 14:37:34] [Rank 0] step:6761/10000 train_time:1574436ms step_avg:232.87ms +[2025-07-17 14:37:39] [Rank 0] step:6781/10000 train_time:1579345ms step_avg:232.91ms +[2025-07-17 14:37:39] [Rank 0] step:6781/10000 train_time:1579345ms step_avg:232.91ms +[2025-07-17 14:37:44] [Rank 0] step:6801/10000 train_time:1584264ms step_avg:232.95ms +[2025-07-17 14:37:44] [Rank 0] step:6801/10000 train_time:1584264ms step_avg:232.95ms +[2025-07-17 14:37:49] [Rank 0] step:6821/10000 train_time:1589186ms step_avg:232.98ms +[2025-07-17 14:37:49] [Rank 0] step:6821/10000 train_time:1589186ms step_avg:232.98ms +[2025-07-17 14:37:54] [Rank 0] step:6841/10000 train_time:1594104ms step_avg:233.02ms +[2025-07-17 14:37:54] [Rank 0] step:6841/10000 train_time:1594104ms step_avg:233.02ms +[2025-07-17 14:37:59] [Rank 0] step:6861/10000 train_time:1599014ms step_avg:233.06ms +[2025-07-17 14:37:59] [Rank 0] step:6861/10000 train_time:1599014ms step_avg:233.06ms +[2025-07-17 14:38:07] [Rank 0] PRINT: step:6875/10000 val_loss:4.0962 train_time:1603771ms step_avg:233.28ms +[2025-07-17 14:38:07] [Rank 0] PRINT: step:6875/10000 val_loss:4.0962 train_time:1603771ms step_avg:233.28ms 
+[2025-07-17 14:38:09] [Rank 0] step:6881/10000 train_time:1603921ms step_avg:233.09ms +[2025-07-17 14:38:09] [Rank 0] step:6881/10000 train_time:1603921ms step_avg:233.09ms +[2025-07-17 14:38:13] [Rank 0] step:6901/10000 train_time:1608829ms step_avg:233.13ms +[2025-07-17 14:38:13] [Rank 0] step:6901/10000 train_time:1608829ms step_avg:233.13ms +[2025-07-17 14:38:18] [Rank 0] step:6921/10000 train_time:1613748ms step_avg:233.17ms +[2025-07-17 14:38:18] [Rank 0] step:6921/10000 train_time:1613748ms step_avg:233.17ms +[2025-07-17 14:38:23] [Rank 0] step:6941/10000 train_time:1618676ms step_avg:233.21ms +[2025-07-17 14:38:23] [Rank 0] step:6941/10000 train_time:1618676ms step_avg:233.21ms +[2025-07-17 14:38:28] [Rank 0] step:6961/10000 train_time:1623593ms step_avg:233.24ms +[2025-07-17 14:38:28] [Rank 0] step:6961/10000 train_time:1623593ms step_avg:233.24ms +[2025-07-17 14:38:33] [Rank 0] step:6981/10000 train_time:1628619ms step_avg:233.29ms +[2025-07-17 14:38:33] [Rank 0] step:6981/10000 train_time:1628619ms step_avg:233.29ms +[2025-07-17 14:38:43] [Rank 0] PRINT: step:7000/10000 val_loss:4.0260 train_time:1634618ms step_avg:233.52ms +[2025-07-17 14:38:43] [Rank 0] PRINT: step:7000/10000 val_loss:4.0260 train_time:1634618ms step_avg:233.52ms +[2025-07-17 14:38:43] [Rank 0] step:7001/10000 train_time:1634627ms step_avg:233.48ms +[2025-07-17 14:38:43] [Rank 0] step:7001/10000 train_time:1634627ms step_avg:233.48ms +[2025-07-17 14:38:48] [Rank 0] step:7021/10000 train_time:1638462ms step_avg:233.37ms +[2025-07-17 14:38:48] [Rank 0] step:7021/10000 train_time:1638462ms step_avg:233.37ms +[2025-07-17 14:38:53] [Rank 0] step:7041/10000 train_time:1643380ms step_avg:233.40ms +[2025-07-17 14:38:53] [Rank 0] step:7041/10000 train_time:1643380ms step_avg:233.40ms +[2025-07-17 14:38:58] [Rank 0] step:7061/10000 train_time:1648295ms step_avg:233.44ms +[2025-07-17 14:38:58] [Rank 0] step:7061/10000 train_time:1648295ms step_avg:233.44ms +[2025-07-17 14:39:03] [Rank 0] 
step:7081/10000 train_time:1653206ms step_avg:233.47ms +[2025-07-17 14:39:03] [Rank 0] step:7081/10000 train_time:1653206ms step_avg:233.47ms +[2025-07-17 14:39:08] [Rank 0] step:7101/10000 train_time:1658123ms step_avg:233.51ms +[2025-07-17 14:39:08] [Rank 0] step:7101/10000 train_time:1658123ms step_avg:233.51ms +[2025-07-17 14:39:12] [Rank 0] step:7121/10000 train_time:1663043ms step_avg:233.54ms +[2025-07-17 14:39:12] [Rank 0] step:7121/10000 train_time:1663043ms step_avg:233.54ms +[2025-07-17 14:39:18] [Rank 0] PRINT: step:7125/10000 val_loss:4.1635 train_time:1665345ms step_avg:233.73ms +[2025-07-17 14:39:18] [Rank 0] PRINT: step:7125/10000 val_loss:4.1635 train_time:1665345ms step_avg:233.73ms +[2025-07-17 14:39:22] [Rank 0] step:7141/10000 train_time:1667951ms step_avg:233.57ms +[2025-07-17 14:39:22] [Rank 0] step:7141/10000 train_time:1667951ms step_avg:233.57ms +[2025-07-17 14:39:27] [Rank 0] step:7161/10000 train_time:1672860ms step_avg:233.61ms +[2025-07-17 14:39:27] [Rank 0] step:7161/10000 train_time:1672860ms step_avg:233.61ms +[2025-07-17 14:39:32] [Rank 0] step:7181/10000 train_time:1677758ms step_avg:233.64ms +[2025-07-17 14:39:32] [Rank 0] step:7181/10000 train_time:1677758ms step_avg:233.64ms +[2025-07-17 14:39:37] [Rank 0] step:7201/10000 train_time:1682677ms step_avg:233.67ms +[2025-07-17 14:39:37] [Rank 0] step:7201/10000 train_time:1682677ms step_avg:233.67ms +[2025-07-17 14:39:42] [Rank 0] step:7221/10000 train_time:1687583ms step_avg:233.70ms +[2025-07-17 14:39:42] [Rank 0] step:7221/10000 train_time:1687583ms step_avg:233.70ms +[2025-07-17 14:39:47] [Rank 0] step:7241/10000 train_time:1692473ms step_avg:233.73ms +[2025-07-17 14:39:47] [Rank 0] step:7241/10000 train_time:1692473ms step_avg:233.73ms +[2025-07-17 14:39:53] [Rank 0] PRINT: step:7250/10000 val_loss:4.0750 train_time:1696006ms step_avg:233.93ms +[2025-07-17 14:39:53] [Rank 0] PRINT: step:7250/10000 val_loss:4.0750 train_time:1696006ms step_avg:233.93ms +[2025-07-17 14:39:56] 
[Rank 0] step:7261/10000 train_time:1697374ms step_avg:233.77ms +[2025-07-17 14:39:56] [Rank 0] step:7261/10000 train_time:1697374ms step_avg:233.77ms +[2025-07-17 14:40:01] [Rank 0] step:7281/10000 train_time:1702270ms step_avg:233.80ms +[2025-07-17 14:40:01] [Rank 0] step:7281/10000 train_time:1702270ms step_avg:233.80ms +[2025-07-17 14:40:06] [Rank 0] step:7301/10000 train_time:1707169ms step_avg:233.83ms +[2025-07-17 14:40:06] [Rank 0] step:7301/10000 train_time:1707169ms step_avg:233.83ms +[2025-07-17 14:40:11] [Rank 0] step:7321/10000 train_time:1712082ms step_avg:233.86ms +[2025-07-17 14:40:11] [Rank 0] step:7321/10000 train_time:1712082ms step_avg:233.86ms +[2025-07-17 14:40:16] [Rank 0] step:7341/10000 train_time:1716975ms step_avg:233.89ms +[2025-07-17 14:40:16] [Rank 0] step:7341/10000 train_time:1716975ms step_avg:233.89ms +[2025-07-17 14:40:21] [Rank 0] step:7361/10000 train_time:1721890ms step_avg:233.92ms +[2025-07-17 14:40:21] [Rank 0] step:7361/10000 train_time:1721890ms step_avg:233.92ms +[2025-07-17 14:40:29] [Rank 0] PRINT: step:7375/10000 val_loss:4.1777 train_time:1726644ms step_avg:234.12ms +[2025-07-17 14:40:29] [Rank 0] PRINT: step:7375/10000 val_loss:4.1777 train_time:1726644ms step_avg:234.12ms +[2025-07-17 14:40:30] [Rank 0] step:7381/10000 train_time:1726795ms step_avg:233.95ms +[2025-07-17 14:40:30] [Rank 0] step:7381/10000 train_time:1726795ms step_avg:233.95ms +[2025-07-17 14:40:35] [Rank 0] step:7401/10000 train_time:1731703ms step_avg:233.98ms +[2025-07-17 14:40:35] [Rank 0] step:7401/10000 train_time:1731703ms step_avg:233.98ms +[2025-07-17 14:40:40] [Rank 0] step:7421/10000 train_time:1736606ms step_avg:234.01ms +[2025-07-17 14:40:40] [Rank 0] step:7421/10000 train_time:1736606ms step_avg:234.01ms +[2025-07-17 14:40:45] [Rank 0] step:7441/10000 train_time:1741529ms step_avg:234.04ms +[2025-07-17 14:40:45] [Rank 0] step:7441/10000 train_time:1741529ms step_avg:234.04ms +[2025-07-17 14:40:50] [Rank 0] step:7461/10000 
train_time:1746444ms step_avg:234.08ms +[2025-07-17 14:40:50] [Rank 0] step:7461/10000 train_time:1746444ms step_avg:234.08ms +[2025-07-17 14:40:55] [Rank 0] step:7481/10000 train_time:1751365ms step_avg:234.11ms +[2025-07-17 14:40:55] [Rank 0] step:7481/10000 train_time:1751365ms step_avg:234.11ms +[2025-07-17 14:41:04] [Rank 0] PRINT: step:7500/10000 val_loss:4.1153 train_time:1757376ms step_avg:234.32ms +[2025-07-17 14:41:04] [Rank 0] PRINT: step:7500/10000 val_loss:4.1153 train_time:1757376ms step_avg:234.32ms +[2025-07-17 14:41:05] [Rank 0] step:7501/10000 train_time:1757386ms step_avg:234.29ms +[2025-07-17 14:41:05] [Rank 0] step:7501/10000 train_time:1757386ms step_avg:234.29ms +[2025-07-17 14:41:09] [Rank 0] step:7521/10000 train_time:1761218ms step_avg:234.17ms +[2025-07-17 14:41:09] [Rank 0] step:7521/10000 train_time:1761218ms step_avg:234.17ms +[2025-07-17 14:41:14] [Rank 0] step:7541/10000 train_time:1766122ms step_avg:234.20ms +[2025-07-17 14:41:14] [Rank 0] step:7541/10000 train_time:1766122ms step_avg:234.20ms +[2025-07-17 14:41:19] [Rank 0] step:7561/10000 train_time:1771027ms step_avg:234.23ms +[2025-07-17 14:41:19] [Rank 0] step:7561/10000 train_time:1771027ms step_avg:234.23ms +[2025-07-17 14:41:24] [Rank 0] step:7581/10000 train_time:1775954ms step_avg:234.26ms +[2025-07-17 14:41:24] [Rank 0] step:7581/10000 train_time:1775954ms step_avg:234.26ms +[2025-07-17 14:41:29] [Rank 0] step:7601/10000 train_time:1780868ms step_avg:234.29ms +[2025-07-17 14:41:29] [Rank 0] step:7601/10000 train_time:1780868ms step_avg:234.29ms +[2025-07-17 14:41:34] [Rank 0] step:7621/10000 train_time:1785806ms step_avg:234.33ms +[2025-07-17 14:41:34] [Rank 0] step:7621/10000 train_time:1785806ms step_avg:234.33ms +[2025-07-17 14:41:40] [Rank 0] PRINT: step:7625/10000 val_loss:4.1707 train_time:1788120ms step_avg:234.51ms +[2025-07-17 14:41:40] [Rank 0] PRINT: step:7625/10000 val_loss:4.1707 train_time:1788120ms step_avg:234.51ms +[2025-07-17 14:41:44] [Rank 0] 
step:7641/10000 train_time:1790731ms step_avg:234.36ms +[2025-07-17 14:41:44] [Rank 0] step:7641/10000 train_time:1790731ms step_avg:234.36ms +[2025-07-17 14:41:49] [Rank 0] step:7661/10000 train_time:1795661ms step_avg:234.39ms +[2025-07-17 14:41:49] [Rank 0] step:7661/10000 train_time:1795661ms step_avg:234.39ms +[2025-07-17 14:41:54] [Rank 0] step:7681/10000 train_time:1800600ms step_avg:234.42ms +[2025-07-17 14:41:54] [Rank 0] step:7681/10000 train_time:1800600ms step_avg:234.42ms +[2025-07-17 14:41:59] [Rank 0] step:7701/10000 train_time:1805519ms step_avg:234.45ms +[2025-07-17 14:41:59] [Rank 0] step:7701/10000 train_time:1805519ms step_avg:234.45ms +[2025-07-17 14:42:03] [Rank 0] step:7721/10000 train_time:1810429ms step_avg:234.48ms +[2025-07-17 14:42:03] [Rank 0] step:7721/10000 train_time:1810429ms step_avg:234.48ms +[2025-07-17 14:42:08] [Rank 0] step:7741/10000 train_time:1815353ms step_avg:234.51ms +[2025-07-17 14:42:08] [Rank 0] step:7741/10000 train_time:1815353ms step_avg:234.51ms +[2025-07-17 14:42:15] [Rank 0] PRINT: step:7750/10000 val_loss:4.1755 train_time:1819002ms step_avg:234.71ms +[2025-07-17 14:42:15] [Rank 0] PRINT: step:7750/10000 val_loss:4.1755 train_time:1819002ms step_avg:234.71ms +[2025-07-17 14:42:18] [Rank 0] step:7761/10000 train_time:1820377ms step_avg:234.55ms +[2025-07-17 14:42:18] [Rank 0] step:7761/10000 train_time:1820377ms step_avg:234.55ms +[2025-07-17 14:42:23] [Rank 0] step:7781/10000 train_time:1825306ms step_avg:234.58ms +[2025-07-17 14:42:23] [Rank 0] step:7781/10000 train_time:1825306ms step_avg:234.58ms +[2025-07-17 14:42:28] [Rank 0] step:7801/10000 train_time:1830214ms step_avg:234.61ms +[2025-07-17 14:42:28] [Rank 0] step:7801/10000 train_time:1830214ms step_avg:234.61ms +[2025-07-17 14:42:32] [Rank 0] step:7821/10000 train_time:1835133ms step_avg:234.64ms +[2025-07-17 14:42:32] [Rank 0] step:7821/10000 train_time:1835133ms step_avg:234.64ms +[2025-07-17 14:42:37] [Rank 0] step:7841/10000 train_time:1840055ms 
step_avg:234.67ms +[2025-07-17 14:42:37] [Rank 0] step:7841/10000 train_time:1840055ms step_avg:234.67ms +[2025-07-17 14:42:42] [Rank 0] step:7861/10000 train_time:1844958ms step_avg:234.70ms +[2025-07-17 14:42:42] [Rank 0] step:7861/10000 train_time:1844958ms step_avg:234.70ms +[2025-07-17 14:42:50] [Rank 0] PRINT: step:7875/10000 val_loss:4.2158 train_time:1849714ms step_avg:234.88ms +[2025-07-17 14:42:50] [Rank 0] PRINT: step:7875/10000 val_loss:4.2158 train_time:1849714ms step_avg:234.88ms +[2025-07-17 14:42:52] [Rank 0] step:7881/10000 train_time:1849864ms step_avg:234.72ms +[2025-07-17 14:42:52] [Rank 0] step:7881/10000 train_time:1849864ms step_avg:234.72ms +[2025-07-17 14:42:57] [Rank 0] step:7901/10000 train_time:1854772ms step_avg:234.75ms +[2025-07-17 14:42:57] [Rank 0] step:7901/10000 train_time:1854772ms step_avg:234.75ms +[2025-07-17 14:43:02] [Rank 0] step:7921/10000 train_time:1859678ms step_avg:234.78ms +[2025-07-17 14:43:02] [Rank 0] step:7921/10000 train_time:1859678ms step_avg:234.78ms +[2025-07-17 14:43:07] [Rank 0] step:7941/10000 train_time:1864601ms step_avg:234.81ms +[2025-07-17 14:43:07] [Rank 0] step:7941/10000 train_time:1864601ms step_avg:234.81ms +[2025-07-17 14:43:12] [Rank 0] step:7961/10000 train_time:1869541ms step_avg:234.84ms +[2025-07-17 14:43:12] [Rank 0] step:7961/10000 train_time:1869541ms step_avg:234.84ms +[2025-07-17 14:43:17] [Rank 0] step:7981/10000 train_time:1874458ms step_avg:234.87ms +[2025-07-17 14:43:17] [Rank 0] step:7981/10000 train_time:1874458ms step_avg:234.87ms +[2025-07-17 14:43:26] [Rank 0] PRINT: step:8000/10000 val_loss:4.2387 train_time:1880467ms step_avg:235.06ms +[2025-07-17 14:43:26] [Rank 0] PRINT: step:8000/10000 val_loss:4.2387 train_time:1880467ms step_avg:235.06ms +[2025-07-17 14:43:26] [Rank 0] step:8001/10000 train_time:1880477ms step_avg:235.03ms +[2025-07-17 14:43:26] [Rank 0] step:8001/10000 train_time:1880477ms step_avg:235.03ms +[2025-07-17 14:43:31] [Rank 0] step:8021/10000 
train_time:1884307ms step_avg:234.92ms +[2025-07-17 14:43:31] [Rank 0] step:8021/10000 train_time:1884307ms step_avg:234.92ms +[2025-07-17 14:43:36] [Rank 0] step:8041/10000 train_time:1889235ms step_avg:234.95ms +[2025-07-17 14:43:36] [Rank 0] step:8041/10000 train_time:1889235ms step_avg:234.95ms +[2025-07-17 14:43:41] [Rank 0] step:8061/10000 train_time:1894158ms step_avg:234.98ms +[2025-07-17 14:43:41] [Rank 0] step:8061/10000 train_time:1894158ms step_avg:234.98ms +[2025-07-17 14:43:46] [Rank 0] step:8081/10000 train_time:1899074ms step_avg:235.00ms +[2025-07-17 14:43:46] [Rank 0] step:8081/10000 train_time:1899074ms step_avg:235.00ms +[2025-07-17 14:43:51] [Rank 0] step:8101/10000 train_time:1903981ms step_avg:235.03ms +[2025-07-17 14:43:51] [Rank 0] step:8101/10000 train_time:1903981ms step_avg:235.03ms +[2025-07-17 14:43:56] [Rank 0] step:8121/10000 train_time:1908913ms step_avg:235.06ms +[2025-07-17 14:43:56] [Rank 0] step:8121/10000 train_time:1908913ms step_avg:235.06ms +[2025-07-17 14:44:01] [Rank 0] PRINT: step:8125/10000 val_loss:4.2449 train_time:1911219ms step_avg:235.23ms +[2025-07-17 14:44:01] [Rank 0] PRINT: step:8125/10000 val_loss:4.2449 train_time:1911219ms step_avg:235.23ms +[2025-07-17 14:44:05] [Rank 0] step:8141/10000 train_time:1913829ms step_avg:235.09ms +[2025-07-17 14:44:05] [Rank 0] step:8141/10000 train_time:1913829ms step_avg:235.09ms +[2025-07-17 14:44:10] [Rank 0] step:8161/10000 train_time:1918875ms step_avg:235.13ms +[2025-07-17 14:44:10] [Rank 0] step:8161/10000 train_time:1918875ms step_avg:235.13ms +[2025-07-17 14:44:15] [Rank 0] step:8181/10000 train_time:1923860ms step_avg:235.16ms +[2025-07-17 14:44:15] [Rank 0] step:8181/10000 train_time:1923860ms step_avg:235.16ms +[2025-07-17 14:44:20] [Rank 0] step:8201/10000 train_time:1928817ms step_avg:235.19ms +[2025-07-17 14:44:20] [Rank 0] step:8201/10000 train_time:1928817ms step_avg:235.19ms +[2025-07-17 14:44:25] [Rank 0] step:8221/10000 train_time:1933802ms step_avg:235.23ms 
+[2025-07-17 14:44:25] [Rank 0] step:8221/10000 train_time:1933802ms step_avg:235.23ms +[2025-07-17 14:44:30] [Rank 0] step:8241/10000 train_time:1938772ms step_avg:235.26ms +[2025-07-17 14:44:30] [Rank 0] step:8241/10000 train_time:1938772ms step_avg:235.26ms +[2025-07-17 14:44:37] [Rank 0] PRINT: step:8250/10000 val_loss:4.1349 train_time:1942362ms step_avg:235.44ms +[2025-07-17 14:44:37] [Rank 0] PRINT: step:8250/10000 val_loss:4.1349 train_time:1942362ms step_avg:235.44ms +[2025-07-17 14:44:40] [Rank 0] step:8261/10000 train_time:1943759ms step_avg:235.29ms +[2025-07-17 14:44:40] [Rank 0] step:8261/10000 train_time:1943759ms step_avg:235.29ms +[2025-07-17 14:44:45] [Rank 0] step:8281/10000 train_time:1948756ms step_avg:235.33ms +[2025-07-17 14:44:45] [Rank 0] step:8281/10000 train_time:1948756ms step_avg:235.33ms +[2025-07-17 14:44:50] [Rank 0] step:8301/10000 train_time:1953731ms step_avg:235.36ms +[2025-07-17 14:44:50] [Rank 0] step:8301/10000 train_time:1953731ms step_avg:235.36ms +[2025-07-17 14:44:55] [Rank 0] step:8321/10000 train_time:1958715ms step_avg:235.39ms +[2025-07-17 14:44:55] [Rank 0] step:8321/10000 train_time:1958715ms step_avg:235.39ms +[2025-07-17 14:45:00] [Rank 0] step:8341/10000 train_time:1963706ms step_avg:235.43ms +[2025-07-17 14:45:00] [Rank 0] step:8341/10000 train_time:1963706ms step_avg:235.43ms +[2025-07-17 14:45:05] [Rank 0] step:8361/10000 train_time:1968682ms step_avg:235.46ms +[2025-07-17 14:45:05] [Rank 0] step:8361/10000 train_time:1968682ms step_avg:235.46ms +[2025-07-17 14:45:13] [Rank 0] PRINT: step:8375/10000 val_loss:4.2283 train_time:1973501ms step_avg:235.64ms +[2025-07-17 14:45:13] [Rank 0] PRINT: step:8375/10000 val_loss:4.2283 train_time:1973501ms step_avg:235.64ms +[2025-07-17 14:45:14] [Rank 0] step:8381/10000 train_time:1973651ms step_avg:235.49ms +[2025-07-17 14:45:14] [Rank 0] step:8381/10000 train_time:1973651ms step_avg:235.49ms +[2025-07-17 14:45:19] [Rank 0] step:8401/10000 train_time:1978606ms 
step_avg:235.52ms +[2025-07-17 14:45:19] [Rank 0] step:8401/10000 train_time:1978606ms step_avg:235.52ms +[2025-07-17 14:45:24] [Rank 0] step:8421/10000 train_time:1983582ms step_avg:235.55ms +[2025-07-17 14:45:24] [Rank 0] step:8421/10000 train_time:1983582ms step_avg:235.55ms +[2025-07-17 14:45:29] [Rank 0] step:8441/10000 train_time:1988561ms step_avg:235.58ms +[2025-07-17 14:45:29] [Rank 0] step:8441/10000 train_time:1988561ms step_avg:235.58ms +[2025-07-17 14:45:34] [Rank 0] step:8461/10000 train_time:1993539ms step_avg:235.62ms +[2025-07-17 14:45:34] [Rank 0] step:8461/10000 train_time:1993539ms step_avg:235.62ms +[2025-07-17 14:45:39] [Rank 0] step:8481/10000 train_time:1998524ms step_avg:235.65ms +[2025-07-17 14:45:39] [Rank 0] step:8481/10000 train_time:1998524ms step_avg:235.65ms +[2025-07-17 14:45:48] [Rank 0] PRINT: step:8500/10000 val_loss:4.1936 train_time:2004597ms step_avg:235.83ms +[2025-07-17 14:45:48] [Rank 0] PRINT: step:8500/10000 val_loss:4.1936 train_time:2004597ms step_avg:235.83ms +[2025-07-17 14:45:48] [Rank 0] step:8501/10000 train_time:2004607ms step_avg:235.81ms +[2025-07-17 14:45:48] [Rank 0] step:8501/10000 train_time:2004607ms step_avg:235.81ms +[2025-07-17 14:45:53] [Rank 0] step:8521/10000 train_time:2008487ms step_avg:235.71ms +[2025-07-17 14:45:53] [Rank 0] step:8521/10000 train_time:2008487ms step_avg:235.71ms +[2025-07-17 14:45:58] [Rank 0] step:8541/10000 train_time:2013475ms step_avg:235.74ms +[2025-07-17 14:45:58] [Rank 0] step:8541/10000 train_time:2013475ms step_avg:235.74ms +[2025-07-17 14:46:03] [Rank 0] step:8561/10000 train_time:2018461ms step_avg:235.77ms +[2025-07-17 14:46:03] [Rank 0] step:8561/10000 train_time:2018461ms step_avg:235.77ms +[2025-07-17 14:46:08] [Rank 0] step:8581/10000 train_time:2023440ms step_avg:235.80ms +[2025-07-17 14:46:08] [Rank 0] step:8581/10000 train_time:2023440ms step_avg:235.80ms +[2025-07-17 14:46:13] [Rank 0] step:8601/10000 train_time:2028423ms step_avg:235.84ms +[2025-07-17 
14:46:13] [Rank 0] step:8601/10000 train_time:2028423ms step_avg:235.84ms +[2025-07-17 14:46:18] [Rank 0] step:8621/10000 train_time:2033394ms step_avg:235.87ms +[2025-07-17 14:46:18] [Rank 0] step:8621/10000 train_time:2033394ms step_avg:235.87ms +[2025-07-17 14:46:24] [Rank 0] PRINT: step:8625/10000 val_loss:4.2253 train_time:2035735ms step_avg:236.03ms +[2025-07-17 14:46:24] [Rank 0] PRINT: step:8625/10000 val_loss:4.2253 train_time:2035735ms step_avg:236.03ms +[2025-07-17 14:46:28] [Rank 0] step:8641/10000 train_time:2038381ms step_avg:235.90ms +[2025-07-17 14:46:28] [Rank 0] step:8641/10000 train_time:2038381ms step_avg:235.90ms +[2025-07-17 14:46:33] [Rank 0] step:8661/10000 train_time:2043365ms step_avg:235.93ms +[2025-07-17 14:46:33] [Rank 0] step:8661/10000 train_time:2043365ms step_avg:235.93ms +[2025-07-17 14:46:38] [Rank 0] step:8681/10000 train_time:2048353ms step_avg:235.96ms +[2025-07-17 14:46:38] [Rank 0] step:8681/10000 train_time:2048353ms step_avg:235.96ms +[2025-07-17 14:46:43] [Rank 0] step:8701/10000 train_time:2053352ms step_avg:235.99ms +[2025-07-17 14:46:43] [Rank 0] step:8701/10000 train_time:2053352ms step_avg:235.99ms +[2025-07-17 14:46:48] [Rank 0] step:8721/10000 train_time:2058330ms step_avg:236.02ms +[2025-07-17 14:46:48] [Rank 0] step:8721/10000 train_time:2058330ms step_avg:236.02ms +[2025-07-17 14:46:53] [Rank 0] step:8741/10000 train_time:2063336ms step_avg:236.05ms +[2025-07-17 14:46:53] [Rank 0] step:8741/10000 train_time:2063336ms step_avg:236.05ms +[2025-07-17 14:47:00] [Rank 0] PRINT: step:8750/10000 val_loss:4.2334 train_time:2066917ms step_avg:236.22ms +[2025-07-17 14:47:00] [Rank 0] PRINT: step:8750/10000 val_loss:4.2334 train_time:2066917ms step_avg:236.22ms +[2025-07-17 14:47:03] [Rank 0] step:8761/10000 train_time:2068312ms step_avg:236.08ms +[2025-07-17 14:47:03] [Rank 0] step:8761/10000 train_time:2068312ms step_avg:236.08ms +[2025-07-17 14:47:08] [Rank 0] step:8781/10000 train_time:2073305ms step_avg:236.11ms 
+[2025-07-17 14:47:08] [Rank 0] step:8781/10000 train_time:2073305ms step_avg:236.11ms +[2025-07-17 14:47:13] [Rank 0] step:8801/10000 train_time:2078296ms step_avg:236.14ms +[2025-07-17 14:47:13] [Rank 0] step:8801/10000 train_time:2078296ms step_avg:236.14ms +[2025-07-17 14:47:18] [Rank 0] step:8821/10000 train_time:2083288ms step_avg:236.17ms +[2025-07-17 14:47:18] [Rank 0] step:8821/10000 train_time:2083288ms step_avg:236.17ms +[2025-07-17 14:47:23] [Rank 0] step:8841/10000 train_time:2088305ms step_avg:236.21ms +[2025-07-17 14:47:23] [Rank 0] step:8841/10000 train_time:2088305ms step_avg:236.21ms +[2025-07-17 14:47:28] [Rank 0] step:8861/10000 train_time:2093302ms step_avg:236.24ms +[2025-07-17 14:47:28] [Rank 0] step:8861/10000 train_time:2093302ms step_avg:236.24ms +[2025-07-17 14:47:36] [Rank 0] PRINT: step:8875/10000 val_loss:4.2016 train_time:2098138ms step_avg:236.41ms +[2025-07-17 14:47:36] [Rank 0] PRINT: step:8875/10000 val_loss:4.2016 train_time:2098138ms step_avg:236.41ms +[2025-07-17 14:47:37] [Rank 0] step:8881/10000 train_time:2098291ms step_avg:236.27ms +[2025-07-17 14:47:37] [Rank 0] step:8881/10000 train_time:2098291ms step_avg:236.27ms +[2025-07-17 14:47:42] [Rank 0] step:8901/10000 train_time:2103266ms step_avg:236.30ms +[2025-07-17 14:47:42] [Rank 0] step:8901/10000 train_time:2103266ms step_avg:236.30ms +[2025-07-17 14:47:47] [Rank 0] step:8921/10000 train_time:2108247ms step_avg:236.32ms +[2025-07-17 14:47:47] [Rank 0] step:8921/10000 train_time:2108247ms step_avg:236.32ms +[2025-07-17 14:47:52] [Rank 0] step:8941/10000 train_time:2113240ms step_avg:236.35ms +[2025-07-17 14:47:52] [Rank 0] step:8941/10000 train_time:2113240ms step_avg:236.35ms +[2025-07-17 14:47:57] [Rank 0] step:8961/10000 train_time:2118229ms step_avg:236.38ms +[2025-07-17 14:47:57] [Rank 0] step:8961/10000 train_time:2118229ms step_avg:236.38ms +[2025-07-17 14:48:02] [Rank 0] step:8981/10000 train_time:2123215ms step_avg:236.41ms +[2025-07-17 14:48:02] [Rank 0] 
step:8981/10000 train_time:2123215ms step_avg:236.41ms +[2025-07-17 14:48:12] [Rank 0] PRINT: step:9000/10000 val_loss:4.2346 train_time:2129297ms step_avg:236.59ms +[2025-07-17 14:48:12] [Rank 0] PRINT: step:9000/10000 val_loss:4.2346 train_time:2129297ms step_avg:236.59ms +[2025-07-17 14:48:12] [Rank 0] step:9001/10000 train_time:2129307ms step_avg:236.56ms +[2025-07-17 14:48:12] [Rank 0] step:9001/10000 train_time:2129307ms step_avg:236.56ms +[2025-07-17 14:48:17] [Rank 0] step:9021/10000 train_time:2133182ms step_avg:236.47ms +[2025-07-17 14:48:17] [Rank 0] step:9021/10000 train_time:2133182ms step_avg:236.47ms +[2025-07-17 14:48:22] [Rank 0] step:9041/10000 train_time:2138185ms step_avg:236.50ms +[2025-07-17 14:48:22] [Rank 0] step:9041/10000 train_time:2138185ms step_avg:236.50ms +[2025-07-17 14:48:27] [Rank 0] step:9061/10000 train_time:2143166ms step_avg:236.53ms +[2025-07-17 14:48:27] [Rank 0] step:9061/10000 train_time:2143166ms step_avg:236.53ms +[2025-07-17 14:48:32] [Rank 0] step:9081/10000 train_time:2148168ms step_avg:236.56ms +[2025-07-17 14:48:32] [Rank 0] step:9081/10000 train_time:2148168ms step_avg:236.56ms +[2025-07-17 14:48:37] [Rank 0] step:9101/10000 train_time:2153173ms step_avg:236.59ms +[2025-07-17 14:48:37] [Rank 0] step:9101/10000 train_time:2153173ms step_avg:236.59ms +[2025-07-17 14:48:42] [Rank 0] step:9121/10000 train_time:2158173ms step_avg:236.62ms +[2025-07-17 14:48:42] [Rank 0] step:9121/10000 train_time:2158173ms step_avg:236.62ms +[2025-07-17 14:48:48] [Rank 0] PRINT: step:9125/10000 val_loss:4.1873 train_time:2160512ms step_avg:236.77ms +[2025-07-17 14:48:48] [Rank 0] PRINT: step:9125/10000 val_loss:4.1873 train_time:2160512ms step_avg:236.77ms +[2025-07-17 14:48:52] [Rank 0] step:9141/10000 train_time:2163141ms step_avg:236.64ms +[2025-07-17 14:48:52] [Rank 0] step:9141/10000 train_time:2163141ms step_avg:236.64ms +[2025-07-17 14:48:57] [Rank 0] step:9161/10000 train_time:2168160ms step_avg:236.67ms +[2025-07-17 14:48:57] 
[Rank 0] step:9161/10000 train_time:2168160ms step_avg:236.67ms +[2025-07-17 14:49:02] [Rank 0] step:9181/10000 train_time:2173168ms step_avg:236.70ms +[2025-07-17 14:49:02] [Rank 0] step:9181/10000 train_time:2173168ms step_avg:236.70ms +[2025-07-17 14:49:07] [Rank 0] step:9201/10000 train_time:2178161ms step_avg:236.73ms +[2025-07-17 14:49:07] [Rank 0] step:9201/10000 train_time:2178161ms step_avg:236.73ms +[2025-07-17 14:49:12] [Rank 0] step:9221/10000 train_time:2183167ms step_avg:236.76ms +[2025-07-17 14:49:12] [Rank 0] step:9221/10000 train_time:2183167ms step_avg:236.76ms +[2025-07-17 14:49:17] [Rank 0] step:9241/10000 train_time:2188181ms step_avg:236.79ms +[2025-07-17 14:49:17] [Rank 0] step:9241/10000 train_time:2188181ms step_avg:236.79ms +[2025-07-17 14:49:24] [Rank 0] PRINT: step:9250/10000 val_loss:4.3242 train_time:2191789ms step_avg:236.95ms +[2025-07-17 14:49:24] [Rank 0] PRINT: step:9250/10000 val_loss:4.3242 train_time:2191789ms step_avg:236.95ms +[2025-07-17 14:49:27] [Rank 0] step:9261/10000 train_time:2193193ms step_avg:236.82ms +[2025-07-17 14:49:27] [Rank 0] step:9261/10000 train_time:2193193ms step_avg:236.82ms +[2025-07-17 14:49:32] [Rank 0] step:9281/10000 train_time:2198183ms step_avg:236.85ms +[2025-07-17 14:49:32] [Rank 0] step:9281/10000 train_time:2198183ms step_avg:236.85ms +[2025-07-17 14:49:37] [Rank 0] step:9301/10000 train_time:2203175ms step_avg:236.88ms +[2025-07-17 14:49:37] [Rank 0] step:9301/10000 train_time:2203175ms step_avg:236.88ms +[2025-07-17 14:49:42] [Rank 0] step:9321/10000 train_time:2208191ms step_avg:236.90ms +[2025-07-17 14:49:42] [Rank 0] step:9321/10000 train_time:2208191ms step_avg:236.90ms +[2025-07-17 14:49:47] [Rank 0] step:9341/10000 train_time:2213195ms step_avg:236.93ms +[2025-07-17 14:49:47] [Rank 0] step:9341/10000 train_time:2213195ms step_avg:236.93ms +[2025-07-17 14:49:52] [Rank 0] step:9361/10000 train_time:2218177ms step_avg:236.96ms +[2025-07-17 14:49:52] [Rank 0] step:9361/10000 
train_time:2218177ms step_avg:236.96ms +[2025-07-17 14:50:00] [Rank 0] PRINT: step:9375/10000 val_loss:4.2773 train_time:2223030ms step_avg:237.12ms +[2025-07-17 14:50:00] [Rank 0] PRINT: step:9375/10000 val_loss:4.2773 train_time:2223030ms step_avg:237.12ms +[2025-07-17 14:50:01] [Rank 0] step:9381/10000 train_time:2223183ms step_avg:236.99ms +[2025-07-17 14:50:01] [Rank 0] step:9381/10000 train_time:2223183ms step_avg:236.99ms +[2025-07-17 14:50:06] [Rank 0] step:9401/10000 train_time:2228253ms step_avg:237.02ms +[2025-07-17 14:50:06] [Rank 0] step:9401/10000 train_time:2228253ms step_avg:237.02ms +[2025-07-17 14:50:11] [Rank 0] step:9421/10000 train_time:2233245ms step_avg:237.05ms +[2025-07-17 14:50:11] [Rank 0] step:9421/10000 train_time:2233245ms step_avg:237.05ms +[2025-07-17 14:50:16] [Rank 0] step:9441/10000 train_time:2238233ms step_avg:237.08ms +[2025-07-17 14:50:16] [Rank 0] step:9441/10000 train_time:2238233ms step_avg:237.08ms +[2025-07-17 14:50:21] [Rank 0] step:9461/10000 train_time:2243236ms step_avg:237.10ms +[2025-07-17 14:50:21] [Rank 0] step:9461/10000 train_time:2243236ms step_avg:237.10ms +[2025-07-17 14:50:26] [Rank 0] step:9481/10000 train_time:2248235ms step_avg:237.13ms +[2025-07-17 14:50:26] [Rank 0] step:9481/10000 train_time:2248235ms step_avg:237.13ms +[2025-07-17 14:50:36] [Rank 0] PRINT: step:9500/10000 val_loss:4.2769 train_time:2254346ms step_avg:237.30ms +[2025-07-17 14:50:36] [Rank 0] PRINT: step:9500/10000 val_loss:4.2769 train_time:2254346ms step_avg:237.30ms +[2025-07-17 14:50:36] [Rank 0] step:9501/10000 train_time:2254356ms step_avg:237.28ms +[2025-07-17 14:50:36] [Rank 0] step:9501/10000 train_time:2254356ms step_avg:237.28ms +[2025-07-17 14:50:41] [Rank 0] step:9521/10000 train_time:2258236ms step_avg:237.18ms +[2025-07-17 14:50:41] [Rank 0] step:9521/10000 train_time:2258236ms step_avg:237.18ms +[2025-07-17 14:50:46] [Rank 0] step:9541/10000 train_time:2263233ms step_avg:237.21ms +[2025-07-17 14:50:46] [Rank 0] 
step:9541/10000 train_time:2263233ms step_avg:237.21ms +[2025-07-17 14:50:51] [Rank 0] step:9561/10000 train_time:2268211ms step_avg:237.24ms +[2025-07-17 14:50:51] [Rank 0] step:9561/10000 train_time:2268211ms step_avg:237.24ms +[2025-07-17 14:50:56] [Rank 0] step:9581/10000 train_time:2273190ms step_avg:237.26ms +[2025-07-17 14:50:56] [Rank 0] step:9581/10000 train_time:2273190ms step_avg:237.26ms +[2025-07-17 14:51:01] [Rank 0] step:9601/10000 train_time:2278154ms step_avg:237.28ms +[2025-07-17 14:51:01] [Rank 0] step:9601/10000 train_time:2278154ms step_avg:237.28ms +[2025-07-17 14:51:06] [Rank 0] step:9621/10000 train_time:2283161ms step_avg:237.31ms +[2025-07-17 14:51:06] [Rank 0] step:9621/10000 train_time:2283161ms step_avg:237.31ms +[2025-07-17 14:51:12] [Rank 0] PRINT: step:9625/10000 val_loss:4.3031 train_time:2285506ms step_avg:237.46ms +[2025-07-17 14:51:12] [Rank 0] PRINT: step:9625/10000 val_loss:4.3031 train_time:2285506ms step_avg:237.46ms +[2025-07-17 14:51:16] [Rank 0] step:9641/10000 train_time:2288155ms step_avg:237.34ms +[2025-07-17 14:51:16] [Rank 0] step:9641/10000 train_time:2288155ms step_avg:237.34ms +[2025-07-17 14:51:21] [Rank 0] step:9661/10000 train_time:2293230ms step_avg:237.37ms +[2025-07-17 14:51:21] [Rank 0] step:9661/10000 train_time:2293230ms step_avg:237.37ms +[2025-07-17 14:51:26] [Rank 0] step:9681/10000 train_time:2298256ms step_avg:237.40ms +[2025-07-17 14:51:26] [Rank 0] step:9681/10000 train_time:2298256ms step_avg:237.40ms +[2025-07-17 14:51:31] [Rank 0] step:9701/10000 train_time:2303326ms step_avg:237.43ms +[2025-07-17 14:51:31] [Rank 0] step:9701/10000 train_time:2303326ms step_avg:237.43ms +[2025-07-17 14:51:36] [Rank 0] step:9721/10000 train_time:2308360ms step_avg:237.46ms +[2025-07-17 14:51:36] [Rank 0] step:9721/10000 train_time:2308360ms step_avg:237.46ms +[2025-07-17 14:51:41] [Rank 0] step:9741/10000 train_time:2313417ms step_avg:237.49ms +[2025-07-17 14:51:41] [Rank 0] step:9741/10000 train_time:2313417ms 
step_avg:237.49ms +[2025-07-17 14:51:48] [Rank 0] PRINT: step:9750/10000 val_loss:4.3452 train_time:2317026ms step_avg:237.64ms +[2025-07-17 14:51:48] [Rank 0] PRINT: step:9750/10000 val_loss:4.3452 train_time:2317026ms step_avg:237.64ms +[2025-07-17 14:51:51] [Rank 0] step:9761/10000 train_time:2318435ms step_avg:237.52ms +[2025-07-17 14:51:51] [Rank 0] step:9761/10000 train_time:2318435ms step_avg:237.52ms +[2025-07-17 14:51:56] [Rank 0] step:9781/10000 train_time:2323484ms step_avg:237.55ms +[2025-07-17 14:51:56] [Rank 0] step:9781/10000 train_time:2323484ms step_avg:237.55ms +[2025-07-17 14:52:01] [Rank 0] step:9801/10000 train_time:2328500ms step_avg:237.58ms +[2025-07-17 14:52:01] [Rank 0] step:9801/10000 train_time:2328500ms step_avg:237.58ms +[2025-07-17 14:52:06] [Rank 0] step:9821/10000 train_time:2333540ms step_avg:237.61ms +[2025-07-17 14:52:06] [Rank 0] step:9821/10000 train_time:2333540ms step_avg:237.61ms +[2025-07-17 14:52:11] [Rank 0] step:9841/10000 train_time:2338572ms step_avg:237.64ms +[2025-07-17 14:52:11] [Rank 0] step:9841/10000 train_time:2338572ms step_avg:237.64ms +[2025-07-17 14:52:16] [Rank 0] step:9861/10000 train_time:2343606ms step_avg:237.66ms +[2025-07-17 14:52:16] [Rank 0] step:9861/10000 train_time:2343606ms step_avg:237.66ms +[2025-07-17 14:52:25] [Rank 0] PRINT: step:9875/10000 val_loss:4.3214 train_time:2348477ms step_avg:237.82ms +[2025-07-17 14:52:25] [Rank 0] PRINT: step:9875/10000 val_loss:4.3214 train_time:2348477ms step_avg:237.82ms +[2025-07-17 14:52:26] [Rank 0] step:9881/10000 train_time:2348633ms step_avg:237.69ms +[2025-07-17 14:52:26] [Rank 0] step:9881/10000 train_time:2348633ms step_avg:237.69ms +[2025-07-17 14:52:31] [Rank 0] step:9901/10000 train_time:2353660ms step_avg:237.72ms +[2025-07-17 14:52:31] [Rank 0] step:9901/10000 train_time:2353660ms step_avg:237.72ms +[2025-07-17 14:52:36] [Rank 0] step:9921/10000 train_time:2358709ms step_avg:237.75ms +[2025-07-17 14:52:36] [Rank 0] step:9921/10000 
train_time:2358709ms step_avg:237.75ms +[2025-07-17 14:52:41] [Rank 0] step:9941/10000 train_time:2363773ms step_avg:237.78ms +[2025-07-17 14:52:41] [Rank 0] step:9941/10000 train_time:2363773ms step_avg:237.78ms +[2025-07-17 14:52:46] [Rank 0] step:9961/10000 train_time:2368840ms step_avg:237.81ms +[2025-07-17 14:52:46] [Rank 0] step:9961/10000 train_time:2368840ms step_avg:237.81ms +[2025-07-17 14:52:51] [Rank 0] step:9981/10000 train_time:2373900ms step_avg:237.84ms +[2025-07-17 14:52:51] [Rank 0] step:9981/10000 train_time:2373900ms step_avg:237.84ms +[2025-07-17 14:52:56] [Rank 0] step:10000/10000 train_time:2378682ms step_avg:237.87ms +[2025-07-17 14:52:56] [Rank 0] step:10000/10000 train_time:2378682ms step_avg:237.87ms +[2025-07-17 14:53:00] [Rank 0] PRINT: step:10000/10000 val_loss:4.3311 train_time:2380035ms step_avg:238.00ms +[2025-07-17 14:53:00] [Rank 0] PRINT: step:10000/10000 val_loss:4.3311 train_time:2380035ms step_avg:238.00ms +[2025-07-17 14:53:00] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 14:53:00 2025 --- +[2025-07-17 14:53:00] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 14:53:00 2025 --- +[2025-07-17 14:53:00] [Rank 0] PRINT: Peak memory allocated: 31193 MiB reserved: 31476 MiB +[2025-07-17 14:53:00] [Rank 0] PRINT: Peak memory allocated: 31193 MiB reserved: 31476 MiB diff --git a/logs_norope/diff_modes/mode_5_param_norope_seed_43/config.json b/logs_norope/diff_modes/mode_5_param_norope_seed_43/config.json new file mode 100644 index 0000000000000000000000000000000000000000..58a80775067fc77eb487de139d4ccdb7fbd0bb8a --- /dev/null +++ b/logs_norope/diff_modes/mode_5_param_norope_seed_43/config.json @@ -0,0 +1,22 @@ +{ + "cli_args": { + "unet": false, + "seed": 43, + "optimizer_mode": 5, + "model_parameterization": "norope" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 10485760, + "train_seq_len": 49152, + "val_seq_len": 262144, + "num_iterations": 10000, + "cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 125, + "save_checkpoint": false + }, + "run_uuid_for_log": "a73c0f9d-ff63-4ad1-b30b-a000911132b3", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_norope/diff_modes/mode_5_param_norope_seed_43/training_log_a73c0f9d-ff63-4ad1-b30b-a000911132b3.txt b/logs_norope/diff_modes/mode_5_param_norope_seed_43/training_log_a73c0f9d-ff63-4ad1-b30b-a000911132b3.txt new file mode 100644 index 0000000000000000000000000000000000000000..4951f09aac4c392e9600fd82bf9761a712a69e73 --- /dev/null +++ b/logs_norope/diff_modes/mode_5_param_norope_seed_43/training_log_a73c0f9d-ff63-4ad1-b30b-a000911132b3.txt @@ -0,0 +1,2360 @@ +[2025-07-17 21:17:55] [Rank 0] PRINT: --- Script Start: Thu Jul 17 21:17:55 2025 --- +[2025-07-17 21:17:55] [Rank 0] PRINT: --- Script Start: Thu Jul 17 21:17:55 2025 --- +[2025-07-17 21:17:55] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=5, model_parameterization='norope') +[2025-07-17 21:17:55] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=5, model_parameterization='norope') +[2025-07-17 21:17:55] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 21:17:55] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 21:17:55] [Rank 0] PRINT: Using fixed seed: 43 +[2025-07-17 21:17:55] [Rank 0] PRINT: Using fixed seed: 43 +[2025-07-17 21:17:55] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_5_param_norope_seed_43 +[2025-07-17 21:17:55] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_5_param_norope_seed_43 +[2025-07-17 21:17:55] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import 
time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = 
torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used 
for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + 
config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 21:17:55] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + 
num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + 
run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 21:17:55] [Rank 0] PRINT: Constructing model... +[2025-07-17 21:17:55] [Rank 0] PRINT: Constructing model... +[2025-07-17 21:17:57] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 21:17:57] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 21:17:57] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 21:17:57] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 21:17:57] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 21:17:57] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 21:17:57] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 21:17:57] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 21:17:57] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 5 +[2025-07-17 21:17:57] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 5 +[2025-07-17 21:17:57] [Rank 0] PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: 0.001). +[2025-07-17 21:17:57] [Rank 0] PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: 0.001). +[2025-07-17 21:17:57] [Rank 0] PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices). +[2025-07-17 21:17:57] [Rank 0] PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices). +[2025-07-17 21:17:57] [Rank 0] PRINT: Optimizers configured. Total optimizers: 1 +[2025-07-17 21:17:57] [Rank 0] PRINT: Optimizers configured. Total optimizers: 1 +[2025-07-17 21:17:57] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 21:17:57] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 21:17:58] [Rank 0] PRINT: Model compilation complete. 
+[2025-07-17 21:17:58] [Rank 0] PRINT: Model compilation complete. +[2025-07-17 21:17:58] [Rank 0] PRINT: Starting warmup... +[2025-07-17 21:17:58] [Rank 0] PRINT: Starting warmup... +[2025-07-17 21:19:02] [Rank 0] PRINT: Warmup complete. +[2025-07-17 21:19:02] [Rank 0] PRINT: Warmup complete. +[2025-07-17 21:19:03] [Rank 0] PRINT: Starting training... +[2025-07-17 21:19:03] [Rank 0] PRINT: Starting training... +[2025-07-17 21:19:13] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 21:19:13] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 21:19:18] [Rank 0] step:21/10000 train_time:3737ms step_avg:177.93ms +[2025-07-17 21:19:18] [Rank 0] step:21/10000 train_time:3737ms step_avg:177.93ms +[2025-07-17 21:19:23] [Rank 0] step:41/10000 train_time:8133ms step_avg:198.37ms +[2025-07-17 21:19:23] [Rank 0] step:41/10000 train_time:8133ms step_avg:198.37ms +[2025-07-17 21:19:27] [Rank 0] step:61/10000 train_time:12543ms step_avg:205.63ms +[2025-07-17 21:19:27] [Rank 0] step:61/10000 train_time:12543ms step_avg:205.63ms +[2025-07-17 21:19:32] [Rank 0] step:81/10000 train_time:16959ms step_avg:209.37ms +[2025-07-17 21:19:32] [Rank 0] step:81/10000 train_time:16959ms step_avg:209.37ms +[2025-07-17 21:19:36] [Rank 0] step:101/10000 train_time:21385ms step_avg:211.73ms +[2025-07-17 21:19:36] [Rank 0] step:101/10000 train_time:21385ms step_avg:211.73ms +[2025-07-17 21:19:40] [Rank 0] step:121/10000 train_time:25810ms step_avg:213.31ms +[2025-07-17 21:19:40] [Rank 0] step:121/10000 train_time:25810ms step_avg:213.31ms +[2025-07-17 21:19:46] [Rank 0] PRINT: step:125/10000 val_loss:5.5439 train_time:27888ms step_avg:223.10ms +[2025-07-17 21:19:46] [Rank 0] PRINT: step:125/10000 val_loss:5.5439 train_time:27888ms step_avg:223.10ms +[2025-07-17 21:19:49] [Rank 0] step:141/10000 train_time:30233ms step_avg:214.42ms +[2025-07-17 21:19:49] [Rank 0] step:141/10000 train_time:30233ms step_avg:214.42ms 
+[2025-07-17 21:19:54] [Rank 0] step:161/10000 train_time:34664ms step_avg:215.31ms +[2025-07-17 21:19:54] [Rank 0] step:161/10000 train_time:34664ms step_avg:215.31ms +[2025-07-17 21:19:58] [Rank 0] step:181/10000 train_time:39090ms step_avg:215.97ms +[2025-07-17 21:19:58] [Rank 0] step:181/10000 train_time:39090ms step_avg:215.97ms +[2025-07-17 21:20:03] [Rank 0] step:201/10000 train_time:43520ms step_avg:216.52ms +[2025-07-17 21:20:03] [Rank 0] step:201/10000 train_time:43520ms step_avg:216.52ms +[2025-07-17 21:20:07] [Rank 0] step:221/10000 train_time:47956ms step_avg:217.00ms +[2025-07-17 21:20:07] [Rank 0] step:221/10000 train_time:47956ms step_avg:217.00ms +[2025-07-17 21:20:12] [Rank 0] step:241/10000 train_time:52386ms step_avg:217.37ms +[2025-07-17 21:20:12] [Rank 0] step:241/10000 train_time:52386ms step_avg:217.37ms +[2025-07-17 21:20:18] [Rank 0] PRINT: step:250/10000 val_loss:5.0536 train_time:55576ms step_avg:222.30ms +[2025-07-17 21:20:18] [Rank 0] PRINT: step:250/10000 val_loss:5.0536 train_time:55576ms step_avg:222.30ms +[2025-07-17 21:20:20] [Rank 0] step:261/10000 train_time:56815ms step_avg:217.68ms +[2025-07-17 21:20:20] [Rank 0] step:261/10000 train_time:56815ms step_avg:217.68ms +[2025-07-17 21:20:25] [Rank 0] step:281/10000 train_time:61242ms step_avg:217.94ms +[2025-07-17 21:20:25] [Rank 0] step:281/10000 train_time:61242ms step_avg:217.94ms +[2025-07-17 21:20:29] [Rank 0] step:301/10000 train_time:65667ms step_avg:218.16ms +[2025-07-17 21:20:29] [Rank 0] step:301/10000 train_time:65667ms step_avg:218.16ms +[2025-07-17 21:20:34] [Rank 0] step:321/10000 train_time:70096ms step_avg:218.37ms +[2025-07-17 21:20:34] [Rank 0] step:321/10000 train_time:70096ms step_avg:218.37ms +[2025-07-17 21:20:38] [Rank 0] step:341/10000 train_time:74523ms step_avg:218.54ms +[2025-07-17 21:20:38] [Rank 0] step:341/10000 train_time:74523ms step_avg:218.54ms +[2025-07-17 21:20:42] [Rank 0] step:361/10000 train_time:78951ms step_avg:218.70ms +[2025-07-17 
21:20:42] [Rank 0] step:361/10000 train_time:78951ms step_avg:218.70ms +[2025-07-17 21:20:50] [Rank 0] PRINT: step:375/10000 val_loss:4.7621 train_time:83243ms step_avg:221.98ms +[2025-07-17 21:20:50] [Rank 0] PRINT: step:375/10000 val_loss:4.7621 train_time:83243ms step_avg:221.98ms +[2025-07-17 21:20:51] [Rank 0] step:381/10000 train_time:83382ms step_avg:218.85ms +[2025-07-17 21:20:51] [Rank 0] step:381/10000 train_time:83382ms step_avg:218.85ms +[2025-07-17 21:20:56] [Rank 0] step:401/10000 train_time:87811ms step_avg:218.98ms +[2025-07-17 21:20:56] [Rank 0] step:401/10000 train_time:87811ms step_avg:218.98ms +[2025-07-17 21:21:00] [Rank 0] step:421/10000 train_time:92236ms step_avg:219.09ms +[2025-07-17 21:21:00] [Rank 0] step:421/10000 train_time:92236ms step_avg:219.09ms +[2025-07-17 21:21:05] [Rank 0] step:441/10000 train_time:96666ms step_avg:219.20ms +[2025-07-17 21:21:05] [Rank 0] step:441/10000 train_time:96666ms step_avg:219.20ms +[2025-07-17 21:21:09] [Rank 0] step:461/10000 train_time:101094ms step_avg:219.29ms +[2025-07-17 21:21:09] [Rank 0] step:461/10000 train_time:101094ms step_avg:219.29ms +[2025-07-17 21:21:13] [Rank 0] step:481/10000 train_time:105525ms step_avg:219.39ms +[2025-07-17 21:21:13] [Rank 0] step:481/10000 train_time:105525ms step_avg:219.39ms +[2025-07-17 21:21:22] [Rank 0] PRINT: step:500/10000 val_loss:4.6322 train_time:110921ms step_avg:221.84ms +[2025-07-17 21:21:22] [Rank 0] PRINT: step:500/10000 val_loss:4.6322 train_time:110921ms step_avg:221.84ms +[2025-07-17 21:21:22] [Rank 0] step:501/10000 train_time:110930ms step_avg:221.42ms +[2025-07-17 21:21:22] [Rank 0] step:501/10000 train_time:110930ms step_avg:221.42ms +[2025-07-17 21:21:27] [Rank 0] step:521/10000 train_time:114384ms step_avg:219.55ms +[2025-07-17 21:21:27] [Rank 0] step:521/10000 train_time:114384ms step_avg:219.55ms +[2025-07-17 21:21:31] [Rank 0] step:541/10000 train_time:118814ms step_avg:219.62ms +[2025-07-17 21:21:31] [Rank 0] step:541/10000 
train_time:118814ms step_avg:219.62ms +[2025-07-17 21:21:36] [Rank 0] step:561/10000 train_time:123247ms step_avg:219.69ms +[2025-07-17 21:21:36] [Rank 0] step:561/10000 train_time:123247ms step_avg:219.69ms +[2025-07-17 21:21:40] [Rank 0] step:581/10000 train_time:127678ms step_avg:219.76ms +[2025-07-17 21:21:40] [Rank 0] step:581/10000 train_time:127678ms step_avg:219.76ms +[2025-07-17 21:21:44] [Rank 0] step:601/10000 train_time:132119ms step_avg:219.83ms +[2025-07-17 21:21:44] [Rank 0] step:601/10000 train_time:132119ms step_avg:219.83ms +[2025-07-17 21:21:49] [Rank 0] step:621/10000 train_time:136555ms step_avg:219.90ms +[2025-07-17 21:21:49] [Rank 0] step:621/10000 train_time:136555ms step_avg:219.90ms +[2025-07-17 21:21:54] [Rank 0] PRINT: step:625/10000 val_loss:4.5359 train_time:138638ms step_avg:221.82ms +[2025-07-17 21:21:54] [Rank 0] PRINT: step:625/10000 val_loss:4.5359 train_time:138638ms step_avg:221.82ms +[2025-07-17 21:21:58] [Rank 0] step:641/10000 train_time:140990ms step_avg:219.95ms +[2025-07-17 21:21:58] [Rank 0] step:641/10000 train_time:140990ms step_avg:219.95ms +[2025-07-17 21:22:02] [Rank 0] step:661/10000 train_time:145423ms step_avg:220.01ms +[2025-07-17 21:22:02] [Rank 0] step:661/10000 train_time:145423ms step_avg:220.01ms +[2025-07-17 21:22:07] [Rank 0] step:681/10000 train_time:149856ms step_avg:220.05ms +[2025-07-17 21:22:07] [Rank 0] step:681/10000 train_time:149856ms step_avg:220.05ms +[2025-07-17 21:22:11] [Rank 0] step:701/10000 train_time:154291ms step_avg:220.10ms +[2025-07-17 21:22:11] [Rank 0] step:701/10000 train_time:154291ms step_avg:220.10ms +[2025-07-17 21:22:16] [Rank 0] step:721/10000 train_time:158728ms step_avg:220.15ms +[2025-07-17 21:22:16] [Rank 0] step:721/10000 train_time:158728ms step_avg:220.15ms +[2025-07-17 21:22:20] [Rank 0] step:741/10000 train_time:163165ms step_avg:220.20ms +[2025-07-17 21:22:20] [Rank 0] step:741/10000 train_time:163165ms step_avg:220.20ms +[2025-07-17 21:22:26] [Rank 0] PRINT: 
step:750/10000 val_loss:4.7743 train_time:166368ms step_avg:221.82ms +[2025-07-17 21:22:26] [Rank 0] PRINT: step:750/10000 val_loss:4.7743 train_time:166368ms step_avg:221.82ms +[2025-07-17 21:22:29] [Rank 0] step:761/10000 train_time:167623ms step_avg:220.27ms +[2025-07-17 21:22:29] [Rank 0] step:761/10000 train_time:167623ms step_avg:220.27ms +[2025-07-17 21:22:33] [Rank 0] step:781/10000 train_time:172086ms step_avg:220.34ms +[2025-07-17 21:22:33] [Rank 0] step:781/10000 train_time:172086ms step_avg:220.34ms +[2025-07-17 21:22:38] [Rank 0] step:801/10000 train_time:176553ms step_avg:220.42ms +[2025-07-17 21:22:38] [Rank 0] step:801/10000 train_time:176553ms step_avg:220.42ms +[2025-07-17 21:22:42] [Rank 0] step:821/10000 train_time:181021ms step_avg:220.49ms +[2025-07-17 21:22:42] [Rank 0] step:821/10000 train_time:181021ms step_avg:220.49ms +[2025-07-17 21:22:47] [Rank 0] step:841/10000 train_time:185492ms step_avg:220.56ms +[2025-07-17 21:22:47] [Rank 0] step:841/10000 train_time:185492ms step_avg:220.56ms +[2025-07-17 21:22:51] [Rank 0] step:861/10000 train_time:189966ms step_avg:220.63ms +[2025-07-17 21:22:51] [Rank 0] step:861/10000 train_time:189966ms step_avg:220.63ms +[2025-07-17 21:22:59] [Rank 0] PRINT: step:875/10000 val_loss:4.6456 train_time:194304ms step_avg:222.06ms +[2025-07-17 21:22:59] [Rank 0] PRINT: step:875/10000 val_loss:4.6456 train_time:194304ms step_avg:222.06ms +[2025-07-17 21:23:00] [Rank 0] step:881/10000 train_time:194446ms step_avg:220.71ms +[2025-07-17 21:23:00] [Rank 0] step:881/10000 train_time:194446ms step_avg:220.71ms +[2025-07-17 21:23:05] [Rank 0] step:901/10000 train_time:198920ms step_avg:220.78ms +[2025-07-17 21:23:05] [Rank 0] step:901/10000 train_time:198920ms step_avg:220.78ms +[2025-07-17 21:23:09] [Rank 0] step:921/10000 train_time:203393ms step_avg:220.84ms +[2025-07-17 21:23:09] [Rank 0] step:921/10000 train_time:203393ms step_avg:220.84ms +[2025-07-17 21:23:14] [Rank 0] step:941/10000 train_time:207866ms 
step_avg:220.90ms +[2025-07-17 21:23:14] [Rank 0] step:941/10000 train_time:207866ms step_avg:220.90ms +[2025-07-17 21:23:18] [Rank 0] step:961/10000 train_time:212336ms step_avg:220.95ms +[2025-07-17 21:23:18] [Rank 0] step:961/10000 train_time:212336ms step_avg:220.95ms +[2025-07-17 21:23:22] [Rank 0] step:981/10000 train_time:216808ms step_avg:221.01ms +[2025-07-17 21:23:22] [Rank 0] step:981/10000 train_time:216808ms step_avg:221.01ms +[2025-07-17 21:23:31] [Rank 0] PRINT: step:1000/10000 val_loss:4.7154 train_time:222260ms step_avg:222.26ms +[2025-07-17 21:23:31] [Rank 0] PRINT: step:1000/10000 val_loss:4.7154 train_time:222260ms step_avg:222.26ms +[2025-07-17 21:23:31] [Rank 0] step:1001/10000 train_time:222268ms step_avg:222.05ms +[2025-07-17 21:23:31] [Rank 0] step:1001/10000 train_time:222268ms step_avg:222.05ms +[2025-07-17 21:23:36] [Rank 0] step:1021/10000 train_time:225759ms step_avg:221.12ms +[2025-07-17 21:23:36] [Rank 0] step:1021/10000 train_time:225759ms step_avg:221.12ms +[2025-07-17 21:23:40] [Rank 0] step:1041/10000 train_time:230236ms step_avg:221.17ms +[2025-07-17 21:23:40] [Rank 0] step:1041/10000 train_time:230236ms step_avg:221.17ms +[2025-07-17 21:23:45] [Rank 0] step:1061/10000 train_time:234710ms step_avg:221.22ms +[2025-07-17 21:23:45] [Rank 0] step:1061/10000 train_time:234710ms step_avg:221.22ms +[2025-07-17 21:23:49] [Rank 0] step:1081/10000 train_time:239188ms step_avg:221.27ms +[2025-07-17 21:23:49] [Rank 0] step:1081/10000 train_time:239188ms step_avg:221.27ms +[2025-07-17 21:23:54] [Rank 0] step:1101/10000 train_time:243667ms step_avg:221.31ms +[2025-07-17 21:23:54] [Rank 0] step:1101/10000 train_time:243667ms step_avg:221.31ms +[2025-07-17 21:23:58] [Rank 0] step:1121/10000 train_time:248151ms step_avg:221.37ms +[2025-07-17 21:23:58] [Rank 0] step:1121/10000 train_time:248151ms step_avg:221.37ms +[2025-07-17 21:24:04] [Rank 0] PRINT: step:1125/10000 val_loss:4.6694 train_time:250256ms step_avg:222.45ms +[2025-07-17 21:24:04] 
[Rank 0] PRINT: step:1125/10000 val_loss:4.6694 train_time:250256ms step_avg:222.45ms +[2025-07-17 21:24:07] [Rank 0] step:1141/10000 train_time:252634ms step_avg:221.41ms +[2025-07-17 21:24:07] [Rank 0] step:1141/10000 train_time:252634ms step_avg:221.41ms +[2025-07-17 21:24:12] [Rank 0] step:1161/10000 train_time:257109ms step_avg:221.45ms +[2025-07-17 21:24:12] [Rank 0] step:1161/10000 train_time:257109ms step_avg:221.45ms +[2025-07-17 21:24:16] [Rank 0] step:1181/10000 train_time:261587ms step_avg:221.50ms +[2025-07-17 21:24:16] [Rank 0] step:1181/10000 train_time:261587ms step_avg:221.50ms +[2025-07-17 21:24:21] [Rank 0] step:1201/10000 train_time:266067ms step_avg:221.54ms +[2025-07-17 21:24:21] [Rank 0] step:1201/10000 train_time:266067ms step_avg:221.54ms +[2025-07-17 21:24:25] [Rank 0] step:1221/10000 train_time:270552ms step_avg:221.58ms +[2025-07-17 21:24:25] [Rank 0] step:1221/10000 train_time:270552ms step_avg:221.58ms +[2025-07-17 21:24:30] [Rank 0] step:1241/10000 train_time:275032ms step_avg:221.62ms +[2025-07-17 21:24:30] [Rank 0] step:1241/10000 train_time:275032ms step_avg:221.62ms +[2025-07-17 21:24:36] [Rank 0] PRINT: step:1250/10000 val_loss:4.6659 train_time:278252ms step_avg:222.60ms +[2025-07-17 21:24:36] [Rank 0] PRINT: step:1250/10000 val_loss:4.6659 train_time:278252ms step_avg:222.60ms +[2025-07-17 21:24:39] [Rank 0] step:1261/10000 train_time:279508ms step_avg:221.66ms +[2025-07-17 21:24:39] [Rank 0] step:1261/10000 train_time:279508ms step_avg:221.66ms +[2025-07-17 21:24:43] [Rank 0] step:1281/10000 train_time:283987ms step_avg:221.69ms +[2025-07-17 21:24:43] [Rank 0] step:1281/10000 train_time:283987ms step_avg:221.69ms +[2025-07-17 21:24:47] [Rank 0] step:1301/10000 train_time:288468ms step_avg:221.73ms +[2025-07-17 21:24:47] [Rank 0] step:1301/10000 train_time:288468ms step_avg:221.73ms +[2025-07-17 21:24:52] [Rank 0] step:1321/10000 train_time:292949ms step_avg:221.76ms +[2025-07-17 21:24:52] [Rank 0] step:1321/10000 
train_time:292949ms step_avg:221.76ms +[2025-07-17 21:24:56] [Rank 0] step:1341/10000 train_time:297426ms step_avg:221.79ms +[2025-07-17 21:24:56] [Rank 0] step:1341/10000 train_time:297426ms step_avg:221.79ms +[2025-07-17 21:25:01] [Rank 0] step:1361/10000 train_time:301910ms step_avg:221.83ms +[2025-07-17 21:25:01] [Rank 0] step:1361/10000 train_time:301910ms step_avg:221.83ms +[2025-07-17 21:25:09] [Rank 0] PRINT: step:1375/10000 val_loss:4.6512 train_time:306254ms step_avg:222.73ms +[2025-07-17 21:25:09] [Rank 0] PRINT: step:1375/10000 val_loss:4.6512 train_time:306254ms step_avg:222.73ms +[2025-07-17 21:25:10] [Rank 0] step:1381/10000 train_time:306394ms step_avg:221.86ms +[2025-07-17 21:25:10] [Rank 0] step:1381/10000 train_time:306394ms step_avg:221.86ms +[2025-07-17 21:25:14] [Rank 0] step:1401/10000 train_time:310879ms step_avg:221.90ms +[2025-07-17 21:25:14] [Rank 0] step:1401/10000 train_time:310879ms step_avg:221.90ms +[2025-07-17 21:25:19] [Rank 0] step:1421/10000 train_time:315363ms step_avg:221.93ms +[2025-07-17 21:25:19] [Rank 0] step:1421/10000 train_time:315363ms step_avg:221.93ms +[2025-07-17 21:25:23] [Rank 0] step:1441/10000 train_time:319848ms step_avg:221.96ms +[2025-07-17 21:25:23] [Rank 0] step:1441/10000 train_time:319848ms step_avg:221.96ms +[2025-07-17 21:25:28] [Rank 0] step:1461/10000 train_time:324331ms step_avg:221.99ms +[2025-07-17 21:25:28] [Rank 0] step:1461/10000 train_time:324331ms step_avg:221.99ms +[2025-07-17 21:25:32] [Rank 0] step:1481/10000 train_time:328816ms step_avg:222.02ms +[2025-07-17 21:25:32] [Rank 0] step:1481/10000 train_time:328816ms step_avg:222.02ms +[2025-07-17 21:25:41] [Rank 0] PRINT: step:1500/10000 val_loss:4.6056 train_time:334315ms step_avg:222.88ms +[2025-07-17 21:25:41] [Rank 0] PRINT: step:1500/10000 val_loss:4.6056 train_time:334315ms step_avg:222.88ms +[2025-07-17 21:25:41] [Rank 0] step:1501/10000 train_time:334324ms step_avg:222.73ms +[2025-07-17 21:25:41] [Rank 0] step:1501/10000 
train_time:334324ms step_avg:222.73ms +[2025-07-17 21:25:46] [Rank 0] step:1521/10000 train_time:337845ms step_avg:222.12ms +[2025-07-17 21:25:46] [Rank 0] step:1521/10000 train_time:337845ms step_avg:222.12ms +[2025-07-17 21:25:50] [Rank 0] step:1541/10000 train_time:342364ms step_avg:222.17ms +[2025-07-17 21:25:50] [Rank 0] step:1541/10000 train_time:342364ms step_avg:222.17ms +[2025-07-17 21:25:55] [Rank 0] step:1561/10000 train_time:346884ms step_avg:222.22ms +[2025-07-17 21:25:55] [Rank 0] step:1561/10000 train_time:346884ms step_avg:222.22ms +[2025-07-17 21:25:59] [Rank 0] step:1581/10000 train_time:351404ms step_avg:222.27ms +[2025-07-17 21:25:59] [Rank 0] step:1581/10000 train_time:351404ms step_avg:222.27ms +[2025-07-17 21:26:04] [Rank 0] step:1601/10000 train_time:355926ms step_avg:222.31ms +[2025-07-17 21:26:04] [Rank 0] step:1601/10000 train_time:355926ms step_avg:222.31ms +[2025-07-17 21:26:08] [Rank 0] step:1621/10000 train_time:360446ms step_avg:222.36ms +[2025-07-17 21:26:08] [Rank 0] step:1621/10000 train_time:360446ms step_avg:222.36ms +[2025-07-17 21:26:14] [Rank 0] PRINT: step:1625/10000 val_loss:4.6049 train_time:362567ms step_avg:223.12ms +[2025-07-17 21:26:14] [Rank 0] PRINT: step:1625/10000 val_loss:4.6049 train_time:362567ms step_avg:223.12ms +[2025-07-17 21:26:17] [Rank 0] step:1641/10000 train_time:364965ms step_avg:222.40ms +[2025-07-17 21:26:17] [Rank 0] step:1641/10000 train_time:364965ms step_avg:222.40ms +[2025-07-17 21:26:22] [Rank 0] step:1661/10000 train_time:369483ms step_avg:222.45ms +[2025-07-17 21:26:22] [Rank 0] step:1661/10000 train_time:369483ms step_avg:222.45ms +[2025-07-17 21:26:27] [Rank 0] step:1681/10000 train_time:374001ms step_avg:222.49ms +[2025-07-17 21:26:27] [Rank 0] step:1681/10000 train_time:374001ms step_avg:222.49ms +[2025-07-17 21:26:31] [Rank 0] step:1701/10000 train_time:378517ms step_avg:222.53ms +[2025-07-17 21:26:31] [Rank 0] step:1701/10000 train_time:378517ms step_avg:222.53ms +[2025-07-17 21:26:36] 
[Rank 0] step:1721/10000 train_time:383036ms step_avg:222.57ms +[2025-07-17 21:26:36] [Rank 0] step:1721/10000 train_time:383036ms step_avg:222.57ms +[2025-07-17 21:26:40] [Rank 0] step:1741/10000 train_time:387552ms step_avg:222.60ms +[2025-07-17 21:26:40] [Rank 0] step:1741/10000 train_time:387552ms step_avg:222.60ms +[2025-07-17 21:26:47] [Rank 0] PRINT: step:1750/10000 val_loss:4.5539 train_time:390802ms step_avg:223.32ms +[2025-07-17 21:26:47] [Rank 0] PRINT: step:1750/10000 val_loss:4.5539 train_time:390802ms step_avg:223.32ms +[2025-07-17 21:26:49] [Rank 0] step:1761/10000 train_time:392069ms step_avg:222.64ms +[2025-07-17 21:26:49] [Rank 0] step:1761/10000 train_time:392069ms step_avg:222.64ms +[2025-07-17 21:26:54] [Rank 0] step:1781/10000 train_time:396586ms step_avg:222.68ms +[2025-07-17 21:26:54] [Rank 0] step:1781/10000 train_time:396586ms step_avg:222.68ms +[2025-07-17 21:26:58] [Rank 0] step:1801/10000 train_time:401104ms step_avg:222.71ms +[2025-07-17 21:26:58] [Rank 0] step:1801/10000 train_time:401104ms step_avg:222.71ms +[2025-07-17 21:27:03] [Rank 0] step:1821/10000 train_time:405620ms step_avg:222.75ms +[2025-07-17 21:27:03] [Rank 0] step:1821/10000 train_time:405620ms step_avg:222.75ms +[2025-07-17 21:27:07] [Rank 0] step:1841/10000 train_time:410141ms step_avg:222.78ms +[2025-07-17 21:27:07] [Rank 0] step:1841/10000 train_time:410141ms step_avg:222.78ms +[2025-07-17 21:27:12] [Rank 0] step:1861/10000 train_time:414659ms step_avg:222.82ms +[2025-07-17 21:27:12] [Rank 0] step:1861/10000 train_time:414659ms step_avg:222.82ms +[2025-07-17 21:27:19] [Rank 0] PRINT: step:1875/10000 val_loss:4.5339 train_time:419036ms step_avg:223.49ms +[2025-07-17 21:27:19] [Rank 0] PRINT: step:1875/10000 val_loss:4.5339 train_time:419036ms step_avg:223.49ms +[2025-07-17 21:27:21] [Rank 0] step:1881/10000 train_time:419177ms step_avg:222.85ms +[2025-07-17 21:27:21] [Rank 0] step:1881/10000 train_time:419177ms step_avg:222.85ms +[2025-07-17 21:27:25] [Rank 0] 
step:1901/10000 train_time:423699ms step_avg:222.88ms +[2025-07-17 21:27:25] [Rank 0] step:1901/10000 train_time:423699ms step_avg:222.88ms +[2025-07-17 21:27:30] [Rank 0] step:1921/10000 train_time:428223ms step_avg:222.92ms +[2025-07-17 21:27:30] [Rank 0] step:1921/10000 train_time:428223ms step_avg:222.92ms +[2025-07-17 21:27:34] [Rank 0] step:1941/10000 train_time:432741ms step_avg:222.95ms +[2025-07-17 21:27:34] [Rank 0] step:1941/10000 train_time:432741ms step_avg:222.95ms +[2025-07-17 21:27:39] [Rank 0] step:1961/10000 train_time:437265ms step_avg:222.98ms +[2025-07-17 21:27:39] [Rank 0] step:1961/10000 train_time:437265ms step_avg:222.98ms +[2025-07-17 21:27:43] [Rank 0] step:1981/10000 train_time:441789ms step_avg:223.01ms +[2025-07-17 21:27:43] [Rank 0] step:1981/10000 train_time:441789ms step_avg:223.01ms +[2025-07-17 21:27:52] [Rank 0] PRINT: step:2000/10000 val_loss:4.5437 train_time:447302ms step_avg:223.65ms +[2025-07-17 21:27:52] [Rank 0] PRINT: step:2000/10000 val_loss:4.5437 train_time:447302ms step_avg:223.65ms +[2025-07-17 21:27:52] [Rank 0] step:2001/10000 train_time:447310ms step_avg:223.54ms +[2025-07-17 21:27:52] [Rank 0] step:2001/10000 train_time:447310ms step_avg:223.54ms +[2025-07-17 21:27:57] [Rank 0] step:2021/10000 train_time:450837ms step_avg:223.08ms +[2025-07-17 21:27:57] [Rank 0] step:2021/10000 train_time:450837ms step_avg:223.08ms +[2025-07-17 21:28:01] [Rank 0] step:2041/10000 train_time:455361ms step_avg:223.11ms +[2025-07-17 21:28:01] [Rank 0] step:2041/10000 train_time:455361ms step_avg:223.11ms +[2025-07-17 21:28:06] [Rank 0] step:2061/10000 train_time:459894ms step_avg:223.14ms +[2025-07-17 21:28:06] [Rank 0] step:2061/10000 train_time:459894ms step_avg:223.14ms +[2025-07-17 21:28:10] [Rank 0] step:2081/10000 train_time:464421ms step_avg:223.17ms +[2025-07-17 21:28:10] [Rank 0] step:2081/10000 train_time:464421ms step_avg:223.17ms +[2025-07-17 21:28:15] [Rank 0] step:2101/10000 train_time:468946ms step_avg:223.20ms 
+[2025-07-17 21:28:15] [Rank 0] step:2101/10000 train_time:468946ms step_avg:223.20ms +[2025-07-17 21:28:19] [Rank 0] step:2121/10000 train_time:473475ms step_avg:223.23ms +[2025-07-17 21:28:19] [Rank 0] step:2121/10000 train_time:473475ms step_avg:223.23ms +[2025-07-17 21:28:24] [Rank 0] PRINT: step:2125/10000 val_loss:4.5636 train_time:475599ms step_avg:223.81ms +[2025-07-17 21:28:24] [Rank 0] PRINT: step:2125/10000 val_loss:4.5636 train_time:475599ms step_avg:223.81ms +[2025-07-17 21:28:28] [Rank 0] step:2141/10000 train_time:477998ms step_avg:223.26ms +[2025-07-17 21:28:28] [Rank 0] step:2141/10000 train_time:477998ms step_avg:223.26ms +[2025-07-17 21:28:33] [Rank 0] step:2161/10000 train_time:482521ms step_avg:223.29ms +[2025-07-17 21:28:33] [Rank 0] step:2161/10000 train_time:482521ms step_avg:223.29ms +[2025-07-17 21:28:37] [Rank 0] step:2181/10000 train_time:487047ms step_avg:223.31ms +[2025-07-17 21:28:37] [Rank 0] step:2181/10000 train_time:487047ms step_avg:223.31ms +[2025-07-17 21:28:42] [Rank 0] step:2201/10000 train_time:491575ms step_avg:223.34ms +[2025-07-17 21:28:42] [Rank 0] step:2201/10000 train_time:491575ms step_avg:223.34ms +[2025-07-17 21:28:46] [Rank 0] step:2221/10000 train_time:496099ms step_avg:223.37ms +[2025-07-17 21:28:46] [Rank 0] step:2221/10000 train_time:496099ms step_avg:223.37ms +[2025-07-17 21:28:51] [Rank 0] step:2241/10000 train_time:500693ms step_avg:223.42ms +[2025-07-17 21:28:51] [Rank 0] step:2241/10000 train_time:500693ms step_avg:223.42ms +[2025-07-17 21:28:57] [Rank 0] PRINT: step:2250/10000 val_loss:4.0820 train_time:504029ms step_avg:224.01ms +[2025-07-17 21:28:57] [Rank 0] PRINT: step:2250/10000 val_loss:4.0820 train_time:504029ms step_avg:224.01ms +[2025-07-17 21:29:00] [Rank 0] step:2261/10000 train_time:505331ms step_avg:223.50ms +[2025-07-17 21:29:00] [Rank 0] step:2261/10000 train_time:505331ms step_avg:223.50ms +[2025-07-17 21:29:04] [Rank 0] step:2281/10000 train_time:509969ms step_avg:223.57ms +[2025-07-17 
21:29:04] [Rank 0] step:2281/10000 train_time:509969ms step_avg:223.57ms +[2025-07-17 21:29:09] [Rank 0] step:2301/10000 train_time:514605ms step_avg:223.64ms +[2025-07-17 21:29:09] [Rank 0] step:2301/10000 train_time:514605ms step_avg:223.64ms +[2025-07-17 21:29:14] [Rank 0] step:2321/10000 train_time:519243ms step_avg:223.72ms +[2025-07-17 21:29:14] [Rank 0] step:2321/10000 train_time:519243ms step_avg:223.72ms +[2025-07-17 21:29:18] [Rank 0] step:2341/10000 train_time:523883ms step_avg:223.79ms +[2025-07-17 21:29:18] [Rank 0] step:2341/10000 train_time:523883ms step_avg:223.79ms +[2025-07-17 21:29:23] [Rank 0] step:2361/10000 train_time:528522ms step_avg:223.86ms +[2025-07-17 21:29:23] [Rank 0] step:2361/10000 train_time:528522ms step_avg:223.86ms +[2025-07-17 21:29:31] [Rank 0] PRINT: step:2375/10000 val_loss:4.2252 train_time:533017ms step_avg:224.43ms +[2025-07-17 21:29:31] [Rank 0] PRINT: step:2375/10000 val_loss:4.2252 train_time:533017ms step_avg:224.43ms +[2025-07-17 21:29:32] [Rank 0] step:2381/10000 train_time:533161ms step_avg:223.92ms +[2025-07-17 21:29:32] [Rank 0] step:2381/10000 train_time:533161ms step_avg:223.92ms +[2025-07-17 21:29:37] [Rank 0] step:2401/10000 train_time:537798ms step_avg:223.99ms +[2025-07-17 21:29:37] [Rank 0] step:2401/10000 train_time:537798ms step_avg:223.99ms +[2025-07-17 21:29:41] [Rank 0] step:2421/10000 train_time:542435ms step_avg:224.05ms +[2025-07-17 21:29:41] [Rank 0] step:2421/10000 train_time:542435ms step_avg:224.05ms +[2025-07-17 21:29:46] [Rank 0] step:2441/10000 train_time:547075ms step_avg:224.12ms +[2025-07-17 21:29:46] [Rank 0] step:2441/10000 train_time:547075ms step_avg:224.12ms +[2025-07-17 21:29:51] [Rank 0] step:2461/10000 train_time:551713ms step_avg:224.18ms +[2025-07-17 21:29:51] [Rank 0] step:2461/10000 train_time:551713ms step_avg:224.18ms +[2025-07-17 21:29:55] [Rank 0] step:2481/10000 train_time:556349ms step_avg:224.24ms +[2025-07-17 21:29:55] [Rank 0] step:2481/10000 train_time:556349ms 
step_avg:224.24ms +[2025-07-17 21:30:04] [Rank 0] PRINT: step:2500/10000 val_loss:4.2512 train_time:562002ms step_avg:224.80ms +[2025-07-17 21:30:04] [Rank 0] PRINT: step:2500/10000 val_loss:4.2512 train_time:562002ms step_avg:224.80ms +[2025-07-17 21:30:04] [Rank 0] step:2501/10000 train_time:562011ms step_avg:224.71ms +[2025-07-17 21:30:04] [Rank 0] step:2501/10000 train_time:562011ms step_avg:224.71ms +[2025-07-17 21:30:09] [Rank 0] step:2521/10000 train_time:565630ms step_avg:224.37ms +[2025-07-17 21:30:09] [Rank 0] step:2521/10000 train_time:565630ms step_avg:224.37ms +[2025-07-17 21:30:14] [Rank 0] step:2541/10000 train_time:570271ms step_avg:224.43ms +[2025-07-17 21:30:14] [Rank 0] step:2541/10000 train_time:570271ms step_avg:224.43ms +[2025-07-17 21:30:18] [Rank 0] step:2561/10000 train_time:574913ms step_avg:224.49ms +[2025-07-17 21:30:18] [Rank 0] step:2561/10000 train_time:574913ms step_avg:224.49ms +[2025-07-17 21:30:23] [Rank 0] step:2581/10000 train_time:579556ms step_avg:224.55ms +[2025-07-17 21:30:23] [Rank 0] step:2581/10000 train_time:579556ms step_avg:224.55ms +[2025-07-17 21:30:28] [Rank 0] step:2601/10000 train_time:584201ms step_avg:224.61ms +[2025-07-17 21:30:28] [Rank 0] step:2601/10000 train_time:584201ms step_avg:224.61ms +[2025-07-17 21:30:32] [Rank 0] step:2621/10000 train_time:588843ms step_avg:224.66ms +[2025-07-17 21:30:32] [Rank 0] step:2621/10000 train_time:588843ms step_avg:224.66ms +[2025-07-17 21:30:38] [Rank 0] PRINT: step:2625/10000 val_loss:4.2349 train_time:591022ms step_avg:225.15ms +[2025-07-17 21:30:38] [Rank 0] PRINT: step:2625/10000 val_loss:4.2349 train_time:591022ms step_avg:225.15ms +[2025-07-17 21:30:41] [Rank 0] step:2641/10000 train_time:593479ms step_avg:224.72ms +[2025-07-17 21:30:41] [Rank 0] step:2641/10000 train_time:593479ms step_avg:224.72ms +[2025-07-17 21:30:46] [Rank 0] step:2661/10000 train_time:598118ms step_avg:224.77ms +[2025-07-17 21:30:46] [Rank 0] step:2661/10000 train_time:598118ms 
step_avg:224.77ms +[2025-07-17 21:30:51] [Rank 0] step:2681/10000 train_time:602754ms step_avg:224.82ms +[2025-07-17 21:30:51] [Rank 0] step:2681/10000 train_time:602754ms step_avg:224.82ms +[2025-07-17 21:30:55] [Rank 0] step:2701/10000 train_time:607391ms step_avg:224.88ms +[2025-07-17 21:30:55] [Rank 0] step:2701/10000 train_time:607391ms step_avg:224.88ms +[2025-07-17 21:31:00] [Rank 0] step:2721/10000 train_time:612028ms step_avg:224.93ms +[2025-07-17 21:31:00] [Rank 0] step:2721/10000 train_time:612028ms step_avg:224.93ms +[2025-07-17 21:31:05] [Rank 0] step:2741/10000 train_time:616665ms step_avg:224.98ms +[2025-07-17 21:31:05] [Rank 0] step:2741/10000 train_time:616665ms step_avg:224.98ms +[2025-07-17 21:31:11] [Rank 0] PRINT: step:2750/10000 val_loss:4.1725 train_time:620000ms step_avg:225.45ms +[2025-07-17 21:31:11] [Rank 0] PRINT: step:2750/10000 val_loss:4.1725 train_time:620000ms step_avg:225.45ms +[2025-07-17 21:31:13] [Rank 0] step:2761/10000 train_time:621300ms step_avg:225.03ms +[2025-07-17 21:31:13] [Rank 0] step:2761/10000 train_time:621300ms step_avg:225.03ms +[2025-07-17 21:31:18] [Rank 0] step:2781/10000 train_time:625934ms step_avg:225.08ms +[2025-07-17 21:31:18] [Rank 0] step:2781/10000 train_time:625934ms step_avg:225.08ms +[2025-07-17 21:31:23] [Rank 0] step:2801/10000 train_time:630570ms step_avg:225.12ms +[2025-07-17 21:31:23] [Rank 0] step:2801/10000 train_time:630570ms step_avg:225.12ms +[2025-07-17 21:31:27] [Rank 0] step:2821/10000 train_time:635203ms step_avg:225.17ms +[2025-07-17 21:31:27] [Rank 0] step:2821/10000 train_time:635203ms step_avg:225.17ms +[2025-07-17 21:31:32] [Rank 0] step:2841/10000 train_time:639837ms step_avg:225.22ms +[2025-07-17 21:31:32] [Rank 0] step:2841/10000 train_time:639837ms step_avg:225.22ms +[2025-07-17 21:31:37] [Rank 0] step:2861/10000 train_time:644471ms step_avg:225.26ms +[2025-07-17 21:31:37] [Rank 0] step:2861/10000 train_time:644471ms step_avg:225.26ms +[2025-07-17 21:31:44] [Rank 0] PRINT: 
step:2875/10000 val_loss:4.1631 train_time:648963ms step_avg:225.73ms +[2025-07-17 21:31:44] [Rank 0] PRINT: step:2875/10000 val_loss:4.1631 train_time:648963ms step_avg:225.73ms +[2025-07-17 21:31:46] [Rank 0] step:2881/10000 train_time:649106ms step_avg:225.31ms +[2025-07-17 21:31:46] [Rank 0] step:2881/10000 train_time:649106ms step_avg:225.31ms +[2025-07-17 21:31:50] [Rank 0] step:2901/10000 train_time:653739ms step_avg:225.35ms +[2025-07-17 21:31:50] [Rank 0] step:2901/10000 train_time:653739ms step_avg:225.35ms +[2025-07-17 21:31:55] [Rank 0] step:2921/10000 train_time:658372ms step_avg:225.39ms +[2025-07-17 21:31:55] [Rank 0] step:2921/10000 train_time:658372ms step_avg:225.39ms +[2025-07-17 21:32:00] [Rank 0] step:2941/10000 train_time:663008ms step_avg:225.44ms +[2025-07-17 21:32:00] [Rank 0] step:2941/10000 train_time:663008ms step_avg:225.44ms +[2025-07-17 21:32:04] [Rank 0] step:2961/10000 train_time:667644ms step_avg:225.48ms +[2025-07-17 21:32:04] [Rank 0] step:2961/10000 train_time:667644ms step_avg:225.48ms +[2025-07-17 21:32:09] [Rank 0] step:2981/10000 train_time:672292ms step_avg:225.53ms +[2025-07-17 21:32:09] [Rank 0] step:2981/10000 train_time:672292ms step_avg:225.53ms +[2025-07-17 21:32:18] [Rank 0] PRINT: step:3000/10000 val_loss:4.0728 train_time:677971ms step_avg:225.99ms +[2025-07-17 21:32:18] [Rank 0] PRINT: step:3000/10000 val_loss:4.0728 train_time:677971ms step_avg:225.99ms +[2025-07-17 21:32:18] [Rank 0] step:3001/10000 train_time:677980ms step_avg:225.92ms +[2025-07-17 21:32:18] [Rank 0] step:3001/10000 train_time:677980ms step_avg:225.92ms +[2025-07-17 21:32:23] [Rank 0] step:3021/10000 train_time:681609ms step_avg:225.62ms +[2025-07-17 21:32:23] [Rank 0] step:3021/10000 train_time:681609ms step_avg:225.62ms +[2025-07-17 21:32:27] [Rank 0] step:3041/10000 train_time:686266ms step_avg:225.67ms +[2025-07-17 21:32:27] [Rank 0] step:3041/10000 train_time:686266ms step_avg:225.67ms +[2025-07-17 21:32:32] [Rank 0] step:3061/10000 
train_time:690925ms step_avg:225.72ms +[2025-07-17 21:32:32] [Rank 0] step:3061/10000 train_time:690925ms step_avg:225.72ms +[2025-07-17 21:32:37] [Rank 0] step:3081/10000 train_time:695584ms step_avg:225.77ms +[2025-07-17 21:32:37] [Rank 0] step:3081/10000 train_time:695584ms step_avg:225.77ms +[2025-07-17 21:32:41] [Rank 0] step:3101/10000 train_time:700242ms step_avg:225.81ms +[2025-07-17 21:32:41] [Rank 0] step:3101/10000 train_time:700242ms step_avg:225.81ms +[2025-07-17 21:32:46] [Rank 0] step:3121/10000 train_time:704902ms step_avg:225.86ms +[2025-07-17 21:32:46] [Rank 0] step:3121/10000 train_time:704902ms step_avg:225.86ms +[2025-07-17 21:32:52] [Rank 0] PRINT: step:3125/10000 val_loss:4.1055 train_time:707088ms step_avg:226.27ms +[2025-07-17 21:32:52] [Rank 0] PRINT: step:3125/10000 val_loss:4.1055 train_time:707088ms step_avg:226.27ms +[2025-07-17 21:32:55] [Rank 0] step:3141/10000 train_time:709556ms step_avg:225.90ms +[2025-07-17 21:32:55] [Rank 0] step:3141/10000 train_time:709556ms step_avg:225.90ms +[2025-07-17 21:33:00] [Rank 0] step:3161/10000 train_time:714216ms step_avg:225.95ms +[2025-07-17 21:33:00] [Rank 0] step:3161/10000 train_time:714216ms step_avg:225.95ms +[2025-07-17 21:33:05] [Rank 0] step:3181/10000 train_time:718877ms step_avg:225.99ms +[2025-07-17 21:33:05] [Rank 0] step:3181/10000 train_time:718877ms step_avg:225.99ms +[2025-07-17 21:33:09] [Rank 0] step:3201/10000 train_time:723534ms step_avg:226.03ms +[2025-07-17 21:33:09] [Rank 0] step:3201/10000 train_time:723534ms step_avg:226.03ms +[2025-07-17 21:33:14] [Rank 0] step:3221/10000 train_time:728197ms step_avg:226.08ms +[2025-07-17 21:33:14] [Rank 0] step:3221/10000 train_time:728197ms step_avg:226.08ms +[2025-07-17 21:33:19] [Rank 0] step:3241/10000 train_time:732859ms step_avg:226.12ms +[2025-07-17 21:33:19] [Rank 0] step:3241/10000 train_time:732859ms step_avg:226.12ms +[2025-07-17 21:33:25] [Rank 0] PRINT: step:3250/10000 val_loss:4.0596 train_time:736211ms step_avg:226.53ms 
+[2025-07-17 21:33:25] [Rank 0] PRINT: step:3250/10000 val_loss:4.0596 train_time:736211ms step_avg:226.53ms +[2025-07-17 21:33:28] [Rank 0] step:3261/10000 train_time:737518ms step_avg:226.16ms +[2025-07-17 21:33:28] [Rank 0] step:3261/10000 train_time:737518ms step_avg:226.16ms +[2025-07-17 21:33:33] [Rank 0] step:3281/10000 train_time:742181ms step_avg:226.21ms +[2025-07-17 21:33:33] [Rank 0] step:3281/10000 train_time:742181ms step_avg:226.21ms +[2025-07-17 21:33:37] [Rank 0] step:3301/10000 train_time:746843ms step_avg:226.25ms +[2025-07-17 21:33:37] [Rank 0] step:3301/10000 train_time:746843ms step_avg:226.25ms +[2025-07-17 21:33:42] [Rank 0] step:3321/10000 train_time:751505ms step_avg:226.29ms +[2025-07-17 21:33:42] [Rank 0] step:3321/10000 train_time:751505ms step_avg:226.29ms +[2025-07-17 21:33:47] [Rank 0] step:3341/10000 train_time:756170ms step_avg:226.33ms +[2025-07-17 21:33:47] [Rank 0] step:3341/10000 train_time:756170ms step_avg:226.33ms +[2025-07-17 21:33:51] [Rank 0] step:3361/10000 train_time:760834ms step_avg:226.37ms +[2025-07-17 21:33:51] [Rank 0] step:3361/10000 train_time:760834ms step_avg:226.37ms +[2025-07-17 21:33:59] [Rank 0] PRINT: step:3375/10000 val_loss:4.1048 train_time:765355ms step_avg:226.77ms +[2025-07-17 21:33:59] [Rank 0] PRINT: step:3375/10000 val_loss:4.1048 train_time:765355ms step_avg:226.77ms +[2025-07-17 21:34:01] [Rank 0] step:3381/10000 train_time:765500ms step_avg:226.41ms +[2025-07-17 21:34:01] [Rank 0] step:3381/10000 train_time:765500ms step_avg:226.41ms +[2025-07-17 21:34:05] [Rank 0] step:3401/10000 train_time:770161ms step_avg:226.45ms +[2025-07-17 21:34:05] [Rank 0] step:3401/10000 train_time:770161ms step_avg:226.45ms +[2025-07-17 21:34:10] [Rank 0] step:3421/10000 train_time:774821ms step_avg:226.49ms +[2025-07-17 21:34:10] [Rank 0] step:3421/10000 train_time:774821ms step_avg:226.49ms +[2025-07-17 21:34:15] [Rank 0] step:3441/10000 train_time:779484ms step_avg:226.53ms +[2025-07-17 21:34:15] [Rank 0] 
step:3441/10000 train_time:779484ms step_avg:226.53ms +[2025-07-17 21:34:19] [Rank 0] step:3461/10000 train_time:784148ms step_avg:226.57ms +[2025-07-17 21:34:19] [Rank 0] step:3461/10000 train_time:784148ms step_avg:226.57ms +[2025-07-17 21:34:24] [Rank 0] step:3481/10000 train_time:788812ms step_avg:226.60ms +[2025-07-17 21:34:24] [Rank 0] step:3481/10000 train_time:788812ms step_avg:226.60ms +[2025-07-17 21:34:33] [Rank 0] PRINT: step:3500/10000 val_loss:4.1476 train_time:794498ms step_avg:227.00ms +[2025-07-17 21:34:33] [Rank 0] PRINT: step:3500/10000 val_loss:4.1476 train_time:794498ms step_avg:227.00ms +[2025-07-17 21:34:33] [Rank 0] step:3501/10000 train_time:794507ms step_avg:226.94ms +[2025-07-17 21:34:33] [Rank 0] step:3501/10000 train_time:794507ms step_avg:226.94ms +[2025-07-17 21:34:38] [Rank 0] step:3521/10000 train_time:798137ms step_avg:226.68ms +[2025-07-17 21:34:38] [Rank 0] step:3521/10000 train_time:798137ms step_avg:226.68ms +[2025-07-17 21:34:42] [Rank 0] step:3541/10000 train_time:802797ms step_avg:226.71ms +[2025-07-17 21:34:42] [Rank 0] step:3541/10000 train_time:802797ms step_avg:226.71ms +[2025-07-17 21:34:47] [Rank 0] step:3561/10000 train_time:807458ms step_avg:226.75ms +[2025-07-17 21:34:47] [Rank 0] step:3561/10000 train_time:807458ms step_avg:226.75ms +[2025-07-17 21:34:52] [Rank 0] step:3581/10000 train_time:812119ms step_avg:226.79ms +[2025-07-17 21:34:52] [Rank 0] step:3581/10000 train_time:812119ms step_avg:226.79ms +[2025-07-17 21:34:56] [Rank 0] step:3601/10000 train_time:816781ms step_avg:226.82ms +[2025-07-17 21:34:56] [Rank 0] step:3601/10000 train_time:816781ms step_avg:226.82ms +[2025-07-17 21:35:01] [Rank 0] step:3621/10000 train_time:821444ms step_avg:226.86ms +[2025-07-17 21:35:01] [Rank 0] step:3621/10000 train_time:821444ms step_avg:226.86ms +[2025-07-17 21:35:07] [Rank 0] PRINT: step:3625/10000 val_loss:4.1298 train_time:823633ms step_avg:227.21ms +[2025-07-17 21:35:07] [Rank 0] PRINT: step:3625/10000 val_loss:4.1298 
train_time:823633ms step_avg:227.21ms +[2025-07-17 21:35:10] [Rank 0] step:3641/10000 train_time:826102ms step_avg:226.89ms +[2025-07-17 21:35:10] [Rank 0] step:3641/10000 train_time:826102ms step_avg:226.89ms +[2025-07-17 21:35:15] [Rank 0] step:3661/10000 train_time:830767ms step_avg:226.92ms +[2025-07-17 21:35:15] [Rank 0] step:3661/10000 train_time:830767ms step_avg:226.92ms +[2025-07-17 21:35:20] [Rank 0] step:3681/10000 train_time:835426ms step_avg:226.96ms +[2025-07-17 21:35:20] [Rank 0] step:3681/10000 train_time:835426ms step_avg:226.96ms +[2025-07-17 21:35:24] [Rank 0] step:3701/10000 train_time:840090ms step_avg:226.99ms +[2025-07-17 21:35:24] [Rank 0] step:3701/10000 train_time:840090ms step_avg:226.99ms +[2025-07-17 21:35:29] [Rank 0] step:3721/10000 train_time:844805ms step_avg:227.04ms +[2025-07-17 21:35:29] [Rank 0] step:3721/10000 train_time:844805ms step_avg:227.04ms +[2025-07-17 21:35:34] [Rank 0] step:3741/10000 train_time:849554ms step_avg:227.09ms +[2025-07-17 21:35:34] [Rank 0] step:3741/10000 train_time:849554ms step_avg:227.09ms +[2025-07-17 21:35:41] [Rank 0] PRINT: step:3750/10000 val_loss:4.0910 train_time:852969ms step_avg:227.46ms +[2025-07-17 21:35:41] [Rank 0] PRINT: step:3750/10000 val_loss:4.0910 train_time:852969ms step_avg:227.46ms +[2025-07-17 21:35:43] [Rank 0] step:3761/10000 train_time:854303ms step_avg:227.15ms +[2025-07-17 21:35:43] [Rank 0] step:3761/10000 train_time:854303ms step_avg:227.15ms +[2025-07-17 21:35:48] [Rank 0] step:3781/10000 train_time:859051ms step_avg:227.20ms +[2025-07-17 21:35:48] [Rank 0] step:3781/10000 train_time:859051ms step_avg:227.20ms +[2025-07-17 21:35:53] [Rank 0] step:3801/10000 train_time:863802ms step_avg:227.26ms +[2025-07-17 21:35:53] [Rank 0] step:3801/10000 train_time:863802ms step_avg:227.26ms +[2025-07-17 21:35:57] [Rank 0] step:3821/10000 train_time:868553ms step_avg:227.31ms +[2025-07-17 21:35:57] [Rank 0] step:3821/10000 train_time:868553ms step_avg:227.31ms +[2025-07-17 21:36:02] 
[Rank 0] step:3841/10000 train_time:873302ms step_avg:227.36ms +[2025-07-17 21:36:02] [Rank 0] step:3841/10000 train_time:873302ms step_avg:227.36ms +[2025-07-17 21:36:07] [Rank 0] step:3861/10000 train_time:878052ms step_avg:227.42ms +[2025-07-17 21:36:07] [Rank 0] step:3861/10000 train_time:878052ms step_avg:227.42ms +[2025-07-17 21:36:15] [Rank 0] PRINT: step:3875/10000 val_loss:4.0133 train_time:882656ms step_avg:227.78ms +[2025-07-17 21:36:15] [Rank 0] PRINT: step:3875/10000 val_loss:4.0133 train_time:882656ms step_avg:227.78ms +[2025-07-17 21:36:16] [Rank 0] step:3881/10000 train_time:882805ms step_avg:227.47ms +[2025-07-17 21:36:16] [Rank 0] step:3881/10000 train_time:882805ms step_avg:227.47ms +[2025-07-17 21:36:21] [Rank 0] step:3901/10000 train_time:887555ms step_avg:227.52ms +[2025-07-17 21:36:21] [Rank 0] step:3901/10000 train_time:887555ms step_avg:227.52ms +[2025-07-17 21:36:26] [Rank 0] step:3921/10000 train_time:892300ms step_avg:227.57ms +[2025-07-17 21:36:26] [Rank 0] step:3921/10000 train_time:892300ms step_avg:227.57ms +[2025-07-17 21:36:31] [Rank 0] step:3941/10000 train_time:897047ms step_avg:227.62ms +[2025-07-17 21:36:31] [Rank 0] step:3941/10000 train_time:897047ms step_avg:227.62ms +[2025-07-17 21:36:35] [Rank 0] step:3961/10000 train_time:901795ms step_avg:227.67ms +[2025-07-17 21:36:35] [Rank 0] step:3961/10000 train_time:901795ms step_avg:227.67ms +[2025-07-17 21:36:40] [Rank 0] step:3981/10000 train_time:906543ms step_avg:227.72ms +[2025-07-17 21:36:40] [Rank 0] step:3981/10000 train_time:906543ms step_avg:227.72ms +[2025-07-17 21:36:49] [Rank 0] PRINT: step:4000/10000 val_loss:4.0506 train_time:912324ms step_avg:228.08ms +[2025-07-17 21:36:49] [Rank 0] PRINT: step:4000/10000 val_loss:4.0506 train_time:912324ms step_avg:228.08ms +[2025-07-17 21:36:49] [Rank 0] step:4001/10000 train_time:912332ms step_avg:228.03ms +[2025-07-17 21:36:49] [Rank 0] step:4001/10000 train_time:912332ms step_avg:228.03ms +[2025-07-17 21:36:54] [Rank 0] 
step:4021/10000 train_time:916036ms step_avg:227.81ms +[2025-07-17 21:36:54] [Rank 0] step:4021/10000 train_time:916036ms step_avg:227.81ms +[2025-07-17 21:36:59] [Rank 0] step:4041/10000 train_time:920786ms step_avg:227.86ms +[2025-07-17 21:36:59] [Rank 0] step:4041/10000 train_time:920786ms step_avg:227.86ms +[2025-07-17 21:37:04] [Rank 0] step:4061/10000 train_time:925534ms step_avg:227.91ms +[2025-07-17 21:37:04] [Rank 0] step:4061/10000 train_time:925534ms step_avg:227.91ms +[2025-07-17 21:37:08] [Rank 0] step:4081/10000 train_time:930289ms step_avg:227.96ms +[2025-07-17 21:37:08] [Rank 0] step:4081/10000 train_time:930289ms step_avg:227.96ms +[2025-07-17 21:37:13] [Rank 0] step:4101/10000 train_time:935040ms step_avg:228.00ms +[2025-07-17 21:37:13] [Rank 0] step:4101/10000 train_time:935040ms step_avg:228.00ms +[2025-07-17 21:37:18] [Rank 0] step:4121/10000 train_time:939798ms step_avg:228.05ms +[2025-07-17 21:37:18] [Rank 0] step:4121/10000 train_time:939798ms step_avg:228.05ms +[2025-07-17 21:37:24] [Rank 0] PRINT: step:4125/10000 val_loss:4.0602 train_time:942030ms step_avg:228.37ms +[2025-07-17 21:37:24] [Rank 0] PRINT: step:4125/10000 val_loss:4.0602 train_time:942030ms step_avg:228.37ms +[2025-07-17 21:37:27] [Rank 0] step:4141/10000 train_time:944549ms step_avg:228.10ms +[2025-07-17 21:37:27] [Rank 0] step:4141/10000 train_time:944549ms step_avg:228.10ms +[2025-07-17 21:37:32] [Rank 0] step:4161/10000 train_time:949299ms step_avg:228.14ms +[2025-07-17 21:37:32] [Rank 0] step:4161/10000 train_time:949299ms step_avg:228.14ms +[2025-07-17 21:37:37] [Rank 0] step:4181/10000 train_time:954048ms step_avg:228.19ms +[2025-07-17 21:37:37] [Rank 0] step:4181/10000 train_time:954048ms step_avg:228.19ms +[2025-07-17 21:37:42] [Rank 0] step:4201/10000 train_time:958800ms step_avg:228.23ms +[2025-07-17 21:37:42] [Rank 0] step:4201/10000 train_time:958800ms step_avg:228.23ms +[2025-07-17 21:37:46] [Rank 0] step:4221/10000 train_time:963553ms step_avg:228.28ms 
+[2025-07-17 21:37:46] [Rank 0] step:4221/10000 train_time:963553ms step_avg:228.28ms +[2025-07-17 21:37:51] [Rank 0] step:4241/10000 train_time:968303ms step_avg:228.32ms +[2025-07-17 21:37:51] [Rank 0] step:4241/10000 train_time:968303ms step_avg:228.32ms +[2025-07-17 21:37:58] [Rank 0] PRINT: step:4250/10000 val_loss:4.0504 train_time:971722ms step_avg:228.64ms +[2025-07-17 21:37:58] [Rank 0] PRINT: step:4250/10000 val_loss:4.0504 train_time:971722ms step_avg:228.64ms +[2025-07-17 21:38:01] [Rank 0] step:4261/10000 train_time:973055ms step_avg:228.36ms +[2025-07-17 21:38:01] [Rank 0] step:4261/10000 train_time:973055ms step_avg:228.36ms +[2025-07-17 21:38:05] [Rank 0] step:4281/10000 train_time:977812ms step_avg:228.41ms +[2025-07-17 21:38:05] [Rank 0] step:4281/10000 train_time:977812ms step_avg:228.41ms +[2025-07-17 21:38:10] [Rank 0] step:4301/10000 train_time:982566ms step_avg:228.45ms +[2025-07-17 21:38:10] [Rank 0] step:4301/10000 train_time:982566ms step_avg:228.45ms +[2025-07-17 21:38:15] [Rank 0] step:4321/10000 train_time:987327ms step_avg:228.50ms +[2025-07-17 21:38:15] [Rank 0] step:4321/10000 train_time:987327ms step_avg:228.50ms +[2025-07-17 21:38:20] [Rank 0] step:4341/10000 train_time:992084ms step_avg:228.54ms +[2025-07-17 21:38:20] [Rank 0] step:4341/10000 train_time:992084ms step_avg:228.54ms +[2025-07-17 21:38:24] [Rank 0] step:4361/10000 train_time:996841ms step_avg:228.58ms +[2025-07-17 21:38:24] [Rank 0] step:4361/10000 train_time:996841ms step_avg:228.58ms +[2025-07-17 21:38:32] [Rank 0] PRINT: step:4375/10000 val_loss:4.0598 train_time:1001449ms step_avg:228.90ms +[2025-07-17 21:38:32] [Rank 0] PRINT: step:4375/10000 val_loss:4.0598 train_time:1001449ms step_avg:228.90ms +[2025-07-17 21:38:34] [Rank 0] step:4381/10000 train_time:1001596ms step_avg:228.62ms +[2025-07-17 21:38:34] [Rank 0] step:4381/10000 train_time:1001596ms step_avg:228.62ms +[2025-07-17 21:38:39] [Rank 0] step:4401/10000 train_time:1006348ms step_avg:228.66ms 
+[2025-07-17 21:38:39] [Rank 0] step:4401/10000 train_time:1006348ms step_avg:228.66ms +[2025-07-17 21:38:43] [Rank 0] step:4421/10000 train_time:1011105ms step_avg:228.71ms +[2025-07-17 21:38:43] [Rank 0] step:4421/10000 train_time:1011105ms step_avg:228.71ms +[2025-07-17 21:38:48] [Rank 0] step:4441/10000 train_time:1015860ms step_avg:228.75ms +[2025-07-17 21:38:48] [Rank 0] step:4441/10000 train_time:1015860ms step_avg:228.75ms +[2025-07-17 21:38:53] [Rank 0] step:4461/10000 train_time:1020627ms step_avg:228.79ms +[2025-07-17 21:38:53] [Rank 0] step:4461/10000 train_time:1020627ms step_avg:228.79ms +[2025-07-17 21:38:58] [Rank 0] step:4481/10000 train_time:1025401ms step_avg:228.83ms +[2025-07-17 21:38:58] [Rank 0] step:4481/10000 train_time:1025401ms step_avg:228.83ms +[2025-07-17 21:39:07] [Rank 0] PRINT: step:4500/10000 val_loss:3.9628 train_time:1031218ms step_avg:229.16ms +[2025-07-17 21:39:07] [Rank 0] PRINT: step:4500/10000 val_loss:3.9628 train_time:1031218ms step_avg:229.16ms +[2025-07-17 21:39:07] [Rank 0] step:4501/10000 train_time:1031226ms step_avg:229.11ms +[2025-07-17 21:39:07] [Rank 0] step:4501/10000 train_time:1031226ms step_avg:229.11ms +[2025-07-17 21:39:12] [Rank 0] step:4521/10000 train_time:1034937ms step_avg:228.92ms +[2025-07-17 21:39:12] [Rank 0] step:4521/10000 train_time:1034937ms step_avg:228.92ms +[2025-07-17 21:39:17] [Rank 0] step:4541/10000 train_time:1039702ms step_avg:228.96ms +[2025-07-17 21:39:17] [Rank 0] step:4541/10000 train_time:1039702ms step_avg:228.96ms +[2025-07-17 21:39:21] [Rank 0] step:4561/10000 train_time:1044464ms step_avg:229.00ms +[2025-07-17 21:39:21] [Rank 0] step:4561/10000 train_time:1044464ms step_avg:229.00ms +[2025-07-17 21:39:26] [Rank 0] step:4581/10000 train_time:1049229ms step_avg:229.04ms +[2025-07-17 21:39:26] [Rank 0] step:4581/10000 train_time:1049229ms step_avg:229.04ms +[2025-07-17 21:39:31] [Rank 0] step:4601/10000 train_time:1053995ms step_avg:229.08ms +[2025-07-17 21:39:31] [Rank 0] 
step:4601/10000 train_time:1053995ms step_avg:229.08ms +[2025-07-17 21:39:36] [Rank 0] step:4621/10000 train_time:1058758ms step_avg:229.12ms +[2025-07-17 21:39:36] [Rank 0] step:4621/10000 train_time:1058758ms step_avg:229.12ms +[2025-07-17 21:39:41] [Rank 0] PRINT: step:4625/10000 val_loss:4.1838 train_time:1060997ms step_avg:229.40ms +[2025-07-17 21:39:41] [Rank 0] PRINT: step:4625/10000 val_loss:4.1838 train_time:1060997ms step_avg:229.40ms +[2025-07-17 21:39:45] [Rank 0] step:4641/10000 train_time:1063522ms step_avg:229.16ms +[2025-07-17 21:39:45] [Rank 0] step:4641/10000 train_time:1063522ms step_avg:229.16ms +[2025-07-17 21:39:50] [Rank 0] step:4661/10000 train_time:1068289ms step_avg:229.20ms +[2025-07-17 21:39:50] [Rank 0] step:4661/10000 train_time:1068289ms step_avg:229.20ms +[2025-07-17 21:39:55] [Rank 0] step:4681/10000 train_time:1073058ms step_avg:229.24ms +[2025-07-17 21:39:55] [Rank 0] step:4681/10000 train_time:1073058ms step_avg:229.24ms +[2025-07-17 21:39:59] [Rank 0] step:4701/10000 train_time:1077828ms step_avg:229.28ms +[2025-07-17 21:39:59] [Rank 0] step:4701/10000 train_time:1077828ms step_avg:229.28ms +[2025-07-17 21:40:04] [Rank 0] step:4721/10000 train_time:1082591ms step_avg:229.31ms +[2025-07-17 21:40:04] [Rank 0] step:4721/10000 train_time:1082591ms step_avg:229.31ms +[2025-07-17 21:40:09] [Rank 0] step:4741/10000 train_time:1087358ms step_avg:229.35ms +[2025-07-17 21:40:09] [Rank 0] step:4741/10000 train_time:1087358ms step_avg:229.35ms +[2025-07-17 21:40:16] [Rank 0] PRINT: step:4750/10000 val_loss:4.1964 train_time:1090784ms step_avg:229.64ms +[2025-07-17 21:40:16] [Rank 0] PRINT: step:4750/10000 val_loss:4.1964 train_time:1090784ms step_avg:229.64ms +[2025-07-17 21:40:18] [Rank 0] step:4761/10000 train_time:1092116ms step_avg:229.39ms +[2025-07-17 21:40:18] [Rank 0] step:4761/10000 train_time:1092116ms step_avg:229.39ms +[2025-07-17 21:40:23] [Rank 0] step:4781/10000 train_time:1096874ms step_avg:229.42ms +[2025-07-17 21:40:23] 
[Rank 0] step:4781/10000 train_time:1096874ms step_avg:229.42ms +[2025-07-17 21:40:28] [Rank 0] step:4801/10000 train_time:1101630ms step_avg:229.46ms +[2025-07-17 21:40:28] [Rank 0] step:4801/10000 train_time:1101630ms step_avg:229.46ms +[2025-07-17 21:40:33] [Rank 0] step:4821/10000 train_time:1106393ms step_avg:229.49ms +[2025-07-17 21:40:33] [Rank 0] step:4821/10000 train_time:1106393ms step_avg:229.49ms +[2025-07-17 21:40:37] [Rank 0] step:4841/10000 train_time:1111164ms step_avg:229.53ms +[2025-07-17 21:40:37] [Rank 0] step:4841/10000 train_time:1111164ms step_avg:229.53ms +[2025-07-17 21:40:42] [Rank 0] step:4861/10000 train_time:1115930ms step_avg:229.57ms +[2025-07-17 21:40:42] [Rank 0] step:4861/10000 train_time:1115930ms step_avg:229.57ms +[2025-07-17 21:40:50] [Rank 0] PRINT: step:4875/10000 val_loss:4.2172 train_time:1120552ms step_avg:229.86ms +[2025-07-17 21:40:50] [Rank 0] PRINT: step:4875/10000 val_loss:4.2172 train_time:1120552ms step_avg:229.86ms +[2025-07-17 21:40:52] [Rank 0] step:4881/10000 train_time:1120698ms step_avg:229.60ms +[2025-07-17 21:40:52] [Rank 0] step:4881/10000 train_time:1120698ms step_avg:229.60ms +[2025-07-17 21:40:56] [Rank 0] step:4901/10000 train_time:1125465ms step_avg:229.64ms +[2025-07-17 21:40:56] [Rank 0] step:4901/10000 train_time:1125465ms step_avg:229.64ms +[2025-07-17 21:41:01] [Rank 0] step:4921/10000 train_time:1130227ms step_avg:229.67ms +[2025-07-17 21:41:01] [Rank 0] step:4921/10000 train_time:1130227ms step_avg:229.67ms +[2025-07-17 21:41:06] [Rank 0] step:4941/10000 train_time:1134991ms step_avg:229.71ms +[2025-07-17 21:41:06] [Rank 0] step:4941/10000 train_time:1134991ms step_avg:229.71ms +[2025-07-17 21:41:11] [Rank 0] step:4961/10000 train_time:1139753ms step_avg:229.74ms +[2025-07-17 21:41:11] [Rank 0] step:4961/10000 train_time:1139753ms step_avg:229.74ms +[2025-07-17 21:41:15] [Rank 0] step:4981/10000 train_time:1144513ms step_avg:229.78ms +[2025-07-17 21:41:15] [Rank 0] step:4981/10000 
train_time:1144513ms step_avg:229.78ms +[2025-07-17 21:41:24] [Rank 0] PRINT: step:5000/10000 val_loss:4.1880 train_time:1150321ms step_avg:230.06ms +[2025-07-17 21:41:24] [Rank 0] PRINT: step:5000/10000 val_loss:4.1880 train_time:1150321ms step_avg:230.06ms +[2025-07-17 21:41:24] [Rank 0] step:5001/10000 train_time:1150329ms step_avg:230.02ms +[2025-07-17 21:41:24] [Rank 0] step:5001/10000 train_time:1150329ms step_avg:230.02ms +[2025-07-17 21:41:29] [Rank 0] step:5021/10000 train_time:1154047ms step_avg:229.84ms +[2025-07-17 21:41:29] [Rank 0] step:5021/10000 train_time:1154047ms step_avg:229.84ms +[2025-07-17 21:41:34] [Rank 0] step:5041/10000 train_time:1158817ms step_avg:229.88ms +[2025-07-17 21:41:34] [Rank 0] step:5041/10000 train_time:1158817ms step_avg:229.88ms +[2025-07-17 21:41:39] [Rank 0] step:5061/10000 train_time:1163586ms step_avg:229.91ms +[2025-07-17 21:41:39] [Rank 0] step:5061/10000 train_time:1163586ms step_avg:229.91ms +[2025-07-17 21:41:43] [Rank 0] step:5081/10000 train_time:1168354ms step_avg:229.95ms +[2025-07-17 21:41:43] [Rank 0] step:5081/10000 train_time:1168354ms step_avg:229.95ms +[2025-07-17 21:41:48] [Rank 0] step:5101/10000 train_time:1173123ms step_avg:229.98ms +[2025-07-17 21:41:48] [Rank 0] step:5101/10000 train_time:1173123ms step_avg:229.98ms +[2025-07-17 21:41:53] [Rank 0] step:5121/10000 train_time:1177886ms step_avg:230.01ms +[2025-07-17 21:41:53] [Rank 0] step:5121/10000 train_time:1177886ms step_avg:230.01ms +[2025-07-17 21:41:59] [Rank 0] PRINT: step:5125/10000 val_loss:4.2022 train_time:1180129ms step_avg:230.27ms +[2025-07-17 21:41:59] [Rank 0] PRINT: step:5125/10000 val_loss:4.2022 train_time:1180129ms step_avg:230.27ms +[2025-07-17 21:42:02] [Rank 0] step:5141/10000 train_time:1182655ms step_avg:230.04ms +[2025-07-17 21:42:02] [Rank 0] step:5141/10000 train_time:1182655ms step_avg:230.04ms +[2025-07-17 21:42:07] [Rank 0] step:5161/10000 train_time:1187424ms step_avg:230.08ms +[2025-07-17 21:42:07] [Rank 0] 
step:5161/10000 train_time:1187424ms step_avg:230.08ms +[2025-07-17 21:42:12] [Rank 0] step:5181/10000 train_time:1192198ms step_avg:230.11ms +[2025-07-17 21:42:12] [Rank 0] step:5181/10000 train_time:1192198ms step_avg:230.11ms +[2025-07-17 21:42:17] [Rank 0] step:5201/10000 train_time:1197006ms step_avg:230.15ms +[2025-07-17 21:42:17] [Rank 0] step:5201/10000 train_time:1197006ms step_avg:230.15ms +[2025-07-17 21:42:22] [Rank 0] step:5221/10000 train_time:1201848ms step_avg:230.20ms +[2025-07-17 21:42:22] [Rank 0] step:5221/10000 train_time:1201848ms step_avg:230.20ms +[2025-07-17 21:42:26] [Rank 0] step:5241/10000 train_time:1206695ms step_avg:230.24ms +[2025-07-17 21:42:26] [Rank 0] step:5241/10000 train_time:1206695ms step_avg:230.24ms +[2025-07-17 21:42:33] [Rank 0] PRINT: step:5250/10000 val_loss:4.1087 train_time:1210184ms step_avg:230.51ms +[2025-07-17 21:42:33] [Rank 0] PRINT: step:5250/10000 val_loss:4.1087 train_time:1210184ms step_avg:230.51ms +[2025-07-17 21:42:36] [Rank 0] step:5261/10000 train_time:1211538ms step_avg:230.29ms +[2025-07-17 21:42:36] [Rank 0] step:5261/10000 train_time:1211538ms step_avg:230.29ms +[2025-07-17 21:42:41] [Rank 0] step:5281/10000 train_time:1216380ms step_avg:230.33ms +[2025-07-17 21:42:41] [Rank 0] step:5281/10000 train_time:1216380ms step_avg:230.33ms +[2025-07-17 21:42:46] [Rank 0] step:5301/10000 train_time:1221224ms step_avg:230.38ms +[2025-07-17 21:42:46] [Rank 0] step:5301/10000 train_time:1221224ms step_avg:230.38ms +[2025-07-17 21:42:51] [Rank 0] step:5321/10000 train_time:1226066ms step_avg:230.42ms +[2025-07-17 21:42:51] [Rank 0] step:5321/10000 train_time:1226066ms step_avg:230.42ms +[2025-07-17 21:42:55] [Rank 0] step:5341/10000 train_time:1230918ms step_avg:230.47ms +[2025-07-17 21:42:55] [Rank 0] step:5341/10000 train_time:1230918ms step_avg:230.47ms +[2025-07-17 21:43:00] [Rank 0] step:5361/10000 train_time:1235757ms step_avg:230.51ms +[2025-07-17 21:43:00] [Rank 0] step:5361/10000 train_time:1235757ms 
step_avg:230.51ms +[2025-07-17 21:43:08] [Rank 0] PRINT: step:5375/10000 val_loss:3.9428 train_time:1240449ms step_avg:230.78ms +[2025-07-17 21:43:08] [Rank 0] PRINT: step:5375/10000 val_loss:3.9428 train_time:1240449ms step_avg:230.78ms +[2025-07-17 21:43:10] [Rank 0] step:5381/10000 train_time:1240598ms step_avg:230.55ms +[2025-07-17 21:43:10] [Rank 0] step:5381/10000 train_time:1240598ms step_avg:230.55ms +[2025-07-17 21:43:15] [Rank 0] step:5401/10000 train_time:1245424ms step_avg:230.59ms +[2025-07-17 21:43:15] [Rank 0] step:5401/10000 train_time:1245424ms step_avg:230.59ms +[2025-07-17 21:43:19] [Rank 0] step:5421/10000 train_time:1250255ms step_avg:230.63ms +[2025-07-17 21:43:19] [Rank 0] step:5421/10000 train_time:1250255ms step_avg:230.63ms +[2025-07-17 21:43:24] [Rank 0] step:5441/10000 train_time:1255077ms step_avg:230.67ms +[2025-07-17 21:43:24] [Rank 0] step:5441/10000 train_time:1255077ms step_avg:230.67ms +[2025-07-17 21:43:29] [Rank 0] step:5461/10000 train_time:1259904ms step_avg:230.71ms +[2025-07-17 21:43:29] [Rank 0] step:5461/10000 train_time:1259904ms step_avg:230.71ms +[2025-07-17 21:43:34] [Rank 0] step:5481/10000 train_time:1264736ms step_avg:230.75ms +[2025-07-17 21:43:34] [Rank 0] step:5481/10000 train_time:1264736ms step_avg:230.75ms +[2025-07-17 21:43:43] [Rank 0] PRINT: step:5500/10000 val_loss:4.2448 train_time:1270617ms step_avg:231.02ms +[2025-07-17 21:43:43] [Rank 0] PRINT: step:5500/10000 val_loss:4.2448 train_time:1270617ms step_avg:231.02ms +[2025-07-17 21:43:43] [Rank 0] step:5501/10000 train_time:1270626ms step_avg:230.98ms +[2025-07-17 21:43:43] [Rank 0] step:5501/10000 train_time:1270626ms step_avg:230.98ms +[2025-07-17 21:43:48] [Rank 0] step:5521/10000 train_time:1274385ms step_avg:230.83ms +[2025-07-17 21:43:48] [Rank 0] step:5521/10000 train_time:1274385ms step_avg:230.83ms +[2025-07-17 21:43:53] [Rank 0] step:5541/10000 train_time:1279211ms step_avg:230.86ms +[2025-07-17 21:43:53] [Rank 0] step:5541/10000 
train_time:1279211ms step_avg:230.86ms +[2025-07-17 21:43:57] [Rank 0] step:5561/10000 train_time:1284042ms step_avg:230.90ms +[2025-07-17 21:43:57] [Rank 0] step:5561/10000 train_time:1284042ms step_avg:230.90ms +[2025-07-17 21:44:02] [Rank 0] step:5581/10000 train_time:1288866ms step_avg:230.94ms +[2025-07-17 21:44:02] [Rank 0] step:5581/10000 train_time:1288866ms step_avg:230.94ms +[2025-07-17 21:44:07] [Rank 0] step:5601/10000 train_time:1293697ms step_avg:230.98ms +[2025-07-17 21:44:07] [Rank 0] step:5601/10000 train_time:1293697ms step_avg:230.98ms +[2025-07-17 21:44:12] [Rank 0] step:5621/10000 train_time:1298530ms step_avg:231.01ms +[2025-07-17 21:44:12] [Rank 0] step:5621/10000 train_time:1298530ms step_avg:231.01ms +[2025-07-17 21:44:18] [Rank 0] PRINT: step:5625/10000 val_loss:3.8433 train_time:1300799ms step_avg:231.25ms +[2025-07-17 21:44:18] [Rank 0] PRINT: step:5625/10000 val_loss:3.8433 train_time:1300799ms step_avg:231.25ms +[2025-07-17 21:44:22] [Rank 0] step:5641/10000 train_time:1303359ms step_avg:231.05ms +[2025-07-17 21:44:22] [Rank 0] step:5641/10000 train_time:1303359ms step_avg:231.05ms +[2025-07-17 21:44:26] [Rank 0] step:5661/10000 train_time:1308204ms step_avg:231.09ms +[2025-07-17 21:44:26] [Rank 0] step:5661/10000 train_time:1308204ms step_avg:231.09ms +[2025-07-17 21:44:31] [Rank 0] step:5681/10000 train_time:1313045ms step_avg:231.13ms +[2025-07-17 21:44:31] [Rank 0] step:5681/10000 train_time:1313045ms step_avg:231.13ms +[2025-07-17 21:44:36] [Rank 0] step:5701/10000 train_time:1317883ms step_avg:231.17ms +[2025-07-17 21:44:36] [Rank 0] step:5701/10000 train_time:1317883ms step_avg:231.17ms +[2025-07-17 21:44:41] [Rank 0] step:5721/10000 train_time:1322719ms step_avg:231.20ms +[2025-07-17 21:44:41] [Rank 0] step:5721/10000 train_time:1322719ms step_avg:231.20ms +[2025-07-17 21:44:46] [Rank 0] step:5741/10000 train_time:1327558ms step_avg:231.24ms +[2025-07-17 21:44:46] [Rank 0] step:5741/10000 train_time:1327558ms step_avg:231.24ms 
+[2025-07-17 21:44:52] [Rank 0] PRINT: step:5750/10000 val_loss:4.2449 train_time:1331039ms step_avg:231.48ms +[2025-07-17 21:44:52] [Rank 0] PRINT: step:5750/10000 val_loss:4.2449 train_time:1331039ms step_avg:231.48ms +[2025-07-17 21:44:55] [Rank 0] step:5761/10000 train_time:1332394ms step_avg:231.28ms +[2025-07-17 21:44:55] [Rank 0] step:5761/10000 train_time:1332394ms step_avg:231.28ms +[2025-07-17 21:45:00] [Rank 0] step:5781/10000 train_time:1337234ms step_avg:231.32ms +[2025-07-17 21:45:00] [Rank 0] step:5781/10000 train_time:1337234ms step_avg:231.32ms +[2025-07-17 21:45:05] [Rank 0] step:5801/10000 train_time:1342066ms step_avg:231.35ms +[2025-07-17 21:45:05] [Rank 0] step:5801/10000 train_time:1342066ms step_avg:231.35ms +[2025-07-17 21:45:09] [Rank 0] step:5821/10000 train_time:1346904ms step_avg:231.39ms +[2025-07-17 21:45:09] [Rank 0] step:5821/10000 train_time:1346904ms step_avg:231.39ms +[2025-07-17 21:45:14] [Rank 0] step:5841/10000 train_time:1351744ms step_avg:231.42ms +[2025-07-17 21:45:14] [Rank 0] step:5841/10000 train_time:1351744ms step_avg:231.42ms +[2025-07-17 21:45:19] [Rank 0] step:5861/10000 train_time:1356579ms step_avg:231.46ms +[2025-07-17 21:45:19] [Rank 0] step:5861/10000 train_time:1356579ms step_avg:231.46ms +[2025-07-17 21:45:27] [Rank 0] PRINT: step:5875/10000 val_loss:4.1817 train_time:1361268ms step_avg:231.71ms +[2025-07-17 21:45:27] [Rank 0] PRINT: step:5875/10000 val_loss:4.1817 train_time:1361268ms step_avg:231.71ms +[2025-07-17 21:45:28] [Rank 0] step:5881/10000 train_time:1361417ms step_avg:231.49ms +[2025-07-17 21:45:28] [Rank 0] step:5881/10000 train_time:1361417ms step_avg:231.49ms +[2025-07-17 21:45:33] [Rank 0] step:5901/10000 train_time:1366260ms step_avg:231.53ms +[2025-07-17 21:45:33] [Rank 0] step:5901/10000 train_time:1366260ms step_avg:231.53ms +[2025-07-17 21:45:38] [Rank 0] step:5921/10000 train_time:1371105ms step_avg:231.57ms +[2025-07-17 21:45:38] [Rank 0] step:5921/10000 train_time:1371105ms 
step_avg:231.57ms +[2025-07-17 21:45:43] [Rank 0] step:5941/10000 train_time:1375955ms step_avg:231.60ms +[2025-07-17 21:45:43] [Rank 0] step:5941/10000 train_time:1375955ms step_avg:231.60ms +[2025-07-17 21:45:48] [Rank 0] step:5961/10000 train_time:1380814ms step_avg:231.64ms +[2025-07-17 21:45:48] [Rank 0] step:5961/10000 train_time:1380814ms step_avg:231.64ms +[2025-07-17 21:45:52] [Rank 0] step:5981/10000 train_time:1385661ms step_avg:231.68ms +[2025-07-17 21:45:52] [Rank 0] step:5981/10000 train_time:1385661ms step_avg:231.68ms +[2025-07-17 21:46:01] [Rank 0] PRINT: step:6000/10000 val_loss:4.3126 train_time:1391575ms step_avg:231.93ms +[2025-07-17 21:46:01] [Rank 0] PRINT: step:6000/10000 val_loss:4.3126 train_time:1391575ms step_avg:231.93ms +[2025-07-17 21:46:02] [Rank 0] step:6001/10000 train_time:1391584ms step_avg:231.89ms +[2025-07-17 21:46:02] [Rank 0] step:6001/10000 train_time:1391584ms step_avg:231.89ms +[2025-07-17 21:46:06] [Rank 0] step:6021/10000 train_time:1395376ms step_avg:231.75ms +[2025-07-17 21:46:06] [Rank 0] step:6021/10000 train_time:1395376ms step_avg:231.75ms +[2025-07-17 21:46:11] [Rank 0] step:6041/10000 train_time:1400218ms step_avg:231.79ms +[2025-07-17 21:46:11] [Rank 0] step:6041/10000 train_time:1400218ms step_avg:231.79ms +[2025-07-17 21:46:16] [Rank 0] step:6061/10000 train_time:1405066ms step_avg:231.82ms +[2025-07-17 21:46:16] [Rank 0] step:6061/10000 train_time:1405066ms step_avg:231.82ms +[2025-07-17 21:46:21] [Rank 0] step:6081/10000 train_time:1409916ms step_avg:231.86ms +[2025-07-17 21:46:21] [Rank 0] step:6081/10000 train_time:1409916ms step_avg:231.86ms +[2025-07-17 21:46:26] [Rank 0] step:6101/10000 train_time:1414756ms step_avg:231.89ms +[2025-07-17 21:46:26] [Rank 0] step:6101/10000 train_time:1414756ms step_avg:231.89ms +[2025-07-17 21:46:31] [Rank 0] step:6121/10000 train_time:1419614ms step_avg:231.93ms +[2025-07-17 21:46:31] [Rank 0] step:6121/10000 train_time:1419614ms step_avg:231.93ms +[2025-07-17 
21:46:36] [Rank 0] PRINT: step:6125/10000 val_loss:4.1980 train_time:1421888ms step_avg:232.15ms +[2025-07-17 21:46:36] [Rank 0] PRINT: step:6125/10000 val_loss:4.1980 train_time:1421888ms step_avg:232.15ms +[2025-07-17 21:46:40] [Rank 0] step:6141/10000 train_time:1424458ms step_avg:231.96ms +[2025-07-17 21:46:40] [Rank 0] step:6141/10000 train_time:1424458ms step_avg:231.96ms +[2025-07-17 21:46:45] [Rank 0] step:6161/10000 train_time:1429313ms step_avg:231.99ms +[2025-07-17 21:46:45] [Rank 0] step:6161/10000 train_time:1429313ms step_avg:231.99ms +[2025-07-17 21:46:50] [Rank 0] step:6181/10000 train_time:1434173ms step_avg:232.03ms +[2025-07-17 21:46:50] [Rank 0] step:6181/10000 train_time:1434173ms step_avg:232.03ms +[2025-07-17 21:46:55] [Rank 0] step:6201/10000 train_time:1439029ms step_avg:232.06ms +[2025-07-17 21:46:55] [Rank 0] step:6201/10000 train_time:1439029ms step_avg:232.06ms +[2025-07-17 21:47:00] [Rank 0] step:6221/10000 train_time:1443890ms step_avg:232.10ms +[2025-07-17 21:47:00] [Rank 0] step:6221/10000 train_time:1443890ms step_avg:232.10ms +[2025-07-17 21:47:05] [Rank 0] step:6241/10000 train_time:1448746ms step_avg:232.13ms +[2025-07-17 21:47:05] [Rank 0] step:6241/10000 train_time:1448746ms step_avg:232.13ms +[2025-07-17 21:47:11] [Rank 0] PRINT: step:6250/10000 val_loss:4.2944 train_time:1452236ms step_avg:232.36ms +[2025-07-17 21:47:11] [Rank 0] PRINT: step:6250/10000 val_loss:4.2944 train_time:1452236ms step_avg:232.36ms +[2025-07-17 21:47:14] [Rank 0] step:6261/10000 train_time:1453593ms step_avg:232.17ms +[2025-07-17 21:47:14] [Rank 0] step:6261/10000 train_time:1453593ms step_avg:232.17ms +[2025-07-17 21:47:19] [Rank 0] step:6281/10000 train_time:1458446ms step_avg:232.20ms +[2025-07-17 21:47:19] [Rank 0] step:6281/10000 train_time:1458446ms step_avg:232.20ms +[2025-07-17 21:47:24] [Rank 0] step:6301/10000 train_time:1463299ms step_avg:232.23ms +[2025-07-17 21:47:24] [Rank 0] step:6301/10000 train_time:1463299ms step_avg:232.23ms 
+[2025-07-17 21:47:29] [Rank 0] step:6321/10000 train_time:1468153ms step_avg:232.27ms +[2025-07-17 21:47:29] [Rank 0] step:6321/10000 train_time:1468153ms step_avg:232.27ms +[2025-07-17 21:47:34] [Rank 0] step:6341/10000 train_time:1473015ms step_avg:232.30ms +[2025-07-17 21:47:34] [Rank 0] step:6341/10000 train_time:1473015ms step_avg:232.30ms +[2025-07-17 21:47:38] [Rank 0] step:6361/10000 train_time:1477861ms step_avg:232.33ms +[2025-07-17 21:47:38] [Rank 0] step:6361/10000 train_time:1477861ms step_avg:232.33ms +[2025-07-17 21:47:46] [Rank 0] PRINT: step:6375/10000 val_loss:4.2856 train_time:1482558ms step_avg:232.56ms +[2025-07-17 21:47:46] [Rank 0] PRINT: step:6375/10000 val_loss:4.2856 train_time:1482558ms step_avg:232.56ms +[2025-07-17 21:47:48] [Rank 0] step:6381/10000 train_time:1482710ms step_avg:232.36ms +[2025-07-17 21:47:48] [Rank 0] step:6381/10000 train_time:1482710ms step_avg:232.36ms +[2025-07-17 21:47:53] [Rank 0] step:6401/10000 train_time:1487548ms step_avg:232.39ms +[2025-07-17 21:47:53] [Rank 0] step:6401/10000 train_time:1487548ms step_avg:232.39ms +[2025-07-17 21:47:58] [Rank 0] step:6421/10000 train_time:1492399ms step_avg:232.42ms +[2025-07-17 21:47:58] [Rank 0] step:6421/10000 train_time:1492399ms step_avg:232.42ms +[2025-07-17 21:48:02] [Rank 0] step:6441/10000 train_time:1497248ms step_avg:232.46ms +[2025-07-17 21:48:02] [Rank 0] step:6441/10000 train_time:1497248ms step_avg:232.46ms +[2025-07-17 21:48:07] [Rank 0] step:6461/10000 train_time:1502108ms step_avg:232.49ms +[2025-07-17 21:48:07] [Rank 0] step:6461/10000 train_time:1502108ms step_avg:232.49ms +[2025-07-17 21:48:12] [Rank 0] step:6481/10000 train_time:1506959ms step_avg:232.52ms +[2025-07-17 21:48:12] [Rank 0] step:6481/10000 train_time:1506959ms step_avg:232.52ms +[2025-07-17 21:48:21] [Rank 0] PRINT: step:6500/10000 val_loss:4.2940 train_time:1512868ms step_avg:232.75ms +[2025-07-17 21:48:21] [Rank 0] PRINT: step:6500/10000 val_loss:4.2940 train_time:1512868ms 
step_avg:232.75ms +[2025-07-17 21:48:22] [Rank 0] step:6501/10000 train_time:1512877ms step_avg:232.71ms +[2025-07-17 21:48:22] [Rank 0] step:6501/10000 train_time:1512877ms step_avg:232.71ms +[2025-07-17 21:48:27] [Rank 0] step:6521/10000 train_time:1516657ms step_avg:232.58ms +[2025-07-17 21:48:27] [Rank 0] step:6521/10000 train_time:1516657ms step_avg:232.58ms +[2025-07-17 21:48:31] [Rank 0] step:6541/10000 train_time:1521513ms step_avg:232.61ms +[2025-07-17 21:48:31] [Rank 0] step:6541/10000 train_time:1521513ms step_avg:232.61ms +[2025-07-17 21:48:36] [Rank 0] step:6561/10000 train_time:1526368ms step_avg:232.64ms +[2025-07-17 21:48:36] [Rank 0] step:6561/10000 train_time:1526368ms step_avg:232.64ms +[2025-07-17 21:48:41] [Rank 0] step:6581/10000 train_time:1531225ms step_avg:232.67ms +[2025-07-17 21:48:41] [Rank 0] step:6581/10000 train_time:1531225ms step_avg:232.67ms +[2025-07-17 21:48:46] [Rank 0] step:6601/10000 train_time:1536083ms step_avg:232.70ms +[2025-07-17 21:48:46] [Rank 0] step:6601/10000 train_time:1536083ms step_avg:232.70ms +[2025-07-17 21:48:51] [Rank 0] step:6621/10000 train_time:1540933ms step_avg:232.73ms +[2025-07-17 21:48:51] [Rank 0] step:6621/10000 train_time:1540933ms step_avg:232.73ms +[2025-07-17 21:48:57] [Rank 0] PRINT: step:6625/10000 val_loss:4.2110 train_time:1543209ms step_avg:232.94ms +[2025-07-17 21:48:57] [Rank 0] PRINT: step:6625/10000 val_loss:4.2110 train_time:1543209ms step_avg:232.94ms +[2025-07-17 21:49:00] [Rank 0] step:6641/10000 train_time:1545773ms step_avg:232.76ms +[2025-07-17 21:49:00] [Rank 0] step:6641/10000 train_time:1545773ms step_avg:232.76ms +[2025-07-17 21:49:05] [Rank 0] step:6661/10000 train_time:1550621ms step_avg:232.79ms +[2025-07-17 21:49:05] [Rank 0] step:6661/10000 train_time:1550621ms step_avg:232.79ms +[2025-07-17 21:49:10] [Rank 0] step:6681/10000 train_time:1555512ms step_avg:232.83ms +[2025-07-17 21:49:10] [Rank 0] step:6681/10000 train_time:1555512ms step_avg:232.83ms +[2025-07-17 
21:49:15] [Rank 0] step:6701/10000 train_time:1560423ms step_avg:232.86ms +[2025-07-17 21:49:15] [Rank 0] step:6701/10000 train_time:1560423ms step_avg:232.86ms +[2025-07-17 21:49:20] [Rank 0] step:6721/10000 train_time:1565351ms step_avg:232.90ms +[2025-07-17 21:49:20] [Rank 0] step:6721/10000 train_time:1565351ms step_avg:232.90ms +[2025-07-17 21:49:25] [Rank 0] step:6741/10000 train_time:1570269ms step_avg:232.94ms +[2025-07-17 21:49:25] [Rank 0] step:6741/10000 train_time:1570269ms step_avg:232.94ms +[2025-07-17 21:49:32] [Rank 0] PRINT: step:6750/10000 val_loss:4.4323 train_time:1573814ms step_avg:233.16ms +[2025-07-17 21:49:32] [Rank 0] PRINT: step:6750/10000 val_loss:4.4323 train_time:1573814ms step_avg:233.16ms +[2025-07-17 21:49:35] [Rank 0] step:6761/10000 train_time:1575188ms step_avg:232.98ms +[2025-07-17 21:49:35] [Rank 0] step:6761/10000 train_time:1575188ms step_avg:232.98ms +[2025-07-17 21:49:40] [Rank 0] step:6781/10000 train_time:1580097ms step_avg:233.02ms +[2025-07-17 21:49:40] [Rank 0] step:6781/10000 train_time:1580097ms step_avg:233.02ms +[2025-07-17 21:49:44] [Rank 0] step:6801/10000 train_time:1585011ms step_avg:233.06ms +[2025-07-17 21:49:44] [Rank 0] step:6801/10000 train_time:1585011ms step_avg:233.06ms +[2025-07-17 21:49:49] [Rank 0] step:6821/10000 train_time:1589931ms step_avg:233.09ms +[2025-07-17 21:49:49] [Rank 0] step:6821/10000 train_time:1589931ms step_avg:233.09ms +[2025-07-17 21:49:54] [Rank 0] step:6841/10000 train_time:1594846ms step_avg:233.13ms +[2025-07-17 21:49:54] [Rank 0] step:6841/10000 train_time:1594846ms step_avg:233.13ms +[2025-07-17 21:49:59] [Rank 0] step:6861/10000 train_time:1599750ms step_avg:233.17ms +[2025-07-17 21:49:59] [Rank 0] step:6861/10000 train_time:1599750ms step_avg:233.17ms +[2025-07-17 21:50:07] [Rank 0] PRINT: step:6875/10000 val_loss:4.4403 train_time:1604504ms step_avg:233.38ms +[2025-07-17 21:50:07] [Rank 0] PRINT: step:6875/10000 val_loss:4.4403 train_time:1604504ms step_avg:233.38ms 
+[2025-07-17 21:50:09] [Rank 0] step:6881/10000 train_time:1604654ms step_avg:233.20ms +[2025-07-17 21:50:09] [Rank 0] step:6881/10000 train_time:1604654ms step_avg:233.20ms +[2025-07-17 21:50:14] [Rank 0] step:6901/10000 train_time:1609567ms step_avg:233.24ms +[2025-07-17 21:50:14] [Rank 0] step:6901/10000 train_time:1609567ms step_avg:233.24ms +[2025-07-17 21:50:19] [Rank 0] step:6921/10000 train_time:1614471ms step_avg:233.27ms +[2025-07-17 21:50:19] [Rank 0] step:6921/10000 train_time:1614471ms step_avg:233.27ms +[2025-07-17 21:50:24] [Rank 0] step:6941/10000 train_time:1619376ms step_avg:233.31ms +[2025-07-17 21:50:24] [Rank 0] step:6941/10000 train_time:1619376ms step_avg:233.31ms +[2025-07-17 21:50:28] [Rank 0] step:6961/10000 train_time:1624273ms step_avg:233.34ms +[2025-07-17 21:50:28] [Rank 0] step:6961/10000 train_time:1624273ms step_avg:233.34ms +[2025-07-17 21:50:33] [Rank 0] step:6981/10000 train_time:1629182ms step_avg:233.37ms +[2025-07-17 21:50:33] [Rank 0] step:6981/10000 train_time:1629182ms step_avg:233.37ms +[2025-07-17 21:50:43] [Rank 0] PRINT: step:7000/10000 val_loss:4.3408 train_time:1635157ms step_avg:233.59ms +[2025-07-17 21:50:43] [Rank 0] PRINT: step:7000/10000 val_loss:4.3408 train_time:1635157ms step_avg:233.59ms +[2025-07-17 21:50:43] [Rank 0] step:7001/10000 train_time:1635166ms step_avg:233.56ms +[2025-07-17 21:50:43] [Rank 0] step:7001/10000 train_time:1635166ms step_avg:233.56ms +[2025-07-17 21:50:48] [Rank 0] step:7021/10000 train_time:1638977ms step_avg:233.44ms +[2025-07-17 21:50:48] [Rank 0] step:7021/10000 train_time:1638977ms step_avg:233.44ms +[2025-07-17 21:50:53] [Rank 0] step:7041/10000 train_time:1643867ms step_avg:233.47ms +[2025-07-17 21:50:53] [Rank 0] step:7041/10000 train_time:1643867ms step_avg:233.47ms +[2025-07-17 21:50:58] [Rank 0] step:7061/10000 train_time:1648753ms step_avg:233.50ms +[2025-07-17 21:50:58] [Rank 0] step:7061/10000 train_time:1648753ms step_avg:233.50ms +[2025-07-17 21:51:03] [Rank 0] 
step:7081/10000 train_time:1653636ms step_avg:233.53ms +[2025-07-17 21:51:03] [Rank 0] step:7081/10000 train_time:1653636ms step_avg:233.53ms +[2025-07-17 21:51:07] [Rank 0] step:7101/10000 train_time:1658527ms step_avg:233.56ms +[2025-07-17 21:51:07] [Rank 0] step:7101/10000 train_time:1658527ms step_avg:233.56ms +[2025-07-17 21:51:12] [Rank 0] step:7121/10000 train_time:1663426ms step_avg:233.59ms +[2025-07-17 21:51:12] [Rank 0] step:7121/10000 train_time:1663426ms step_avg:233.59ms +[2025-07-17 21:51:18] [Rank 0] PRINT: step:7125/10000 val_loss:4.3483 train_time:1665717ms step_avg:233.78ms +[2025-07-17 21:51:18] [Rank 0] PRINT: step:7125/10000 val_loss:4.3483 train_time:1665717ms step_avg:233.78ms +[2025-07-17 21:51:22] [Rank 0] step:7141/10000 train_time:1668320ms step_avg:233.63ms +[2025-07-17 21:51:22] [Rank 0] step:7141/10000 train_time:1668320ms step_avg:233.63ms +[2025-07-17 21:51:27] [Rank 0] step:7161/10000 train_time:1673215ms step_avg:233.66ms +[2025-07-17 21:51:27] [Rank 0] step:7161/10000 train_time:1673215ms step_avg:233.66ms +[2025-07-17 21:51:32] [Rank 0] step:7181/10000 train_time:1678107ms step_avg:233.69ms +[2025-07-17 21:51:32] [Rank 0] step:7181/10000 train_time:1678107ms step_avg:233.69ms +[2025-07-17 21:51:37] [Rank 0] step:7201/10000 train_time:1683017ms step_avg:233.72ms +[2025-07-17 21:51:37] [Rank 0] step:7201/10000 train_time:1683017ms step_avg:233.72ms +[2025-07-17 21:51:42] [Rank 0] step:7221/10000 train_time:1687923ms step_avg:233.75ms +[2025-07-17 21:51:42] [Rank 0] step:7221/10000 train_time:1687923ms step_avg:233.75ms +[2025-07-17 21:51:46] [Rank 0] step:7241/10000 train_time:1692820ms step_avg:233.78ms +[2025-07-17 21:51:46] [Rank 0] step:7241/10000 train_time:1692820ms step_avg:233.78ms +[2025-07-17 21:51:53] [Rank 0] PRINT: step:7250/10000 val_loss:4.2675 train_time:1696357ms step_avg:233.98ms +[2025-07-17 21:51:53] [Rank 0] PRINT: step:7250/10000 val_loss:4.2675 train_time:1696357ms step_avg:233.98ms +[2025-07-17 21:51:56] 
[Rank 0] step:7261/10000 train_time:1697726ms step_avg:233.81ms +[2025-07-17 21:51:56] [Rank 0] step:7261/10000 train_time:1697726ms step_avg:233.81ms +[2025-07-17 21:52:01] [Rank 0] step:7281/10000 train_time:1702632ms step_avg:233.85ms +[2025-07-17 21:52:01] [Rank 0] step:7281/10000 train_time:1702632ms step_avg:233.85ms +[2025-07-17 21:52:06] [Rank 0] step:7301/10000 train_time:1707537ms step_avg:233.88ms +[2025-07-17 21:52:06] [Rank 0] step:7301/10000 train_time:1707537ms step_avg:233.88ms +[2025-07-17 21:52:11] [Rank 0] step:7321/10000 train_time:1712456ms step_avg:233.91ms +[2025-07-17 21:52:11] [Rank 0] step:7321/10000 train_time:1712456ms step_avg:233.91ms +[2025-07-17 21:52:16] [Rank 0] step:7341/10000 train_time:1717352ms step_avg:233.94ms +[2025-07-17 21:52:16] [Rank 0] step:7341/10000 train_time:1717352ms step_avg:233.94ms +[2025-07-17 21:52:21] [Rank 0] step:7361/10000 train_time:1722272ms step_avg:233.97ms +[2025-07-17 21:52:21] [Rank 0] step:7361/10000 train_time:1722272ms step_avg:233.97ms +[2025-07-17 21:52:29] [Rank 0] PRINT: step:7375/10000 val_loss:4.2742 train_time:1727032ms step_avg:234.17ms +[2025-07-17 21:52:29] [Rank 0] PRINT: step:7375/10000 val_loss:4.2742 train_time:1727032ms step_avg:234.17ms +[2025-07-17 21:52:30] [Rank 0] step:7381/10000 train_time:1727185ms step_avg:234.00ms +[2025-07-17 21:52:30] [Rank 0] step:7381/10000 train_time:1727185ms step_avg:234.00ms +[2025-07-17 21:52:35] [Rank 0] step:7401/10000 train_time:1732092ms step_avg:234.03ms +[2025-07-17 21:52:35] [Rank 0] step:7401/10000 train_time:1732092ms step_avg:234.03ms +[2025-07-17 21:52:40] [Rank 0] step:7421/10000 train_time:1736997ms step_avg:234.07ms +[2025-07-17 21:52:40] [Rank 0] step:7421/10000 train_time:1736997ms step_avg:234.07ms +[2025-07-17 21:52:45] [Rank 0] step:7441/10000 train_time:1741916ms step_avg:234.10ms +[2025-07-17 21:52:45] [Rank 0] step:7441/10000 train_time:1741916ms step_avg:234.10ms +[2025-07-17 21:52:50] [Rank 0] step:7461/10000 
train_time:1746828ms step_avg:234.13ms +[2025-07-17 21:52:50] [Rank 0] step:7461/10000 train_time:1746828ms step_avg:234.13ms +[2025-07-17 21:52:55] [Rank 0] step:7481/10000 train_time:1751747ms step_avg:234.16ms +[2025-07-17 21:52:55] [Rank 0] step:7481/10000 train_time:1751747ms step_avg:234.16ms +[2025-07-17 21:53:04] [Rank 0] PRINT: step:7500/10000 val_loss:4.3770 train_time:1757749ms step_avg:234.37ms +[2025-07-17 21:53:04] [Rank 0] PRINT: step:7500/10000 val_loss:4.3770 train_time:1757749ms step_avg:234.37ms +[2025-07-17 21:53:05] [Rank 0] step:7501/10000 train_time:1757757ms step_avg:234.34ms +[2025-07-17 21:53:05] [Rank 0] step:7501/10000 train_time:1757757ms step_avg:234.34ms +[2025-07-17 21:53:09] [Rank 0] step:7521/10000 train_time:1761597ms step_avg:234.22ms +[2025-07-17 21:53:09] [Rank 0] step:7521/10000 train_time:1761597ms step_avg:234.22ms +[2025-07-17 21:53:14] [Rank 0] step:7541/10000 train_time:1766507ms step_avg:234.25ms +[2025-07-17 21:53:14] [Rank 0] step:7541/10000 train_time:1766507ms step_avg:234.25ms +[2025-07-17 21:53:19] [Rank 0] step:7561/10000 train_time:1771422ms step_avg:234.28ms +[2025-07-17 21:53:19] [Rank 0] step:7561/10000 train_time:1771422ms step_avg:234.28ms +[2025-07-17 21:53:24] [Rank 0] step:7581/10000 train_time:1776352ms step_avg:234.32ms +[2025-07-17 21:53:24] [Rank 0] step:7581/10000 train_time:1776352ms step_avg:234.32ms +[2025-07-17 21:53:29] [Rank 0] step:7601/10000 train_time:1781265ms step_avg:234.35ms +[2025-07-17 21:53:29] [Rank 0] step:7601/10000 train_time:1781265ms step_avg:234.35ms +[2025-07-17 21:53:34] [Rank 0] step:7621/10000 train_time:1786713ms step_avg:234.45ms +[2025-07-17 21:53:34] [Rank 0] step:7621/10000 train_time:1786713ms step_avg:234.45ms +[2025-07-17 21:53:40] [Rank 0] PRINT: step:7625/10000 val_loss:4.3582 train_time:1788510ms step_avg:234.56ms +[2025-07-17 21:53:40] [Rank 0] PRINT: step:7625/10000 val_loss:4.3582 train_time:1788510ms step_avg:234.56ms +[2025-07-17 21:53:44] [Rank 0] 
step:7641/10000 train_time:1791115ms step_avg:234.41ms +[2025-07-17 21:53:44] [Rank 0] step:7641/10000 train_time:1791115ms step_avg:234.41ms +[2025-07-17 21:53:49] [Rank 0] step:7661/10000 train_time:1796036ms step_avg:234.44ms +[2025-07-17 21:53:49] [Rank 0] step:7661/10000 train_time:1796036ms step_avg:234.44ms +[2025-07-17 21:53:54] [Rank 0] step:7681/10000 train_time:1800964ms step_avg:234.47ms +[2025-07-17 21:53:54] [Rank 0] step:7681/10000 train_time:1800964ms step_avg:234.47ms +[2025-07-17 21:53:58] [Rank 0] step:7701/10000 train_time:1805883ms step_avg:234.50ms +[2025-07-17 21:53:58] [Rank 0] step:7701/10000 train_time:1805883ms step_avg:234.50ms +[2025-07-17 21:54:03] [Rank 0] step:7721/10000 train_time:1810792ms step_avg:234.53ms +[2025-07-17 21:54:03] [Rank 0] step:7721/10000 train_time:1810792ms step_avg:234.53ms +[2025-07-17 21:54:08] [Rank 0] step:7741/10000 train_time:1815718ms step_avg:234.56ms +[2025-07-17 21:54:08] [Rank 0] step:7741/10000 train_time:1815718ms step_avg:234.56ms +[2025-07-17 21:54:15] [Rank 0] PRINT: step:7750/10000 val_loss:4.4390 train_time:1819266ms step_avg:234.74ms +[2025-07-17 21:54:15] [Rank 0] PRINT: step:7750/10000 val_loss:4.4390 train_time:1819266ms step_avg:234.74ms +[2025-07-17 21:54:18] [Rank 0] step:7761/10000 train_time:1820638ms step_avg:234.59ms +[2025-07-17 21:54:18] [Rank 0] step:7761/10000 train_time:1820638ms step_avg:234.59ms +[2025-07-17 21:54:23] [Rank 0] step:7781/10000 train_time:1825569ms step_avg:234.62ms +[2025-07-17 21:54:23] [Rank 0] step:7781/10000 train_time:1825569ms step_avg:234.62ms +[2025-07-17 21:54:28] [Rank 0] step:7801/10000 train_time:1830482ms step_avg:234.65ms +[2025-07-17 21:54:28] [Rank 0] step:7801/10000 train_time:1830482ms step_avg:234.65ms +[2025-07-17 21:54:33] [Rank 0] step:7821/10000 train_time:1835405ms step_avg:234.68ms +[2025-07-17 21:54:33] [Rank 0] step:7821/10000 train_time:1835405ms step_avg:234.68ms +[2025-07-17 21:54:38] [Rank 0] step:7841/10000 train_time:1840335ms 
step_avg:234.71ms +[2025-07-17 21:54:38] [Rank 0] step:7841/10000 train_time:1840335ms step_avg:234.71ms +[2025-07-17 21:54:43] [Rank 0] step:7861/10000 train_time:1845243ms step_avg:234.73ms +[2025-07-17 21:54:43] [Rank 0] step:7861/10000 train_time:1845243ms step_avg:234.73ms +[2025-07-17 21:54:51] [Rank 0] PRINT: step:7875/10000 val_loss:4.4590 train_time:1850001ms step_avg:234.92ms +[2025-07-17 21:54:51] [Rank 0] PRINT: step:7875/10000 val_loss:4.4590 train_time:1850001ms step_avg:234.92ms +[2025-07-17 21:54:52] [Rank 0] step:7881/10000 train_time:1850152ms step_avg:234.76ms +[2025-07-17 21:54:52] [Rank 0] step:7881/10000 train_time:1850152ms step_avg:234.76ms +[2025-07-17 21:54:57] [Rank 0] step:7901/10000 train_time:1855069ms step_avg:234.79ms +[2025-07-17 21:54:57] [Rank 0] step:7901/10000 train_time:1855069ms step_avg:234.79ms +[2025-07-17 21:55:02] [Rank 0] step:7921/10000 train_time:1859981ms step_avg:234.82ms +[2025-07-17 21:55:02] [Rank 0] step:7921/10000 train_time:1859981ms step_avg:234.82ms +[2025-07-17 21:55:07] [Rank 0] step:7941/10000 train_time:1864901ms step_avg:234.84ms +[2025-07-17 21:55:07] [Rank 0] step:7941/10000 train_time:1864901ms step_avg:234.84ms +[2025-07-17 21:55:12] [Rank 0] step:7961/10000 train_time:1869840ms step_avg:234.88ms +[2025-07-17 21:55:12] [Rank 0] step:7961/10000 train_time:1869840ms step_avg:234.88ms +[2025-07-17 21:55:17] [Rank 0] step:7981/10000 train_time:1874759ms step_avg:234.90ms +[2025-07-17 21:55:17] [Rank 0] step:7981/10000 train_time:1874759ms step_avg:234.90ms +[2025-07-17 21:55:26] [Rank 0] PRINT: step:8000/10000 val_loss:4.4601 train_time:1880766ms step_avg:235.10ms +[2025-07-17 21:55:26] [Rank 0] PRINT: step:8000/10000 val_loss:4.4601 train_time:1880766ms step_avg:235.10ms +[2025-07-17 21:55:27] [Rank 0] step:8001/10000 train_time:1880775ms step_avg:235.07ms +[2025-07-17 21:55:27] [Rank 0] step:8001/10000 train_time:1880775ms step_avg:235.07ms +[2025-07-17 21:55:31] [Rank 0] step:8021/10000 
train_time:1884600ms step_avg:234.96ms +[2025-07-17 21:55:31] [Rank 0] step:8021/10000 train_time:1884600ms step_avg:234.96ms +[2025-07-17 21:55:36] [Rank 0] step:8041/10000 train_time:1889523ms step_avg:234.99ms +[2025-07-17 21:55:36] [Rank 0] step:8041/10000 train_time:1889523ms step_avg:234.99ms +[2025-07-17 21:55:41] [Rank 0] step:8061/10000 train_time:1894443ms step_avg:235.01ms +[2025-07-17 21:55:41] [Rank 0] step:8061/10000 train_time:1894443ms step_avg:235.01ms +[2025-07-17 21:55:46] [Rank 0] step:8081/10000 train_time:1899359ms step_avg:235.04ms +[2025-07-17 21:55:46] [Rank 0] step:8081/10000 train_time:1899359ms step_avg:235.04ms +[2025-07-17 21:55:51] [Rank 0] step:8101/10000 train_time:1904262ms step_avg:235.07ms +[2025-07-17 21:55:51] [Rank 0] step:8101/10000 train_time:1904262ms step_avg:235.07ms +[2025-07-17 21:55:56] [Rank 0] step:8121/10000 train_time:1909183ms step_avg:235.09ms +[2025-07-17 21:55:56] [Rank 0] step:8121/10000 train_time:1909183ms step_avg:235.09ms +[2025-07-17 21:56:01] [Rank 0] PRINT: step:8125/10000 val_loss:4.4280 train_time:1911491ms step_avg:235.26ms +[2025-07-17 21:56:01] [Rank 0] PRINT: step:8125/10000 val_loss:4.4280 train_time:1911491ms step_avg:235.26ms +[2025-07-17 21:56:05] [Rank 0] step:8141/10000 train_time:1914102ms step_avg:235.12ms +[2025-07-17 21:56:05] [Rank 0] step:8141/10000 train_time:1914102ms step_avg:235.12ms +[2025-07-17 21:56:10] [Rank 0] step:8161/10000 train_time:1919048ms step_avg:235.15ms +[2025-07-17 21:56:10] [Rank 0] step:8161/10000 train_time:1919048ms step_avg:235.15ms +[2025-07-17 21:56:15] [Rank 0] step:8181/10000 train_time:1924028ms step_avg:235.18ms +[2025-07-17 21:56:15] [Rank 0] step:8181/10000 train_time:1924028ms step_avg:235.18ms +[2025-07-17 21:56:20] [Rank 0] step:8201/10000 train_time:1928991ms step_avg:235.21ms +[2025-07-17 21:56:20] [Rank 0] step:8201/10000 train_time:1928991ms step_avg:235.21ms +[2025-07-17 21:56:25] [Rank 0] step:8221/10000 train_time:1933979ms step_avg:235.25ms 
+[2025-07-17 21:56:25] [Rank 0] step:8221/10000 train_time:1933979ms step_avg:235.25ms +[2025-07-17 21:56:30] [Rank 0] step:8241/10000 train_time:1938949ms step_avg:235.28ms +[2025-07-17 21:56:30] [Rank 0] step:8241/10000 train_time:1938949ms step_avg:235.28ms +[2025-07-17 21:56:37] [Rank 0] PRINT: step:8250/10000 val_loss:4.3927 train_time:1942538ms step_avg:235.46ms +[2025-07-17 21:56:37] [Rank 0] PRINT: step:8250/10000 val_loss:4.3927 train_time:1942538ms step_avg:235.46ms +[2025-07-17 21:56:39] [Rank 0] step:8261/10000 train_time:1943935ms step_avg:235.31ms +[2025-07-17 21:56:39] [Rank 0] step:8261/10000 train_time:1943935ms step_avg:235.31ms +[2025-07-17 21:56:44] [Rank 0] step:8281/10000 train_time:1948934ms step_avg:235.35ms +[2025-07-17 21:56:44] [Rank 0] step:8281/10000 train_time:1948934ms step_avg:235.35ms +[2025-07-17 21:56:49] [Rank 0] step:8301/10000 train_time:1953908ms step_avg:235.38ms +[2025-07-17 21:56:49] [Rank 0] step:8301/10000 train_time:1953908ms step_avg:235.38ms +[2025-07-17 21:56:54] [Rank 0] step:8321/10000 train_time:1958892ms step_avg:235.42ms +[2025-07-17 21:56:54] [Rank 0] step:8321/10000 train_time:1958892ms step_avg:235.42ms +[2025-07-17 21:56:59] [Rank 0] step:8341/10000 train_time:1963883ms step_avg:235.45ms +[2025-07-17 21:56:59] [Rank 0] step:8341/10000 train_time:1963883ms step_avg:235.45ms +[2025-07-17 21:57:04] [Rank 0] step:8361/10000 train_time:1968859ms step_avg:235.48ms +[2025-07-17 21:57:04] [Rank 0] step:8361/10000 train_time:1968859ms step_avg:235.48ms +[2025-07-17 21:57:12] [Rank 0] PRINT: step:8375/10000 val_loss:4.3933 train_time:1973704ms step_avg:235.67ms +[2025-07-17 21:57:12] [Rank 0] PRINT: step:8375/10000 val_loss:4.3933 train_time:1973704ms step_avg:235.67ms +[2025-07-17 21:57:14] [Rank 0] step:8381/10000 train_time:1973853ms step_avg:235.52ms +[2025-07-17 21:57:14] [Rank 0] step:8381/10000 train_time:1973853ms step_avg:235.52ms +[2025-07-17 21:57:19] [Rank 0] step:8401/10000 train_time:1978802ms 
step_avg:235.54ms +[2025-07-17 21:57:19] [Rank 0] step:8401/10000 train_time:1978802ms step_avg:235.54ms +[2025-07-17 21:57:24] [Rank 0] step:8421/10000 train_time:1983775ms step_avg:235.57ms +[2025-07-17 21:57:24] [Rank 0] step:8421/10000 train_time:1983775ms step_avg:235.57ms +[2025-07-17 21:57:29] [Rank 0] step:8441/10000 train_time:1988761ms step_avg:235.61ms +[2025-07-17 21:57:29] [Rank 0] step:8441/10000 train_time:1988761ms step_avg:235.61ms +[2025-07-17 21:57:34] [Rank 0] step:8461/10000 train_time:1993745ms step_avg:235.64ms +[2025-07-17 21:57:34] [Rank 0] step:8461/10000 train_time:1993745ms step_avg:235.64ms +[2025-07-17 21:57:39] [Rank 0] step:8481/10000 train_time:1998735ms step_avg:235.67ms +[2025-07-17 21:57:39] [Rank 0] step:8481/10000 train_time:1998735ms step_avg:235.67ms +[2025-07-17 21:57:48] [Rank 0] PRINT: step:8500/10000 val_loss:4.4524 train_time:2004813ms step_avg:235.86ms +[2025-07-17 21:57:48] [Rank 0] PRINT: step:8500/10000 val_loss:4.4524 train_time:2004813ms step_avg:235.86ms +[2025-07-17 21:57:49] [Rank 0] step:8501/10000 train_time:2004822ms step_avg:235.83ms +[2025-07-17 21:57:49] [Rank 0] step:8501/10000 train_time:2004822ms step_avg:235.83ms +[2025-07-17 21:57:53] [Rank 0] step:8521/10000 train_time:2008700ms step_avg:235.74ms +[2025-07-17 21:57:53] [Rank 0] step:8521/10000 train_time:2008700ms step_avg:235.74ms +[2025-07-17 21:57:58] [Rank 0] step:8541/10000 train_time:2013682ms step_avg:235.77ms +[2025-07-17 21:57:58] [Rank 0] step:8541/10000 train_time:2013682ms step_avg:235.77ms +[2025-07-17 21:58:03] [Rank 0] step:8561/10000 train_time:2018659ms step_avg:235.80ms +[2025-07-17 21:58:03] [Rank 0] step:8561/10000 train_time:2018659ms step_avg:235.80ms +[2025-07-17 21:58:08] [Rank 0] step:8581/10000 train_time:2023630ms step_avg:235.83ms +[2025-07-17 21:58:08] [Rank 0] step:8581/10000 train_time:2023630ms step_avg:235.83ms +[2025-07-17 21:58:13] [Rank 0] step:8601/10000 train_time:2028600ms step_avg:235.86ms +[2025-07-17 
21:58:13] [Rank 0] step:8601/10000 train_time:2028600ms step_avg:235.86ms +[2025-07-17 21:58:18] [Rank 0] step:8621/10000 train_time:2033560ms step_avg:235.88ms +[2025-07-17 21:58:18] [Rank 0] step:8621/10000 train_time:2033560ms step_avg:235.88ms +[2025-07-17 21:58:24] [Rank 0] PRINT: step:8625/10000 val_loss:4.2652 train_time:2035895ms step_avg:236.05ms +[2025-07-17 21:58:24] [Rank 0] PRINT: step:8625/10000 val_loss:4.2652 train_time:2035895ms step_avg:236.05ms +[2025-07-17 21:58:28] [Rank 0] step:8641/10000 train_time:2038543ms step_avg:235.92ms +[2025-07-17 21:58:28] [Rank 0] step:8641/10000 train_time:2038543ms step_avg:235.92ms +[2025-07-17 21:58:33] [Rank 0] step:8661/10000 train_time:2043516ms step_avg:235.94ms +[2025-07-17 21:58:33] [Rank 0] step:8661/10000 train_time:2043516ms step_avg:235.94ms +[2025-07-17 21:58:38] [Rank 0] step:8681/10000 train_time:2048492ms step_avg:235.97ms +[2025-07-17 21:58:38] [Rank 0] step:8681/10000 train_time:2048492ms step_avg:235.97ms +[2025-07-17 21:58:43] [Rank 0] step:8701/10000 train_time:2053477ms step_avg:236.00ms +[2025-07-17 21:58:43] [Rank 0] step:8701/10000 train_time:2053477ms step_avg:236.00ms +[2025-07-17 21:58:48] [Rank 0] step:8721/10000 train_time:2058440ms step_avg:236.03ms +[2025-07-17 21:58:48] [Rank 0] step:8721/10000 train_time:2058440ms step_avg:236.03ms +[2025-07-17 21:58:53] [Rank 0] step:8741/10000 train_time:2063432ms step_avg:236.06ms +[2025-07-17 21:58:53] [Rank 0] step:8741/10000 train_time:2063432ms step_avg:236.06ms +[2025-07-17 21:59:00] [Rank 0] PRINT: step:8750/10000 val_loss:4.3928 train_time:2067007ms step_avg:236.23ms +[2025-07-17 21:59:00] [Rank 0] PRINT: step:8750/10000 val_loss:4.3928 train_time:2067007ms step_avg:236.23ms +[2025-07-17 21:59:03] [Rank 0] step:8761/10000 train_time:2068400ms step_avg:236.09ms +[2025-07-17 21:59:03] [Rank 0] step:8761/10000 train_time:2068400ms step_avg:236.09ms +[2025-07-17 21:59:08] [Rank 0] step:8781/10000 train_time:2073377ms step_avg:236.12ms 
+[2025-07-17 21:59:08] [Rank 0] step:8781/10000 train_time:2073377ms step_avg:236.12ms +[2025-07-17 21:59:13] [Rank 0] step:8801/10000 train_time:2078355ms step_avg:236.15ms +[2025-07-17 21:59:13] [Rank 0] step:8801/10000 train_time:2078355ms step_avg:236.15ms +[2025-07-17 21:59:18] [Rank 0] step:8821/10000 train_time:2083337ms step_avg:236.18ms +[2025-07-17 21:59:18] [Rank 0] step:8821/10000 train_time:2083337ms step_avg:236.18ms +[2025-07-17 21:59:23] [Rank 0] step:8841/10000 train_time:2088339ms step_avg:236.21ms +[2025-07-17 21:59:23] [Rank 0] step:8841/10000 train_time:2088339ms step_avg:236.21ms +[2025-07-17 21:59:28] [Rank 0] step:8861/10000 train_time:2093327ms step_avg:236.24ms +[2025-07-17 21:59:28] [Rank 0] step:8861/10000 train_time:2093327ms step_avg:236.24ms +[2025-07-17 21:59:36] [Rank 0] PRINT: step:8875/10000 val_loss:4.4579 train_time:2098154ms step_avg:236.41ms +[2025-07-17 21:59:36] [Rank 0] PRINT: step:8875/10000 val_loss:4.4579 train_time:2098154ms step_avg:236.41ms +[2025-07-17 21:59:37] [Rank 0] step:8881/10000 train_time:2098305ms step_avg:236.27ms +[2025-07-17 21:59:37] [Rank 0] step:8881/10000 train_time:2098305ms step_avg:236.27ms +[2025-07-17 21:59:42] [Rank 0] step:8901/10000 train_time:2103286ms step_avg:236.30ms +[2025-07-17 21:59:42] [Rank 0] step:8901/10000 train_time:2103286ms step_avg:236.30ms +[2025-07-17 21:59:47] [Rank 0] step:8921/10000 train_time:2108264ms step_avg:236.33ms +[2025-07-17 21:59:47] [Rank 0] step:8921/10000 train_time:2108264ms step_avg:236.33ms +[2025-07-17 21:59:52] [Rank 0] step:8941/10000 train_time:2113256ms step_avg:236.36ms +[2025-07-17 21:59:52] [Rank 0] step:8941/10000 train_time:2113256ms step_avg:236.36ms +[2025-07-17 21:59:57] [Rank 0] step:8961/10000 train_time:2118246ms step_avg:236.39ms +[2025-07-17 21:59:57] [Rank 0] step:8961/10000 train_time:2118246ms step_avg:236.39ms +[2025-07-17 22:00:02] [Rank 0] step:8981/10000 train_time:2123233ms step_avg:236.41ms +[2025-07-17 22:00:02] [Rank 0] 
step:8981/10000 train_time:2123233ms step_avg:236.41ms +[2025-07-17 22:00:12] [Rank 0] PRINT: step:9000/10000 val_loss:4.4517 train_time:2129318ms step_avg:236.59ms +[2025-07-17 22:00:12] [Rank 0] PRINT: step:9000/10000 val_loss:4.4517 train_time:2129318ms step_avg:236.59ms +[2025-07-17 22:00:12] [Rank 0] step:9001/10000 train_time:2129327ms step_avg:236.57ms +[2025-07-17 22:00:12] [Rank 0] step:9001/10000 train_time:2129327ms step_avg:236.57ms +[2025-07-17 22:00:17] [Rank 0] step:9021/10000 train_time:2133211ms step_avg:236.47ms +[2025-07-17 22:00:17] [Rank 0] step:9021/10000 train_time:2133211ms step_avg:236.47ms +[2025-07-17 22:00:22] [Rank 0] step:9041/10000 train_time:2138219ms step_avg:236.50ms +[2025-07-17 22:00:22] [Rank 0] step:9041/10000 train_time:2138219ms step_avg:236.50ms +[2025-07-17 22:00:27] [Rank 0] step:9061/10000 train_time:2143205ms step_avg:236.53ms +[2025-07-17 22:00:27] [Rank 0] step:9061/10000 train_time:2143205ms step_avg:236.53ms +[2025-07-17 22:00:32] [Rank 0] step:9081/10000 train_time:2148217ms step_avg:236.56ms +[2025-07-17 22:00:32] [Rank 0] step:9081/10000 train_time:2148217ms step_avg:236.56ms +[2025-07-17 22:00:37] [Rank 0] step:9101/10000 train_time:2153233ms step_avg:236.59ms +[2025-07-17 22:00:37] [Rank 0] step:9101/10000 train_time:2153233ms step_avg:236.59ms +[2025-07-17 22:00:42] [Rank 0] step:9121/10000 train_time:2158238ms step_avg:236.62ms +[2025-07-17 22:00:42] [Rank 0] step:9121/10000 train_time:2158238ms step_avg:236.62ms +[2025-07-17 22:00:47] [Rank 0] PRINT: step:9125/10000 val_loss:4.4742 train_time:2160576ms step_avg:236.78ms +[2025-07-17 22:00:47] [Rank 0] PRINT: step:9125/10000 val_loss:4.4742 train_time:2160576ms step_avg:236.78ms +[2025-07-17 22:00:51] [Rank 0] step:9141/10000 train_time:2163215ms step_avg:236.65ms +[2025-07-17 22:00:51] [Rank 0] step:9141/10000 train_time:2163215ms step_avg:236.65ms +[2025-07-17 22:00:56] [Rank 0] step:9161/10000 train_time:2168238ms step_avg:236.68ms +[2025-07-17 22:00:56] 
[Rank 0] step:9161/10000 train_time:2168238ms step_avg:236.68ms +[2025-07-17 22:01:01] [Rank 0] step:9181/10000 train_time:2173247ms step_avg:236.71ms +[2025-07-17 22:01:01] [Rank 0] step:9181/10000 train_time:2173247ms step_avg:236.71ms +[2025-07-17 22:01:06] [Rank 0] step:9201/10000 train_time:2178244ms step_avg:236.74ms +[2025-07-17 22:01:06] [Rank 0] step:9201/10000 train_time:2178244ms step_avg:236.74ms +[2025-07-17 22:01:11] [Rank 0] step:9221/10000 train_time:2183252ms step_avg:236.77ms +[2025-07-17 22:01:11] [Rank 0] step:9221/10000 train_time:2183252ms step_avg:236.77ms +[2025-07-17 22:01:16] [Rank 0] step:9241/10000 train_time:2188272ms step_avg:236.80ms +[2025-07-17 22:01:16] [Rank 0] step:9241/10000 train_time:2188272ms step_avg:236.80ms +[2025-07-17 22:01:23] [Rank 0] PRINT: step:9250/10000 val_loss:4.4997 train_time:2191880ms step_avg:236.96ms +[2025-07-17 22:01:23] [Rank 0] PRINT: step:9250/10000 val_loss:4.4997 train_time:2191880ms step_avg:236.96ms +[2025-07-17 22:01:26] [Rank 0] step:9261/10000 train_time:2193284ms step_avg:236.83ms +[2025-07-17 22:01:26] [Rank 0] step:9261/10000 train_time:2193284ms step_avg:236.83ms +[2025-07-17 22:01:31] [Rank 0] step:9281/10000 train_time:2198278ms step_avg:236.86ms +[2025-07-17 22:01:31] [Rank 0] step:9281/10000 train_time:2198278ms step_avg:236.86ms +[2025-07-17 22:01:36] [Rank 0] step:9301/10000 train_time:2203271ms step_avg:236.89ms +[2025-07-17 22:01:36] [Rank 0] step:9301/10000 train_time:2203271ms step_avg:236.89ms +[2025-07-17 22:01:41] [Rank 0] step:9321/10000 train_time:2208288ms step_avg:236.92ms +[2025-07-17 22:01:41] [Rank 0] step:9321/10000 train_time:2208288ms step_avg:236.92ms +[2025-07-17 22:01:46] [Rank 0] step:9341/10000 train_time:2213298ms step_avg:236.94ms +[2025-07-17 22:01:46] [Rank 0] step:9341/10000 train_time:2213298ms step_avg:236.94ms +[2025-07-17 22:01:51] [Rank 0] step:9361/10000 train_time:2218290ms step_avg:236.97ms +[2025-07-17 22:01:51] [Rank 0] step:9361/10000 
train_time:2218290ms step_avg:236.97ms +[2025-07-17 22:01:58] [Rank 0] PRINT: step:9375/10000 val_loss:4.5092 train_time:2223146ms step_avg:237.14ms +[2025-07-17 22:01:58] [Rank 0] PRINT: step:9375/10000 val_loss:4.5092 train_time:2223146ms step_avg:237.14ms +[2025-07-17 22:02:00] [Rank 0] step:9381/10000 train_time:2223299ms step_avg:237.00ms +[2025-07-17 22:02:00] [Rank 0] step:9381/10000 train_time:2223299ms step_avg:237.00ms +[2025-07-17 22:02:05] [Rank 0] step:9401/10000 train_time:2228286ms step_avg:237.03ms +[2025-07-17 22:02:05] [Rank 0] step:9401/10000 train_time:2228286ms step_avg:237.03ms +[2025-07-17 22:02:10] [Rank 0] step:9421/10000 train_time:2233285ms step_avg:237.05ms +[2025-07-17 22:02:10] [Rank 0] step:9421/10000 train_time:2233285ms step_avg:237.05ms +[2025-07-17 22:02:15] [Rank 0] step:9441/10000 train_time:2238281ms step_avg:237.08ms +[2025-07-17 22:02:15] [Rank 0] step:9441/10000 train_time:2238281ms step_avg:237.08ms +[2025-07-17 22:02:20] [Rank 0] step:9461/10000 train_time:2243293ms step_avg:237.11ms +[2025-07-17 22:02:20] [Rank 0] step:9461/10000 train_time:2243293ms step_avg:237.11ms +[2025-07-17 22:02:25] [Rank 0] step:9481/10000 train_time:2248294ms step_avg:237.14ms +[2025-07-17 22:02:25] [Rank 0] step:9481/10000 train_time:2248294ms step_avg:237.14ms +[2025-07-17 22:02:35] [Rank 0] PRINT: step:9500/10000 val_loss:4.5296 train_time:2254416ms step_avg:237.31ms +[2025-07-17 22:02:35] [Rank 0] PRINT: step:9500/10000 val_loss:4.5296 train_time:2254416ms step_avg:237.31ms +[2025-07-17 22:02:35] [Rank 0] step:9501/10000 train_time:2254425ms step_avg:237.28ms +[2025-07-17 22:02:35] [Rank 0] step:9501/10000 train_time:2254425ms step_avg:237.28ms +[2025-07-17 22:02:40] [Rank 0] step:9521/10000 train_time:2258312ms step_avg:237.19ms +[2025-07-17 22:02:40] [Rank 0] step:9521/10000 train_time:2258312ms step_avg:237.19ms +[2025-07-17 22:02:45] [Rank 0] step:9541/10000 train_time:2263316ms step_avg:237.22ms +[2025-07-17 22:02:45] [Rank 0] 
step:9541/10000 train_time:2263316ms step_avg:237.22ms +[2025-07-17 22:02:50] [Rank 0] step:9561/10000 train_time:2268308ms step_avg:237.25ms +[2025-07-17 22:02:50] [Rank 0] step:9561/10000 train_time:2268308ms step_avg:237.25ms +[2025-07-17 22:02:55] [Rank 0] step:9581/10000 train_time:2273300ms step_avg:237.27ms +[2025-07-17 22:02:55] [Rank 0] step:9581/10000 train_time:2273300ms step_avg:237.27ms +[2025-07-17 22:03:00] [Rank 0] step:9601/10000 train_time:2278281ms step_avg:237.30ms +[2025-07-17 22:03:00] [Rank 0] step:9601/10000 train_time:2278281ms step_avg:237.30ms +[2025-07-17 22:03:05] [Rank 0] step:9621/10000 train_time:2283294ms step_avg:237.32ms +[2025-07-17 22:03:05] [Rank 0] step:9621/10000 train_time:2283294ms step_avg:237.32ms +[2025-07-17 22:03:11] [Rank 0] PRINT: step:9625/10000 val_loss:4.5001 train_time:2285643ms step_avg:237.47ms +[2025-07-17 22:03:11] [Rank 0] PRINT: step:9625/10000 val_loss:4.5001 train_time:2285643ms step_avg:237.47ms +[2025-07-17 22:03:15] [Rank 0] step:9641/10000 train_time:2288296ms step_avg:237.35ms +[2025-07-17 22:03:15] [Rank 0] step:9641/10000 train_time:2288296ms step_avg:237.35ms +[2025-07-17 22:03:20] [Rank 0] step:9661/10000 train_time:2293377ms step_avg:237.39ms +[2025-07-17 22:03:20] [Rank 0] step:9661/10000 train_time:2293377ms step_avg:237.39ms +[2025-07-17 22:03:25] [Rank 0] step:9681/10000 train_time:2298412ms step_avg:237.41ms +[2025-07-17 22:03:25] [Rank 0] step:9681/10000 train_time:2298412ms step_avg:237.41ms +[2025-07-17 22:03:30] [Rank 0] step:9701/10000 train_time:2303484ms step_avg:237.45ms +[2025-07-17 22:03:30] [Rank 0] step:9701/10000 train_time:2303484ms step_avg:237.45ms +[2025-07-17 22:03:35] [Rank 0] step:9721/10000 train_time:2308522ms step_avg:237.48ms +[2025-07-17 22:03:35] [Rank 0] step:9721/10000 train_time:2308522ms step_avg:237.48ms +[2025-07-17 22:03:40] [Rank 0] step:9741/10000 train_time:2313583ms step_avg:237.51ms +[2025-07-17 22:03:40] [Rank 0] step:9741/10000 train_time:2313583ms 
step_avg:237.51ms +[2025-07-17 22:03:47] [Rank 0] PRINT: step:9750/10000 val_loss:4.5868 train_time:2317200ms step_avg:237.66ms +[2025-07-17 22:03:47] [Rank 0] PRINT: step:9750/10000 val_loss:4.5868 train_time:2317200ms step_avg:237.66ms +[2025-07-17 22:03:50] [Rank 0] step:9761/10000 train_time:2318607ms step_avg:237.54ms +[2025-07-17 22:03:50] [Rank 0] step:9761/10000 train_time:2318607ms step_avg:237.54ms +[2025-07-17 22:03:55] [Rank 0] step:9781/10000 train_time:2323649ms step_avg:237.57ms +[2025-07-17 22:03:55] [Rank 0] step:9781/10000 train_time:2323649ms step_avg:237.57ms +[2025-07-17 22:04:00] [Rank 0] step:9801/10000 train_time:2328658ms step_avg:237.59ms +[2025-07-17 22:04:00] [Rank 0] step:9801/10000 train_time:2328658ms step_avg:237.59ms +[2025-07-17 22:04:05] [Rank 0] step:9821/10000 train_time:2333694ms step_avg:237.62ms +[2025-07-17 22:04:05] [Rank 0] step:9821/10000 train_time:2333694ms step_avg:237.62ms +[2025-07-17 22:04:10] [Rank 0] step:9841/10000 train_time:2338721ms step_avg:237.65ms +[2025-07-17 22:04:10] [Rank 0] step:9841/10000 train_time:2338721ms step_avg:237.65ms +[2025-07-17 22:04:15] [Rank 0] step:9861/10000 train_time:2343750ms step_avg:237.68ms +[2025-07-17 22:04:15] [Rank 0] step:9861/10000 train_time:2343750ms step_avg:237.68ms +[2025-07-17 22:04:23] [Rank 0] PRINT: step:9875/10000 val_loss:4.5763 train_time:2348621ms step_avg:237.84ms +[2025-07-17 22:04:23] [Rank 0] PRINT: step:9875/10000 val_loss:4.5763 train_time:2348621ms step_avg:237.84ms +[2025-07-17 22:04:24] [Rank 0] step:9881/10000 train_time:2348774ms step_avg:237.71ms +[2025-07-17 22:04:24] [Rank 0] step:9881/10000 train_time:2348774ms step_avg:237.71ms +[2025-07-17 22:04:29] [Rank 0] step:9901/10000 train_time:2353808ms step_avg:237.73ms +[2025-07-17 22:04:29] [Rank 0] step:9901/10000 train_time:2353808ms step_avg:237.73ms +[2025-07-17 22:04:34] [Rank 0] step:9921/10000 train_time:2358863ms step_avg:237.76ms +[2025-07-17 22:04:34] [Rank 0] step:9921/10000 
train_time:2358863ms step_avg:237.76ms +[2025-07-17 22:04:39] [Rank 0] step:9941/10000 train_time:2363930ms step_avg:237.80ms +[2025-07-17 22:04:39] [Rank 0] step:9941/10000 train_time:2363930ms step_avg:237.80ms +[2025-07-17 22:04:44] [Rank 0] step:9961/10000 train_time:2369000ms step_avg:237.83ms +[2025-07-17 22:04:44] [Rank 0] step:9961/10000 train_time:2369000ms step_avg:237.83ms +[2025-07-17 22:04:50] [Rank 0] step:9981/10000 train_time:2374067ms step_avg:237.86ms +[2025-07-17 22:04:50] [Rank 0] step:9981/10000 train_time:2374067ms step_avg:237.86ms +[2025-07-17 22:04:54] [Rank 0] step:10000/10000 train_time:2378852ms step_avg:237.89ms +[2025-07-17 22:04:54] [Rank 0] step:10000/10000 train_time:2378852ms step_avg:237.89ms +[2025-07-17 22:04:59] [Rank 0] PRINT: step:10000/10000 val_loss:4.5929 train_time:2380206ms step_avg:238.02ms +[2025-07-17 22:04:59] [Rank 0] PRINT: step:10000/10000 val_loss:4.5929 train_time:2380206ms step_avg:238.02ms +[2025-07-17 22:04:59] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 22:04:59 2025 --- +[2025-07-17 22:04:59] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 22:04:59 2025 --- +[2025-07-17 22:04:59] [Rank 0] PRINT: Peak memory allocated: 31193 MiB reserved: 31476 MiB +[2025-07-17 22:04:59] [Rank 0] PRINT: Peak memory allocated: 31193 MiB reserved: 31476 MiB diff --git a/logs_norope/diff_modes/mode_6_param_norope_seed_42/config.json b/logs_norope/diff_modes/mode_6_param_norope_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..12a6c7decdba8ecfece07292c05d9f307d4f76b0 --- /dev/null +++ b/logs_norope/diff_modes/mode_6_param_norope_seed_42/config.json @@ -0,0 +1,22 @@ +{ + "cli_args": { + "unet": false, + "seed": 42, + "optimizer_mode": 6, + "model_parameterization": "norope" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 10485760, + "train_seq_len": 49152, + "val_seq_len": 262144, + "num_iterations": 10000, + "cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 125, + "save_checkpoint": false + }, + "run_uuid_for_log": "dac82d86-78ef-41a3-a2bd-075a721b851d", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_norope/diff_modes/mode_6_param_norope_seed_42/training_log_dac82d86-78ef-41a3-a2bd-075a721b851d.txt b/logs_norope/diff_modes/mode_6_param_norope_seed_42/training_log_dac82d86-78ef-41a3-a2bd-075a721b851d.txt new file mode 100644 index 0000000000000000000000000000000000000000..d2c8f5a67d09d4513ea63cfa82853b1713726ef3 --- /dev/null +++ b/logs_norope/diff_modes/mode_6_param_norope_seed_42/training_log_dac82d86-78ef-41a3-a2bd-075a721b851d.txt @@ -0,0 +1,2360 @@ +[2025-07-17 14:53:22] [Rank 0] PRINT: --- Script Start: Thu Jul 17 14:53:22 2025 --- +[2025-07-17 14:53:22] [Rank 0] PRINT: --- Script Start: Thu Jul 17 14:53:22 2025 --- +[2025-07-17 14:53:22] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=6, model_parameterization='norope') +[2025-07-17 14:53:22] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=6, model_parameterization='norope') +[2025-07-17 14:53:22] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 14:53:22] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 14:53:22] [Rank 0] PRINT: Using fixed seed: 42 +[2025-07-17 14:53:22] [Rank 0] PRINT: Using fixed seed: 42 +[2025-07-17 14:53:22] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_6_param_norope_seed_42 +[2025-07-17 14:53:22] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_6_param_norope_seed_42 +[2025-07-17 14:53:22] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import 
time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = 
torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used 
for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + 
config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 14:53:22] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + 
num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + 
run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 14:53:22] [Rank 0] PRINT: Constructing model... +[2025-07-17 14:53:22] [Rank 0] PRINT: Constructing model... +[2025-07-17 14:53:24] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 14:53:24] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 14:53:24] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 14:53:24] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 14:53:24] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 14:53:24] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 14:53:24] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 14:53:24] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 14:53:24] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 6 +[2025-07-17 14:53:24] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 6 +[2025-07-17 14:53:24] [Rank 0] PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: 0.001). +[2025-07-17 14:53:24] [Rank 0] PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: 0.001). +[2025-07-17 14:53:24] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-07-17 14:53:24] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-07-17 14:53:24] [Rank 0] PRINT: Muon optimizer is active with 12 parameters. +[2025-07-17 14:53:24] [Rank 0] PRINT: Muon optimizer is active with 12 parameters. +[2025-07-17 14:53:24] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 14:53:24] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 14:53:25] [Rank 0] PRINT: Model compilation complete. +[2025-07-17 14:53:25] [Rank 0] PRINT: Model compilation complete. +[2025-07-17 14:53:25] [Rank 0] PRINT: Starting warmup... 
+[2025-07-17 14:53:25] [Rank 0] PRINT: Starting warmup... +[2025-07-17 14:54:31] [Rank 0] PRINT: Warmup complete. +[2025-07-17 14:54:31] [Rank 0] PRINT: Warmup complete. +[2025-07-17 14:54:32] [Rank 0] PRINT: Starting training... +[2025-07-17 14:54:32] [Rank 0] PRINT: Starting training... +[2025-07-17 14:54:44] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 14:54:44] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 14:54:49] [Rank 0] step:21/10000 train_time:4313ms step_avg:205.36ms +[2025-07-17 14:54:49] [Rank 0] step:21/10000 train_time:4313ms step_avg:205.36ms +[2025-07-17 14:54:54] [Rank 0] step:41/10000 train_time:8771ms step_avg:213.92ms +[2025-07-17 14:54:54] [Rank 0] step:41/10000 train_time:8771ms step_avg:213.92ms +[2025-07-17 14:54:58] [Rank 0] step:61/10000 train_time:13238ms step_avg:217.01ms +[2025-07-17 14:54:58] [Rank 0] step:61/10000 train_time:13238ms step_avg:217.01ms +[2025-07-17 14:55:03] [Rank 0] step:81/10000 train_time:17707ms step_avg:218.60ms +[2025-07-17 14:55:03] [Rank 0] step:81/10000 train_time:17707ms step_avg:218.60ms +[2025-07-17 14:55:07] [Rank 0] step:101/10000 train_time:22187ms step_avg:219.68ms +[2025-07-17 14:55:07] [Rank 0] step:101/10000 train_time:22187ms step_avg:219.68ms +[2025-07-17 14:55:12] [Rank 0] step:121/10000 train_time:26662ms step_avg:220.35ms +[2025-07-17 14:55:12] [Rank 0] step:121/10000 train_time:26662ms step_avg:220.35ms +[2025-07-17 14:55:16] [Rank 0] PRINT: step:125/10000 val_loss:5.4924 train_time:28233ms step_avg:225.87ms +[2025-07-17 14:55:16] [Rank 0] PRINT: step:125/10000 val_loss:5.4924 train_time:28233ms step_avg:225.87ms +[2025-07-17 14:55:20] [Rank 0] step:141/10000 train_time:31139ms step_avg:220.85ms +[2025-07-17 14:55:20] [Rank 0] step:141/10000 train_time:31139ms step_avg:220.85ms +[2025-07-17 14:55:25] [Rank 0] step:161/10000 train_time:35619ms step_avg:221.23ms +[2025-07-17 14:55:25] [Rank 0] step:161/10000 
train_time:35619ms step_avg:221.23ms +[2025-07-17 14:55:29] [Rank 0] step:181/10000 train_time:40097ms step_avg:221.53ms +[2025-07-17 14:55:29] [Rank 0] step:181/10000 train_time:40097ms step_avg:221.53ms +[2025-07-17 14:55:34] [Rank 0] step:201/10000 train_time:44682ms step_avg:222.30ms +[2025-07-17 14:55:34] [Rank 0] step:201/10000 train_time:44682ms step_avg:222.30ms +[2025-07-17 14:55:38] [Rank 0] step:221/10000 train_time:49272ms step_avg:222.95ms +[2025-07-17 14:55:38] [Rank 0] step:221/10000 train_time:49272ms step_avg:222.95ms +[2025-07-17 14:55:43] [Rank 0] step:241/10000 train_time:53759ms step_avg:223.07ms +[2025-07-17 14:55:43] [Rank 0] step:241/10000 train_time:53759ms step_avg:223.07ms +[2025-07-17 14:55:49] [Rank 0] PRINT: step:250/10000 val_loss:5.0832 train_time:56454ms step_avg:225.81ms +[2025-07-17 14:55:49] [Rank 0] PRINT: step:250/10000 val_loss:5.0832 train_time:56454ms step_avg:225.81ms +[2025-07-17 14:55:52] [Rank 0] step:261/10000 train_time:58240ms step_avg:223.14ms +[2025-07-17 14:55:52] [Rank 0] step:261/10000 train_time:58240ms step_avg:223.14ms +[2025-07-17 14:55:56] [Rank 0] step:281/10000 train_time:62715ms step_avg:223.18ms +[2025-07-17 14:55:56] [Rank 0] step:281/10000 train_time:62715ms step_avg:223.18ms +[2025-07-17 14:56:01] [Rank 0] step:301/10000 train_time:67193ms step_avg:223.23ms +[2025-07-17 14:56:01] [Rank 0] step:301/10000 train_time:67193ms step_avg:223.23ms +[2025-07-17 14:56:05] [Rank 0] step:321/10000 train_time:71671ms step_avg:223.27ms +[2025-07-17 14:56:05] [Rank 0] step:321/10000 train_time:71671ms step_avg:223.27ms +[2025-07-17 14:56:10] [Rank 0] step:341/10000 train_time:76148ms step_avg:223.31ms +[2025-07-17 14:56:10] [Rank 0] step:341/10000 train_time:76148ms step_avg:223.31ms +[2025-07-17 14:56:14] [Rank 0] step:361/10000 train_time:80626ms step_avg:223.34ms +[2025-07-17 14:56:14] [Rank 0] step:361/10000 train_time:80626ms step_avg:223.34ms +[2025-07-17 14:56:22] [Rank 0] PRINT: step:375/10000 
val_loss:5.0577 train_time:84437ms step_avg:225.16ms +[2025-07-17 14:56:22] [Rank 0] PRINT: step:375/10000 val_loss:5.0577 train_time:84437ms step_avg:225.16ms +[2025-07-17 14:56:23] [Rank 0] step:381/10000 train_time:85105ms step_avg:223.37ms +[2025-07-17 14:56:23] [Rank 0] step:381/10000 train_time:85105ms step_avg:223.37ms +[2025-07-17 14:56:27] [Rank 0] step:401/10000 train_time:89579ms step_avg:223.39ms +[2025-07-17 14:56:27] [Rank 0] step:401/10000 train_time:89579ms step_avg:223.39ms +[2025-07-17 14:56:32] [Rank 0] step:421/10000 train_time:94055ms step_avg:223.41ms +[2025-07-17 14:56:32] [Rank 0] step:421/10000 train_time:94055ms step_avg:223.41ms +[2025-07-17 14:56:36] [Rank 0] step:441/10000 train_time:98531ms step_avg:223.43ms +[2025-07-17 14:56:36] [Rank 0] step:441/10000 train_time:98531ms step_avg:223.43ms +[2025-07-17 14:56:41] [Rank 0] step:461/10000 train_time:103009ms step_avg:223.45ms +[2025-07-17 14:56:41] [Rank 0] step:461/10000 train_time:103009ms step_avg:223.45ms +[2025-07-17 14:56:45] [Rank 0] step:481/10000 train_time:107488ms step_avg:223.47ms +[2025-07-17 14:56:45] [Rank 0] step:481/10000 train_time:107488ms step_avg:223.47ms +[2025-07-17 14:56:54] [Rank 0] PRINT: step:500/10000 val_loss:5.0453 train_time:112416ms step_avg:224.83ms +[2025-07-17 14:56:54] [Rank 0] PRINT: step:500/10000 val_loss:5.0453 train_time:112416ms step_avg:224.83ms +[2025-07-17 14:56:54] [Rank 0] step:501/10000 train_time:112427ms step_avg:224.41ms +[2025-07-17 14:56:54] [Rank 0] step:501/10000 train_time:112427ms step_avg:224.41ms +[2025-07-17 14:56:58] [Rank 0] step:521/10000 train_time:116515ms step_avg:223.64ms +[2025-07-17 14:56:58] [Rank 0] step:521/10000 train_time:116515ms step_avg:223.64ms +[2025-07-17 14:57:03] [Rank 0] step:541/10000 train_time:120993ms step_avg:223.65ms +[2025-07-17 14:57:03] [Rank 0] step:541/10000 train_time:120993ms step_avg:223.65ms +[2025-07-17 14:57:07] [Rank 0] step:561/10000 train_time:125474ms step_avg:223.66ms +[2025-07-17 
14:57:07] [Rank 0] step:561/10000 train_time:125474ms step_avg:223.66ms +[2025-07-17 14:57:12] [Rank 0] step:581/10000 train_time:129956ms step_avg:223.68ms +[2025-07-17 14:57:12] [Rank 0] step:581/10000 train_time:129956ms step_avg:223.68ms +[2025-07-17 14:57:16] [Rank 0] step:601/10000 train_time:134439ms step_avg:223.69ms +[2025-07-17 14:57:16] [Rank 0] step:601/10000 train_time:134439ms step_avg:223.69ms +[2025-07-17 14:57:21] [Rank 0] step:621/10000 train_time:138922ms step_avg:223.71ms +[2025-07-17 14:57:21] [Rank 0] step:621/10000 train_time:138922ms step_avg:223.71ms +[2025-07-17 14:57:26] [Rank 0] PRINT: step:625/10000 val_loss:5.0375 train_time:140495ms step_avg:224.79ms +[2025-07-17 14:57:26] [Rank 0] PRINT: step:625/10000 val_loss:5.0375 train_time:140495ms step_avg:224.79ms +[2025-07-17 14:57:30] [Rank 0] step:641/10000 train_time:143400ms step_avg:223.71ms +[2025-07-17 14:57:30] [Rank 0] step:641/10000 train_time:143400ms step_avg:223.71ms +[2025-07-17 14:57:34] [Rank 0] step:661/10000 train_time:147883ms step_avg:223.73ms +[2025-07-17 14:57:34] [Rank 0] step:661/10000 train_time:147883ms step_avg:223.73ms +[2025-07-17 14:57:39] [Rank 0] step:681/10000 train_time:152363ms step_avg:223.73ms +[2025-07-17 14:57:39] [Rank 0] step:681/10000 train_time:152363ms step_avg:223.73ms +[2025-07-17 14:57:43] [Rank 0] step:701/10000 train_time:156848ms step_avg:223.75ms +[2025-07-17 14:57:43] [Rank 0] step:701/10000 train_time:156848ms step_avg:223.75ms +[2025-07-17 14:57:48] [Rank 0] step:721/10000 train_time:161330ms step_avg:223.76ms +[2025-07-17 14:57:48] [Rank 0] step:721/10000 train_time:161330ms step_avg:223.76ms +[2025-07-17 14:57:52] [Rank 0] step:741/10000 train_time:165813ms step_avg:223.77ms +[2025-07-17 14:57:52] [Rank 0] step:741/10000 train_time:165813ms step_avg:223.77ms +[2025-07-17 14:57:58] [Rank 0] PRINT: step:750/10000 val_loss:4.8785 train_time:168522ms step_avg:224.70ms +[2025-07-17 14:57:58] [Rank 0] PRINT: step:750/10000 val_loss:4.8785 
train_time:168522ms step_avg:224.70ms +[2025-07-17 14:58:01] [Rank 0] step:761/10000 train_time:170325ms step_avg:223.82ms +[2025-07-17 14:58:01] [Rank 0] step:761/10000 train_time:170325ms step_avg:223.82ms +[2025-07-17 14:58:05] [Rank 0] step:781/10000 train_time:174845ms step_avg:223.87ms +[2025-07-17 14:58:05] [Rank 0] step:781/10000 train_time:174845ms step_avg:223.87ms +[2025-07-17 14:58:10] [Rank 0] step:801/10000 train_time:179368ms step_avg:223.93ms +[2025-07-17 14:58:10] [Rank 0] step:801/10000 train_time:179368ms step_avg:223.93ms +[2025-07-17 14:58:15] [Rank 0] step:821/10000 train_time:183893ms step_avg:223.99ms +[2025-07-17 14:58:15] [Rank 0] step:821/10000 train_time:183893ms step_avg:223.99ms +[2025-07-17 14:58:19] [Rank 0] step:841/10000 train_time:188417ms step_avg:224.04ms +[2025-07-17 14:58:19] [Rank 0] step:841/10000 train_time:188417ms step_avg:224.04ms +[2025-07-17 14:58:24] [Rank 0] step:861/10000 train_time:192941ms step_avg:224.09ms +[2025-07-17 14:58:24] [Rank 0] step:861/10000 train_time:192941ms step_avg:224.09ms +[2025-07-17 14:58:31] [Rank 0] PRINT: step:875/10000 val_loss:4.5789 train_time:196791ms step_avg:224.90ms +[2025-07-17 14:58:31] [Rank 0] PRINT: step:875/10000 val_loss:4.5789 train_time:196791ms step_avg:224.90ms +[2025-07-17 14:58:33] [Rank 0] step:881/10000 train_time:197463ms step_avg:224.14ms +[2025-07-17 14:58:33] [Rank 0] step:881/10000 train_time:197463ms step_avg:224.14ms +[2025-07-17 14:58:37] [Rank 0] step:901/10000 train_time:201986ms step_avg:224.18ms +[2025-07-17 14:58:37] [Rank 0] step:901/10000 train_time:201986ms step_avg:224.18ms +[2025-07-17 14:58:42] [Rank 0] step:921/10000 train_time:206511ms step_avg:224.22ms +[2025-07-17 14:58:42] [Rank 0] step:921/10000 train_time:206511ms step_avg:224.22ms +[2025-07-17 14:58:46] [Rank 0] step:941/10000 train_time:211040ms step_avg:224.27ms +[2025-07-17 14:58:46] [Rank 0] step:941/10000 train_time:211040ms step_avg:224.27ms +[2025-07-17 14:58:51] [Rank 0] 
step:961/10000 train_time:215567ms step_avg:224.31ms +[2025-07-17 14:58:51] [Rank 0] step:961/10000 train_time:215567ms step_avg:224.31ms +[2025-07-17 14:58:55] [Rank 0] step:981/10000 train_time:220094ms step_avg:224.36ms +[2025-07-17 14:58:55] [Rank 0] step:981/10000 train_time:220094ms step_avg:224.36ms +[2025-07-17 14:59:04] [Rank 0] PRINT: step:1000/10000 val_loss:4.5628 train_time:225076ms step_avg:225.08ms +[2025-07-17 14:59:04] [Rank 0] PRINT: step:1000/10000 val_loss:4.5628 train_time:225076ms step_avg:225.08ms +[2025-07-17 14:59:04] [Rank 0] step:1001/10000 train_time:225087ms step_avg:224.86ms +[2025-07-17 14:59:04] [Rank 0] step:1001/10000 train_time:225087ms step_avg:224.86ms +[2025-07-17 14:59:09] [Rank 0] step:1021/10000 train_time:229204ms step_avg:224.49ms +[2025-07-17 14:59:09] [Rank 0] step:1021/10000 train_time:229204ms step_avg:224.49ms +[2025-07-17 14:59:13] [Rank 0] step:1041/10000 train_time:233734ms step_avg:224.53ms +[2025-07-17 14:59:13] [Rank 0] step:1041/10000 train_time:233734ms step_avg:224.53ms +[2025-07-17 14:59:18] [Rank 0] step:1061/10000 train_time:238264ms step_avg:224.57ms +[2025-07-17 14:59:18] [Rank 0] step:1061/10000 train_time:238264ms step_avg:224.57ms +[2025-07-17 14:59:22] [Rank 0] step:1081/10000 train_time:242791ms step_avg:224.60ms +[2025-07-17 14:59:22] [Rank 0] step:1081/10000 train_time:242791ms step_avg:224.60ms +[2025-07-17 14:59:27] [Rank 0] step:1101/10000 train_time:247319ms step_avg:224.63ms +[2025-07-17 14:59:27] [Rank 0] step:1101/10000 train_time:247319ms step_avg:224.63ms +[2025-07-17 14:59:32] [Rank 0] step:1121/10000 train_time:251847ms step_avg:224.66ms +[2025-07-17 14:59:32] [Rank 0] step:1121/10000 train_time:251847ms step_avg:224.66ms +[2025-07-17 14:59:37] [Rank 0] PRINT: step:1125/10000 val_loss:4.5322 train_time:253438ms step_avg:225.28ms +[2025-07-17 14:59:37] [Rank 0] PRINT: step:1125/10000 val_loss:4.5322 train_time:253438ms step_avg:225.28ms +[2025-07-17 14:59:41] [Rank 0] step:1141/10000 
train_time:256375ms step_avg:224.69ms +[2025-07-17 14:59:41] [Rank 0] step:1141/10000 train_time:256375ms step_avg:224.69ms +[2025-07-17 14:59:45] [Rank 0] step:1161/10000 train_time:260905ms step_avg:224.72ms +[2025-07-17 14:59:45] [Rank 0] step:1161/10000 train_time:260905ms step_avg:224.72ms +[2025-07-17 14:59:50] [Rank 0] step:1181/10000 train_time:265535ms step_avg:224.84ms +[2025-07-17 14:59:50] [Rank 0] step:1181/10000 train_time:265535ms step_avg:224.84ms +[2025-07-17 14:59:54] [Rank 0] step:1201/10000 train_time:270062ms step_avg:224.86ms +[2025-07-17 14:59:54] [Rank 0] step:1201/10000 train_time:270062ms step_avg:224.86ms +[2025-07-17 14:59:59] [Rank 0] step:1221/10000 train_time:274594ms step_avg:224.89ms +[2025-07-17 14:59:59] [Rank 0] step:1221/10000 train_time:274594ms step_avg:224.89ms +[2025-07-17 15:00:03] [Rank 0] step:1241/10000 train_time:279124ms step_avg:224.92ms +[2025-07-17 15:00:03] [Rank 0] step:1241/10000 train_time:279124ms step_avg:224.92ms +[2025-07-17 15:00:10] [Rank 0] PRINT: step:1250/10000 val_loss:4.5220 train_time:281846ms step_avg:225.48ms +[2025-07-17 15:00:10] [Rank 0] PRINT: step:1250/10000 val_loss:4.5220 train_time:281846ms step_avg:225.48ms +[2025-07-17 15:00:12] [Rank 0] step:1261/10000 train_time:283652ms step_avg:224.94ms +[2025-07-17 15:00:12] [Rank 0] step:1261/10000 train_time:283652ms step_avg:224.94ms +[2025-07-17 15:00:17] [Rank 0] step:1281/10000 train_time:288184ms step_avg:224.97ms +[2025-07-17 15:00:17] [Rank 0] step:1281/10000 train_time:288184ms step_avg:224.97ms +[2025-07-17 15:00:21] [Rank 0] step:1301/10000 train_time:292820ms step_avg:225.07ms +[2025-07-17 15:00:21] [Rank 0] step:1301/10000 train_time:292820ms step_avg:225.07ms +[2025-07-17 15:00:26] [Rank 0] step:1321/10000 train_time:297356ms step_avg:225.10ms +[2025-07-17 15:00:26] [Rank 0] step:1321/10000 train_time:297356ms step_avg:225.10ms +[2025-07-17 15:00:31] [Rank 0] step:1341/10000 train_time:301990ms step_avg:225.20ms +[2025-07-17 15:00:31] 
[Rank 0] step:1341/10000 train_time:301990ms step_avg:225.20ms +[2025-07-17 15:00:35] [Rank 0] step:1361/10000 train_time:306527ms step_avg:225.22ms +[2025-07-17 15:00:35] [Rank 0] step:1361/10000 train_time:306527ms step_avg:225.22ms +[2025-07-17 15:00:43] [Rank 0] PRINT: step:1375/10000 val_loss:4.5768 train_time:310388ms step_avg:225.74ms +[2025-07-17 15:00:43] [Rank 0] PRINT: step:1375/10000 val_loss:4.5768 train_time:310388ms step_avg:225.74ms +[2025-07-17 15:00:44] [Rank 0] step:1381/10000 train_time:311063ms step_avg:225.25ms +[2025-07-17 15:00:44] [Rank 0] step:1381/10000 train_time:311063ms step_avg:225.25ms +[2025-07-17 15:00:49] [Rank 0] step:1401/10000 train_time:315601ms step_avg:225.27ms +[2025-07-17 15:00:49] [Rank 0] step:1401/10000 train_time:315601ms step_avg:225.27ms +[2025-07-17 15:00:53] [Rank 0] step:1421/10000 train_time:320148ms step_avg:225.30ms +[2025-07-17 15:00:53] [Rank 0] step:1421/10000 train_time:320148ms step_avg:225.30ms +[2025-07-17 15:00:58] [Rank 0] step:1441/10000 train_time:324690ms step_avg:225.32ms +[2025-07-17 15:00:58] [Rank 0] step:1441/10000 train_time:324690ms step_avg:225.32ms +[2025-07-17 15:01:02] [Rank 0] step:1461/10000 train_time:329228ms step_avg:225.34ms +[2025-07-17 15:01:02] [Rank 0] step:1461/10000 train_time:329228ms step_avg:225.34ms +[2025-07-17 15:01:07] [Rank 0] step:1481/10000 train_time:333767ms step_avg:225.37ms +[2025-07-17 15:01:07] [Rank 0] step:1481/10000 train_time:333767ms step_avg:225.37ms +[2025-07-17 15:01:15] [Rank 0] PRINT: step:1500/10000 val_loss:4.5281 train_time:338784ms step_avg:225.86ms +[2025-07-17 15:01:15] [Rank 0] PRINT: step:1500/10000 val_loss:4.5281 train_time:338784ms step_avg:225.86ms +[2025-07-17 15:01:16] [Rank 0] step:1501/10000 train_time:338796ms step_avg:225.71ms +[2025-07-17 15:01:16] [Rank 0] step:1501/10000 train_time:338796ms step_avg:225.71ms +[2025-07-17 15:01:20] [Rank 0] step:1521/10000 train_time:342896ms step_avg:225.44ms +[2025-07-17 15:01:20] [Rank 0] 
step:1521/10000 train_time:342896ms step_avg:225.44ms +[2025-07-17 15:01:25] [Rank 0] step:1541/10000 train_time:347465ms step_avg:225.48ms +[2025-07-17 15:01:25] [Rank 0] step:1541/10000 train_time:347465ms step_avg:225.48ms +[2025-07-17 15:01:29] [Rank 0] step:1561/10000 train_time:352033ms step_avg:225.52ms +[2025-07-17 15:01:29] [Rank 0] step:1561/10000 train_time:352033ms step_avg:225.52ms +[2025-07-17 15:01:34] [Rank 0] step:1581/10000 train_time:356603ms step_avg:225.56ms +[2025-07-17 15:01:34] [Rank 0] step:1581/10000 train_time:356603ms step_avg:225.56ms +[2025-07-17 15:01:38] [Rank 0] step:1601/10000 train_time:361171ms step_avg:225.59ms +[2025-07-17 15:01:38] [Rank 0] step:1601/10000 train_time:361171ms step_avg:225.59ms +[2025-07-17 15:01:43] [Rank 0] step:1621/10000 train_time:365737ms step_avg:225.62ms +[2025-07-17 15:01:43] [Rank 0] step:1621/10000 train_time:365737ms step_avg:225.62ms +[2025-07-17 15:01:48] [Rank 0] PRINT: step:1625/10000 val_loss:4.5469 train_time:367342ms step_avg:226.06ms +[2025-07-17 15:01:48] [Rank 0] PRINT: step:1625/10000 val_loss:4.5469 train_time:367342ms step_avg:226.06ms +[2025-07-17 15:01:52] [Rank 0] step:1641/10000 train_time:370303ms step_avg:225.66ms +[2025-07-17 15:01:52] [Rank 0] step:1641/10000 train_time:370303ms step_avg:225.66ms +[2025-07-17 15:01:56] [Rank 0] step:1661/10000 train_time:374871ms step_avg:225.69ms +[2025-07-17 15:01:56] [Rank 0] step:1661/10000 train_time:374871ms step_avg:225.69ms +[2025-07-17 15:02:01] [Rank 0] step:1681/10000 train_time:379440ms step_avg:225.72ms +[2025-07-17 15:02:01] [Rank 0] step:1681/10000 train_time:379440ms step_avg:225.72ms +[2025-07-17 15:02:05] [Rank 0] step:1701/10000 train_time:384010ms step_avg:225.76ms +[2025-07-17 15:02:05] [Rank 0] step:1701/10000 train_time:384010ms step_avg:225.76ms +[2025-07-17 15:02:10] [Rank 0] step:1721/10000 train_time:388572ms step_avg:225.78ms +[2025-07-17 15:02:10] [Rank 0] step:1721/10000 train_time:388572ms step_avg:225.78ms 
+[2025-07-17 15:02:14] [Rank 0] step:1741/10000 train_time:393137ms step_avg:225.81ms +[2025-07-17 15:02:14] [Rank 0] step:1741/10000 train_time:393137ms step_avg:225.81ms +[2025-07-17 15:02:21] [Rank 0] PRINT: step:1750/10000 val_loss:4.4962 train_time:395881ms step_avg:226.22ms +[2025-07-17 15:02:21] [Rank 0] PRINT: step:1750/10000 val_loss:4.4962 train_time:395881ms step_avg:226.22ms +[2025-07-17 15:02:24] [Rank 0] step:1761/10000 train_time:397700ms step_avg:225.84ms +[2025-07-17 15:02:24] [Rank 0] step:1761/10000 train_time:397700ms step_avg:225.84ms +[2025-07-17 15:02:28] [Rank 0] step:1781/10000 train_time:402262ms step_avg:225.86ms +[2025-07-17 15:02:28] [Rank 0] step:1781/10000 train_time:402262ms step_avg:225.86ms +[2025-07-17 15:02:33] [Rank 0] step:1801/10000 train_time:406827ms step_avg:225.89ms +[2025-07-17 15:02:33] [Rank 0] step:1801/10000 train_time:406827ms step_avg:225.89ms +[2025-07-17 15:02:37] [Rank 0] step:1821/10000 train_time:411391ms step_avg:225.91ms +[2025-07-17 15:02:37] [Rank 0] step:1821/10000 train_time:411391ms step_avg:225.91ms +[2025-07-17 15:02:42] [Rank 0] step:1841/10000 train_time:415956ms step_avg:225.94ms +[2025-07-17 15:02:42] [Rank 0] step:1841/10000 train_time:415956ms step_avg:225.94ms +[2025-07-17 15:02:46] [Rank 0] step:1861/10000 train_time:420521ms step_avg:225.96ms +[2025-07-17 15:02:46] [Rank 0] step:1861/10000 train_time:420521ms step_avg:225.96ms +[2025-07-17 15:02:53] [Rank 0] PRINT: step:1875/10000 val_loss:4.5121 train_time:424401ms step_avg:226.35ms +[2025-07-17 15:02:53] [Rank 0] PRINT: step:1875/10000 val_loss:4.5121 train_time:424401ms step_avg:226.35ms +[2025-07-17 15:02:55] [Rank 0] step:1881/10000 train_time:425083ms step_avg:225.99ms +[2025-07-17 15:02:55] [Rank 0] step:1881/10000 train_time:425083ms step_avg:225.99ms +[2025-07-17 15:02:59] [Rank 0] step:1901/10000 train_time:429643ms step_avg:226.01ms +[2025-07-17 15:02:59] [Rank 0] step:1901/10000 train_time:429643ms step_avg:226.01ms +[2025-07-17 
15:03:04] [Rank 0] step:1921/10000 train_time:434204ms step_avg:226.03ms +[2025-07-17 15:03:04] [Rank 0] step:1921/10000 train_time:434204ms step_avg:226.03ms +[2025-07-17 15:03:09] [Rank 0] step:1941/10000 train_time:438762ms step_avg:226.05ms +[2025-07-17 15:03:09] [Rank 0] step:1941/10000 train_time:438762ms step_avg:226.05ms +[2025-07-17 15:03:13] [Rank 0] step:1961/10000 train_time:443318ms step_avg:226.07ms +[2025-07-17 15:03:13] [Rank 0] step:1961/10000 train_time:443318ms step_avg:226.07ms +[2025-07-17 15:03:18] [Rank 0] step:1981/10000 train_time:447872ms step_avg:226.08ms +[2025-07-17 15:03:18] [Rank 0] step:1981/10000 train_time:447872ms step_avg:226.08ms +[2025-07-17 15:03:26] [Rank 0] PRINT: step:2000/10000 val_loss:4.4915 train_time:452883ms step_avg:226.44ms +[2025-07-17 15:03:26] [Rank 0] PRINT: step:2000/10000 val_loss:4.4915 train_time:452883ms step_avg:226.44ms +[2025-07-17 15:03:27] [Rank 0] step:2001/10000 train_time:452894ms step_avg:226.33ms +[2025-07-17 15:03:27] [Rank 0] step:2001/10000 train_time:452894ms step_avg:226.33ms +[2025-07-17 15:03:31] [Rank 0] step:2021/10000 train_time:456979ms step_avg:226.12ms +[2025-07-17 15:03:31] [Rank 0] step:2021/10000 train_time:456979ms step_avg:226.12ms +[2025-07-17 15:03:36] [Rank 0] step:2041/10000 train_time:461598ms step_avg:226.16ms +[2025-07-17 15:03:36] [Rank 0] step:2041/10000 train_time:461598ms step_avg:226.16ms +[2025-07-17 15:03:40] [Rank 0] step:2061/10000 train_time:466154ms step_avg:226.18ms +[2025-07-17 15:03:40] [Rank 0] step:2061/10000 train_time:466154ms step_avg:226.18ms +[2025-07-17 15:03:45] [Rank 0] step:2081/10000 train_time:470713ms step_avg:226.20ms +[2025-07-17 15:03:45] [Rank 0] step:2081/10000 train_time:470713ms step_avg:226.20ms +[2025-07-17 15:03:50] [Rank 0] step:2101/10000 train_time:475274ms step_avg:226.21ms +[2025-07-17 15:03:50] [Rank 0] step:2101/10000 train_time:475274ms step_avg:226.21ms +[2025-07-17 15:03:54] [Rank 0] step:2121/10000 train_time:479834ms 
step_avg:226.23ms +[2025-07-17 15:03:54] [Rank 0] step:2121/10000 train_time:479834ms step_avg:226.23ms +[2025-07-17 15:03:59] [Rank 0] PRINT: step:2125/10000 val_loss:4.5759 train_time:481437ms step_avg:226.56ms +[2025-07-17 15:03:59] [Rank 0] PRINT: step:2125/10000 val_loss:4.5759 train_time:481437ms step_avg:226.56ms +[2025-07-17 15:04:03] [Rank 0] step:2141/10000 train_time:484397ms step_avg:226.25ms +[2025-07-17 15:04:03] [Rank 0] step:2141/10000 train_time:484397ms step_avg:226.25ms +[2025-07-17 15:04:07] [Rank 0] step:2161/10000 train_time:488960ms step_avg:226.27ms +[2025-07-17 15:04:07] [Rank 0] step:2161/10000 train_time:488960ms step_avg:226.27ms +[2025-07-17 15:04:12] [Rank 0] step:2181/10000 train_time:493523ms step_avg:226.28ms +[2025-07-17 15:04:12] [Rank 0] step:2181/10000 train_time:493523ms step_avg:226.28ms +[2025-07-17 15:04:16] [Rank 0] step:2201/10000 train_time:498083ms step_avg:226.30ms +[2025-07-17 15:04:16] [Rank 0] step:2201/10000 train_time:498083ms step_avg:226.30ms +[2025-07-17 15:04:21] [Rank 0] step:2221/10000 train_time:502644ms step_avg:226.31ms +[2025-07-17 15:04:21] [Rank 0] step:2221/10000 train_time:502644ms step_avg:226.31ms +[2025-07-17 15:04:26] [Rank 0] step:2241/10000 train_time:507291ms step_avg:226.37ms +[2025-07-17 15:04:26] [Rank 0] step:2241/10000 train_time:507291ms step_avg:226.37ms +[2025-07-17 15:04:32] [Rank 0] PRINT: step:2250/10000 val_loss:4.3114 train_time:510100ms step_avg:226.71ms +[2025-07-17 15:04:32] [Rank 0] PRINT: step:2250/10000 val_loss:4.3114 train_time:510100ms step_avg:226.71ms +[2025-07-17 15:04:35] [Rank 0] step:2261/10000 train_time:511963ms step_avg:226.43ms +[2025-07-17 15:04:35] [Rank 0] step:2261/10000 train_time:511963ms step_avg:226.43ms +[2025-07-17 15:04:40] [Rank 0] step:2281/10000 train_time:516637ms step_avg:226.50ms +[2025-07-17 15:04:40] [Rank 0] step:2281/10000 train_time:516637ms step_avg:226.50ms +[2025-07-17 15:04:44] [Rank 0] step:2301/10000 train_time:521311ms 
step_avg:226.56ms +[2025-07-17 15:04:44] [Rank 0] step:2301/10000 train_time:521311ms step_avg:226.56ms +[2025-07-17 15:04:49] [Rank 0] step:2321/10000 train_time:525986ms step_avg:226.62ms +[2025-07-17 15:04:49] [Rank 0] step:2321/10000 train_time:525986ms step_avg:226.62ms +[2025-07-17 15:04:54] [Rank 0] step:2341/10000 train_time:530657ms step_avg:226.68ms +[2025-07-17 15:04:54] [Rank 0] step:2341/10000 train_time:530657ms step_avg:226.68ms +[2025-07-17 15:04:58] [Rank 0] step:2361/10000 train_time:535333ms step_avg:226.74ms +[2025-07-17 15:04:58] [Rank 0] step:2361/10000 train_time:535333ms step_avg:226.74ms +[2025-07-17 15:05:06] [Rank 0] PRINT: step:2375/10000 val_loss:4.3819 train_time:539311ms step_avg:227.08ms +[2025-07-17 15:05:06] [Rank 0] PRINT: step:2375/10000 val_loss:4.3819 train_time:539311ms step_avg:227.08ms +[2025-07-17 15:05:08] [Rank 0] step:2381/10000 train_time:540010ms step_avg:226.80ms +[2025-07-17 15:05:08] [Rank 0] step:2381/10000 train_time:540010ms step_avg:226.80ms +[2025-07-17 15:05:12] [Rank 0] step:2401/10000 train_time:544688ms step_avg:226.86ms +[2025-07-17 15:05:12] [Rank 0] step:2401/10000 train_time:544688ms step_avg:226.86ms +[2025-07-17 15:05:17] [Rank 0] step:2421/10000 train_time:549363ms step_avg:226.92ms +[2025-07-17 15:05:17] [Rank 0] step:2421/10000 train_time:549363ms step_avg:226.92ms +[2025-07-17 15:05:22] [Rank 0] step:2441/10000 train_time:554039ms step_avg:226.97ms +[2025-07-17 15:05:22] [Rank 0] step:2441/10000 train_time:554039ms step_avg:226.97ms +[2025-07-17 15:05:26] [Rank 0] step:2461/10000 train_time:558714ms step_avg:227.03ms +[2025-07-17 15:05:26] [Rank 0] step:2461/10000 train_time:558714ms step_avg:227.03ms +[2025-07-17 15:05:31] [Rank 0] step:2481/10000 train_time:563390ms step_avg:227.08ms +[2025-07-17 15:05:31] [Rank 0] step:2481/10000 train_time:563390ms step_avg:227.08ms +[2025-07-17 15:05:40] [Rank 0] PRINT: step:2500/10000 val_loss:4.2896 train_time:568534ms step_avg:227.41ms +[2025-07-17 
15:05:40] [Rank 0] PRINT: step:2500/10000 val_loss:4.2896 train_time:568534ms step_avg:227.41ms +[2025-07-17 15:05:40] [Rank 0] step:2501/10000 train_time:568546ms step_avg:227.33ms +[2025-07-17 15:05:40] [Rank 0] step:2501/10000 train_time:568546ms step_avg:227.33ms +[2025-07-17 15:05:45] [Rank 0] step:2521/10000 train_time:572736ms step_avg:227.19ms +[2025-07-17 15:05:45] [Rank 0] step:2521/10000 train_time:572736ms step_avg:227.19ms +[2025-07-17 15:05:50] [Rank 0] step:2541/10000 train_time:577413ms step_avg:227.24ms +[2025-07-17 15:05:50] [Rank 0] step:2541/10000 train_time:577413ms step_avg:227.24ms +[2025-07-17 15:05:54] [Rank 0] step:2561/10000 train_time:582142ms step_avg:227.31ms +[2025-07-17 15:05:54] [Rank 0] step:2561/10000 train_time:582142ms step_avg:227.31ms +[2025-07-17 15:05:59] [Rank 0] step:2581/10000 train_time:586819ms step_avg:227.36ms +[2025-07-17 15:05:59] [Rank 0] step:2581/10000 train_time:586819ms step_avg:227.36ms +[2025-07-17 15:06:04] [Rank 0] step:2601/10000 train_time:591493ms step_avg:227.41ms +[2025-07-17 15:06:04] [Rank 0] step:2601/10000 train_time:591493ms step_avg:227.41ms +[2025-07-17 15:06:08] [Rank 0] step:2621/10000 train_time:596170ms step_avg:227.46ms +[2025-07-17 15:06:08] [Rank 0] step:2621/10000 train_time:596170ms step_avg:227.46ms +[2025-07-17 15:06:14] [Rank 0] PRINT: step:2625/10000 val_loss:4.4503 train_time:597811ms step_avg:227.74ms +[2025-07-17 15:06:14] [Rank 0] PRINT: step:2625/10000 val_loss:4.4503 train_time:597811ms step_avg:227.74ms +[2025-07-17 15:06:18] [Rank 0] step:2641/10000 train_time:600840ms step_avg:227.50ms +[2025-07-17 15:06:18] [Rank 0] step:2641/10000 train_time:600840ms step_avg:227.50ms +[2025-07-17 15:06:22] [Rank 0] step:2661/10000 train_time:605510ms step_avg:227.55ms +[2025-07-17 15:06:22] [Rank 0] step:2661/10000 train_time:605510ms step_avg:227.55ms +[2025-07-17 15:06:27] [Rank 0] step:2681/10000 train_time:610182ms step_avg:227.60ms +[2025-07-17 15:06:27] [Rank 0] step:2681/10000 
train_time:610182ms step_avg:227.60ms +[2025-07-17 15:06:32] [Rank 0] step:2701/10000 train_time:614854ms step_avg:227.64ms +[2025-07-17 15:06:32] [Rank 0] step:2701/10000 train_time:614854ms step_avg:227.64ms +[2025-07-17 15:06:36] [Rank 0] step:2721/10000 train_time:619528ms step_avg:227.68ms +[2025-07-17 15:06:36] [Rank 0] step:2721/10000 train_time:619528ms step_avg:227.68ms +[2025-07-17 15:06:41] [Rank 0] step:2741/10000 train_time:624202ms step_avg:227.73ms +[2025-07-17 15:06:41] [Rank 0] step:2741/10000 train_time:624202ms step_avg:227.73ms +[2025-07-17 15:06:48] [Rank 0] PRINT: step:2750/10000 val_loss:4.4317 train_time:627012ms step_avg:228.00ms +[2025-07-17 15:06:48] [Rank 0] PRINT: step:2750/10000 val_loss:4.4317 train_time:627012ms step_avg:228.00ms +[2025-07-17 15:06:50] [Rank 0] step:2761/10000 train_time:628875ms step_avg:227.77ms +[2025-07-17 15:06:50] [Rank 0] step:2761/10000 train_time:628875ms step_avg:227.77ms +[2025-07-17 15:06:55] [Rank 0] step:2781/10000 train_time:633547ms step_avg:227.81ms +[2025-07-17 15:06:55] [Rank 0] step:2781/10000 train_time:633547ms step_avg:227.81ms +[2025-07-17 15:07:00] [Rank 0] step:2801/10000 train_time:638219ms step_avg:227.85ms +[2025-07-17 15:07:00] [Rank 0] step:2801/10000 train_time:638219ms step_avg:227.85ms +[2025-07-17 15:07:04] [Rank 0] step:2821/10000 train_time:642896ms step_avg:227.90ms +[2025-07-17 15:07:04] [Rank 0] step:2821/10000 train_time:642896ms step_avg:227.90ms +[2025-07-17 15:07:09] [Rank 0] step:2841/10000 train_time:647572ms step_avg:227.94ms +[2025-07-17 15:07:09] [Rank 0] step:2841/10000 train_time:647572ms step_avg:227.94ms +[2025-07-17 15:07:14] [Rank 0] step:2861/10000 train_time:652248ms step_avg:227.98ms +[2025-07-17 15:07:14] [Rank 0] step:2861/10000 train_time:652248ms step_avg:227.98ms +[2025-07-17 15:07:21] [Rank 0] PRINT: step:2875/10000 val_loss:4.4197 train_time:656225ms step_avg:228.25ms +[2025-07-17 15:07:21] [Rank 0] PRINT: step:2875/10000 val_loss:4.4197 
train_time:656225ms step_avg:228.25ms +[2025-07-17 15:07:23] [Rank 0] step:2881/10000 train_time:656922ms step_avg:228.02ms +[2025-07-17 15:07:23] [Rank 0] step:2881/10000 train_time:656922ms step_avg:228.02ms +[2025-07-17 15:07:28] [Rank 0] step:2901/10000 train_time:661595ms step_avg:228.06ms +[2025-07-17 15:07:28] [Rank 0] step:2901/10000 train_time:661595ms step_avg:228.06ms +[2025-07-17 15:07:32] [Rank 0] step:2921/10000 train_time:666270ms step_avg:228.10ms +[2025-07-17 15:07:32] [Rank 0] step:2921/10000 train_time:666270ms step_avg:228.10ms +[2025-07-17 15:07:37] [Rank 0] step:2941/10000 train_time:670945ms step_avg:228.14ms +[2025-07-17 15:07:37] [Rank 0] step:2941/10000 train_time:670945ms step_avg:228.14ms +[2025-07-17 15:07:42] [Rank 0] step:2961/10000 train_time:675621ms step_avg:228.17ms +[2025-07-17 15:07:42] [Rank 0] step:2961/10000 train_time:675621ms step_avg:228.17ms +[2025-07-17 15:07:46] [Rank 0] step:2981/10000 train_time:680311ms step_avg:228.22ms +[2025-07-17 15:07:46] [Rank 0] step:2981/10000 train_time:680311ms step_avg:228.22ms +[2025-07-17 15:07:55] [Rank 0] PRINT: step:3000/10000 val_loss:4.1720 train_time:685474ms step_avg:228.49ms +[2025-07-17 15:07:55] [Rank 0] PRINT: step:3000/10000 val_loss:4.1720 train_time:685474ms step_avg:228.49ms +[2025-07-17 15:07:56] [Rank 0] step:3001/10000 train_time:685485ms step_avg:228.42ms +[2025-07-17 15:07:56] [Rank 0] step:3001/10000 train_time:685485ms step_avg:228.42ms +[2025-07-17 15:08:00] [Rank 0] step:3021/10000 train_time:689696ms step_avg:228.30ms +[2025-07-17 15:08:00] [Rank 0] step:3021/10000 train_time:689696ms step_avg:228.30ms +[2025-07-17 15:08:05] [Rank 0] step:3041/10000 train_time:694391ms step_avg:228.34ms +[2025-07-17 15:08:05] [Rank 0] step:3041/10000 train_time:694391ms step_avg:228.34ms +[2025-07-17 15:08:10] [Rank 0] step:3061/10000 train_time:699084ms step_avg:228.38ms +[2025-07-17 15:08:10] [Rank 0] step:3061/10000 train_time:699084ms step_avg:228.38ms +[2025-07-17 15:08:14] 
[Rank 0] step:3081/10000 train_time:703779ms step_avg:228.43ms +[2025-07-17 15:08:14] [Rank 0] step:3081/10000 train_time:703779ms step_avg:228.43ms +[2025-07-17 15:08:19] [Rank 0] step:3101/10000 train_time:708574ms step_avg:228.50ms +[2025-07-17 15:08:19] [Rank 0] step:3101/10000 train_time:708574ms step_avg:228.50ms +[2025-07-17 15:08:24] [Rank 0] step:3121/10000 train_time:713268ms step_avg:228.54ms +[2025-07-17 15:08:24] [Rank 0] step:3121/10000 train_time:713268ms step_avg:228.54ms +[2025-07-17 15:08:29] [Rank 0] PRINT: step:3125/10000 val_loss:4.2536 train_time:714918ms step_avg:228.77ms +[2025-07-17 15:08:29] [Rank 0] PRINT: step:3125/10000 val_loss:4.2536 train_time:714918ms step_avg:228.77ms +[2025-07-17 15:08:33] [Rank 0] step:3141/10000 train_time:717961ms step_avg:228.58ms +[2025-07-17 15:08:33] [Rank 0] step:3141/10000 train_time:717961ms step_avg:228.58ms +[2025-07-17 15:08:38] [Rank 0] step:3161/10000 train_time:722657ms step_avg:228.62ms +[2025-07-17 15:08:38] [Rank 0] step:3161/10000 train_time:722657ms step_avg:228.62ms +[2025-07-17 15:08:42] [Rank 0] step:3181/10000 train_time:727352ms step_avg:228.66ms +[2025-07-17 15:08:42] [Rank 0] step:3181/10000 train_time:727352ms step_avg:228.66ms +[2025-07-17 15:08:47] [Rank 0] step:3201/10000 train_time:732051ms step_avg:228.69ms +[2025-07-17 15:08:47] [Rank 0] step:3201/10000 train_time:732051ms step_avg:228.69ms +[2025-07-17 15:08:52] [Rank 0] step:3221/10000 train_time:736749ms step_avg:228.73ms +[2025-07-17 15:08:52] [Rank 0] step:3221/10000 train_time:736749ms step_avg:228.73ms +[2025-07-17 15:08:57] [Rank 0] step:3241/10000 train_time:741446ms step_avg:228.77ms +[2025-07-17 15:08:57] [Rank 0] step:3241/10000 train_time:741446ms step_avg:228.77ms +[2025-07-17 15:09:03] [Rank 0] PRINT: step:3250/10000 val_loss:4.3703 train_time:744268ms step_avg:229.01ms +[2025-07-17 15:09:03] [Rank 0] PRINT: step:3250/10000 val_loss:4.3703 train_time:744268ms step_avg:229.01ms +[2025-07-17 15:09:06] [Rank 0] 
step:3261/10000 train_time:746142ms step_avg:228.81ms +[2025-07-17 15:09:06] [Rank 0] step:3261/10000 train_time:746142ms step_avg:228.81ms +[2025-07-17 15:09:10] [Rank 0] step:3281/10000 train_time:750843ms step_avg:228.85ms +[2025-07-17 15:09:10] [Rank 0] step:3281/10000 train_time:750843ms step_avg:228.85ms +[2025-07-17 15:09:15] [Rank 0] step:3301/10000 train_time:755542ms step_avg:228.88ms +[2025-07-17 15:09:15] [Rank 0] step:3301/10000 train_time:755542ms step_avg:228.88ms +[2025-07-17 15:09:20] [Rank 0] step:3321/10000 train_time:760243ms step_avg:228.92ms +[2025-07-17 15:09:20] [Rank 0] step:3321/10000 train_time:760243ms step_avg:228.92ms +[2025-07-17 15:09:25] [Rank 0] step:3341/10000 train_time:765046ms step_avg:228.99ms +[2025-07-17 15:09:25] [Rank 0] step:3341/10000 train_time:765046ms step_avg:228.99ms +[2025-07-17 15:09:29] [Rank 0] step:3361/10000 train_time:769746ms step_avg:229.02ms +[2025-07-17 15:09:29] [Rank 0] step:3361/10000 train_time:769746ms step_avg:229.02ms +[2025-07-17 15:09:37] [Rank 0] PRINT: step:3375/10000 val_loss:4.3274 train_time:773746ms step_avg:229.26ms +[2025-07-17 15:09:37] [Rank 0] PRINT: step:3375/10000 val_loss:4.3274 train_time:773746ms step_avg:229.26ms +[2025-07-17 15:09:39] [Rank 0] step:3381/10000 train_time:774446ms step_avg:229.06ms +[2025-07-17 15:09:39] [Rank 0] step:3381/10000 train_time:774446ms step_avg:229.06ms +[2025-07-17 15:09:43] [Rank 0] step:3401/10000 train_time:779148ms step_avg:229.09ms +[2025-07-17 15:09:43] [Rank 0] step:3401/10000 train_time:779148ms step_avg:229.09ms +[2025-07-17 15:09:48] [Rank 0] step:3421/10000 train_time:783848ms step_avg:229.13ms +[2025-07-17 15:09:48] [Rank 0] step:3421/10000 train_time:783848ms step_avg:229.13ms +[2025-07-17 15:09:53] [Rank 0] step:3441/10000 train_time:788550ms step_avg:229.16ms +[2025-07-17 15:09:53] [Rank 0] step:3441/10000 train_time:788550ms step_avg:229.16ms +[2025-07-17 15:09:57] [Rank 0] step:3461/10000 train_time:793255ms step_avg:229.20ms 
+[2025-07-17 15:09:57] [Rank 0] step:3461/10000 train_time:793255ms step_avg:229.20ms +[2025-07-17 15:10:02] [Rank 0] step:3481/10000 train_time:797956ms step_avg:229.23ms +[2025-07-17 15:10:02] [Rank 0] step:3481/10000 train_time:797956ms step_avg:229.23ms +[2025-07-17 15:10:11] [Rank 0] PRINT: step:3500/10000 val_loss:4.2551 train_time:803127ms step_avg:229.46ms +[2025-07-17 15:10:11] [Rank 0] PRINT: step:3500/10000 val_loss:4.2551 train_time:803127ms step_avg:229.46ms +[2025-07-17 15:10:11] [Rank 0] step:3501/10000 train_time:803138ms step_avg:229.40ms +[2025-07-17 15:10:11] [Rank 0] step:3501/10000 train_time:803138ms step_avg:229.40ms +[2025-07-17 15:10:16] [Rank 0] step:3521/10000 train_time:807350ms step_avg:229.30ms +[2025-07-17 15:10:16] [Rank 0] step:3521/10000 train_time:807350ms step_avg:229.30ms +[2025-07-17 15:10:21] [Rank 0] step:3541/10000 train_time:812050ms step_avg:229.33ms +[2025-07-17 15:10:21] [Rank 0] step:3541/10000 train_time:812050ms step_avg:229.33ms +[2025-07-17 15:10:25] [Rank 0] step:3561/10000 train_time:816797ms step_avg:229.37ms +[2025-07-17 15:10:25] [Rank 0] step:3561/10000 train_time:816797ms step_avg:229.37ms +[2025-07-17 15:10:30] [Rank 0] step:3581/10000 train_time:821495ms step_avg:229.40ms +[2025-07-17 15:10:30] [Rank 0] step:3581/10000 train_time:821495ms step_avg:229.40ms +[2025-07-17 15:10:35] [Rank 0] step:3601/10000 train_time:826193ms step_avg:229.43ms +[2025-07-17 15:10:35] [Rank 0] step:3601/10000 train_time:826193ms step_avg:229.43ms +[2025-07-17 15:10:40] [Rank 0] step:3621/10000 train_time:830891ms step_avg:229.46ms +[2025-07-17 15:10:40] [Rank 0] step:3621/10000 train_time:830891ms step_avg:229.46ms +[2025-07-17 15:10:45] [Rank 0] PRINT: step:3625/10000 val_loss:4.2701 train_time:832541ms step_avg:229.67ms +[2025-07-17 15:10:45] [Rank 0] PRINT: step:3625/10000 val_loss:4.2701 train_time:832541ms step_avg:229.67ms +[2025-07-17 15:10:49] [Rank 0] step:3641/10000 train_time:835588ms step_avg:229.49ms +[2025-07-17 
15:10:49] [Rank 0] step:3641/10000 train_time:835588ms step_avg:229.49ms +[2025-07-17 15:10:54] [Rank 0] step:3661/10000 train_time:840285ms step_avg:229.52ms +[2025-07-17 15:10:54] [Rank 0] step:3661/10000 train_time:840285ms step_avg:229.52ms +[2025-07-17 15:10:58] [Rank 0] step:3681/10000 train_time:844985ms step_avg:229.55ms +[2025-07-17 15:10:58] [Rank 0] step:3681/10000 train_time:844985ms step_avg:229.55ms +[2025-07-17 15:11:03] [Rank 0] step:3701/10000 train_time:849681ms step_avg:229.58ms +[2025-07-17 15:11:03] [Rank 0] step:3701/10000 train_time:849681ms step_avg:229.58ms +[2025-07-17 15:11:08] [Rank 0] step:3721/10000 train_time:854437ms step_avg:229.63ms +[2025-07-17 15:11:08] [Rank 0] step:3721/10000 train_time:854437ms step_avg:229.63ms +[2025-07-17 15:11:12] [Rank 0] step:3741/10000 train_time:859218ms step_avg:229.68ms +[2025-07-17 15:11:12] [Rank 0] step:3741/10000 train_time:859218ms step_avg:229.68ms +[2025-07-17 15:11:19] [Rank 0] PRINT: step:3750/10000 val_loss:4.3138 train_time:862096ms step_avg:229.89ms +[2025-07-17 15:11:19] [Rank 0] PRINT: step:3750/10000 val_loss:4.3138 train_time:862096ms step_avg:229.89ms +[2025-07-17 15:11:22] [Rank 0] step:3761/10000 train_time:864003ms step_avg:229.73ms +[2025-07-17 15:11:22] [Rank 0] step:3761/10000 train_time:864003ms step_avg:229.73ms +[2025-07-17 15:11:26] [Rank 0] step:3781/10000 train_time:868783ms step_avg:229.78ms +[2025-07-17 15:11:26] [Rank 0] step:3781/10000 train_time:868783ms step_avg:229.78ms +[2025-07-17 15:11:31] [Rank 0] step:3801/10000 train_time:873565ms step_avg:229.83ms +[2025-07-17 15:11:31] [Rank 0] step:3801/10000 train_time:873565ms step_avg:229.83ms +[2025-07-17 15:11:36] [Rank 0] step:3821/10000 train_time:878350ms step_avg:229.87ms +[2025-07-17 15:11:36] [Rank 0] step:3821/10000 train_time:878350ms step_avg:229.87ms +[2025-07-17 15:11:41] [Rank 0] step:3841/10000 train_time:883138ms step_avg:229.92ms +[2025-07-17 15:11:41] [Rank 0] step:3841/10000 train_time:883138ms 
step_avg:229.92ms +[2025-07-17 15:11:45] [Rank 0] step:3861/10000 train_time:887923ms step_avg:229.97ms +[2025-07-17 15:11:45] [Rank 0] step:3861/10000 train_time:887923ms step_avg:229.97ms +[2025-07-17 15:11:53] [Rank 0] PRINT: step:3875/10000 val_loss:4.2865 train_time:891996ms step_avg:230.19ms +[2025-07-17 15:11:53] [Rank 0] PRINT: step:3875/10000 val_loss:4.2865 train_time:891996ms step_avg:230.19ms +[2025-07-17 15:11:55] [Rank 0] step:3881/10000 train_time:892713ms step_avg:230.02ms +[2025-07-17 15:11:55] [Rank 0] step:3881/10000 train_time:892713ms step_avg:230.02ms +[2025-07-17 15:12:00] [Rank 0] step:3901/10000 train_time:897494ms step_avg:230.07ms +[2025-07-17 15:12:00] [Rank 0] step:3901/10000 train_time:897494ms step_avg:230.07ms +[2025-07-17 15:12:04] [Rank 0] step:3921/10000 train_time:902275ms step_avg:230.11ms +[2025-07-17 15:12:04] [Rank 0] step:3921/10000 train_time:902275ms step_avg:230.11ms +[2025-07-17 15:12:09] [Rank 0] step:3941/10000 train_time:907059ms step_avg:230.16ms +[2025-07-17 15:12:09] [Rank 0] step:3941/10000 train_time:907059ms step_avg:230.16ms +[2025-07-17 15:12:14] [Rank 0] step:3961/10000 train_time:911842ms step_avg:230.20ms +[2025-07-17 15:12:14] [Rank 0] step:3961/10000 train_time:911842ms step_avg:230.20ms +[2025-07-17 15:12:19] [Rank 0] step:3981/10000 train_time:916628ms step_avg:230.25ms +[2025-07-17 15:12:19] [Rank 0] step:3981/10000 train_time:916628ms step_avg:230.25ms +[2025-07-17 15:12:28] [Rank 0] PRINT: step:4000/10000 val_loss:4.3216 train_time:921891ms step_avg:230.47ms +[2025-07-17 15:12:28] [Rank 0] PRINT: step:4000/10000 val_loss:4.3216 train_time:921891ms step_avg:230.47ms +[2025-07-17 15:12:28] [Rank 0] step:4001/10000 train_time:921902ms step_avg:230.42ms +[2025-07-17 15:12:28] [Rank 0] step:4001/10000 train_time:921902ms step_avg:230.42ms +[2025-07-17 15:12:33] [Rank 0] step:4021/10000 train_time:926198ms step_avg:230.34ms +[2025-07-17 15:12:33] [Rank 0] step:4021/10000 train_time:926198ms 
step_avg:230.34ms +[2025-07-17 15:12:38] [Rank 0] step:4041/10000 train_time:930980ms step_avg:230.38ms +[2025-07-17 15:12:38] [Rank 0] step:4041/10000 train_time:930980ms step_avg:230.38ms +[2025-07-17 15:12:43] [Rank 0] step:4061/10000 train_time:935768ms step_avg:230.43ms +[2025-07-17 15:12:43] [Rank 0] step:4061/10000 train_time:935768ms step_avg:230.43ms +[2025-07-17 15:12:47] [Rank 0] step:4081/10000 train_time:940558ms step_avg:230.47ms +[2025-07-17 15:12:47] [Rank 0] step:4081/10000 train_time:940558ms step_avg:230.47ms +[2025-07-17 15:12:52] [Rank 0] step:4101/10000 train_time:945343ms step_avg:230.52ms +[2025-07-17 15:12:52] [Rank 0] step:4101/10000 train_time:945343ms step_avg:230.52ms +[2025-07-17 15:12:57] [Rank 0] step:4121/10000 train_time:950129ms step_avg:230.56ms +[2025-07-17 15:12:57] [Rank 0] step:4121/10000 train_time:950129ms step_avg:230.56ms +[2025-07-17 15:13:03] [Rank 0] PRINT: step:4125/10000 val_loss:4.3073 train_time:951808ms step_avg:230.74ms +[2025-07-17 15:13:03] [Rank 0] PRINT: step:4125/10000 val_loss:4.3073 train_time:951808ms step_avg:230.74ms +[2025-07-17 15:13:06] [Rank 0] step:4141/10000 train_time:954907ms step_avg:230.60ms +[2025-07-17 15:13:06] [Rank 0] step:4141/10000 train_time:954907ms step_avg:230.60ms +[2025-07-17 15:13:11] [Rank 0] step:4161/10000 train_time:959691ms step_avg:230.64ms +[2025-07-17 15:13:11] [Rank 0] step:4161/10000 train_time:959691ms step_avg:230.64ms +[2025-07-17 15:13:16] [Rank 0] step:4181/10000 train_time:964470ms step_avg:230.68ms +[2025-07-17 15:13:16] [Rank 0] step:4181/10000 train_time:964470ms step_avg:230.68ms +[2025-07-17 15:13:21] [Rank 0] step:4201/10000 train_time:969257ms step_avg:230.72ms +[2025-07-17 15:13:21] [Rank 0] step:4201/10000 train_time:969257ms step_avg:230.72ms +[2025-07-17 15:13:25] [Rank 0] step:4221/10000 train_time:974040ms step_avg:230.76ms +[2025-07-17 15:13:25] [Rank 0] step:4221/10000 train_time:974040ms step_avg:230.76ms +[2025-07-17 15:13:30] [Rank 0] 
step:4241/10000 train_time:978824ms step_avg:230.80ms +[2025-07-17 15:13:30] [Rank 0] step:4241/10000 train_time:978824ms step_avg:230.80ms +[2025-07-17 15:13:37] [Rank 0] PRINT: step:4250/10000 val_loss:4.3450 train_time:981702ms step_avg:230.99ms +[2025-07-17 15:13:37] [Rank 0] PRINT: step:4250/10000 val_loss:4.3450 train_time:981702ms step_avg:230.99ms +[2025-07-17 15:13:40] [Rank 0] step:4261/10000 train_time:983613ms step_avg:230.84ms +[2025-07-17 15:13:40] [Rank 0] step:4261/10000 train_time:983613ms step_avg:230.84ms +[2025-07-17 15:13:44] [Rank 0] step:4281/10000 train_time:988396ms step_avg:230.88ms +[2025-07-17 15:13:44] [Rank 0] step:4281/10000 train_time:988396ms step_avg:230.88ms +[2025-07-17 15:13:49] [Rank 0] step:4301/10000 train_time:993180ms step_avg:230.92ms +[2025-07-17 15:13:49] [Rank 0] step:4301/10000 train_time:993180ms step_avg:230.92ms +[2025-07-17 15:13:54] [Rank 0] step:4321/10000 train_time:997969ms step_avg:230.96ms +[2025-07-17 15:13:54] [Rank 0] step:4321/10000 train_time:997969ms step_avg:230.96ms +[2025-07-17 15:13:59] [Rank 0] step:4341/10000 train_time:1002753ms step_avg:231.00ms +[2025-07-17 15:13:59] [Rank 0] step:4341/10000 train_time:1002753ms step_avg:231.00ms +[2025-07-17 15:14:04] [Rank 0] step:4361/10000 train_time:1007538ms step_avg:231.03ms +[2025-07-17 15:14:04] [Rank 0] step:4361/10000 train_time:1007538ms step_avg:231.03ms +[2025-07-17 15:14:12] [Rank 0] PRINT: step:4375/10000 val_loss:4.3464 train_time:1011607ms step_avg:231.22ms +[2025-07-17 15:14:12] [Rank 0] PRINT: step:4375/10000 val_loss:4.3464 train_time:1011607ms step_avg:231.22ms +[2025-07-17 15:14:13] [Rank 0] step:4381/10000 train_time:1012318ms step_avg:231.07ms +[2025-07-17 15:14:13] [Rank 0] step:4381/10000 train_time:1012318ms step_avg:231.07ms +[2025-07-17 15:14:18] [Rank 0] step:4401/10000 train_time:1017101ms step_avg:231.11ms +[2025-07-17 15:14:18] [Rank 0] step:4401/10000 train_time:1017101ms step_avg:231.11ms +[2025-07-17 15:14:23] [Rank 0] 
step:4421/10000 train_time:1021886ms step_avg:231.14ms +[2025-07-17 15:14:23] [Rank 0] step:4421/10000 train_time:1021886ms step_avg:231.14ms +[2025-07-17 15:14:27] [Rank 0] step:4441/10000 train_time:1026671ms step_avg:231.18ms +[2025-07-17 15:14:27] [Rank 0] step:4441/10000 train_time:1026671ms step_avg:231.18ms +[2025-07-17 15:14:32] [Rank 0] step:4461/10000 train_time:1031468ms step_avg:231.22ms +[2025-07-17 15:14:32] [Rank 0] step:4461/10000 train_time:1031468ms step_avg:231.22ms +[2025-07-17 15:14:37] [Rank 0] step:4481/10000 train_time:1036270ms step_avg:231.26ms +[2025-07-17 15:14:37] [Rank 0] step:4481/10000 train_time:1036270ms step_avg:231.26ms +[2025-07-17 15:14:46] [Rank 0] PRINT: step:4500/10000 val_loss:4.2976 train_time:1041548ms step_avg:231.46ms +[2025-07-17 15:14:46] [Rank 0] PRINT: step:4500/10000 val_loss:4.2976 train_time:1041548ms step_avg:231.46ms +[2025-07-17 15:14:46] [Rank 0] step:4501/10000 train_time:1041559ms step_avg:231.41ms +[2025-07-17 15:14:46] [Rank 0] step:4501/10000 train_time:1041559ms step_avg:231.41ms +[2025-07-17 15:14:51] [Rank 0] step:4521/10000 train_time:1045863ms step_avg:231.33ms +[2025-07-17 15:14:51] [Rank 0] step:4521/10000 train_time:1045863ms step_avg:231.33ms +[2025-07-17 15:14:56] [Rank 0] step:4541/10000 train_time:1050658ms step_avg:231.37ms +[2025-07-17 15:14:56] [Rank 0] step:4541/10000 train_time:1050658ms step_avg:231.37ms +[2025-07-17 15:15:01] [Rank 0] step:4561/10000 train_time:1055453ms step_avg:231.41ms +[2025-07-17 15:15:01] [Rank 0] step:4561/10000 train_time:1055453ms step_avg:231.41ms +[2025-07-17 15:15:06] [Rank 0] step:4581/10000 train_time:1060271ms step_avg:231.45ms +[2025-07-17 15:15:06] [Rank 0] step:4581/10000 train_time:1060271ms step_avg:231.45ms +[2025-07-17 15:15:10] [Rank 0] step:4601/10000 train_time:1065074ms step_avg:231.49ms +[2025-07-17 15:15:10] [Rank 0] step:4601/10000 train_time:1065074ms step_avg:231.49ms +[2025-07-17 15:15:15] [Rank 0] step:4621/10000 train_time:1069871ms 
step_avg:231.52ms +[2025-07-17 15:15:15] [Rank 0] step:4621/10000 train_time:1069871ms step_avg:231.52ms +[2025-07-17 15:15:21] [Rank 0] PRINT: step:4625/10000 val_loss:4.2021 train_time:1071554ms step_avg:231.69ms +[2025-07-17 15:15:21] [Rank 0] PRINT: step:4625/10000 val_loss:4.2021 train_time:1071554ms step_avg:231.69ms +[2025-07-17 15:15:25] [Rank 0] step:4641/10000 train_time:1074665ms step_avg:231.56ms +[2025-07-17 15:15:25] [Rank 0] step:4641/10000 train_time:1074665ms step_avg:231.56ms +[2025-07-17 15:15:29] [Rank 0] step:4661/10000 train_time:1079467ms step_avg:231.60ms +[2025-07-17 15:15:29] [Rank 0] step:4661/10000 train_time:1079467ms step_avg:231.60ms +[2025-07-17 15:15:34] [Rank 0] step:4681/10000 train_time:1084262ms step_avg:231.63ms +[2025-07-17 15:15:34] [Rank 0] step:4681/10000 train_time:1084262ms step_avg:231.63ms +[2025-07-17 15:15:39] [Rank 0] step:4701/10000 train_time:1089057ms step_avg:231.67ms +[2025-07-17 15:15:39] [Rank 0] step:4701/10000 train_time:1089057ms step_avg:231.67ms +[2025-07-17 15:15:44] [Rank 0] step:4721/10000 train_time:1093852ms step_avg:231.70ms +[2025-07-17 15:15:44] [Rank 0] step:4721/10000 train_time:1093852ms step_avg:231.70ms +[2025-07-17 15:15:49] [Rank 0] step:4741/10000 train_time:1098650ms step_avg:231.73ms +[2025-07-17 15:15:49] [Rank 0] step:4741/10000 train_time:1098650ms step_avg:231.73ms +[2025-07-17 15:15:55] [Rank 0] PRINT: step:4750/10000 val_loss:4.2165 train_time:1101536ms step_avg:231.90ms +[2025-07-17 15:15:55] [Rank 0] PRINT: step:4750/10000 val_loss:4.2165 train_time:1101536ms step_avg:231.90ms +[2025-07-17 15:15:58] [Rank 0] step:4761/10000 train_time:1103453ms step_avg:231.77ms +[2025-07-17 15:15:58] [Rank 0] step:4761/10000 train_time:1103453ms step_avg:231.77ms +[2025-07-17 15:16:03] [Rank 0] step:4781/10000 train_time:1108254ms step_avg:231.80ms +[2025-07-17 15:16:03] [Rank 0] step:4781/10000 train_time:1108254ms step_avg:231.80ms +[2025-07-17 15:16:08] [Rank 0] step:4801/10000 
train_time:1113051ms step_avg:231.84ms +[2025-07-17 15:16:08] [Rank 0] step:4801/10000 train_time:1113051ms step_avg:231.84ms +[2025-07-17 15:16:12] [Rank 0] step:4821/10000 train_time:1117851ms step_avg:231.87ms +[2025-07-17 15:16:12] [Rank 0] step:4821/10000 train_time:1117851ms step_avg:231.87ms +[2025-07-17 15:16:17] [Rank 0] step:4841/10000 train_time:1122653ms step_avg:231.91ms +[2025-07-17 15:16:17] [Rank 0] step:4841/10000 train_time:1122653ms step_avg:231.91ms +[2025-07-17 15:16:22] [Rank 0] step:4861/10000 train_time:1127452ms step_avg:231.94ms +[2025-07-17 15:16:22] [Rank 0] step:4861/10000 train_time:1127452ms step_avg:231.94ms +[2025-07-17 15:16:30] [Rank 0] PRINT: step:4875/10000 val_loss:4.2939 train_time:1131541ms step_avg:232.11ms +[2025-07-17 15:16:30] [Rank 0] PRINT: step:4875/10000 val_loss:4.2939 train_time:1131541ms step_avg:232.11ms +[2025-07-17 15:16:31] [Rank 0] step:4881/10000 train_time:1132260ms step_avg:231.97ms +[2025-07-17 15:16:31] [Rank 0] step:4881/10000 train_time:1132260ms step_avg:231.97ms +[2025-07-17 15:16:36] [Rank 0] step:4901/10000 train_time:1137061ms step_avg:232.01ms +[2025-07-17 15:16:36] [Rank 0] step:4901/10000 train_time:1137061ms step_avg:232.01ms +[2025-07-17 15:16:41] [Rank 0] step:4921/10000 train_time:1141863ms step_avg:232.04ms +[2025-07-17 15:16:41] [Rank 0] step:4921/10000 train_time:1141863ms step_avg:232.04ms +[2025-07-17 15:16:46] [Rank 0] step:4941/10000 train_time:1146670ms step_avg:232.07ms +[2025-07-17 15:16:46] [Rank 0] step:4941/10000 train_time:1146670ms step_avg:232.07ms +[2025-07-17 15:16:51] [Rank 0] step:4961/10000 train_time:1151468ms step_avg:232.10ms +[2025-07-17 15:16:51] [Rank 0] step:4961/10000 train_time:1151468ms step_avg:232.10ms +[2025-07-17 15:16:55] [Rank 0] step:4981/10000 train_time:1156263ms step_avg:232.13ms +[2025-07-17 15:16:55] [Rank 0] step:4981/10000 train_time:1156263ms step_avg:232.13ms +[2025-07-17 15:17:05] [Rank 0] PRINT: step:5000/10000 val_loss:4.1999 
train_time:1161547ms step_avg:232.31ms +[2025-07-17 15:17:05] [Rank 0] PRINT: step:5000/10000 val_loss:4.1999 train_time:1161547ms step_avg:232.31ms +[2025-07-17 15:17:05] [Rank 0] step:5001/10000 train_time:1161560ms step_avg:232.27ms +[2025-07-17 15:17:05] [Rank 0] step:5001/10000 train_time:1161560ms step_avg:232.27ms +[2025-07-17 15:17:10] [Rank 0] step:5021/10000 train_time:1165865ms step_avg:232.20ms +[2025-07-17 15:17:10] [Rank 0] step:5021/10000 train_time:1165865ms step_avg:232.20ms +[2025-07-17 15:17:15] [Rank 0] step:5041/10000 train_time:1170668ms step_avg:232.23ms +[2025-07-17 15:17:15] [Rank 0] step:5041/10000 train_time:1170668ms step_avg:232.23ms +[2025-07-17 15:17:19] [Rank 0] step:5061/10000 train_time:1175468ms step_avg:232.26ms +[2025-07-17 15:17:19] [Rank 0] step:5061/10000 train_time:1175468ms step_avg:232.26ms +[2025-07-17 15:17:24] [Rank 0] step:5081/10000 train_time:1180377ms step_avg:232.31ms +[2025-07-17 15:17:24] [Rank 0] step:5081/10000 train_time:1180377ms step_avg:232.31ms +[2025-07-17 15:17:29] [Rank 0] step:5101/10000 train_time:1185089ms step_avg:232.32ms +[2025-07-17 15:17:29] [Rank 0] step:5101/10000 train_time:1185089ms step_avg:232.32ms +[2025-07-17 15:17:34] [Rank 0] step:5121/10000 train_time:1189886ms step_avg:232.35ms +[2025-07-17 15:17:34] [Rank 0] step:5121/10000 train_time:1189886ms step_avg:232.35ms +[2025-07-17 15:17:39] [Rank 0] PRINT: step:5125/10000 val_loss:4.2111 train_time:1191573ms step_avg:232.50ms +[2025-07-17 15:17:39] [Rank 0] PRINT: step:5125/10000 val_loss:4.2111 train_time:1191573ms step_avg:232.50ms +[2025-07-17 15:17:43] [Rank 0] step:5141/10000 train_time:1194685ms step_avg:232.38ms +[2025-07-17 15:17:43] [Rank 0] step:5141/10000 train_time:1194685ms step_avg:232.38ms +[2025-07-17 15:17:48] [Rank 0] step:5161/10000 train_time:1199480ms step_avg:232.41ms +[2025-07-17 15:17:48] [Rank 0] step:5161/10000 train_time:1199480ms step_avg:232.41ms +[2025-07-17 15:17:53] [Rank 0] step:5181/10000 
train_time:1204278ms step_avg:232.44ms +[2025-07-17 15:17:53] [Rank 0] step:5181/10000 train_time:1204278ms step_avg:232.44ms +[2025-07-17 15:17:58] [Rank 0] step:5201/10000 train_time:1209109ms step_avg:232.48ms +[2025-07-17 15:17:58] [Rank 0] step:5201/10000 train_time:1209109ms step_avg:232.48ms +[2025-07-17 15:18:03] [Rank 0] step:5221/10000 train_time:1213979ms step_avg:232.52ms +[2025-07-17 15:18:03] [Rank 0] step:5221/10000 train_time:1213979ms step_avg:232.52ms +[2025-07-17 15:18:07] [Rank 0] step:5241/10000 train_time:1218847ms step_avg:232.56ms +[2025-07-17 15:18:07] [Rank 0] step:5241/10000 train_time:1218847ms step_avg:232.56ms +[2025-07-17 15:18:14] [Rank 0] PRINT: step:5250/10000 val_loss:4.3035 train_time:1221769ms step_avg:232.72ms +[2025-07-17 15:18:14] [Rank 0] PRINT: step:5250/10000 val_loss:4.3035 train_time:1221769ms step_avg:232.72ms +[2025-07-17 15:18:17] [Rank 0] step:5261/10000 train_time:1223705ms step_avg:232.60ms +[2025-07-17 15:18:17] [Rank 0] step:5261/10000 train_time:1223705ms step_avg:232.60ms +[2025-07-17 15:18:22] [Rank 0] step:5281/10000 train_time:1228571ms step_avg:232.64ms +[2025-07-17 15:18:22] [Rank 0] step:5281/10000 train_time:1228571ms step_avg:232.64ms +[2025-07-17 15:18:27] [Rank 0] step:5301/10000 train_time:1233430ms step_avg:232.68ms +[2025-07-17 15:18:27] [Rank 0] step:5301/10000 train_time:1233430ms step_avg:232.68ms +[2025-07-17 15:18:32] [Rank 0] step:5321/10000 train_time:1238292ms step_avg:232.72ms +[2025-07-17 15:18:32] [Rank 0] step:5321/10000 train_time:1238292ms step_avg:232.72ms +[2025-07-17 15:18:36] [Rank 0] step:5341/10000 train_time:1243160ms step_avg:232.76ms +[2025-07-17 15:18:36] [Rank 0] step:5341/10000 train_time:1243160ms step_avg:232.76ms +[2025-07-17 15:18:41] [Rank 0] step:5361/10000 train_time:1248023ms step_avg:232.80ms +[2025-07-17 15:18:41] [Rank 0] step:5361/10000 train_time:1248023ms step_avg:232.80ms +[2025-07-17 15:18:49] [Rank 0] PRINT: step:5375/10000 val_loss:4.2468 
train_time:1252165ms step_avg:232.96ms +[2025-07-17 15:18:49] [Rank 0] PRINT: step:5375/10000 val_loss:4.2468 train_time:1252165ms step_avg:232.96ms +[2025-07-17 15:18:51] [Rank 0] step:5381/10000 train_time:1252891ms step_avg:232.84ms +[2025-07-17 15:18:51] [Rank 0] step:5381/10000 train_time:1252891ms step_avg:232.84ms +[2025-07-17 15:18:56] [Rank 0] step:5401/10000 train_time:1257763ms step_avg:232.88ms +[2025-07-17 15:18:56] [Rank 0] step:5401/10000 train_time:1257763ms step_avg:232.88ms +[2025-07-17 15:19:01] [Rank 0] step:5421/10000 train_time:1262644ms step_avg:232.92ms +[2025-07-17 15:19:01] [Rank 0] step:5421/10000 train_time:1262644ms step_avg:232.92ms +[2025-07-17 15:19:05] [Rank 0] step:5441/10000 train_time:1267514ms step_avg:232.96ms +[2025-07-17 15:19:05] [Rank 0] step:5441/10000 train_time:1267514ms step_avg:232.96ms +[2025-07-17 15:19:10] [Rank 0] step:5461/10000 train_time:1272388ms step_avg:233.00ms +[2025-07-17 15:19:10] [Rank 0] step:5461/10000 train_time:1272388ms step_avg:233.00ms +[2025-07-17 15:19:15] [Rank 0] step:5481/10000 train_time:1277264ms step_avg:233.03ms +[2025-07-17 15:19:15] [Rank 0] step:5481/10000 train_time:1277264ms step_avg:233.03ms +[2025-07-17 15:19:24] [Rank 0] PRINT: step:5500/10000 val_loss:4.2878 train_time:1282622ms step_avg:233.20ms +[2025-07-17 15:19:24] [Rank 0] PRINT: step:5500/10000 val_loss:4.2878 train_time:1282622ms step_avg:233.20ms +[2025-07-17 15:19:25] [Rank 0] step:5501/10000 train_time:1282633ms step_avg:233.16ms +[2025-07-17 15:19:25] [Rank 0] step:5501/10000 train_time:1282633ms step_avg:233.16ms +[2025-07-17 15:19:30] [Rank 0] step:5521/10000 train_time:1286992ms step_avg:233.11ms +[2025-07-17 15:19:30] [Rank 0] step:5521/10000 train_time:1286992ms step_avg:233.11ms +[2025-07-17 15:19:34] [Rank 0] step:5541/10000 train_time:1291864ms step_avg:233.15ms +[2025-07-17 15:19:34] [Rank 0] step:5541/10000 train_time:1291864ms step_avg:233.15ms +[2025-07-17 15:19:39] [Rank 0] step:5561/10000 
train_time:1296737ms step_avg:233.18ms +[2025-07-17 15:19:39] [Rank 0] step:5561/10000 train_time:1296737ms step_avg:233.18ms +[2025-07-17 15:19:44] [Rank 0] step:5581/10000 train_time:1301604ms step_avg:233.22ms +[2025-07-17 15:19:44] [Rank 0] step:5581/10000 train_time:1301604ms step_avg:233.22ms +[2025-07-17 15:19:49] [Rank 0] step:5601/10000 train_time:1306495ms step_avg:233.26ms +[2025-07-17 15:19:49] [Rank 0] step:5601/10000 train_time:1306495ms step_avg:233.26ms +[2025-07-17 15:19:54] [Rank 0] step:5621/10000 train_time:1311367ms step_avg:233.30ms +[2025-07-17 15:19:54] [Rank 0] step:5621/10000 train_time:1311367ms step_avg:233.30ms +[2025-07-17 15:20:00] [Rank 0] PRINT: step:5625/10000 val_loss:4.2248 train_time:1313074ms step_avg:233.44ms +[2025-07-17 15:20:00] [Rank 0] PRINT: step:5625/10000 val_loss:4.2248 train_time:1313074ms step_avg:233.44ms +[2025-07-17 15:20:03] [Rank 0] step:5641/10000 train_time:1316236ms step_avg:233.33ms +[2025-07-17 15:20:03] [Rank 0] step:5641/10000 train_time:1316236ms step_avg:233.33ms +[2025-07-17 15:20:08] [Rank 0] step:5661/10000 train_time:1321113ms step_avg:233.37ms +[2025-07-17 15:20:08] [Rank 0] step:5661/10000 train_time:1321113ms step_avg:233.37ms +[2025-07-17 15:20:13] [Rank 0] step:5681/10000 train_time:1325981ms step_avg:233.41ms +[2025-07-17 15:20:13] [Rank 0] step:5681/10000 train_time:1325981ms step_avg:233.41ms +[2025-07-17 15:20:18] [Rank 0] step:5701/10000 train_time:1330851ms step_avg:233.44ms +[2025-07-17 15:20:18] [Rank 0] step:5701/10000 train_time:1330851ms step_avg:233.44ms +[2025-07-17 15:20:23] [Rank 0] step:5721/10000 train_time:1335719ms step_avg:233.48ms +[2025-07-17 15:20:23] [Rank 0] step:5721/10000 train_time:1335719ms step_avg:233.48ms +[2025-07-17 15:20:28] [Rank 0] step:5741/10000 train_time:1340594ms step_avg:233.51ms +[2025-07-17 15:20:28] [Rank 0] step:5741/10000 train_time:1340594ms step_avg:233.51ms +[2025-07-17 15:20:35] [Rank 0] PRINT: step:5750/10000 val_loss:4.1902 
train_time:1343523ms step_avg:233.66ms +[2025-07-17 15:20:35] [Rank 0] PRINT: step:5750/10000 val_loss:4.1902 train_time:1343523ms step_avg:233.66ms +[2025-07-17 15:20:37] [Rank 0] step:5761/10000 train_time:1345466ms step_avg:233.55ms +[2025-07-17 15:20:37] [Rank 0] step:5761/10000 train_time:1345466ms step_avg:233.55ms +[2025-07-17 15:20:42] [Rank 0] step:5781/10000 train_time:1350437ms step_avg:233.60ms +[2025-07-17 15:20:42] [Rank 0] step:5781/10000 train_time:1350437ms step_avg:233.60ms +[2025-07-17 15:20:47] [Rank 0] step:5801/10000 train_time:1355305ms step_avg:233.63ms +[2025-07-17 15:20:47] [Rank 0] step:5801/10000 train_time:1355305ms step_avg:233.63ms +[2025-07-17 15:20:52] [Rank 0] step:5821/10000 train_time:1360175ms step_avg:233.67ms +[2025-07-17 15:20:52] [Rank 0] step:5821/10000 train_time:1360175ms step_avg:233.67ms +[2025-07-17 15:20:57] [Rank 0] step:5841/10000 train_time:1365047ms step_avg:233.70ms +[2025-07-17 15:20:57] [Rank 0] step:5841/10000 train_time:1365047ms step_avg:233.70ms +[2025-07-17 15:21:02] [Rank 0] step:5861/10000 train_time:1369919ms step_avg:233.73ms +[2025-07-17 15:21:02] [Rank 0] step:5861/10000 train_time:1369919ms step_avg:233.73ms +[2025-07-17 15:21:10] [Rank 0] PRINT: step:5875/10000 val_loss:4.1990 train_time:1374051ms step_avg:233.88ms +[2025-07-17 15:21:10] [Rank 0] PRINT: step:5875/10000 val_loss:4.1990 train_time:1374051ms step_avg:233.88ms +[2025-07-17 15:21:11] [Rank 0] step:5881/10000 train_time:1374778ms step_avg:233.77ms +[2025-07-17 15:21:11] [Rank 0] step:5881/10000 train_time:1374778ms step_avg:233.77ms +[2025-07-17 15:21:16] [Rank 0] step:5901/10000 train_time:1379658ms step_avg:233.80ms +[2025-07-17 15:21:16] [Rank 0] step:5901/10000 train_time:1379658ms step_avg:233.80ms +[2025-07-17 15:21:21] [Rank 0] step:5921/10000 train_time:1384523ms step_avg:233.83ms +[2025-07-17 15:21:21] [Rank 0] step:5921/10000 train_time:1384523ms step_avg:233.83ms +[2025-07-17 15:21:26] [Rank 0] step:5941/10000 
train_time:1389405ms step_avg:233.87ms +[2025-07-17 15:21:26] [Rank 0] step:5941/10000 train_time:1389405ms step_avg:233.87ms +[2025-07-17 15:21:31] [Rank 0] step:5961/10000 train_time:1394287ms step_avg:233.90ms +[2025-07-17 15:21:31] [Rank 0] step:5961/10000 train_time:1394287ms step_avg:233.90ms +[2025-07-17 15:21:36] [Rank 0] step:5981/10000 train_time:1399170ms step_avg:233.94ms +[2025-07-17 15:21:36] [Rank 0] step:5981/10000 train_time:1399170ms step_avg:233.94ms +[2025-07-17 15:21:45] [Rank 0] PRINT: step:6000/10000 val_loss:4.2519 train_time:1404546ms step_avg:234.09ms +[2025-07-17 15:21:45] [Rank 0] PRINT: step:6000/10000 val_loss:4.2519 train_time:1404546ms step_avg:234.09ms +[2025-07-17 15:21:45] [Rank 0] step:6001/10000 train_time:1404558ms step_avg:234.05ms +[2025-07-17 15:21:45] [Rank 0] step:6001/10000 train_time:1404558ms step_avg:234.05ms +[2025-07-17 15:21:50] [Rank 0] step:6021/10000 train_time:1408937ms step_avg:234.00ms +[2025-07-17 15:21:50] [Rank 0] step:6021/10000 train_time:1408937ms step_avg:234.00ms +[2025-07-17 15:21:55] [Rank 0] step:6041/10000 train_time:1413816ms step_avg:234.04ms +[2025-07-17 15:21:55] [Rank 0] step:6041/10000 train_time:1413816ms step_avg:234.04ms +[2025-07-17 15:22:00] [Rank 0] step:6061/10000 train_time:1418695ms step_avg:234.07ms +[2025-07-17 15:22:00] [Rank 0] step:6061/10000 train_time:1418695ms step_avg:234.07ms +[2025-07-17 15:22:05] [Rank 0] step:6081/10000 train_time:1423578ms step_avg:234.10ms +[2025-07-17 15:22:05] [Rank 0] step:6081/10000 train_time:1423578ms step_avg:234.10ms +[2025-07-17 15:22:10] [Rank 0] step:6101/10000 train_time:1428487ms step_avg:234.14ms +[2025-07-17 15:22:10] [Rank 0] step:6101/10000 train_time:1428487ms step_avg:234.14ms +[2025-07-17 15:22:15] [Rank 0] step:6121/10000 train_time:1433371ms step_avg:234.17ms +[2025-07-17 15:22:15] [Rank 0] step:6121/10000 train_time:1433371ms step_avg:234.17ms +[2025-07-17 15:22:20] [Rank 0] PRINT: step:6125/10000 val_loss:4.1283 
train_time:1435082ms step_avg:234.30ms +[2025-07-17 15:22:20] [Rank 0] PRINT: step:6125/10000 val_loss:4.1283 train_time:1435082ms step_avg:234.30ms +[2025-07-17 15:22:24] [Rank 0] step:6141/10000 train_time:1438253ms step_avg:234.20ms +[2025-07-17 15:22:24] [Rank 0] step:6141/10000 train_time:1438253ms step_avg:234.20ms +[2025-07-17 15:22:29] [Rank 0] step:6161/10000 train_time:1443124ms step_avg:234.24ms +[2025-07-17 15:22:29] [Rank 0] step:6161/10000 train_time:1443124ms step_avg:234.24ms +[2025-07-17 15:22:34] [Rank 0] step:6181/10000 train_time:1448004ms step_avg:234.27ms +[2025-07-17 15:22:34] [Rank 0] step:6181/10000 train_time:1448004ms step_avg:234.27ms +[2025-07-17 15:22:39] [Rank 0] step:6201/10000 train_time:1452886ms step_avg:234.30ms +[2025-07-17 15:22:39] [Rank 0] step:6201/10000 train_time:1452886ms step_avg:234.30ms +[2025-07-17 15:22:43] [Rank 0] step:6221/10000 train_time:1457770ms step_avg:234.33ms +[2025-07-17 15:22:43] [Rank 0] step:6221/10000 train_time:1457770ms step_avg:234.33ms +[2025-07-17 15:22:48] [Rank 0] step:6241/10000 train_time:1462648ms step_avg:234.36ms +[2025-07-17 15:22:48] [Rank 0] step:6241/10000 train_time:1462648ms step_avg:234.36ms +[2025-07-17 15:22:55] [Rank 0] PRINT: step:6250/10000 val_loss:4.2021 train_time:1465577ms step_avg:234.49ms +[2025-07-17 15:22:55] [Rank 0] PRINT: step:6250/10000 val_loss:4.2021 train_time:1465577ms step_avg:234.49ms +[2025-07-17 15:22:57] [Rank 0] step:6261/10000 train_time:1467521ms step_avg:234.39ms +[2025-07-17 15:22:57] [Rank 0] step:6261/10000 train_time:1467521ms step_avg:234.39ms +[2025-07-17 15:23:02] [Rank 0] step:6281/10000 train_time:1472402ms step_avg:234.42ms +[2025-07-17 15:23:02] [Rank 0] step:6281/10000 train_time:1472402ms step_avg:234.42ms +[2025-07-17 15:23:07] [Rank 0] step:6301/10000 train_time:1477274ms step_avg:234.45ms +[2025-07-17 15:23:07] [Rank 0] step:6301/10000 train_time:1477274ms step_avg:234.45ms +[2025-07-17 15:23:12] [Rank 0] step:6321/10000 
train_time:1482145ms step_avg:234.48ms +[2025-07-17 15:23:12] [Rank 0] step:6321/10000 train_time:1482145ms step_avg:234.48ms +[2025-07-17 15:23:17] [Rank 0] step:6341/10000 train_time:1487027ms step_avg:234.51ms +[2025-07-17 15:23:17] [Rank 0] step:6341/10000 train_time:1487027ms step_avg:234.51ms +[2025-07-17 15:23:22] [Rank 0] step:6361/10000 train_time:1491901ms step_avg:234.54ms +[2025-07-17 15:23:22] [Rank 0] step:6361/10000 train_time:1491901ms step_avg:234.54ms +[2025-07-17 15:23:30] [Rank 0] PRINT: step:6375/10000 val_loss:4.1871 train_time:1496039ms step_avg:234.67ms +[2025-07-17 15:23:30] [Rank 0] PRINT: step:6375/10000 val_loss:4.1871 train_time:1496039ms step_avg:234.67ms +[2025-07-17 15:23:31] [Rank 0] step:6381/10000 train_time:1496764ms step_avg:234.57ms +[2025-07-17 15:23:31] [Rank 0] step:6381/10000 train_time:1496764ms step_avg:234.57ms +[2025-07-17 15:23:36] [Rank 0] step:6401/10000 train_time:1501634ms step_avg:234.59ms +[2025-07-17 15:23:36] [Rank 0] step:6401/10000 train_time:1501634ms step_avg:234.59ms +[2025-07-17 15:23:41] [Rank 0] step:6421/10000 train_time:1506499ms step_avg:234.62ms +[2025-07-17 15:23:41] [Rank 0] step:6421/10000 train_time:1506499ms step_avg:234.62ms +[2025-07-17 15:23:46] [Rank 0] step:6441/10000 train_time:1511370ms step_avg:234.65ms +[2025-07-17 15:23:46] [Rank 0] step:6441/10000 train_time:1511370ms step_avg:234.65ms +[2025-07-17 15:23:51] [Rank 0] step:6461/10000 train_time:1516252ms step_avg:234.68ms +[2025-07-17 15:23:51] [Rank 0] step:6461/10000 train_time:1516252ms step_avg:234.68ms +[2025-07-17 15:23:56] [Rank 0] step:6481/10000 train_time:1521129ms step_avg:234.71ms +[2025-07-17 15:23:56] [Rank 0] step:6481/10000 train_time:1521129ms step_avg:234.71ms +[2025-07-17 15:24:05] [Rank 0] PRINT: step:6500/10000 val_loss:4.1654 train_time:1526496ms step_avg:234.85ms +[2025-07-17 15:24:05] [Rank 0] PRINT: step:6500/10000 val_loss:4.1654 train_time:1526496ms step_avg:234.85ms +[2025-07-17 15:24:05] [Rank 0] 
step:6501/10000 train_time:1526507ms step_avg:234.81ms +[2025-07-17 15:24:05] [Rank 0] step:6501/10000 train_time:1526507ms step_avg:234.81ms +[2025-07-17 15:24:10] [Rank 0] step:6521/10000 train_time:1530878ms step_avg:234.76ms +[2025-07-17 15:24:10] [Rank 0] step:6521/10000 train_time:1530878ms step_avg:234.76ms +[2025-07-17 15:24:15] [Rank 0] step:6541/10000 train_time:1535757ms step_avg:234.79ms +[2025-07-17 15:24:15] [Rank 0] step:6541/10000 train_time:1535757ms step_avg:234.79ms +[2025-07-17 15:24:20] [Rank 0] step:6561/10000 train_time:1540642ms step_avg:234.82ms +[2025-07-17 15:24:20] [Rank 0] step:6561/10000 train_time:1540642ms step_avg:234.82ms +[2025-07-17 15:24:25] [Rank 0] step:6581/10000 train_time:1545528ms step_avg:234.85ms +[2025-07-17 15:24:25] [Rank 0] step:6581/10000 train_time:1545528ms step_avg:234.85ms +[2025-07-17 15:24:30] [Rank 0] step:6601/10000 train_time:1550413ms step_avg:234.88ms +[2025-07-17 15:24:30] [Rank 0] step:6601/10000 train_time:1550413ms step_avg:234.88ms +[2025-07-17 15:24:35] [Rank 0] step:6621/10000 train_time:1555293ms step_avg:234.90ms +[2025-07-17 15:24:35] [Rank 0] step:6621/10000 train_time:1555293ms step_avg:234.90ms +[2025-07-17 15:24:40] [Rank 0] PRINT: step:6625/10000 val_loss:4.1065 train_time:1557004ms step_avg:235.02ms +[2025-07-17 15:24:40] [Rank 0] PRINT: step:6625/10000 val_loss:4.1065 train_time:1557004ms step_avg:235.02ms +[2025-07-17 15:24:44] [Rank 0] step:6641/10000 train_time:1560166ms step_avg:234.93ms +[2025-07-17 15:24:44] [Rank 0] step:6641/10000 train_time:1560166ms step_avg:234.93ms +[2025-07-17 15:24:49] [Rank 0] step:6661/10000 train_time:1565049ms step_avg:234.96ms +[2025-07-17 15:24:49] [Rank 0] step:6661/10000 train_time:1565049ms step_avg:234.96ms +[2025-07-17 15:24:54] [Rank 0] step:6681/10000 train_time:1569972ms step_avg:234.99ms +[2025-07-17 15:24:54] [Rank 0] step:6681/10000 train_time:1569972ms step_avg:234.99ms +[2025-07-17 15:24:58] [Rank 0] step:6701/10000 train_time:1574921ms 
step_avg:235.03ms +[2025-07-17 15:24:58] [Rank 0] step:6701/10000 train_time:1574921ms step_avg:235.03ms +[2025-07-17 15:25:03] [Rank 0] step:6721/10000 train_time:1579887ms step_avg:235.07ms +[2025-07-17 15:25:03] [Rank 0] step:6721/10000 train_time:1579887ms step_avg:235.07ms +[2025-07-17 15:25:08] [Rank 0] step:6741/10000 train_time:1584842ms step_avg:235.10ms +[2025-07-17 15:25:08] [Rank 0] step:6741/10000 train_time:1584842ms step_avg:235.10ms +[2025-07-17 15:25:15] [Rank 0] PRINT: step:6750/10000 val_loss:4.1325 train_time:1587820ms step_avg:235.23ms +[2025-07-17 15:25:15] [Rank 0] PRINT: step:6750/10000 val_loss:4.1325 train_time:1587820ms step_avg:235.23ms +[2025-07-17 15:25:18] [Rank 0] step:6761/10000 train_time:1589794ms step_avg:235.14ms +[2025-07-17 15:25:18] [Rank 0] step:6761/10000 train_time:1589794ms step_avg:235.14ms +[2025-07-17 15:25:23] [Rank 0] step:6781/10000 train_time:1594742ms step_avg:235.18ms +[2025-07-17 15:25:23] [Rank 0] step:6781/10000 train_time:1594742ms step_avg:235.18ms +[2025-07-17 15:25:28] [Rank 0] step:6801/10000 train_time:1599699ms step_avg:235.22ms +[2025-07-17 15:25:28] [Rank 0] step:6801/10000 train_time:1599699ms step_avg:235.22ms +[2025-07-17 15:25:33] [Rank 0] step:6821/10000 train_time:1604651ms step_avg:235.25ms +[2025-07-17 15:25:33] [Rank 0] step:6821/10000 train_time:1604651ms step_avg:235.25ms +[2025-07-17 15:25:38] [Rank 0] step:6841/10000 train_time:1609602ms step_avg:235.29ms +[2025-07-17 15:25:38] [Rank 0] step:6841/10000 train_time:1609602ms step_avg:235.29ms +[2025-07-17 15:25:43] [Rank 0] step:6861/10000 train_time:1614546ms step_avg:235.32ms +[2025-07-17 15:25:43] [Rank 0] step:6861/10000 train_time:1614546ms step_avg:235.32ms +[2025-07-17 15:25:51] [Rank 0] PRINT: step:6875/10000 val_loss:4.1315 train_time:1618747ms step_avg:235.45ms +[2025-07-17 15:25:51] [Rank 0] PRINT: step:6875/10000 val_loss:4.1315 train_time:1618747ms step_avg:235.45ms +[2025-07-17 15:25:52] [Rank 0] step:6881/10000 
train_time:1619485ms step_avg:235.36ms +[2025-07-17 15:25:52] [Rank 0] step:6881/10000 train_time:1619485ms step_avg:235.36ms +[2025-07-17 15:25:57] [Rank 0] step:6901/10000 train_time:1624430ms step_avg:235.39ms +[2025-07-17 15:25:57] [Rank 0] step:6901/10000 train_time:1624430ms step_avg:235.39ms +[2025-07-17 15:26:02] [Rank 0] step:6921/10000 train_time:1629374ms step_avg:235.42ms +[2025-07-17 15:26:02] [Rank 0] step:6921/10000 train_time:1629374ms step_avg:235.42ms +[2025-07-17 15:26:07] [Rank 0] step:6941/10000 train_time:1634324ms step_avg:235.46ms +[2025-07-17 15:26:07] [Rank 0] step:6941/10000 train_time:1634324ms step_avg:235.46ms +[2025-07-17 15:26:12] [Rank 0] step:6961/10000 train_time:1639274ms step_avg:235.49ms +[2025-07-17 15:26:12] [Rank 0] step:6961/10000 train_time:1639274ms step_avg:235.49ms +[2025-07-17 15:26:17] [Rank 0] step:6981/10000 train_time:1644227ms step_avg:235.53ms +[2025-07-17 15:26:17] [Rank 0] step:6981/10000 train_time:1644227ms step_avg:235.53ms +[2025-07-17 15:26:26] [Rank 0] PRINT: step:7000/10000 val_loss:4.2442 train_time:1649669ms step_avg:235.67ms +[2025-07-17 15:26:26] [Rank 0] PRINT: step:7000/10000 val_loss:4.2442 train_time:1649669ms step_avg:235.67ms +[2025-07-17 15:26:27] [Rank 0] step:7001/10000 train_time:1649680ms step_avg:235.63ms +[2025-07-17 15:26:27] [Rank 0] step:7001/10000 train_time:1649680ms step_avg:235.63ms +[2025-07-17 15:26:32] [Rank 0] step:7021/10000 train_time:1654117ms step_avg:235.60ms +[2025-07-17 15:26:32] [Rank 0] step:7021/10000 train_time:1654117ms step_avg:235.60ms +[2025-07-17 15:26:36] [Rank 0] step:7041/10000 train_time:1659063ms step_avg:235.63ms +[2025-07-17 15:26:36] [Rank 0] step:7041/10000 train_time:1659063ms step_avg:235.63ms +[2025-07-17 15:26:41] [Rank 0] step:7061/10000 train_time:1664007ms step_avg:235.66ms +[2025-07-17 15:26:41] [Rank 0] step:7061/10000 train_time:1664007ms step_avg:235.66ms +[2025-07-17 15:26:46] [Rank 0] step:7081/10000 train_time:1668951ms step_avg:235.69ms 
+[2025-07-17 15:26:46] [Rank 0] step:7081/10000 train_time:1668951ms step_avg:235.69ms +[2025-07-17 15:26:51] [Rank 0] step:7101/10000 train_time:1673892ms step_avg:235.73ms +[2025-07-17 15:26:51] [Rank 0] step:7101/10000 train_time:1673892ms step_avg:235.73ms +[2025-07-17 15:26:56] [Rank 0] step:7121/10000 train_time:1678837ms step_avg:235.76ms +[2025-07-17 15:26:56] [Rank 0] step:7121/10000 train_time:1678837ms step_avg:235.76ms +[2025-07-17 15:27:02] [Rank 0] PRINT: step:7125/10000 val_loss:4.1303 train_time:1680569ms step_avg:235.87ms +[2025-07-17 15:27:02] [Rank 0] PRINT: step:7125/10000 val_loss:4.1303 train_time:1680569ms step_avg:235.87ms +[2025-07-17 15:27:06] [Rank 0] step:7141/10000 train_time:1683781ms step_avg:235.79ms +[2025-07-17 15:27:06] [Rank 0] step:7141/10000 train_time:1683781ms step_avg:235.79ms +[2025-07-17 15:27:11] [Rank 0] step:7161/10000 train_time:1688725ms step_avg:235.82ms +[2025-07-17 15:27:11] [Rank 0] step:7161/10000 train_time:1688725ms step_avg:235.82ms +[2025-07-17 15:27:16] [Rank 0] step:7181/10000 train_time:1693666ms step_avg:235.85ms +[2025-07-17 15:27:16] [Rank 0] step:7181/10000 train_time:1693666ms step_avg:235.85ms +[2025-07-17 15:27:21] [Rank 0] step:7201/10000 train_time:1698620ms step_avg:235.89ms +[2025-07-17 15:27:21] [Rank 0] step:7201/10000 train_time:1698620ms step_avg:235.89ms +[2025-07-17 15:27:26] [Rank 0] step:7221/10000 train_time:1703564ms step_avg:235.92ms +[2025-07-17 15:27:26] [Rank 0] step:7221/10000 train_time:1703564ms step_avg:235.92ms +[2025-07-17 15:27:31] [Rank 0] step:7241/10000 train_time:1708503ms step_avg:235.95ms +[2025-07-17 15:27:31] [Rank 0] step:7241/10000 train_time:1708503ms step_avg:235.95ms +[2025-07-17 15:27:38] [Rank 0] PRINT: step:7250/10000 val_loss:4.2641 train_time:1711477ms step_avg:236.07ms +[2025-07-17 15:27:38] [Rank 0] PRINT: step:7250/10000 val_loss:4.2641 train_time:1711477ms step_avg:236.07ms +[2025-07-17 15:27:40] [Rank 0] step:7261/10000 train_time:1713441ms 
step_avg:235.98ms +[2025-07-17 15:27:40] [Rank 0] step:7261/10000 train_time:1713441ms step_avg:235.98ms +[2025-07-17 15:27:45] [Rank 0] step:7281/10000 train_time:1718479ms step_avg:236.02ms +[2025-07-17 15:27:45] [Rank 0] step:7281/10000 train_time:1718479ms step_avg:236.02ms +[2025-07-17 15:27:50] [Rank 0] step:7301/10000 train_time:1723421ms step_avg:236.05ms +[2025-07-17 15:27:50] [Rank 0] step:7301/10000 train_time:1723421ms step_avg:236.05ms +[2025-07-17 15:27:55] [Rank 0] step:7321/10000 train_time:1728378ms step_avg:236.08ms +[2025-07-17 15:27:55] [Rank 0] step:7321/10000 train_time:1728378ms step_avg:236.08ms +[2025-07-17 15:28:00] [Rank 0] step:7341/10000 train_time:1733325ms step_avg:236.12ms +[2025-07-17 15:28:00] [Rank 0] step:7341/10000 train_time:1733325ms step_avg:236.12ms +[2025-07-17 15:28:05] [Rank 0] step:7361/10000 train_time:1738281ms step_avg:236.15ms +[2025-07-17 15:28:05] [Rank 0] step:7361/10000 train_time:1738281ms step_avg:236.15ms +[2025-07-17 15:28:13] [Rank 0] PRINT: step:7375/10000 val_loss:4.1369 train_time:1742489ms step_avg:236.27ms +[2025-07-17 15:28:13] [Rank 0] PRINT: step:7375/10000 val_loss:4.1369 train_time:1742489ms step_avg:236.27ms +[2025-07-17 15:28:14] [Rank 0] step:7381/10000 train_time:1743229ms step_avg:236.18ms +[2025-07-17 15:28:14] [Rank 0] step:7381/10000 train_time:1743229ms step_avg:236.18ms +[2025-07-17 15:28:19] [Rank 0] step:7401/10000 train_time:1748181ms step_avg:236.21ms +[2025-07-17 15:28:19] [Rank 0] step:7401/10000 train_time:1748181ms step_avg:236.21ms +[2025-07-17 15:28:24] [Rank 0] step:7421/10000 train_time:1753129ms step_avg:236.24ms +[2025-07-17 15:28:24] [Rank 0] step:7421/10000 train_time:1753129ms step_avg:236.24ms +[2025-07-17 15:28:29] [Rank 0] step:7441/10000 train_time:1758093ms step_avg:236.27ms +[2025-07-17 15:28:29] [Rank 0] step:7441/10000 train_time:1758093ms step_avg:236.27ms +[2025-07-17 15:28:34] [Rank 0] step:7461/10000 train_time:1763039ms step_avg:236.30ms +[2025-07-17 
15:28:34] [Rank 0] step:7461/10000 train_time:1763039ms step_avg:236.30ms +[2025-07-17 15:28:39] [Rank 0] step:7481/10000 train_time:1768003ms step_avg:236.33ms +[2025-07-17 15:28:39] [Rank 0] step:7481/10000 train_time:1768003ms step_avg:236.33ms +[2025-07-17 15:28:48] [Rank 0] PRINT: step:7500/10000 val_loss:4.1835 train_time:1773467ms step_avg:236.46ms +[2025-07-17 15:28:48] [Rank 0] PRINT: step:7500/10000 val_loss:4.1835 train_time:1773467ms step_avg:236.46ms +[2025-07-17 15:28:49] [Rank 0] step:7501/10000 train_time:1773480ms step_avg:236.43ms +[2025-07-17 15:28:49] [Rank 0] step:7501/10000 train_time:1773480ms step_avg:236.43ms +[2025-07-17 15:28:54] [Rank 0] step:7521/10000 train_time:1777933ms step_avg:236.40ms +[2025-07-17 15:28:54] [Rank 0] step:7521/10000 train_time:1777933ms step_avg:236.40ms +[2025-07-17 15:28:58] [Rank 0] step:7541/10000 train_time:1782888ms step_avg:236.43ms +[2025-07-17 15:28:58] [Rank 0] step:7541/10000 train_time:1782888ms step_avg:236.43ms +[2025-07-17 15:29:03] [Rank 0] step:7561/10000 train_time:1787847ms step_avg:236.46ms +[2025-07-17 15:29:03] [Rank 0] step:7561/10000 train_time:1787847ms step_avg:236.46ms +[2025-07-17 15:29:08] [Rank 0] step:7581/10000 train_time:1792812ms step_avg:236.49ms +[2025-07-17 15:29:08] [Rank 0] step:7581/10000 train_time:1792812ms step_avg:236.49ms +[2025-07-17 15:29:13] [Rank 0] step:7601/10000 train_time:1797780ms step_avg:236.52ms +[2025-07-17 15:29:13] [Rank 0] step:7601/10000 train_time:1797780ms step_avg:236.52ms +[2025-07-17 15:29:18] [Rank 0] step:7621/10000 train_time:1803274ms step_avg:236.62ms +[2025-07-17 15:29:18] [Rank 0] step:7621/10000 train_time:1803274ms step_avg:236.62ms +[2025-07-17 15:29:24] [Rank 0] PRINT: step:7625/10000 val_loss:4.1085 train_time:1804506ms step_avg:236.66ms +[2025-07-17 15:29:24] [Rank 0] PRINT: step:7625/10000 val_loss:4.1085 train_time:1804506ms step_avg:236.66ms +[2025-07-17 15:29:28] [Rank 0] step:7641/10000 train_time:1807724ms step_avg:236.58ms 
+[2025-07-17 15:29:28] [Rank 0] step:7641/10000 train_time:1807724ms step_avg:236.58ms +[2025-07-17 15:29:33] [Rank 0] step:7661/10000 train_time:1812696ms step_avg:236.61ms +[2025-07-17 15:29:33] [Rank 0] step:7661/10000 train_time:1812696ms step_avg:236.61ms +[2025-07-17 15:29:38] [Rank 0] step:7681/10000 train_time:1817679ms step_avg:236.65ms +[2025-07-17 15:29:38] [Rank 0] step:7681/10000 train_time:1817679ms step_avg:236.65ms +[2025-07-17 15:29:43] [Rank 0] step:7701/10000 train_time:1822642ms step_avg:236.68ms +[2025-07-17 15:29:43] [Rank 0] step:7701/10000 train_time:1822642ms step_avg:236.68ms +[2025-07-17 15:29:48] [Rank 0] step:7721/10000 train_time:1827612ms step_avg:236.71ms +[2025-07-17 15:29:48] [Rank 0] step:7721/10000 train_time:1827612ms step_avg:236.71ms +[2025-07-17 15:29:53] [Rank 0] step:7741/10000 train_time:1832573ms step_avg:236.74ms +[2025-07-17 15:29:53] [Rank 0] step:7741/10000 train_time:1832573ms step_avg:236.74ms +[2025-07-17 15:30:00] [Rank 0] PRINT: step:7750/10000 val_loss:4.1661 train_time:1835571ms step_avg:236.85ms +[2025-07-17 15:30:00] [Rank 0] PRINT: step:7750/10000 val_loss:4.1661 train_time:1835571ms step_avg:236.85ms +[2025-07-17 15:30:03] [Rank 0] step:7761/10000 train_time:1837554ms step_avg:236.77ms +[2025-07-17 15:30:03] [Rank 0] step:7761/10000 train_time:1837554ms step_avg:236.77ms +[2025-07-17 15:30:08] [Rank 0] step:7781/10000 train_time:1842527ms step_avg:236.80ms +[2025-07-17 15:30:08] [Rank 0] step:7781/10000 train_time:1842527ms step_avg:236.80ms +[2025-07-17 15:30:13] [Rank 0] step:7801/10000 train_time:1847491ms step_avg:236.83ms +[2025-07-17 15:30:13] [Rank 0] step:7801/10000 train_time:1847491ms step_avg:236.83ms +[2025-07-17 15:30:18] [Rank 0] step:7821/10000 train_time:1852471ms step_avg:236.86ms +[2025-07-17 15:30:18] [Rank 0] step:7821/10000 train_time:1852471ms step_avg:236.86ms +[2025-07-17 15:30:23] [Rank 0] step:7841/10000 train_time:1857444ms step_avg:236.89ms +[2025-07-17 15:30:23] [Rank 0] 
step:7841/10000 train_time:1857444ms step_avg:236.89ms +[2025-07-17 15:30:28] [Rank 0] step:7861/10000 train_time:1862400ms step_avg:236.92ms +[2025-07-17 15:30:28] [Rank 0] step:7861/10000 train_time:1862400ms step_avg:236.92ms +[2025-07-17 15:30:35] [Rank 0] PRINT: step:7875/10000 val_loss:4.1752 train_time:1866617ms step_avg:237.03ms +[2025-07-17 15:30:35] [Rank 0] PRINT: step:7875/10000 val_loss:4.1752 train_time:1866617ms step_avg:237.03ms +[2025-07-17 15:30:37] [Rank 0] step:7881/10000 train_time:1867354ms step_avg:236.94ms +[2025-07-17 15:30:37] [Rank 0] step:7881/10000 train_time:1867354ms step_avg:236.94ms +[2025-07-17 15:30:42] [Rank 0] step:7901/10000 train_time:1872323ms step_avg:236.97ms +[2025-07-17 15:30:42] [Rank 0] step:7901/10000 train_time:1872323ms step_avg:236.97ms +[2025-07-17 15:30:47] [Rank 0] step:7921/10000 train_time:1877295ms step_avg:237.00ms +[2025-07-17 15:30:47] [Rank 0] step:7921/10000 train_time:1877295ms step_avg:237.00ms +[2025-07-17 15:30:52] [Rank 0] step:7941/10000 train_time:1882272ms step_avg:237.03ms +[2025-07-17 15:30:52] [Rank 0] step:7941/10000 train_time:1882272ms step_avg:237.03ms +[2025-07-17 15:30:57] [Rank 0] step:7961/10000 train_time:1887262ms step_avg:237.06ms +[2025-07-17 15:30:57] [Rank 0] step:7961/10000 train_time:1887262ms step_avg:237.06ms +[2025-07-17 15:31:02] [Rank 0] step:7981/10000 train_time:1892231ms step_avg:237.09ms +[2025-07-17 15:31:02] [Rank 0] step:7981/10000 train_time:1892231ms step_avg:237.09ms +[2025-07-17 15:31:11] [Rank 0] PRINT: step:8000/10000 val_loss:4.0906 train_time:1897716ms step_avg:237.21ms +[2025-07-17 15:31:11] [Rank 0] PRINT: step:8000/10000 val_loss:4.0906 train_time:1897716ms step_avg:237.21ms +[2025-07-17 15:31:11] [Rank 0] step:8001/10000 train_time:1897727ms step_avg:237.19ms +[2025-07-17 15:31:11] [Rank 0] step:8001/10000 train_time:1897727ms step_avg:237.19ms +[2025-07-17 15:31:16] [Rank 0] step:8021/10000 train_time:1902189ms step_avg:237.15ms +[2025-07-17 15:31:16] 
[Rank 0] step:8021/10000 train_time:1902189ms step_avg:237.15ms +[2025-07-17 15:31:21] [Rank 0] step:8041/10000 train_time:1907173ms step_avg:237.18ms +[2025-07-17 15:31:21] [Rank 0] step:8041/10000 train_time:1907173ms step_avg:237.18ms +[2025-07-17 15:31:26] [Rank 0] step:8061/10000 train_time:1912141ms step_avg:237.21ms +[2025-07-17 15:31:26] [Rank 0] step:8061/10000 train_time:1912141ms step_avg:237.21ms +[2025-07-17 15:31:31] [Rank 0] step:8081/10000 train_time:1917110ms step_avg:237.24ms +[2025-07-17 15:31:31] [Rank 0] step:8081/10000 train_time:1917110ms step_avg:237.24ms +[2025-07-17 15:31:36] [Rank 0] step:8101/10000 train_time:1922075ms step_avg:237.26ms +[2025-07-17 15:31:36] [Rank 0] step:8101/10000 train_time:1922075ms step_avg:237.26ms +[2025-07-17 15:31:41] [Rank 0] step:8121/10000 train_time:1927044ms step_avg:237.29ms +[2025-07-17 15:31:41] [Rank 0] step:8121/10000 train_time:1927044ms step_avg:237.29ms +[2025-07-17 15:31:47] [Rank 0] PRINT: step:8125/10000 val_loss:4.2562 train_time:1928784ms step_avg:237.39ms +[2025-07-17 15:31:47] [Rank 0] PRINT: step:8125/10000 val_loss:4.2562 train_time:1928784ms step_avg:237.39ms +[2025-07-17 15:31:51] [Rank 0] step:8141/10000 train_time:1932008ms step_avg:237.32ms +[2025-07-17 15:31:51] [Rank 0] step:8141/10000 train_time:1932008ms step_avg:237.32ms +[2025-07-17 15:31:56] [Rank 0] step:8161/10000 train_time:1937006ms step_avg:237.35ms +[2025-07-17 15:31:56] [Rank 0] step:8161/10000 train_time:1937006ms step_avg:237.35ms +[2025-07-17 15:32:01] [Rank 0] step:8181/10000 train_time:1942037ms step_avg:237.38ms +[2025-07-17 15:32:01] [Rank 0] step:8181/10000 train_time:1942037ms step_avg:237.38ms +[2025-07-17 15:32:06] [Rank 0] step:8201/10000 train_time:1947046ms step_avg:237.42ms +[2025-07-17 15:32:06] [Rank 0] step:8201/10000 train_time:1947046ms step_avg:237.42ms +[2025-07-17 15:32:11] [Rank 0] step:8221/10000 train_time:1952075ms step_avg:237.45ms +[2025-07-17 15:32:11] [Rank 0] step:8221/10000 
train_time:1952075ms step_avg:237.45ms +[2025-07-17 15:32:16] [Rank 0] step:8241/10000 train_time:1957104ms step_avg:237.48ms +[2025-07-17 15:32:16] [Rank 0] step:8241/10000 train_time:1957104ms step_avg:237.48ms +[2025-07-17 15:32:23] [Rank 0] PRINT: step:8250/10000 val_loss:4.1397 train_time:1960123ms step_avg:237.59ms +[2025-07-17 15:32:23] [Rank 0] PRINT: step:8250/10000 val_loss:4.1397 train_time:1960123ms step_avg:237.59ms +[2025-07-17 15:32:26] [Rank 0] step:8261/10000 train_time:1962125ms step_avg:237.52ms +[2025-07-17 15:32:26] [Rank 0] step:8261/10000 train_time:1962125ms step_avg:237.52ms +[2025-07-17 15:32:31] [Rank 0] step:8281/10000 train_time:1967178ms step_avg:237.55ms +[2025-07-17 15:32:31] [Rank 0] step:8281/10000 train_time:1967178ms step_avg:237.55ms +[2025-07-17 15:32:36] [Rank 0] step:8301/10000 train_time:1972199ms step_avg:237.59ms +[2025-07-17 15:32:36] [Rank 0] step:8301/10000 train_time:1972199ms step_avg:237.59ms +[2025-07-17 15:32:41] [Rank 0] step:8321/10000 train_time:1977229ms step_avg:237.62ms +[2025-07-17 15:32:41] [Rank 0] step:8321/10000 train_time:1977229ms step_avg:237.62ms +[2025-07-17 15:32:46] [Rank 0] step:8341/10000 train_time:1982265ms step_avg:237.65ms +[2025-07-17 15:32:46] [Rank 0] step:8341/10000 train_time:1982265ms step_avg:237.65ms +[2025-07-17 15:32:51] [Rank 0] step:8361/10000 train_time:1987290ms step_avg:237.69ms +[2025-07-17 15:32:51] [Rank 0] step:8361/10000 train_time:1987290ms step_avg:237.69ms +[2025-07-17 15:32:59] [Rank 0] PRINT: step:8375/10000 val_loss:4.2471 train_time:1991557ms step_avg:237.80ms +[2025-07-17 15:32:59] [Rank 0] PRINT: step:8375/10000 val_loss:4.2471 train_time:1991557ms step_avg:237.80ms +[2025-07-17 15:33:01] [Rank 0] step:8381/10000 train_time:1992300ms step_avg:237.72ms +[2025-07-17 15:33:01] [Rank 0] step:8381/10000 train_time:1992300ms step_avg:237.72ms +[2025-07-17 15:33:06] [Rank 0] step:8401/10000 train_time:1997307ms step_avg:237.75ms +[2025-07-17 15:33:06] [Rank 0] 
step:8401/10000 train_time:1997307ms step_avg:237.75ms +[2025-07-17 15:33:11] [Rank 0] step:8421/10000 train_time:2002331ms step_avg:237.78ms +[2025-07-17 15:33:11] [Rank 0] step:8421/10000 train_time:2002331ms step_avg:237.78ms +[2025-07-17 15:33:16] [Rank 0] step:8441/10000 train_time:2007358ms step_avg:237.81ms +[2025-07-17 15:33:16] [Rank 0] step:8441/10000 train_time:2007358ms step_avg:237.81ms +[2025-07-17 15:33:21] [Rank 0] step:8461/10000 train_time:2012395ms step_avg:237.84ms +[2025-07-17 15:33:21] [Rank 0] step:8461/10000 train_time:2012395ms step_avg:237.84ms +[2025-07-17 15:33:26] [Rank 0] step:8481/10000 train_time:2017407ms step_avg:237.87ms +[2025-07-17 15:33:26] [Rank 0] step:8481/10000 train_time:2017407ms step_avg:237.87ms +[2025-07-17 15:33:36] [Rank 0] PRINT: step:8500/10000 val_loss:4.2401 train_time:2022944ms step_avg:237.99ms +[2025-07-17 15:33:36] [Rank 0] PRINT: step:8500/10000 val_loss:4.2401 train_time:2022944ms step_avg:237.99ms +[2025-07-17 15:33:36] [Rank 0] step:8501/10000 train_time:2022955ms step_avg:237.97ms +[2025-07-17 15:33:36] [Rank 0] step:8501/10000 train_time:2022955ms step_avg:237.97ms +[2025-07-17 15:33:41] [Rank 0] step:8521/10000 train_time:2027464ms step_avg:237.94ms +[2025-07-17 15:33:41] [Rank 0] step:8521/10000 train_time:2027464ms step_avg:237.94ms +[2025-07-17 15:33:46] [Rank 0] step:8541/10000 train_time:2032504ms step_avg:237.97ms +[2025-07-17 15:33:46] [Rank 0] step:8541/10000 train_time:2032504ms step_avg:237.97ms +[2025-07-17 15:33:51] [Rank 0] step:8561/10000 train_time:2037537ms step_avg:238.00ms +[2025-07-17 15:33:51] [Rank 0] step:8561/10000 train_time:2037537ms step_avg:238.00ms +[2025-07-17 15:33:56] [Rank 0] step:8581/10000 train_time:2042562ms step_avg:238.03ms +[2025-07-17 15:33:56] [Rank 0] step:8581/10000 train_time:2042562ms step_avg:238.03ms +[2025-07-17 15:34:01] [Rank 0] step:8601/10000 train_time:2047585ms step_avg:238.06ms +[2025-07-17 15:34:01] [Rank 0] step:8601/10000 train_time:2047585ms 
step_avg:238.06ms +[2025-07-17 15:34:06] [Rank 0] step:8621/10000 train_time:2052609ms step_avg:238.09ms +[2025-07-17 15:34:06] [Rank 0] step:8621/10000 train_time:2052609ms step_avg:238.09ms +[2025-07-17 15:34:12] [Rank 0] PRINT: step:8625/10000 val_loss:4.2070 train_time:2054366ms step_avg:238.19ms +[2025-07-17 15:34:12] [Rank 0] PRINT: step:8625/10000 val_loss:4.2070 train_time:2054366ms step_avg:238.19ms +[2025-07-17 15:34:16] [Rank 0] step:8641/10000 train_time:2057642ms step_avg:238.13ms +[2025-07-17 15:34:16] [Rank 0] step:8641/10000 train_time:2057642ms step_avg:238.13ms +[2025-07-17 15:34:21] [Rank 0] step:8661/10000 train_time:2062658ms step_avg:238.15ms +[2025-07-17 15:34:21] [Rank 0] step:8661/10000 train_time:2062658ms step_avg:238.15ms +[2025-07-17 15:34:26] [Rank 0] step:8681/10000 train_time:2067680ms step_avg:238.18ms +[2025-07-17 15:34:26] [Rank 0] step:8681/10000 train_time:2067680ms step_avg:238.18ms +[2025-07-17 15:34:31] [Rank 0] step:8701/10000 train_time:2072713ms step_avg:238.22ms +[2025-07-17 15:34:31] [Rank 0] step:8701/10000 train_time:2072713ms step_avg:238.22ms +[2025-07-17 15:34:36] [Rank 0] step:8721/10000 train_time:2077740ms step_avg:238.25ms +[2025-07-17 15:34:36] [Rank 0] step:8721/10000 train_time:2077740ms step_avg:238.25ms +[2025-07-17 15:34:41] [Rank 0] step:8741/10000 train_time:2082770ms step_avg:238.28ms +[2025-07-17 15:34:41] [Rank 0] step:8741/10000 train_time:2082770ms step_avg:238.28ms +[2025-07-17 15:34:48] [Rank 0] PRINT: step:8750/10000 val_loss:4.2729 train_time:2085780ms step_avg:238.37ms +[2025-07-17 15:34:48] [Rank 0] PRINT: step:8750/10000 val_loss:4.2729 train_time:2085780ms step_avg:238.37ms +[2025-07-17 15:34:51] [Rank 0] step:8761/10000 train_time:2087785ms step_avg:238.30ms +[2025-07-17 15:34:51] [Rank 0] step:8761/10000 train_time:2087785ms step_avg:238.30ms +[2025-07-17 15:34:56] [Rank 0] step:8781/10000 train_time:2092807ms step_avg:238.33ms +[2025-07-17 15:34:56] [Rank 0] step:8781/10000 
train_time:2092807ms step_avg:238.33ms +[2025-07-17 15:35:01] [Rank 0] step:8801/10000 train_time:2097826ms step_avg:238.36ms +[2025-07-17 15:35:01] [Rank 0] step:8801/10000 train_time:2097826ms step_avg:238.36ms +[2025-07-17 15:35:06] [Rank 0] step:8821/10000 train_time:2102858ms step_avg:238.39ms +[2025-07-17 15:35:06] [Rank 0] step:8821/10000 train_time:2102858ms step_avg:238.39ms +[2025-07-17 15:35:11] [Rank 0] step:8841/10000 train_time:2107903ms step_avg:238.42ms +[2025-07-17 15:35:11] [Rank 0] step:8841/10000 train_time:2107903ms step_avg:238.42ms +[2025-07-17 15:35:16] [Rank 0] step:8861/10000 train_time:2112932ms step_avg:238.45ms +[2025-07-17 15:35:16] [Rank 0] step:8861/10000 train_time:2112932ms step_avg:238.45ms +[2025-07-17 15:35:24] [Rank 0] PRINT: step:8875/10000 val_loss:4.3385 train_time:2117200ms step_avg:238.56ms +[2025-07-17 15:35:24] [Rank 0] PRINT: step:8875/10000 val_loss:4.3385 train_time:2117200ms step_avg:238.56ms +[2025-07-17 15:35:26] [Rank 0] step:8881/10000 train_time:2117949ms step_avg:238.48ms +[2025-07-17 15:35:26] [Rank 0] step:8881/10000 train_time:2117949ms step_avg:238.48ms +[2025-07-17 15:35:31] [Rank 0] step:8901/10000 train_time:2122970ms step_avg:238.51ms +[2025-07-17 15:35:31] [Rank 0] step:8901/10000 train_time:2122970ms step_avg:238.51ms +[2025-07-17 15:35:36] [Rank 0] step:8921/10000 train_time:2127997ms step_avg:238.54ms +[2025-07-17 15:35:36] [Rank 0] step:8921/10000 train_time:2127997ms step_avg:238.54ms +[2025-07-17 15:35:41] [Rank 0] step:8941/10000 train_time:2133022ms step_avg:238.57ms +[2025-07-17 15:35:41] [Rank 0] step:8941/10000 train_time:2133022ms step_avg:238.57ms +[2025-07-17 15:35:46] [Rank 0] step:8961/10000 train_time:2138053ms step_avg:238.60ms +[2025-07-17 15:35:46] [Rank 0] step:8961/10000 train_time:2138053ms step_avg:238.60ms +[2025-07-17 15:35:51] [Rank 0] step:8981/10000 train_time:2143086ms step_avg:238.62ms +[2025-07-17 15:35:51] [Rank 0] step:8981/10000 train_time:2143086ms step_avg:238.62ms 
+[2025-07-17 15:36:00] [Rank 0] PRINT: step:9000/10000 val_loss:4.2285 train_time:2148618ms step_avg:238.74ms +[2025-07-17 15:36:00] [Rank 0] PRINT: step:9000/10000 val_loss:4.2285 train_time:2148618ms step_avg:238.74ms +[2025-07-17 15:36:01] [Rank 0] step:9001/10000 train_time:2148629ms step_avg:238.71ms +[2025-07-17 15:36:01] [Rank 0] step:9001/10000 train_time:2148629ms step_avg:238.71ms +[2025-07-17 15:36:06] [Rank 0] step:9021/10000 train_time:2153130ms step_avg:238.68ms +[2025-07-17 15:36:06] [Rank 0] step:9021/10000 train_time:2153130ms step_avg:238.68ms +[2025-07-17 15:36:11] [Rank 0] step:9041/10000 train_time:2158179ms step_avg:238.71ms +[2025-07-17 15:36:11] [Rank 0] step:9041/10000 train_time:2158179ms step_avg:238.71ms +[2025-07-17 15:36:16] [Rank 0] step:9061/10000 train_time:2163204ms step_avg:238.74ms +[2025-07-17 15:36:16] [Rank 0] step:9061/10000 train_time:2163204ms step_avg:238.74ms +[2025-07-17 15:36:21] [Rank 0] step:9081/10000 train_time:2168255ms step_avg:238.77ms +[2025-07-17 15:36:21] [Rank 0] step:9081/10000 train_time:2168255ms step_avg:238.77ms +[2025-07-17 15:36:26] [Rank 0] step:9101/10000 train_time:2173300ms step_avg:238.80ms +[2025-07-17 15:36:26] [Rank 0] step:9101/10000 train_time:2173300ms step_avg:238.80ms +[2025-07-17 15:36:31] [Rank 0] step:9121/10000 train_time:2178338ms step_avg:238.83ms +[2025-07-17 15:36:31] [Rank 0] step:9121/10000 train_time:2178338ms step_avg:238.83ms +[2025-07-17 15:36:37] [Rank 0] PRINT: step:9125/10000 val_loss:4.2947 train_time:2180100ms step_avg:238.92ms +[2025-07-17 15:36:37] [Rank 0] PRINT: step:9125/10000 val_loss:4.2947 train_time:2180100ms step_avg:238.92ms +[2025-07-17 15:36:41] [Rank 0] step:9141/10000 train_time:2183346ms step_avg:238.85ms +[2025-07-17 15:36:41] [Rank 0] step:9141/10000 train_time:2183346ms step_avg:238.85ms +[2025-07-17 15:36:46] [Rank 0] step:9161/10000 train_time:2188405ms step_avg:238.88ms +[2025-07-17 15:36:46] [Rank 0] step:9161/10000 train_time:2188405ms 
step_avg:238.88ms +[2025-07-17 15:36:51] [Rank 0] step:9181/10000 train_time:2193441ms step_avg:238.91ms +[2025-07-17 15:36:51] [Rank 0] step:9181/10000 train_time:2193441ms step_avg:238.91ms +[2025-07-17 15:36:56] [Rank 0] step:9201/10000 train_time:2198466ms step_avg:238.94ms +[2025-07-17 15:36:56] [Rank 0] step:9201/10000 train_time:2198466ms step_avg:238.94ms +[2025-07-17 15:37:01] [Rank 0] step:9221/10000 train_time:2203521ms step_avg:238.97ms +[2025-07-17 15:37:01] [Rank 0] step:9221/10000 train_time:2203521ms step_avg:238.97ms +[2025-07-17 15:37:06] [Rank 0] step:9241/10000 train_time:2208555ms step_avg:239.00ms +[2025-07-17 15:37:06] [Rank 0] step:9241/10000 train_time:2208555ms step_avg:239.00ms +[2025-07-17 15:37:12] [Rank 0] PRINT: step:9250/10000 val_loss:4.3158 train_time:2211576ms step_avg:239.09ms +[2025-07-17 15:37:12] [Rank 0] PRINT: step:9250/10000 val_loss:4.3158 train_time:2211576ms step_avg:239.09ms +[2025-07-17 15:37:15] [Rank 0] step:9261/10000 train_time:2213588ms step_avg:239.02ms +[2025-07-17 15:37:15] [Rank 0] step:9261/10000 train_time:2213588ms step_avg:239.02ms +[2025-07-17 15:37:20] [Rank 0] step:9281/10000 train_time:2218582ms step_avg:239.05ms +[2025-07-17 15:37:20] [Rank 0] step:9281/10000 train_time:2218582ms step_avg:239.05ms +[2025-07-17 15:37:25] [Rank 0] step:9301/10000 train_time:2223600ms step_avg:239.07ms +[2025-07-17 15:37:25] [Rank 0] step:9301/10000 train_time:2223600ms step_avg:239.07ms +[2025-07-17 15:37:30] [Rank 0] step:9321/10000 train_time:2228634ms step_avg:239.10ms +[2025-07-17 15:37:30] [Rank 0] step:9321/10000 train_time:2228634ms step_avg:239.10ms +[2025-07-17 15:37:35] [Rank 0] step:9341/10000 train_time:2233653ms step_avg:239.12ms +[2025-07-17 15:37:35] [Rank 0] step:9341/10000 train_time:2233653ms step_avg:239.12ms +[2025-07-17 15:37:40] [Rank 0] step:9361/10000 train_time:2238674ms step_avg:239.15ms +[2025-07-17 15:37:40] [Rank 0] step:9361/10000 train_time:2238674ms step_avg:239.15ms +[2025-07-17 
15:37:48] [Rank 0] PRINT: step:9375/10000 val_loss:4.2660 train_time:2242940ms step_avg:239.25ms +[2025-07-17 15:37:48] [Rank 0] PRINT: step:9375/10000 val_loss:4.2660 train_time:2242940ms step_avg:239.25ms +[2025-07-17 15:37:50] [Rank 0] step:9381/10000 train_time:2243686ms step_avg:239.17ms +[2025-07-17 15:37:50] [Rank 0] step:9381/10000 train_time:2243686ms step_avg:239.17ms +[2025-07-17 15:37:55] [Rank 0] step:9401/10000 train_time:2248691ms step_avg:239.20ms +[2025-07-17 15:37:55] [Rank 0] step:9401/10000 train_time:2248691ms step_avg:239.20ms +[2025-07-17 15:38:00] [Rank 0] step:9421/10000 train_time:2253718ms step_avg:239.22ms +[2025-07-17 15:38:00] [Rank 0] step:9421/10000 train_time:2253718ms step_avg:239.22ms +[2025-07-17 15:38:05] [Rank 0] step:9441/10000 train_time:2258737ms step_avg:239.25ms +[2025-07-17 15:38:05] [Rank 0] step:9441/10000 train_time:2258737ms step_avg:239.25ms +[2025-07-17 15:38:10] [Rank 0] step:9461/10000 train_time:2263778ms step_avg:239.27ms +[2025-07-17 15:38:10] [Rank 0] step:9461/10000 train_time:2263778ms step_avg:239.27ms +[2025-07-17 15:38:15] [Rank 0] step:9481/10000 train_time:2268807ms step_avg:239.30ms +[2025-07-17 15:38:15] [Rank 0] step:9481/10000 train_time:2268807ms step_avg:239.30ms +[2025-07-17 15:38:24] [Rank 0] PRINT: step:9500/10000 val_loss:4.2819 train_time:2274370ms step_avg:239.41ms +[2025-07-17 15:38:24] [Rank 0] PRINT: step:9500/10000 val_loss:4.2819 train_time:2274370ms step_avg:239.41ms +[2025-07-17 15:38:24] [Rank 0] step:9501/10000 train_time:2274381ms step_avg:239.38ms +[2025-07-17 15:38:24] [Rank 0] step:9501/10000 train_time:2274381ms step_avg:239.38ms +[2025-07-17 15:38:29] [Rank 0] step:9521/10000 train_time:2278887ms step_avg:239.35ms +[2025-07-17 15:38:29] [Rank 0] step:9521/10000 train_time:2278887ms step_avg:239.35ms +[2025-07-17 15:38:34] [Rank 0] step:9541/10000 train_time:2283930ms step_avg:239.38ms +[2025-07-17 15:38:34] [Rank 0] step:9541/10000 train_time:2283930ms step_avg:239.38ms 
+[2025-07-17 15:38:39] [Rank 0] step:9561/10000 train_time:2288950ms step_avg:239.40ms +[2025-07-17 15:38:39] [Rank 0] step:9561/10000 train_time:2288950ms step_avg:239.40ms +[2025-07-17 15:38:44] [Rank 0] step:9581/10000 train_time:2293965ms step_avg:239.43ms +[2025-07-17 15:38:44] [Rank 0] step:9581/10000 train_time:2293965ms step_avg:239.43ms +[2025-07-17 15:38:49] [Rank 0] step:9601/10000 train_time:2298981ms step_avg:239.45ms +[2025-07-17 15:38:49] [Rank 0] step:9601/10000 train_time:2298981ms step_avg:239.45ms +[2025-07-17 15:38:54] [Rank 0] step:9621/10000 train_time:2304037ms step_avg:239.48ms +[2025-07-17 15:38:54] [Rank 0] step:9621/10000 train_time:2304037ms step_avg:239.48ms +[2025-07-17 15:39:00] [Rank 0] PRINT: step:9625/10000 val_loss:4.2685 train_time:2305793ms step_avg:239.56ms +[2025-07-17 15:39:00] [Rank 0] PRINT: step:9625/10000 val_loss:4.2685 train_time:2305793ms step_avg:239.56ms +[2025-07-17 15:39:04] [Rank 0] step:9641/10000 train_time:2309078ms step_avg:239.51ms +[2025-07-17 15:39:04] [Rank 0] step:9641/10000 train_time:2309078ms step_avg:239.51ms +[2025-07-17 15:39:09] [Rank 0] step:9661/10000 train_time:2314172ms step_avg:239.54ms +[2025-07-17 15:39:09] [Rank 0] step:9661/10000 train_time:2314172ms step_avg:239.54ms +[2025-07-17 15:39:14] [Rank 0] step:9681/10000 train_time:2319267ms step_avg:239.57ms +[2025-07-17 15:39:14] [Rank 0] step:9681/10000 train_time:2319267ms step_avg:239.57ms +[2025-07-17 15:39:19] [Rank 0] step:9701/10000 train_time:2324462ms step_avg:239.61ms +[2025-07-17 15:39:19] [Rank 0] step:9701/10000 train_time:2324462ms step_avg:239.61ms +[2025-07-17 15:39:24] [Rank 0] step:9721/10000 train_time:2329538ms step_avg:239.64ms +[2025-07-17 15:39:24] [Rank 0] step:9721/10000 train_time:2329538ms step_avg:239.64ms +[2025-07-17 15:39:29] [Rank 0] step:9741/10000 train_time:2334633ms step_avg:239.67ms +[2025-07-17 15:39:29] [Rank 0] step:9741/10000 train_time:2334633ms step_avg:239.67ms +[2025-07-17 15:39:36] [Rank 0] PRINT: 
step:9750/10000 val_loss:4.3036 train_time:2337680ms step_avg:239.76ms +[2025-07-17 15:39:36] [Rank 0] PRINT: step:9750/10000 val_loss:4.3036 train_time:2337680ms step_avg:239.76ms +[2025-07-17 15:39:39] [Rank 0] step:9761/10000 train_time:2339711ms step_avg:239.70ms +[2025-07-17 15:39:39] [Rank 0] step:9761/10000 train_time:2339711ms step_avg:239.70ms +[2025-07-17 15:39:44] [Rank 0] step:9781/10000 train_time:2344801ms step_avg:239.73ms +[2025-07-17 15:39:44] [Rank 0] step:9781/10000 train_time:2344801ms step_avg:239.73ms +[2025-07-17 15:39:49] [Rank 0] step:9801/10000 train_time:2349871ms step_avg:239.76ms +[2025-07-17 15:39:49] [Rank 0] step:9801/10000 train_time:2349871ms step_avg:239.76ms +[2025-07-17 15:39:54] [Rank 0] step:9821/10000 train_time:2354956ms step_avg:239.79ms +[2025-07-17 15:39:54] [Rank 0] step:9821/10000 train_time:2354956ms step_avg:239.79ms +[2025-07-17 15:39:59] [Rank 0] step:9841/10000 train_time:2360034ms step_avg:239.82ms +[2025-07-17 15:39:59] [Rank 0] step:9841/10000 train_time:2360034ms step_avg:239.82ms +[2025-07-17 15:40:04] [Rank 0] step:9861/10000 train_time:2365119ms step_avg:239.85ms +[2025-07-17 15:40:04] [Rank 0] step:9861/10000 train_time:2365119ms step_avg:239.85ms +[2025-07-17 15:40:13] [Rank 0] PRINT: step:9875/10000 val_loss:4.3422 train_time:2369432ms step_avg:239.94ms +[2025-07-17 15:40:13] [Rank 0] PRINT: step:9875/10000 val_loss:4.3422 train_time:2369432ms step_avg:239.94ms +[2025-07-17 15:40:14] [Rank 0] step:9881/10000 train_time:2370187ms step_avg:239.87ms +[2025-07-17 15:40:14] [Rank 0] step:9881/10000 train_time:2370187ms step_avg:239.87ms +[2025-07-17 15:40:19] [Rank 0] step:9901/10000 train_time:2375267ms step_avg:239.90ms +[2025-07-17 15:40:19] [Rank 0] step:9901/10000 train_time:2375267ms step_avg:239.90ms +[2025-07-17 15:40:24] [Rank 0] step:9921/10000 train_time:2380363ms step_avg:239.93ms +[2025-07-17 15:40:24] [Rank 0] step:9921/10000 train_time:2380363ms step_avg:239.93ms +[2025-07-17 15:40:30] [Rank 0] 
step:9941/10000 train_time:2385474ms step_avg:239.96ms +[2025-07-17 15:40:30] [Rank 0] step:9941/10000 train_time:2385474ms step_avg:239.96ms +[2025-07-17 15:40:35] [Rank 0] step:9961/10000 train_time:2390576ms step_avg:239.99ms +[2025-07-17 15:40:35] [Rank 0] step:9961/10000 train_time:2390576ms step_avg:239.99ms +[2025-07-17 15:40:40] [Rank 0] step:9981/10000 train_time:2395686ms step_avg:240.02ms +[2025-07-17 15:40:40] [Rank 0] step:9981/10000 train_time:2395686ms step_avg:240.02ms +[2025-07-17 15:40:45] [Rank 0] step:10000/10000 train_time:2400506ms step_avg:240.05ms +[2025-07-17 15:40:45] [Rank 0] step:10000/10000 train_time:2400506ms step_avg:240.05ms +[2025-07-17 15:40:49] [Rank 0] PRINT: step:10000/10000 val_loss:4.3123 train_time:2401276ms step_avg:240.13ms +[2025-07-17 15:40:49] [Rank 0] PRINT: step:10000/10000 val_loss:4.3123 train_time:2401276ms step_avg:240.13ms +[2025-07-17 15:40:49] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 15:40:49 2025 --- +[2025-07-17 15:40:49] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 15:40:49 2025 --- +[2025-07-17 15:40:49] [Rank 0] PRINT: Peak memory allocated: 31040 MiB reserved: 31574 MiB +[2025-07-17 15:40:49] [Rank 0] PRINT: Peak memory allocated: 31040 MiB reserved: 31574 MiB diff --git a/logs_norope/diff_modes/mode_6_param_norope_seed_43/config.json b/logs_norope/diff_modes/mode_6_param_norope_seed_43/config.json new file mode 100644 index 0000000000000000000000000000000000000000..c613f7c884531d12ced6af341b73088c7d6f150f --- /dev/null +++ b/logs_norope/diff_modes/mode_6_param_norope_seed_43/config.json @@ -0,0 +1,22 @@ +{ + "cli_args": { + "unet": false, + "seed": 43, + "optimizer_mode": 6, + "model_parameterization": "norope" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 10485760, + "train_seq_len": 49152, + 
"val_seq_len": 262144, + "num_iterations": 10000, + "cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 125, + "save_checkpoint": false + }, + "run_uuid_for_log": "ed280fb2-0267-461d-aa21-443c17adea2d", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_norope/diff_modes/mode_6_param_norope_seed_43/training_log_ed280fb2-0267-461d-aa21-443c17adea2d.txt b/logs_norope/diff_modes/mode_6_param_norope_seed_43/training_log_ed280fb2-0267-461d-aa21-443c17adea2d.txt new file mode 100644 index 0000000000000000000000000000000000000000..bed97a3f0156a5135237c496ae852669a1632113 --- /dev/null +++ b/logs_norope/diff_modes/mode_6_param_norope_seed_43/training_log_ed280fb2-0267-461d-aa21-443c17adea2d.txt @@ -0,0 +1,2360 @@ +[2025-07-17 22:05:21] [Rank 0] PRINT: --- Script Start: Thu Jul 17 22:05:21 2025 --- +[2025-07-17 22:05:21] [Rank 0] PRINT: --- Script Start: Thu Jul 17 22:05:21 2025 --- +[2025-07-17 22:05:21] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=6, model_parameterization='norope') +[2025-07-17 22:05:21] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=6, model_parameterization='norope') +[2025-07-17 22:05:21] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 22:05:21] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 22:05:21] [Rank 0] PRINT: Using fixed seed: 43 +[2025-07-17 22:05:21] [Rank 0] PRINT: Using fixed seed: 43 +[2025-07-17 22:05:21] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_6_param_norope_seed_43 +[2025-07-17 22:05:21] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_6_param_norope_seed_43 +[2025-07-17 22:05:21] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path 
+import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * 
num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used 
for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + 
config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 22:05:21] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + 
num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + 
run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 22:05:21] [Rank 0] PRINT: Constructing model... +[2025-07-17 22:05:21] [Rank 0] PRINT: Constructing model... +[2025-07-17 22:05:24] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 22:05:24] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 22:05:24] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 22:05:24] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 22:05:24] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 22:05:24] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 22:05:24] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 22:05:24] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 22:05:24] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 6 +[2025-07-17 22:05:24] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 6 +[2025-07-17 22:05:24] [Rank 0] PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: 0.001). +[2025-07-17 22:05:24] [Rank 0] PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: 0.001). +[2025-07-17 22:05:24] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-07-17 22:05:24] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-07-17 22:05:24] [Rank 0] PRINT: Muon optimizer is active with 12 parameters. +[2025-07-17 22:05:24] [Rank 0] PRINT: Muon optimizer is active with 12 parameters. +[2025-07-17 22:05:24] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 22:05:24] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 22:05:24] [Rank 0] PRINT: Model compilation complete. +[2025-07-17 22:05:24] [Rank 0] PRINT: Model compilation complete. +[2025-07-17 22:05:24] [Rank 0] PRINT: Starting warmup... 
+[2025-07-17 22:05:24] [Rank 0] PRINT: Starting warmup... +[2025-07-17 22:06:26] [Rank 0] PRINT: Warmup complete. +[2025-07-17 22:06:26] [Rank 0] PRINT: Warmup complete. +[2025-07-17 22:06:27] [Rank 0] PRINT: Starting training... +[2025-07-17 22:06:27] [Rank 0] PRINT: Starting training... +[2025-07-17 22:06:36] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 22:06:36] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 22:06:41] [Rank 0] step:21/10000 train_time:4314ms step_avg:205.43ms +[2025-07-17 22:06:41] [Rank 0] step:21/10000 train_time:4314ms step_avg:205.43ms +[2025-07-17 22:06:46] [Rank 0] step:41/10000 train_time:8772ms step_avg:213.94ms +[2025-07-17 22:06:46] [Rank 0] step:41/10000 train_time:8772ms step_avg:213.94ms +[2025-07-17 22:06:50] [Rank 0] step:61/10000 train_time:13239ms step_avg:217.03ms +[2025-07-17 22:06:50] [Rank 0] step:61/10000 train_time:13239ms step_avg:217.03ms +[2025-07-17 22:06:55] [Rank 0] step:81/10000 train_time:17711ms step_avg:218.65ms +[2025-07-17 22:06:55] [Rank 0] step:81/10000 train_time:17711ms step_avg:218.65ms +[2025-07-17 22:06:59] [Rank 0] step:101/10000 train_time:22174ms step_avg:219.54ms +[2025-07-17 22:06:59] [Rank 0] step:101/10000 train_time:22174ms step_avg:219.54ms +[2025-07-17 22:07:04] [Rank 0] step:121/10000 train_time:26646ms step_avg:220.21ms +[2025-07-17 22:07:04] [Rank 0] step:121/10000 train_time:26646ms step_avg:220.21ms +[2025-07-17 22:07:09] [Rank 0] PRINT: step:125/10000 val_loss:5.5034 train_time:28216ms step_avg:225.73ms +[2025-07-17 22:07:09] [Rank 0] PRINT: step:125/10000 val_loss:5.5034 train_time:28216ms step_avg:225.73ms +[2025-07-17 22:07:13] [Rank 0] step:141/10000 train_time:31119ms step_avg:220.70ms +[2025-07-17 22:07:13] [Rank 0] step:141/10000 train_time:31119ms step_avg:220.70ms +[2025-07-17 22:07:17] [Rank 0] step:161/10000 train_time:35595ms step_avg:221.09ms +[2025-07-17 22:07:17] [Rank 0] step:161/10000 
train_time:35595ms step_avg:221.09ms +[2025-07-17 22:07:22] [Rank 0] step:181/10000 train_time:40076ms step_avg:221.41ms +[2025-07-17 22:07:22] [Rank 0] step:181/10000 train_time:40076ms step_avg:221.41ms +[2025-07-17 22:07:26] [Rank 0] step:201/10000 train_time:44554ms step_avg:221.66ms +[2025-07-17 22:07:26] [Rank 0] step:201/10000 train_time:44554ms step_avg:221.66ms +[2025-07-17 22:07:31] [Rank 0] step:221/10000 train_time:49032ms step_avg:221.86ms +[2025-07-17 22:07:31] [Rank 0] step:221/10000 train_time:49032ms step_avg:221.86ms +[2025-07-17 22:07:35] [Rank 0] step:241/10000 train_time:53514ms step_avg:222.05ms +[2025-07-17 22:07:35] [Rank 0] step:241/10000 train_time:53514ms step_avg:222.05ms +[2025-07-17 22:07:41] [Rank 0] PRINT: step:250/10000 val_loss:5.0664 train_time:56204ms step_avg:224.82ms +[2025-07-17 22:07:41] [Rank 0] PRINT: step:250/10000 val_loss:5.0664 train_time:56204ms step_avg:224.82ms +[2025-07-17 22:07:43] [Rank 0] step:261/10000 train_time:57991ms step_avg:222.19ms +[2025-07-17 22:07:43] [Rank 0] step:261/10000 train_time:57991ms step_avg:222.19ms +[2025-07-17 22:07:48] [Rank 0] step:281/10000 train_time:62474ms step_avg:222.33ms +[2025-07-17 22:07:48] [Rank 0] step:281/10000 train_time:62474ms step_avg:222.33ms +[2025-07-17 22:07:52] [Rank 0] step:301/10000 train_time:66949ms step_avg:222.42ms +[2025-07-17 22:07:52] [Rank 0] step:301/10000 train_time:66949ms step_avg:222.42ms +[2025-07-17 22:07:57] [Rank 0] step:321/10000 train_time:71427ms step_avg:222.51ms +[2025-07-17 22:07:57] [Rank 0] step:321/10000 train_time:71427ms step_avg:222.51ms +[2025-07-17 22:08:01] [Rank 0] step:341/10000 train_time:75905ms step_avg:222.59ms +[2025-07-17 22:08:01] [Rank 0] step:341/10000 train_time:75905ms step_avg:222.59ms +[2025-07-17 22:08:06] [Rank 0] step:361/10000 train_time:80387ms step_avg:222.68ms +[2025-07-17 22:08:06] [Rank 0] step:361/10000 train_time:80387ms step_avg:222.68ms +[2025-07-17 22:08:13] [Rank 0] PRINT: step:375/10000 
val_loss:4.9332 train_time:84199ms step_avg:224.53ms +[2025-07-17 22:08:13] [Rank 0] PRINT: step:375/10000 val_loss:4.9332 train_time:84199ms step_avg:224.53ms +[2025-07-17 22:08:15] [Rank 0] step:381/10000 train_time:84867ms step_avg:222.75ms +[2025-07-17 22:08:15] [Rank 0] step:381/10000 train_time:84867ms step_avg:222.75ms +[2025-07-17 22:08:19] [Rank 0] step:401/10000 train_time:89346ms step_avg:222.81ms +[2025-07-17 22:08:19] [Rank 0] step:401/10000 train_time:89346ms step_avg:222.81ms +[2025-07-17 22:08:24] [Rank 0] step:421/10000 train_time:93824ms step_avg:222.86ms +[2025-07-17 22:08:24] [Rank 0] step:421/10000 train_time:93824ms step_avg:222.86ms +[2025-07-17 22:08:28] [Rank 0] step:441/10000 train_time:98305ms step_avg:222.91ms +[2025-07-17 22:08:28] [Rank 0] step:441/10000 train_time:98305ms step_avg:222.91ms +[2025-07-17 22:08:33] [Rank 0] step:461/10000 train_time:102783ms step_avg:222.96ms +[2025-07-17 22:08:33] [Rank 0] step:461/10000 train_time:102783ms step_avg:222.96ms +[2025-07-17 22:08:37] [Rank 0] step:481/10000 train_time:107270ms step_avg:223.01ms +[2025-07-17 22:08:37] [Rank 0] step:481/10000 train_time:107270ms step_avg:223.01ms +[2025-07-17 22:08:46] [Rank 0] PRINT: step:500/10000 val_loss:4.9070 train_time:112203ms step_avg:224.41ms +[2025-07-17 22:08:46] [Rank 0] PRINT: step:500/10000 val_loss:4.9070 train_time:112203ms step_avg:224.41ms +[2025-07-17 22:08:46] [Rank 0] step:501/10000 train_time:112214ms step_avg:223.98ms +[2025-07-17 22:08:46] [Rank 0] step:501/10000 train_time:112214ms step_avg:223.98ms +[2025-07-17 22:08:51] [Rank 0] step:521/10000 train_time:116326ms step_avg:223.28ms +[2025-07-17 22:08:51] [Rank 0] step:521/10000 train_time:116326ms step_avg:223.28ms +[2025-07-17 22:08:55] [Rank 0] step:541/10000 train_time:120810ms step_avg:223.31ms +[2025-07-17 22:08:55] [Rank 0] step:541/10000 train_time:120810ms step_avg:223.31ms +[2025-07-17 22:09:00] [Rank 0] step:561/10000 train_time:125294ms step_avg:223.34ms +[2025-07-17 
22:09:00] [Rank 0] step:561/10000 train_time:125294ms step_avg:223.34ms +[2025-07-17 22:09:04] [Rank 0] step:581/10000 train_time:129783ms step_avg:223.38ms +[2025-07-17 22:09:04] [Rank 0] step:581/10000 train_time:129783ms step_avg:223.38ms +[2025-07-17 22:09:09] [Rank 0] step:601/10000 train_time:134272ms step_avg:223.41ms +[2025-07-17 22:09:09] [Rank 0] step:601/10000 train_time:134272ms step_avg:223.41ms +[2025-07-17 22:09:13] [Rank 0] step:621/10000 train_time:138759ms step_avg:223.44ms +[2025-07-17 22:09:13] [Rank 0] step:621/10000 train_time:138759ms step_avg:223.44ms +[2025-07-17 22:09:18] [Rank 0] PRINT: step:625/10000 val_loss:4.9150 train_time:140335ms step_avg:224.54ms +[2025-07-17 22:09:18] [Rank 0] PRINT: step:625/10000 val_loss:4.9150 train_time:140335ms step_avg:224.54ms +[2025-07-17 22:09:22] [Rank 0] step:641/10000 train_time:143245ms step_avg:223.47ms +[2025-07-17 22:09:22] [Rank 0] step:641/10000 train_time:143245ms step_avg:223.47ms +[2025-07-17 22:09:26] [Rank 0] step:661/10000 train_time:147737ms step_avg:223.51ms +[2025-07-17 22:09:26] [Rank 0] step:661/10000 train_time:147737ms step_avg:223.51ms +[2025-07-17 22:09:31] [Rank 0] step:681/10000 train_time:152226ms step_avg:223.53ms +[2025-07-17 22:09:31] [Rank 0] step:681/10000 train_time:152226ms step_avg:223.53ms +[2025-07-17 22:09:35] [Rank 0] step:701/10000 train_time:156717ms step_avg:223.56ms +[2025-07-17 22:09:35] [Rank 0] step:701/10000 train_time:156717ms step_avg:223.56ms +[2025-07-17 22:09:40] [Rank 0] step:721/10000 train_time:161211ms step_avg:223.59ms +[2025-07-17 22:09:40] [Rank 0] step:721/10000 train_time:161211ms step_avg:223.59ms +[2025-07-17 22:09:44] [Rank 0] step:741/10000 train_time:165704ms step_avg:223.62ms +[2025-07-17 22:09:44] [Rank 0] step:741/10000 train_time:165704ms step_avg:223.62ms +[2025-07-17 22:09:50] [Rank 0] PRINT: step:750/10000 val_loss:4.7204 train_time:168421ms step_avg:224.56ms +[2025-07-17 22:09:50] [Rank 0] PRINT: step:750/10000 val_loss:4.7204 
train_time:168421ms step_avg:224.56ms +[2025-07-17 22:09:53] [Rank 0] step:761/10000 train_time:170226ms step_avg:223.69ms +[2025-07-17 22:09:53] [Rank 0] step:761/10000 train_time:170226ms step_avg:223.69ms +[2025-07-17 22:09:57] [Rank 0] step:781/10000 train_time:174750ms step_avg:223.75ms +[2025-07-17 22:09:57] [Rank 0] step:781/10000 train_time:174750ms step_avg:223.75ms +[2025-07-17 22:10:02] [Rank 0] step:801/10000 train_time:179278ms step_avg:223.82ms +[2025-07-17 22:10:02] [Rank 0] step:801/10000 train_time:179278ms step_avg:223.82ms +[2025-07-17 22:10:06] [Rank 0] step:821/10000 train_time:183805ms step_avg:223.88ms +[2025-07-17 22:10:06] [Rank 0] step:821/10000 train_time:183805ms step_avg:223.88ms +[2025-07-17 22:10:11] [Rank 0] step:841/10000 train_time:188332ms step_avg:223.94ms +[2025-07-17 22:10:11] [Rank 0] step:841/10000 train_time:188332ms step_avg:223.94ms +[2025-07-17 22:10:15] [Rank 0] step:861/10000 train_time:192857ms step_avg:223.99ms +[2025-07-17 22:10:15] [Rank 0] step:861/10000 train_time:192857ms step_avg:223.99ms +[2025-07-17 22:10:23] [Rank 0] PRINT: step:875/10000 val_loss:4.6868 train_time:196706ms step_avg:224.81ms +[2025-07-17 22:10:23] [Rank 0] PRINT: step:875/10000 val_loss:4.6868 train_time:196706ms step_avg:224.81ms +[2025-07-17 22:10:24] [Rank 0] step:881/10000 train_time:197379ms step_avg:224.04ms +[2025-07-17 22:10:24] [Rank 0] step:881/10000 train_time:197379ms step_avg:224.04ms +[2025-07-17 22:10:29] [Rank 0] step:901/10000 train_time:201906ms step_avg:224.09ms +[2025-07-17 22:10:29] [Rank 0] step:901/10000 train_time:201906ms step_avg:224.09ms +[2025-07-17 22:10:33] [Rank 0] step:921/10000 train_time:206434ms step_avg:224.14ms +[2025-07-17 22:10:33] [Rank 0] step:921/10000 train_time:206434ms step_avg:224.14ms +[2025-07-17 22:10:38] [Rank 0] step:941/10000 train_time:210962ms step_avg:224.19ms +[2025-07-17 22:10:38] [Rank 0] step:941/10000 train_time:210962ms step_avg:224.19ms +[2025-07-17 22:10:42] [Rank 0] 
step:961/10000 train_time:215492ms step_avg:224.24ms +[2025-07-17 22:10:42] [Rank 0] step:961/10000 train_time:215492ms step_avg:224.24ms +[2025-07-17 22:10:47] [Rank 0] step:981/10000 train_time:220021ms step_avg:224.28ms +[2025-07-17 22:10:47] [Rank 0] step:981/10000 train_time:220021ms step_avg:224.28ms +[2025-07-17 22:10:55] [Rank 0] PRINT: step:1000/10000 val_loss:4.6792 train_time:225002ms step_avg:225.00ms +[2025-07-17 22:10:55] [Rank 0] PRINT: step:1000/10000 val_loss:4.6792 train_time:225002ms step_avg:225.00ms +[2025-07-17 22:10:56] [Rank 0] step:1001/10000 train_time:225013ms step_avg:224.79ms +[2025-07-17 22:10:56] [Rank 0] step:1001/10000 train_time:225013ms step_avg:224.79ms +[2025-07-17 22:11:00] [Rank 0] step:1021/10000 train_time:229136ms step_avg:224.42ms +[2025-07-17 22:11:00] [Rank 0] step:1021/10000 train_time:229136ms step_avg:224.42ms +[2025-07-17 22:11:05] [Rank 0] step:1041/10000 train_time:233665ms step_avg:224.46ms +[2025-07-17 22:11:05] [Rank 0] step:1041/10000 train_time:233665ms step_avg:224.46ms +[2025-07-17 22:11:09] [Rank 0] step:1061/10000 train_time:238196ms step_avg:224.50ms +[2025-07-17 22:11:09] [Rank 0] step:1061/10000 train_time:238196ms step_avg:224.50ms +[2025-07-17 22:11:14] [Rank 0] step:1081/10000 train_time:242728ms step_avg:224.54ms +[2025-07-17 22:11:14] [Rank 0] step:1081/10000 train_time:242728ms step_avg:224.54ms +[2025-07-17 22:11:18] [Rank 0] step:1101/10000 train_time:247258ms step_avg:224.58ms +[2025-07-17 22:11:18] [Rank 0] step:1101/10000 train_time:247258ms step_avg:224.58ms +[2025-07-17 22:11:23] [Rank 0] step:1121/10000 train_time:251793ms step_avg:224.61ms +[2025-07-17 22:11:23] [Rank 0] step:1121/10000 train_time:251793ms step_avg:224.61ms +[2025-07-17 22:11:28] [Rank 0] PRINT: step:1125/10000 val_loss:4.6608 train_time:253385ms step_avg:225.23ms +[2025-07-17 22:11:28] [Rank 0] PRINT: step:1125/10000 val_loss:4.6608 train_time:253385ms step_avg:225.23ms +[2025-07-17 22:11:32] [Rank 0] step:1141/10000 
train_time:256328ms step_avg:224.65ms +[2025-07-17 22:11:32] [Rank 0] step:1141/10000 train_time:256328ms step_avg:224.65ms +[2025-07-17 22:11:36] [Rank 0] step:1161/10000 train_time:260862ms step_avg:224.69ms +[2025-07-17 22:11:36] [Rank 0] step:1161/10000 train_time:260862ms step_avg:224.69ms +[2025-07-17 22:11:41] [Rank 0] step:1181/10000 train_time:265397ms step_avg:224.72ms +[2025-07-17 22:11:41] [Rank 0] step:1181/10000 train_time:265397ms step_avg:224.72ms +[2025-07-17 22:11:45] [Rank 0] step:1201/10000 train_time:269933ms step_avg:224.76ms +[2025-07-17 22:11:45] [Rank 0] step:1201/10000 train_time:269933ms step_avg:224.76ms +[2025-07-17 22:11:50] [Rank 0] step:1221/10000 train_time:274466ms step_avg:224.79ms +[2025-07-17 22:11:50] [Rank 0] step:1221/10000 train_time:274466ms step_avg:224.79ms +[2025-07-17 22:11:54] [Rank 0] step:1241/10000 train_time:279005ms step_avg:224.82ms +[2025-07-17 22:11:54] [Rank 0] step:1241/10000 train_time:279005ms step_avg:224.82ms +[2025-07-17 22:12:01] [Rank 0] PRINT: step:1250/10000 val_loss:4.7362 train_time:281732ms step_avg:225.39ms +[2025-07-17 22:12:01] [Rank 0] PRINT: step:1250/10000 val_loss:4.7362 train_time:281732ms step_avg:225.39ms +[2025-07-17 22:12:03] [Rank 0] step:1261/10000 train_time:283538ms step_avg:224.85ms +[2025-07-17 22:12:03] [Rank 0] step:1261/10000 train_time:283538ms step_avg:224.85ms +[2025-07-17 22:12:08] [Rank 0] step:1281/10000 train_time:288075ms step_avg:224.88ms +[2025-07-17 22:12:08] [Rank 0] step:1281/10000 train_time:288075ms step_avg:224.88ms +[2025-07-17 22:12:13] [Rank 0] step:1301/10000 train_time:292611ms step_avg:224.91ms +[2025-07-17 22:12:13] [Rank 0] step:1301/10000 train_time:292611ms step_avg:224.91ms +[2025-07-17 22:12:17] [Rank 0] step:1321/10000 train_time:297148ms step_avg:224.94ms +[2025-07-17 22:12:17] [Rank 0] step:1321/10000 train_time:297148ms step_avg:224.94ms +[2025-07-17 22:12:22] [Rank 0] step:1341/10000 train_time:301684ms step_avg:224.97ms +[2025-07-17 22:12:22] 
[Rank 0] step:1341/10000 train_time:301684ms step_avg:224.97ms +[2025-07-17 22:12:26] [Rank 0] step:1361/10000 train_time:306222ms step_avg:225.00ms +[2025-07-17 22:12:26] [Rank 0] step:1361/10000 train_time:306222ms step_avg:225.00ms +[2025-07-17 22:12:34] [Rank 0] PRINT: step:1375/10000 val_loss:4.7220 train_time:310083ms step_avg:225.51ms +[2025-07-17 22:12:34] [Rank 0] PRINT: step:1375/10000 val_loss:4.7220 train_time:310083ms step_avg:225.51ms +[2025-07-17 22:12:35] [Rank 0] step:1381/10000 train_time:310759ms step_avg:225.02ms +[2025-07-17 22:12:35] [Rank 0] step:1381/10000 train_time:310759ms step_avg:225.02ms +[2025-07-17 22:12:40] [Rank 0] step:1401/10000 train_time:315294ms step_avg:225.05ms +[2025-07-17 22:12:40] [Rank 0] step:1401/10000 train_time:315294ms step_avg:225.05ms +[2025-07-17 22:12:44] [Rank 0] step:1421/10000 train_time:319830ms step_avg:225.07ms +[2025-07-17 22:12:44] [Rank 0] step:1421/10000 train_time:319830ms step_avg:225.07ms +[2025-07-17 22:12:49] [Rank 0] step:1441/10000 train_time:324368ms step_avg:225.10ms +[2025-07-17 22:12:49] [Rank 0] step:1441/10000 train_time:324368ms step_avg:225.10ms +[2025-07-17 22:12:53] [Rank 0] step:1461/10000 train_time:328905ms step_avg:225.12ms +[2025-07-17 22:12:53] [Rank 0] step:1461/10000 train_time:328905ms step_avg:225.12ms +[2025-07-17 22:12:58] [Rank 0] step:1481/10000 train_time:333443ms step_avg:225.15ms +[2025-07-17 22:12:58] [Rank 0] step:1481/10000 train_time:333443ms step_avg:225.15ms +[2025-07-17 22:13:07] [Rank 0] PRINT: step:1500/10000 val_loss:4.7872 train_time:338459ms step_avg:225.64ms +[2025-07-17 22:13:07] [Rank 0] PRINT: step:1500/10000 val_loss:4.7872 train_time:338459ms step_avg:225.64ms +[2025-07-17 22:13:07] [Rank 0] step:1501/10000 train_time:338469ms step_avg:225.50ms +[2025-07-17 22:13:07] [Rank 0] step:1501/10000 train_time:338469ms step_avg:225.50ms +[2025-07-17 22:13:11] [Rank 0] step:1521/10000 train_time:342562ms step_avg:225.22ms +[2025-07-17 22:13:11] [Rank 0] 
step:1521/10000 train_time:342562ms step_avg:225.22ms +[2025-07-17 22:13:16] [Rank 0] step:1541/10000 train_time:347124ms step_avg:225.26ms +[2025-07-17 22:13:16] [Rank 0] step:1541/10000 train_time:347124ms step_avg:225.26ms +[2025-07-17 22:13:21] [Rank 0] step:1561/10000 train_time:351687ms step_avg:225.30ms +[2025-07-17 22:13:21] [Rank 0] step:1561/10000 train_time:351687ms step_avg:225.30ms +[2025-07-17 22:13:25] [Rank 0] step:1581/10000 train_time:356252ms step_avg:225.33ms +[2025-07-17 22:13:25] [Rank 0] step:1581/10000 train_time:356252ms step_avg:225.33ms +[2025-07-17 22:13:30] [Rank 0] step:1601/10000 train_time:360816ms step_avg:225.37ms +[2025-07-17 22:13:30] [Rank 0] step:1601/10000 train_time:360816ms step_avg:225.37ms +[2025-07-17 22:13:34] [Rank 0] step:1621/10000 train_time:365382ms step_avg:225.41ms +[2025-07-17 22:13:34] [Rank 0] step:1621/10000 train_time:365382ms step_avg:225.41ms +[2025-07-17 22:13:40] [Rank 0] PRINT: step:1625/10000 val_loss:4.7534 train_time:366984ms step_avg:225.84ms +[2025-07-17 22:13:40] [Rank 0] PRINT: step:1625/10000 val_loss:4.7534 train_time:366984ms step_avg:225.84ms +[2025-07-17 22:13:43] [Rank 0] step:1641/10000 train_time:369944ms step_avg:225.44ms +[2025-07-17 22:13:43] [Rank 0] step:1641/10000 train_time:369944ms step_avg:225.44ms +[2025-07-17 22:13:48] [Rank 0] step:1661/10000 train_time:374507ms step_avg:225.47ms +[2025-07-17 22:13:48] [Rank 0] step:1661/10000 train_time:374507ms step_avg:225.47ms +[2025-07-17 22:13:52] [Rank 0] step:1681/10000 train_time:379074ms step_avg:225.50ms +[2025-07-17 22:13:52] [Rank 0] step:1681/10000 train_time:379074ms step_avg:225.50ms +[2025-07-17 22:13:57] [Rank 0] step:1701/10000 train_time:383638ms step_avg:225.54ms +[2025-07-17 22:13:57] [Rank 0] step:1701/10000 train_time:383638ms step_avg:225.54ms +[2025-07-17 22:14:02] [Rank 0] step:1721/10000 train_time:388204ms step_avg:225.57ms +[2025-07-17 22:14:02] [Rank 0] step:1721/10000 train_time:388204ms step_avg:225.57ms 
+[2025-07-17 22:14:06] [Rank 0] step:1741/10000 train_time:392769ms step_avg:225.60ms +[2025-07-17 22:14:06] [Rank 0] step:1741/10000 train_time:392769ms step_avg:225.60ms +[2025-07-17 22:14:13] [Rank 0] PRINT: step:1750/10000 val_loss:4.7304 train_time:395512ms step_avg:226.01ms +[2025-07-17 22:14:13] [Rank 0] PRINT: step:1750/10000 val_loss:4.7304 train_time:395512ms step_avg:226.01ms +[2025-07-17 22:14:15] [Rank 0] step:1761/10000 train_time:397329ms step_avg:225.63ms +[2025-07-17 22:14:15] [Rank 0] step:1761/10000 train_time:397329ms step_avg:225.63ms +[2025-07-17 22:14:20] [Rank 0] step:1781/10000 train_time:401894ms step_avg:225.66ms +[2025-07-17 22:14:20] [Rank 0] step:1781/10000 train_time:401894ms step_avg:225.66ms +[2025-07-17 22:14:24] [Rank 0] step:1801/10000 train_time:406460ms step_avg:225.69ms +[2025-07-17 22:14:24] [Rank 0] step:1801/10000 train_time:406460ms step_avg:225.69ms +[2025-07-17 22:14:29] [Rank 0] step:1821/10000 train_time:411023ms step_avg:225.71ms +[2025-07-17 22:14:29] [Rank 0] step:1821/10000 train_time:411023ms step_avg:225.71ms +[2025-07-17 22:14:34] [Rank 0] step:1841/10000 train_time:415587ms step_avg:225.74ms +[2025-07-17 22:14:34] [Rank 0] step:1841/10000 train_time:415587ms step_avg:225.74ms +[2025-07-17 22:14:38] [Rank 0] step:1861/10000 train_time:420152ms step_avg:225.77ms +[2025-07-17 22:14:38] [Rank 0] step:1861/10000 train_time:420152ms step_avg:225.77ms +[2025-07-17 22:14:45] [Rank 0] PRINT: step:1875/10000 val_loss:4.6477 train_time:424037ms step_avg:226.15ms +[2025-07-17 22:14:45] [Rank 0] PRINT: step:1875/10000 val_loss:4.6477 train_time:424037ms step_avg:226.15ms +[2025-07-17 22:14:47] [Rank 0] step:1881/10000 train_time:424719ms step_avg:225.79ms +[2025-07-17 22:14:47] [Rank 0] step:1881/10000 train_time:424719ms step_avg:225.79ms +[2025-07-17 22:14:51] [Rank 0] step:1901/10000 train_time:429283ms step_avg:225.82ms +[2025-07-17 22:14:51] [Rank 0] step:1901/10000 train_time:429283ms step_avg:225.82ms +[2025-07-17 
22:14:56] [Rank 0] step:1921/10000 train_time:433853ms step_avg:225.85ms +[2025-07-17 22:14:56] [Rank 0] step:1921/10000 train_time:433853ms step_avg:225.85ms +[2025-07-17 22:15:00] [Rank 0] step:1941/10000 train_time:438418ms step_avg:225.87ms +[2025-07-17 22:15:00] [Rank 0] step:1941/10000 train_time:438418ms step_avg:225.87ms +[2025-07-17 22:15:05] [Rank 0] step:1961/10000 train_time:442985ms step_avg:225.90ms +[2025-07-17 22:15:05] [Rank 0] step:1961/10000 train_time:442985ms step_avg:225.90ms +[2025-07-17 22:15:10] [Rank 0] step:1981/10000 train_time:447553ms step_avg:225.92ms +[2025-07-17 22:15:10] [Rank 0] step:1981/10000 train_time:447553ms step_avg:225.92ms +[2025-07-17 22:15:18] [Rank 0] PRINT: step:2000/10000 val_loss:4.6862 train_time:452577ms step_avg:226.29ms +[2025-07-17 22:15:18] [Rank 0] PRINT: step:2000/10000 val_loss:4.6862 train_time:452577ms step_avg:226.29ms +[2025-07-17 22:15:19] [Rank 0] step:2001/10000 train_time:452588ms step_avg:226.18ms +[2025-07-17 22:15:19] [Rank 0] step:2001/10000 train_time:452588ms step_avg:226.18ms +[2025-07-17 22:15:23] [Rank 0] step:2021/10000 train_time:456677ms step_avg:225.97ms +[2025-07-17 22:15:23] [Rank 0] step:2021/10000 train_time:456677ms step_avg:225.97ms +[2025-07-17 22:15:28] [Rank 0] step:2041/10000 train_time:461309ms step_avg:226.02ms +[2025-07-17 22:15:28] [Rank 0] step:2041/10000 train_time:461309ms step_avg:226.02ms +[2025-07-17 22:15:32] [Rank 0] step:2061/10000 train_time:465875ms step_avg:226.04ms +[2025-07-17 22:15:32] [Rank 0] step:2061/10000 train_time:465875ms step_avg:226.04ms +[2025-07-17 22:15:37] [Rank 0] step:2081/10000 train_time:470440ms step_avg:226.06ms +[2025-07-17 22:15:37] [Rank 0] step:2081/10000 train_time:470440ms step_avg:226.06ms +[2025-07-17 22:15:42] [Rank 0] step:2101/10000 train_time:475007ms step_avg:226.09ms +[2025-07-17 22:15:42] [Rank 0] step:2101/10000 train_time:475007ms step_avg:226.09ms +[2025-07-17 22:15:46] [Rank 0] step:2121/10000 train_time:479572ms 
step_avg:226.11ms +[2025-07-17 22:15:46] [Rank 0] step:2121/10000 train_time:479572ms step_avg:226.11ms +[2025-07-17 22:15:52] [Rank 0] PRINT: step:2125/10000 val_loss:4.8671 train_time:481174ms step_avg:226.43ms +[2025-07-17 22:15:52] [Rank 0] PRINT: step:2125/10000 val_loss:4.8671 train_time:481174ms step_avg:226.43ms +[2025-07-17 22:15:55] [Rank 0] step:2141/10000 train_time:484135ms step_avg:226.13ms +[2025-07-17 22:15:55] [Rank 0] step:2141/10000 train_time:484135ms step_avg:226.13ms +[2025-07-17 22:16:00] [Rank 0] step:2161/10000 train_time:488702ms step_avg:226.15ms +[2025-07-17 22:16:00] [Rank 0] step:2161/10000 train_time:488702ms step_avg:226.15ms +[2025-07-17 22:16:04] [Rank 0] step:2181/10000 train_time:493267ms step_avg:226.17ms +[2025-07-17 22:16:04] [Rank 0] step:2181/10000 train_time:493267ms step_avg:226.17ms +[2025-07-17 22:16:09] [Rank 0] step:2201/10000 train_time:497832ms step_avg:226.18ms +[2025-07-17 22:16:09] [Rank 0] step:2201/10000 train_time:497832ms step_avg:226.18ms +[2025-07-17 22:16:13] [Rank 0] step:2221/10000 train_time:502401ms step_avg:226.20ms +[2025-07-17 22:16:13] [Rank 0] step:2221/10000 train_time:502401ms step_avg:226.20ms +[2025-07-17 22:16:18] [Rank 0] step:2241/10000 train_time:507049ms step_avg:226.26ms +[2025-07-17 22:16:18] [Rank 0] step:2241/10000 train_time:507049ms step_avg:226.26ms +[2025-07-17 22:16:24] [Rank 0] PRINT: step:2250/10000 val_loss:4.3325 train_time:509860ms step_avg:226.60ms +[2025-07-17 22:16:24] [Rank 0] PRINT: step:2250/10000 val_loss:4.3325 train_time:509860ms step_avg:226.60ms +[2025-07-17 22:16:27] [Rank 0] step:2261/10000 train_time:511728ms step_avg:226.33ms +[2025-07-17 22:16:27] [Rank 0] step:2261/10000 train_time:511728ms step_avg:226.33ms +[2025-07-17 22:16:32] [Rank 0] step:2281/10000 train_time:516404ms step_avg:226.39ms +[2025-07-17 22:16:32] [Rank 0] step:2281/10000 train_time:516404ms step_avg:226.39ms +[2025-07-17 22:16:36] [Rank 0] step:2301/10000 train_time:521081ms 
step_avg:226.46ms +[2025-07-17 22:16:36] [Rank 0] step:2301/10000 train_time:521081ms step_avg:226.46ms +[2025-07-17 22:16:41] [Rank 0] step:2321/10000 train_time:525756ms step_avg:226.52ms +[2025-07-17 22:16:41] [Rank 0] step:2321/10000 train_time:525756ms step_avg:226.52ms +[2025-07-17 22:16:46] [Rank 0] step:2341/10000 train_time:530432ms step_avg:226.58ms +[2025-07-17 22:16:46] [Rank 0] step:2341/10000 train_time:530432ms step_avg:226.58ms +[2025-07-17 22:16:50] [Rank 0] step:2361/10000 train_time:535108ms step_avg:226.64ms +[2025-07-17 22:16:50] [Rank 0] step:2361/10000 train_time:535108ms step_avg:226.64ms +[2025-07-17 22:16:58] [Rank 0] PRINT: step:2375/10000 val_loss:4.2234 train_time:539084ms step_avg:226.98ms +[2025-07-17 22:16:58] [Rank 0] PRINT: step:2375/10000 val_loss:4.2234 train_time:539084ms step_avg:226.98ms +[2025-07-17 22:17:00] [Rank 0] step:2381/10000 train_time:539781ms step_avg:226.70ms +[2025-07-17 22:17:00] [Rank 0] step:2381/10000 train_time:539781ms step_avg:226.70ms +[2025-07-17 22:17:04] [Rank 0] step:2401/10000 train_time:544455ms step_avg:226.76ms +[2025-07-17 22:17:04] [Rank 0] step:2401/10000 train_time:544455ms step_avg:226.76ms +[2025-07-17 22:17:09] [Rank 0] step:2421/10000 train_time:549129ms step_avg:226.82ms +[2025-07-17 22:17:09] [Rank 0] step:2421/10000 train_time:549129ms step_avg:226.82ms +[2025-07-17 22:17:14] [Rank 0] step:2441/10000 train_time:553804ms step_avg:226.88ms +[2025-07-17 22:17:14] [Rank 0] step:2441/10000 train_time:553804ms step_avg:226.88ms +[2025-07-17 22:17:18] [Rank 0] step:2461/10000 train_time:558478ms step_avg:226.93ms +[2025-07-17 22:17:18] [Rank 0] step:2461/10000 train_time:558478ms step_avg:226.93ms +[2025-07-17 22:17:23] [Rank 0] step:2481/10000 train_time:563153ms step_avg:226.99ms +[2025-07-17 22:17:23] [Rank 0] step:2481/10000 train_time:563153ms step_avg:226.99ms +[2025-07-17 22:17:32] [Rank 0] PRINT: step:2500/10000 val_loss:4.2501 train_time:568297ms step_avg:227.32ms +[2025-07-17 
22:17:32] [Rank 0] PRINT: step:2500/10000 val_loss:4.2501 train_time:568297ms step_avg:227.32ms +[2025-07-17 22:17:32] [Rank 0] step:2501/10000 train_time:568308ms step_avg:227.23ms +[2025-07-17 22:17:32] [Rank 0] step:2501/10000 train_time:568308ms step_avg:227.23ms +[2025-07-17 22:17:37] [Rank 0] step:2521/10000 train_time:572502ms step_avg:227.09ms +[2025-07-17 22:17:37] [Rank 0] step:2521/10000 train_time:572502ms step_avg:227.09ms +[2025-07-17 22:17:42] [Rank 0] step:2541/10000 train_time:577180ms step_avg:227.15ms +[2025-07-17 22:17:42] [Rank 0] step:2541/10000 train_time:577180ms step_avg:227.15ms +[2025-07-17 22:17:46] [Rank 0] step:2561/10000 train_time:581900ms step_avg:227.22ms +[2025-07-17 22:17:46] [Rank 0] step:2561/10000 train_time:581900ms step_avg:227.22ms +[2025-07-17 22:17:51] [Rank 0] step:2581/10000 train_time:586576ms step_avg:227.27ms +[2025-07-17 22:17:51] [Rank 0] step:2581/10000 train_time:586576ms step_avg:227.27ms +[2025-07-17 22:17:56] [Rank 0] step:2601/10000 train_time:591254ms step_avg:227.32ms +[2025-07-17 22:17:56] [Rank 0] step:2601/10000 train_time:591254ms step_avg:227.32ms +[2025-07-17 22:18:00] [Rank 0] step:2621/10000 train_time:595929ms step_avg:227.37ms +[2025-07-17 22:18:00] [Rank 0] step:2621/10000 train_time:595929ms step_avg:227.37ms +[2025-07-17 22:18:06] [Rank 0] PRINT: step:2625/10000 val_loss:4.3267 train_time:597572ms step_avg:227.65ms +[2025-07-17 22:18:06] [Rank 0] PRINT: step:2625/10000 val_loss:4.3267 train_time:597572ms step_avg:227.65ms +[2025-07-17 22:18:10] [Rank 0] step:2641/10000 train_time:600603ms step_avg:227.42ms +[2025-07-17 22:18:10] [Rank 0] step:2641/10000 train_time:600603ms step_avg:227.42ms +[2025-07-17 22:18:14] [Rank 0] step:2661/10000 train_time:605280ms step_avg:227.46ms +[2025-07-17 22:18:14] [Rank 0] step:2661/10000 train_time:605280ms step_avg:227.46ms +[2025-07-17 22:18:19] [Rank 0] step:2681/10000 train_time:609957ms step_avg:227.51ms +[2025-07-17 22:18:19] [Rank 0] step:2681/10000 
train_time:609957ms step_avg:227.51ms +[2025-07-17 22:18:24] [Rank 0] step:2701/10000 train_time:614631ms step_avg:227.56ms +[2025-07-17 22:18:24] [Rank 0] step:2701/10000 train_time:614631ms step_avg:227.56ms +[2025-07-17 22:18:28] [Rank 0] step:2721/10000 train_time:619303ms step_avg:227.60ms +[2025-07-17 22:18:28] [Rank 0] step:2721/10000 train_time:619303ms step_avg:227.60ms +[2025-07-17 22:18:33] [Rank 0] step:2741/10000 train_time:623974ms step_avg:227.64ms +[2025-07-17 22:18:33] [Rank 0] step:2741/10000 train_time:623974ms step_avg:227.64ms +[2025-07-17 22:18:40] [Rank 0] PRINT: step:2750/10000 val_loss:4.3620 train_time:626781ms step_avg:227.92ms +[2025-07-17 22:18:40] [Rank 0] PRINT: step:2750/10000 val_loss:4.3620 train_time:626781ms step_avg:227.92ms +[2025-07-17 22:18:42] [Rank 0] step:2761/10000 train_time:628644ms step_avg:227.69ms +[2025-07-17 22:18:42] [Rank 0] step:2761/10000 train_time:628644ms step_avg:227.69ms +[2025-07-17 22:18:47] [Rank 0] step:2781/10000 train_time:633309ms step_avg:227.73ms +[2025-07-17 22:18:47] [Rank 0] step:2781/10000 train_time:633309ms step_avg:227.73ms +[2025-07-17 22:18:52] [Rank 0] step:2801/10000 train_time:637977ms step_avg:227.77ms +[2025-07-17 22:18:52] [Rank 0] step:2801/10000 train_time:637977ms step_avg:227.77ms +[2025-07-17 22:18:56] [Rank 0] step:2821/10000 train_time:642642ms step_avg:227.81ms +[2025-07-17 22:18:56] [Rank 0] step:2821/10000 train_time:642642ms step_avg:227.81ms +[2025-07-17 22:19:01] [Rank 0] step:2841/10000 train_time:647308ms step_avg:227.84ms +[2025-07-17 22:19:01] [Rank 0] step:2841/10000 train_time:647308ms step_avg:227.84ms +[2025-07-17 22:19:06] [Rank 0] step:2861/10000 train_time:651976ms step_avg:227.88ms +[2025-07-17 22:19:06] [Rank 0] step:2861/10000 train_time:651976ms step_avg:227.88ms +[2025-07-17 22:19:13] [Rank 0] PRINT: step:2875/10000 val_loss:4.3842 train_time:655948ms step_avg:228.16ms +[2025-07-17 22:19:13] [Rank 0] PRINT: step:2875/10000 val_loss:4.3842 
train_time:655948ms step_avg:228.16ms +[2025-07-17 22:19:15] [Rank 0] step:2881/10000 train_time:656644ms step_avg:227.92ms +[2025-07-17 22:19:15] [Rank 0] step:2881/10000 train_time:656644ms step_avg:227.92ms +[2025-07-17 22:19:19] [Rank 0] step:2901/10000 train_time:661311ms step_avg:227.96ms +[2025-07-17 22:19:19] [Rank 0] step:2901/10000 train_time:661311ms step_avg:227.96ms +[2025-07-17 22:19:24] [Rank 0] step:2921/10000 train_time:665979ms step_avg:228.00ms +[2025-07-17 22:19:24] [Rank 0] step:2921/10000 train_time:665979ms step_avg:228.00ms +[2025-07-17 22:19:29] [Rank 0] step:2941/10000 train_time:670651ms step_avg:228.03ms +[2025-07-17 22:19:29] [Rank 0] step:2941/10000 train_time:670651ms step_avg:228.03ms +[2025-07-17 22:19:33] [Rank 0] step:2961/10000 train_time:675321ms step_avg:228.07ms +[2025-07-17 22:19:33] [Rank 0] step:2961/10000 train_time:675321ms step_avg:228.07ms +[2025-07-17 22:19:38] [Rank 0] step:2981/10000 train_time:680006ms step_avg:228.11ms +[2025-07-17 22:19:38] [Rank 0] step:2981/10000 train_time:680006ms step_avg:228.11ms +[2025-07-17 22:19:47] [Rank 0] PRINT: step:3000/10000 val_loss:4.3316 train_time:685166ms step_avg:228.39ms +[2025-07-17 22:19:47] [Rank 0] PRINT: step:3000/10000 val_loss:4.3316 train_time:685166ms step_avg:228.39ms +[2025-07-17 22:19:47] [Rank 0] step:3001/10000 train_time:685177ms step_avg:228.32ms +[2025-07-17 22:19:47] [Rank 0] step:3001/10000 train_time:685177ms step_avg:228.32ms +[2025-07-17 22:19:52] [Rank 0] step:3021/10000 train_time:689382ms step_avg:228.20ms +[2025-07-17 22:19:52] [Rank 0] step:3021/10000 train_time:689382ms step_avg:228.20ms +[2025-07-17 22:19:57] [Rank 0] step:3041/10000 train_time:694075ms step_avg:228.24ms +[2025-07-17 22:19:57] [Rank 0] step:3041/10000 train_time:694075ms step_avg:228.24ms +[2025-07-17 22:20:02] [Rank 0] step:3061/10000 train_time:698821ms step_avg:228.30ms +[2025-07-17 22:20:02] [Rank 0] step:3061/10000 train_time:698821ms step_avg:228.30ms +[2025-07-17 22:20:06] 
[Rank 0] step:3081/10000 train_time:703518ms step_avg:228.34ms +[2025-07-17 22:20:06] [Rank 0] step:3081/10000 train_time:703518ms step_avg:228.34ms +[2025-07-17 22:20:11] [Rank 0] step:3101/10000 train_time:708213ms step_avg:228.38ms +[2025-07-17 22:20:11] [Rank 0] step:3101/10000 train_time:708213ms step_avg:228.38ms +[2025-07-17 22:20:16] [Rank 0] step:3121/10000 train_time:712908ms step_avg:228.42ms +[2025-07-17 22:20:16] [Rank 0] step:3121/10000 train_time:712908ms step_avg:228.42ms +[2025-07-17 22:20:21] [Rank 0] PRINT: step:3125/10000 val_loss:4.3294 train_time:714559ms step_avg:228.66ms +[2025-07-17 22:20:21] [Rank 0] PRINT: step:3125/10000 val_loss:4.3294 train_time:714559ms step_avg:228.66ms +[2025-07-17 22:20:25] [Rank 0] step:3141/10000 train_time:717605ms step_avg:228.46ms +[2025-07-17 22:20:25] [Rank 0] step:3141/10000 train_time:717605ms step_avg:228.46ms +[2025-07-17 22:20:29] [Rank 0] step:3161/10000 train_time:722300ms step_avg:228.50ms +[2025-07-17 22:20:29] [Rank 0] step:3161/10000 train_time:722300ms step_avg:228.50ms +[2025-07-17 22:20:34] [Rank 0] step:3181/10000 train_time:726997ms step_avg:228.54ms +[2025-07-17 22:20:34] [Rank 0] step:3181/10000 train_time:726997ms step_avg:228.54ms +[2025-07-17 22:20:39] [Rank 0] step:3201/10000 train_time:731694ms step_avg:228.58ms +[2025-07-17 22:20:39] [Rank 0] step:3201/10000 train_time:731694ms step_avg:228.58ms +[2025-07-17 22:20:43] [Rank 0] step:3221/10000 train_time:736393ms step_avg:228.62ms +[2025-07-17 22:20:43] [Rank 0] step:3221/10000 train_time:736393ms step_avg:228.62ms +[2025-07-17 22:20:48] [Rank 0] step:3241/10000 train_time:741091ms step_avg:228.66ms +[2025-07-17 22:20:48] [Rank 0] step:3241/10000 train_time:741091ms step_avg:228.66ms +[2025-07-17 22:20:55] [Rank 0] PRINT: step:3250/10000 val_loss:4.2346 train_time:743914ms step_avg:228.90ms +[2025-07-17 22:20:55] [Rank 0] PRINT: step:3250/10000 val_loss:4.2346 train_time:743914ms step_avg:228.90ms +[2025-07-17 22:20:57] [Rank 0] 
step:3261/10000 train_time:745787ms step_avg:228.70ms +[2025-07-17 22:20:57] [Rank 0] step:3261/10000 train_time:745787ms step_avg:228.70ms +[2025-07-17 22:21:02] [Rank 0] step:3281/10000 train_time:750484ms step_avg:228.74ms +[2025-07-17 22:21:02] [Rank 0] step:3281/10000 train_time:750484ms step_avg:228.74ms +[2025-07-17 22:21:07] [Rank 0] step:3301/10000 train_time:755185ms step_avg:228.77ms +[2025-07-17 22:21:07] [Rank 0] step:3301/10000 train_time:755185ms step_avg:228.77ms +[2025-07-17 22:21:11] [Rank 0] step:3321/10000 train_time:759888ms step_avg:228.81ms +[2025-07-17 22:21:11] [Rank 0] step:3321/10000 train_time:759888ms step_avg:228.81ms +[2025-07-17 22:21:16] [Rank 0] step:3341/10000 train_time:764591ms step_avg:228.85ms +[2025-07-17 22:21:16] [Rank 0] step:3341/10000 train_time:764591ms step_avg:228.85ms +[2025-07-17 22:21:21] [Rank 0] step:3361/10000 train_time:769293ms step_avg:228.89ms +[2025-07-17 22:21:21] [Rank 0] step:3361/10000 train_time:769293ms step_avg:228.89ms +[2025-07-17 22:21:29] [Rank 0] PRINT: step:3375/10000 val_loss:4.2859 train_time:773292ms step_avg:229.12ms +[2025-07-17 22:21:29] [Rank 0] PRINT: step:3375/10000 val_loss:4.2859 train_time:773292ms step_avg:229.12ms +[2025-07-17 22:21:30] [Rank 0] step:3381/10000 train_time:773992ms step_avg:228.92ms +[2025-07-17 22:21:30] [Rank 0] step:3381/10000 train_time:773992ms step_avg:228.92ms +[2025-07-17 22:21:35] [Rank 0] step:3401/10000 train_time:778693ms step_avg:228.96ms +[2025-07-17 22:21:35] [Rank 0] step:3401/10000 train_time:778693ms step_avg:228.96ms +[2025-07-17 22:21:39] [Rank 0] step:3421/10000 train_time:783401ms step_avg:229.00ms +[2025-07-17 22:21:39] [Rank 0] step:3421/10000 train_time:783401ms step_avg:229.00ms +[2025-07-17 22:21:44] [Rank 0] step:3441/10000 train_time:788104ms step_avg:229.03ms +[2025-07-17 22:21:44] [Rank 0] step:3441/10000 train_time:788104ms step_avg:229.03ms +[2025-07-17 22:21:49] [Rank 0] step:3461/10000 train_time:792811ms step_avg:229.07ms 
+[2025-07-17 22:21:49] [Rank 0] step:3461/10000 train_time:792811ms step_avg:229.07ms +[2025-07-17 22:21:54] [Rank 0] step:3481/10000 train_time:797514ms step_avg:229.10ms +[2025-07-17 22:21:54] [Rank 0] step:3481/10000 train_time:797514ms step_avg:229.10ms +[2025-07-17 22:22:02] [Rank 0] PRINT: step:3500/10000 val_loss:4.3410 train_time:802690ms step_avg:229.34ms +[2025-07-17 22:22:02] [Rank 0] PRINT: step:3500/10000 val_loss:4.3410 train_time:802690ms step_avg:229.34ms +[2025-07-17 22:22:02] [Rank 0] step:3501/10000 train_time:802700ms step_avg:229.28ms +[2025-07-17 22:22:02] [Rank 0] step:3501/10000 train_time:802700ms step_avg:229.28ms +[2025-07-17 22:22:07] [Rank 0] step:3521/10000 train_time:806926ms step_avg:229.18ms +[2025-07-17 22:22:07] [Rank 0] step:3521/10000 train_time:806926ms step_avg:229.18ms +[2025-07-17 22:22:12] [Rank 0] step:3541/10000 train_time:811635ms step_avg:229.21ms +[2025-07-17 22:22:12] [Rank 0] step:3541/10000 train_time:811635ms step_avg:229.21ms +[2025-07-17 22:22:17] [Rank 0] step:3561/10000 train_time:816389ms step_avg:229.26ms +[2025-07-17 22:22:17] [Rank 0] step:3561/10000 train_time:816389ms step_avg:229.26ms +[2025-07-17 22:22:21] [Rank 0] step:3581/10000 train_time:821098ms step_avg:229.29ms +[2025-07-17 22:22:21] [Rank 0] step:3581/10000 train_time:821098ms step_avg:229.29ms +[2025-07-17 22:22:26] [Rank 0] step:3601/10000 train_time:825807ms step_avg:229.33ms +[2025-07-17 22:22:26] [Rank 0] step:3601/10000 train_time:825807ms step_avg:229.33ms +[2025-07-17 22:22:31] [Rank 0] step:3621/10000 train_time:830517ms step_avg:229.36ms +[2025-07-17 22:22:31] [Rank 0] step:3621/10000 train_time:830517ms step_avg:229.36ms +[2025-07-17 22:22:36] [Rank 0] PRINT: step:3625/10000 val_loss:4.4750 train_time:832169ms step_avg:229.56ms +[2025-07-17 22:22:36] [Rank 0] PRINT: step:3625/10000 val_loss:4.4750 train_time:832169ms step_avg:229.56ms +[2025-07-17 22:22:40] [Rank 0] step:3641/10000 train_time:835219ms step_avg:229.39ms +[2025-07-17 
22:22:40] [Rank 0] step:3641/10000 train_time:835219ms step_avg:229.39ms +[2025-07-17 22:22:45] [Rank 0] step:3661/10000 train_time:839924ms step_avg:229.42ms +[2025-07-17 22:22:45] [Rank 0] step:3661/10000 train_time:839924ms step_avg:229.42ms +[2025-07-17 22:22:49] [Rank 0] step:3681/10000 train_time:844629ms step_avg:229.46ms +[2025-07-17 22:22:49] [Rank 0] step:3681/10000 train_time:844629ms step_avg:229.46ms +[2025-07-17 22:22:54] [Rank 0] step:3701/10000 train_time:849336ms step_avg:229.49ms +[2025-07-17 22:22:54] [Rank 0] step:3701/10000 train_time:849336ms step_avg:229.49ms +[2025-07-17 22:22:59] [Rank 0] step:3721/10000 train_time:854106ms step_avg:229.54ms +[2025-07-17 22:22:59] [Rank 0] step:3721/10000 train_time:854106ms step_avg:229.54ms +[2025-07-17 22:23:04] [Rank 0] step:3741/10000 train_time:858900ms step_avg:229.59ms +[2025-07-17 22:23:04] [Rank 0] step:3741/10000 train_time:858900ms step_avg:229.59ms +[2025-07-17 22:23:10] [Rank 0] PRINT: step:3750/10000 val_loss:4.3906 train_time:861781ms step_avg:229.81ms +[2025-07-17 22:23:10] [Rank 0] PRINT: step:3750/10000 val_loss:4.3906 train_time:861781ms step_avg:229.81ms +[2025-07-17 22:23:13] [Rank 0] step:3761/10000 train_time:863694ms step_avg:229.64ms +[2025-07-17 22:23:13] [Rank 0] step:3761/10000 train_time:863694ms step_avg:229.64ms +[2025-07-17 22:23:18] [Rank 0] step:3781/10000 train_time:868486ms step_avg:229.70ms +[2025-07-17 22:23:18] [Rank 0] step:3781/10000 train_time:868486ms step_avg:229.70ms +[2025-07-17 22:23:22] [Rank 0] step:3801/10000 train_time:873282ms step_avg:229.75ms +[2025-07-17 22:23:22] [Rank 0] step:3801/10000 train_time:873282ms step_avg:229.75ms +[2025-07-17 22:23:27] [Rank 0] step:3821/10000 train_time:878079ms step_avg:229.80ms +[2025-07-17 22:23:27] [Rank 0] step:3821/10000 train_time:878079ms step_avg:229.80ms +[2025-07-17 22:23:32] [Rank 0] step:3841/10000 train_time:882876ms step_avg:229.86ms +[2025-07-17 22:23:32] [Rank 0] step:3841/10000 train_time:882876ms 
step_avg:229.86ms +[2025-07-17 22:23:37] [Rank 0] step:3861/10000 train_time:887669ms step_avg:229.91ms +[2025-07-17 22:23:37] [Rank 0] step:3861/10000 train_time:887669ms step_avg:229.91ms +[2025-07-17 22:23:44] [Rank 0] PRINT: step:3875/10000 val_loss:4.3547 train_time:891746ms step_avg:230.13ms +[2025-07-17 22:23:44] [Rank 0] PRINT: step:3875/10000 val_loss:4.3547 train_time:891746ms step_avg:230.13ms +[2025-07-17 22:23:46] [Rank 0] step:3881/10000 train_time:892458ms step_avg:229.96ms +[2025-07-17 22:23:46] [Rank 0] step:3881/10000 train_time:892458ms step_avg:229.96ms +[2025-07-17 22:23:51] [Rank 0] step:3901/10000 train_time:897248ms step_avg:230.00ms +[2025-07-17 22:23:51] [Rank 0] step:3901/10000 train_time:897248ms step_avg:230.00ms +[2025-07-17 22:23:55] [Rank 0] step:3921/10000 train_time:902034ms step_avg:230.05ms +[2025-07-17 22:23:55] [Rank 0] step:3921/10000 train_time:902034ms step_avg:230.05ms +[2025-07-17 22:24:00] [Rank 0] step:3941/10000 train_time:906823ms step_avg:230.10ms +[2025-07-17 22:24:00] [Rank 0] step:3941/10000 train_time:906823ms step_avg:230.10ms +[2025-07-17 22:24:05] [Rank 0] step:3961/10000 train_time:911611ms step_avg:230.15ms +[2025-07-17 22:24:05] [Rank 0] step:3961/10000 train_time:911611ms step_avg:230.15ms +[2025-07-17 22:24:10] [Rank 0] step:3981/10000 train_time:916403ms step_avg:230.19ms +[2025-07-17 22:24:10] [Rank 0] step:3981/10000 train_time:916403ms step_avg:230.19ms +[2025-07-17 22:24:19] [Rank 0] PRINT: step:4000/10000 val_loss:4.1461 train_time:921672ms step_avg:230.42ms +[2025-07-17 22:24:19] [Rank 0] PRINT: step:4000/10000 val_loss:4.1461 train_time:921672ms step_avg:230.42ms +[2025-07-17 22:24:19] [Rank 0] step:4001/10000 train_time:921683ms step_avg:230.36ms +[2025-07-17 22:24:19] [Rank 0] step:4001/10000 train_time:921683ms step_avg:230.36ms +[2025-07-17 22:24:24] [Rank 0] step:4021/10000 train_time:925981ms step_avg:230.29ms +[2025-07-17 22:24:24] [Rank 0] step:4021/10000 train_time:925981ms 
step_avg:230.29ms +[2025-07-17 22:24:29] [Rank 0] step:4041/10000 train_time:930769ms step_avg:230.33ms +[2025-07-17 22:24:29] [Rank 0] step:4041/10000 train_time:930769ms step_avg:230.33ms +[2025-07-17 22:24:34] [Rank 0] step:4061/10000 train_time:935565ms step_avg:230.38ms +[2025-07-17 22:24:34] [Rank 0] step:4061/10000 train_time:935565ms step_avg:230.38ms +[2025-07-17 22:24:38] [Rank 0] step:4081/10000 train_time:940358ms step_avg:230.42ms +[2025-07-17 22:24:38] [Rank 0] step:4081/10000 train_time:940358ms step_avg:230.42ms +[2025-07-17 22:24:43] [Rank 0] step:4101/10000 train_time:945151ms step_avg:230.47ms +[2025-07-17 22:24:43] [Rank 0] step:4101/10000 train_time:945151ms step_avg:230.47ms +[2025-07-17 22:24:48] [Rank 0] step:4121/10000 train_time:949947ms step_avg:230.51ms +[2025-07-17 22:24:48] [Rank 0] step:4121/10000 train_time:949947ms step_avg:230.51ms +[2025-07-17 22:24:53] [Rank 0] PRINT: step:4125/10000 val_loss:4.2777 train_time:951631ms step_avg:230.70ms +[2025-07-17 22:24:53] [Rank 0] PRINT: step:4125/10000 val_loss:4.2777 train_time:951631ms step_avg:230.70ms +[2025-07-17 22:24:57] [Rank 0] step:4141/10000 train_time:954735ms step_avg:230.56ms +[2025-07-17 22:24:57] [Rank 0] step:4141/10000 train_time:954735ms step_avg:230.56ms +[2025-07-17 22:25:02] [Rank 0] step:4161/10000 train_time:959524ms step_avg:230.60ms +[2025-07-17 22:25:02] [Rank 0] step:4161/10000 train_time:959524ms step_avg:230.60ms +[2025-07-17 22:25:07] [Rank 0] step:4181/10000 train_time:964313ms step_avg:230.64ms +[2025-07-17 22:25:07] [Rank 0] step:4181/10000 train_time:964313ms step_avg:230.64ms +[2025-07-17 22:25:12] [Rank 0] step:4201/10000 train_time:969103ms step_avg:230.68ms +[2025-07-17 22:25:12] [Rank 0] step:4201/10000 train_time:969103ms step_avg:230.68ms +[2025-07-17 22:25:16] [Rank 0] step:4221/10000 train_time:973890ms step_avg:230.72ms +[2025-07-17 22:25:16] [Rank 0] step:4221/10000 train_time:973890ms step_avg:230.72ms +[2025-07-17 22:25:21] [Rank 0] 
step:4241/10000 train_time:978679ms step_avg:230.77ms +[2025-07-17 22:25:21] [Rank 0] step:4241/10000 train_time:978679ms step_avg:230.77ms +[2025-07-17 22:25:28] [Rank 0] PRINT: step:4250/10000 val_loss:4.3533 train_time:981554ms step_avg:230.95ms +[2025-07-17 22:25:28] [Rank 0] PRINT: step:4250/10000 val_loss:4.3533 train_time:981554ms step_avg:230.95ms +[2025-07-17 22:25:31] [Rank 0] step:4261/10000 train_time:983464ms step_avg:230.81ms +[2025-07-17 22:25:31] [Rank 0] step:4261/10000 train_time:983464ms step_avg:230.81ms +[2025-07-17 22:25:35] [Rank 0] step:4281/10000 train_time:988254ms step_avg:230.85ms +[2025-07-17 22:25:35] [Rank 0] step:4281/10000 train_time:988254ms step_avg:230.85ms +[2025-07-17 22:25:40] [Rank 0] step:4301/10000 train_time:993044ms step_avg:230.89ms +[2025-07-17 22:25:40] [Rank 0] step:4301/10000 train_time:993044ms step_avg:230.89ms +[2025-07-17 22:25:45] [Rank 0] step:4321/10000 train_time:997835ms step_avg:230.93ms +[2025-07-17 22:25:45] [Rank 0] step:4321/10000 train_time:997835ms step_avg:230.93ms +[2025-07-17 22:25:50] [Rank 0] step:4341/10000 train_time:1002625ms step_avg:230.97ms +[2025-07-17 22:25:50] [Rank 0] step:4341/10000 train_time:1002625ms step_avg:230.97ms +[2025-07-17 22:25:55] [Rank 0] step:4361/10000 train_time:1007415ms step_avg:231.01ms +[2025-07-17 22:25:55] [Rank 0] step:4361/10000 train_time:1007415ms step_avg:231.01ms +[2025-07-17 22:26:03] [Rank 0] PRINT: step:4375/10000 val_loss:4.4274 train_time:1011492ms step_avg:231.20ms +[2025-07-17 22:26:03] [Rank 0] PRINT: step:4375/10000 val_loss:4.4274 train_time:1011492ms step_avg:231.20ms +[2025-07-17 22:26:04] [Rank 0] step:4381/10000 train_time:1012206ms step_avg:231.04ms +[2025-07-17 22:26:04] [Rank 0] step:4381/10000 train_time:1012206ms step_avg:231.04ms +[2025-07-17 22:26:09] [Rank 0] step:4401/10000 train_time:1017000ms step_avg:231.08ms +[2025-07-17 22:26:09] [Rank 0] step:4401/10000 train_time:1017000ms step_avg:231.08ms +[2025-07-17 22:26:14] [Rank 0] 
step:4421/10000 train_time:1021795ms step_avg:231.12ms +[2025-07-17 22:26:14] [Rank 0] step:4421/10000 train_time:1021795ms step_avg:231.12ms +[2025-07-17 22:26:18] [Rank 0] step:4441/10000 train_time:1026586ms step_avg:231.16ms +[2025-07-17 22:26:18] [Rank 0] step:4441/10000 train_time:1026586ms step_avg:231.16ms +[2025-07-17 22:26:23] [Rank 0] step:4461/10000 train_time:1031387ms step_avg:231.20ms +[2025-07-17 22:26:23] [Rank 0] step:4461/10000 train_time:1031387ms step_avg:231.20ms +[2025-07-17 22:26:28] [Rank 0] step:4481/10000 train_time:1036196ms step_avg:231.24ms +[2025-07-17 22:26:28] [Rank 0] step:4481/10000 train_time:1036196ms step_avg:231.24ms +[2025-07-17 22:26:37] [Rank 0] PRINT: step:4500/10000 val_loss:4.4003 train_time:1041486ms step_avg:231.44ms +[2025-07-17 22:26:37] [Rank 0] PRINT: step:4500/10000 val_loss:4.4003 train_time:1041486ms step_avg:231.44ms +[2025-07-17 22:26:38] [Rank 0] step:4501/10000 train_time:1041497ms step_avg:231.39ms +[2025-07-17 22:26:38] [Rank 0] step:4501/10000 train_time:1041497ms step_avg:231.39ms +[2025-07-17 22:26:42] [Rank 0] step:4521/10000 train_time:1045811ms step_avg:231.32ms +[2025-07-17 22:26:42] [Rank 0] step:4521/10000 train_time:1045811ms step_avg:231.32ms +[2025-07-17 22:26:47] [Rank 0] step:4541/10000 train_time:1050612ms step_avg:231.36ms +[2025-07-17 22:26:47] [Rank 0] step:4541/10000 train_time:1050612ms step_avg:231.36ms +[2025-07-17 22:26:52] [Rank 0] step:4561/10000 train_time:1055411ms step_avg:231.40ms +[2025-07-17 22:26:52] [Rank 0] step:4561/10000 train_time:1055411ms step_avg:231.40ms +[2025-07-17 22:26:57] [Rank 0] step:4581/10000 train_time:1060215ms step_avg:231.44ms +[2025-07-17 22:26:57] [Rank 0] step:4581/10000 train_time:1060215ms step_avg:231.44ms +[2025-07-17 22:27:02] [Rank 0] step:4601/10000 train_time:1065020ms step_avg:231.48ms +[2025-07-17 22:27:02] [Rank 0] step:4601/10000 train_time:1065020ms step_avg:231.48ms +[2025-07-17 22:27:06] [Rank 0] step:4621/10000 train_time:1069821ms 
step_avg:231.51ms +[2025-07-17 22:27:06] [Rank 0] step:4621/10000 train_time:1069821ms step_avg:231.51ms +[2025-07-17 22:27:12] [Rank 0] PRINT: step:4625/10000 val_loss:4.4567 train_time:1071511ms step_avg:231.68ms +[2025-07-17 22:27:12] [Rank 0] PRINT: step:4625/10000 val_loss:4.4567 train_time:1071511ms step_avg:231.68ms +[2025-07-17 22:27:16] [Rank 0] step:4641/10000 train_time:1074631ms step_avg:231.55ms +[2025-07-17 22:27:16] [Rank 0] step:4641/10000 train_time:1074631ms step_avg:231.55ms +[2025-07-17 22:27:21] [Rank 0] step:4661/10000 train_time:1079443ms step_avg:231.59ms +[2025-07-17 22:27:21] [Rank 0] step:4661/10000 train_time:1079443ms step_avg:231.59ms +[2025-07-17 22:27:25] [Rank 0] step:4681/10000 train_time:1084248ms step_avg:231.63ms +[2025-07-17 22:27:25] [Rank 0] step:4681/10000 train_time:1084248ms step_avg:231.63ms +[2025-07-17 22:27:30] [Rank 0] step:4701/10000 train_time:1089051ms step_avg:231.66ms +[2025-07-17 22:27:30] [Rank 0] step:4701/10000 train_time:1089051ms step_avg:231.66ms +[2025-07-17 22:27:35] [Rank 0] step:4721/10000 train_time:1093850ms step_avg:231.70ms +[2025-07-17 22:27:35] [Rank 0] step:4721/10000 train_time:1093850ms step_avg:231.70ms +[2025-07-17 22:27:40] [Rank 0] step:4741/10000 train_time:1098652ms step_avg:231.73ms +[2025-07-17 22:27:40] [Rank 0] step:4741/10000 train_time:1098652ms step_avg:231.73ms +[2025-07-17 22:27:46] [Rank 0] PRINT: step:4750/10000 val_loss:4.3116 train_time:1101537ms step_avg:231.90ms +[2025-07-17 22:27:46] [Rank 0] PRINT: step:4750/10000 val_loss:4.3116 train_time:1101537ms step_avg:231.90ms +[2025-07-17 22:27:49] [Rank 0] step:4761/10000 train_time:1103450ms step_avg:231.77ms +[2025-07-17 22:27:49] [Rank 0] step:4761/10000 train_time:1103450ms step_avg:231.77ms +[2025-07-17 22:27:54] [Rank 0] step:4781/10000 train_time:1108249ms step_avg:231.80ms +[2025-07-17 22:27:54] [Rank 0] step:4781/10000 train_time:1108249ms step_avg:231.80ms +[2025-07-17 22:27:59] [Rank 0] step:4801/10000 
train_time:1113043ms step_avg:231.84ms +[2025-07-17 22:27:59] [Rank 0] step:4801/10000 train_time:1113043ms step_avg:231.84ms +[2025-07-17 22:28:03] [Rank 0] step:4821/10000 train_time:1117839ms step_avg:231.87ms +[2025-07-17 22:28:03] [Rank 0] step:4821/10000 train_time:1117839ms step_avg:231.87ms +[2025-07-17 22:28:08] [Rank 0] step:4841/10000 train_time:1122642ms step_avg:231.90ms +[2025-07-17 22:28:08] [Rank 0] step:4841/10000 train_time:1122642ms step_avg:231.90ms +[2025-07-17 22:28:13] [Rank 0] step:4861/10000 train_time:1127440ms step_avg:231.94ms +[2025-07-17 22:28:13] [Rank 0] step:4861/10000 train_time:1127440ms step_avg:231.94ms +[2025-07-17 22:28:21] [Rank 0] PRINT: step:4875/10000 val_loss:4.4345 train_time:1131523ms step_avg:232.11ms +[2025-07-17 22:28:21] [Rank 0] PRINT: step:4875/10000 val_loss:4.4345 train_time:1131523ms step_avg:232.11ms +[2025-07-17 22:28:22] [Rank 0] step:4881/10000 train_time:1132241ms step_avg:231.97ms +[2025-07-17 22:28:22] [Rank 0] step:4881/10000 train_time:1132241ms step_avg:231.97ms +[2025-07-17 22:28:27] [Rank 0] step:4901/10000 train_time:1137048ms step_avg:232.00ms +[2025-07-17 22:28:27] [Rank 0] step:4901/10000 train_time:1137048ms step_avg:232.00ms +[2025-07-17 22:28:32] [Rank 0] step:4921/10000 train_time:1141848ms step_avg:232.04ms +[2025-07-17 22:28:32] [Rank 0] step:4921/10000 train_time:1141848ms step_avg:232.04ms +[2025-07-17 22:28:37] [Rank 0] step:4941/10000 train_time:1146657ms step_avg:232.07ms +[2025-07-17 22:28:37] [Rank 0] step:4941/10000 train_time:1146657ms step_avg:232.07ms +[2025-07-17 22:28:42] [Rank 0] step:4961/10000 train_time:1151461ms step_avg:232.10ms +[2025-07-17 22:28:42] [Rank 0] step:4961/10000 train_time:1151461ms step_avg:232.10ms +[2025-07-17 22:28:46] [Rank 0] step:4981/10000 train_time:1156261ms step_avg:232.13ms +[2025-07-17 22:28:46] [Rank 0] step:4981/10000 train_time:1156261ms step_avg:232.13ms +[2025-07-17 22:28:56] [Rank 0] PRINT: step:5000/10000 val_loss:4.4373 
train_time:1161544ms step_avg:232.31ms +[2025-07-17 22:28:56] [Rank 0] PRINT: step:5000/10000 val_loss:4.4373 train_time:1161544ms step_avg:232.31ms +[2025-07-17 22:28:56] [Rank 0] step:5001/10000 train_time:1161555ms step_avg:232.26ms +[2025-07-17 22:28:56] [Rank 0] step:5001/10000 train_time:1161555ms step_avg:232.26ms +[2025-07-17 22:29:01] [Rank 0] step:5021/10000 train_time:1165851ms step_avg:232.20ms +[2025-07-17 22:29:01] [Rank 0] step:5021/10000 train_time:1165851ms step_avg:232.20ms +[2025-07-17 22:29:05] [Rank 0] step:5041/10000 train_time:1170644ms step_avg:232.22ms +[2025-07-17 22:29:05] [Rank 0] step:5041/10000 train_time:1170644ms step_avg:232.22ms +[2025-07-17 22:29:10] [Rank 0] step:5061/10000 train_time:1175437ms step_avg:232.25ms +[2025-07-17 22:29:10] [Rank 0] step:5061/10000 train_time:1175437ms step_avg:232.25ms +[2025-07-17 22:29:15] [Rank 0] step:5081/10000 train_time:1180334ms step_avg:232.30ms +[2025-07-17 22:29:15] [Rank 0] step:5081/10000 train_time:1180334ms step_avg:232.30ms +[2025-07-17 22:29:20] [Rank 0] step:5101/10000 train_time:1185046ms step_avg:232.32ms +[2025-07-17 22:29:20] [Rank 0] step:5101/10000 train_time:1185046ms step_avg:232.32ms +[2025-07-17 22:29:25] [Rank 0] step:5121/10000 train_time:1189837ms step_avg:232.34ms +[2025-07-17 22:29:25] [Rank 0] step:5121/10000 train_time:1189837ms step_avg:232.34ms +[2025-07-17 22:29:30] [Rank 0] PRINT: step:5125/10000 val_loss:4.2469 train_time:1191517ms step_avg:232.49ms +[2025-07-17 22:29:30] [Rank 0] PRINT: step:5125/10000 val_loss:4.2469 train_time:1191517ms step_avg:232.49ms +[2025-07-17 22:29:34] [Rank 0] step:5141/10000 train_time:1194621ms step_avg:232.37ms +[2025-07-17 22:29:34] [Rank 0] step:5141/10000 train_time:1194621ms step_avg:232.37ms +[2025-07-17 22:29:39] [Rank 0] step:5161/10000 train_time:1199410ms step_avg:232.40ms +[2025-07-17 22:29:39] [Rank 0] step:5161/10000 train_time:1199410ms step_avg:232.40ms +[2025-07-17 22:29:44] [Rank 0] step:5181/10000 
train_time:1204204ms step_avg:232.43ms +[2025-07-17 22:29:44] [Rank 0] step:5181/10000 train_time:1204204ms step_avg:232.43ms +[2025-07-17 22:29:48] [Rank 0] step:5201/10000 train_time:1209029ms step_avg:232.46ms +[2025-07-17 22:29:48] [Rank 0] step:5201/10000 train_time:1209029ms step_avg:232.46ms +[2025-07-17 22:29:53] [Rank 0] step:5221/10000 train_time:1213892ms step_avg:232.50ms +[2025-07-17 22:29:53] [Rank 0] step:5221/10000 train_time:1213892ms step_avg:232.50ms +[2025-07-17 22:29:58] [Rank 0] step:5241/10000 train_time:1218751ms step_avg:232.54ms +[2025-07-17 22:29:58] [Rank 0] step:5241/10000 train_time:1218751ms step_avg:232.54ms +[2025-07-17 22:30:05] [Rank 0] PRINT: step:5250/10000 val_loss:4.4199 train_time:1221667ms step_avg:232.70ms +[2025-07-17 22:30:05] [Rank 0] PRINT: step:5250/10000 val_loss:4.4199 train_time:1221667ms step_avg:232.70ms +[2025-07-17 22:30:08] [Rank 0] step:5261/10000 train_time:1223602ms step_avg:232.58ms +[2025-07-17 22:30:08] [Rank 0] step:5261/10000 train_time:1223602ms step_avg:232.58ms +[2025-07-17 22:30:13] [Rank 0] step:5281/10000 train_time:1228465ms step_avg:232.62ms +[2025-07-17 22:30:13] [Rank 0] step:5281/10000 train_time:1228465ms step_avg:232.62ms +[2025-07-17 22:30:17] [Rank 0] step:5301/10000 train_time:1233322ms step_avg:232.66ms +[2025-07-17 22:30:17] [Rank 0] step:5301/10000 train_time:1233322ms step_avg:232.66ms +[2025-07-17 22:30:22] [Rank 0] step:5321/10000 train_time:1238186ms step_avg:232.70ms +[2025-07-17 22:30:22] [Rank 0] step:5321/10000 train_time:1238186ms step_avg:232.70ms +[2025-07-17 22:30:27] [Rank 0] step:5341/10000 train_time:1243052ms step_avg:232.74ms +[2025-07-17 22:30:27] [Rank 0] step:5341/10000 train_time:1243052ms step_avg:232.74ms +[2025-07-17 22:30:32] [Rank 0] step:5361/10000 train_time:1247913ms step_avg:232.78ms +[2025-07-17 22:30:32] [Rank 0] step:5361/10000 train_time:1247913ms step_avg:232.78ms +[2025-07-17 22:30:40] [Rank 0] PRINT: step:5375/10000 val_loss:4.1224 
train_time:1252051ms step_avg:232.94ms +[2025-07-17 22:30:40] [Rank 0] PRINT: step:5375/10000 val_loss:4.1224 train_time:1252051ms step_avg:232.94ms +[2025-07-17 22:30:42] [Rank 0] step:5381/10000 train_time:1252777ms step_avg:232.81ms +[2025-07-17 22:30:42] [Rank 0] step:5381/10000 train_time:1252777ms step_avg:232.81ms +[2025-07-17 22:30:46] [Rank 0] step:5401/10000 train_time:1257639ms step_avg:232.85ms +[2025-07-17 22:30:46] [Rank 0] step:5401/10000 train_time:1257639ms step_avg:232.85ms +[2025-07-17 22:30:51] [Rank 0] step:5421/10000 train_time:1262506ms step_avg:232.89ms +[2025-07-17 22:30:51] [Rank 0] step:5421/10000 train_time:1262506ms step_avg:232.89ms +[2025-07-17 22:30:56] [Rank 0] step:5441/10000 train_time:1267366ms step_avg:232.93ms +[2025-07-17 22:30:56] [Rank 0] step:5441/10000 train_time:1267366ms step_avg:232.93ms +[2025-07-17 22:31:01] [Rank 0] step:5461/10000 train_time:1272237ms step_avg:232.97ms +[2025-07-17 22:31:01] [Rank 0] step:5461/10000 train_time:1272237ms step_avg:232.97ms +[2025-07-17 22:31:06] [Rank 0] step:5481/10000 train_time:1277109ms step_avg:233.01ms +[2025-07-17 22:31:06] [Rank 0] step:5481/10000 train_time:1277109ms step_avg:233.01ms +[2025-07-17 22:31:15] [Rank 0] PRINT: step:5500/10000 val_loss:4.5238 train_time:1282465ms step_avg:233.18ms +[2025-07-17 22:31:15] [Rank 0] PRINT: step:5500/10000 val_loss:4.5238 train_time:1282465ms step_avg:233.18ms +[2025-07-17 22:31:15] [Rank 0] step:5501/10000 train_time:1282475ms step_avg:233.13ms +[2025-07-17 22:31:15] [Rank 0] step:5501/10000 train_time:1282475ms step_avg:233.13ms +[2025-07-17 22:31:20] [Rank 0] step:5521/10000 train_time:1286843ms step_avg:233.08ms +[2025-07-17 22:31:20] [Rank 0] step:5521/10000 train_time:1286843ms step_avg:233.08ms +[2025-07-17 22:31:25] [Rank 0] step:5541/10000 train_time:1291717ms step_avg:233.12ms +[2025-07-17 22:31:25] [Rank 0] step:5541/10000 train_time:1291717ms step_avg:233.12ms +[2025-07-17 22:31:30] [Rank 0] step:5561/10000 
train_time:1296593ms step_avg:233.16ms +[2025-07-17 22:31:30] [Rank 0] step:5561/10000 train_time:1296593ms step_avg:233.16ms +[2025-07-17 22:31:35] [Rank 0] step:5581/10000 train_time:1301458ms step_avg:233.19ms +[2025-07-17 22:31:35] [Rank 0] step:5581/10000 train_time:1301458ms step_avg:233.19ms +[2025-07-17 22:31:40] [Rank 0] step:5601/10000 train_time:1306333ms step_avg:233.23ms +[2025-07-17 22:31:40] [Rank 0] step:5601/10000 train_time:1306333ms step_avg:233.23ms +[2025-07-17 22:31:45] [Rank 0] step:5621/10000 train_time:1311211ms step_avg:233.27ms +[2025-07-17 22:31:45] [Rank 0] step:5621/10000 train_time:1311211ms step_avg:233.27ms +[2025-07-17 22:31:50] [Rank 0] PRINT: step:5625/10000 val_loss:4.3613 train_time:1312918ms step_avg:233.41ms +[2025-07-17 22:31:50] [Rank 0] PRINT: step:5625/10000 val_loss:4.3613 train_time:1312918ms step_avg:233.41ms +[2025-07-17 22:31:54] [Rank 0] step:5641/10000 train_time:1316080ms step_avg:233.31ms +[2025-07-17 22:31:54] [Rank 0] step:5641/10000 train_time:1316080ms step_avg:233.31ms +[2025-07-17 22:31:59] [Rank 0] step:5661/10000 train_time:1320953ms step_avg:233.34ms +[2025-07-17 22:31:59] [Rank 0] step:5661/10000 train_time:1320953ms step_avg:233.34ms +[2025-07-17 22:32:04] [Rank 0] step:5681/10000 train_time:1325831ms step_avg:233.38ms +[2025-07-17 22:32:04] [Rank 0] step:5681/10000 train_time:1325831ms step_avg:233.38ms +[2025-07-17 22:32:09] [Rank 0] step:5701/10000 train_time:1330704ms step_avg:233.42ms +[2025-07-17 22:32:09] [Rank 0] step:5701/10000 train_time:1330704ms step_avg:233.42ms +[2025-07-17 22:32:14] [Rank 0] step:5721/10000 train_time:1335576ms step_avg:233.45ms +[2025-07-17 22:32:14] [Rank 0] step:5721/10000 train_time:1335576ms step_avg:233.45ms +[2025-07-17 22:32:19] [Rank 0] step:5741/10000 train_time:1340453ms step_avg:233.49ms +[2025-07-17 22:32:19] [Rank 0] step:5741/10000 train_time:1340453ms step_avg:233.49ms +[2025-07-17 22:32:26] [Rank 0] PRINT: step:5750/10000 val_loss:4.4274 
train_time:1343384ms step_avg:233.63ms +[2025-07-17 22:32:26] [Rank 0] PRINT: step:5750/10000 val_loss:4.4274 train_time:1343384ms step_avg:233.63ms +[2025-07-17 22:32:28] [Rank 0] step:5761/10000 train_time:1345331ms step_avg:233.52ms +[2025-07-17 22:32:28] [Rank 0] step:5761/10000 train_time:1345331ms step_avg:233.52ms +[2025-07-17 22:32:33] [Rank 0] step:5781/10000 train_time:1350209ms step_avg:233.56ms +[2025-07-17 22:32:33] [Rank 0] step:5781/10000 train_time:1350209ms step_avg:233.56ms +[2025-07-17 22:32:38] [Rank 0] step:5801/10000 train_time:1355089ms step_avg:233.60ms +[2025-07-17 22:32:38] [Rank 0] step:5801/10000 train_time:1355089ms step_avg:233.60ms +[2025-07-17 22:32:43] [Rank 0] step:5821/10000 train_time:1359968ms step_avg:233.63ms +[2025-07-17 22:32:43] [Rank 0] step:5821/10000 train_time:1359968ms step_avg:233.63ms +[2025-07-17 22:32:48] [Rank 0] step:5841/10000 train_time:1364851ms step_avg:233.67ms +[2025-07-17 22:32:48] [Rank 0] step:5841/10000 train_time:1364851ms step_avg:233.67ms +[2025-07-17 22:32:53] [Rank 0] step:5861/10000 train_time:1369724ms step_avg:233.70ms +[2025-07-17 22:32:53] [Rank 0] step:5861/10000 train_time:1369724ms step_avg:233.70ms +[2025-07-17 22:33:00] [Rank 0] PRINT: step:5875/10000 val_loss:4.4494 train_time:1373872ms step_avg:233.85ms +[2025-07-17 22:33:00] [Rank 0] PRINT: step:5875/10000 val_loss:4.4494 train_time:1373872ms step_avg:233.85ms +[2025-07-17 22:33:02] [Rank 0] step:5881/10000 train_time:1374599ms step_avg:233.74ms +[2025-07-17 22:33:02] [Rank 0] step:5881/10000 train_time:1374599ms step_avg:233.74ms +[2025-07-17 22:33:07] [Rank 0] step:5901/10000 train_time:1379490ms step_avg:233.77ms +[2025-07-17 22:33:07] [Rank 0] step:5901/10000 train_time:1379490ms step_avg:233.77ms +[2025-07-17 22:33:12] [Rank 0] step:5921/10000 train_time:1384366ms step_avg:233.81ms +[2025-07-17 22:33:12] [Rank 0] step:5921/10000 train_time:1384366ms step_avg:233.81ms +[2025-07-17 22:33:16] [Rank 0] step:5941/10000 
train_time:1389256ms step_avg:233.84ms +[2025-07-17 22:33:16] [Rank 0] step:5941/10000 train_time:1389256ms step_avg:233.84ms +[2025-07-17 22:33:21] [Rank 0] step:5961/10000 train_time:1394144ms step_avg:233.88ms +[2025-07-17 22:33:21] [Rank 0] step:5961/10000 train_time:1394144ms step_avg:233.88ms +[2025-07-17 22:33:26] [Rank 0] step:5981/10000 train_time:1399031ms step_avg:233.91ms +[2025-07-17 22:33:26] [Rank 0] step:5981/10000 train_time:1399031ms step_avg:233.91ms +[2025-07-17 22:33:36] [Rank 0] PRINT: step:6000/10000 val_loss:4.5902 train_time:1404414ms step_avg:234.07ms +[2025-07-17 22:33:36] [Rank 0] PRINT: step:6000/10000 val_loss:4.5902 train_time:1404414ms step_avg:234.07ms +[2025-07-17 22:33:36] [Rank 0] step:6001/10000 train_time:1404424ms step_avg:234.03ms +[2025-07-17 22:33:36] [Rank 0] step:6001/10000 train_time:1404424ms step_avg:234.03ms +[2025-07-17 22:33:41] [Rank 0] step:6021/10000 train_time:1408809ms step_avg:233.98ms +[2025-07-17 22:33:41] [Rank 0] step:6021/10000 train_time:1408809ms step_avg:233.98ms +[2025-07-17 22:33:46] [Rank 0] step:6041/10000 train_time:1413699ms step_avg:234.02ms +[2025-07-17 22:33:46] [Rank 0] step:6041/10000 train_time:1413699ms step_avg:234.02ms +[2025-07-17 22:33:50] [Rank 0] step:6061/10000 train_time:1418586ms step_avg:234.05ms +[2025-07-17 22:33:50] [Rank 0] step:6061/10000 train_time:1418586ms step_avg:234.05ms +[2025-07-17 22:33:55] [Rank 0] step:6081/10000 train_time:1423474ms step_avg:234.09ms +[2025-07-17 22:33:55] [Rank 0] step:6081/10000 train_time:1423474ms step_avg:234.09ms +[2025-07-17 22:34:00] [Rank 0] step:6101/10000 train_time:1428375ms step_avg:234.12ms +[2025-07-17 22:34:00] [Rank 0] step:6101/10000 train_time:1428375ms step_avg:234.12ms +[2025-07-17 22:34:05] [Rank 0] step:6121/10000 train_time:1433270ms step_avg:234.16ms +[2025-07-17 22:34:05] [Rank 0] step:6121/10000 train_time:1433270ms step_avg:234.16ms +[2025-07-17 22:34:11] [Rank 0] PRINT: step:6125/10000 val_loss:4.3796 
train_time:1434983ms step_avg:234.28ms +[2025-07-17 22:34:11] [Rank 0] PRINT: step:6125/10000 val_loss:4.3796 train_time:1434983ms step_avg:234.28ms +[2025-07-17 22:34:15] [Rank 0] step:6141/10000 train_time:1438155ms step_avg:234.19ms +[2025-07-17 22:34:15] [Rank 0] step:6141/10000 train_time:1438155ms step_avg:234.19ms +[2025-07-17 22:34:20] [Rank 0] step:6161/10000 train_time:1443049ms step_avg:234.22ms +[2025-07-17 22:34:20] [Rank 0] step:6161/10000 train_time:1443049ms step_avg:234.22ms +[2025-07-17 22:34:25] [Rank 0] step:6181/10000 train_time:1447943ms step_avg:234.26ms +[2025-07-17 22:34:25] [Rank 0] step:6181/10000 train_time:1447943ms step_avg:234.26ms +[2025-07-17 22:34:29] [Rank 0] step:6201/10000 train_time:1452840ms step_avg:234.29ms +[2025-07-17 22:34:29] [Rank 0] step:6201/10000 train_time:1452840ms step_avg:234.29ms +[2025-07-17 22:34:34] [Rank 0] step:6221/10000 train_time:1457737ms step_avg:234.33ms +[2025-07-17 22:34:34] [Rank 0] step:6221/10000 train_time:1457737ms step_avg:234.33ms +[2025-07-17 22:34:39] [Rank 0] step:6241/10000 train_time:1462632ms step_avg:234.36ms +[2025-07-17 22:34:39] [Rank 0] step:6241/10000 train_time:1462632ms step_avg:234.36ms +[2025-07-17 22:34:46] [Rank 0] PRINT: step:6250/10000 val_loss:4.3668 train_time:1465571ms step_avg:234.49ms +[2025-07-17 22:34:46] [Rank 0] PRINT: step:6250/10000 val_loss:4.3668 train_time:1465571ms step_avg:234.49ms +[2025-07-17 22:34:49] [Rank 0] step:6261/10000 train_time:1467519ms step_avg:234.39ms +[2025-07-17 22:34:49] [Rank 0] step:6261/10000 train_time:1467519ms step_avg:234.39ms +[2025-07-17 22:34:54] [Rank 0] step:6281/10000 train_time:1472415ms step_avg:234.42ms +[2025-07-17 22:34:54] [Rank 0] step:6281/10000 train_time:1472415ms step_avg:234.42ms +[2025-07-17 22:34:59] [Rank 0] step:6301/10000 train_time:1477311ms step_avg:234.46ms +[2025-07-17 22:34:59] [Rank 0] step:6301/10000 train_time:1477311ms step_avg:234.46ms +[2025-07-17 22:35:04] [Rank 0] step:6321/10000 
train_time:1482207ms step_avg:234.49ms +[2025-07-17 22:35:04] [Rank 0] step:6321/10000 train_time:1482207ms step_avg:234.49ms +[2025-07-17 22:35:08] [Rank 0] step:6341/10000 train_time:1487109ms step_avg:234.52ms +[2025-07-17 22:35:08] [Rank 0] step:6341/10000 train_time:1487109ms step_avg:234.52ms +[2025-07-17 22:35:13] [Rank 0] step:6361/10000 train_time:1491996ms step_avg:234.55ms +[2025-07-17 22:35:13] [Rank 0] step:6361/10000 train_time:1491996ms step_avg:234.55ms +[2025-07-17 22:35:21] [Rank 0] PRINT: step:6375/10000 val_loss:4.4035 train_time:1496143ms step_avg:234.69ms +[2025-07-17 22:35:21] [Rank 0] PRINT: step:6375/10000 val_loss:4.4035 train_time:1496143ms step_avg:234.69ms +[2025-07-17 22:35:23] [Rank 0] step:6381/10000 train_time:1496871ms step_avg:234.58ms +[2025-07-17 22:35:23] [Rank 0] step:6381/10000 train_time:1496871ms step_avg:234.58ms +[2025-07-17 22:35:28] [Rank 0] step:6401/10000 train_time:1501746ms step_avg:234.61ms +[2025-07-17 22:35:28] [Rank 0] step:6401/10000 train_time:1501746ms step_avg:234.61ms +[2025-07-17 22:35:33] [Rank 0] step:6421/10000 train_time:1506623ms step_avg:234.64ms +[2025-07-17 22:35:33] [Rank 0] step:6421/10000 train_time:1506623ms step_avg:234.64ms +[2025-07-17 22:35:38] [Rank 0] step:6441/10000 train_time:1511507ms step_avg:234.67ms +[2025-07-17 22:35:38] [Rank 0] step:6441/10000 train_time:1511507ms step_avg:234.67ms +[2025-07-17 22:35:42] [Rank 0] step:6461/10000 train_time:1516401ms step_avg:234.70ms +[2025-07-17 22:35:42] [Rank 0] step:6461/10000 train_time:1516401ms step_avg:234.70ms +[2025-07-17 22:35:47] [Rank 0] step:6481/10000 train_time:1521287ms step_avg:234.73ms +[2025-07-17 22:35:47] [Rank 0] step:6481/10000 train_time:1521287ms step_avg:234.73ms +[2025-07-17 22:35:57] [Rank 0] PRINT: step:6500/10000 val_loss:4.1686 train_time:1526659ms step_avg:234.87ms +[2025-07-17 22:35:57] [Rank 0] PRINT: step:6500/10000 val_loss:4.1686 train_time:1526659ms step_avg:234.87ms +[2025-07-17 22:35:57] [Rank 0] 
step:6501/10000 train_time:1526671ms step_avg:234.84ms +[2025-07-17 22:35:57] [Rank 0] step:6501/10000 train_time:1526671ms step_avg:234.84ms +[2025-07-17 22:36:02] [Rank 0] step:6521/10000 train_time:1531051ms step_avg:234.79ms +[2025-07-17 22:36:02] [Rank 0] step:6521/10000 train_time:1531051ms step_avg:234.79ms +[2025-07-17 22:36:07] [Rank 0] step:6541/10000 train_time:1535935ms step_avg:234.82ms +[2025-07-17 22:36:07] [Rank 0] step:6541/10000 train_time:1535935ms step_avg:234.82ms +[2025-07-17 22:36:12] [Rank 0] step:6561/10000 train_time:1540824ms step_avg:234.85ms +[2025-07-17 22:36:12] [Rank 0] step:6561/10000 train_time:1540824ms step_avg:234.85ms +[2025-07-17 22:36:17] [Rank 0] step:6581/10000 train_time:1545718ms step_avg:234.88ms +[2025-07-17 22:36:17] [Rank 0] step:6581/10000 train_time:1545718ms step_avg:234.88ms +[2025-07-17 22:36:21] [Rank 0] step:6601/10000 train_time:1550613ms step_avg:234.91ms +[2025-07-17 22:36:21] [Rank 0] step:6601/10000 train_time:1550613ms step_avg:234.91ms +[2025-07-17 22:36:26] [Rank 0] step:6621/10000 train_time:1555497ms step_avg:234.93ms +[2025-07-17 22:36:26] [Rank 0] step:6621/10000 train_time:1555497ms step_avg:234.93ms +[2025-07-17 22:36:32] [Rank 0] PRINT: step:6625/10000 val_loss:4.3037 train_time:1557208ms step_avg:235.05ms +[2025-07-17 22:36:32] [Rank 0] PRINT: step:6625/10000 val_loss:4.3037 train_time:1557208ms step_avg:235.05ms +[2025-07-17 22:36:36] [Rank 0] step:6641/10000 train_time:1560371ms step_avg:234.96ms +[2025-07-17 22:36:36] [Rank 0] step:6641/10000 train_time:1560371ms step_avg:234.96ms +[2025-07-17 22:36:41] [Rank 0] step:6661/10000 train_time:1565255ms step_avg:234.99ms +[2025-07-17 22:36:41] [Rank 0] step:6661/10000 train_time:1565255ms step_avg:234.99ms +[2025-07-17 22:36:46] [Rank 0] step:6681/10000 train_time:1570181ms step_avg:235.02ms +[2025-07-17 22:36:46] [Rank 0] step:6681/10000 train_time:1570181ms step_avg:235.02ms +[2025-07-17 22:36:51] [Rank 0] step:6701/10000 train_time:1575130ms 
step_avg:235.06ms +[2025-07-17 22:36:51] [Rank 0] step:6701/10000 train_time:1575130ms step_avg:235.06ms +[2025-07-17 22:36:56] [Rank 0] step:6721/10000 train_time:1580098ms step_avg:235.10ms +[2025-07-17 22:36:56] [Rank 0] step:6721/10000 train_time:1580098ms step_avg:235.10ms +[2025-07-17 22:37:01] [Rank 0] step:6741/10000 train_time:1585060ms step_avg:235.14ms +[2025-07-17 22:37:01] [Rank 0] step:6741/10000 train_time:1585060ms step_avg:235.14ms +[2025-07-17 22:37:08] [Rank 0] PRINT: step:6750/10000 val_loss:4.4029 train_time:1588036ms step_avg:235.26ms +[2025-07-17 22:37:08] [Rank 0] PRINT: step:6750/10000 val_loss:4.4029 train_time:1588036ms step_avg:235.26ms +[2025-07-17 22:37:10] [Rank 0] step:6761/10000 train_time:1590011ms step_avg:235.17ms +[2025-07-17 22:37:10] [Rank 0] step:6761/10000 train_time:1590011ms step_avg:235.17ms +[2025-07-17 22:37:15] [Rank 0] step:6781/10000 train_time:1594963ms step_avg:235.21ms +[2025-07-17 22:37:15] [Rank 0] step:6781/10000 train_time:1594963ms step_avg:235.21ms +[2025-07-17 22:37:20] [Rank 0] step:6801/10000 train_time:1599925ms step_avg:235.25ms +[2025-07-17 22:37:20] [Rank 0] step:6801/10000 train_time:1599925ms step_avg:235.25ms +[2025-07-17 22:37:25] [Rank 0] step:6821/10000 train_time:1604878ms step_avg:235.28ms +[2025-07-17 22:37:25] [Rank 0] step:6821/10000 train_time:1604878ms step_avg:235.28ms +[2025-07-17 22:37:30] [Rank 0] step:6841/10000 train_time:1609831ms step_avg:235.32ms +[2025-07-17 22:37:30] [Rank 0] step:6841/10000 train_time:1609831ms step_avg:235.32ms +[2025-07-17 22:37:35] [Rank 0] step:6861/10000 train_time:1614778ms step_avg:235.36ms +[2025-07-17 22:37:35] [Rank 0] step:6861/10000 train_time:1614778ms step_avg:235.36ms +[2025-07-17 22:37:43] [Rank 0] PRINT: step:6875/10000 val_loss:4.4011 train_time:1618987ms step_avg:235.49ms +[2025-07-17 22:37:43] [Rank 0] PRINT: step:6875/10000 val_loss:4.4011 train_time:1618987ms step_avg:235.49ms +[2025-07-17 22:37:45] [Rank 0] step:6881/10000 
train_time:1619724ms step_avg:235.39ms +[2025-07-17 22:37:45] [Rank 0] step:6881/10000 train_time:1619724ms step_avg:235.39ms +[2025-07-17 22:37:50] [Rank 0] step:6901/10000 train_time:1624669ms step_avg:235.43ms +[2025-07-17 22:37:50] [Rank 0] step:6901/10000 train_time:1624669ms step_avg:235.43ms +[2025-07-17 22:37:55] [Rank 0] step:6921/10000 train_time:1629612ms step_avg:235.46ms +[2025-07-17 22:37:55] [Rank 0] step:6921/10000 train_time:1629612ms step_avg:235.46ms +[2025-07-17 22:38:00] [Rank 0] step:6941/10000 train_time:1634560ms step_avg:235.49ms +[2025-07-17 22:38:00] [Rank 0] step:6941/10000 train_time:1634560ms step_avg:235.49ms +[2025-07-17 22:38:04] [Rank 0] step:6961/10000 train_time:1639505ms step_avg:235.53ms +[2025-07-17 22:38:04] [Rank 0] step:6961/10000 train_time:1639505ms step_avg:235.53ms +[2025-07-17 22:38:09] [Rank 0] step:6981/10000 train_time:1644449ms step_avg:235.56ms +[2025-07-17 22:38:09] [Rank 0] step:6981/10000 train_time:1644449ms step_avg:235.56ms +[2025-07-17 22:38:19] [Rank 0] PRINT: step:7000/10000 val_loss:4.3791 train_time:1649879ms step_avg:235.70ms +[2025-07-17 22:38:19] [Rank 0] PRINT: step:7000/10000 val_loss:4.3791 train_time:1649879ms step_avg:235.70ms +[2025-07-17 22:38:19] [Rank 0] step:7001/10000 train_time:1649890ms step_avg:235.66ms +[2025-07-17 22:38:19] [Rank 0] step:7001/10000 train_time:1649890ms step_avg:235.66ms +[2025-07-17 22:38:24] [Rank 0] step:7021/10000 train_time:1654318ms step_avg:235.62ms +[2025-07-17 22:38:24] [Rank 0] step:7021/10000 train_time:1654318ms step_avg:235.62ms +[2025-07-17 22:38:29] [Rank 0] step:7041/10000 train_time:1659247ms step_avg:235.65ms +[2025-07-17 22:38:29] [Rank 0] step:7041/10000 train_time:1659247ms step_avg:235.65ms +[2025-07-17 22:38:34] [Rank 0] step:7061/10000 train_time:1664176ms step_avg:235.69ms +[2025-07-17 22:38:34] [Rank 0] step:7061/10000 train_time:1664176ms step_avg:235.69ms +[2025-07-17 22:38:39] [Rank 0] step:7081/10000 train_time:1669102ms step_avg:235.72ms 
+[2025-07-17 22:38:39] [Rank 0] step:7081/10000 train_time:1669102ms step_avg:235.72ms +[2025-07-17 22:38:44] [Rank 0] step:7101/10000 train_time:1674031ms step_avg:235.75ms +[2025-07-17 22:38:44] [Rank 0] step:7101/10000 train_time:1674031ms step_avg:235.75ms +[2025-07-17 22:38:49] [Rank 0] step:7121/10000 train_time:1678968ms step_avg:235.78ms +[2025-07-17 22:38:49] [Rank 0] step:7121/10000 train_time:1678968ms step_avg:235.78ms +[2025-07-17 22:38:54] [Rank 0] PRINT: step:7125/10000 val_loss:4.1922 train_time:1680695ms step_avg:235.89ms +[2025-07-17 22:38:54] [Rank 0] PRINT: step:7125/10000 val_loss:4.1922 train_time:1680695ms step_avg:235.89ms +[2025-07-17 22:38:58] [Rank 0] step:7141/10000 train_time:1683908ms step_avg:235.81ms +[2025-07-17 22:38:58] [Rank 0] step:7141/10000 train_time:1683908ms step_avg:235.81ms +[2025-07-17 22:39:03] [Rank 0] step:7161/10000 train_time:1688844ms step_avg:235.84ms +[2025-07-17 22:39:03] [Rank 0] step:7161/10000 train_time:1688844ms step_avg:235.84ms +[2025-07-17 22:39:08] [Rank 0] step:7181/10000 train_time:1693786ms step_avg:235.87ms +[2025-07-17 22:39:08] [Rank 0] step:7181/10000 train_time:1693786ms step_avg:235.87ms +[2025-07-17 22:39:13] [Rank 0] step:7201/10000 train_time:1698736ms step_avg:235.90ms +[2025-07-17 22:39:13] [Rank 0] step:7201/10000 train_time:1698736ms step_avg:235.90ms +[2025-07-17 22:39:18] [Rank 0] step:7221/10000 train_time:1703680ms step_avg:235.93ms +[2025-07-17 22:39:18] [Rank 0] step:7221/10000 train_time:1703680ms step_avg:235.93ms +[2025-07-17 22:39:23] [Rank 0] step:7241/10000 train_time:1708616ms step_avg:235.96ms +[2025-07-17 22:39:23] [Rank 0] step:7241/10000 train_time:1708616ms step_avg:235.96ms +[2025-07-17 22:39:30] [Rank 0] PRINT: step:7250/10000 val_loss:4.3856 train_time:1711586ms step_avg:236.08ms +[2025-07-17 22:39:30] [Rank 0] PRINT: step:7250/10000 val_loss:4.3856 train_time:1711586ms step_avg:236.08ms +[2025-07-17 22:39:33] [Rank 0] step:7261/10000 train_time:1713552ms 
step_avg:235.99ms +[2025-07-17 22:39:33] [Rank 0] step:7261/10000 train_time:1713552ms step_avg:235.99ms +[2025-07-17 22:39:38] [Rank 0] step:7281/10000 train_time:1718489ms step_avg:236.02ms +[2025-07-17 22:39:38] [Rank 0] step:7281/10000 train_time:1718489ms step_avg:236.02ms +[2025-07-17 22:39:43] [Rank 0] step:7301/10000 train_time:1723431ms step_avg:236.05ms +[2025-07-17 22:39:43] [Rank 0] step:7301/10000 train_time:1723431ms step_avg:236.05ms +[2025-07-17 22:39:48] [Rank 0] step:7321/10000 train_time:1728383ms step_avg:236.09ms +[2025-07-17 22:39:48] [Rank 0] step:7321/10000 train_time:1728383ms step_avg:236.09ms +[2025-07-17 22:39:52] [Rank 0] step:7341/10000 train_time:1733330ms step_avg:236.12ms +[2025-07-17 22:39:52] [Rank 0] step:7341/10000 train_time:1733330ms step_avg:236.12ms +[2025-07-17 22:39:57] [Rank 0] step:7361/10000 train_time:1738283ms step_avg:236.15ms +[2025-07-17 22:39:57] [Rank 0] step:7361/10000 train_time:1738283ms step_avg:236.15ms +[2025-07-17 22:40:05] [Rank 0] PRINT: step:7375/10000 val_loss:4.3813 train_time:1742496ms step_avg:236.27ms +[2025-07-17 22:40:05] [Rank 0] PRINT: step:7375/10000 val_loss:4.3813 train_time:1742496ms step_avg:236.27ms +[2025-07-17 22:40:07] [Rank 0] step:7381/10000 train_time:1743234ms step_avg:236.18ms +[2025-07-17 22:40:07] [Rank 0] step:7381/10000 train_time:1743234ms step_avg:236.18ms +[2025-07-17 22:40:12] [Rank 0] step:7401/10000 train_time:1748188ms step_avg:236.21ms +[2025-07-17 22:40:12] [Rank 0] step:7401/10000 train_time:1748188ms step_avg:236.21ms +[2025-07-17 22:40:17] [Rank 0] step:7421/10000 train_time:1753144ms step_avg:236.24ms +[2025-07-17 22:40:17] [Rank 0] step:7421/10000 train_time:1753144ms step_avg:236.24ms +[2025-07-17 22:40:22] [Rank 0] step:7441/10000 train_time:1758110ms step_avg:236.27ms +[2025-07-17 22:40:22] [Rank 0] step:7441/10000 train_time:1758110ms step_avg:236.27ms +[2025-07-17 22:40:27] [Rank 0] step:7461/10000 train_time:1763063ms step_avg:236.30ms +[2025-07-17 
22:40:27] [Rank 0] step:7461/10000 train_time:1763063ms step_avg:236.30ms +[2025-07-17 22:40:32] [Rank 0] step:7481/10000 train_time:1768025ms step_avg:236.34ms +[2025-07-17 22:40:32] [Rank 0] step:7481/10000 train_time:1768025ms step_avg:236.34ms +[2025-07-17 22:40:41] [Rank 0] PRINT: step:7500/10000 val_loss:4.4760 train_time:1773490ms step_avg:236.47ms +[2025-07-17 22:40:41] [Rank 0] PRINT: step:7500/10000 val_loss:4.4760 train_time:1773490ms step_avg:236.47ms +[2025-07-17 22:40:41] [Rank 0] step:7501/10000 train_time:1773500ms step_avg:236.44ms +[2025-07-17 22:40:41] [Rank 0] step:7501/10000 train_time:1773500ms step_avg:236.44ms +[2025-07-17 22:40:46] [Rank 0] step:7521/10000 train_time:1777956ms step_avg:236.40ms +[2025-07-17 22:40:46] [Rank 0] step:7521/10000 train_time:1777956ms step_avg:236.40ms +[2025-07-17 22:40:51] [Rank 0] step:7541/10000 train_time:1782912ms step_avg:236.43ms +[2025-07-17 22:40:51] [Rank 0] step:7541/10000 train_time:1782912ms step_avg:236.43ms +[2025-07-17 22:40:56] [Rank 0] step:7561/10000 train_time:1787868ms step_avg:236.46ms +[2025-07-17 22:40:56] [Rank 0] step:7561/10000 train_time:1787868ms step_avg:236.46ms +[2025-07-17 22:41:01] [Rank 0] step:7581/10000 train_time:1792837ms step_avg:236.49ms +[2025-07-17 22:41:01] [Rank 0] step:7581/10000 train_time:1792837ms step_avg:236.49ms +[2025-07-17 22:41:06] [Rank 0] step:7601/10000 train_time:1797804ms step_avg:236.52ms +[2025-07-17 22:41:06] [Rank 0] step:7601/10000 train_time:1797804ms step_avg:236.52ms +[2025-07-17 22:41:11] [Rank 0] step:7621/10000 train_time:1803296ms step_avg:236.62ms +[2025-07-17 22:41:11] [Rank 0] step:7621/10000 train_time:1803296ms step_avg:236.62ms +[2025-07-17 22:41:17] [Rank 0] PRINT: step:7625/10000 val_loss:4.4422 train_time:1804530ms step_avg:236.66ms +[2025-07-17 22:41:17] [Rank 0] PRINT: step:7625/10000 val_loss:4.4422 train_time:1804530ms step_avg:236.66ms +[2025-07-17 22:41:21] [Rank 0] step:7641/10000 train_time:1807753ms step_avg:236.59ms 
+[2025-07-17 22:41:21] [Rank 0] step:7641/10000 train_time:1807753ms step_avg:236.59ms +[2025-07-17 22:41:26] [Rank 0] step:7661/10000 train_time:1812733ms step_avg:236.62ms +[2025-07-17 22:41:26] [Rank 0] step:7661/10000 train_time:1812733ms step_avg:236.62ms +[2025-07-17 22:41:31] [Rank 0] step:7681/10000 train_time:1817724ms step_avg:236.65ms +[2025-07-17 22:41:31] [Rank 0] step:7681/10000 train_time:1817724ms step_avg:236.65ms +[2025-07-17 22:41:36] [Rank 0] step:7701/10000 train_time:1822692ms step_avg:236.68ms +[2025-07-17 22:41:36] [Rank 0] step:7701/10000 train_time:1822692ms step_avg:236.68ms +[2025-07-17 22:41:41] [Rank 0] step:7721/10000 train_time:1827670ms step_avg:236.71ms +[2025-07-17 22:41:41] [Rank 0] step:7721/10000 train_time:1827670ms step_avg:236.71ms +[2025-07-17 22:41:46] [Rank 0] step:7741/10000 train_time:1832642ms step_avg:236.74ms +[2025-07-17 22:41:46] [Rank 0] step:7741/10000 train_time:1832642ms step_avg:236.74ms +[2025-07-17 22:41:53] [Rank 0] PRINT: step:7750/10000 val_loss:4.4026 train_time:1835644ms step_avg:236.86ms +[2025-07-17 22:41:53] [Rank 0] PRINT: step:7750/10000 val_loss:4.4026 train_time:1835644ms step_avg:236.86ms +[2025-07-17 22:41:55] [Rank 0] step:7761/10000 train_time:1837628ms step_avg:236.78ms +[2025-07-17 22:41:55] [Rank 0] step:7761/10000 train_time:1837628ms step_avg:236.78ms +[2025-07-17 22:42:00] [Rank 0] step:7781/10000 train_time:1842605ms step_avg:236.81ms +[2025-07-17 22:42:00] [Rank 0] step:7781/10000 train_time:1842605ms step_avg:236.81ms +[2025-07-17 22:42:05] [Rank 0] step:7801/10000 train_time:1847574ms step_avg:236.84ms +[2025-07-17 22:42:05] [Rank 0] step:7801/10000 train_time:1847574ms step_avg:236.84ms +[2025-07-17 22:42:10] [Rank 0] step:7821/10000 train_time:1852553ms step_avg:236.87ms +[2025-07-17 22:42:10] [Rank 0] step:7821/10000 train_time:1852553ms step_avg:236.87ms +[2025-07-17 22:42:15] [Rank 0] step:7841/10000 train_time:1857526ms step_avg:236.90ms +[2025-07-17 22:42:15] [Rank 0] 
step:7841/10000 train_time:1857526ms step_avg:236.90ms +[2025-07-17 22:42:20] [Rank 0] step:7861/10000 train_time:1862486ms step_avg:236.93ms +[2025-07-17 22:42:20] [Rank 0] step:7861/10000 train_time:1862486ms step_avg:236.93ms +[2025-07-17 22:42:28] [Rank 0] PRINT: step:7875/10000 val_loss:4.4837 train_time:1866707ms step_avg:237.04ms +[2025-07-17 22:42:28] [Rank 0] PRINT: step:7875/10000 val_loss:4.4837 train_time:1866707ms step_avg:237.04ms +[2025-07-17 22:42:30] [Rank 0] step:7881/10000 train_time:1867447ms step_avg:236.96ms +[2025-07-17 22:42:30] [Rank 0] step:7881/10000 train_time:1867447ms step_avg:236.96ms +[2025-07-17 22:42:35] [Rank 0] step:7901/10000 train_time:1872417ms step_avg:236.98ms +[2025-07-17 22:42:35] [Rank 0] step:7901/10000 train_time:1872417ms step_avg:236.98ms +[2025-07-17 22:42:40] [Rank 0] step:7921/10000 train_time:1877388ms step_avg:237.01ms +[2025-07-17 22:42:40] [Rank 0] step:7921/10000 train_time:1877388ms step_avg:237.01ms +[2025-07-17 22:42:44] [Rank 0] step:7941/10000 train_time:1882366ms step_avg:237.04ms +[2025-07-17 22:42:44] [Rank 0] step:7941/10000 train_time:1882366ms step_avg:237.04ms +[2025-07-17 22:42:49] [Rank 0] step:7961/10000 train_time:1887354ms step_avg:237.08ms +[2025-07-17 22:42:49] [Rank 0] step:7961/10000 train_time:1887354ms step_avg:237.08ms +[2025-07-17 22:42:54] [Rank 0] step:7981/10000 train_time:1892322ms step_avg:237.10ms +[2025-07-17 22:42:54] [Rank 0] step:7981/10000 train_time:1892322ms step_avg:237.10ms +[2025-07-17 22:43:04] [Rank 0] PRINT: step:8000/10000 val_loss:4.4760 train_time:1897805ms step_avg:237.23ms +[2025-07-17 22:43:04] [Rank 0] PRINT: step:8000/10000 val_loss:4.4760 train_time:1897805ms step_avg:237.23ms +[2025-07-17 22:43:04] [Rank 0] step:8001/10000 train_time:1897815ms step_avg:237.20ms +[2025-07-17 22:43:04] [Rank 0] step:8001/10000 train_time:1897815ms step_avg:237.20ms +[2025-07-17 22:43:09] [Rank 0] step:8021/10000 train_time:1902277ms step_avg:237.16ms +[2025-07-17 22:43:09] 
[Rank 0] step:8021/10000 train_time:1902277ms step_avg:237.16ms +[2025-07-17 22:43:14] [Rank 0] step:8041/10000 train_time:1907267ms step_avg:237.19ms +[2025-07-17 22:43:14] [Rank 0] step:8041/10000 train_time:1907267ms step_avg:237.19ms +[2025-07-17 22:43:19] [Rank 0] step:8061/10000 train_time:1912241ms step_avg:237.22ms +[2025-07-17 22:43:19] [Rank 0] step:8061/10000 train_time:1912241ms step_avg:237.22ms +[2025-07-17 22:43:24] [Rank 0] step:8081/10000 train_time:1917212ms step_avg:237.25ms +[2025-07-17 22:43:24] [Rank 0] step:8081/10000 train_time:1917212ms step_avg:237.25ms +[2025-07-17 22:43:29] [Rank 0] step:8101/10000 train_time:1922182ms step_avg:237.28ms +[2025-07-17 22:43:29] [Rank 0] step:8101/10000 train_time:1922182ms step_avg:237.28ms +[2025-07-17 22:43:34] [Rank 0] step:8121/10000 train_time:1927158ms step_avg:237.31ms +[2025-07-17 22:43:34] [Rank 0] step:8121/10000 train_time:1927158ms step_avg:237.31ms +[2025-07-17 22:43:40] [Rank 0] PRINT: step:8125/10000 val_loss:4.4493 train_time:1928899ms step_avg:237.40ms +[2025-07-17 22:43:40] [Rank 0] PRINT: step:8125/10000 val_loss:4.4493 train_time:1928899ms step_avg:237.40ms +[2025-07-17 22:43:44] [Rank 0] step:8141/10000 train_time:1932126ms step_avg:237.33ms +[2025-07-17 22:43:44] [Rank 0] step:8141/10000 train_time:1932126ms step_avg:237.33ms +[2025-07-17 22:43:49] [Rank 0] step:8161/10000 train_time:1937139ms step_avg:237.37ms +[2025-07-17 22:43:49] [Rank 0] step:8161/10000 train_time:1937139ms step_avg:237.37ms +[2025-07-17 22:43:54] [Rank 0] step:8181/10000 train_time:1942180ms step_avg:237.40ms +[2025-07-17 22:43:54] [Rank 0] step:8181/10000 train_time:1942180ms step_avg:237.40ms +[2025-07-17 22:43:59] [Rank 0] step:8201/10000 train_time:1947197ms step_avg:237.43ms +[2025-07-17 22:43:59] [Rank 0] step:8201/10000 train_time:1947197ms step_avg:237.43ms +[2025-07-17 22:44:04] [Rank 0] step:8221/10000 train_time:1952233ms step_avg:237.47ms +[2025-07-17 22:44:04] [Rank 0] step:8221/10000 
train_time:1952233ms step_avg:237.47ms +[2025-07-17 22:44:09] [Rank 0] step:8241/10000 train_time:1957269ms step_avg:237.50ms +[2025-07-17 22:44:09] [Rank 0] step:8241/10000 train_time:1957269ms step_avg:237.50ms +[2025-07-17 22:44:16] [Rank 0] PRINT: step:8250/10000 val_loss:4.4886 train_time:1960297ms step_avg:237.61ms +[2025-07-17 22:44:16] [Rank 0] PRINT: step:8250/10000 val_loss:4.4886 train_time:1960297ms step_avg:237.61ms +[2025-07-17 22:44:19] [Rank 0] step:8261/10000 train_time:1962305ms step_avg:237.54ms +[2025-07-17 22:44:19] [Rank 0] step:8261/10000 train_time:1962305ms step_avg:237.54ms +[2025-07-17 22:44:24] [Rank 0] step:8281/10000 train_time:1967357ms step_avg:237.57ms +[2025-07-17 22:44:24] [Rank 0] step:8281/10000 train_time:1967357ms step_avg:237.57ms +[2025-07-17 22:44:29] [Rank 0] step:8301/10000 train_time:1972376ms step_avg:237.61ms +[2025-07-17 22:44:29] [Rank 0] step:8301/10000 train_time:1972376ms step_avg:237.61ms +[2025-07-17 22:44:34] [Rank 0] step:8321/10000 train_time:1977410ms step_avg:237.64ms +[2025-07-17 22:44:34] [Rank 0] step:8321/10000 train_time:1977410ms step_avg:237.64ms +[2025-07-17 22:44:39] [Rank 0] step:8341/10000 train_time:1982447ms step_avg:237.67ms +[2025-07-17 22:44:39] [Rank 0] step:8341/10000 train_time:1982447ms step_avg:237.67ms +[2025-07-17 22:44:44] [Rank 0] step:8361/10000 train_time:1987473ms step_avg:237.71ms +[2025-07-17 22:44:44] [Rank 0] step:8361/10000 train_time:1987473ms step_avg:237.71ms +[2025-07-17 22:44:52] [Rank 0] PRINT: step:8375/10000 val_loss:4.5733 train_time:1991746ms step_avg:237.82ms +[2025-07-17 22:44:52] [Rank 0] PRINT: step:8375/10000 val_loss:4.5733 train_time:1991746ms step_avg:237.82ms +[2025-07-17 22:44:54] [Rank 0] step:8381/10000 train_time:1992488ms step_avg:237.74ms +[2025-07-17 22:44:54] [Rank 0] step:8381/10000 train_time:1992488ms step_avg:237.74ms +[2025-07-17 22:44:59] [Rank 0] step:8401/10000 train_time:1997496ms step_avg:237.77ms +[2025-07-17 22:44:59] [Rank 0] 
step:8401/10000 train_time:1997496ms step_avg:237.77ms +[2025-07-17 22:45:04] [Rank 0] step:8421/10000 train_time:2002526ms step_avg:237.80ms +[2025-07-17 22:45:04] [Rank 0] step:8421/10000 train_time:2002526ms step_avg:237.80ms +[2025-07-17 22:45:09] [Rank 0] step:8441/10000 train_time:2007560ms step_avg:237.83ms +[2025-07-17 22:45:09] [Rank 0] step:8441/10000 train_time:2007560ms step_avg:237.83ms +[2025-07-17 22:45:14] [Rank 0] step:8461/10000 train_time:2012603ms step_avg:237.87ms +[2025-07-17 22:45:14] [Rank 0] step:8461/10000 train_time:2012603ms step_avg:237.87ms +[2025-07-17 22:45:19] [Rank 0] step:8481/10000 train_time:2017620ms step_avg:237.90ms +[2025-07-17 22:45:19] [Rank 0] step:8481/10000 train_time:2017620ms step_avg:237.90ms +[2025-07-17 22:45:28] [Rank 0] PRINT: step:8500/10000 val_loss:4.4941 train_time:2023154ms step_avg:238.02ms +[2025-07-17 22:45:28] [Rank 0] PRINT: step:8500/10000 val_loss:4.4941 train_time:2023154ms step_avg:238.02ms +[2025-07-17 22:45:28] [Rank 0] step:8501/10000 train_time:2023165ms step_avg:237.99ms +[2025-07-17 22:45:28] [Rank 0] step:8501/10000 train_time:2023165ms step_avg:237.99ms +[2025-07-17 22:45:33] [Rank 0] step:8521/10000 train_time:2027675ms step_avg:237.96ms +[2025-07-17 22:45:33] [Rank 0] step:8521/10000 train_time:2027675ms step_avg:237.96ms +[2025-07-17 22:45:38] [Rank 0] step:8541/10000 train_time:2032708ms step_avg:237.99ms +[2025-07-17 22:45:38] [Rank 0] step:8541/10000 train_time:2032708ms step_avg:237.99ms +[2025-07-17 22:45:43] [Rank 0] step:8561/10000 train_time:2037741ms step_avg:238.03ms +[2025-07-17 22:45:43] [Rank 0] step:8561/10000 train_time:2037741ms step_avg:238.03ms +[2025-07-17 22:45:48] [Rank 0] step:8581/10000 train_time:2042769ms step_avg:238.06ms +[2025-07-17 22:45:48] [Rank 0] step:8581/10000 train_time:2042769ms step_avg:238.06ms +[2025-07-17 22:45:53] [Rank 0] step:8601/10000 train_time:2047784ms step_avg:238.09ms +[2025-07-17 22:45:53] [Rank 0] step:8601/10000 train_time:2047784ms 
step_avg:238.09ms +[2025-07-17 22:45:58] [Rank 0] step:8621/10000 train_time:2052804ms step_avg:238.12ms +[2025-07-17 22:45:58] [Rank 0] step:8621/10000 train_time:2052804ms step_avg:238.12ms +[2025-07-17 22:46:04] [Rank 0] PRINT: step:8625/10000 val_loss:4.4648 train_time:2054564ms step_avg:238.21ms +[2025-07-17 22:46:04] [Rank 0] PRINT: step:8625/10000 val_loss:4.4648 train_time:2054564ms step_avg:238.21ms +[2025-07-17 22:46:08] [Rank 0] step:8641/10000 train_time:2057847ms step_avg:238.15ms +[2025-07-17 22:46:08] [Rank 0] step:8641/10000 train_time:2057847ms step_avg:238.15ms +[2025-07-17 22:46:13] [Rank 0] step:8661/10000 train_time:2062870ms step_avg:238.18ms +[2025-07-17 22:46:13] [Rank 0] step:8661/10000 train_time:2062870ms step_avg:238.18ms +[2025-07-17 22:46:18] [Rank 0] step:8681/10000 train_time:2067898ms step_avg:238.21ms +[2025-07-17 22:46:18] [Rank 0] step:8681/10000 train_time:2067898ms step_avg:238.21ms +[2025-07-17 22:46:23] [Rank 0] step:8701/10000 train_time:2072933ms step_avg:238.24ms +[2025-07-17 22:46:23] [Rank 0] step:8701/10000 train_time:2072933ms step_avg:238.24ms +[2025-07-17 22:46:28] [Rank 0] step:8721/10000 train_time:2077960ms step_avg:238.27ms +[2025-07-17 22:46:28] [Rank 0] step:8721/10000 train_time:2077960ms step_avg:238.27ms +[2025-07-17 22:46:33] [Rank 0] step:8741/10000 train_time:2082990ms step_avg:238.30ms +[2025-07-17 22:46:33] [Rank 0] step:8741/10000 train_time:2082990ms step_avg:238.30ms +[2025-07-17 22:46:40] [Rank 0] PRINT: step:8750/10000 val_loss:4.5020 train_time:2086001ms step_avg:238.40ms +[2025-07-17 22:46:40] [Rank 0] PRINT: step:8750/10000 val_loss:4.5020 train_time:2086001ms step_avg:238.40ms +[2025-07-17 22:46:43] [Rank 0] step:8761/10000 train_time:2088004ms step_avg:238.33ms +[2025-07-17 22:46:43] [Rank 0] step:8761/10000 train_time:2088004ms step_avg:238.33ms +[2025-07-17 22:46:48] [Rank 0] step:8781/10000 train_time:2093026ms step_avg:238.36ms +[2025-07-17 22:46:48] [Rank 0] step:8781/10000 
train_time:2093026ms step_avg:238.36ms +[2025-07-17 22:46:53] [Rank 0] step:8801/10000 train_time:2098045ms step_avg:238.39ms +[2025-07-17 22:46:53] [Rank 0] step:8801/10000 train_time:2098045ms step_avg:238.39ms +[2025-07-17 22:46:58] [Rank 0] step:8821/10000 train_time:2103077ms step_avg:238.42ms +[2025-07-17 22:46:58] [Rank 0] step:8821/10000 train_time:2103077ms step_avg:238.42ms +[2025-07-17 22:47:03] [Rank 0] step:8841/10000 train_time:2108123ms step_avg:238.45ms +[2025-07-17 22:47:03] [Rank 0] step:8841/10000 train_time:2108123ms step_avg:238.45ms +[2025-07-17 22:47:08] [Rank 0] step:8861/10000 train_time:2113158ms step_avg:238.48ms +[2025-07-17 22:47:08] [Rank 0] step:8861/10000 train_time:2113158ms step_avg:238.48ms +[2025-07-17 22:47:17] [Rank 0] PRINT: step:8875/10000 val_loss:4.4943 train_time:2117433ms step_avg:238.58ms +[2025-07-17 22:47:17] [Rank 0] PRINT: step:8875/10000 val_loss:4.4943 train_time:2117433ms step_avg:238.58ms +[2025-07-17 22:47:18] [Rank 0] step:8881/10000 train_time:2118182ms step_avg:238.51ms +[2025-07-17 22:47:18] [Rank 0] step:8881/10000 train_time:2118182ms step_avg:238.51ms +[2025-07-17 22:47:23] [Rank 0] step:8901/10000 train_time:2123204ms step_avg:238.54ms +[2025-07-17 22:47:23] [Rank 0] step:8901/10000 train_time:2123204ms step_avg:238.54ms +[2025-07-17 22:47:28] [Rank 0] step:8921/10000 train_time:2128231ms step_avg:238.56ms +[2025-07-17 22:47:28] [Rank 0] step:8921/10000 train_time:2128231ms step_avg:238.56ms +[2025-07-17 22:47:33] [Rank 0] step:8941/10000 train_time:2133266ms step_avg:238.59ms +[2025-07-17 22:47:33] [Rank 0] step:8941/10000 train_time:2133266ms step_avg:238.59ms +[2025-07-17 22:47:38] [Rank 0] step:8961/10000 train_time:2138300ms step_avg:238.62ms +[2025-07-17 22:47:38] [Rank 0] step:8961/10000 train_time:2138300ms step_avg:238.62ms +[2025-07-17 22:47:43] [Rank 0] step:8981/10000 train_time:2143334ms step_avg:238.65ms +[2025-07-17 22:47:43] [Rank 0] step:8981/10000 train_time:2143334ms step_avg:238.65ms 
+[2025-07-17 22:47:52] [Rank 0] PRINT: step:9000/10000 val_loss:4.5097 train_time:2148869ms step_avg:238.76ms +[2025-07-17 22:47:52] [Rank 0] PRINT: step:9000/10000 val_loss:4.5097 train_time:2148869ms step_avg:238.76ms +[2025-07-17 22:47:53] [Rank 0] step:9001/10000 train_time:2148880ms step_avg:238.74ms +[2025-07-17 22:47:53] [Rank 0] step:9001/10000 train_time:2148880ms step_avg:238.74ms +[2025-07-17 22:47:58] [Rank 0] step:9021/10000 train_time:2153385ms step_avg:238.71ms +[2025-07-17 22:47:58] [Rank 0] step:9021/10000 train_time:2153385ms step_avg:238.71ms +[2025-07-17 22:48:03] [Rank 0] step:9041/10000 train_time:2158437ms step_avg:238.74ms +[2025-07-17 22:48:03] [Rank 0] step:9041/10000 train_time:2158437ms step_avg:238.74ms +[2025-07-17 22:48:08] [Rank 0] step:9061/10000 train_time:2163465ms step_avg:238.77ms +[2025-07-17 22:48:08] [Rank 0] step:9061/10000 train_time:2163465ms step_avg:238.77ms +[2025-07-17 22:48:13] [Rank 0] step:9081/10000 train_time:2168518ms step_avg:238.80ms +[2025-07-17 22:48:13] [Rank 0] step:9081/10000 train_time:2168518ms step_avg:238.80ms +[2025-07-17 22:48:18] [Rank 0] step:9101/10000 train_time:2173563ms step_avg:238.83ms +[2025-07-17 22:48:18] [Rank 0] step:9101/10000 train_time:2173563ms step_avg:238.83ms +[2025-07-17 22:48:23] [Rank 0] step:9121/10000 train_time:2178604ms step_avg:238.86ms +[2025-07-17 22:48:23] [Rank 0] step:9121/10000 train_time:2178604ms step_avg:238.86ms +[2025-07-17 22:48:28] [Rank 0] PRINT: step:9125/10000 val_loss:4.4666 train_time:2180366ms step_avg:238.94ms +[2025-07-17 22:48:28] [Rank 0] PRINT: step:9125/10000 val_loss:4.4666 train_time:2180366ms step_avg:238.94ms +[2025-07-17 22:48:32] [Rank 0] step:9141/10000 train_time:2183617ms step_avg:238.88ms +[2025-07-17 22:48:32] [Rank 0] step:9141/10000 train_time:2183617ms step_avg:238.88ms +[2025-07-17 22:48:37] [Rank 0] step:9161/10000 train_time:2188679ms step_avg:238.91ms +[2025-07-17 22:48:37] [Rank 0] step:9161/10000 train_time:2188679ms 
step_avg:238.91ms +[2025-07-17 22:48:42] [Rank 0] step:9181/10000 train_time:2193715ms step_avg:238.94ms +[2025-07-17 22:48:42] [Rank 0] step:9181/10000 train_time:2193715ms step_avg:238.94ms +[2025-07-17 22:48:48] [Rank 0] step:9201/10000 train_time:2198742ms step_avg:238.97ms +[2025-07-17 22:48:48] [Rank 0] step:9201/10000 train_time:2198742ms step_avg:238.97ms +[2025-07-17 22:48:53] [Rank 0] step:9221/10000 train_time:2203803ms step_avg:239.00ms +[2025-07-17 22:48:53] [Rank 0] step:9221/10000 train_time:2203803ms step_avg:239.00ms +[2025-07-17 22:48:58] [Rank 0] step:9241/10000 train_time:2208847ms step_avg:239.03ms +[2025-07-17 22:48:58] [Rank 0] step:9241/10000 train_time:2208847ms step_avg:239.03ms +[2025-07-17 22:49:04] [Rank 0] PRINT: step:9250/10000 val_loss:4.4735 train_time:2211872ms step_avg:239.12ms +[2025-07-17 22:49:04] [Rank 0] PRINT: step:9250/10000 val_loss:4.4735 train_time:2211872ms step_avg:239.12ms +[2025-07-17 22:49:07] [Rank 0] step:9261/10000 train_time:2213890ms step_avg:239.06ms +[2025-07-17 22:49:07] [Rank 0] step:9261/10000 train_time:2213890ms step_avg:239.06ms +[2025-07-17 22:49:12] [Rank 0] step:9281/10000 train_time:2218908ms step_avg:239.08ms +[2025-07-17 22:49:12] [Rank 0] step:9281/10000 train_time:2218908ms step_avg:239.08ms +[2025-07-17 22:49:17] [Rank 0] step:9301/10000 train_time:2223949ms step_avg:239.11ms +[2025-07-17 22:49:17] [Rank 0] step:9301/10000 train_time:2223949ms step_avg:239.11ms +[2025-07-17 22:49:22] [Rank 0] step:9321/10000 train_time:2229005ms step_avg:239.14ms +[2025-07-17 22:49:22] [Rank 0] step:9321/10000 train_time:2229005ms step_avg:239.14ms +[2025-07-17 22:49:27] [Rank 0] step:9341/10000 train_time:2234049ms step_avg:239.17ms +[2025-07-17 22:49:27] [Rank 0] step:9341/10000 train_time:2234049ms step_avg:239.17ms +[2025-07-17 22:49:32] [Rank 0] step:9361/10000 train_time:2239094ms step_avg:239.19ms +[2025-07-17 22:49:32] [Rank 0] step:9361/10000 train_time:2239094ms step_avg:239.19ms +[2025-07-17 
22:49:41] [Rank 0] PRINT: step:9375/10000 val_loss:4.4687 train_time:2243371ms step_avg:239.29ms +[2025-07-17 22:49:41] [Rank 0] PRINT: step:9375/10000 val_loss:4.4687 train_time:2243371ms step_avg:239.29ms +[2025-07-17 22:49:42] [Rank 0] step:9381/10000 train_time:2244121ms step_avg:239.22ms +[2025-07-17 22:49:42] [Rank 0] step:9381/10000 train_time:2244121ms step_avg:239.22ms +[2025-07-17 22:49:47] [Rank 0] step:9401/10000 train_time:2249138ms step_avg:239.24ms +[2025-07-17 22:49:47] [Rank 0] step:9401/10000 train_time:2249138ms step_avg:239.24ms +[2025-07-17 22:49:52] [Rank 0] step:9421/10000 train_time:2254178ms step_avg:239.27ms +[2025-07-17 22:49:52] [Rank 0] step:9421/10000 train_time:2254178ms step_avg:239.27ms +[2025-07-17 22:49:57] [Rank 0] step:9441/10000 train_time:2259214ms step_avg:239.30ms +[2025-07-17 22:49:57] [Rank 0] step:9441/10000 train_time:2259214ms step_avg:239.30ms +[2025-07-17 22:50:02] [Rank 0] step:9461/10000 train_time:2264267ms step_avg:239.33ms +[2025-07-17 22:50:02] [Rank 0] step:9461/10000 train_time:2264267ms step_avg:239.33ms +[2025-07-17 22:50:07] [Rank 0] step:9481/10000 train_time:2269312ms step_avg:239.35ms +[2025-07-17 22:50:07] [Rank 0] step:9481/10000 train_time:2269312ms step_avg:239.35ms +[2025-07-17 22:50:17] [Rank 0] PRINT: step:9500/10000 val_loss:4.4368 train_time:2274890ms step_avg:239.46ms +[2025-07-17 22:50:17] [Rank 0] PRINT: step:9500/10000 val_loss:4.4368 train_time:2274890ms step_avg:239.46ms +[2025-07-17 22:50:17] [Rank 0] step:9501/10000 train_time:2274901ms step_avg:239.44ms +[2025-07-17 22:50:17] [Rank 0] step:9501/10000 train_time:2274901ms step_avg:239.44ms +[2025-07-17 22:50:22] [Rank 0] step:9521/10000 train_time:2279414ms step_avg:239.41ms +[2025-07-17 22:50:22] [Rank 0] step:9521/10000 train_time:2279414ms step_avg:239.41ms +[2025-07-17 22:50:27] [Rank 0] step:9541/10000 train_time:2284470ms step_avg:239.44ms +[2025-07-17 22:50:27] [Rank 0] step:9541/10000 train_time:2284470ms step_avg:239.44ms 
+[2025-07-17 22:50:32] [Rank 0] step:9561/10000 train_time:2289501ms step_avg:239.46ms +[2025-07-17 22:50:32] [Rank 0] step:9561/10000 train_time:2289501ms step_avg:239.46ms +[2025-07-17 22:50:37] [Rank 0] step:9581/10000 train_time:2294530ms step_avg:239.49ms +[2025-07-17 22:50:37] [Rank 0] step:9581/10000 train_time:2294530ms step_avg:239.49ms +[2025-07-17 22:50:42] [Rank 0] step:9601/10000 train_time:2299562ms step_avg:239.51ms +[2025-07-17 22:50:42] [Rank 0] step:9601/10000 train_time:2299562ms step_avg:239.51ms +[2025-07-17 22:50:47] [Rank 0] step:9621/10000 train_time:2304636ms step_avg:239.54ms +[2025-07-17 22:50:47] [Rank 0] step:9621/10000 train_time:2304636ms step_avg:239.54ms +[2025-07-17 22:50:53] [Rank 0] PRINT: step:9625/10000 val_loss:4.4879 train_time:2306400ms step_avg:239.63ms +[2025-07-17 22:50:53] [Rank 0] PRINT: step:9625/10000 val_loss:4.4879 train_time:2306400ms step_avg:239.63ms +[2025-07-17 22:50:57] [Rank 0] step:9641/10000 train_time:2309696ms step_avg:239.57ms +[2025-07-17 22:50:57] [Rank 0] step:9641/10000 train_time:2309696ms step_avg:239.57ms +[2025-07-17 22:51:02] [Rank 0] step:9661/10000 train_time:2314795ms step_avg:239.60ms +[2025-07-17 22:51:02] [Rank 0] step:9661/10000 train_time:2314795ms step_avg:239.60ms +[2025-07-17 22:51:07] [Rank 0] step:9681/10000 train_time:2319895ms step_avg:239.63ms +[2025-07-17 22:51:07] [Rank 0] step:9681/10000 train_time:2319895ms step_avg:239.63ms +[2025-07-17 22:51:13] [Rank 0] step:9701/10000 train_time:2325004ms step_avg:239.67ms +[2025-07-17 22:51:13] [Rank 0] step:9701/10000 train_time:2325004ms step_avg:239.67ms +[2025-07-17 22:51:18] [Rank 0] step:9721/10000 train_time:2330088ms step_avg:239.70ms +[2025-07-17 22:51:18] [Rank 0] step:9721/10000 train_time:2330088ms step_avg:239.70ms +[2025-07-17 22:51:23] [Rank 0] step:9741/10000 train_time:2335193ms step_avg:239.73ms +[2025-07-17 22:51:23] [Rank 0] step:9741/10000 train_time:2335193ms step_avg:239.73ms +[2025-07-17 22:51:29] [Rank 0] PRINT: 
step:9750/10000 val_loss:4.5052 train_time:2338246ms step_avg:239.82ms +[2025-07-17 22:51:29] [Rank 0] PRINT: step:9750/10000 val_loss:4.5052 train_time:2338246ms step_avg:239.82ms +[2025-07-17 22:51:32] [Rank 0] step:9761/10000 train_time:2340281ms step_avg:239.76ms +[2025-07-17 22:51:32] [Rank 0] step:9761/10000 train_time:2340281ms step_avg:239.76ms +[2025-07-17 22:51:37] [Rank 0] step:9781/10000 train_time:2345376ms step_avg:239.79ms +[2025-07-17 22:51:37] [Rank 0] step:9781/10000 train_time:2345376ms step_avg:239.79ms +[2025-07-17 22:51:42] [Rank 0] step:9801/10000 train_time:2350449ms step_avg:239.82ms +[2025-07-17 22:51:42] [Rank 0] step:9801/10000 train_time:2350449ms step_avg:239.82ms +[2025-07-17 22:51:48] [Rank 0] step:9821/10000 train_time:2355537ms step_avg:239.85ms +[2025-07-17 22:51:48] [Rank 0] step:9821/10000 train_time:2355537ms step_avg:239.85ms +[2025-07-17 22:51:53] [Rank 0] step:9841/10000 train_time:2360617ms step_avg:239.88ms +[2025-07-17 22:51:53] [Rank 0] step:9841/10000 train_time:2360617ms step_avg:239.88ms +[2025-07-17 22:51:58] [Rank 0] step:9861/10000 train_time:2365697ms step_avg:239.90ms +[2025-07-17 22:51:58] [Rank 0] step:9861/10000 train_time:2365697ms step_avg:239.90ms +[2025-07-17 22:52:06] [Rank 0] PRINT: step:9875/10000 val_loss:4.4946 train_time:2370010ms step_avg:240.00ms +[2025-07-17 22:52:06] [Rank 0] PRINT: step:9875/10000 val_loss:4.4946 train_time:2370010ms step_avg:240.00ms +[2025-07-17 22:52:08] [Rank 0] step:9881/10000 train_time:2370769ms step_avg:239.93ms +[2025-07-17 22:52:08] [Rank 0] step:9881/10000 train_time:2370769ms step_avg:239.93ms +[2025-07-17 22:52:13] [Rank 0] step:9901/10000 train_time:2375856ms step_avg:239.96ms +[2025-07-17 22:52:13] [Rank 0] step:9901/10000 train_time:2375856ms step_avg:239.96ms +[2025-07-17 22:52:18] [Rank 0] step:9921/10000 train_time:2380960ms step_avg:239.99ms +[2025-07-17 22:52:18] [Rank 0] step:9921/10000 train_time:2380960ms step_avg:239.99ms +[2025-07-17 22:52:23] [Rank 0] 
step:9941/10000 train_time:2386081ms step_avg:240.02ms +[2025-07-17 22:52:23] [Rank 0] step:9941/10000 train_time:2386081ms step_avg:240.02ms +[2025-07-17 22:52:28] [Rank 0] step:9961/10000 train_time:2391191ms step_avg:240.06ms +[2025-07-17 22:52:28] [Rank 0] step:9961/10000 train_time:2391191ms step_avg:240.06ms +[2025-07-17 22:52:33] [Rank 0] step:9981/10000 train_time:2396314ms step_avg:240.09ms +[2025-07-17 22:52:33] [Rank 0] step:9981/10000 train_time:2396314ms step_avg:240.09ms +[2025-07-17 22:52:38] [Rank 0] step:10000/10000 train_time:2401168ms step_avg:240.12ms +[2025-07-17 22:52:38] [Rank 0] step:10000/10000 train_time:2401168ms step_avg:240.12ms +[2025-07-17 22:52:42] [Rank 0] PRINT: step:10000/10000 val_loss:4.4732 train_time:2401939ms step_avg:240.19ms +[2025-07-17 22:52:42] [Rank 0] PRINT: step:10000/10000 val_loss:4.4732 train_time:2401939ms step_avg:240.19ms +[2025-07-17 22:52:42] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 22:52:42 2025 --- +[2025-07-17 22:52:42] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 22:52:42 2025 --- +[2025-07-17 22:52:42] [Rank 0] PRINT: Peak memory allocated: 31040 MiB reserved: 31396 MiB +[2025-07-17 22:52:42] [Rank 0] PRINT: Peak memory allocated: 31040 MiB reserved: 31396 MiB diff --git a/logs_norope/diff_modes/mode_7_param_norope_seed_42/config.json b/logs_norope/diff_modes/mode_7_param_norope_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..303927c4ab61891cbabe7181400bb434e68228a3 --- /dev/null +++ b/logs_norope/diff_modes/mode_7_param_norope_seed_42/config.json @@ -0,0 +1,22 @@ +{ + "cli_args": { + "unet": false, + "seed": 42, + "optimizer_mode": 7, + "model_parameterization": "norope" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 10485760, + "train_seq_len": 49152, + 
"val_seq_len": 262144, + "num_iterations": 10000, + "cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 125, + "save_checkpoint": false + }, + "run_uuid_for_log": "835a96b9-a75d-45f9-b562-f0865da6ff43", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_norope/diff_modes/mode_7_param_norope_seed_42/training_log_835a96b9-a75d-45f9-b562-f0865da6ff43.txt b/logs_norope/diff_modes/mode_7_param_norope_seed_42/training_log_835a96b9-a75d-45f9-b562-f0865da6ff43.txt new file mode 100644 index 0000000000000000000000000000000000000000..1f872dc75e501cd7c62dbaef6381a9834ea878fd --- /dev/null +++ b/logs_norope/diff_modes/mode_7_param_norope_seed_42/training_log_835a96b9-a75d-45f9-b562-f0865da6ff43.txt @@ -0,0 +1,2360 @@ +[2025-07-17 15:41:08] [Rank 0] PRINT: --- Script Start: Thu Jul 17 15:41:08 2025 --- +[2025-07-17 15:41:08] [Rank 0] PRINT: --- Script Start: Thu Jul 17 15:41:08 2025 --- +[2025-07-17 15:41:08] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=7, model_parameterization='norope') +[2025-07-17 15:41:08] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=7, model_parameterization='norope') +[2025-07-17 15:41:08] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 15:41:08] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 15:41:08] [Rank 0] PRINT: Using fixed seed: 42 +[2025-07-17 15:41:08] [Rank 0] PRINT: Using fixed seed: 42 +[2025-07-17 15:41:08] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_7_param_norope_seed_42 +[2025-07-17 15:41:08] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_7_param_norope_seed_42 +[2025-07-17 15:41:08] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path 
+import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * 
num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used 
for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + 
config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 15:41:08] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + 
num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + 
run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 15:41:08] [Rank 0] PRINT: Constructing model... +[2025-07-17 15:41:08] [Rank 0] PRINT: Constructing model... +[2025-07-17 15:41:11] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 15:41:11] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 15:41:11] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 15:41:11] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 15:41:11] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 15:41:11] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 15:41:11] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 15:41:11] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 15:41:11] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 7 +[2025-07-17 15:41:11] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 7 +[2025-07-17 15:41:11] [Rank 0] PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: 0.001). +[2025-07-17 15:41:11] [Rank 0] PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: 0.001). +[2025-07-17 15:41:11] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-07-17 15:41:11] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-07-17 15:41:11] [Rank 0] PRINT: Muon optimizer is active with 46 parameters. +[2025-07-17 15:41:11] [Rank 0] PRINT: Muon optimizer is active with 46 parameters. +[2025-07-17 15:41:11] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 15:41:11] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 15:41:11] [Rank 0] PRINT: Model compilation complete. +[2025-07-17 15:41:11] [Rank 0] PRINT: Model compilation complete. +[2025-07-17 15:41:11] [Rank 0] PRINT: Starting warmup... 
+[2025-07-17 15:41:11] [Rank 0] PRINT: Starting warmup... +[2025-07-17 15:42:17] [Rank 0] PRINT: Warmup complete. +[2025-07-17 15:42:17] [Rank 0] PRINT: Warmup complete. +[2025-07-17 15:42:17] [Rank 0] PRINT: Starting training... +[2025-07-17 15:42:17] [Rank 0] PRINT: Starting training... +[2025-07-17 15:42:27] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 15:42:27] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 15:42:32] [Rank 0] step:21/10000 train_time:4827ms step_avg:229.86ms +[2025-07-17 15:42:32] [Rank 0] step:21/10000 train_time:4827ms step_avg:229.86ms +[2025-07-17 15:42:36] [Rank 0] step:41/10000 train_time:9362ms step_avg:228.34ms +[2025-07-17 15:42:36] [Rank 0] step:41/10000 train_time:9362ms step_avg:228.34ms +[2025-07-17 15:42:41] [Rank 0] step:61/10000 train_time:13901ms step_avg:227.88ms +[2025-07-17 15:42:41] [Rank 0] step:61/10000 train_time:13901ms step_avg:227.88ms +[2025-07-17 15:42:45] [Rank 0] step:81/10000 train_time:18442ms step_avg:227.68ms +[2025-07-17 15:42:45] [Rank 0] step:81/10000 train_time:18442ms step_avg:227.68ms +[2025-07-17 15:42:50] [Rank 0] step:101/10000 train_time:22986ms step_avg:227.59ms +[2025-07-17 15:42:50] [Rank 0] step:101/10000 train_time:22986ms step_avg:227.59ms +[2025-07-17 15:42:55] [Rank 0] step:121/10000 train_time:27533ms step_avg:227.55ms +[2025-07-17 15:42:55] [Rank 0] step:121/10000 train_time:27533ms step_avg:227.55ms +[2025-07-17 15:43:00] [Rank 0] PRINT: step:125/10000 val_loss:5.1415 train_time:28680ms step_avg:229.44ms +[2025-07-17 15:43:00] [Rank 0] PRINT: step:125/10000 val_loss:5.1415 train_time:28680ms step_avg:229.44ms +[2025-07-17 15:43:03] [Rank 0] step:141/10000 train_time:32082ms step_avg:227.54ms +[2025-07-17 15:43:03] [Rank 0] step:141/10000 train_time:32082ms step_avg:227.54ms +[2025-07-17 15:43:08] [Rank 0] step:161/10000 train_time:36631ms step_avg:227.52ms +[2025-07-17 15:43:08] [Rank 0] step:161/10000 
train_time:36631ms step_avg:227.52ms +[2025-07-17 15:43:12] [Rank 0] step:181/10000 train_time:41181ms step_avg:227.52ms +[2025-07-17 15:43:12] [Rank 0] step:181/10000 train_time:41181ms step_avg:227.52ms +[2025-07-17 15:43:17] [Rank 0] step:201/10000 train_time:45730ms step_avg:227.51ms +[2025-07-17 15:43:17] [Rank 0] step:201/10000 train_time:45730ms step_avg:227.51ms +[2025-07-17 15:43:21] [Rank 0] step:221/10000 train_time:50279ms step_avg:227.51ms +[2025-07-17 15:43:21] [Rank 0] step:221/10000 train_time:50279ms step_avg:227.51ms +[2025-07-17 15:43:26] [Rank 0] step:241/10000 train_time:54825ms step_avg:227.49ms +[2025-07-17 15:43:26] [Rank 0] step:241/10000 train_time:54825ms step_avg:227.49ms +[2025-07-17 15:43:32] [Rank 0] PRINT: step:250/10000 val_loss:4.6952 train_time:57107ms step_avg:228.43ms +[2025-07-17 15:43:32] [Rank 0] PRINT: step:250/10000 val_loss:4.6952 train_time:57107ms step_avg:228.43ms +[2025-07-17 15:43:35] [Rank 0] step:261/10000 train_time:59377ms step_avg:227.50ms +[2025-07-17 15:43:35] [Rank 0] step:261/10000 train_time:59377ms step_avg:227.50ms +[2025-07-17 15:43:39] [Rank 0] step:281/10000 train_time:63926ms step_avg:227.49ms +[2025-07-17 15:43:39] [Rank 0] step:281/10000 train_time:63926ms step_avg:227.49ms +[2025-07-17 15:43:44] [Rank 0] step:301/10000 train_time:68474ms step_avg:227.49ms +[2025-07-17 15:43:44] [Rank 0] step:301/10000 train_time:68474ms step_avg:227.49ms +[2025-07-17 15:43:49] [Rank 0] step:321/10000 train_time:73018ms step_avg:227.47ms +[2025-07-17 15:43:49] [Rank 0] step:321/10000 train_time:73018ms step_avg:227.47ms +[2025-07-17 15:43:53] [Rank 0] step:341/10000 train_time:77566ms step_avg:227.47ms +[2025-07-17 15:43:53] [Rank 0] step:341/10000 train_time:77566ms step_avg:227.47ms +[2025-07-17 15:43:58] [Rank 0] step:361/10000 train_time:82115ms step_avg:227.47ms +[2025-07-17 15:43:58] [Rank 0] step:361/10000 train_time:82115ms step_avg:227.47ms +[2025-07-17 15:44:05] [Rank 0] PRINT: step:375/10000 
val_loss:4.4974 train_time:85527ms step_avg:228.07ms +[2025-07-17 15:44:05] [Rank 0] PRINT: step:375/10000 val_loss:4.4974 train_time:85527ms step_avg:228.07ms +[2025-07-17 15:44:07] [Rank 0] step:381/10000 train_time:86661ms step_avg:227.46ms +[2025-07-17 15:44:07] [Rank 0] step:381/10000 train_time:86661ms step_avg:227.46ms +[2025-07-17 15:44:11] [Rank 0] step:401/10000 train_time:91207ms step_avg:227.45ms +[2025-07-17 15:44:11] [Rank 0] step:401/10000 train_time:91207ms step_avg:227.45ms +[2025-07-17 15:44:16] [Rank 0] step:421/10000 train_time:95755ms step_avg:227.45ms +[2025-07-17 15:44:16] [Rank 0] step:421/10000 train_time:95755ms step_avg:227.45ms +[2025-07-17 15:44:20] [Rank 0] step:441/10000 train_time:100302ms step_avg:227.44ms +[2025-07-17 15:44:20] [Rank 0] step:441/10000 train_time:100302ms step_avg:227.44ms +[2025-07-17 15:44:25] [Rank 0] step:461/10000 train_time:104849ms step_avg:227.44ms +[2025-07-17 15:44:25] [Rank 0] step:461/10000 train_time:104849ms step_avg:227.44ms +[2025-07-17 15:44:29] [Rank 0] step:481/10000 train_time:109399ms step_avg:227.44ms +[2025-07-17 15:44:29] [Rank 0] step:481/10000 train_time:109399ms step_avg:227.44ms +[2025-07-17 15:44:38] [Rank 0] PRINT: step:500/10000 val_loss:4.4003 train_time:113951ms step_avg:227.90ms +[2025-07-17 15:44:38] [Rank 0] PRINT: step:500/10000 val_loss:4.4003 train_time:113951ms step_avg:227.90ms +[2025-07-17 15:44:38] [Rank 0] step:501/10000 train_time:113968ms step_avg:227.48ms +[2025-07-17 15:44:38] [Rank 0] step:501/10000 train_time:113968ms step_avg:227.48ms +[2025-07-17 15:44:43] [Rank 0] step:521/10000 train_time:119010ms step_avg:228.43ms +[2025-07-17 15:44:43] [Rank 0] step:521/10000 train_time:119010ms step_avg:228.43ms +[2025-07-17 15:44:48] [Rank 0] step:541/10000 train_time:123564ms step_avg:228.40ms +[2025-07-17 15:44:48] [Rank 0] step:541/10000 train_time:123564ms step_avg:228.40ms +[2025-07-17 15:44:53] [Rank 0] step:561/10000 train_time:128118ms step_avg:228.37ms +[2025-07-17 
15:44:53] [Rank 0] step:561/10000 train_time:128118ms step_avg:228.37ms +[2025-07-17 15:44:57] [Rank 0] step:581/10000 train_time:132671ms step_avg:228.35ms +[2025-07-17 15:44:57] [Rank 0] step:581/10000 train_time:132671ms step_avg:228.35ms +[2025-07-17 15:45:02] [Rank 0] step:601/10000 train_time:137228ms step_avg:228.33ms +[2025-07-17 15:45:02] [Rank 0] step:601/10000 train_time:137228ms step_avg:228.33ms +[2025-07-17 15:45:06] [Rank 0] step:621/10000 train_time:141786ms step_avg:228.32ms +[2025-07-17 15:45:06] [Rank 0] step:621/10000 train_time:141786ms step_avg:228.32ms +[2025-07-17 15:45:12] [Rank 0] PRINT: step:625/10000 val_loss:4.4985 train_time:142936ms step_avg:228.70ms +[2025-07-17 15:45:12] [Rank 0] PRINT: step:625/10000 val_loss:4.4985 train_time:142936ms step_avg:228.70ms +[2025-07-17 15:45:15] [Rank 0] step:641/10000 train_time:146349ms step_avg:228.31ms +[2025-07-17 15:45:15] [Rank 0] step:641/10000 train_time:146349ms step_avg:228.31ms +[2025-07-17 15:45:20] [Rank 0] step:661/10000 train_time:150910ms step_avg:228.31ms +[2025-07-17 15:45:20] [Rank 0] step:661/10000 train_time:150910ms step_avg:228.31ms +[2025-07-17 15:45:24] [Rank 0] step:681/10000 train_time:155470ms step_avg:228.30ms +[2025-07-17 15:45:24] [Rank 0] step:681/10000 train_time:155470ms step_avg:228.30ms +[2025-07-17 15:45:29] [Rank 0] step:701/10000 train_time:160029ms step_avg:228.29ms +[2025-07-17 15:45:29] [Rank 0] step:701/10000 train_time:160029ms step_avg:228.29ms +[2025-07-17 15:45:33] [Rank 0] step:721/10000 train_time:164591ms step_avg:228.28ms +[2025-07-17 15:45:33] [Rank 0] step:721/10000 train_time:164591ms step_avg:228.28ms +[2025-07-17 15:45:38] [Rank 0] step:741/10000 train_time:169150ms step_avg:228.27ms +[2025-07-17 15:45:38] [Rank 0] step:741/10000 train_time:169150ms step_avg:228.27ms +[2025-07-17 15:45:45] [Rank 0] PRINT: step:750/10000 val_loss:4.4855 train_time:171451ms step_avg:228.60ms +[2025-07-17 15:45:45] [Rank 0] PRINT: step:750/10000 val_loss:4.4855 
train_time:171451ms step_avg:228.60ms +[2025-07-17 15:45:47] [Rank 0] step:761/10000 train_time:173740ms step_avg:228.30ms +[2025-07-17 15:45:47] [Rank 0] step:761/10000 train_time:173740ms step_avg:228.30ms +[2025-07-17 15:45:52] [Rank 0] step:781/10000 train_time:178333ms step_avg:228.34ms +[2025-07-17 15:45:52] [Rank 0] step:781/10000 train_time:178333ms step_avg:228.34ms +[2025-07-17 15:45:56] [Rank 0] step:801/10000 train_time:182929ms step_avg:228.38ms +[2025-07-17 15:45:56] [Rank 0] step:801/10000 train_time:182929ms step_avg:228.38ms +[2025-07-17 15:46:01] [Rank 0] step:821/10000 train_time:187528ms step_avg:228.41ms +[2025-07-17 15:46:01] [Rank 0] step:821/10000 train_time:187528ms step_avg:228.41ms +[2025-07-17 15:46:05] [Rank 0] step:841/10000 train_time:192124ms step_avg:228.45ms +[2025-07-17 15:46:05] [Rank 0] step:841/10000 train_time:192124ms step_avg:228.45ms +[2025-07-17 15:46:10] [Rank 0] step:861/10000 train_time:196721ms step_avg:228.48ms +[2025-07-17 15:46:10] [Rank 0] step:861/10000 train_time:196721ms step_avg:228.48ms +[2025-07-17 15:46:18] [Rank 0] PRINT: step:875/10000 val_loss:4.6586 train_time:200172ms step_avg:228.77ms +[2025-07-17 15:46:18] [Rank 0] PRINT: step:875/10000 val_loss:4.6586 train_time:200172ms step_avg:228.77ms +[2025-07-17 15:46:19] [Rank 0] step:881/10000 train_time:201317ms step_avg:228.51ms +[2025-07-17 15:46:19] [Rank 0] step:881/10000 train_time:201317ms step_avg:228.51ms +[2025-07-17 15:46:24] [Rank 0] step:901/10000 train_time:205909ms step_avg:228.53ms +[2025-07-17 15:46:24] [Rank 0] step:901/10000 train_time:205909ms step_avg:228.53ms +[2025-07-17 15:46:28] [Rank 0] step:921/10000 train_time:210515ms step_avg:228.57ms +[2025-07-17 15:46:28] [Rank 0] step:921/10000 train_time:210515ms step_avg:228.57ms +[2025-07-17 15:46:33] [Rank 0] step:941/10000 train_time:215109ms step_avg:228.60ms +[2025-07-17 15:46:33] [Rank 0] step:941/10000 train_time:215109ms step_avg:228.60ms +[2025-07-17 15:46:38] [Rank 0] 
step:961/10000 train_time:219708ms step_avg:228.62ms +[2025-07-17 15:46:38] [Rank 0] step:961/10000 train_time:219708ms step_avg:228.62ms +[2025-07-17 15:46:42] [Rank 0] step:981/10000 train_time:224308ms step_avg:228.65ms +[2025-07-17 15:46:42] [Rank 0] step:981/10000 train_time:224308ms step_avg:228.65ms +[2025-07-17 15:46:51] [Rank 0] PRINT: step:1000/10000 val_loss:4.6734 train_time:228913ms step_avg:228.91ms +[2025-07-17 15:46:51] [Rank 0] PRINT: step:1000/10000 val_loss:4.6734 train_time:228913ms step_avg:228.91ms +[2025-07-17 15:46:51] [Rank 0] step:1001/10000 train_time:228930ms step_avg:228.70ms +[2025-07-17 15:46:51] [Rank 0] step:1001/10000 train_time:228930ms step_avg:228.70ms +[2025-07-17 15:46:56] [Rank 0] step:1021/10000 train_time:233603ms step_avg:228.80ms +[2025-07-17 15:46:56] [Rank 0] step:1021/10000 train_time:233603ms step_avg:228.80ms +[2025-07-17 15:47:01] [Rank 0] step:1041/10000 train_time:238208ms step_avg:228.83ms +[2025-07-17 15:47:01] [Rank 0] step:1041/10000 train_time:238208ms step_avg:228.83ms +[2025-07-17 15:47:05] [Rank 0] step:1061/10000 train_time:242814ms step_avg:228.85ms +[2025-07-17 15:47:05] [Rank 0] step:1061/10000 train_time:242814ms step_avg:228.85ms +[2025-07-17 15:47:10] [Rank 0] step:1081/10000 train_time:247420ms step_avg:228.88ms +[2025-07-17 15:47:10] [Rank 0] step:1081/10000 train_time:247420ms step_avg:228.88ms +[2025-07-17 15:47:14] [Rank 0] step:1101/10000 train_time:252028ms step_avg:228.91ms +[2025-07-17 15:47:14] [Rank 0] step:1101/10000 train_time:252028ms step_avg:228.91ms +[2025-07-17 15:47:19] [Rank 0] step:1121/10000 train_time:256635ms step_avg:228.93ms +[2025-07-17 15:47:19] [Rank 0] step:1121/10000 train_time:256635ms step_avg:228.93ms +[2025-07-17 15:47:24] [Rank 0] PRINT: step:1125/10000 val_loss:4.6237 train_time:257794ms step_avg:229.15ms +[2025-07-17 15:47:24] [Rank 0] PRINT: step:1125/10000 val_loss:4.6237 train_time:257794ms step_avg:229.15ms +[2025-07-17 15:47:28] [Rank 0] step:1141/10000 
train_time:261242ms step_avg:228.96ms +[2025-07-17 15:47:28] [Rank 0] step:1141/10000 train_time:261242ms step_avg:228.96ms +[2025-07-17 15:47:33] [Rank 0] step:1161/10000 train_time:265850ms step_avg:228.98ms +[2025-07-17 15:47:33] [Rank 0] step:1161/10000 train_time:265850ms step_avg:228.98ms +[2025-07-17 15:47:37] [Rank 0] step:1181/10000 train_time:270459ms step_avg:229.01ms +[2025-07-17 15:47:37] [Rank 0] step:1181/10000 train_time:270459ms step_avg:229.01ms +[2025-07-17 15:47:42] [Rank 0] step:1201/10000 train_time:275070ms step_avg:229.03ms +[2025-07-17 15:47:42] [Rank 0] step:1201/10000 train_time:275070ms step_avg:229.03ms +[2025-07-17 15:47:46] [Rank 0] step:1221/10000 train_time:279677ms step_avg:229.06ms +[2025-07-17 15:47:46] [Rank 0] step:1221/10000 train_time:279677ms step_avg:229.06ms +[2025-07-17 15:47:51] [Rank 0] step:1241/10000 train_time:284280ms step_avg:229.07ms +[2025-07-17 15:47:51] [Rank 0] step:1241/10000 train_time:284280ms step_avg:229.07ms +[2025-07-17 15:47:58] [Rank 0] PRINT: step:1250/10000 val_loss:4.6868 train_time:286588ms step_avg:229.27ms +[2025-07-17 15:47:58] [Rank 0] PRINT: step:1250/10000 val_loss:4.6868 train_time:286588ms step_avg:229.27ms +[2025-07-17 15:48:00] [Rank 0] step:1261/10000 train_time:288880ms step_avg:229.09ms +[2025-07-17 15:48:00] [Rank 0] step:1261/10000 train_time:288880ms step_avg:229.09ms +[2025-07-17 15:48:05] [Rank 0] step:1281/10000 train_time:293479ms step_avg:229.10ms +[2025-07-17 15:48:05] [Rank 0] step:1281/10000 train_time:293479ms step_avg:229.10ms +[2025-07-17 15:48:09] [Rank 0] step:1301/10000 train_time:298077ms step_avg:229.11ms +[2025-07-17 15:48:09] [Rank 0] step:1301/10000 train_time:298077ms step_avg:229.11ms +[2025-07-17 15:48:14] [Rank 0] step:1321/10000 train_time:302675ms step_avg:229.13ms +[2025-07-17 15:48:14] [Rank 0] step:1321/10000 train_time:302675ms step_avg:229.13ms +[2025-07-17 15:48:19] [Rank 0] step:1341/10000 train_time:307276ms step_avg:229.14ms +[2025-07-17 15:48:19] 
[Rank 0] step:1341/10000 train_time:307276ms step_avg:229.14ms +[2025-07-17 15:48:23] [Rank 0] step:1361/10000 train_time:311872ms step_avg:229.15ms +[2025-07-17 15:48:23] [Rank 0] step:1361/10000 train_time:311872ms step_avg:229.15ms +[2025-07-17 15:48:31] [Rank 0] PRINT: step:1375/10000 val_loss:4.6442 train_time:315323ms step_avg:229.33ms +[2025-07-17 15:48:31] [Rank 0] PRINT: step:1375/10000 val_loss:4.6442 train_time:315323ms step_avg:229.33ms +[2025-07-17 15:48:32] [Rank 0] step:1381/10000 train_time:316467ms step_avg:229.16ms +[2025-07-17 15:48:32] [Rank 0] step:1381/10000 train_time:316467ms step_avg:229.16ms +[2025-07-17 15:48:37] [Rank 0] step:1401/10000 train_time:321064ms step_avg:229.17ms +[2025-07-17 15:48:37] [Rank 0] step:1401/10000 train_time:321064ms step_avg:229.17ms +[2025-07-17 15:48:41] [Rank 0] step:1421/10000 train_time:325659ms step_avg:229.18ms +[2025-07-17 15:48:41] [Rank 0] step:1421/10000 train_time:325659ms step_avg:229.18ms +[2025-07-17 15:48:46] [Rank 0] step:1441/10000 train_time:330258ms step_avg:229.19ms +[2025-07-17 15:48:46] [Rank 0] step:1441/10000 train_time:330258ms step_avg:229.19ms +[2025-07-17 15:48:51] [Rank 0] step:1461/10000 train_time:334856ms step_avg:229.20ms +[2025-07-17 15:48:51] [Rank 0] step:1461/10000 train_time:334856ms step_avg:229.20ms +[2025-07-17 15:48:55] [Rank 0] step:1481/10000 train_time:339449ms step_avg:229.20ms +[2025-07-17 15:48:55] [Rank 0] step:1481/10000 train_time:339449ms step_avg:229.20ms +[2025-07-17 15:49:04] [Rank 0] PRINT: step:1500/10000 val_loss:4.6244 train_time:344074ms step_avg:229.38ms +[2025-07-17 15:49:04] [Rank 0] PRINT: step:1500/10000 val_loss:4.6244 train_time:344074ms step_avg:229.38ms +[2025-07-17 15:49:04] [Rank 0] step:1501/10000 train_time:344092ms step_avg:229.24ms +[2025-07-17 15:49:04] [Rank 0] step:1501/10000 train_time:344092ms step_avg:229.24ms +[2025-07-17 15:49:09] [Rank 0] step:1521/10000 train_time:348696ms step_avg:229.25ms +[2025-07-17 15:49:09] [Rank 0] 
step:1521/10000 train_time:348696ms step_avg:229.25ms +[2025-07-17 15:49:14] [Rank 0] step:1541/10000 train_time:353834ms step_avg:229.61ms +[2025-07-17 15:49:14] [Rank 0] step:1541/10000 train_time:353834ms step_avg:229.61ms +[2025-07-17 15:49:18] [Rank 0] step:1561/10000 train_time:358460ms step_avg:229.63ms +[2025-07-17 15:49:18] [Rank 0] step:1561/10000 train_time:358460ms step_avg:229.63ms +[2025-07-17 15:49:23] [Rank 0] step:1581/10000 train_time:363085ms step_avg:229.66ms +[2025-07-17 15:49:23] [Rank 0] step:1581/10000 train_time:363085ms step_avg:229.66ms +[2025-07-17 15:49:28] [Rank 0] step:1601/10000 train_time:367713ms step_avg:229.68ms +[2025-07-17 15:49:28] [Rank 0] step:1601/10000 train_time:367713ms step_avg:229.68ms +[2025-07-17 15:49:32] [Rank 0] step:1621/10000 train_time:372338ms step_avg:229.70ms +[2025-07-17 15:49:32] [Rank 0] step:1621/10000 train_time:372338ms step_avg:229.70ms +[2025-07-17 15:49:38] [Rank 0] PRINT: step:1625/10000 val_loss:4.6996 train_time:373502ms step_avg:229.85ms +[2025-07-17 15:49:38] [Rank 0] PRINT: step:1625/10000 val_loss:4.6996 train_time:373502ms step_avg:229.85ms +[2025-07-17 15:49:41] [Rank 0] step:1641/10000 train_time:376965ms step_avg:229.72ms +[2025-07-17 15:49:41] [Rank 0] step:1641/10000 train_time:376965ms step_avg:229.72ms +[2025-07-17 15:49:46] [Rank 0] step:1661/10000 train_time:381591ms step_avg:229.74ms +[2025-07-17 15:49:46] [Rank 0] step:1661/10000 train_time:381591ms step_avg:229.74ms +[2025-07-17 15:49:50] [Rank 0] step:1681/10000 train_time:386220ms step_avg:229.76ms +[2025-07-17 15:49:50] [Rank 0] step:1681/10000 train_time:386220ms step_avg:229.76ms +[2025-07-17 15:49:55] [Rank 0] step:1701/10000 train_time:390850ms step_avg:229.78ms +[2025-07-17 15:49:55] [Rank 0] step:1701/10000 train_time:390850ms step_avg:229.78ms +[2025-07-17 15:50:00] [Rank 0] step:1721/10000 train_time:395479ms step_avg:229.80ms +[2025-07-17 15:50:00] [Rank 0] step:1721/10000 train_time:395479ms step_avg:229.80ms 
+[2025-07-17 15:50:04] [Rank 0] step:1741/10000 train_time:400109ms step_avg:229.82ms +[2025-07-17 15:50:04] [Rank 0] step:1741/10000 train_time:400109ms step_avg:229.82ms +[2025-07-17 15:50:11] [Rank 0] PRINT: step:1750/10000 val_loss:4.7021 train_time:402432ms step_avg:229.96ms +[2025-07-17 15:50:11] [Rank 0] PRINT: step:1750/10000 val_loss:4.7021 train_time:402432ms step_avg:229.96ms +[2025-07-17 15:50:13] [Rank 0] step:1761/10000 train_time:404740ms step_avg:229.84ms +[2025-07-17 15:50:13] [Rank 0] step:1761/10000 train_time:404740ms step_avg:229.84ms +[2025-07-17 15:50:18] [Rank 0] step:1781/10000 train_time:409371ms step_avg:229.85ms +[2025-07-17 15:50:18] [Rank 0] step:1781/10000 train_time:409371ms step_avg:229.85ms +[2025-07-17 15:50:23] [Rank 0] step:1801/10000 train_time:414004ms step_avg:229.87ms +[2025-07-17 15:50:23] [Rank 0] step:1801/10000 train_time:414004ms step_avg:229.87ms +[2025-07-17 15:50:27] [Rank 0] step:1821/10000 train_time:418638ms step_avg:229.89ms +[2025-07-17 15:50:27] [Rank 0] step:1821/10000 train_time:418638ms step_avg:229.89ms +[2025-07-17 15:50:32] [Rank 0] step:1841/10000 train_time:423273ms step_avg:229.91ms +[2025-07-17 15:50:32] [Rank 0] step:1841/10000 train_time:423273ms step_avg:229.91ms +[2025-07-17 15:50:37] [Rank 0] step:1861/10000 train_time:427908ms step_avg:229.93ms +[2025-07-17 15:50:37] [Rank 0] step:1861/10000 train_time:427908ms step_avg:229.93ms +[2025-07-17 15:50:44] [Rank 0] PRINT: step:1875/10000 val_loss:4.6306 train_time:431386ms step_avg:230.07ms +[2025-07-17 15:50:44] [Rank 0] PRINT: step:1875/10000 val_loss:4.6306 train_time:431386ms step_avg:230.07ms +[2025-07-17 15:50:46] [Rank 0] step:1881/10000 train_time:432538ms step_avg:229.95ms +[2025-07-17 15:50:46] [Rank 0] step:1881/10000 train_time:432538ms step_avg:229.95ms +[2025-07-17 15:50:50] [Rank 0] step:1901/10000 train_time:437171ms step_avg:229.97ms +[2025-07-17 15:50:50] [Rank 0] step:1901/10000 train_time:437171ms step_avg:229.97ms +[2025-07-17 
15:50:55] [Rank 0] step:1921/10000 train_time:441809ms step_avg:229.99ms +[2025-07-17 15:50:55] [Rank 0] step:1921/10000 train_time:441809ms step_avg:229.99ms +[2025-07-17 15:51:00] [Rank 0] step:1941/10000 train_time:446443ms step_avg:230.01ms +[2025-07-17 15:51:00] [Rank 0] step:1941/10000 train_time:446443ms step_avg:230.01ms +[2025-07-17 15:51:04] [Rank 0] step:1961/10000 train_time:451077ms step_avg:230.02ms +[2025-07-17 15:51:04] [Rank 0] step:1961/10000 train_time:451077ms step_avg:230.02ms +[2025-07-17 15:51:09] [Rank 0] step:1981/10000 train_time:455714ms step_avg:230.04ms +[2025-07-17 15:51:09] [Rank 0] step:1981/10000 train_time:455714ms step_avg:230.04ms +[2025-07-17 15:51:18] [Rank 0] PRINT: step:2000/10000 val_loss:4.7145 train_time:460353ms step_avg:230.18ms +[2025-07-17 15:51:18] [Rank 0] PRINT: step:2000/10000 val_loss:4.7145 train_time:460353ms step_avg:230.18ms +[2025-07-17 15:51:18] [Rank 0] step:2001/10000 train_time:460371ms step_avg:230.07ms +[2025-07-17 15:51:18] [Rank 0] step:2001/10000 train_time:460371ms step_avg:230.07ms +[2025-07-17 15:51:23] [Rank 0] step:2021/10000 train_time:464983ms step_avg:230.08ms +[2025-07-17 15:51:23] [Rank 0] step:2021/10000 train_time:464983ms step_avg:230.08ms +[2025-07-17 15:51:28] [Rank 0] step:2041/10000 train_time:470138ms step_avg:230.35ms +[2025-07-17 15:51:28] [Rank 0] step:2041/10000 train_time:470138ms step_avg:230.35ms +[2025-07-17 15:51:32] [Rank 0] step:2061/10000 train_time:474774ms step_avg:230.36ms +[2025-07-17 15:51:32] [Rank 0] step:2061/10000 train_time:474774ms step_avg:230.36ms +[2025-07-17 15:51:37] [Rank 0] step:2081/10000 train_time:479410ms step_avg:230.37ms +[2025-07-17 15:51:37] [Rank 0] step:2081/10000 train_time:479410ms step_avg:230.37ms +[2025-07-17 15:51:42] [Rank 0] step:2101/10000 train_time:484045ms step_avg:230.39ms +[2025-07-17 15:51:42] [Rank 0] step:2101/10000 train_time:484045ms step_avg:230.39ms +[2025-07-17 15:51:46] [Rank 0] step:2121/10000 train_time:488681ms 
step_avg:230.40ms +[2025-07-17 15:51:46] [Rank 0] step:2121/10000 train_time:488681ms step_avg:230.40ms +[2025-07-17 15:51:52] [Rank 0] PRINT: step:2125/10000 val_loss:4.7858 train_time:489846ms step_avg:230.52ms +[2025-07-17 15:51:52] [Rank 0] PRINT: step:2125/10000 val_loss:4.7858 train_time:489846ms step_avg:230.52ms +[2025-07-17 15:51:56] [Rank 0] step:2141/10000 train_time:493312ms step_avg:230.41ms +[2025-07-17 15:51:56] [Rank 0] step:2141/10000 train_time:493312ms step_avg:230.41ms +[2025-07-17 15:52:00] [Rank 0] step:2161/10000 train_time:497946ms step_avg:230.42ms +[2025-07-17 15:52:00] [Rank 0] step:2161/10000 train_time:497946ms step_avg:230.42ms +[2025-07-17 15:52:05] [Rank 0] step:2181/10000 train_time:502582ms step_avg:230.44ms +[2025-07-17 15:52:05] [Rank 0] step:2181/10000 train_time:502582ms step_avg:230.44ms +[2025-07-17 15:52:09] [Rank 0] step:2201/10000 train_time:507219ms step_avg:230.45ms +[2025-07-17 15:52:09] [Rank 0] step:2201/10000 train_time:507219ms step_avg:230.45ms +[2025-07-17 15:52:14] [Rank 0] step:2221/10000 train_time:511858ms step_avg:230.46ms +[2025-07-17 15:52:14] [Rank 0] step:2221/10000 train_time:511858ms step_avg:230.46ms +[2025-07-17 15:52:19] [Rank 0] step:2241/10000 train_time:516589ms step_avg:230.52ms +[2025-07-17 15:52:19] [Rank 0] step:2241/10000 train_time:516589ms step_avg:230.52ms +[2025-07-17 15:52:25] [Rank 0] PRINT: step:2250/10000 val_loss:4.2426 train_time:518970ms step_avg:230.65ms +[2025-07-17 15:52:25] [Rank 0] PRINT: step:2250/10000 val_loss:4.2426 train_time:518970ms step_avg:230.65ms +[2025-07-17 15:52:28] [Rank 0] step:2261/10000 train_time:521337ms step_avg:230.58ms +[2025-07-17 15:52:28] [Rank 0] step:2261/10000 train_time:521337ms step_avg:230.58ms +[2025-07-17 15:52:33] [Rank 0] step:2281/10000 train_time:526087ms step_avg:230.64ms +[2025-07-17 15:52:33] [Rank 0] step:2281/10000 train_time:526087ms step_avg:230.64ms +[2025-07-17 15:52:38] [Rank 0] step:2301/10000 train_time:530836ms 
step_avg:230.70ms +[2025-07-17 15:52:38] [Rank 0] step:2301/10000 train_time:530836ms step_avg:230.70ms +[2025-07-17 15:52:42] [Rank 0] step:2321/10000 train_time:535583ms step_avg:230.76ms +[2025-07-17 15:52:42] [Rank 0] step:2321/10000 train_time:535583ms step_avg:230.76ms +[2025-07-17 15:52:47] [Rank 0] step:2341/10000 train_time:540333ms step_avg:230.81ms +[2025-07-17 15:52:47] [Rank 0] step:2341/10000 train_time:540333ms step_avg:230.81ms +[2025-07-17 15:52:52] [Rank 0] step:2361/10000 train_time:545084ms step_avg:230.87ms +[2025-07-17 15:52:52] [Rank 0] step:2361/10000 train_time:545084ms step_avg:230.87ms +[2025-07-17 15:53:00] [Rank 0] PRINT: step:2375/10000 val_loss:4.3018 train_time:548652ms step_avg:231.01ms +[2025-07-17 15:53:00] [Rank 0] PRINT: step:2375/10000 val_loss:4.3018 train_time:548652ms step_avg:231.01ms +[2025-07-17 15:53:01] [Rank 0] step:2381/10000 train_time:549831ms step_avg:230.92ms +[2025-07-17 15:53:01] [Rank 0] step:2381/10000 train_time:549831ms step_avg:230.92ms +[2025-07-17 15:53:06] [Rank 0] step:2401/10000 train_time:554582ms step_avg:230.98ms +[2025-07-17 15:53:06] [Rank 0] step:2401/10000 train_time:554582ms step_avg:230.98ms +[2025-07-17 15:53:11] [Rank 0] step:2421/10000 train_time:559334ms step_avg:231.03ms +[2025-07-17 15:53:11] [Rank 0] step:2421/10000 train_time:559334ms step_avg:231.03ms +[2025-07-17 15:53:15] [Rank 0] step:2441/10000 train_time:564086ms step_avg:231.09ms +[2025-07-17 15:53:15] [Rank 0] step:2441/10000 train_time:564086ms step_avg:231.09ms +[2025-07-17 15:53:20] [Rank 0] step:2461/10000 train_time:568833ms step_avg:231.14ms +[2025-07-17 15:53:20] [Rank 0] step:2461/10000 train_time:568833ms step_avg:231.14ms +[2025-07-17 15:53:25] [Rank 0] step:2481/10000 train_time:573587ms step_avg:231.19ms +[2025-07-17 15:53:25] [Rank 0] step:2481/10000 train_time:573587ms step_avg:231.19ms +[2025-07-17 15:53:34] [Rank 0] PRINT: step:2500/10000 val_loss:4.2249 train_time:578342ms step_avg:231.34ms +[2025-07-17 
15:53:34] [Rank 0] PRINT: step:2500/10000 val_loss:4.2249 train_time:578342ms step_avg:231.34ms +[2025-07-17 15:53:34] [Rank 0] step:2501/10000 train_time:578359ms step_avg:231.25ms +[2025-07-17 15:53:34] [Rank 0] step:2501/10000 train_time:578359ms step_avg:231.25ms +[2025-07-17 15:53:39] [Rank 0] step:2521/10000 train_time:583092ms step_avg:231.29ms +[2025-07-17 15:53:39] [Rank 0] step:2521/10000 train_time:583092ms step_avg:231.29ms +[2025-07-17 15:53:44] [Rank 0] step:2541/10000 train_time:587843ms step_avg:231.34ms +[2025-07-17 15:53:44] [Rank 0] step:2541/10000 train_time:587843ms step_avg:231.34ms +[2025-07-17 15:53:48] [Rank 0] step:2561/10000 train_time:592592ms step_avg:231.39ms +[2025-07-17 15:53:48] [Rank 0] step:2561/10000 train_time:592592ms step_avg:231.39ms +[2025-07-17 15:53:53] [Rank 0] step:2581/10000 train_time:597340ms step_avg:231.44ms +[2025-07-17 15:53:53] [Rank 0] step:2581/10000 train_time:597340ms step_avg:231.44ms +[2025-07-17 15:53:58] [Rank 0] step:2601/10000 train_time:602193ms step_avg:231.52ms +[2025-07-17 15:53:58] [Rank 0] step:2601/10000 train_time:602193ms step_avg:231.52ms +[2025-07-17 15:54:03] [Rank 0] step:2621/10000 train_time:606940ms step_avg:231.57ms +[2025-07-17 15:54:03] [Rank 0] step:2621/10000 train_time:606940ms step_avg:231.57ms +[2025-07-17 15:54:08] [Rank 0] PRINT: step:2625/10000 val_loss:4.2888 train_time:608134ms step_avg:231.67ms +[2025-07-17 15:54:08] [Rank 0] PRINT: step:2625/10000 val_loss:4.2888 train_time:608134ms step_avg:231.67ms +[2025-07-17 15:54:12] [Rank 0] step:2641/10000 train_time:611685ms step_avg:231.61ms +[2025-07-17 15:54:12] [Rank 0] step:2641/10000 train_time:611685ms step_avg:231.61ms +[2025-07-17 15:54:17] [Rank 0] step:2661/10000 train_time:616434ms step_avg:231.66ms +[2025-07-17 15:54:17] [Rank 0] step:2661/10000 train_time:616434ms step_avg:231.66ms +[2025-07-17 15:54:22] [Rank 0] step:2681/10000 train_time:621185ms step_avg:231.70ms +[2025-07-17 15:54:22] [Rank 0] step:2681/10000 
train_time:621185ms step_avg:231.70ms +[2025-07-17 15:54:26] [Rank 0] step:2701/10000 train_time:625937ms step_avg:231.74ms +[2025-07-17 15:54:26] [Rank 0] step:2701/10000 train_time:625937ms step_avg:231.74ms +[2025-07-17 15:54:31] [Rank 0] step:2721/10000 train_time:630688ms step_avg:231.79ms +[2025-07-17 15:54:31] [Rank 0] step:2721/10000 train_time:630688ms step_avg:231.79ms +[2025-07-17 15:54:36] [Rank 0] step:2741/10000 train_time:635444ms step_avg:231.83ms +[2025-07-17 15:54:36] [Rank 0] step:2741/10000 train_time:635444ms step_avg:231.83ms +[2025-07-17 15:54:42] [Rank 0] PRINT: step:2750/10000 val_loss:4.3241 train_time:637828ms step_avg:231.94ms +[2025-07-17 15:54:42] [Rank 0] PRINT: step:2750/10000 val_loss:4.3241 train_time:637828ms step_avg:231.94ms +[2025-07-17 15:54:45] [Rank 0] step:2761/10000 train_time:640198ms step_avg:231.87ms +[2025-07-17 15:54:45] [Rank 0] step:2761/10000 train_time:640198ms step_avg:231.87ms +[2025-07-17 15:54:50] [Rank 0] step:2781/10000 train_time:644956ms step_avg:231.92ms +[2025-07-17 15:54:50] [Rank 0] step:2781/10000 train_time:644956ms step_avg:231.92ms +[2025-07-17 15:54:54] [Rank 0] step:2801/10000 train_time:649712ms step_avg:231.96ms +[2025-07-17 15:54:54] [Rank 0] step:2801/10000 train_time:649712ms step_avg:231.96ms +[2025-07-17 15:54:59] [Rank 0] step:2821/10000 train_time:654472ms step_avg:232.00ms +[2025-07-17 15:54:59] [Rank 0] step:2821/10000 train_time:654472ms step_avg:232.00ms +[2025-07-17 15:55:04] [Rank 0] step:2841/10000 train_time:659227ms step_avg:232.04ms +[2025-07-17 15:55:04] [Rank 0] step:2841/10000 train_time:659227ms step_avg:232.04ms +[2025-07-17 15:55:09] [Rank 0] step:2861/10000 train_time:663985ms step_avg:232.08ms +[2025-07-17 15:55:09] [Rank 0] step:2861/10000 train_time:663985ms step_avg:232.08ms +[2025-07-17 15:55:17] [Rank 0] PRINT: step:2875/10000 val_loss:4.2368 train_time:667560ms step_avg:232.19ms +[2025-07-17 15:55:17] [Rank 0] PRINT: step:2875/10000 val_loss:4.2368 
train_time:667560ms step_avg:232.19ms +[2025-07-17 15:55:18] [Rank 0] step:2881/10000 train_time:668742ms step_avg:232.12ms +[2025-07-17 15:55:18] [Rank 0] step:2881/10000 train_time:668742ms step_avg:232.12ms +[2025-07-17 15:55:23] [Rank 0] step:2901/10000 train_time:673498ms step_avg:232.16ms +[2025-07-17 15:55:23] [Rank 0] step:2901/10000 train_time:673498ms step_avg:232.16ms +[2025-07-17 15:55:27] [Rank 0] step:2921/10000 train_time:678255ms step_avg:232.20ms +[2025-07-17 15:55:27] [Rank 0] step:2921/10000 train_time:678255ms step_avg:232.20ms +[2025-07-17 15:55:32] [Rank 0] step:2941/10000 train_time:683013ms step_avg:232.24ms +[2025-07-17 15:55:32] [Rank 0] step:2941/10000 train_time:683013ms step_avg:232.24ms +[2025-07-17 15:55:37] [Rank 0] step:2961/10000 train_time:687769ms step_avg:232.28ms +[2025-07-17 15:55:37] [Rank 0] step:2961/10000 train_time:687769ms step_avg:232.28ms +[2025-07-17 15:55:42] [Rank 0] step:2981/10000 train_time:692541ms step_avg:232.32ms +[2025-07-17 15:55:42] [Rank 0] step:2981/10000 train_time:692541ms step_avg:232.32ms +[2025-07-17 15:55:51] [Rank 0] PRINT: step:3000/10000 val_loss:4.2130 train_time:697320ms step_avg:232.44ms +[2025-07-17 15:55:51] [Rank 0] PRINT: step:3000/10000 val_loss:4.2130 train_time:697320ms step_avg:232.44ms +[2025-07-17 15:55:51] [Rank 0] step:3001/10000 train_time:697337ms step_avg:232.37ms +[2025-07-17 15:55:51] [Rank 0] step:3001/10000 train_time:697337ms step_avg:232.37ms +[2025-07-17 15:55:56] [Rank 0] step:3021/10000 train_time:702093ms step_avg:232.40ms +[2025-07-17 15:55:56] [Rank 0] step:3021/10000 train_time:702093ms step_avg:232.40ms +[2025-07-17 15:56:01] [Rank 0] step:3041/10000 train_time:706874ms step_avg:232.45ms +[2025-07-17 15:56:01] [Rank 0] step:3041/10000 train_time:706874ms step_avg:232.45ms +[2025-07-17 15:56:06] [Rank 0] step:3061/10000 train_time:712156ms step_avg:232.65ms +[2025-07-17 15:56:06] [Rank 0] step:3061/10000 train_time:712156ms step_avg:232.65ms +[2025-07-17 15:56:11] 
[Rank 0] step:3081/10000 train_time:716929ms step_avg:232.69ms +[2025-07-17 15:56:11] [Rank 0] step:3081/10000 train_time:716929ms step_avg:232.69ms +[2025-07-17 15:56:16] [Rank 0] step:3101/10000 train_time:721708ms step_avg:232.73ms +[2025-07-17 15:56:16] [Rank 0] step:3101/10000 train_time:721708ms step_avg:232.73ms +[2025-07-17 15:56:20] [Rank 0] step:3121/10000 train_time:726487ms step_avg:232.77ms +[2025-07-17 15:56:20] [Rank 0] step:3121/10000 train_time:726487ms step_avg:232.77ms +[2025-07-17 15:56:26] [Rank 0] PRINT: step:3125/10000 val_loss:4.3061 train_time:727688ms step_avg:232.86ms +[2025-07-17 15:56:26] [Rank 0] PRINT: step:3125/10000 val_loss:4.3061 train_time:727688ms step_avg:232.86ms +[2025-07-17 15:56:30] [Rank 0] step:3141/10000 train_time:731258ms step_avg:232.81ms +[2025-07-17 15:56:30] [Rank 0] step:3141/10000 train_time:731258ms step_avg:232.81ms +[2025-07-17 15:56:34] [Rank 0] step:3161/10000 train_time:736031ms step_avg:232.85ms +[2025-07-17 15:56:34] [Rank 0] step:3161/10000 train_time:736031ms step_avg:232.85ms +[2025-07-17 15:56:39] [Rank 0] step:3181/10000 train_time:740808ms step_avg:232.89ms +[2025-07-17 15:56:39] [Rank 0] step:3181/10000 train_time:740808ms step_avg:232.89ms +[2025-07-17 15:56:44] [Rank 0] step:3201/10000 train_time:745588ms step_avg:232.92ms +[2025-07-17 15:56:44] [Rank 0] step:3201/10000 train_time:745588ms step_avg:232.92ms +[2025-07-17 15:56:49] [Rank 0] step:3221/10000 train_time:750362ms step_avg:232.96ms +[2025-07-17 15:56:49] [Rank 0] step:3221/10000 train_time:750362ms step_avg:232.96ms +[2025-07-17 15:56:54] [Rank 0] step:3241/10000 train_time:755140ms step_avg:233.00ms +[2025-07-17 15:56:54] [Rank 0] step:3241/10000 train_time:755140ms step_avg:233.00ms +[2025-07-17 15:57:00] [Rank 0] PRINT: step:3250/10000 val_loss:4.3564 train_time:757532ms step_avg:233.09ms +[2025-07-17 15:57:00] [Rank 0] PRINT: step:3250/10000 val_loss:4.3564 train_time:757532ms step_avg:233.09ms +[2025-07-17 15:57:03] [Rank 0] 
step:3261/10000 train_time:759914ms step_avg:233.03ms +[2025-07-17 15:57:03] [Rank 0] step:3261/10000 train_time:759914ms step_avg:233.03ms +[2025-07-17 15:57:08] [Rank 0] step:3281/10000 train_time:764694ms step_avg:233.07ms +[2025-07-17 15:57:08] [Rank 0] step:3281/10000 train_time:764694ms step_avg:233.07ms +[2025-07-17 15:57:13] [Rank 0] step:3301/10000 train_time:769475ms step_avg:233.10ms +[2025-07-17 15:57:13] [Rank 0] step:3301/10000 train_time:769475ms step_avg:233.10ms +[2025-07-17 15:57:17] [Rank 0] step:3321/10000 train_time:774257ms step_avg:233.14ms +[2025-07-17 15:57:17] [Rank 0] step:3321/10000 train_time:774257ms step_avg:233.14ms +[2025-07-17 15:57:22] [Rank 0] step:3341/10000 train_time:779038ms step_avg:233.17ms +[2025-07-17 15:57:22] [Rank 0] step:3341/10000 train_time:779038ms step_avg:233.17ms +[2025-07-17 15:57:27] [Rank 0] step:3361/10000 train_time:783815ms step_avg:233.21ms +[2025-07-17 15:57:27] [Rank 0] step:3361/10000 train_time:783815ms step_avg:233.21ms +[2025-07-17 15:57:35] [Rank 0] PRINT: step:3375/10000 val_loss:4.2509 train_time:787406ms step_avg:233.31ms +[2025-07-17 15:57:35] [Rank 0] PRINT: step:3375/10000 val_loss:4.2509 train_time:787406ms step_avg:233.31ms +[2025-07-17 15:57:36] [Rank 0] step:3381/10000 train_time:788593ms step_avg:233.24ms +[2025-07-17 15:57:36] [Rank 0] step:3381/10000 train_time:788593ms step_avg:233.24ms +[2025-07-17 15:57:41] [Rank 0] step:3401/10000 train_time:793372ms step_avg:233.28ms +[2025-07-17 15:57:41] [Rank 0] step:3401/10000 train_time:793372ms step_avg:233.28ms +[2025-07-17 15:57:46] [Rank 0] step:3421/10000 train_time:798151ms step_avg:233.31ms +[2025-07-17 15:57:46] [Rank 0] step:3421/10000 train_time:798151ms step_avg:233.31ms +[2025-07-17 15:57:51] [Rank 0] step:3441/10000 train_time:802927ms step_avg:233.34ms +[2025-07-17 15:57:51] [Rank 0] step:3441/10000 train_time:802927ms step_avg:233.34ms +[2025-07-17 15:57:55] [Rank 0] step:3461/10000 train_time:807709ms step_avg:233.37ms 
+[2025-07-17 15:57:55] [Rank 0] step:3461/10000 train_time:807709ms step_avg:233.37ms +[2025-07-17 15:58:00] [Rank 0] step:3481/10000 train_time:812488ms step_avg:233.41ms +[2025-07-17 15:58:00] [Rank 0] step:3481/10000 train_time:812488ms step_avg:233.41ms +[2025-07-17 15:58:09] [Rank 0] PRINT: step:3500/10000 val_loss:4.4013 train_time:817270ms step_avg:233.51ms +[2025-07-17 15:58:09] [Rank 0] PRINT: step:3500/10000 val_loss:4.4013 train_time:817270ms step_avg:233.51ms +[2025-07-17 15:58:09] [Rank 0] step:3501/10000 train_time:817287ms step_avg:233.44ms +[2025-07-17 15:58:09] [Rank 0] step:3501/10000 train_time:817287ms step_avg:233.44ms +[2025-07-17 15:58:14] [Rank 0] step:3521/10000 train_time:822045ms step_avg:233.47ms +[2025-07-17 15:58:14] [Rank 0] step:3521/10000 train_time:822045ms step_avg:233.47ms +[2025-07-17 15:58:19] [Rank 0] step:3541/10000 train_time:826929ms step_avg:233.53ms +[2025-07-17 15:58:19] [Rank 0] step:3541/10000 train_time:826929ms step_avg:233.53ms +[2025-07-17 15:58:24] [Rank 0] step:3561/10000 train_time:831806ms step_avg:233.59ms +[2025-07-17 15:58:24] [Rank 0] step:3561/10000 train_time:831806ms step_avg:233.59ms +[2025-07-17 15:58:28] [Rank 0] step:3581/10000 train_time:836590ms step_avg:233.62ms +[2025-07-17 15:58:28] [Rank 0] step:3581/10000 train_time:836590ms step_avg:233.62ms +[2025-07-17 15:58:33] [Rank 0] step:3601/10000 train_time:841367ms step_avg:233.65ms +[2025-07-17 15:58:33] [Rank 0] step:3601/10000 train_time:841367ms step_avg:233.65ms +[2025-07-17 15:58:38] [Rank 0] step:3621/10000 train_time:846145ms step_avg:233.68ms +[2025-07-17 15:58:38] [Rank 0] step:3621/10000 train_time:846145ms step_avg:233.68ms +[2025-07-17 15:58:43] [Rank 0] PRINT: step:3625/10000 val_loss:4.4273 train_time:847346ms step_avg:233.75ms +[2025-07-17 15:58:43] [Rank 0] PRINT: step:3625/10000 val_loss:4.4273 train_time:847346ms step_avg:233.75ms +[2025-07-17 15:58:47] [Rank 0] step:3641/10000 train_time:850919ms step_avg:233.70ms +[2025-07-17 
15:58:47] [Rank 0] step:3641/10000 train_time:850919ms step_avg:233.70ms +[2025-07-17 15:58:52] [Rank 0] step:3661/10000 train_time:855697ms step_avg:233.73ms +[2025-07-17 15:58:52] [Rank 0] step:3661/10000 train_time:855697ms step_avg:233.73ms +[2025-07-17 15:58:57] [Rank 0] step:3681/10000 train_time:860473ms step_avg:233.76ms +[2025-07-17 15:58:57] [Rank 0] step:3681/10000 train_time:860473ms step_avg:233.76ms +[2025-07-17 15:59:02] [Rank 0] step:3701/10000 train_time:865247ms step_avg:233.79ms +[2025-07-17 15:59:02] [Rank 0] step:3701/10000 train_time:865247ms step_avg:233.79ms +[2025-07-17 15:59:06] [Rank 0] step:3721/10000 train_time:870094ms step_avg:233.83ms +[2025-07-17 15:59:06] [Rank 0] step:3721/10000 train_time:870094ms step_avg:233.83ms +[2025-07-17 15:59:11] [Rank 0] step:3741/10000 train_time:874959ms step_avg:233.88ms +[2025-07-17 15:59:11] [Rank 0] step:3741/10000 train_time:874959ms step_avg:233.88ms +[2025-07-17 15:59:18] [Rank 0] PRINT: step:3750/10000 val_loss:4.1000 train_time:877395ms step_avg:233.97ms +[2025-07-17 15:59:18] [Rank 0] PRINT: step:3750/10000 val_loss:4.1000 train_time:877395ms step_avg:233.97ms +[2025-07-17 15:59:21] [Rank 0] step:3761/10000 train_time:879812ms step_avg:233.93ms +[2025-07-17 15:59:21] [Rank 0] step:3761/10000 train_time:879812ms step_avg:233.93ms +[2025-07-17 15:59:26] [Rank 0] step:3781/10000 train_time:884669ms step_avg:233.98ms +[2025-07-17 15:59:26] [Rank 0] step:3781/10000 train_time:884669ms step_avg:233.98ms +[2025-07-17 15:59:31] [Rank 0] step:3801/10000 train_time:889525ms step_avg:234.02ms +[2025-07-17 15:59:31] [Rank 0] step:3801/10000 train_time:889525ms step_avg:234.02ms +[2025-07-17 15:59:35] [Rank 0] step:3821/10000 train_time:894385ms step_avg:234.07ms +[2025-07-17 15:59:35] [Rank 0] step:3821/10000 train_time:894385ms step_avg:234.07ms +[2025-07-17 15:59:40] [Rank 0] step:3841/10000 train_time:899246ms step_avg:234.12ms +[2025-07-17 15:59:40] [Rank 0] step:3841/10000 train_time:899246ms 
step_avg:234.12ms +[2025-07-17 15:59:45] [Rank 0] step:3861/10000 train_time:904102ms step_avg:234.16ms +[2025-07-17 15:59:45] [Rank 0] step:3861/10000 train_time:904102ms step_avg:234.16ms +[2025-07-17 15:59:53] [Rank 0] PRINT: step:3875/10000 val_loss:4.1421 train_time:907755ms step_avg:234.26ms +[2025-07-17 15:59:53] [Rank 0] PRINT: step:3875/10000 val_loss:4.1421 train_time:907755ms step_avg:234.26ms +[2025-07-17 15:59:55] [Rank 0] step:3881/10000 train_time:908964ms step_avg:234.21ms +[2025-07-17 15:59:55] [Rank 0] step:3881/10000 train_time:908964ms step_avg:234.21ms +[2025-07-17 15:59:59] [Rank 0] step:3901/10000 train_time:913824ms step_avg:234.25ms +[2025-07-17 15:59:59] [Rank 0] step:3901/10000 train_time:913824ms step_avg:234.25ms +[2025-07-17 16:00:04] [Rank 0] step:3921/10000 train_time:918677ms step_avg:234.30ms +[2025-07-17 16:00:04] [Rank 0] step:3921/10000 train_time:918677ms step_avg:234.30ms +[2025-07-17 16:00:09] [Rank 0] step:3941/10000 train_time:923532ms step_avg:234.34ms +[2025-07-17 16:00:09] [Rank 0] step:3941/10000 train_time:923532ms step_avg:234.34ms +[2025-07-17 16:00:14] [Rank 0] step:3961/10000 train_time:928391ms step_avg:234.38ms +[2025-07-17 16:00:14] [Rank 0] step:3961/10000 train_time:928391ms step_avg:234.38ms +[2025-07-17 16:00:19] [Rank 0] step:3981/10000 train_time:933255ms step_avg:234.43ms +[2025-07-17 16:00:19] [Rank 0] step:3981/10000 train_time:933255ms step_avg:234.43ms +[2025-07-17 16:00:28] [Rank 0] PRINT: step:4000/10000 val_loss:4.2935 train_time:938115ms step_avg:234.53ms +[2025-07-17 16:00:28] [Rank 0] PRINT: step:4000/10000 val_loss:4.2935 train_time:938115ms step_avg:234.53ms +[2025-07-17 16:00:28] [Rank 0] step:4001/10000 train_time:938133ms step_avg:234.47ms +[2025-07-17 16:00:28] [Rank 0] step:4001/10000 train_time:938133ms step_avg:234.47ms +[2025-07-17 16:00:33] [Rank 0] step:4021/10000 train_time:942974ms step_avg:234.51ms +[2025-07-17 16:00:33] [Rank 0] step:4021/10000 train_time:942974ms 
step_avg:234.51ms +[2025-07-17 16:00:38] [Rank 0] step:4041/10000 train_time:947830ms step_avg:234.55ms +[2025-07-17 16:00:38] [Rank 0] step:4041/10000 train_time:947830ms step_avg:234.55ms +[2025-07-17 16:00:43] [Rank 0] step:4061/10000 train_time:952680ms step_avg:234.59ms +[2025-07-17 16:00:43] [Rank 0] step:4061/10000 train_time:952680ms step_avg:234.59ms +[2025-07-17 16:00:48] [Rank 0] step:4081/10000 train_time:958041ms step_avg:234.76ms +[2025-07-17 16:00:48] [Rank 0] step:4081/10000 train_time:958041ms step_avg:234.76ms +[2025-07-17 16:00:53] [Rank 0] step:4101/10000 train_time:962891ms step_avg:234.79ms +[2025-07-17 16:00:53] [Rank 0] step:4101/10000 train_time:962891ms step_avg:234.79ms +[2025-07-17 16:00:58] [Rank 0] step:4121/10000 train_time:967743ms step_avg:234.83ms +[2025-07-17 16:00:58] [Rank 0] step:4121/10000 train_time:967743ms step_avg:234.83ms +[2025-07-17 16:01:04] [Rank 0] PRINT: step:4125/10000 val_loss:4.3098 train_time:968962ms step_avg:234.90ms +[2025-07-17 16:01:04] [Rank 0] PRINT: step:4125/10000 val_loss:4.3098 train_time:968962ms step_avg:234.90ms +[2025-07-17 16:01:07] [Rank 0] step:4141/10000 train_time:972587ms step_avg:234.87ms +[2025-07-17 16:01:07] [Rank 0] step:4141/10000 train_time:972587ms step_avg:234.87ms +[2025-07-17 16:01:12] [Rank 0] step:4161/10000 train_time:977435ms step_avg:234.90ms +[2025-07-17 16:01:12] [Rank 0] step:4161/10000 train_time:977435ms step_avg:234.90ms +[2025-07-17 16:01:17] [Rank 0] step:4181/10000 train_time:982283ms step_avg:234.94ms +[2025-07-17 16:01:17] [Rank 0] step:4181/10000 train_time:982283ms step_avg:234.94ms +[2025-07-17 16:01:22] [Rank 0] step:4201/10000 train_time:987136ms step_avg:234.98ms +[2025-07-17 16:01:22] [Rank 0] step:4201/10000 train_time:987136ms step_avg:234.98ms +[2025-07-17 16:01:27] [Rank 0] step:4221/10000 train_time:991983ms step_avg:235.01ms +[2025-07-17 16:01:27] [Rank 0] step:4221/10000 train_time:991983ms step_avg:235.01ms +[2025-07-17 16:01:32] [Rank 0] 
step:4241/10000 train_time:996835ms step_avg:235.05ms +[2025-07-17 16:01:32] [Rank 0] step:4241/10000 train_time:996835ms step_avg:235.05ms +[2025-07-17 16:01:39] [Rank 0] PRINT: step:4250/10000 val_loss:4.5390 train_time:999266ms step_avg:235.12ms +[2025-07-17 16:01:39] [Rank 0] PRINT: step:4250/10000 val_loss:4.5390 train_time:999266ms step_avg:235.12ms +[2025-07-17 16:01:41] [Rank 0] step:4261/10000 train_time:1001684ms step_avg:235.08ms +[2025-07-17 16:01:41] [Rank 0] step:4261/10000 train_time:1001684ms step_avg:235.08ms +[2025-07-17 16:01:46] [Rank 0] step:4281/10000 train_time:1006542ms step_avg:235.12ms +[2025-07-17 16:01:46] [Rank 0] step:4281/10000 train_time:1006542ms step_avg:235.12ms +[2025-07-17 16:01:51] [Rank 0] step:4301/10000 train_time:1011397ms step_avg:235.15ms +[2025-07-17 16:01:51] [Rank 0] step:4301/10000 train_time:1011397ms step_avg:235.15ms +[2025-07-17 16:01:56] [Rank 0] step:4321/10000 train_time:1016260ms step_avg:235.19ms +[2025-07-17 16:01:56] [Rank 0] step:4321/10000 train_time:1016260ms step_avg:235.19ms +[2025-07-17 16:02:01] [Rank 0] step:4341/10000 train_time:1021117ms step_avg:235.23ms +[2025-07-17 16:02:01] [Rank 0] step:4341/10000 train_time:1021117ms step_avg:235.23ms +[2025-07-17 16:02:05] [Rank 0] step:4361/10000 train_time:1025972ms step_avg:235.26ms +[2025-07-17 16:02:05] [Rank 0] step:4361/10000 train_time:1025972ms step_avg:235.26ms +[2025-07-17 16:02:13] [Rank 0] PRINT: step:4375/10000 val_loss:4.3472 train_time:1029617ms step_avg:235.34ms +[2025-07-17 16:02:13] [Rank 0] PRINT: step:4375/10000 val_loss:4.3472 train_time:1029617ms step_avg:235.34ms +[2025-07-17 16:02:15] [Rank 0] step:4381/10000 train_time:1030829ms step_avg:235.30ms +[2025-07-17 16:02:15] [Rank 0] step:4381/10000 train_time:1030829ms step_avg:235.30ms +[2025-07-17 16:02:19] [Rank 0] step:4401/10000 train_time:1035685ms step_avg:235.33ms +[2025-07-17 16:02:19] [Rank 0] step:4401/10000 train_time:1035685ms step_avg:235.33ms +[2025-07-17 16:02:24] [Rank 
0] step:4421/10000 train_time:1040536ms step_avg:235.36ms +[2025-07-17 16:02:24] [Rank 0] step:4421/10000 train_time:1040536ms step_avg:235.36ms +[2025-07-17 16:02:29] [Rank 0] step:4441/10000 train_time:1045390ms step_avg:235.40ms +[2025-07-17 16:02:29] [Rank 0] step:4441/10000 train_time:1045390ms step_avg:235.40ms +[2025-07-17 16:02:34] [Rank 0] step:4461/10000 train_time:1050256ms step_avg:235.43ms +[2025-07-17 16:02:34] [Rank 0] step:4461/10000 train_time:1050256ms step_avg:235.43ms +[2025-07-17 16:02:39] [Rank 0] step:4481/10000 train_time:1055123ms step_avg:235.47ms +[2025-07-17 16:02:39] [Rank 0] step:4481/10000 train_time:1055123ms step_avg:235.47ms +[2025-07-17 16:02:48] [Rank 0] PRINT: step:4500/10000 val_loss:4.3274 train_time:1059991ms step_avg:235.55ms +[2025-07-17 16:02:48] [Rank 0] PRINT: step:4500/10000 val_loss:4.3274 train_time:1059991ms step_avg:235.55ms +[2025-07-17 16:02:48] [Rank 0] step:4501/10000 train_time:1060008ms step_avg:235.51ms +[2025-07-17 16:02:48] [Rank 0] step:4501/10000 train_time:1060008ms step_avg:235.51ms +[2025-07-17 16:02:53] [Rank 0] step:4521/10000 train_time:1064850ms step_avg:235.53ms +[2025-07-17 16:02:53] [Rank 0] step:4521/10000 train_time:1064850ms step_avg:235.53ms +[2025-07-17 16:02:58] [Rank 0] step:4541/10000 train_time:1069711ms step_avg:235.57ms +[2025-07-17 16:02:58] [Rank 0] step:4541/10000 train_time:1069711ms step_avg:235.57ms +[2025-07-17 16:03:03] [Rank 0] step:4561/10000 train_time:1074572ms step_avg:235.60ms +[2025-07-17 16:03:03] [Rank 0] step:4561/10000 train_time:1074572ms step_avg:235.60ms +[2025-07-17 16:03:08] [Rank 0] step:4581/10000 train_time:1079523ms step_avg:235.65ms +[2025-07-17 16:03:08] [Rank 0] step:4581/10000 train_time:1079523ms step_avg:235.65ms +[2025-07-17 16:03:13] [Rank 0] step:4601/10000 train_time:1084393ms step_avg:235.69ms +[2025-07-17 16:03:13] [Rank 0] step:4601/10000 train_time:1084393ms step_avg:235.69ms +[2025-07-17 16:03:18] [Rank 0] step:4621/10000 train_time:1089254ms 
step_avg:235.72ms +[2025-07-17 16:03:18] [Rank 0] step:4621/10000 train_time:1089254ms step_avg:235.72ms +[2025-07-17 16:03:23] [Rank 0] PRINT: step:4625/10000 val_loss:4.2977 train_time:1090479ms step_avg:235.78ms +[2025-07-17 16:03:23] [Rank 0] PRINT: step:4625/10000 val_loss:4.2977 train_time:1090479ms step_avg:235.78ms +[2025-07-17 16:03:27] [Rank 0] step:4641/10000 train_time:1094113ms step_avg:235.75ms +[2025-07-17 16:03:27] [Rank 0] step:4641/10000 train_time:1094113ms step_avg:235.75ms +[2025-07-17 16:03:32] [Rank 0] step:4661/10000 train_time:1098984ms step_avg:235.78ms +[2025-07-17 16:03:32] [Rank 0] step:4661/10000 train_time:1098984ms step_avg:235.78ms +[2025-07-17 16:03:37] [Rank 0] step:4681/10000 train_time:1103850ms step_avg:235.81ms +[2025-07-17 16:03:37] [Rank 0] step:4681/10000 train_time:1103850ms step_avg:235.81ms +[2025-07-17 16:03:42] [Rank 0] step:4701/10000 train_time:1108723ms step_avg:235.85ms +[2025-07-17 16:03:42] [Rank 0] step:4701/10000 train_time:1108723ms step_avg:235.85ms +[2025-07-17 16:03:47] [Rank 0] step:4721/10000 train_time:1113585ms step_avg:235.88ms +[2025-07-17 16:03:47] [Rank 0] step:4721/10000 train_time:1113585ms step_avg:235.88ms +[2025-07-17 16:03:52] [Rank 0] step:4741/10000 train_time:1118449ms step_avg:235.91ms +[2025-07-17 16:03:52] [Rank 0] step:4741/10000 train_time:1118449ms step_avg:235.91ms +[2025-07-17 16:03:58] [Rank 0] PRINT: step:4750/10000 val_loss:4.3745 train_time:1120890ms step_avg:235.98ms +[2025-07-17 16:03:58] [Rank 0] PRINT: step:4750/10000 val_loss:4.3745 train_time:1120890ms step_avg:235.98ms +[2025-07-17 16:04:01] [Rank 0] step:4761/10000 train_time:1123316ms step_avg:235.94ms +[2025-07-17 16:04:01] [Rank 0] step:4761/10000 train_time:1123316ms step_avg:235.94ms +[2025-07-17 16:04:06] [Rank 0] step:4781/10000 train_time:1128179ms step_avg:235.97ms +[2025-07-17 16:04:06] [Rank 0] step:4781/10000 train_time:1128179ms step_avg:235.97ms +[2025-07-17 16:04:11] [Rank 0] step:4801/10000 
train_time:1133040ms step_avg:236.00ms +[2025-07-17 16:04:11] [Rank 0] step:4801/10000 train_time:1133040ms step_avg:236.00ms +[2025-07-17 16:04:16] [Rank 0] step:4821/10000 train_time:1137907ms step_avg:236.03ms +[2025-07-17 16:04:16] [Rank 0] step:4821/10000 train_time:1137907ms step_avg:236.03ms +[2025-07-17 16:04:21] [Rank 0] step:4841/10000 train_time:1142779ms step_avg:236.06ms +[2025-07-17 16:04:21] [Rank 0] step:4841/10000 train_time:1142779ms step_avg:236.06ms +[2025-07-17 16:04:25] [Rank 0] step:4861/10000 train_time:1147647ms step_avg:236.09ms +[2025-07-17 16:04:25] [Rank 0] step:4861/10000 train_time:1147647ms step_avg:236.09ms +[2025-07-17 16:04:34] [Rank 0] PRINT: step:4875/10000 val_loss:4.4756 train_time:1151307ms step_avg:236.17ms +[2025-07-17 16:04:34] [Rank 0] PRINT: step:4875/10000 val_loss:4.4756 train_time:1151307ms step_avg:236.17ms +[2025-07-17 16:04:35] [Rank 0] step:4881/10000 train_time:1152521ms step_avg:236.12ms +[2025-07-17 16:04:35] [Rank 0] step:4881/10000 train_time:1152521ms step_avg:236.12ms +[2025-07-17 16:04:40] [Rank 0] step:4901/10000 train_time:1157394ms step_avg:236.15ms +[2025-07-17 16:04:40] [Rank 0] step:4901/10000 train_time:1157394ms step_avg:236.15ms +[2025-07-17 16:04:45] [Rank 0] step:4921/10000 train_time:1162254ms step_avg:236.18ms +[2025-07-17 16:04:45] [Rank 0] step:4921/10000 train_time:1162254ms step_avg:236.18ms +[2025-07-17 16:04:50] [Rank 0] step:4941/10000 train_time:1167124ms step_avg:236.21ms +[2025-07-17 16:04:50] [Rank 0] step:4941/10000 train_time:1167124ms step_avg:236.21ms +[2025-07-17 16:04:54] [Rank 0] step:4961/10000 train_time:1171994ms step_avg:236.24ms +[2025-07-17 16:04:54] [Rank 0] step:4961/10000 train_time:1171994ms step_avg:236.24ms +[2025-07-17 16:04:59] [Rank 0] step:4981/10000 train_time:1176855ms step_avg:236.27ms +[2025-07-17 16:04:59] [Rank 0] step:4981/10000 train_time:1176855ms step_avg:236.27ms +[2025-07-17 16:05:08] [Rank 0] PRINT: step:5000/10000 val_loss:4.5512 
train_time:1181729ms step_avg:236.35ms +[2025-07-17 16:05:08] [Rank 0] PRINT: step:5000/10000 val_loss:4.5512 train_time:1181729ms step_avg:236.35ms +[2025-07-17 16:05:08] [Rank 0] step:5001/10000 train_time:1181746ms step_avg:236.30ms +[2025-07-17 16:05:08] [Rank 0] step:5001/10000 train_time:1181746ms step_avg:236.30ms +[2025-07-17 16:05:13] [Rank 0] step:5021/10000 train_time:1186589ms step_avg:236.33ms +[2025-07-17 16:05:13] [Rank 0] step:5021/10000 train_time:1186589ms step_avg:236.33ms +[2025-07-17 16:05:18] [Rank 0] step:5041/10000 train_time:1191453ms step_avg:236.35ms +[2025-07-17 16:05:18] [Rank 0] step:5041/10000 train_time:1191453ms step_avg:236.35ms +[2025-07-17 16:05:23] [Rank 0] step:5061/10000 train_time:1196317ms step_avg:236.38ms +[2025-07-17 16:05:23] [Rank 0] step:5061/10000 train_time:1196317ms step_avg:236.38ms +[2025-07-17 16:05:28] [Rank 0] step:5081/10000 train_time:1201293ms step_avg:236.43ms +[2025-07-17 16:05:28] [Rank 0] step:5081/10000 train_time:1201293ms step_avg:236.43ms +[2025-07-17 16:05:33] [Rank 0] step:5101/10000 train_time:1206127ms step_avg:236.45ms +[2025-07-17 16:05:33] [Rank 0] step:5101/10000 train_time:1206127ms step_avg:236.45ms +[2025-07-17 16:05:38] [Rank 0] step:5121/10000 train_time:1210984ms step_avg:236.47ms +[2025-07-17 16:05:38] [Rank 0] step:5121/10000 train_time:1210984ms step_avg:236.47ms +[2025-07-17 16:05:43] [Rank 0] PRINT: step:5125/10000 val_loss:4.4125 train_time:1212204ms step_avg:236.53ms +[2025-07-17 16:05:43] [Rank 0] PRINT: step:5125/10000 val_loss:4.4125 train_time:1212204ms step_avg:236.53ms +[2025-07-17 16:05:47] [Rank 0] step:5141/10000 train_time:1215841ms step_avg:236.50ms +[2025-07-17 16:05:47] [Rank 0] step:5141/10000 train_time:1215841ms step_avg:236.50ms +[2025-07-17 16:05:52] [Rank 0] step:5161/10000 train_time:1220703ms step_avg:236.52ms +[2025-07-17 16:05:52] [Rank 0] step:5161/10000 train_time:1220703ms step_avg:236.52ms +[2025-07-17 16:05:57] [Rank 0] step:5181/10000 
train_time:1225564ms step_avg:236.55ms +[2025-07-17 16:05:57] [Rank 0] step:5181/10000 train_time:1225564ms step_avg:236.55ms +[2025-07-17 16:06:02] [Rank 0] step:5201/10000 train_time:1230473ms step_avg:236.58ms +[2025-07-17 16:06:02] [Rank 0] step:5201/10000 train_time:1230473ms step_avg:236.58ms +[2025-07-17 16:06:07] [Rank 0] step:5221/10000 train_time:1235412ms step_avg:236.62ms +[2025-07-17 16:06:07] [Rank 0] step:5221/10000 train_time:1235412ms step_avg:236.62ms +[2025-07-17 16:06:12] [Rank 0] step:5241/10000 train_time:1240347ms step_avg:236.66ms +[2025-07-17 16:06:12] [Rank 0] step:5241/10000 train_time:1240347ms step_avg:236.66ms +[2025-07-17 16:06:19] [Rank 0] PRINT: step:5250/10000 val_loss:4.3791 train_time:1242815ms step_avg:236.73ms +[2025-07-17 16:06:19] [Rank 0] PRINT: step:5250/10000 val_loss:4.3791 train_time:1242815ms step_avg:236.73ms +[2025-07-17 16:06:21] [Rank 0] step:5261/10000 train_time:1245272ms step_avg:236.70ms +[2025-07-17 16:06:21] [Rank 0] step:5261/10000 train_time:1245272ms step_avg:236.70ms +[2025-07-17 16:06:26] [Rank 0] step:5281/10000 train_time:1250206ms step_avg:236.74ms +[2025-07-17 16:06:26] [Rank 0] step:5281/10000 train_time:1250206ms step_avg:236.74ms +[2025-07-17 16:06:31] [Rank 0] step:5301/10000 train_time:1255138ms step_avg:236.77ms +[2025-07-17 16:06:31] [Rank 0] step:5301/10000 train_time:1255138ms step_avg:236.77ms +[2025-07-17 16:06:36] [Rank 0] step:5321/10000 train_time:1260070ms step_avg:236.81ms +[2025-07-17 16:06:36] [Rank 0] step:5321/10000 train_time:1260070ms step_avg:236.81ms +[2025-07-17 16:06:41] [Rank 0] step:5341/10000 train_time:1265011ms step_avg:236.85ms +[2025-07-17 16:06:41] [Rank 0] step:5341/10000 train_time:1265011ms step_avg:236.85ms +[2025-07-17 16:06:46] [Rank 0] step:5361/10000 train_time:1269943ms step_avg:236.89ms +[2025-07-17 16:06:46] [Rank 0] step:5361/10000 train_time:1269943ms step_avg:236.89ms +[2025-07-17 16:06:54] [Rank 0] PRINT: step:5375/10000 val_loss:4.3488 
train_time:1273650ms step_avg:236.96ms +[2025-07-17 16:06:54] [Rank 0] PRINT: step:5375/10000 val_loss:4.3488 train_time:1273650ms step_avg:236.96ms +[2025-07-17 16:06:56] [Rank 0] step:5381/10000 train_time:1274881ms step_avg:236.92ms +[2025-07-17 16:06:56] [Rank 0] step:5381/10000 train_time:1274881ms step_avg:236.92ms +[2025-07-17 16:07:01] [Rank 0] step:5401/10000 train_time:1279817ms step_avg:236.96ms +[2025-07-17 16:07:01] [Rank 0] step:5401/10000 train_time:1279817ms step_avg:236.96ms +[2025-07-17 16:07:06] [Rank 0] step:5421/10000 train_time:1284762ms step_avg:237.00ms +[2025-07-17 16:07:06] [Rank 0] step:5421/10000 train_time:1284762ms step_avg:237.00ms +[2025-07-17 16:07:10] [Rank 0] step:5441/10000 train_time:1289695ms step_avg:237.03ms +[2025-07-17 16:07:10] [Rank 0] step:5441/10000 train_time:1289695ms step_avg:237.03ms +[2025-07-17 16:07:15] [Rank 0] step:5461/10000 train_time:1294635ms step_avg:237.07ms +[2025-07-17 16:07:15] [Rank 0] step:5461/10000 train_time:1294635ms step_avg:237.07ms +[2025-07-17 16:07:20] [Rank 0] step:5481/10000 train_time:1299581ms step_avg:237.11ms +[2025-07-17 16:07:20] [Rank 0] step:5481/10000 train_time:1299581ms step_avg:237.11ms +[2025-07-17 16:07:29] [Rank 0] PRINT: step:5500/10000 val_loss:4.4978 train_time:1304528ms step_avg:237.19ms +[2025-07-17 16:07:29] [Rank 0] PRINT: step:5500/10000 val_loss:4.4978 train_time:1304528ms step_avg:237.19ms +[2025-07-17 16:07:30] [Rank 0] step:5501/10000 train_time:1304545ms step_avg:237.15ms +[2025-07-17 16:07:30] [Rank 0] step:5501/10000 train_time:1304545ms step_avg:237.15ms +[2025-07-17 16:07:35] [Rank 0] step:5521/10000 train_time:1309461ms step_avg:237.18ms +[2025-07-17 16:07:35] [Rank 0] step:5521/10000 train_time:1309461ms step_avg:237.18ms +[2025-07-17 16:07:39] [Rank 0] step:5541/10000 train_time:1314402ms step_avg:237.21ms +[2025-07-17 16:07:39] [Rank 0] step:5541/10000 train_time:1314402ms step_avg:237.21ms +[2025-07-17 16:07:44] [Rank 0] step:5561/10000 
train_time:1319341ms step_avg:237.25ms +[2025-07-17 16:07:44] [Rank 0] step:5561/10000 train_time:1319341ms step_avg:237.25ms +[2025-07-17 16:07:49] [Rank 0] step:5581/10000 train_time:1324281ms step_avg:237.28ms +[2025-07-17 16:07:49] [Rank 0] step:5581/10000 train_time:1324281ms step_avg:237.28ms +[2025-07-17 16:07:54] [Rank 0] step:5601/10000 train_time:1329306ms step_avg:237.33ms +[2025-07-17 16:07:54] [Rank 0] step:5601/10000 train_time:1329306ms step_avg:237.33ms +[2025-07-17 16:07:59] [Rank 0] step:5621/10000 train_time:1334248ms step_avg:237.37ms +[2025-07-17 16:07:59] [Rank 0] step:5621/10000 train_time:1334248ms step_avg:237.37ms +[2025-07-17 16:08:05] [Rank 0] PRINT: step:5625/10000 val_loss:4.2882 train_time:1335495ms step_avg:237.42ms +[2025-07-17 16:08:05] [Rank 0] PRINT: step:5625/10000 val_loss:4.2882 train_time:1335495ms step_avg:237.42ms +[2025-07-17 16:08:09] [Rank 0] step:5641/10000 train_time:1339193ms step_avg:237.40ms +[2025-07-17 16:08:09] [Rank 0] step:5641/10000 train_time:1339193ms step_avg:237.40ms +[2025-07-17 16:08:14] [Rank 0] step:5661/10000 train_time:1344137ms step_avg:237.44ms +[2025-07-17 16:08:14] [Rank 0] step:5661/10000 train_time:1344137ms step_avg:237.44ms +[2025-07-17 16:08:19] [Rank 0] step:5681/10000 train_time:1349080ms step_avg:237.47ms +[2025-07-17 16:08:19] [Rank 0] step:5681/10000 train_time:1349080ms step_avg:237.47ms +[2025-07-17 16:08:24] [Rank 0] step:5701/10000 train_time:1354022ms step_avg:237.51ms +[2025-07-17 16:08:24] [Rank 0] step:5701/10000 train_time:1354022ms step_avg:237.51ms +[2025-07-17 16:08:29] [Rank 0] step:5721/10000 train_time:1358962ms step_avg:237.54ms +[2025-07-17 16:08:29] [Rank 0] step:5721/10000 train_time:1358962ms step_avg:237.54ms +[2025-07-17 16:08:34] [Rank 0] step:5741/10000 train_time:1363908ms step_avg:237.57ms +[2025-07-17 16:08:34] [Rank 0] step:5741/10000 train_time:1363908ms step_avg:237.57ms +[2025-07-17 16:08:41] [Rank 0] PRINT: step:5750/10000 val_loss:4.6507 
train_time:1366384ms step_avg:237.63ms +[2025-07-17 16:08:41] [Rank 0] PRINT: step:5750/10000 val_loss:4.6507 train_time:1366384ms step_avg:237.63ms +[2025-07-17 16:08:43] [Rank 0] step:5761/10000 train_time:1368852ms step_avg:237.61ms +[2025-07-17 16:08:43] [Rank 0] step:5761/10000 train_time:1368852ms step_avg:237.61ms +[2025-07-17 16:08:48] [Rank 0] step:5781/10000 train_time:1373800ms step_avg:237.64ms +[2025-07-17 16:08:48] [Rank 0] step:5781/10000 train_time:1373800ms step_avg:237.64ms +[2025-07-17 16:08:53] [Rank 0] step:5801/10000 train_time:1378740ms step_avg:237.67ms +[2025-07-17 16:08:53] [Rank 0] step:5801/10000 train_time:1378740ms step_avg:237.67ms +[2025-07-17 16:08:58] [Rank 0] step:5821/10000 train_time:1383686ms step_avg:237.71ms +[2025-07-17 16:08:58] [Rank 0] step:5821/10000 train_time:1383686ms step_avg:237.71ms +[2025-07-17 16:09:03] [Rank 0] step:5841/10000 train_time:1388636ms step_avg:237.74ms +[2025-07-17 16:09:03] [Rank 0] step:5841/10000 train_time:1388636ms step_avg:237.74ms +[2025-07-17 16:09:08] [Rank 0] step:5861/10000 train_time:1393579ms step_avg:237.77ms +[2025-07-17 16:09:08] [Rank 0] step:5861/10000 train_time:1393579ms step_avg:237.77ms +[2025-07-17 16:09:16] [Rank 0] PRINT: step:5875/10000 val_loss:4.5486 train_time:1397291ms step_avg:237.84ms +[2025-07-17 16:09:16] [Rank 0] PRINT: step:5875/10000 val_loss:4.5486 train_time:1397291ms step_avg:237.84ms +[2025-07-17 16:09:18] [Rank 0] step:5881/10000 train_time:1398518ms step_avg:237.80ms +[2025-07-17 16:09:18] [Rank 0] step:5881/10000 train_time:1398518ms step_avg:237.80ms +[2025-07-17 16:09:23] [Rank 0] step:5901/10000 train_time:1403470ms step_avg:237.84ms +[2025-07-17 16:09:23] [Rank 0] step:5901/10000 train_time:1403470ms step_avg:237.84ms +[2025-07-17 16:09:28] [Rank 0] step:5921/10000 train_time:1408413ms step_avg:237.87ms +[2025-07-17 16:09:28] [Rank 0] step:5921/10000 train_time:1408413ms step_avg:237.87ms +[2025-07-17 16:09:33] [Rank 0] step:5941/10000 
train_time:1413367ms step_avg:237.90ms +[2025-07-17 16:09:33] [Rank 0] step:5941/10000 train_time:1413367ms step_avg:237.90ms +[2025-07-17 16:09:38] [Rank 0] step:5961/10000 train_time:1418324ms step_avg:237.93ms +[2025-07-17 16:09:38] [Rank 0] step:5961/10000 train_time:1418324ms step_avg:237.93ms +[2025-07-17 16:09:42] [Rank 0] step:5981/10000 train_time:1423281ms step_avg:237.97ms +[2025-07-17 16:09:42] [Rank 0] step:5981/10000 train_time:1423281ms step_avg:237.97ms +[2025-07-17 16:09:52] [Rank 0] PRINT: step:6000/10000 val_loss:4.4125 train_time:1428246ms step_avg:238.04ms +[2025-07-17 16:09:52] [Rank 0] PRINT: step:6000/10000 val_loss:4.4125 train_time:1428246ms step_avg:238.04ms +[2025-07-17 16:09:52] [Rank 0] step:6001/10000 train_time:1428263ms step_avg:238.00ms +[2025-07-17 16:09:52] [Rank 0] step:6001/10000 train_time:1428263ms step_avg:238.00ms +[2025-07-17 16:09:57] [Rank 0] step:6021/10000 train_time:1433192ms step_avg:238.03ms +[2025-07-17 16:09:57] [Rank 0] step:6021/10000 train_time:1433192ms step_avg:238.03ms +[2025-07-17 16:10:02] [Rank 0] step:6041/10000 train_time:1438148ms step_avg:238.06ms +[2025-07-17 16:10:02] [Rank 0] step:6041/10000 train_time:1438148ms step_avg:238.06ms +[2025-07-17 16:10:07] [Rank 0] step:6061/10000 train_time:1443101ms step_avg:238.10ms +[2025-07-17 16:10:07] [Rank 0] step:6061/10000 train_time:1443101ms step_avg:238.10ms +[2025-07-17 16:10:12] [Rank 0] step:6081/10000 train_time:1448056ms step_avg:238.13ms +[2025-07-17 16:10:12] [Rank 0] step:6081/10000 train_time:1448056ms step_avg:238.13ms +[2025-07-17 16:10:17] [Rank 0] step:6101/10000 train_time:1453507ms step_avg:238.24ms +[2025-07-17 16:10:17] [Rank 0] step:6101/10000 train_time:1453507ms step_avg:238.24ms +[2025-07-17 16:10:22] [Rank 0] step:6121/10000 train_time:1458471ms step_avg:238.27ms +[2025-07-17 16:10:22] [Rank 0] step:6121/10000 train_time:1458471ms step_avg:238.27ms +[2025-07-17 16:10:28] [Rank 0] PRINT: step:6125/10000 val_loss:4.4803 
train_time:1459715ms step_avg:238.32ms +[2025-07-17 16:10:28] [Rank 0] PRINT: step:6125/10000 val_loss:4.4803 train_time:1459715ms step_avg:238.32ms +[2025-07-17 16:10:32] [Rank 0] step:6141/10000 train_time:1463425ms step_avg:238.30ms +[2025-07-17 16:10:32] [Rank 0] step:6141/10000 train_time:1463425ms step_avg:238.30ms +[2025-07-17 16:10:37] [Rank 0] step:6161/10000 train_time:1468375ms step_avg:238.33ms +[2025-07-17 16:10:37] [Rank 0] step:6161/10000 train_time:1468375ms step_avg:238.33ms +[2025-07-17 16:10:42] [Rank 0] step:6181/10000 train_time:1473337ms step_avg:238.37ms +[2025-07-17 16:10:42] [Rank 0] step:6181/10000 train_time:1473337ms step_avg:238.37ms +[2025-07-17 16:10:47] [Rank 0] step:6201/10000 train_time:1478307ms step_avg:238.40ms +[2025-07-17 16:10:47] [Rank 0] step:6201/10000 train_time:1478307ms step_avg:238.40ms +[2025-07-17 16:10:52] [Rank 0] step:6221/10000 train_time:1483270ms step_avg:238.43ms +[2025-07-17 16:10:52] [Rank 0] step:6221/10000 train_time:1483270ms step_avg:238.43ms +[2025-07-17 16:10:57] [Rank 0] step:6241/10000 train_time:1488237ms step_avg:238.46ms +[2025-07-17 16:10:57] [Rank 0] step:6241/10000 train_time:1488237ms step_avg:238.46ms +[2025-07-17 16:11:04] [Rank 0] PRINT: step:6250/10000 val_loss:4.5192 train_time:1490821ms step_avg:238.53ms +[2025-07-17 16:11:04] [Rank 0] PRINT: step:6250/10000 val_loss:4.5192 train_time:1490821ms step_avg:238.53ms +[2025-07-17 16:11:07] [Rank 0] step:6261/10000 train_time:1493293ms step_avg:238.51ms +[2025-07-17 16:11:07] [Rank 0] step:6261/10000 train_time:1493293ms step_avg:238.51ms +[2025-07-17 16:11:12] [Rank 0] step:6281/10000 train_time:1498264ms step_avg:238.54ms +[2025-07-17 16:11:12] [Rank 0] step:6281/10000 train_time:1498264ms step_avg:238.54ms +[2025-07-17 16:11:17] [Rank 0] step:6301/10000 train_time:1503224ms step_avg:238.57ms +[2025-07-17 16:11:17] [Rank 0] step:6301/10000 train_time:1503224ms step_avg:238.57ms +[2025-07-17 16:11:22] [Rank 0] step:6321/10000 
train_time:1508186ms step_avg:238.60ms +[2025-07-17 16:11:22] [Rank 0] step:6321/10000 train_time:1508186ms step_avg:238.60ms +[2025-07-17 16:11:27] [Rank 0] step:6341/10000 train_time:1513250ms step_avg:238.65ms +[2025-07-17 16:11:27] [Rank 0] step:6341/10000 train_time:1513250ms step_avg:238.65ms +[2025-07-17 16:11:32] [Rank 0] step:6361/10000 train_time:1518198ms step_avg:238.67ms +[2025-07-17 16:11:32] [Rank 0] step:6361/10000 train_time:1518198ms step_avg:238.67ms +[2025-07-17 16:11:40] [Rank 0] PRINT: step:6375/10000 val_loss:4.5111 train_time:1521913ms step_avg:238.73ms +[2025-07-17 16:11:40] [Rank 0] PRINT: step:6375/10000 val_loss:4.5111 train_time:1521913ms step_avg:238.73ms +[2025-07-17 16:11:41] [Rank 0] step:6381/10000 train_time:1523147ms step_avg:238.70ms +[2025-07-17 16:11:41] [Rank 0] step:6381/10000 train_time:1523147ms step_avg:238.70ms +[2025-07-17 16:11:46] [Rank 0] step:6401/10000 train_time:1528088ms step_avg:238.73ms +[2025-07-17 16:11:46] [Rank 0] step:6401/10000 train_time:1528088ms step_avg:238.73ms +[2025-07-17 16:11:51] [Rank 0] step:6421/10000 train_time:1533039ms step_avg:238.75ms +[2025-07-17 16:11:51] [Rank 0] step:6421/10000 train_time:1533039ms step_avg:238.75ms +[2025-07-17 16:11:56] [Rank 0] step:6441/10000 train_time:1537991ms step_avg:238.78ms +[2025-07-17 16:11:56] [Rank 0] step:6441/10000 train_time:1537991ms step_avg:238.78ms +[2025-07-17 16:12:01] [Rank 0] step:6461/10000 train_time:1542952ms step_avg:238.81ms +[2025-07-17 16:12:01] [Rank 0] step:6461/10000 train_time:1542952ms step_avg:238.81ms +[2025-07-17 16:12:06] [Rank 0] step:6481/10000 train_time:1547907ms step_avg:238.84ms +[2025-07-17 16:12:06] [Rank 0] step:6481/10000 train_time:1547907ms step_avg:238.84ms +[2025-07-17 16:12:15] [Rank 0] PRINT: step:6500/10000 val_loss:4.5639 train_time:1552861ms step_avg:238.90ms +[2025-07-17 16:12:15] [Rank 0] PRINT: step:6500/10000 val_loss:4.5639 train_time:1552861ms step_avg:238.90ms +[2025-07-17 16:12:16] [Rank 0] 
step:6501/10000 train_time:1552879ms step_avg:238.87ms +[2025-07-17 16:12:16] [Rank 0] step:6501/10000 train_time:1552879ms step_avg:238.87ms +[2025-07-17 16:12:21] [Rank 0] step:6521/10000 train_time:1557809ms step_avg:238.89ms +[2025-07-17 16:12:21] [Rank 0] step:6521/10000 train_time:1557809ms step_avg:238.89ms +[2025-07-17 16:12:26] [Rank 0] step:6541/10000 train_time:1562763ms step_avg:238.92ms +[2025-07-17 16:12:26] [Rank 0] step:6541/10000 train_time:1562763ms step_avg:238.92ms +[2025-07-17 16:12:31] [Rank 0] step:6561/10000 train_time:1567729ms step_avg:238.95ms +[2025-07-17 16:12:31] [Rank 0] step:6561/10000 train_time:1567729ms step_avg:238.95ms +[2025-07-17 16:12:36] [Rank 0] step:6581/10000 train_time:1572689ms step_avg:238.97ms +[2025-07-17 16:12:36] [Rank 0] step:6581/10000 train_time:1572689ms step_avg:238.97ms +[2025-07-17 16:12:40] [Rank 0] step:6601/10000 train_time:1577657ms step_avg:239.00ms +[2025-07-17 16:12:40] [Rank 0] step:6601/10000 train_time:1577657ms step_avg:239.00ms +[2025-07-17 16:12:46] [Rank 0] step:6621/10000 train_time:1583122ms step_avg:239.11ms +[2025-07-17 16:12:46] [Rank 0] step:6621/10000 train_time:1583122ms step_avg:239.11ms +[2025-07-17 16:12:52] [Rank 0] PRINT: step:6625/10000 val_loss:4.3954 train_time:1584368ms step_avg:239.15ms +[2025-07-17 16:12:52] [Rank 0] PRINT: step:6625/10000 val_loss:4.3954 train_time:1584368ms step_avg:239.15ms +[2025-07-17 16:12:56] [Rank 0] step:6641/10000 train_time:1588068ms step_avg:239.13ms +[2025-07-17 16:12:56] [Rank 0] step:6641/10000 train_time:1588068ms step_avg:239.13ms +[2025-07-17 16:13:01] [Rank 0] step:6661/10000 train_time:1593022ms step_avg:239.16ms +[2025-07-17 16:13:01] [Rank 0] step:6661/10000 train_time:1593022ms step_avg:239.16ms +[2025-07-17 16:13:06] [Rank 0] step:6681/10000 train_time:1598025ms step_avg:239.19ms +[2025-07-17 16:13:06] [Rank 0] step:6681/10000 train_time:1598025ms step_avg:239.19ms +[2025-07-17 16:13:11] [Rank 0] step:6701/10000 train_time:1603039ms 
step_avg:239.22ms +[2025-07-17 16:13:11] [Rank 0] step:6701/10000 train_time:1603039ms step_avg:239.22ms +[2025-07-17 16:13:16] [Rank 0] step:6721/10000 train_time:1608076ms step_avg:239.26ms +[2025-07-17 16:13:16] [Rank 0] step:6721/10000 train_time:1608076ms step_avg:239.26ms +[2025-07-17 16:13:21] [Rank 0] step:6741/10000 train_time:1613111ms step_avg:239.30ms +[2025-07-17 16:13:21] [Rank 0] step:6741/10000 train_time:1613111ms step_avg:239.30ms +[2025-07-17 16:13:28] [Rank 0] PRINT: step:6750/10000 val_loss:4.1887 train_time:1615621ms step_avg:239.35ms +[2025-07-17 16:13:28] [Rank 0] PRINT: step:6750/10000 val_loss:4.1887 train_time:1615621ms step_avg:239.35ms +[2025-07-17 16:13:30] [Rank 0] step:6761/10000 train_time:1618127ms step_avg:239.33ms +[2025-07-17 16:13:30] [Rank 0] step:6761/10000 train_time:1618127ms step_avg:239.33ms +[2025-07-17 16:13:35] [Rank 0] step:6781/10000 train_time:1623152ms step_avg:239.37ms +[2025-07-17 16:13:35] [Rank 0] step:6781/10000 train_time:1623152ms step_avg:239.37ms +[2025-07-17 16:13:40] [Rank 0] step:6801/10000 train_time:1628178ms step_avg:239.40ms +[2025-07-17 16:13:40] [Rank 0] step:6801/10000 train_time:1628178ms step_avg:239.40ms +[2025-07-17 16:13:45] [Rank 0] step:6821/10000 train_time:1633197ms step_avg:239.44ms +[2025-07-17 16:13:45] [Rank 0] step:6821/10000 train_time:1633197ms step_avg:239.44ms +[2025-07-17 16:13:50] [Rank 0] step:6841/10000 train_time:1638221ms step_avg:239.47ms +[2025-07-17 16:13:50] [Rank 0] step:6841/10000 train_time:1638221ms step_avg:239.47ms +[2025-07-17 16:13:55] [Rank 0] step:6861/10000 train_time:1643236ms step_avg:239.50ms +[2025-07-17 16:13:55] [Rank 0] step:6861/10000 train_time:1643236ms step_avg:239.50ms +[2025-07-17 16:14:04] [Rank 0] PRINT: step:6875/10000 val_loss:4.3124 train_time:1646997ms step_avg:239.56ms +[2025-07-17 16:14:04] [Rank 0] PRINT: step:6875/10000 val_loss:4.3124 train_time:1646997ms step_avg:239.56ms +[2025-07-17 16:14:05] [Rank 0] step:6881/10000 
train_time:1648247ms step_avg:239.54ms +[2025-07-17 16:14:05] [Rank 0] step:6881/10000 train_time:1648247ms step_avg:239.54ms +[2025-07-17 16:14:10] [Rank 0] step:6901/10000 train_time:1653255ms step_avg:239.57ms +[2025-07-17 16:14:10] [Rank 0] step:6901/10000 train_time:1653255ms step_avg:239.57ms +[2025-07-17 16:14:15] [Rank 0] step:6921/10000 train_time:1658268ms step_avg:239.60ms +[2025-07-17 16:14:15] [Rank 0] step:6921/10000 train_time:1658268ms step_avg:239.60ms +[2025-07-17 16:14:20] [Rank 0] step:6941/10000 train_time:1663297ms step_avg:239.63ms +[2025-07-17 16:14:20] [Rank 0] step:6941/10000 train_time:1663297ms step_avg:239.63ms +[2025-07-17 16:14:25] [Rank 0] step:6961/10000 train_time:1668319ms step_avg:239.67ms +[2025-07-17 16:14:25] [Rank 0] step:6961/10000 train_time:1668319ms step_avg:239.67ms +[2025-07-17 16:14:30] [Rank 0] step:6981/10000 train_time:1673340ms step_avg:239.70ms +[2025-07-17 16:14:30] [Rank 0] step:6981/10000 train_time:1673340ms step_avg:239.70ms +[2025-07-17 16:14:40] [Rank 0] PRINT: step:7000/10000 val_loss:4.3866 train_time:1678360ms step_avg:239.77ms +[2025-07-17 16:14:40] [Rank 0] PRINT: step:7000/10000 val_loss:4.3866 train_time:1678360ms step_avg:239.77ms +[2025-07-17 16:14:40] [Rank 0] step:7001/10000 train_time:1678376ms step_avg:239.73ms +[2025-07-17 16:14:40] [Rank 0] step:7001/10000 train_time:1678376ms step_avg:239.73ms +[2025-07-17 16:14:45] [Rank 0] step:7021/10000 train_time:1683368ms step_avg:239.76ms +[2025-07-17 16:14:45] [Rank 0] step:7021/10000 train_time:1683368ms step_avg:239.76ms +[2025-07-17 16:14:50] [Rank 0] step:7041/10000 train_time:1688384ms step_avg:239.79ms +[2025-07-17 16:14:50] [Rank 0] step:7041/10000 train_time:1688384ms step_avg:239.79ms +[2025-07-17 16:14:55] [Rank 0] step:7061/10000 train_time:1693395ms step_avg:239.82ms +[2025-07-17 16:14:55] [Rank 0] step:7061/10000 train_time:1693395ms step_avg:239.82ms +[2025-07-17 16:15:00] [Rank 0] step:7081/10000 train_time:1698414ms step_avg:239.86ms 
+[2025-07-17 16:15:00] [Rank 0] step:7081/10000 train_time:1698414ms step_avg:239.86ms +[2025-07-17 16:15:05] [Rank 0] step:7101/10000 train_time:1703424ms step_avg:239.89ms +[2025-07-17 16:15:05] [Rank 0] step:7101/10000 train_time:1703424ms step_avg:239.89ms +[2025-07-17 16:15:10] [Rank 0] step:7121/10000 train_time:1708518ms step_avg:239.93ms +[2025-07-17 16:15:10] [Rank 0] step:7121/10000 train_time:1708518ms step_avg:239.93ms +[2025-07-17 16:15:16] [Rank 0] PRINT: step:7125/10000 val_loss:4.4244 train_time:1709776ms step_avg:239.97ms +[2025-07-17 16:15:16] [Rank 0] PRINT: step:7125/10000 val_loss:4.4244 train_time:1709776ms step_avg:239.97ms +[2025-07-17 16:15:20] [Rank 0] step:7141/10000 train_time:1713532ms step_avg:239.96ms +[2025-07-17 16:15:20] [Rank 0] step:7141/10000 train_time:1713532ms step_avg:239.96ms +[2025-07-17 16:15:25] [Rank 0] step:7161/10000 train_time:1718550ms step_avg:239.99ms +[2025-07-17 16:15:25] [Rank 0] step:7161/10000 train_time:1718550ms step_avg:239.99ms +[2025-07-17 16:15:30] [Rank 0] step:7181/10000 train_time:1723567ms step_avg:240.02ms +[2025-07-17 16:15:30] [Rank 0] step:7181/10000 train_time:1723567ms step_avg:240.02ms +[2025-07-17 16:15:35] [Rank 0] step:7201/10000 train_time:1728595ms step_avg:240.05ms +[2025-07-17 16:15:35] [Rank 0] step:7201/10000 train_time:1728595ms step_avg:240.05ms +[2025-07-17 16:15:40] [Rank 0] step:7221/10000 train_time:1733610ms step_avg:240.08ms +[2025-07-17 16:15:40] [Rank 0] step:7221/10000 train_time:1733610ms step_avg:240.08ms +[2025-07-17 16:15:45] [Rank 0] step:7241/10000 train_time:1738625ms step_avg:240.11ms +[2025-07-17 16:15:45] [Rank 0] step:7241/10000 train_time:1738625ms step_avg:240.11ms +[2025-07-17 16:15:52] [Rank 0] PRINT: step:7250/10000 val_loss:4.4694 train_time:1741142ms step_avg:240.16ms +[2025-07-17 16:15:52] [Rank 0] PRINT: step:7250/10000 val_loss:4.4694 train_time:1741142ms step_avg:240.16ms +[2025-07-17 16:15:55] [Rank 0] step:7261/10000 train_time:1743641ms 
step_avg:240.14ms +[2025-07-17 16:15:55] [Rank 0] step:7261/10000 train_time:1743641ms step_avg:240.14ms +[2025-07-17 16:16:00] [Rank 0] step:7281/10000 train_time:1748660ms step_avg:240.17ms +[2025-07-17 16:16:00] [Rank 0] step:7281/10000 train_time:1748660ms step_avg:240.17ms +[2025-07-17 16:16:05] [Rank 0] step:7301/10000 train_time:1753676ms step_avg:240.20ms +[2025-07-17 16:16:05] [Rank 0] step:7301/10000 train_time:1753676ms step_avg:240.20ms +[2025-07-17 16:16:10] [Rank 0] step:7321/10000 train_time:1758713ms step_avg:240.23ms +[2025-07-17 16:16:10] [Rank 0] step:7321/10000 train_time:1758713ms step_avg:240.23ms +[2025-07-17 16:16:15] [Rank 0] step:7341/10000 train_time:1763732ms step_avg:240.26ms +[2025-07-17 16:16:15] [Rank 0] step:7341/10000 train_time:1763732ms step_avg:240.26ms +[2025-07-17 16:16:20] [Rank 0] step:7361/10000 train_time:1768762ms step_avg:240.29ms +[2025-07-17 16:16:20] [Rank 0] step:7361/10000 train_time:1768762ms step_avg:240.29ms +[2025-07-17 16:16:28] [Rank 0] PRINT: step:7375/10000 val_loss:4.2017 train_time:1772535ms step_avg:240.34ms +[2025-07-17 16:16:28] [Rank 0] PRINT: step:7375/10000 val_loss:4.2017 train_time:1772535ms step_avg:240.34ms +[2025-07-17 16:16:30] [Rank 0] step:7381/10000 train_time:1773785ms step_avg:240.32ms +[2025-07-17 16:16:30] [Rank 0] step:7381/10000 train_time:1773785ms step_avg:240.32ms +[2025-07-17 16:16:35] [Rank 0] step:7401/10000 train_time:1778811ms step_avg:240.35ms +[2025-07-17 16:16:35] [Rank 0] step:7401/10000 train_time:1778811ms step_avg:240.35ms +[2025-07-17 16:16:40] [Rank 0] step:7421/10000 train_time:1783832ms step_avg:240.38ms +[2025-07-17 16:16:40] [Rank 0] step:7421/10000 train_time:1783832ms step_avg:240.38ms +[2025-07-17 16:16:45] [Rank 0] step:7441/10000 train_time:1788867ms step_avg:240.41ms +[2025-07-17 16:16:45] [Rank 0] step:7441/10000 train_time:1788867ms step_avg:240.41ms +[2025-07-17 16:16:50] [Rank 0] step:7461/10000 train_time:1793891ms step_avg:240.44ms +[2025-07-17 
16:16:50] [Rank 0] step:7461/10000 train_time:1793891ms step_avg:240.44ms +[2025-07-17 16:16:55] [Rank 0] step:7481/10000 train_time:1798926ms step_avg:240.47ms +[2025-07-17 16:16:55] [Rank 0] step:7481/10000 train_time:1798926ms step_avg:240.47ms +[2025-07-17 16:17:04] [Rank 0] PRINT: step:7500/10000 val_loss:4.5282 train_time:1803972ms step_avg:240.53ms +[2025-07-17 16:17:04] [Rank 0] PRINT: step:7500/10000 val_loss:4.5282 train_time:1803972ms step_avg:240.53ms +[2025-07-17 16:17:04] [Rank 0] step:7501/10000 train_time:1803990ms step_avg:240.50ms +[2025-07-17 16:17:04] [Rank 0] step:7501/10000 train_time:1803990ms step_avg:240.50ms +[2025-07-17 16:17:09] [Rank 0] step:7521/10000 train_time:1809009ms step_avg:240.53ms +[2025-07-17 16:17:09] [Rank 0] step:7521/10000 train_time:1809009ms step_avg:240.53ms +[2025-07-17 16:17:14] [Rank 0] step:7541/10000 train_time:1814037ms step_avg:240.56ms +[2025-07-17 16:17:14] [Rank 0] step:7541/10000 train_time:1814037ms step_avg:240.56ms +[2025-07-17 16:17:19] [Rank 0] step:7561/10000 train_time:1819073ms step_avg:240.59ms +[2025-07-17 16:17:19] [Rank 0] step:7561/10000 train_time:1819073ms step_avg:240.59ms +[2025-07-17 16:17:24] [Rank 0] step:7581/10000 train_time:1824109ms step_avg:240.62ms +[2025-07-17 16:17:24] [Rank 0] step:7581/10000 train_time:1824109ms step_avg:240.62ms +[2025-07-17 16:17:29] [Rank 0] step:7601/10000 train_time:1829158ms step_avg:240.65ms +[2025-07-17 16:17:29] [Rank 0] step:7601/10000 train_time:1829158ms step_avg:240.65ms +[2025-07-17 16:17:35] [Rank 0] step:7621/10000 train_time:1834215ms step_avg:240.68ms +[2025-07-17 16:17:35] [Rank 0] step:7621/10000 train_time:1834215ms step_avg:240.68ms +[2025-07-17 16:17:41] [Rank 0] PRINT: step:7625/10000 val_loss:4.5624 train_time:1836003ms step_avg:240.79ms +[2025-07-17 16:17:41] [Rank 0] PRINT: step:7625/10000 val_loss:4.5624 train_time:1836003ms step_avg:240.79ms +[2025-07-17 16:17:45] [Rank 0] step:7641/10000 train_time:1839780ms step_avg:240.78ms 
+[2025-07-17 16:17:45] [Rank 0] step:7641/10000 train_time:1839780ms step_avg:240.78ms +[2025-07-17 16:17:50] [Rank 0] step:7661/10000 train_time:1844826ms step_avg:240.81ms +[2025-07-17 16:17:50] [Rank 0] step:7661/10000 train_time:1844826ms step_avg:240.81ms +[2025-07-17 16:17:55] [Rank 0] step:7681/10000 train_time:1849885ms step_avg:240.84ms +[2025-07-17 16:17:55] [Rank 0] step:7681/10000 train_time:1849885ms step_avg:240.84ms +[2025-07-17 16:18:00] [Rank 0] step:7701/10000 train_time:1854925ms step_avg:240.87ms +[2025-07-17 16:18:00] [Rank 0] step:7701/10000 train_time:1854925ms step_avg:240.87ms +[2025-07-17 16:18:05] [Rank 0] step:7721/10000 train_time:1859964ms step_avg:240.90ms +[2025-07-17 16:18:05] [Rank 0] step:7721/10000 train_time:1859964ms step_avg:240.90ms +[2025-07-17 16:18:10] [Rank 0] step:7741/10000 train_time:1865003ms step_avg:240.93ms +[2025-07-17 16:18:10] [Rank 0] step:7741/10000 train_time:1865003ms step_avg:240.93ms +[2025-07-17 16:18:17] [Rank 0] PRINT: step:7750/10000 val_loss:4.4738 train_time:1867543ms step_avg:240.97ms +[2025-07-17 16:18:17] [Rank 0] PRINT: step:7750/10000 val_loss:4.4738 train_time:1867543ms step_avg:240.97ms +[2025-07-17 16:18:20] [Rank 0] step:7761/10000 train_time:1870058ms step_avg:240.96ms +[2025-07-17 16:18:20] [Rank 0] step:7761/10000 train_time:1870058ms step_avg:240.96ms +[2025-07-17 16:18:25] [Rank 0] step:7781/10000 train_time:1875100ms step_avg:240.98ms +[2025-07-17 16:18:25] [Rank 0] step:7781/10000 train_time:1875100ms step_avg:240.98ms +[2025-07-17 16:18:30] [Rank 0] step:7801/10000 train_time:1880144ms step_avg:241.01ms +[2025-07-17 16:18:30] [Rank 0] step:7801/10000 train_time:1880144ms step_avg:241.01ms +[2025-07-17 16:18:35] [Rank 0] step:7821/10000 train_time:1885182ms step_avg:241.04ms +[2025-07-17 16:18:35] [Rank 0] step:7821/10000 train_time:1885182ms step_avg:241.04ms +[2025-07-17 16:18:40] [Rank 0] step:7841/10000 train_time:1890224ms step_avg:241.07ms +[2025-07-17 16:18:40] [Rank 0] 
step:7841/10000 train_time:1890224ms step_avg:241.07ms +[2025-07-17 16:18:45] [Rank 0] step:7861/10000 train_time:1895248ms step_avg:241.10ms +[2025-07-17 16:18:45] [Rank 0] step:7861/10000 train_time:1895248ms step_avg:241.10ms +[2025-07-17 16:18:53] [Rank 0] PRINT: step:7875/10000 val_loss:4.5438 train_time:1899020ms step_avg:241.15ms +[2025-07-17 16:18:53] [Rank 0] PRINT: step:7875/10000 val_loss:4.5438 train_time:1899020ms step_avg:241.15ms +[2025-07-17 16:18:55] [Rank 0] step:7881/10000 train_time:1900271ms step_avg:241.12ms +[2025-07-17 16:18:55] [Rank 0] step:7881/10000 train_time:1900271ms step_avg:241.12ms +[2025-07-17 16:19:00] [Rank 0] step:7901/10000 train_time:1905304ms step_avg:241.15ms +[2025-07-17 16:19:00] [Rank 0] step:7901/10000 train_time:1905304ms step_avg:241.15ms +[2025-07-17 16:19:05] [Rank 0] step:7921/10000 train_time:1910339ms step_avg:241.17ms +[2025-07-17 16:19:05] [Rank 0] step:7921/10000 train_time:1910339ms step_avg:241.17ms +[2025-07-17 16:19:10] [Rank 0] step:7941/10000 train_time:1915382ms step_avg:241.20ms +[2025-07-17 16:19:10] [Rank 0] step:7941/10000 train_time:1915382ms step_avg:241.20ms +[2025-07-17 16:19:15] [Rank 0] step:7961/10000 train_time:1920430ms step_avg:241.23ms +[2025-07-17 16:19:15] [Rank 0] step:7961/10000 train_time:1920430ms step_avg:241.23ms +[2025-07-17 16:19:20] [Rank 0] step:7981/10000 train_time:1925463ms step_avg:241.26ms +[2025-07-17 16:19:20] [Rank 0] step:7981/10000 train_time:1925463ms step_avg:241.26ms +[2025-07-17 16:19:30] [Rank 0] PRINT: step:8000/10000 val_loss:4.6049 train_time:1930516ms step_avg:241.31ms +[2025-07-17 16:19:30] [Rank 0] PRINT: step:8000/10000 val_loss:4.6049 train_time:1930516ms step_avg:241.31ms +[2025-07-17 16:19:30] [Rank 0] step:8001/10000 train_time:1930533ms step_avg:241.29ms +[2025-07-17 16:19:30] [Rank 0] step:8001/10000 train_time:1930533ms step_avg:241.29ms +[2025-07-17 16:19:35] [Rank 0] step:8021/10000 train_time:1935549ms step_avg:241.31ms +[2025-07-17 16:19:35] 
[Rank 0] step:8021/10000 train_time:1935549ms step_avg:241.31ms +[2025-07-17 16:19:40] [Rank 0] step:8041/10000 train_time:1940607ms step_avg:241.34ms +[2025-07-17 16:19:40] [Rank 0] step:8041/10000 train_time:1940607ms step_avg:241.34ms +[2025-07-17 16:19:45] [Rank 0] step:8061/10000 train_time:1945644ms step_avg:241.37ms +[2025-07-17 16:19:45] [Rank 0] step:8061/10000 train_time:1945644ms step_avg:241.37ms +[2025-07-17 16:19:50] [Rank 0] step:8081/10000 train_time:1950692ms step_avg:241.39ms +[2025-07-17 16:19:50] [Rank 0] step:8081/10000 train_time:1950692ms step_avg:241.39ms +[2025-07-17 16:19:55] [Rank 0] step:8101/10000 train_time:1955727ms step_avg:241.42ms +[2025-07-17 16:19:55] [Rank 0] step:8101/10000 train_time:1955727ms step_avg:241.42ms +[2025-07-17 16:20:00] [Rank 0] step:8121/10000 train_time:1960764ms step_avg:241.44ms +[2025-07-17 16:20:00] [Rank 0] step:8121/10000 train_time:1960764ms step_avg:241.44ms +[2025-07-17 16:20:05] [Rank 0] PRINT: step:8125/10000 val_loss:4.5518 train_time:1962033ms step_avg:241.48ms +[2025-07-17 16:20:05] [Rank 0] PRINT: step:8125/10000 val_loss:4.5518 train_time:1962033ms step_avg:241.48ms +[2025-07-17 16:20:09] [Rank 0] step:8141/10000 train_time:1965881ms step_avg:241.48ms +[2025-07-17 16:20:09] [Rank 0] step:8141/10000 train_time:1965881ms step_avg:241.48ms +[2025-07-17 16:20:14] [Rank 0] step:8161/10000 train_time:1970955ms step_avg:241.51ms +[2025-07-17 16:20:14] [Rank 0] step:8161/10000 train_time:1970955ms step_avg:241.51ms +[2025-07-17 16:20:20] [Rank 0] step:8181/10000 train_time:1976064ms step_avg:241.54ms +[2025-07-17 16:20:20] [Rank 0] step:8181/10000 train_time:1976064ms step_avg:241.54ms +[2025-07-17 16:20:25] [Rank 0] step:8201/10000 train_time:1981154ms step_avg:241.57ms +[2025-07-17 16:20:25] [Rank 0] step:8201/10000 train_time:1981154ms step_avg:241.57ms +[2025-07-17 16:20:30] [Rank 0] step:8221/10000 train_time:1986252ms step_avg:241.61ms +[2025-07-17 16:20:30] [Rank 0] step:8221/10000 
train_time:1986252ms step_avg:241.61ms +[2025-07-17 16:20:35] [Rank 0] step:8241/10000 train_time:1991354ms step_avg:241.64ms +[2025-07-17 16:20:35] [Rank 0] step:8241/10000 train_time:1991354ms step_avg:241.64ms +[2025-07-17 16:20:42] [Rank 0] PRINT: step:8250/10000 val_loss:4.5086 train_time:1993917ms step_avg:241.69ms +[2025-07-17 16:20:42] [Rank 0] PRINT: step:8250/10000 val_loss:4.5086 train_time:1993917ms step_avg:241.69ms +[2025-07-17 16:20:45] [Rank 0] step:8261/10000 train_time:1996461ms step_avg:241.67ms +[2025-07-17 16:20:45] [Rank 0] step:8261/10000 train_time:1996461ms step_avg:241.67ms +[2025-07-17 16:20:50] [Rank 0] step:8281/10000 train_time:2001580ms step_avg:241.71ms +[2025-07-17 16:20:50] [Rank 0] step:8281/10000 train_time:2001580ms step_avg:241.71ms +[2025-07-17 16:20:55] [Rank 0] step:8301/10000 train_time:2006671ms step_avg:241.74ms +[2025-07-17 16:20:55] [Rank 0] step:8301/10000 train_time:2006671ms step_avg:241.74ms +[2025-07-17 16:21:00] [Rank 0] step:8321/10000 train_time:2011773ms step_avg:241.77ms +[2025-07-17 16:21:00] [Rank 0] step:8321/10000 train_time:2011773ms step_avg:241.77ms +[2025-07-17 16:21:05] [Rank 0] step:8341/10000 train_time:2016883ms step_avg:241.80ms +[2025-07-17 16:21:05] [Rank 0] step:8341/10000 train_time:2016883ms step_avg:241.80ms +[2025-07-17 16:21:10] [Rank 0] step:8361/10000 train_time:2021979ms step_avg:241.83ms +[2025-07-17 16:21:10] [Rank 0] step:8361/10000 train_time:2021979ms step_avg:241.83ms +[2025-07-17 16:21:19] [Rank 0] PRINT: step:8375/10000 val_loss:4.6134 train_time:2025804ms step_avg:241.89ms +[2025-07-17 16:21:19] [Rank 0] PRINT: step:8375/10000 val_loss:4.6134 train_time:2025804ms step_avg:241.89ms +[2025-07-17 16:21:20] [Rank 0] step:8381/10000 train_time:2027061ms step_avg:241.86ms +[2025-07-17 16:21:20] [Rank 0] step:8381/10000 train_time:2027061ms step_avg:241.86ms +[2025-07-17 16:21:25] [Rank 0] step:8401/10000 train_time:2032147ms step_avg:241.89ms +[2025-07-17 16:21:25] [Rank 0] 
step:8401/10000 train_time:2032147ms step_avg:241.89ms +[2025-07-17 16:21:30] [Rank 0] step:8421/10000 train_time:2037248ms step_avg:241.92ms +[2025-07-17 16:21:30] [Rank 0] step:8421/10000 train_time:2037248ms step_avg:241.92ms +[2025-07-17 16:21:35] [Rank 0] step:8441/10000 train_time:2042351ms step_avg:241.96ms +[2025-07-17 16:21:35] [Rank 0] step:8441/10000 train_time:2042351ms step_avg:241.96ms +[2025-07-17 16:21:40] [Rank 0] step:8461/10000 train_time:2047461ms step_avg:241.99ms +[2025-07-17 16:21:40] [Rank 0] step:8461/10000 train_time:2047461ms step_avg:241.99ms +[2025-07-17 16:21:46] [Rank 0] step:8481/10000 train_time:2052553ms step_avg:242.02ms +[2025-07-17 16:21:46] [Rank 0] step:8481/10000 train_time:2052553ms step_avg:242.02ms +[2025-07-17 16:21:55] [Rank 0] PRINT: step:8500/10000 val_loss:4.5333 train_time:2057664ms step_avg:242.08ms +[2025-07-17 16:21:55] [Rank 0] PRINT: step:8500/10000 val_loss:4.5333 train_time:2057664ms step_avg:242.08ms +[2025-07-17 16:21:55] [Rank 0] step:8501/10000 train_time:2057681ms step_avg:242.05ms +[2025-07-17 16:21:55] [Rank 0] step:8501/10000 train_time:2057681ms step_avg:242.05ms +[2025-07-17 16:22:01] [Rank 0] step:8521/10000 train_time:2062763ms step_avg:242.08ms +[2025-07-17 16:22:01] [Rank 0] step:8521/10000 train_time:2062763ms step_avg:242.08ms +[2025-07-17 16:22:06] [Rank 0] step:8541/10000 train_time:2067880ms step_avg:242.11ms +[2025-07-17 16:22:06] [Rank 0] step:8541/10000 train_time:2067880ms step_avg:242.11ms +[2025-07-17 16:22:11] [Rank 0] step:8561/10000 train_time:2072974ms step_avg:242.14ms +[2025-07-17 16:22:11] [Rank 0] step:8561/10000 train_time:2072974ms step_avg:242.14ms +[2025-07-17 16:22:16] [Rank 0] step:8581/10000 train_time:2078075ms step_avg:242.17ms +[2025-07-17 16:22:16] [Rank 0] step:8581/10000 train_time:2078075ms step_avg:242.17ms +[2025-07-17 16:22:21] [Rank 0] step:8601/10000 train_time:2083160ms step_avg:242.20ms +[2025-07-17 16:22:21] [Rank 0] step:8601/10000 train_time:2083160ms 
step_avg:242.20ms +[2025-07-17 16:22:26] [Rank 0] step:8621/10000 train_time:2088246ms step_avg:242.23ms +[2025-07-17 16:22:26] [Rank 0] step:8621/10000 train_time:2088246ms step_avg:242.23ms +[2025-07-17 16:22:32] [Rank 0] PRINT: step:8625/10000 val_loss:4.5703 train_time:2089524ms step_avg:242.26ms +[2025-07-17 16:22:32] [Rank 0] PRINT: step:8625/10000 val_loss:4.5703 train_time:2089524ms step_avg:242.26ms +[2025-07-17 16:22:36] [Rank 0] step:8641/10000 train_time:2093423ms step_avg:242.27ms +[2025-07-17 16:22:36] [Rank 0] step:8641/10000 train_time:2093423ms step_avg:242.27ms +[2025-07-17 16:22:41] [Rank 0] step:8661/10000 train_time:2098514ms step_avg:242.29ms +[2025-07-17 16:22:41] [Rank 0] step:8661/10000 train_time:2098514ms step_avg:242.29ms +[2025-07-17 16:22:46] [Rank 0] step:8681/10000 train_time:2103600ms step_avg:242.32ms +[2025-07-17 16:22:46] [Rank 0] step:8681/10000 train_time:2103600ms step_avg:242.32ms +[2025-07-17 16:22:51] [Rank 0] step:8701/10000 train_time:2108698ms step_avg:242.35ms +[2025-07-17 16:22:51] [Rank 0] step:8701/10000 train_time:2108698ms step_avg:242.35ms +[2025-07-17 16:22:56] [Rank 0] step:8721/10000 train_time:2113791ms step_avg:242.38ms +[2025-07-17 16:22:56] [Rank 0] step:8721/10000 train_time:2113791ms step_avg:242.38ms +[2025-07-17 16:23:01] [Rank 0] step:8741/10000 train_time:2118883ms step_avg:242.41ms +[2025-07-17 16:23:01] [Rank 0] step:8741/10000 train_time:2118883ms step_avg:242.41ms +[2025-07-17 16:23:08] [Rank 0] PRINT: step:8750/10000 val_loss:4.7376 train_time:2121431ms step_avg:242.45ms +[2025-07-17 16:23:08] [Rank 0] PRINT: step:8750/10000 val_loss:4.7376 train_time:2121431ms step_avg:242.45ms +[2025-07-17 16:23:11] [Rank 0] step:8761/10000 train_time:2123965ms step_avg:242.43ms +[2025-07-17 16:23:11] [Rank 0] step:8761/10000 train_time:2123965ms step_avg:242.43ms +[2025-07-17 16:23:16] [Rank 0] step:8781/10000 train_time:2129054ms step_avg:242.46ms +[2025-07-17 16:23:16] [Rank 0] step:8781/10000 
train_time:2129054ms step_avg:242.46ms +[2025-07-17 16:23:21] [Rank 0] step:8801/10000 train_time:2134149ms step_avg:242.49ms +[2025-07-17 16:23:21] [Rank 0] step:8801/10000 train_time:2134149ms step_avg:242.49ms +[2025-07-17 16:23:27] [Rank 0] step:8821/10000 train_time:2139242ms step_avg:242.52ms +[2025-07-17 16:23:27] [Rank 0] step:8821/10000 train_time:2139242ms step_avg:242.52ms +[2025-07-17 16:23:32] [Rank 0] step:8841/10000 train_time:2144358ms step_avg:242.55ms +[2025-07-17 16:23:32] [Rank 0] step:8841/10000 train_time:2144358ms step_avg:242.55ms +[2025-07-17 16:23:37] [Rank 0] step:8861/10000 train_time:2149469ms step_avg:242.58ms +[2025-07-17 16:23:37] [Rank 0] step:8861/10000 train_time:2149469ms step_avg:242.58ms +[2025-07-17 16:23:45] [Rank 0] PRINT: step:8875/10000 val_loss:4.6835 train_time:2153282ms step_avg:242.62ms +[2025-07-17 16:23:45] [Rank 0] PRINT: step:8875/10000 val_loss:4.6835 train_time:2153282ms step_avg:242.62ms +[2025-07-17 16:23:47] [Rank 0] step:8881/10000 train_time:2154547ms step_avg:242.60ms +[2025-07-17 16:23:47] [Rank 0] step:8881/10000 train_time:2154547ms step_avg:242.60ms +[2025-07-17 16:23:52] [Rank 0] step:8901/10000 train_time:2159633ms step_avg:242.63ms +[2025-07-17 16:23:52] [Rank 0] step:8901/10000 train_time:2159633ms step_avg:242.63ms +[2025-07-17 16:23:57] [Rank 0] step:8921/10000 train_time:2164726ms step_avg:242.66ms +[2025-07-17 16:23:57] [Rank 0] step:8921/10000 train_time:2164726ms step_avg:242.66ms +[2025-07-17 16:24:02] [Rank 0] step:8941/10000 train_time:2169821ms step_avg:242.68ms +[2025-07-17 16:24:02] [Rank 0] step:8941/10000 train_time:2169821ms step_avg:242.68ms +[2025-07-17 16:24:07] [Rank 0] step:8961/10000 train_time:2174920ms step_avg:242.71ms +[2025-07-17 16:24:07] [Rank 0] step:8961/10000 train_time:2174920ms step_avg:242.71ms +[2025-07-17 16:24:12] [Rank 0] step:8981/10000 train_time:2180016ms step_avg:242.74ms +[2025-07-17 16:24:12] [Rank 0] step:8981/10000 train_time:2180016ms step_avg:242.74ms 
+[2025-07-17 16:24:22] [Rank 0] PRINT: step:9000/10000 val_loss:4.6780 train_time:2185120ms step_avg:242.79ms +[2025-07-17 16:24:22] [Rank 0] PRINT: step:9000/10000 val_loss:4.6780 train_time:2185120ms step_avg:242.79ms +[2025-07-17 16:24:22] [Rank 0] step:9001/10000 train_time:2185138ms step_avg:242.77ms +[2025-07-17 16:24:22] [Rank 0] step:9001/10000 train_time:2185138ms step_avg:242.77ms +[2025-07-17 16:24:27] [Rank 0] step:9021/10000 train_time:2190214ms step_avg:242.79ms +[2025-07-17 16:24:27] [Rank 0] step:9021/10000 train_time:2190214ms step_avg:242.79ms +[2025-07-17 16:24:32] [Rank 0] step:9041/10000 train_time:2195338ms step_avg:242.82ms +[2025-07-17 16:24:32] [Rank 0] step:9041/10000 train_time:2195338ms step_avg:242.82ms +[2025-07-17 16:24:37] [Rank 0] step:9061/10000 train_time:2200445ms step_avg:242.85ms +[2025-07-17 16:24:37] [Rank 0] step:9061/10000 train_time:2200445ms step_avg:242.85ms +[2025-07-17 16:24:42] [Rank 0] step:9081/10000 train_time:2205574ms step_avg:242.88ms +[2025-07-17 16:24:42] [Rank 0] step:9081/10000 train_time:2205574ms step_avg:242.88ms +[2025-07-17 16:24:48] [Rank 0] step:9101/10000 train_time:2210692ms step_avg:242.91ms +[2025-07-17 16:24:48] [Rank 0] step:9101/10000 train_time:2210692ms step_avg:242.91ms +[2025-07-17 16:24:53] [Rank 0] step:9121/10000 train_time:2215808ms step_avg:242.93ms +[2025-07-17 16:24:53] [Rank 0] step:9121/10000 train_time:2215808ms step_avg:242.93ms +[2025-07-17 16:24:58] [Rank 0] PRINT: step:9125/10000 val_loss:4.6749 train_time:2217085ms step_avg:242.97ms +[2025-07-17 16:24:58] [Rank 0] PRINT: step:9125/10000 val_loss:4.6749 train_time:2217085ms step_avg:242.97ms +[2025-07-17 16:25:03] [Rank 0] step:9141/10000 train_time:2220892ms step_avg:242.96ms +[2025-07-17 16:25:03] [Rank 0] step:9141/10000 train_time:2220892ms step_avg:242.96ms +[2025-07-17 16:25:08] [Rank 0] step:9161/10000 train_time:2226504ms step_avg:243.04ms +[2025-07-17 16:25:08] [Rank 0] step:9161/10000 train_time:2226504ms 
step_avg:243.04ms +[2025-07-17 16:25:13] [Rank 0] step:9181/10000 train_time:2231603ms step_avg:243.07ms +[2025-07-17 16:25:13] [Rank 0] step:9181/10000 train_time:2231603ms step_avg:243.07ms +[2025-07-17 16:25:18] [Rank 0] step:9201/10000 train_time:2236705ms step_avg:243.09ms +[2025-07-17 16:25:18] [Rank 0] step:9201/10000 train_time:2236705ms step_avg:243.09ms +[2025-07-17 16:25:24] [Rank 0] step:9221/10000 train_time:2241840ms step_avg:243.12ms +[2025-07-17 16:25:24] [Rank 0] step:9221/10000 train_time:2241840ms step_avg:243.12ms +[2025-07-17 16:25:29] [Rank 0] step:9241/10000 train_time:2246947ms step_avg:243.15ms +[2025-07-17 16:25:29] [Rank 0] step:9241/10000 train_time:2246947ms step_avg:243.15ms +[2025-07-17 16:25:36] [Rank 0] PRINT: step:9250/10000 val_loss:4.6845 train_time:2249508ms step_avg:243.19ms +[2025-07-17 16:25:36] [Rank 0] PRINT: step:9250/10000 val_loss:4.6845 train_time:2249508ms step_avg:243.19ms +[2025-07-17 16:25:39] [Rank 0] step:9261/10000 train_time:2252059ms step_avg:243.18ms +[2025-07-17 16:25:39] [Rank 0] step:9261/10000 train_time:2252059ms step_avg:243.18ms +[2025-07-17 16:25:44] [Rank 0] step:9281/10000 train_time:2257136ms step_avg:243.20ms +[2025-07-17 16:25:44] [Rank 0] step:9281/10000 train_time:2257136ms step_avg:243.20ms +[2025-07-17 16:25:49] [Rank 0] step:9301/10000 train_time:2262233ms step_avg:243.22ms +[2025-07-17 16:25:49] [Rank 0] step:9301/10000 train_time:2262233ms step_avg:243.22ms +[2025-07-17 16:25:54] [Rank 0] step:9321/10000 train_time:2267350ms step_avg:243.25ms +[2025-07-17 16:25:54] [Rank 0] step:9321/10000 train_time:2267350ms step_avg:243.25ms +[2025-07-17 16:25:59] [Rank 0] step:9341/10000 train_time:2272448ms step_avg:243.28ms +[2025-07-17 16:25:59] [Rank 0] step:9341/10000 train_time:2272448ms step_avg:243.28ms +[2025-07-17 16:26:04] [Rank 0] step:9361/10000 train_time:2277547ms step_avg:243.30ms +[2025-07-17 16:26:04] [Rank 0] step:9361/10000 train_time:2277547ms step_avg:243.30ms +[2025-07-17 
16:26:12] [Rank 0] PRINT: step:9375/10000 val_loss:4.7142 train_time:2281373ms step_avg:243.35ms +[2025-07-17 16:26:12] [Rank 0] PRINT: step:9375/10000 val_loss:4.7142 train_time:2281373ms step_avg:243.35ms +[2025-07-17 16:26:14] [Rank 0] step:9381/10000 train_time:2282637ms step_avg:243.33ms +[2025-07-17 16:26:14] [Rank 0] step:9381/10000 train_time:2282637ms step_avg:243.33ms +[2025-07-17 16:26:19] [Rank 0] step:9401/10000 train_time:2287717ms step_avg:243.35ms +[2025-07-17 16:26:19] [Rank 0] step:9401/10000 train_time:2287717ms step_avg:243.35ms +[2025-07-17 16:26:24] [Rank 0] step:9421/10000 train_time:2292808ms step_avg:243.37ms +[2025-07-17 16:26:24] [Rank 0] step:9421/10000 train_time:2292808ms step_avg:243.37ms +[2025-07-17 16:26:29] [Rank 0] step:9441/10000 train_time:2297902ms step_avg:243.40ms +[2025-07-17 16:26:29] [Rank 0] step:9441/10000 train_time:2297902ms step_avg:243.40ms +[2025-07-17 16:26:34] [Rank 0] step:9461/10000 train_time:2303007ms step_avg:243.42ms +[2025-07-17 16:26:34] [Rank 0] step:9461/10000 train_time:2303007ms step_avg:243.42ms +[2025-07-17 16:26:39] [Rank 0] step:9481/10000 train_time:2308102ms step_avg:243.45ms +[2025-07-17 16:26:39] [Rank 0] step:9481/10000 train_time:2308102ms step_avg:243.45ms +[2025-07-17 16:26:49] [Rank 0] PRINT: step:9500/10000 val_loss:4.6763 train_time:2313230ms step_avg:243.50ms +[2025-07-17 16:26:49] [Rank 0] PRINT: step:9500/10000 val_loss:4.6763 train_time:2313230ms step_avg:243.50ms +[2025-07-17 16:26:49] [Rank 0] step:9501/10000 train_time:2313247ms step_avg:243.47ms +[2025-07-17 16:26:49] [Rank 0] step:9501/10000 train_time:2313247ms step_avg:243.47ms +[2025-07-17 16:26:54] [Rank 0] step:9521/10000 train_time:2318323ms step_avg:243.50ms +[2025-07-17 16:26:54] [Rank 0] step:9521/10000 train_time:2318323ms step_avg:243.50ms +[2025-07-17 16:26:59] [Rank 0] step:9541/10000 train_time:2323424ms step_avg:243.52ms +[2025-07-17 16:26:59] [Rank 0] step:9541/10000 train_time:2323424ms step_avg:243.52ms 
+[2025-07-17 16:27:04] [Rank 0] step:9561/10000 train_time:2328511ms step_avg:243.54ms +[2025-07-17 16:27:04] [Rank 0] step:9561/10000 train_time:2328511ms step_avg:243.54ms +[2025-07-17 16:27:10] [Rank 0] step:9581/10000 train_time:2333592ms step_avg:243.56ms +[2025-07-17 16:27:10] [Rank 0] step:9581/10000 train_time:2333592ms step_avg:243.56ms +[2025-07-17 16:27:15] [Rank 0] step:9601/10000 train_time:2338678ms step_avg:243.59ms +[2025-07-17 16:27:15] [Rank 0] step:9601/10000 train_time:2338678ms step_avg:243.59ms +[2025-07-17 16:27:20] [Rank 0] step:9621/10000 train_time:2343794ms step_avg:243.61ms +[2025-07-17 16:27:20] [Rank 0] step:9621/10000 train_time:2343794ms step_avg:243.61ms +[2025-07-17 16:27:26] [Rank 0] PRINT: step:9625/10000 val_loss:4.7298 train_time:2345068ms step_avg:243.64ms +[2025-07-17 16:27:26] [Rank 0] PRINT: step:9625/10000 val_loss:4.7298 train_time:2345068ms step_avg:243.64ms +[2025-07-17 16:27:30] [Rank 0] step:9641/10000 train_time:2348902ms step_avg:243.64ms +[2025-07-17 16:27:30] [Rank 0] step:9641/10000 train_time:2348902ms step_avg:243.64ms +[2025-07-17 16:27:35] [Rank 0] step:9661/10000 train_time:2354535ms step_avg:243.72ms +[2025-07-17 16:27:35] [Rank 0] step:9661/10000 train_time:2354535ms step_avg:243.72ms +[2025-07-17 16:27:40] [Rank 0] step:9681/10000 train_time:2359684ms step_avg:243.74ms +[2025-07-17 16:27:40] [Rank 0] step:9681/10000 train_time:2359684ms step_avg:243.74ms +[2025-07-17 16:27:46] [Rank 0] step:9701/10000 train_time:2364840ms step_avg:243.77ms +[2025-07-17 16:27:46] [Rank 0] step:9701/10000 train_time:2364840ms step_avg:243.77ms +[2025-07-17 16:27:51] [Rank 0] step:9721/10000 train_time:2369979ms step_avg:243.80ms +[2025-07-17 16:27:51] [Rank 0] step:9721/10000 train_time:2369979ms step_avg:243.80ms +[2025-07-17 16:27:56] [Rank 0] step:9741/10000 train_time:2375138ms step_avg:243.83ms +[2025-07-17 16:27:56] [Rank 0] step:9741/10000 train_time:2375138ms step_avg:243.83ms +[2025-07-17 16:28:03] [Rank 0] PRINT: 
step:9750/10000 val_loss:4.6033 train_time:2377707ms step_avg:243.87ms +[2025-07-17 16:28:03] [Rank 0] PRINT: step:9750/10000 val_loss:4.6033 train_time:2377707ms step_avg:243.87ms +[2025-07-17 16:28:06] [Rank 0] step:9761/10000 train_time:2380269ms step_avg:243.86ms +[2025-07-17 16:28:06] [Rank 0] step:9761/10000 train_time:2380269ms step_avg:243.86ms +[2025-07-17 16:28:11] [Rank 0] step:9781/10000 train_time:2385420ms step_avg:243.88ms +[2025-07-17 16:28:11] [Rank 0] step:9781/10000 train_time:2385420ms step_avg:243.88ms +[2025-07-17 16:28:16] [Rank 0] step:9801/10000 train_time:2390556ms step_avg:243.91ms +[2025-07-17 16:28:16] [Rank 0] step:9801/10000 train_time:2390556ms step_avg:243.91ms +[2025-07-17 16:28:21] [Rank 0] step:9821/10000 train_time:2395709ms step_avg:243.94ms +[2025-07-17 16:28:21] [Rank 0] step:9821/10000 train_time:2395709ms step_avg:243.94ms +[2025-07-17 16:28:26] [Rank 0] step:9841/10000 train_time:2400842ms step_avg:243.96ms +[2025-07-17 16:28:26] [Rank 0] step:9841/10000 train_time:2400842ms step_avg:243.96ms +[2025-07-17 16:28:32] [Rank 0] step:9861/10000 train_time:2405977ms step_avg:243.99ms +[2025-07-17 16:28:32] [Rank 0] step:9861/10000 train_time:2405977ms step_avg:243.99ms +[2025-07-17 16:28:40] [Rank 0] PRINT: step:9875/10000 val_loss:4.6553 train_time:2409836ms step_avg:244.03ms +[2025-07-17 16:28:40] [Rank 0] PRINT: step:9875/10000 val_loss:4.6553 train_time:2409836ms step_avg:244.03ms +[2025-07-17 16:28:42] [Rank 0] step:9881/10000 train_time:2411119ms step_avg:244.02ms +[2025-07-17 16:28:42] [Rank 0] step:9881/10000 train_time:2411119ms step_avg:244.02ms +[2025-07-17 16:28:47] [Rank 0] step:9901/10000 train_time:2416263ms step_avg:244.04ms +[2025-07-17 16:28:47] [Rank 0] step:9901/10000 train_time:2416263ms step_avg:244.04ms +[2025-07-17 16:28:52] [Rank 0] step:9921/10000 train_time:2421421ms step_avg:244.07ms +[2025-07-17 16:28:52] [Rank 0] step:9921/10000 train_time:2421421ms step_avg:244.07ms +[2025-07-17 16:28:57] [Rank 0] 
step:9941/10000 train_time:2426606ms step_avg:244.10ms +[2025-07-17 16:28:57] [Rank 0] step:9941/10000 train_time:2426606ms step_avg:244.10ms +[2025-07-17 16:29:02] [Rank 0] step:9961/10000 train_time:2431775ms step_avg:244.13ms +[2025-07-17 16:29:02] [Rank 0] step:9961/10000 train_time:2431775ms step_avg:244.13ms +[2025-07-17 16:29:07] [Rank 0] step:9981/10000 train_time:2436961ms step_avg:244.16ms +[2025-07-17 16:29:07] [Rank 0] step:9981/10000 train_time:2436961ms step_avg:244.16ms +[2025-07-17 16:29:12] [Rank 0] step:10000/10000 train_time:2441840ms step_avg:244.18ms +[2025-07-17 16:29:12] [Rank 0] step:10000/10000 train_time:2441840ms step_avg:244.18ms +[2025-07-17 16:29:17] [Rank 0] PRINT: step:10000/10000 val_loss:4.6509 train_time:2442106ms step_avg:244.21ms +[2025-07-17 16:29:17] [Rank 0] PRINT: step:10000/10000 val_loss:4.6509 train_time:2442106ms step_avg:244.21ms +[2025-07-17 16:29:17] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 16:29:17 2025 --- +[2025-07-17 16:29:17] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 16:29:17 2025 --- +[2025-07-17 16:29:17] [Rank 0] PRINT: Peak memory allocated: 30775 MiB reserved: 31136 MiB +[2025-07-17 16:29:17] [Rank 0] PRINT: Peak memory allocated: 30775 MiB reserved: 31136 MiB diff --git a/logs_norope/diff_modes/mode_7_param_norope_seed_43/config.json b/logs_norope/diff_modes/mode_7_param_norope_seed_43/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b505e6df8b0db41e9f3426d4c9a1f52d9f8891a2 --- /dev/null +++ b/logs_norope/diff_modes/mode_7_param_norope_seed_43/config.json @@ -0,0 +1,22 @@ +{ + "cli_args": { + "unet": false, + "seed": 43, + "optimizer_mode": 7, + "model_parameterization": "norope" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 10485760, + "train_seq_len": 49152, + 
"val_seq_len": 262144, + "num_iterations": 10000, + "cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 125, + "save_checkpoint": false + }, + "run_uuid_for_log": "e560b6a4-f6de-4a37-bc6e-f778307a5438", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_norope/diff_modes/mode_7_param_norope_seed_43/training_log_e560b6a4-f6de-4a37-bc6e-f778307a5438.txt b/logs_norope/diff_modes/mode_7_param_norope_seed_43/training_log_e560b6a4-f6de-4a37-bc6e-f778307a5438.txt new file mode 100644 index 0000000000000000000000000000000000000000..ca93f1a62ad07910035828996d0b426ee4750c6f --- /dev/null +++ b/logs_norope/diff_modes/mode_7_param_norope_seed_43/training_log_e560b6a4-f6de-4a37-bc6e-f778307a5438.txt @@ -0,0 +1,2360 @@ +[2025-07-17 22:53:03] [Rank 0] PRINT: --- Script Start: Thu Jul 17 22:53:03 2025 --- +[2025-07-17 22:53:03] [Rank 0] PRINT: --- Script Start: Thu Jul 17 22:53:03 2025 --- +[2025-07-17 22:53:03] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=7, model_parameterization='norope') +[2025-07-17 22:53:03] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=7, model_parameterization='norope') +[2025-07-17 22:53:03] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 22:53:03] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 22:53:03] [Rank 0] PRINT: Using fixed seed: 43 +[2025-07-17 22:53:03] [Rank 0] PRINT: Using fixed seed: 43 +[2025-07-17 22:53:03] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_7_param_norope_seed_43 +[2025-07-17 22:53:03] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_7_param_norope_seed_43 +[2025-07-17 22:53:03] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path 
+import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * 
num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used 
for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + 
config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 22:53:03] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + 
num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + 
run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 22:53:03] [Rank 0] PRINT: Constructing model... +[2025-07-17 22:53:03] [Rank 0] PRINT: Constructing model... +[2025-07-17 22:53:05] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 22:53:05] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 22:53:05] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 22:53:05] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 22:53:05] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 22:53:05] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 22:53:06] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 22:53:06] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 22:53:06] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 7 +[2025-07-17 22:53:06] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 7 +[2025-07-17 22:53:06] [Rank 0] PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: 0.001). +[2025-07-17 22:53:06] [Rank 0] PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: 0.001). +[2025-07-17 22:53:06] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-07-17 22:53:06] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-07-17 22:53:06] [Rank 0] PRINT: Muon optimizer is active with 46 parameters. +[2025-07-17 22:53:06] [Rank 0] PRINT: Muon optimizer is active with 46 parameters. +[2025-07-17 22:53:06] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 22:53:06] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 22:53:06] [Rank 0] PRINT: Model compilation complete. +[2025-07-17 22:53:06] [Rank 0] PRINT: Model compilation complete. +[2025-07-17 22:53:06] [Rank 0] PRINT: Starting warmup... 
+[2025-07-17 22:53:06] [Rank 0] PRINT: Starting warmup... +[2025-07-17 22:54:12] [Rank 0] PRINT: Warmup complete. +[2025-07-17 22:54:12] [Rank 0] PRINT: Warmup complete. +[2025-07-17 22:54:12] [Rank 0] PRINT: Starting training... +[2025-07-17 22:54:12] [Rank 0] PRINT: Starting training... +[2025-07-17 22:54:24] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 22:54:24] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 22:54:29] [Rank 0] step:21/10000 train_time:4828ms step_avg:229.92ms +[2025-07-17 22:54:29] [Rank 0] step:21/10000 train_time:4828ms step_avg:229.92ms +[2025-07-17 22:54:33] [Rank 0] step:41/10000 train_time:9362ms step_avg:228.34ms +[2025-07-17 22:54:33] [Rank 0] step:41/10000 train_time:9362ms step_avg:228.34ms +[2025-07-17 22:54:38] [Rank 0] step:61/10000 train_time:13899ms step_avg:227.85ms +[2025-07-17 22:54:38] [Rank 0] step:61/10000 train_time:13899ms step_avg:227.85ms +[2025-07-17 22:54:42] [Rank 0] step:81/10000 train_time:18443ms step_avg:227.69ms +[2025-07-17 22:54:42] [Rank 0] step:81/10000 train_time:18443ms step_avg:227.69ms +[2025-07-17 22:54:47] [Rank 0] step:101/10000 train_time:22988ms step_avg:227.60ms +[2025-07-17 22:54:47] [Rank 0] step:101/10000 train_time:22988ms step_avg:227.60ms +[2025-07-17 22:54:52] [Rank 0] step:121/10000 train_time:27534ms step_avg:227.56ms +[2025-07-17 22:54:52] [Rank 0] step:121/10000 train_time:27534ms step_avg:227.56ms +[2025-07-17 22:54:57] [Rank 0] PRINT: step:125/10000 val_loss:5.1449 train_time:28679ms step_avg:229.43ms +[2025-07-17 22:54:57] [Rank 0] PRINT: step:125/10000 val_loss:5.1449 train_time:28679ms step_avg:229.43ms +[2025-07-17 22:55:01] [Rank 0] step:141/10000 train_time:32084ms step_avg:227.54ms +[2025-07-17 22:55:01] [Rank 0] step:141/10000 train_time:32084ms step_avg:227.54ms +[2025-07-17 22:55:05] [Rank 0] step:161/10000 train_time:36634ms step_avg:227.54ms +[2025-07-17 22:55:05] [Rank 0] step:161/10000 
train_time:36634ms step_avg:227.54ms +[2025-07-17 22:55:10] [Rank 0] step:181/10000 train_time:41184ms step_avg:227.53ms +[2025-07-17 22:55:10] [Rank 0] step:181/10000 train_time:41184ms step_avg:227.53ms +[2025-07-17 22:55:14] [Rank 0] step:201/10000 train_time:45729ms step_avg:227.51ms +[2025-07-17 22:55:14] [Rank 0] step:201/10000 train_time:45729ms step_avg:227.51ms +[2025-07-17 22:55:19] [Rank 0] step:221/10000 train_time:50277ms step_avg:227.50ms +[2025-07-17 22:55:19] [Rank 0] step:221/10000 train_time:50277ms step_avg:227.50ms +[2025-07-17 22:55:23] [Rank 0] step:241/10000 train_time:54827ms step_avg:227.50ms +[2025-07-17 22:55:23] [Rank 0] step:241/10000 train_time:54827ms step_avg:227.50ms +[2025-07-17 22:55:30] [Rank 0] PRINT: step:250/10000 val_loss:4.6969 train_time:57105ms step_avg:228.42ms +[2025-07-17 22:55:30] [Rank 0] PRINT: step:250/10000 val_loss:4.6969 train_time:57105ms step_avg:228.42ms +[2025-07-17 22:55:32] [Rank 0] step:261/10000 train_time:59372ms step_avg:227.48ms +[2025-07-17 22:55:32] [Rank 0] step:261/10000 train_time:59372ms step_avg:227.48ms +[2025-07-17 22:55:37] [Rank 0] step:281/10000 train_time:63920ms step_avg:227.47ms +[2025-07-17 22:55:37] [Rank 0] step:281/10000 train_time:63920ms step_avg:227.47ms +[2025-07-17 22:55:41] [Rank 0] step:301/10000 train_time:68469ms step_avg:227.47ms +[2025-07-17 22:55:41] [Rank 0] step:301/10000 train_time:68469ms step_avg:227.47ms +[2025-07-17 22:55:46] [Rank 0] step:321/10000 train_time:73017ms step_avg:227.47ms +[2025-07-17 22:55:46] [Rank 0] step:321/10000 train_time:73017ms step_avg:227.47ms +[2025-07-17 22:55:50] [Rank 0] step:341/10000 train_time:77564ms step_avg:227.46ms +[2025-07-17 22:55:50] [Rank 0] step:341/10000 train_time:77564ms step_avg:227.46ms +[2025-07-17 22:55:55] [Rank 0] step:361/10000 train_time:82114ms step_avg:227.46ms +[2025-07-17 22:55:55] [Rank 0] step:361/10000 train_time:82114ms step_avg:227.46ms +[2025-07-17 22:56:03] [Rank 0] PRINT: step:375/10000 
val_loss:4.4914 train_time:85529ms step_avg:228.08ms +[2025-07-17 22:56:03] [Rank 0] PRINT: step:375/10000 val_loss:4.4914 train_time:85529ms step_avg:228.08ms +[2025-07-17 22:56:04] [Rank 0] step:381/10000 train_time:86661ms step_avg:227.46ms +[2025-07-17 22:56:04] [Rank 0] step:381/10000 train_time:86661ms step_avg:227.46ms +[2025-07-17 22:56:08] [Rank 0] step:401/10000 train_time:91211ms step_avg:227.46ms +[2025-07-17 22:56:08] [Rank 0] step:401/10000 train_time:91211ms step_avg:227.46ms +[2025-07-17 22:56:13] [Rank 0] step:421/10000 train_time:95758ms step_avg:227.45ms +[2025-07-17 22:56:13] [Rank 0] step:421/10000 train_time:95758ms step_avg:227.45ms +[2025-07-17 22:56:18] [Rank 0] step:441/10000 train_time:100307ms step_avg:227.45ms +[2025-07-17 22:56:18] [Rank 0] step:441/10000 train_time:100307ms step_avg:227.45ms +[2025-07-17 22:56:22] [Rank 0] step:461/10000 train_time:104857ms step_avg:227.45ms +[2025-07-17 22:56:22] [Rank 0] step:461/10000 train_time:104857ms step_avg:227.45ms +[2025-07-17 22:56:27] [Rank 0] step:481/10000 train_time:109408ms step_avg:227.46ms +[2025-07-17 22:56:27] [Rank 0] step:481/10000 train_time:109408ms step_avg:227.46ms +[2025-07-17 22:56:35] [Rank 0] PRINT: step:500/10000 val_loss:4.4409 train_time:113962ms step_avg:227.92ms +[2025-07-17 22:56:35] [Rank 0] PRINT: step:500/10000 val_loss:4.4409 train_time:113962ms step_avg:227.92ms +[2025-07-17 22:56:35] [Rank 0] step:501/10000 train_time:113979ms step_avg:227.50ms +[2025-07-17 22:56:35] [Rank 0] step:501/10000 train_time:113979ms step_avg:227.50ms +[2025-07-17 22:56:40] [Rank 0] step:521/10000 train_time:119021ms step_avg:228.45ms +[2025-07-17 22:56:40] [Rank 0] step:521/10000 train_time:119021ms step_avg:228.45ms +[2025-07-17 22:56:45] [Rank 0] step:541/10000 train_time:123572ms step_avg:228.41ms +[2025-07-17 22:56:45] [Rank 0] step:541/10000 train_time:123572ms step_avg:228.41ms +[2025-07-17 22:56:49] [Rank 0] step:561/10000 train_time:128122ms step_avg:228.38ms +[2025-07-17 
22:56:49] [Rank 0] step:561/10000 train_time:128122ms step_avg:228.38ms +[2025-07-17 22:56:54] [Rank 0] step:581/10000 train_time:132674ms step_avg:228.35ms +[2025-07-17 22:56:54] [Rank 0] step:581/10000 train_time:132674ms step_avg:228.35ms +[2025-07-17 22:56:59] [Rank 0] step:601/10000 train_time:137230ms step_avg:228.34ms +[2025-07-17 22:56:59] [Rank 0] step:601/10000 train_time:137230ms step_avg:228.34ms +[2025-07-17 22:57:03] [Rank 0] step:621/10000 train_time:141785ms step_avg:228.32ms +[2025-07-17 22:57:03] [Rank 0] step:621/10000 train_time:141785ms step_avg:228.32ms +[2025-07-17 22:57:08] [Rank 0] PRINT: step:625/10000 val_loss:4.4461 train_time:142931ms step_avg:228.69ms +[2025-07-17 22:57:08] [Rank 0] PRINT: step:625/10000 val_loss:4.4461 train_time:142931ms step_avg:228.69ms +[2025-07-17 22:57:12] [Rank 0] step:641/10000 train_time:146343ms step_avg:228.30ms +[2025-07-17 22:57:12] [Rank 0] step:641/10000 train_time:146343ms step_avg:228.30ms +[2025-07-17 22:57:16] [Rank 0] step:661/10000 train_time:150904ms step_avg:228.30ms +[2025-07-17 22:57:16] [Rank 0] step:661/10000 train_time:150904ms step_avg:228.30ms +[2025-07-17 22:57:21] [Rank 0] step:681/10000 train_time:155466ms step_avg:228.29ms +[2025-07-17 22:57:21] [Rank 0] step:681/10000 train_time:155466ms step_avg:228.29ms +[2025-07-17 22:57:25] [Rank 0] step:701/10000 train_time:160029ms step_avg:228.29ms +[2025-07-17 22:57:25] [Rank 0] step:701/10000 train_time:160029ms step_avg:228.29ms +[2025-07-17 22:57:30] [Rank 0] step:721/10000 train_time:164591ms step_avg:228.28ms +[2025-07-17 22:57:30] [Rank 0] step:721/10000 train_time:164591ms step_avg:228.28ms +[2025-07-17 22:57:34] [Rank 0] step:741/10000 train_time:169156ms step_avg:228.28ms +[2025-07-17 22:57:34] [Rank 0] step:741/10000 train_time:169156ms step_avg:228.28ms +[2025-07-17 22:57:41] [Rank 0] PRINT: step:750/10000 val_loss:4.5621 train_time:171458ms step_avg:228.61ms +[2025-07-17 22:57:41] [Rank 0] PRINT: step:750/10000 val_loss:4.5621 
train_time:171458ms step_avg:228.61ms +[2025-07-17 22:57:44] [Rank 0] step:761/10000 train_time:173748ms step_avg:228.32ms +[2025-07-17 22:57:44] [Rank 0] step:761/10000 train_time:173748ms step_avg:228.32ms +[2025-07-17 22:57:48] [Rank 0] step:781/10000 train_time:178345ms step_avg:228.35ms +[2025-07-17 22:57:48] [Rank 0] step:781/10000 train_time:178345ms step_avg:228.35ms +[2025-07-17 22:57:53] [Rank 0] step:801/10000 train_time:182944ms step_avg:228.39ms +[2025-07-17 22:57:53] [Rank 0] step:801/10000 train_time:182944ms step_avg:228.39ms +[2025-07-17 22:57:57] [Rank 0] step:821/10000 train_time:187546ms step_avg:228.44ms +[2025-07-17 22:57:57] [Rank 0] step:821/10000 train_time:187546ms step_avg:228.44ms +[2025-07-17 22:58:02] [Rank 0] step:841/10000 train_time:192143ms step_avg:228.47ms +[2025-07-17 22:58:02] [Rank 0] step:841/10000 train_time:192143ms step_avg:228.47ms +[2025-07-17 22:58:07] [Rank 0] step:861/10000 train_time:196741ms step_avg:228.50ms +[2025-07-17 22:58:07] [Rank 0] step:861/10000 train_time:196741ms step_avg:228.50ms +[2025-07-17 22:58:14] [Rank 0] PRINT: step:875/10000 val_loss:4.6769 train_time:200196ms step_avg:228.80ms +[2025-07-17 22:58:14] [Rank 0] PRINT: step:875/10000 val_loss:4.6769 train_time:200196ms step_avg:228.80ms +[2025-07-17 22:58:16] [Rank 0] step:881/10000 train_time:201339ms step_avg:228.53ms +[2025-07-17 22:58:16] [Rank 0] step:881/10000 train_time:201339ms step_avg:228.53ms +[2025-07-17 22:58:20] [Rank 0] step:901/10000 train_time:205942ms step_avg:228.57ms +[2025-07-17 22:58:20] [Rank 0] step:901/10000 train_time:205942ms step_avg:228.57ms +[2025-07-17 22:58:25] [Rank 0] step:921/10000 train_time:210540ms step_avg:228.60ms +[2025-07-17 22:58:25] [Rank 0] step:921/10000 train_time:210540ms step_avg:228.60ms +[2025-07-17 22:58:29] [Rank 0] step:941/10000 train_time:215144ms step_avg:228.63ms +[2025-07-17 22:58:29] [Rank 0] step:941/10000 train_time:215144ms step_avg:228.63ms +[2025-07-17 22:58:34] [Rank 0] 
step:961/10000 train_time:219744ms step_avg:228.66ms +[2025-07-17 22:58:34] [Rank 0] step:961/10000 train_time:219744ms step_avg:228.66ms +[2025-07-17 22:58:39] [Rank 0] step:981/10000 train_time:224344ms step_avg:228.69ms +[2025-07-17 22:58:39] [Rank 0] step:981/10000 train_time:224344ms step_avg:228.69ms +[2025-07-17 22:58:47] [Rank 0] PRINT: step:1000/10000 val_loss:4.7220 train_time:228949ms step_avg:228.95ms +[2025-07-17 22:58:47] [Rank 0] PRINT: step:1000/10000 val_loss:4.7220 train_time:228949ms step_avg:228.95ms +[2025-07-17 22:58:48] [Rank 0] step:1001/10000 train_time:228966ms step_avg:228.74ms +[2025-07-17 22:58:48] [Rank 0] step:1001/10000 train_time:228966ms step_avg:228.74ms +[2025-07-17 22:58:52] [Rank 0] step:1021/10000 train_time:233643ms step_avg:228.84ms +[2025-07-17 22:58:52] [Rank 0] step:1021/10000 train_time:233643ms step_avg:228.84ms +[2025-07-17 22:58:57] [Rank 0] step:1041/10000 train_time:238250ms step_avg:228.87ms +[2025-07-17 22:58:57] [Rank 0] step:1041/10000 train_time:238250ms step_avg:228.87ms +[2025-07-17 22:59:02] [Rank 0] step:1061/10000 train_time:242857ms step_avg:228.89ms +[2025-07-17 22:59:02] [Rank 0] step:1061/10000 train_time:242857ms step_avg:228.89ms +[2025-07-17 22:59:06] [Rank 0] step:1081/10000 train_time:247463ms step_avg:228.92ms +[2025-07-17 22:59:06] [Rank 0] step:1081/10000 train_time:247463ms step_avg:228.92ms +[2025-07-17 22:59:11] [Rank 0] step:1101/10000 train_time:252068ms step_avg:228.94ms +[2025-07-17 22:59:11] [Rank 0] step:1101/10000 train_time:252068ms step_avg:228.94ms +[2025-07-17 22:59:15] [Rank 0] step:1121/10000 train_time:256681ms step_avg:228.97ms +[2025-07-17 22:59:15] [Rank 0] step:1121/10000 train_time:256681ms step_avg:228.97ms +[2025-07-17 22:59:21] [Rank 0] PRINT: step:1125/10000 val_loss:4.6697 train_time:257839ms step_avg:229.19ms +[2025-07-17 22:59:21] [Rank 0] PRINT: step:1125/10000 val_loss:4.6697 train_time:257839ms step_avg:229.19ms +[2025-07-17 22:59:24] [Rank 0] step:1141/10000 
train_time:261282ms step_avg:228.99ms +[2025-07-17 22:59:24] [Rank 0] step:1141/10000 train_time:261282ms step_avg:228.99ms +[2025-07-17 22:59:29] [Rank 0] step:1161/10000 train_time:265888ms step_avg:229.02ms +[2025-07-17 22:59:29] [Rank 0] step:1161/10000 train_time:265888ms step_avg:229.02ms +[2025-07-17 22:59:34] [Rank 0] step:1181/10000 train_time:270496ms step_avg:229.04ms +[2025-07-17 22:59:34] [Rank 0] step:1181/10000 train_time:270496ms step_avg:229.04ms +[2025-07-17 22:59:38] [Rank 0] step:1201/10000 train_time:275100ms step_avg:229.06ms +[2025-07-17 22:59:38] [Rank 0] step:1201/10000 train_time:275100ms step_avg:229.06ms +[2025-07-17 22:59:43] [Rank 0] step:1221/10000 train_time:279708ms step_avg:229.08ms +[2025-07-17 22:59:43] [Rank 0] step:1221/10000 train_time:279708ms step_avg:229.08ms +[2025-07-17 22:59:47] [Rank 0] step:1241/10000 train_time:284318ms step_avg:229.10ms +[2025-07-17 22:59:47] [Rank 0] step:1241/10000 train_time:284318ms step_avg:229.10ms +[2025-07-17 22:59:54] [Rank 0] PRINT: step:1250/10000 val_loss:4.7617 train_time:286624ms step_avg:229.30ms +[2025-07-17 22:59:54] [Rank 0] PRINT: step:1250/10000 val_loss:4.7617 train_time:286624ms step_avg:229.30ms +[2025-07-17 22:59:57] [Rank 0] step:1261/10000 train_time:288917ms step_avg:229.12ms +[2025-07-17 22:59:57] [Rank 0] step:1261/10000 train_time:288917ms step_avg:229.12ms +[2025-07-17 23:00:01] [Rank 0] step:1281/10000 train_time:293524ms step_avg:229.14ms +[2025-07-17 23:00:01] [Rank 0] step:1281/10000 train_time:293524ms step_avg:229.14ms +[2025-07-17 23:00:06] [Rank 0] step:1301/10000 train_time:298132ms step_avg:229.16ms +[2025-07-17 23:00:06] [Rank 0] step:1301/10000 train_time:298132ms step_avg:229.16ms +[2025-07-17 23:00:10] [Rank 0] step:1321/10000 train_time:302738ms step_avg:229.17ms +[2025-07-17 23:00:10] [Rank 0] step:1321/10000 train_time:302738ms step_avg:229.17ms +[2025-07-17 23:00:15] [Rank 0] step:1341/10000 train_time:307341ms step_avg:229.19ms +[2025-07-17 23:00:15] 
[Rank 0] step:1341/10000 train_time:307341ms step_avg:229.19ms +[2025-07-17 23:00:20] [Rank 0] step:1361/10000 train_time:311948ms step_avg:229.20ms +[2025-07-17 23:00:20] [Rank 0] step:1361/10000 train_time:311948ms step_avg:229.20ms +[2025-07-17 23:00:27] [Rank 0] PRINT: step:1375/10000 val_loss:4.7243 train_time:315409ms step_avg:229.39ms +[2025-07-17 23:00:27] [Rank 0] PRINT: step:1375/10000 val_loss:4.7243 train_time:315409ms step_avg:229.39ms +[2025-07-17 23:00:28] [Rank 0] step:1381/10000 train_time:316553ms step_avg:229.22ms +[2025-07-17 23:00:28] [Rank 0] step:1381/10000 train_time:316553ms step_avg:229.22ms +[2025-07-17 23:00:33] [Rank 0] step:1401/10000 train_time:321161ms step_avg:229.24ms +[2025-07-17 23:00:33] [Rank 0] step:1401/10000 train_time:321161ms step_avg:229.24ms +[2025-07-17 23:00:37] [Rank 0] step:1421/10000 train_time:325771ms step_avg:229.26ms +[2025-07-17 23:00:37] [Rank 0] step:1421/10000 train_time:325771ms step_avg:229.26ms +[2025-07-17 23:00:42] [Rank 0] step:1441/10000 train_time:330381ms step_avg:229.27ms +[2025-07-17 23:00:42] [Rank 0] step:1441/10000 train_time:330381ms step_avg:229.27ms +[2025-07-17 23:00:47] [Rank 0] step:1461/10000 train_time:334993ms step_avg:229.29ms +[2025-07-17 23:00:47] [Rank 0] step:1461/10000 train_time:334993ms step_avg:229.29ms +[2025-07-17 23:00:51] [Rank 0] step:1481/10000 train_time:339605ms step_avg:229.31ms +[2025-07-17 23:00:51] [Rank 0] step:1481/10000 train_time:339605ms step_avg:229.31ms +[2025-07-17 23:01:00] [Rank 0] PRINT: step:1500/10000 val_loss:4.6727 train_time:344246ms step_avg:229.50ms +[2025-07-17 23:01:00] [Rank 0] PRINT: step:1500/10000 val_loss:4.6727 train_time:344246ms step_avg:229.50ms +[2025-07-17 23:01:00] [Rank 0] step:1501/10000 train_time:344263ms step_avg:229.36ms +[2025-07-17 23:01:00] [Rank 0] step:1501/10000 train_time:344263ms step_avg:229.36ms +[2025-07-17 23:01:05] [Rank 0] step:1521/10000 train_time:348882ms step_avg:229.38ms +[2025-07-17 23:01:05] [Rank 0] 
step:1521/10000 train_time:348882ms step_avg:229.38ms +[2025-07-17 23:01:10] [Rank 0] step:1541/10000 train_time:353618ms step_avg:229.47ms +[2025-07-17 23:01:10] [Rank 0] step:1541/10000 train_time:353618ms step_avg:229.47ms +[2025-07-17 23:01:14] [Rank 0] step:1561/10000 train_time:358261ms step_avg:229.51ms +[2025-07-17 23:01:14] [Rank 0] step:1561/10000 train_time:358261ms step_avg:229.51ms +[2025-07-17 23:01:19] [Rank 0] step:1581/10000 train_time:362905ms step_avg:229.54ms +[2025-07-17 23:01:19] [Rank 0] step:1581/10000 train_time:362905ms step_avg:229.54ms +[2025-07-17 23:01:24] [Rank 0] step:1601/10000 train_time:367544ms step_avg:229.57ms +[2025-07-17 23:01:24] [Rank 0] step:1601/10000 train_time:367544ms step_avg:229.57ms +[2025-07-17 23:01:28] [Rank 0] step:1621/10000 train_time:372186ms step_avg:229.60ms +[2025-07-17 23:01:28] [Rank 0] step:1621/10000 train_time:372186ms step_avg:229.60ms +[2025-07-17 23:01:33] [Rank 0] PRINT: step:1625/10000 val_loss:4.7249 train_time:373353ms step_avg:229.76ms +[2025-07-17 23:01:33] [Rank 0] PRINT: step:1625/10000 val_loss:4.7249 train_time:373353ms step_avg:229.76ms +[2025-07-17 23:01:37] [Rank 0] step:1641/10000 train_time:376822ms step_avg:229.63ms +[2025-07-17 23:01:37] [Rank 0] step:1641/10000 train_time:376822ms step_avg:229.63ms +[2025-07-17 23:01:42] [Rank 0] step:1661/10000 train_time:381460ms step_avg:229.66ms +[2025-07-17 23:01:42] [Rank 0] step:1661/10000 train_time:381460ms step_avg:229.66ms +[2025-07-17 23:01:46] [Rank 0] step:1681/10000 train_time:386096ms step_avg:229.68ms +[2025-07-17 23:01:46] [Rank 0] step:1681/10000 train_time:386096ms step_avg:229.68ms +[2025-07-17 23:01:51] [Rank 0] step:1701/10000 train_time:390732ms step_avg:229.71ms +[2025-07-17 23:01:51] [Rank 0] step:1701/10000 train_time:390732ms step_avg:229.71ms +[2025-07-17 23:01:56] [Rank 0] step:1721/10000 train_time:395371ms step_avg:229.73ms +[2025-07-17 23:01:56] [Rank 0] step:1721/10000 train_time:395371ms step_avg:229.73ms 
+[2025-07-17 23:02:00] [Rank 0] step:1741/10000 train_time:400009ms step_avg:229.76ms +[2025-07-17 23:02:00] [Rank 0] step:1741/10000 train_time:400009ms step_avg:229.76ms +[2025-07-17 23:02:06] [Rank 0] PRINT: step:1750/10000 val_loss:4.7634 train_time:402333ms step_avg:229.90ms +[2025-07-17 23:02:06] [Rank 0] PRINT: step:1750/10000 val_loss:4.7634 train_time:402333ms step_avg:229.90ms +[2025-07-17 23:02:09] [Rank 0] step:1761/10000 train_time:404647ms step_avg:229.78ms +[2025-07-17 23:02:09] [Rank 0] step:1761/10000 train_time:404647ms step_avg:229.78ms +[2025-07-17 23:02:14] [Rank 0] step:1781/10000 train_time:409286ms step_avg:229.81ms +[2025-07-17 23:02:14] [Rank 0] step:1781/10000 train_time:409286ms step_avg:229.81ms +[2025-07-17 23:02:18] [Rank 0] step:1801/10000 train_time:413923ms step_avg:229.83ms +[2025-07-17 23:02:18] [Rank 0] step:1801/10000 train_time:413923ms step_avg:229.83ms +[2025-07-17 23:02:23] [Rank 0] step:1821/10000 train_time:418563ms step_avg:229.85ms +[2025-07-17 23:02:23] [Rank 0] step:1821/10000 train_time:418563ms step_avg:229.85ms +[2025-07-17 23:02:27] [Rank 0] step:1841/10000 train_time:423200ms step_avg:229.87ms +[2025-07-17 23:02:27] [Rank 0] step:1841/10000 train_time:423200ms step_avg:229.87ms +[2025-07-17 23:02:32] [Rank 0] step:1861/10000 train_time:427842ms step_avg:229.90ms +[2025-07-17 23:02:32] [Rank 0] step:1861/10000 train_time:427842ms step_avg:229.90ms +[2025-07-17 23:02:40] [Rank 0] PRINT: step:1875/10000 val_loss:4.7084 train_time:431326ms step_avg:230.04ms +[2025-07-17 23:02:40] [Rank 0] PRINT: step:1875/10000 val_loss:4.7084 train_time:431326ms step_avg:230.04ms +[2025-07-17 23:02:41] [Rank 0] step:1881/10000 train_time:432478ms step_avg:229.92ms +[2025-07-17 23:02:41] [Rank 0] step:1881/10000 train_time:432478ms step_avg:229.92ms +[2025-07-17 23:02:46] [Rank 0] step:1901/10000 train_time:437112ms step_avg:229.94ms +[2025-07-17 23:02:46] [Rank 0] step:1901/10000 train_time:437112ms step_avg:229.94ms +[2025-07-17 
23:02:50] [Rank 0] step:1921/10000 train_time:441747ms step_avg:229.96ms +[2025-07-17 23:02:50] [Rank 0] step:1921/10000 train_time:441747ms step_avg:229.96ms +[2025-07-17 23:02:55] [Rank 0] step:1941/10000 train_time:446378ms step_avg:229.97ms +[2025-07-17 23:02:55] [Rank 0] step:1941/10000 train_time:446378ms step_avg:229.97ms +[2025-07-17 23:03:00] [Rank 0] step:1961/10000 train_time:451014ms step_avg:229.99ms +[2025-07-17 23:03:00] [Rank 0] step:1961/10000 train_time:451014ms step_avg:229.99ms +[2025-07-17 23:03:04] [Rank 0] step:1981/10000 train_time:455650ms step_avg:230.01ms +[2025-07-17 23:03:04] [Rank 0] step:1981/10000 train_time:455650ms step_avg:230.01ms +[2025-07-17 23:03:13] [Rank 0] PRINT: step:2000/10000 val_loss:4.7673 train_time:460288ms step_avg:230.14ms +[2025-07-17 23:03:13] [Rank 0] PRINT: step:2000/10000 val_loss:4.7673 train_time:460288ms step_avg:230.14ms +[2025-07-17 23:03:13] [Rank 0] step:2001/10000 train_time:460304ms step_avg:230.04ms +[2025-07-17 23:03:13] [Rank 0] step:2001/10000 train_time:460304ms step_avg:230.04ms +[2025-07-17 23:03:18] [Rank 0] step:2021/10000 train_time:464920ms step_avg:230.04ms +[2025-07-17 23:03:18] [Rank 0] step:2021/10000 train_time:464920ms step_avg:230.04ms +[2025-07-17 23:03:22] [Rank 0] step:2041/10000 train_time:469654ms step_avg:230.11ms +[2025-07-17 23:03:22] [Rank 0] step:2041/10000 train_time:469654ms step_avg:230.11ms +[2025-07-17 23:03:27] [Rank 0] step:2061/10000 train_time:474287ms step_avg:230.12ms +[2025-07-17 23:03:27] [Rank 0] step:2061/10000 train_time:474287ms step_avg:230.12ms +[2025-07-17 23:03:32] [Rank 0] step:2081/10000 train_time:478919ms step_avg:230.14ms +[2025-07-17 23:03:32] [Rank 0] step:2081/10000 train_time:478919ms step_avg:230.14ms +[2025-07-17 23:03:36] [Rank 0] step:2101/10000 train_time:483554ms step_avg:230.15ms +[2025-07-17 23:03:36] [Rank 0] step:2101/10000 train_time:483554ms step_avg:230.15ms +[2025-07-17 23:03:41] [Rank 0] step:2121/10000 train_time:488191ms 
step_avg:230.17ms +[2025-07-17 23:03:41] [Rank 0] step:2121/10000 train_time:488191ms step_avg:230.17ms +[2025-07-17 23:03:46] [Rank 0] PRINT: step:2125/10000 val_loss:4.7548 train_time:489358ms step_avg:230.29ms +[2025-07-17 23:03:46] [Rank 0] PRINT: step:2125/10000 val_loss:4.7548 train_time:489358ms step_avg:230.29ms +[2025-07-17 23:03:50] [Rank 0] step:2141/10000 train_time:492825ms step_avg:230.18ms +[2025-07-17 23:03:50] [Rank 0] step:2141/10000 train_time:492825ms step_avg:230.18ms +[2025-07-17 23:03:55] [Rank 0] step:2161/10000 train_time:497462ms step_avg:230.20ms +[2025-07-17 23:03:55] [Rank 0] step:2161/10000 train_time:497462ms step_avg:230.20ms +[2025-07-17 23:03:59] [Rank 0] step:2181/10000 train_time:502098ms step_avg:230.21ms +[2025-07-17 23:03:59] [Rank 0] step:2181/10000 train_time:502098ms step_avg:230.21ms +[2025-07-17 23:04:04] [Rank 0] step:2201/10000 train_time:506739ms step_avg:230.23ms +[2025-07-17 23:04:04] [Rank 0] step:2201/10000 train_time:506739ms step_avg:230.23ms +[2025-07-17 23:04:09] [Rank 0] step:2221/10000 train_time:511376ms step_avg:230.25ms +[2025-07-17 23:04:09] [Rank 0] step:2221/10000 train_time:511376ms step_avg:230.25ms +[2025-07-17 23:04:13] [Rank 0] step:2241/10000 train_time:516107ms step_avg:230.30ms +[2025-07-17 23:04:13] [Rank 0] step:2241/10000 train_time:516107ms step_avg:230.30ms +[2025-07-17 23:04:20] [Rank 0] PRINT: step:2250/10000 val_loss:4.2057 train_time:518489ms step_avg:230.44ms +[2025-07-17 23:04:20] [Rank 0] PRINT: step:2250/10000 val_loss:4.2057 train_time:518489ms step_avg:230.44ms +[2025-07-17 23:04:23] [Rank 0] step:2261/10000 train_time:520850ms step_avg:230.36ms +[2025-07-17 23:04:23] [Rank 0] step:2261/10000 train_time:520850ms step_avg:230.36ms +[2025-07-17 23:04:27] [Rank 0] step:2281/10000 train_time:525588ms step_avg:230.42ms +[2025-07-17 23:04:27] [Rank 0] step:2281/10000 train_time:525588ms step_avg:230.42ms +[2025-07-17 23:04:32] [Rank 0] step:2301/10000 train_time:530325ms 
step_avg:230.48ms +[2025-07-17 23:04:32] [Rank 0] step:2301/10000 train_time:530325ms step_avg:230.48ms +[2025-07-17 23:04:37] [Rank 0] step:2321/10000 train_time:535058ms step_avg:230.53ms +[2025-07-17 23:04:37] [Rank 0] step:2321/10000 train_time:535058ms step_avg:230.53ms +[2025-07-17 23:04:42] [Rank 0] step:2341/10000 train_time:539794ms step_avg:230.58ms +[2025-07-17 23:04:42] [Rank 0] step:2341/10000 train_time:539794ms step_avg:230.58ms +[2025-07-17 23:04:46] [Rank 0] step:2361/10000 train_time:544531ms step_avg:230.64ms +[2025-07-17 23:04:46] [Rank 0] step:2361/10000 train_time:544531ms step_avg:230.64ms +[2025-07-17 23:04:54] [Rank 0] PRINT: step:2375/10000 val_loss:4.4372 train_time:548087ms step_avg:230.77ms +[2025-07-17 23:04:54] [Rank 0] PRINT: step:2375/10000 val_loss:4.4372 train_time:548087ms step_avg:230.77ms +[2025-07-17 23:04:56] [Rank 0] step:2381/10000 train_time:549266ms step_avg:230.69ms +[2025-07-17 23:04:56] [Rank 0] step:2381/10000 train_time:549266ms step_avg:230.69ms +[2025-07-17 23:05:00] [Rank 0] step:2401/10000 train_time:553998ms step_avg:230.74ms +[2025-07-17 23:05:00] [Rank 0] step:2401/10000 train_time:553998ms step_avg:230.74ms +[2025-07-17 23:05:05] [Rank 0] step:2421/10000 train_time:558736ms step_avg:230.79ms +[2025-07-17 23:05:05] [Rank 0] step:2421/10000 train_time:558736ms step_avg:230.79ms +[2025-07-17 23:05:10] [Rank 0] step:2441/10000 train_time:563475ms step_avg:230.84ms +[2025-07-17 23:05:10] [Rank 0] step:2441/10000 train_time:563475ms step_avg:230.84ms +[2025-07-17 23:05:15] [Rank 0] step:2461/10000 train_time:568215ms step_avg:230.89ms +[2025-07-17 23:05:15] [Rank 0] step:2461/10000 train_time:568215ms step_avg:230.89ms +[2025-07-17 23:05:19] [Rank 0] step:2481/10000 train_time:572956ms step_avg:230.94ms +[2025-07-17 23:05:19] [Rank 0] step:2481/10000 train_time:572956ms step_avg:230.94ms +[2025-07-17 23:05:28] [Rank 0] PRINT: step:2500/10000 val_loss:4.2863 train_time:577703ms step_avg:231.08ms +[2025-07-17 
23:05:28] [Rank 0] PRINT: step:2500/10000 val_loss:4.2863 train_time:577703ms step_avg:231.08ms +[2025-07-17 23:05:29] [Rank 0] step:2501/10000 train_time:577720ms step_avg:231.00ms +[2025-07-17 23:05:29] [Rank 0] step:2501/10000 train_time:577720ms step_avg:231.00ms +[2025-07-17 23:05:33] [Rank 0] step:2521/10000 train_time:582439ms step_avg:231.03ms +[2025-07-17 23:05:33] [Rank 0] step:2521/10000 train_time:582439ms step_avg:231.03ms +[2025-07-17 23:05:38] [Rank 0] step:2541/10000 train_time:587183ms step_avg:231.08ms +[2025-07-17 23:05:38] [Rank 0] step:2541/10000 train_time:587183ms step_avg:231.08ms +[2025-07-17 23:05:43] [Rank 0] step:2561/10000 train_time:591927ms step_avg:231.13ms +[2025-07-17 23:05:43] [Rank 0] step:2561/10000 train_time:591927ms step_avg:231.13ms +[2025-07-17 23:05:48] [Rank 0] step:2581/10000 train_time:596673ms step_avg:231.18ms +[2025-07-17 23:05:48] [Rank 0] step:2581/10000 train_time:596673ms step_avg:231.18ms +[2025-07-17 23:05:52] [Rank 0] step:2601/10000 train_time:601420ms step_avg:231.23ms +[2025-07-17 23:05:52] [Rank 0] step:2601/10000 train_time:601420ms step_avg:231.23ms +[2025-07-17 23:05:57] [Rank 0] step:2621/10000 train_time:606171ms step_avg:231.27ms +[2025-07-17 23:05:57] [Rank 0] step:2621/10000 train_time:606171ms step_avg:231.27ms +[2025-07-17 23:06:02] [Rank 0] PRINT: step:2625/10000 val_loss:4.2494 train_time:607366ms step_avg:231.38ms +[2025-07-17 23:06:02] [Rank 0] PRINT: step:2625/10000 val_loss:4.2494 train_time:607366ms step_avg:231.38ms +[2025-07-17 23:06:06] [Rank 0] step:2641/10000 train_time:610918ms step_avg:231.32ms +[2025-07-17 23:06:06] [Rank 0] step:2641/10000 train_time:610918ms step_avg:231.32ms +[2025-07-17 23:06:11] [Rank 0] step:2661/10000 train_time:615666ms step_avg:231.37ms +[2025-07-17 23:06:11] [Rank 0] step:2661/10000 train_time:615666ms step_avg:231.37ms +[2025-07-17 23:06:15] [Rank 0] step:2681/10000 train_time:620416ms step_avg:231.41ms +[2025-07-17 23:06:15] [Rank 0] step:2681/10000 
train_time:620416ms step_avg:231.41ms +[2025-07-17 23:06:20] [Rank 0] step:2701/10000 train_time:625168ms step_avg:231.46ms +[2025-07-17 23:06:20] [Rank 0] step:2701/10000 train_time:625168ms step_avg:231.46ms +[2025-07-17 23:06:25] [Rank 0] step:2721/10000 train_time:629914ms step_avg:231.50ms +[2025-07-17 23:06:25] [Rank 0] step:2721/10000 train_time:629914ms step_avg:231.50ms +[2025-07-17 23:06:30] [Rank 0] step:2741/10000 train_time:634666ms step_avg:231.55ms +[2025-07-17 23:06:30] [Rank 0] step:2741/10000 train_time:634666ms step_avg:231.55ms +[2025-07-17 23:06:36] [Rank 0] PRINT: step:2750/10000 val_loss:4.3262 train_time:637049ms step_avg:231.65ms +[2025-07-17 23:06:36] [Rank 0] PRINT: step:2750/10000 val_loss:4.3262 train_time:637049ms step_avg:231.65ms +[2025-07-17 23:06:39] [Rank 0] step:2761/10000 train_time:639413ms step_avg:231.59ms +[2025-07-17 23:06:39] [Rank 0] step:2761/10000 train_time:639413ms step_avg:231.59ms +[2025-07-17 23:06:44] [Rank 0] step:2781/10000 train_time:644198ms step_avg:231.64ms +[2025-07-17 23:06:44] [Rank 0] step:2781/10000 train_time:644198ms step_avg:231.64ms +[2025-07-17 23:06:49] [Rank 0] step:2801/10000 train_time:648949ms step_avg:231.68ms +[2025-07-17 23:06:49] [Rank 0] step:2801/10000 train_time:648949ms step_avg:231.68ms +[2025-07-17 23:06:53] [Rank 0] step:2821/10000 train_time:653701ms step_avg:231.73ms +[2025-07-17 23:06:53] [Rank 0] step:2821/10000 train_time:653701ms step_avg:231.73ms +[2025-07-17 23:06:58] [Rank 0] step:2841/10000 train_time:658450ms step_avg:231.77ms +[2025-07-17 23:06:58] [Rank 0] step:2841/10000 train_time:658450ms step_avg:231.77ms +[2025-07-17 23:07:03] [Rank 0] step:2861/10000 train_time:663201ms step_avg:231.81ms +[2025-07-17 23:07:03] [Rank 0] step:2861/10000 train_time:663201ms step_avg:231.81ms +[2025-07-17 23:07:11] [Rank 0] PRINT: step:2875/10000 val_loss:4.2546 train_time:666770ms step_avg:231.92ms +[2025-07-17 23:07:11] [Rank 0] PRINT: step:2875/10000 val_loss:4.2546 
train_time:666770ms step_avg:231.92ms +[2025-07-17 23:07:12] [Rank 0] step:2881/10000 train_time:667951ms step_avg:231.85ms +[2025-07-17 23:07:12] [Rank 0] step:2881/10000 train_time:667951ms step_avg:231.85ms +[2025-07-17 23:07:17] [Rank 0] step:2901/10000 train_time:672700ms step_avg:231.89ms +[2025-07-17 23:07:17] [Rank 0] step:2901/10000 train_time:672700ms step_avg:231.89ms +[2025-07-17 23:07:22] [Rank 0] step:2921/10000 train_time:677449ms step_avg:231.92ms +[2025-07-17 23:07:22] [Rank 0] step:2921/10000 train_time:677449ms step_avg:231.92ms +[2025-07-17 23:07:26] [Rank 0] step:2941/10000 train_time:682200ms step_avg:231.96ms +[2025-07-17 23:07:26] [Rank 0] step:2941/10000 train_time:682200ms step_avg:231.96ms +[2025-07-17 23:07:31] [Rank 0] step:2961/10000 train_time:686950ms step_avg:232.00ms +[2025-07-17 23:07:31] [Rank 0] step:2961/10000 train_time:686950ms step_avg:232.00ms +[2025-07-17 23:07:36] [Rank 0] step:2981/10000 train_time:691717ms step_avg:232.04ms +[2025-07-17 23:07:36] [Rank 0] step:2981/10000 train_time:691717ms step_avg:232.04ms +[2025-07-17 23:07:45] [Rank 0] PRINT: step:3000/10000 val_loss:4.2720 train_time:696490ms step_avg:232.16ms +[2025-07-17 23:07:45] [Rank 0] PRINT: step:3000/10000 val_loss:4.2720 train_time:696490ms step_avg:232.16ms +[2025-07-17 23:07:45] [Rank 0] step:3001/10000 train_time:696507ms step_avg:232.09ms +[2025-07-17 23:07:45] [Rank 0] step:3001/10000 train_time:696507ms step_avg:232.09ms +[2025-07-17 23:07:50] [Rank 0] step:3021/10000 train_time:701259ms step_avg:232.13ms +[2025-07-17 23:07:50] [Rank 0] step:3021/10000 train_time:701259ms step_avg:232.13ms +[2025-07-17 23:07:55] [Rank 0] step:3041/10000 train_time:706033ms step_avg:232.17ms +[2025-07-17 23:07:55] [Rank 0] step:3041/10000 train_time:706033ms step_avg:232.17ms +[2025-07-17 23:08:00] [Rank 0] step:3061/10000 train_time:711314ms step_avg:232.38ms +[2025-07-17 23:08:00] [Rank 0] step:3061/10000 train_time:711314ms step_avg:232.38ms +[2025-07-17 23:08:05] 
[Rank 0] step:3081/10000 train_time:716088ms step_avg:232.42ms +[2025-07-17 23:08:05] [Rank 0] step:3081/10000 train_time:716088ms step_avg:232.42ms +[2025-07-17 23:08:10] [Rank 0] step:3101/10000 train_time:720861ms step_avg:232.46ms +[2025-07-17 23:08:10] [Rank 0] step:3101/10000 train_time:720861ms step_avg:232.46ms +[2025-07-17 23:08:14] [Rank 0] step:3121/10000 train_time:725633ms step_avg:232.50ms +[2025-07-17 23:08:14] [Rank 0] step:3121/10000 train_time:725633ms step_avg:232.50ms +[2025-07-17 23:08:20] [Rank 0] PRINT: step:3125/10000 val_loss:4.3236 train_time:726832ms step_avg:232.59ms +[2025-07-17 23:08:20] [Rank 0] PRINT: step:3125/10000 val_loss:4.3236 train_time:726832ms step_avg:232.59ms +[2025-07-17 23:08:24] [Rank 0] step:3141/10000 train_time:730406ms step_avg:232.54ms +[2025-07-17 23:08:24] [Rank 0] step:3141/10000 train_time:730406ms step_avg:232.54ms +[2025-07-17 23:08:29] [Rank 0] step:3161/10000 train_time:735183ms step_avg:232.58ms +[2025-07-17 23:08:29] [Rank 0] step:3161/10000 train_time:735183ms step_avg:232.58ms +[2025-07-17 23:08:33] [Rank 0] step:3181/10000 train_time:739957ms step_avg:232.62ms +[2025-07-17 23:08:33] [Rank 0] step:3181/10000 train_time:739957ms step_avg:232.62ms +[2025-07-17 23:08:38] [Rank 0] step:3201/10000 train_time:744728ms step_avg:232.65ms +[2025-07-17 23:08:38] [Rank 0] step:3201/10000 train_time:744728ms step_avg:232.65ms +[2025-07-17 23:08:43] [Rank 0] step:3221/10000 train_time:749501ms step_avg:232.69ms +[2025-07-17 23:08:43] [Rank 0] step:3221/10000 train_time:749501ms step_avg:232.69ms +[2025-07-17 23:08:48] [Rank 0] step:3241/10000 train_time:754272ms step_avg:232.73ms +[2025-07-17 23:08:48] [Rank 0] step:3241/10000 train_time:754272ms step_avg:232.73ms +[2025-07-17 23:08:54] [Rank 0] PRINT: step:3250/10000 val_loss:4.1966 train_time:756666ms step_avg:232.82ms +[2025-07-17 23:08:54] [Rank 0] PRINT: step:3250/10000 val_loss:4.1966 train_time:756666ms step_avg:232.82ms +[2025-07-17 23:08:57] [Rank 0] 
step:3261/10000 train_time:759044ms step_avg:232.76ms +[2025-07-17 23:08:57] [Rank 0] step:3261/10000 train_time:759044ms step_avg:232.76ms +[2025-07-17 23:09:02] [Rank 0] step:3281/10000 train_time:763814ms step_avg:232.80ms +[2025-07-17 23:09:02] [Rank 0] step:3281/10000 train_time:763814ms step_avg:232.80ms +[2025-07-17 23:09:07] [Rank 0] step:3301/10000 train_time:768586ms step_avg:232.83ms +[2025-07-17 23:09:07] [Rank 0] step:3301/10000 train_time:768586ms step_avg:232.83ms +[2025-07-17 23:09:11] [Rank 0] step:3321/10000 train_time:773362ms step_avg:232.87ms +[2025-07-17 23:09:11] [Rank 0] step:3321/10000 train_time:773362ms step_avg:232.87ms +[2025-07-17 23:09:16] [Rank 0] step:3341/10000 train_time:778135ms step_avg:232.90ms +[2025-07-17 23:09:16] [Rank 0] step:3341/10000 train_time:778135ms step_avg:232.90ms +[2025-07-17 23:09:21] [Rank 0] step:3361/10000 train_time:782909ms step_avg:232.94ms +[2025-07-17 23:09:21] [Rank 0] step:3361/10000 train_time:782909ms step_avg:232.94ms +[2025-07-17 23:09:29] [Rank 0] PRINT: step:3375/10000 val_loss:4.3266 train_time:786494ms step_avg:233.04ms +[2025-07-17 23:09:29] [Rank 0] PRINT: step:3375/10000 val_loss:4.3266 train_time:786494ms step_avg:233.04ms +[2025-07-17 23:09:30] [Rank 0] step:3381/10000 train_time:787681ms step_avg:232.97ms +[2025-07-17 23:09:30] [Rank 0] step:3381/10000 train_time:787681ms step_avg:232.97ms +[2025-07-17 23:09:35] [Rank 0] step:3401/10000 train_time:792453ms step_avg:233.01ms +[2025-07-17 23:09:35] [Rank 0] step:3401/10000 train_time:792453ms step_avg:233.01ms +[2025-07-17 23:09:40] [Rank 0] step:3421/10000 train_time:797226ms step_avg:233.04ms +[2025-07-17 23:09:40] [Rank 0] step:3421/10000 train_time:797226ms step_avg:233.04ms +[2025-07-17 23:09:45] [Rank 0] step:3441/10000 train_time:801997ms step_avg:233.07ms +[2025-07-17 23:09:45] [Rank 0] step:3441/10000 train_time:801997ms step_avg:233.07ms +[2025-07-17 23:09:49] [Rank 0] step:3461/10000 train_time:806770ms step_avg:233.10ms 
+[2025-07-17 23:09:49] [Rank 0] step:3461/10000 train_time:806770ms step_avg:233.10ms +[2025-07-17 23:09:54] [Rank 0] step:3481/10000 train_time:811545ms step_avg:233.14ms +[2025-07-17 23:09:54] [Rank 0] step:3481/10000 train_time:811545ms step_avg:233.14ms +[2025-07-17 23:10:03] [Rank 0] PRINT: step:3500/10000 val_loss:4.3437 train_time:816321ms step_avg:233.23ms +[2025-07-17 23:10:03] [Rank 0] PRINT: step:3500/10000 val_loss:4.3437 train_time:816321ms step_avg:233.23ms +[2025-07-17 23:10:03] [Rank 0] step:3501/10000 train_time:816337ms step_avg:233.17ms +[2025-07-17 23:10:03] [Rank 0] step:3501/10000 train_time:816337ms step_avg:233.17ms +[2025-07-17 23:10:08] [Rank 0] step:3521/10000 train_time:821081ms step_avg:233.20ms +[2025-07-17 23:10:08] [Rank 0] step:3521/10000 train_time:821081ms step_avg:233.20ms +[2025-07-17 23:10:13] [Rank 0] step:3541/10000 train_time:825846ms step_avg:233.22ms +[2025-07-17 23:10:13] [Rank 0] step:3541/10000 train_time:825846ms step_avg:233.22ms +[2025-07-17 23:10:18] [Rank 0] step:3561/10000 train_time:831108ms step_avg:233.39ms +[2025-07-17 23:10:18] [Rank 0] step:3561/10000 train_time:831108ms step_avg:233.39ms +[2025-07-17 23:10:23] [Rank 0] step:3581/10000 train_time:835869ms step_avg:233.42ms +[2025-07-17 23:10:23] [Rank 0] step:3581/10000 train_time:835869ms step_avg:233.42ms +[2025-07-17 23:10:28] [Rank 0] step:3601/10000 train_time:840634ms step_avg:233.44ms +[2025-07-17 23:10:28] [Rank 0] step:3601/10000 train_time:840634ms step_avg:233.44ms +[2025-07-17 23:10:33] [Rank 0] step:3621/10000 train_time:845396ms step_avg:233.47ms +[2025-07-17 23:10:33] [Rank 0] step:3621/10000 train_time:845396ms step_avg:233.47ms +[2025-07-17 23:10:38] [Rank 0] PRINT: step:3625/10000 val_loss:4.4096 train_time:846595ms step_avg:233.54ms +[2025-07-17 23:10:38] [Rank 0] PRINT: step:3625/10000 val_loss:4.4096 train_time:846595ms step_avg:233.54ms +[2025-07-17 23:10:42] [Rank 0] step:3641/10000 train_time:850159ms step_avg:233.50ms +[2025-07-17 
23:10:42] [Rank 0] step:3641/10000 train_time:850159ms step_avg:233.50ms +[2025-07-17 23:10:47] [Rank 0] step:3661/10000 train_time:854922ms step_avg:233.52ms +[2025-07-17 23:10:47] [Rank 0] step:3661/10000 train_time:854922ms step_avg:233.52ms +[2025-07-17 23:10:51] [Rank 0] step:3681/10000 train_time:859685ms step_avg:233.55ms +[2025-07-17 23:10:51] [Rank 0] step:3681/10000 train_time:859685ms step_avg:233.55ms +[2025-07-17 23:10:56] [Rank 0] step:3701/10000 train_time:864450ms step_avg:233.57ms +[2025-07-17 23:10:56] [Rank 0] step:3701/10000 train_time:864450ms step_avg:233.57ms +[2025-07-17 23:11:01] [Rank 0] step:3721/10000 train_time:869284ms step_avg:233.62ms +[2025-07-17 23:11:01] [Rank 0] step:3721/10000 train_time:869284ms step_avg:233.62ms +[2025-07-17 23:11:06] [Rank 0] step:3741/10000 train_time:874141ms step_avg:233.67ms +[2025-07-17 23:11:06] [Rank 0] step:3741/10000 train_time:874141ms step_avg:233.67ms +[2025-07-17 23:11:12] [Rank 0] PRINT: step:3750/10000 val_loss:4.3050 train_time:876577ms step_avg:233.75ms +[2025-07-17 23:11:12] [Rank 0] PRINT: step:3750/10000 val_loss:4.3050 train_time:876577ms step_avg:233.75ms +[2025-07-17 23:11:15] [Rank 0] step:3761/10000 train_time:878995ms step_avg:233.71ms +[2025-07-17 23:11:15] [Rank 0] step:3761/10000 train_time:878995ms step_avg:233.71ms +[2025-07-17 23:11:20] [Rank 0] step:3781/10000 train_time:883850ms step_avg:233.76ms +[2025-07-17 23:11:20] [Rank 0] step:3781/10000 train_time:883850ms step_avg:233.76ms +[2025-07-17 23:11:25] [Rank 0] step:3801/10000 train_time:888705ms step_avg:233.81ms +[2025-07-17 23:11:25] [Rank 0] step:3801/10000 train_time:888705ms step_avg:233.81ms +[2025-07-17 23:11:29] [Rank 0] step:3821/10000 train_time:893560ms step_avg:233.85ms +[2025-07-17 23:11:29] [Rank 0] step:3821/10000 train_time:893560ms step_avg:233.85ms +[2025-07-17 23:11:34] [Rank 0] step:3841/10000 train_time:898414ms step_avg:233.90ms +[2025-07-17 23:11:34] [Rank 0] step:3841/10000 train_time:898414ms 
step_avg:233.90ms +[2025-07-17 23:11:39] [Rank 0] step:3861/10000 train_time:903267ms step_avg:233.95ms +[2025-07-17 23:11:39] [Rank 0] step:3861/10000 train_time:903267ms step_avg:233.95ms +[2025-07-17 23:11:47] [Rank 0] PRINT: step:3875/10000 val_loss:4.1074 train_time:906911ms step_avg:234.04ms +[2025-07-17 23:11:47] [Rank 0] PRINT: step:3875/10000 val_loss:4.1074 train_time:906911ms step_avg:234.04ms +[2025-07-17 23:11:49] [Rank 0] step:3881/10000 train_time:908119ms step_avg:233.99ms +[2025-07-17 23:11:49] [Rank 0] step:3881/10000 train_time:908119ms step_avg:233.99ms +[2025-07-17 23:11:54] [Rank 0] step:3901/10000 train_time:912971ms step_avg:234.04ms +[2025-07-17 23:11:54] [Rank 0] step:3901/10000 train_time:912971ms step_avg:234.04ms +[2025-07-17 23:11:58] [Rank 0] step:3921/10000 train_time:917826ms step_avg:234.08ms +[2025-07-17 23:11:58] [Rank 0] step:3921/10000 train_time:917826ms step_avg:234.08ms +[2025-07-17 23:12:03] [Rank 0] step:3941/10000 train_time:922686ms step_avg:234.12ms +[2025-07-17 23:12:03] [Rank 0] step:3941/10000 train_time:922686ms step_avg:234.12ms +[2025-07-17 23:12:08] [Rank 0] step:3961/10000 train_time:927543ms step_avg:234.17ms +[2025-07-17 23:12:08] [Rank 0] step:3961/10000 train_time:927543ms step_avg:234.17ms +[2025-07-17 23:12:13] [Rank 0] step:3981/10000 train_time:932400ms step_avg:234.21ms +[2025-07-17 23:12:13] [Rank 0] step:3981/10000 train_time:932400ms step_avg:234.21ms +[2025-07-17 23:12:22] [Rank 0] PRINT: step:4000/10000 val_loss:4.4661 train_time:937259ms step_avg:234.31ms +[2025-07-17 23:12:22] [Rank 0] PRINT: step:4000/10000 val_loss:4.4661 train_time:937259ms step_avg:234.31ms +[2025-07-17 23:12:22] [Rank 0] step:4001/10000 train_time:937276ms step_avg:234.26ms +[2025-07-17 23:12:22] [Rank 0] step:4001/10000 train_time:937276ms step_avg:234.26ms +[2025-07-17 23:12:27] [Rank 0] step:4021/10000 train_time:942115ms step_avg:234.30ms +[2025-07-17 23:12:27] [Rank 0] step:4021/10000 train_time:942115ms 
step_avg:234.30ms +[2025-07-17 23:12:32] [Rank 0] step:4041/10000 train_time:946971ms step_avg:234.34ms +[2025-07-17 23:12:32] [Rank 0] step:4041/10000 train_time:946971ms step_avg:234.34ms +[2025-07-17 23:12:37] [Rank 0] step:4061/10000 train_time:951827ms step_avg:234.38ms +[2025-07-17 23:12:37] [Rank 0] step:4061/10000 train_time:951827ms step_avg:234.38ms +[2025-07-17 23:12:42] [Rank 0] step:4081/10000 train_time:956770ms step_avg:234.44ms +[2025-07-17 23:12:42] [Rank 0] step:4081/10000 train_time:956770ms step_avg:234.44ms +[2025-07-17 23:12:47] [Rank 0] step:4101/10000 train_time:961628ms step_avg:234.49ms +[2025-07-17 23:12:47] [Rank 0] step:4101/10000 train_time:961628ms step_avg:234.49ms +[2025-07-17 23:12:52] [Rank 0] step:4121/10000 train_time:966484ms step_avg:234.53ms +[2025-07-17 23:12:52] [Rank 0] step:4121/10000 train_time:966484ms step_avg:234.53ms +[2025-07-17 23:12:57] [Rank 0] PRINT: step:4125/10000 val_loss:4.4763 train_time:967704ms step_avg:234.59ms +[2025-07-17 23:12:57] [Rank 0] PRINT: step:4125/10000 val_loss:4.4763 train_time:967704ms step_avg:234.59ms +[2025-07-17 23:13:01] [Rank 0] step:4141/10000 train_time:971335ms step_avg:234.57ms +[2025-07-17 23:13:01] [Rank 0] step:4141/10000 train_time:971335ms step_avg:234.57ms +[2025-07-17 23:13:06] [Rank 0] step:4161/10000 train_time:976188ms step_avg:234.60ms +[2025-07-17 23:13:06] [Rank 0] step:4161/10000 train_time:976188ms step_avg:234.60ms +[2025-07-17 23:13:10] [Rank 0] step:4181/10000 train_time:981040ms step_avg:234.64ms +[2025-07-17 23:13:10] [Rank 0] step:4181/10000 train_time:981040ms step_avg:234.64ms +[2025-07-17 23:13:15] [Rank 0] step:4201/10000 train_time:985894ms step_avg:234.68ms +[2025-07-17 23:13:15] [Rank 0] step:4201/10000 train_time:985894ms step_avg:234.68ms +[2025-07-17 23:13:20] [Rank 0] step:4221/10000 train_time:990748ms step_avg:234.72ms +[2025-07-17 23:13:20] [Rank 0] step:4221/10000 train_time:990748ms step_avg:234.72ms +[2025-07-17 23:13:25] [Rank 0] 
step:4241/10000 train_time:995603ms step_avg:234.76ms +[2025-07-17 23:13:25] [Rank 0] step:4241/10000 train_time:995603ms step_avg:234.76ms +[2025-07-17 23:13:32] [Rank 0] PRINT: step:4250/10000 val_loss:4.4973 train_time:998034ms step_avg:234.83ms +[2025-07-17 23:13:32] [Rank 0] PRINT: step:4250/10000 val_loss:4.4973 train_time:998034ms step_avg:234.83ms +[2025-07-17 23:13:34] [Rank 0] step:4261/10000 train_time:1000455ms step_avg:234.79ms +[2025-07-17 23:13:34] [Rank 0] step:4261/10000 train_time:1000455ms step_avg:234.79ms +[2025-07-17 23:13:39] [Rank 0] step:4281/10000 train_time:1005308ms step_avg:234.83ms +[2025-07-17 23:13:39] [Rank 0] step:4281/10000 train_time:1005308ms step_avg:234.83ms +[2025-07-17 23:13:44] [Rank 0] step:4301/10000 train_time:1010161ms step_avg:234.87ms +[2025-07-17 23:13:44] [Rank 0] step:4301/10000 train_time:1010161ms step_avg:234.87ms +[2025-07-17 23:13:49] [Rank 0] step:4321/10000 train_time:1015023ms step_avg:234.90ms +[2025-07-17 23:13:49] [Rank 0] step:4321/10000 train_time:1015023ms step_avg:234.90ms +[2025-07-17 23:13:54] [Rank 0] step:4341/10000 train_time:1019876ms step_avg:234.94ms +[2025-07-17 23:13:54] [Rank 0] step:4341/10000 train_time:1019876ms step_avg:234.94ms +[2025-07-17 23:13:59] [Rank 0] step:4361/10000 train_time:1024730ms step_avg:234.98ms +[2025-07-17 23:13:59] [Rank 0] step:4361/10000 train_time:1024730ms step_avg:234.98ms +[2025-07-17 23:14:07] [Rank 0] PRINT: step:4375/10000 val_loss:4.4980 train_time:1028370ms step_avg:235.06ms +[2025-07-17 23:14:07] [Rank 0] PRINT: step:4375/10000 val_loss:4.4980 train_time:1028370ms step_avg:235.06ms +[2025-07-17 23:14:08] [Rank 0] step:4381/10000 train_time:1029578ms step_avg:235.01ms +[2025-07-17 23:14:08] [Rank 0] step:4381/10000 train_time:1029578ms step_avg:235.01ms +[2025-07-17 23:14:13] [Rank 0] step:4401/10000 train_time:1034427ms step_avg:235.04ms +[2025-07-17 23:14:13] [Rank 0] step:4401/10000 train_time:1034427ms step_avg:235.04ms +[2025-07-17 23:14:18] [Rank 
0] step:4421/10000 train_time:1039274ms step_avg:235.08ms +[2025-07-17 23:14:18] [Rank 0] step:4421/10000 train_time:1039274ms step_avg:235.08ms +[2025-07-17 23:14:23] [Rank 0] step:4441/10000 train_time:1044129ms step_avg:235.11ms +[2025-07-17 23:14:23] [Rank 0] step:4441/10000 train_time:1044129ms step_avg:235.11ms +[2025-07-17 23:14:28] [Rank 0] step:4461/10000 train_time:1048994ms step_avg:235.15ms +[2025-07-17 23:14:28] [Rank 0] step:4461/10000 train_time:1048994ms step_avg:235.15ms +[2025-07-17 23:14:32] [Rank 0] step:4481/10000 train_time:1053855ms step_avg:235.18ms +[2025-07-17 23:14:32] [Rank 0] step:4481/10000 train_time:1053855ms step_avg:235.18ms +[2025-07-17 23:14:42] [Rank 0] PRINT: step:4500/10000 val_loss:4.5099 train_time:1058719ms step_avg:235.27ms +[2025-07-17 23:14:42] [Rank 0] PRINT: step:4500/10000 val_loss:4.5099 train_time:1058719ms step_avg:235.27ms +[2025-07-17 23:14:42] [Rank 0] step:4501/10000 train_time:1058736ms step_avg:235.22ms +[2025-07-17 23:14:42] [Rank 0] step:4501/10000 train_time:1058736ms step_avg:235.22ms +[2025-07-17 23:14:47] [Rank 0] step:4521/10000 train_time:1063575ms step_avg:235.25ms +[2025-07-17 23:14:47] [Rank 0] step:4521/10000 train_time:1063575ms step_avg:235.25ms +[2025-07-17 23:14:52] [Rank 0] step:4541/10000 train_time:1068438ms step_avg:235.29ms +[2025-07-17 23:14:52] [Rank 0] step:4541/10000 train_time:1068438ms step_avg:235.29ms +[2025-07-17 23:14:57] [Rank 0] step:4561/10000 train_time:1073295ms step_avg:235.32ms +[2025-07-17 23:14:57] [Rank 0] step:4561/10000 train_time:1073295ms step_avg:235.32ms +[2025-07-17 23:15:02] [Rank 0] step:4581/10000 train_time:1078244ms step_avg:235.37ms +[2025-07-17 23:15:02] [Rank 0] step:4581/10000 train_time:1078244ms step_avg:235.37ms +[2025-07-17 23:15:06] [Rank 0] step:4601/10000 train_time:1083110ms step_avg:235.41ms +[2025-07-17 23:15:06] [Rank 0] step:4601/10000 train_time:1083110ms step_avg:235.41ms +[2025-07-17 23:15:11] [Rank 0] step:4621/10000 train_time:1087972ms 
step_avg:235.44ms +[2025-07-17 23:15:11] [Rank 0] step:4621/10000 train_time:1087972ms step_avg:235.44ms +[2025-07-17 23:15:17] [Rank 0] PRINT: step:4625/10000 val_loss:4.6015 train_time:1089194ms step_avg:235.50ms +[2025-07-17 23:15:17] [Rank 0] PRINT: step:4625/10000 val_loss:4.6015 train_time:1089194ms step_avg:235.50ms +[2025-07-17 23:15:21] [Rank 0] step:4641/10000 train_time:1092832ms step_avg:235.47ms +[2025-07-17 23:15:21] [Rank 0] step:4641/10000 train_time:1092832ms step_avg:235.47ms +[2025-07-17 23:15:26] [Rank 0] step:4661/10000 train_time:1097700ms step_avg:235.51ms +[2025-07-17 23:15:26] [Rank 0] step:4661/10000 train_time:1097700ms step_avg:235.51ms +[2025-07-17 23:15:30] [Rank 0] step:4681/10000 train_time:1102567ms step_avg:235.54ms +[2025-07-17 23:15:30] [Rank 0] step:4681/10000 train_time:1102567ms step_avg:235.54ms +[2025-07-17 23:15:35] [Rank 0] step:4701/10000 train_time:1107436ms step_avg:235.57ms +[2025-07-17 23:15:35] [Rank 0] step:4701/10000 train_time:1107436ms step_avg:235.57ms +[2025-07-17 23:15:40] [Rank 0] step:4721/10000 train_time:1112302ms step_avg:235.61ms +[2025-07-17 23:15:40] [Rank 0] step:4721/10000 train_time:1112302ms step_avg:235.61ms +[2025-07-17 23:15:45] [Rank 0] step:4741/10000 train_time:1117170ms step_avg:235.64ms +[2025-07-17 23:15:45] [Rank 0] step:4741/10000 train_time:1117170ms step_avg:235.64ms +[2025-07-17 23:15:52] [Rank 0] PRINT: step:4750/10000 val_loss:4.4915 train_time:1119614ms step_avg:235.71ms +[2025-07-17 23:15:52] [Rank 0] PRINT: step:4750/10000 val_loss:4.4915 train_time:1119614ms step_avg:235.71ms +[2025-07-17 23:15:55] [Rank 0] step:4761/10000 train_time:1122041ms step_avg:235.67ms +[2025-07-17 23:15:55] [Rank 0] step:4761/10000 train_time:1122041ms step_avg:235.67ms +[2025-07-17 23:15:59] [Rank 0] step:4781/10000 train_time:1126905ms step_avg:235.70ms +[2025-07-17 23:15:59] [Rank 0] step:4781/10000 train_time:1126905ms step_avg:235.70ms +[2025-07-17 23:16:04] [Rank 0] step:4801/10000 
train_time:1131773ms step_avg:235.74ms +[2025-07-17 23:16:04] [Rank 0] step:4801/10000 train_time:1131773ms step_avg:235.74ms +[2025-07-17 23:16:09] [Rank 0] step:4821/10000 train_time:1136646ms step_avg:235.77ms +[2025-07-17 23:16:09] [Rank 0] step:4821/10000 train_time:1136646ms step_avg:235.77ms +[2025-07-17 23:16:14] [Rank 0] step:4841/10000 train_time:1141520ms step_avg:235.80ms +[2025-07-17 23:16:14] [Rank 0] step:4841/10000 train_time:1141520ms step_avg:235.80ms +[2025-07-17 23:16:19] [Rank 0] step:4861/10000 train_time:1146389ms step_avg:235.83ms +[2025-07-17 23:16:19] [Rank 0] step:4861/10000 train_time:1146389ms step_avg:235.83ms +[2025-07-17 23:16:27] [Rank 0] PRINT: step:4875/10000 val_loss:4.6322 train_time:1150047ms step_avg:235.91ms +[2025-07-17 23:16:27] [Rank 0] PRINT: step:4875/10000 val_loss:4.6322 train_time:1150047ms step_avg:235.91ms +[2025-07-17 23:16:28] [Rank 0] step:4881/10000 train_time:1151269ms step_avg:235.87ms +[2025-07-17 23:16:28] [Rank 0] step:4881/10000 train_time:1151269ms step_avg:235.87ms +[2025-07-17 23:16:33] [Rank 0] step:4901/10000 train_time:1156144ms step_avg:235.90ms +[2025-07-17 23:16:33] [Rank 0] step:4901/10000 train_time:1156144ms step_avg:235.90ms +[2025-07-17 23:16:38] [Rank 0] step:4921/10000 train_time:1161021ms step_avg:235.93ms +[2025-07-17 23:16:38] [Rank 0] step:4921/10000 train_time:1161021ms step_avg:235.93ms +[2025-07-17 23:16:43] [Rank 0] step:4941/10000 train_time:1165900ms step_avg:235.96ms +[2025-07-17 23:16:43] [Rank 0] step:4941/10000 train_time:1165900ms step_avg:235.96ms +[2025-07-17 23:16:48] [Rank 0] step:4961/10000 train_time:1170774ms step_avg:236.00ms +[2025-07-17 23:16:48] [Rank 0] step:4961/10000 train_time:1170774ms step_avg:236.00ms +[2025-07-17 23:16:53] [Rank 0] step:4981/10000 train_time:1175647ms step_avg:236.03ms +[2025-07-17 23:16:53] [Rank 0] step:4981/10000 train_time:1175647ms step_avg:236.03ms +[2025-07-17 23:17:02] [Rank 0] PRINT: step:5000/10000 val_loss:4.5313 
train_time:1180523ms step_avg:236.10ms +[2025-07-17 23:17:02] [Rank 0] PRINT: step:5000/10000 val_loss:4.5313 train_time:1180523ms step_avg:236.10ms +[2025-07-17 23:17:02] [Rank 0] step:5001/10000 train_time:1180539ms step_avg:236.06ms +[2025-07-17 23:17:02] [Rank 0] step:5001/10000 train_time:1180539ms step_avg:236.06ms +[2025-07-17 23:17:07] [Rank 0] step:5021/10000 train_time:1185393ms step_avg:236.09ms +[2025-07-17 23:17:07] [Rank 0] step:5021/10000 train_time:1185393ms step_avg:236.09ms +[2025-07-17 23:17:12] [Rank 0] step:5041/10000 train_time:1190264ms step_avg:236.12ms +[2025-07-17 23:17:12] [Rank 0] step:5041/10000 train_time:1190264ms step_avg:236.12ms +[2025-07-17 23:17:17] [Rank 0] step:5061/10000 train_time:1195134ms step_avg:236.15ms +[2025-07-17 23:17:17] [Rank 0] step:5061/10000 train_time:1195134ms step_avg:236.15ms +[2025-07-17 23:17:22] [Rank 0] step:5081/10000 train_time:1200008ms step_avg:236.18ms +[2025-07-17 23:17:22] [Rank 0] step:5081/10000 train_time:1200008ms step_avg:236.18ms +[2025-07-17 23:17:27] [Rank 0] step:5101/10000 train_time:1205396ms step_avg:236.31ms +[2025-07-17 23:17:27] [Rank 0] step:5101/10000 train_time:1205396ms step_avg:236.31ms +[2025-07-17 23:17:32] [Rank 0] step:5121/10000 train_time:1210258ms step_avg:236.33ms +[2025-07-17 23:17:32] [Rank 0] step:5121/10000 train_time:1210258ms step_avg:236.33ms +[2025-07-17 23:17:38] [Rank 0] PRINT: step:5125/10000 val_loss:4.5242 train_time:1211482ms step_avg:236.39ms +[2025-07-17 23:17:38] [Rank 0] PRINT: step:5125/10000 val_loss:4.5242 train_time:1211482ms step_avg:236.39ms +[2025-07-17 23:17:42] [Rank 0] step:5141/10000 train_time:1215127ms step_avg:236.36ms +[2025-07-17 23:17:42] [Rank 0] step:5141/10000 train_time:1215127ms step_avg:236.36ms +[2025-07-17 23:17:47] [Rank 0] step:5161/10000 train_time:1220001ms step_avg:236.39ms +[2025-07-17 23:17:47] [Rank 0] step:5161/10000 train_time:1220001ms step_avg:236.39ms +[2025-07-17 23:17:51] [Rank 0] step:5181/10000 
train_time:1224875ms step_avg:236.42ms +[2025-07-17 23:17:51] [Rank 0] step:5181/10000 train_time:1224875ms step_avg:236.42ms +[2025-07-17 23:17:56] [Rank 0] step:5201/10000 train_time:1229800ms step_avg:236.45ms +[2025-07-17 23:17:56] [Rank 0] step:5201/10000 train_time:1229800ms step_avg:236.45ms +[2025-07-17 23:18:01] [Rank 0] step:5221/10000 train_time:1234755ms step_avg:236.50ms +[2025-07-17 23:18:01] [Rank 0] step:5221/10000 train_time:1234755ms step_avg:236.50ms +[2025-07-17 23:18:06] [Rank 0] step:5241/10000 train_time:1239709ms step_avg:236.54ms +[2025-07-17 23:18:06] [Rank 0] step:5241/10000 train_time:1239709ms step_avg:236.54ms +[2025-07-17 23:18:13] [Rank 0] PRINT: step:5250/10000 val_loss:4.5232 train_time:1242184ms step_avg:236.61ms +[2025-07-17 23:18:13] [Rank 0] PRINT: step:5250/10000 val_loss:4.5232 train_time:1242184ms step_avg:236.61ms +[2025-07-17 23:18:16] [Rank 0] step:5261/10000 train_time:1244650ms step_avg:236.58ms +[2025-07-17 23:18:16] [Rank 0] step:5261/10000 train_time:1244650ms step_avg:236.58ms +[2025-07-17 23:18:21] [Rank 0] step:5281/10000 train_time:1249603ms step_avg:236.62ms +[2025-07-17 23:18:21] [Rank 0] step:5281/10000 train_time:1249603ms step_avg:236.62ms +[2025-07-17 23:18:26] [Rank 0] step:5301/10000 train_time:1254551ms step_avg:236.66ms +[2025-07-17 23:18:26] [Rank 0] step:5301/10000 train_time:1254551ms step_avg:236.66ms +[2025-07-17 23:18:31] [Rank 0] step:5321/10000 train_time:1259497ms step_avg:236.70ms +[2025-07-17 23:18:31] [Rank 0] step:5321/10000 train_time:1259497ms step_avg:236.70ms +[2025-07-17 23:18:36] [Rank 0] step:5341/10000 train_time:1264452ms step_avg:236.74ms +[2025-07-17 23:18:36] [Rank 0] step:5341/10000 train_time:1264452ms step_avg:236.74ms +[2025-07-17 23:18:41] [Rank 0] step:5361/10000 train_time:1269405ms step_avg:236.79ms +[2025-07-17 23:18:41] [Rank 0] step:5361/10000 train_time:1269405ms step_avg:236.79ms +[2025-07-17 23:18:49] [Rank 0] PRINT: step:5375/10000 val_loss:4.3176 
train_time:1273126ms step_avg:236.86ms +[2025-07-17 23:18:49] [Rank 0] PRINT: step:5375/10000 val_loss:4.3176 train_time:1273126ms step_avg:236.86ms +[2025-07-17 23:18:50] [Rank 0] step:5381/10000 train_time:1274356ms step_avg:236.83ms +[2025-07-17 23:18:50] [Rank 0] step:5381/10000 train_time:1274356ms step_avg:236.83ms +[2025-07-17 23:18:55] [Rank 0] step:5401/10000 train_time:1279305ms step_avg:236.86ms +[2025-07-17 23:18:55] [Rank 0] step:5401/10000 train_time:1279305ms step_avg:236.86ms +[2025-07-17 23:19:00] [Rank 0] step:5421/10000 train_time:1284264ms step_avg:236.91ms +[2025-07-17 23:19:00] [Rank 0] step:5421/10000 train_time:1284264ms step_avg:236.91ms +[2025-07-17 23:19:05] [Rank 0] step:5441/10000 train_time:1289210ms step_avg:236.94ms +[2025-07-17 23:19:05] [Rank 0] step:5441/10000 train_time:1289210ms step_avg:236.94ms +[2025-07-17 23:19:10] [Rank 0] step:5461/10000 train_time:1294162ms step_avg:236.98ms +[2025-07-17 23:19:10] [Rank 0] step:5461/10000 train_time:1294162ms step_avg:236.98ms +[2025-07-17 23:19:15] [Rank 0] step:5481/10000 train_time:1299118ms step_avg:237.02ms +[2025-07-17 23:19:15] [Rank 0] step:5481/10000 train_time:1299118ms step_avg:237.02ms +[2025-07-17 23:19:24] [Rank 0] PRINT: step:5500/10000 val_loss:4.6205 train_time:1304073ms step_avg:237.10ms +[2025-07-17 23:19:24] [Rank 0] PRINT: step:5500/10000 val_loss:4.6205 train_time:1304073ms step_avg:237.10ms +[2025-07-17 23:19:24] [Rank 0] step:5501/10000 train_time:1304089ms step_avg:237.06ms +[2025-07-17 23:19:24] [Rank 0] step:5501/10000 train_time:1304089ms step_avg:237.06ms +[2025-07-17 23:19:29] [Rank 0] step:5521/10000 train_time:1309018ms step_avg:237.10ms +[2025-07-17 23:19:29] [Rank 0] step:5521/10000 train_time:1309018ms step_avg:237.10ms +[2025-07-17 23:19:34] [Rank 0] step:5541/10000 train_time:1313971ms step_avg:237.14ms +[2025-07-17 23:19:34] [Rank 0] step:5541/10000 train_time:1313971ms step_avg:237.14ms +[2025-07-17 23:19:39] [Rank 0] step:5561/10000 
train_time:1318921ms step_avg:237.17ms +[2025-07-17 23:19:39] [Rank 0] step:5561/10000 train_time:1318921ms step_avg:237.17ms +[2025-07-17 23:19:44] [Rank 0] step:5581/10000 train_time:1323875ms step_avg:237.21ms +[2025-07-17 23:19:44] [Rank 0] step:5581/10000 train_time:1323875ms step_avg:237.21ms +[2025-07-17 23:19:49] [Rank 0] step:5601/10000 train_time:1329314ms step_avg:237.34ms +[2025-07-17 23:19:49] [Rank 0] step:5601/10000 train_time:1329314ms step_avg:237.34ms +[2025-07-17 23:19:54] [Rank 0] step:5621/10000 train_time:1334262ms step_avg:237.37ms +[2025-07-17 23:19:54] [Rank 0] step:5621/10000 train_time:1334262ms step_avg:237.37ms +[2025-07-17 23:20:00] [Rank 0] PRINT: step:5625/10000 val_loss:4.5726 train_time:1335507ms step_avg:237.42ms +[2025-07-17 23:20:00] [Rank 0] PRINT: step:5625/10000 val_loss:4.5726 train_time:1335507ms step_avg:237.42ms +[2025-07-17 23:20:04] [Rank 0] step:5641/10000 train_time:1339206ms step_avg:237.41ms +[2025-07-17 23:20:04] [Rank 0] step:5641/10000 train_time:1339206ms step_avg:237.41ms +[2025-07-17 23:20:09] [Rank 0] step:5661/10000 train_time:1344153ms step_avg:237.44ms +[2025-07-17 23:20:09] [Rank 0] step:5661/10000 train_time:1344153ms step_avg:237.44ms +[2025-07-17 23:20:14] [Rank 0] step:5681/10000 train_time:1349101ms step_avg:237.48ms +[2025-07-17 23:20:14] [Rank 0] step:5681/10000 train_time:1349101ms step_avg:237.48ms +[2025-07-17 23:20:19] [Rank 0] step:5701/10000 train_time:1354047ms step_avg:237.51ms +[2025-07-17 23:20:19] [Rank 0] step:5701/10000 train_time:1354047ms step_avg:237.51ms +[2025-07-17 23:20:24] [Rank 0] step:5721/10000 train_time:1358991ms step_avg:237.54ms +[2025-07-17 23:20:24] [Rank 0] step:5721/10000 train_time:1358991ms step_avg:237.54ms +[2025-07-17 23:20:29] [Rank 0] step:5741/10000 train_time:1363941ms step_avg:237.58ms +[2025-07-17 23:20:29] [Rank 0] step:5741/10000 train_time:1363941ms step_avg:237.58ms +[2025-07-17 23:20:36] [Rank 0] PRINT: step:5750/10000 val_loss:4.6511 
train_time:1366420ms step_avg:237.64ms +[2025-07-17 23:20:36] [Rank 0] PRINT: step:5750/10000 val_loss:4.6511 train_time:1366420ms step_avg:237.64ms +[2025-07-17 23:20:38] [Rank 0] step:5761/10000 train_time:1368887ms step_avg:237.61ms +[2025-07-17 23:20:38] [Rank 0] step:5761/10000 train_time:1368887ms step_avg:237.61ms +[2025-07-17 23:20:43] [Rank 0] step:5781/10000 train_time:1373830ms step_avg:237.65ms +[2025-07-17 23:20:43] [Rank 0] step:5781/10000 train_time:1373830ms step_avg:237.65ms +[2025-07-17 23:20:48] [Rank 0] step:5801/10000 train_time:1378768ms step_avg:237.68ms +[2025-07-17 23:20:48] [Rank 0] step:5801/10000 train_time:1378768ms step_avg:237.68ms +[2025-07-17 23:20:53] [Rank 0] step:5821/10000 train_time:1383711ms step_avg:237.71ms +[2025-07-17 23:20:53] [Rank 0] step:5821/10000 train_time:1383711ms step_avg:237.71ms +[2025-07-17 23:20:58] [Rank 0] step:5841/10000 train_time:1388653ms step_avg:237.74ms +[2025-07-17 23:20:58] [Rank 0] step:5841/10000 train_time:1388653ms step_avg:237.74ms +[2025-07-17 23:21:03] [Rank 0] step:5861/10000 train_time:1393591ms step_avg:237.77ms +[2025-07-17 23:21:03] [Rank 0] step:5861/10000 train_time:1393591ms step_avg:237.77ms +[2025-07-17 23:21:11] [Rank 0] PRINT: step:5875/10000 val_loss:4.4844 train_time:1397297ms step_avg:237.84ms +[2025-07-17 23:21:11] [Rank 0] PRINT: step:5875/10000 val_loss:4.4844 train_time:1397297ms step_avg:237.84ms +[2025-07-17 23:21:13] [Rank 0] step:5881/10000 train_time:1398525ms step_avg:237.80ms +[2025-07-17 23:21:13] [Rank 0] step:5881/10000 train_time:1398525ms step_avg:237.80ms +[2025-07-17 23:21:18] [Rank 0] step:5901/10000 train_time:1403464ms step_avg:237.83ms +[2025-07-17 23:21:18] [Rank 0] step:5901/10000 train_time:1403464ms step_avg:237.83ms +[2025-07-17 23:21:23] [Rank 0] step:5921/10000 train_time:1408404ms step_avg:237.87ms +[2025-07-17 23:21:23] [Rank 0] step:5921/10000 train_time:1408404ms step_avg:237.87ms +[2025-07-17 23:21:27] [Rank 0] step:5941/10000 
train_time:1413359ms step_avg:237.90ms +[2025-07-17 23:21:27] [Rank 0] step:5941/10000 train_time:1413359ms step_avg:237.90ms +[2025-07-17 23:21:32] [Rank 0] step:5961/10000 train_time:1418314ms step_avg:237.93ms +[2025-07-17 23:21:32] [Rank 0] step:5961/10000 train_time:1418314ms step_avg:237.93ms +[2025-07-17 23:21:37] [Rank 0] step:5981/10000 train_time:1423266ms step_avg:237.96ms +[2025-07-17 23:21:37] [Rank 0] step:5981/10000 train_time:1423266ms step_avg:237.96ms +[2025-07-17 23:21:47] [Rank 0] PRINT: step:6000/10000 val_loss:4.5987 train_time:1428232ms step_avg:238.04ms +[2025-07-17 23:21:47] [Rank 0] PRINT: step:6000/10000 val_loss:4.5987 train_time:1428232ms step_avg:238.04ms +[2025-07-17 23:21:47] [Rank 0] step:6001/10000 train_time:1428249ms step_avg:238.00ms +[2025-07-17 23:21:47] [Rank 0] step:6001/10000 train_time:1428249ms step_avg:238.00ms +[2025-07-17 23:21:52] [Rank 0] step:6021/10000 train_time:1433180ms step_avg:238.03ms +[2025-07-17 23:21:52] [Rank 0] step:6021/10000 train_time:1433180ms step_avg:238.03ms +[2025-07-17 23:21:57] [Rank 0] step:6041/10000 train_time:1438137ms step_avg:238.06ms +[2025-07-17 23:21:57] [Rank 0] step:6041/10000 train_time:1438137ms step_avg:238.06ms +[2025-07-17 23:22:02] [Rank 0] step:6061/10000 train_time:1443088ms step_avg:238.09ms +[2025-07-17 23:22:02] [Rank 0] step:6061/10000 train_time:1443088ms step_avg:238.09ms +[2025-07-17 23:22:07] [Rank 0] step:6081/10000 train_time:1448048ms step_avg:238.13ms +[2025-07-17 23:22:07] [Rank 0] step:6081/10000 train_time:1448048ms step_avg:238.13ms +[2025-07-17 23:22:12] [Rank 0] step:6101/10000 train_time:1453506ms step_avg:238.24ms +[2025-07-17 23:22:12] [Rank 0] step:6101/10000 train_time:1453506ms step_avg:238.24ms +[2025-07-17 23:22:17] [Rank 0] step:6121/10000 train_time:1458470ms step_avg:238.27ms +[2025-07-17 23:22:17] [Rank 0] step:6121/10000 train_time:1458470ms step_avg:238.27ms +[2025-07-17 23:22:23] [Rank 0] PRINT: step:6125/10000 val_loss:4.6285 
train_time:1459717ms step_avg:238.32ms +[2025-07-17 23:22:23] [Rank 0] PRINT: step:6125/10000 val_loss:4.6285 train_time:1459717ms step_avg:238.32ms +[2025-07-17 23:22:27] [Rank 0] step:6141/10000 train_time:1463430ms step_avg:238.30ms +[2025-07-17 23:22:27] [Rank 0] step:6141/10000 train_time:1463430ms step_avg:238.30ms +[2025-07-17 23:22:32] [Rank 0] step:6161/10000 train_time:1468381ms step_avg:238.33ms +[2025-07-17 23:22:32] [Rank 0] step:6161/10000 train_time:1468381ms step_avg:238.33ms +[2025-07-17 23:22:37] [Rank 0] step:6181/10000 train_time:1473348ms step_avg:238.37ms +[2025-07-17 23:22:37] [Rank 0] step:6181/10000 train_time:1473348ms step_avg:238.37ms +[2025-07-17 23:22:42] [Rank 0] step:6201/10000 train_time:1478315ms step_avg:238.40ms +[2025-07-17 23:22:42] [Rank 0] step:6201/10000 train_time:1478315ms step_avg:238.40ms +[2025-07-17 23:22:47] [Rank 0] step:6221/10000 train_time:1483276ms step_avg:238.43ms +[2025-07-17 23:22:47] [Rank 0] step:6221/10000 train_time:1483276ms step_avg:238.43ms +[2025-07-17 23:22:52] [Rank 0] step:6241/10000 train_time:1488242ms step_avg:238.46ms +[2025-07-17 23:22:52] [Rank 0] step:6241/10000 train_time:1488242ms step_avg:238.46ms +[2025-07-17 23:22:59] [Rank 0] PRINT: step:6250/10000 val_loss:4.5094 train_time:1490731ms step_avg:238.52ms +[2025-07-17 23:22:59] [Rank 0] PRINT: step:6250/10000 val_loss:4.5094 train_time:1490731ms step_avg:238.52ms +[2025-07-17 23:23:01] [Rank 0] step:6261/10000 train_time:1493203ms step_avg:238.49ms +[2025-07-17 23:23:01] [Rank 0] step:6261/10000 train_time:1493203ms step_avg:238.49ms +[2025-07-17 23:23:06] [Rank 0] step:6281/10000 train_time:1498170ms step_avg:238.52ms +[2025-07-17 23:23:06] [Rank 0] step:6281/10000 train_time:1498170ms step_avg:238.52ms +[2025-07-17 23:23:11] [Rank 0] step:6301/10000 train_time:1503126ms step_avg:238.55ms +[2025-07-17 23:23:11] [Rank 0] step:6301/10000 train_time:1503126ms step_avg:238.55ms +[2025-07-17 23:23:16] [Rank 0] step:6321/10000 
train_time:1508093ms step_avg:238.58ms +[2025-07-17 23:23:16] [Rank 0] step:6321/10000 train_time:1508093ms step_avg:238.58ms +[2025-07-17 23:23:21] [Rank 0] step:6341/10000 train_time:1513062ms step_avg:238.62ms +[2025-07-17 23:23:21] [Rank 0] step:6341/10000 train_time:1513062ms step_avg:238.62ms +[2025-07-17 23:23:26] [Rank 0] step:6361/10000 train_time:1518021ms step_avg:238.64ms +[2025-07-17 23:23:26] [Rank 0] step:6361/10000 train_time:1518021ms step_avg:238.64ms +[2025-07-17 23:23:34] [Rank 0] PRINT: step:6375/10000 val_loss:4.5029 train_time:1521744ms step_avg:238.70ms +[2025-07-17 23:23:34] [Rank 0] PRINT: step:6375/10000 val_loss:4.5029 train_time:1521744ms step_avg:238.70ms +[2025-07-17 23:23:36] [Rank 0] step:6381/10000 train_time:1522978ms step_avg:238.67ms +[2025-07-17 23:23:36] [Rank 0] step:6381/10000 train_time:1522978ms step_avg:238.67ms +[2025-07-17 23:23:41] [Rank 0] step:6401/10000 train_time:1527926ms step_avg:238.70ms +[2025-07-17 23:23:41] [Rank 0] step:6401/10000 train_time:1527926ms step_avg:238.70ms +[2025-07-17 23:23:46] [Rank 0] step:6421/10000 train_time:1532886ms step_avg:238.73ms +[2025-07-17 23:23:46] [Rank 0] step:6421/10000 train_time:1532886ms step_avg:238.73ms +[2025-07-17 23:23:51] [Rank 0] step:6441/10000 train_time:1537850ms step_avg:238.76ms +[2025-07-17 23:23:51] [Rank 0] step:6441/10000 train_time:1537850ms step_avg:238.76ms +[2025-07-17 23:23:56] [Rank 0] step:6461/10000 train_time:1542821ms step_avg:238.79ms +[2025-07-17 23:23:56] [Rank 0] step:6461/10000 train_time:1542821ms step_avg:238.79ms +[2025-07-17 23:24:01] [Rank 0] step:6481/10000 train_time:1547789ms step_avg:238.82ms +[2025-07-17 23:24:01] [Rank 0] step:6481/10000 train_time:1547789ms step_avg:238.82ms +[2025-07-17 23:24:10] [Rank 0] PRINT: step:6500/10000 val_loss:4.5621 train_time:1552753ms step_avg:238.89ms +[2025-07-17 23:24:10] [Rank 0] PRINT: step:6500/10000 val_loss:4.5621 train_time:1552753ms step_avg:238.89ms +[2025-07-17 23:24:10] [Rank 0] 
step:6501/10000 train_time:1552769ms step_avg:238.85ms +[2025-07-17 23:24:10] [Rank 0] step:6501/10000 train_time:1552769ms step_avg:238.85ms +[2025-07-17 23:24:15] [Rank 0] step:6521/10000 train_time:1557712ms step_avg:238.88ms +[2025-07-17 23:24:15] [Rank 0] step:6521/10000 train_time:1557712ms step_avg:238.88ms +[2025-07-17 23:24:20] [Rank 0] step:6541/10000 train_time:1562672ms step_avg:238.90ms +[2025-07-17 23:24:20] [Rank 0] step:6541/10000 train_time:1562672ms step_avg:238.90ms +[2025-07-17 23:24:25] [Rank 0] step:6561/10000 train_time:1567647ms step_avg:238.93ms +[2025-07-17 23:24:25] [Rank 0] step:6561/10000 train_time:1567647ms step_avg:238.93ms +[2025-07-17 23:24:30] [Rank 0] step:6581/10000 train_time:1572611ms step_avg:238.96ms +[2025-07-17 23:24:30] [Rank 0] step:6581/10000 train_time:1572611ms step_avg:238.96ms +[2025-07-17 23:24:35] [Rank 0] step:6601/10000 train_time:1577588ms step_avg:238.99ms +[2025-07-17 23:24:35] [Rank 0] step:6601/10000 train_time:1577588ms step_avg:238.99ms +[2025-07-17 23:24:41] [Rank 0] step:6621/10000 train_time:1583064ms step_avg:239.10ms +[2025-07-17 23:24:41] [Rank 0] step:6621/10000 train_time:1583064ms step_avg:239.10ms +[2025-07-17 23:24:46] [Rank 0] PRINT: step:6625/10000 val_loss:4.6388 train_time:1584312ms step_avg:239.14ms +[2025-07-17 23:24:46] [Rank 0] PRINT: step:6625/10000 val_loss:4.6388 train_time:1584312ms step_avg:239.14ms +[2025-07-17 23:24:50] [Rank 0] step:6641/10000 train_time:1588020ms step_avg:239.12ms +[2025-07-17 23:24:50] [Rank 0] step:6641/10000 train_time:1588020ms step_avg:239.12ms +[2025-07-17 23:24:55] [Rank 0] step:6661/10000 train_time:1592983ms step_avg:239.15ms +[2025-07-17 23:24:55] [Rank 0] step:6661/10000 train_time:1592983ms step_avg:239.15ms +[2025-07-17 23:25:00] [Rank 0] step:6681/10000 train_time:1598001ms step_avg:239.19ms +[2025-07-17 23:25:00] [Rank 0] step:6681/10000 train_time:1598001ms step_avg:239.19ms +[2025-07-17 23:25:05] [Rank 0] step:6701/10000 train_time:1603027ms 
step_avg:239.22ms +[2025-07-17 23:25:05] [Rank 0] step:6701/10000 train_time:1603027ms step_avg:239.22ms +[2025-07-17 23:25:10] [Rank 0] step:6721/10000 train_time:1608069ms step_avg:239.26ms +[2025-07-17 23:25:10] [Rank 0] step:6721/10000 train_time:1608069ms step_avg:239.26ms +[2025-07-17 23:25:15] [Rank 0] step:6741/10000 train_time:1613111ms step_avg:239.30ms +[2025-07-17 23:25:15] [Rank 0] step:6741/10000 train_time:1613111ms step_avg:239.30ms +[2025-07-17 23:25:22] [Rank 0] PRINT: step:6750/10000 val_loss:4.5130 train_time:1615627ms step_avg:239.35ms +[2025-07-17 23:25:22] [Rank 0] PRINT: step:6750/10000 val_loss:4.5130 train_time:1615627ms step_avg:239.35ms +[2025-07-17 23:25:25] [Rank 0] step:6761/10000 train_time:1618134ms step_avg:239.33ms +[2025-07-17 23:25:25] [Rank 0] step:6761/10000 train_time:1618134ms step_avg:239.33ms +[2025-07-17 23:25:30] [Rank 0] step:6781/10000 train_time:1623160ms step_avg:239.37ms +[2025-07-17 23:25:30] [Rank 0] step:6781/10000 train_time:1623160ms step_avg:239.37ms +[2025-07-17 23:25:35] [Rank 0] step:6801/10000 train_time:1628185ms step_avg:239.40ms +[2025-07-17 23:25:35] [Rank 0] step:6801/10000 train_time:1628185ms step_avg:239.40ms +[2025-07-17 23:25:40] [Rank 0] step:6821/10000 train_time:1633206ms step_avg:239.44ms +[2025-07-17 23:25:40] [Rank 0] step:6821/10000 train_time:1633206ms step_avg:239.44ms +[2025-07-17 23:25:45] [Rank 0] step:6841/10000 train_time:1638226ms step_avg:239.47ms +[2025-07-17 23:25:45] [Rank 0] step:6841/10000 train_time:1638226ms step_avg:239.47ms +[2025-07-17 23:25:50] [Rank 0] step:6861/10000 train_time:1643239ms step_avg:239.50ms +[2025-07-17 23:25:50] [Rank 0] step:6861/10000 train_time:1643239ms step_avg:239.50ms +[2025-07-17 23:25:58] [Rank 0] PRINT: step:6875/10000 val_loss:4.4628 train_time:1646997ms step_avg:239.56ms +[2025-07-17 23:25:58] [Rank 0] PRINT: step:6875/10000 val_loss:4.4628 train_time:1646997ms step_avg:239.56ms +[2025-07-17 23:26:00] [Rank 0] step:6881/10000 
train_time:1648249ms step_avg:239.54ms +[2025-07-17 23:26:00] [Rank 0] step:6881/10000 train_time:1648249ms step_avg:239.54ms +[2025-07-17 23:26:05] [Rank 0] step:6901/10000 train_time:1653258ms step_avg:239.57ms +[2025-07-17 23:26:05] [Rank 0] step:6901/10000 train_time:1653258ms step_avg:239.57ms +[2025-07-17 23:26:10] [Rank 0] step:6921/10000 train_time:1658267ms step_avg:239.60ms +[2025-07-17 23:26:10] [Rank 0] step:6921/10000 train_time:1658267ms step_avg:239.60ms +[2025-07-17 23:26:15] [Rank 0] step:6941/10000 train_time:1663292ms step_avg:239.63ms +[2025-07-17 23:26:15] [Rank 0] step:6941/10000 train_time:1663292ms step_avg:239.63ms +[2025-07-17 23:26:20] [Rank 0] step:6961/10000 train_time:1668310ms step_avg:239.67ms +[2025-07-17 23:26:20] [Rank 0] step:6961/10000 train_time:1668310ms step_avg:239.67ms +[2025-07-17 23:26:25] [Rank 0] step:6981/10000 train_time:1673329ms step_avg:239.70ms +[2025-07-17 23:26:25] [Rank 0] step:6981/10000 train_time:1673329ms step_avg:239.70ms +[2025-07-17 23:26:34] [Rank 0] PRINT: step:7000/10000 val_loss:4.4844 train_time:1678347ms step_avg:239.76ms +[2025-07-17 23:26:34] [Rank 0] PRINT: step:7000/10000 val_loss:4.4844 train_time:1678347ms step_avg:239.76ms +[2025-07-17 23:26:34] [Rank 0] step:7001/10000 train_time:1678365ms step_avg:239.73ms +[2025-07-17 23:26:34] [Rank 0] step:7001/10000 train_time:1678365ms step_avg:239.73ms +[2025-07-17 23:26:39] [Rank 0] step:7021/10000 train_time:1683359ms step_avg:239.76ms +[2025-07-17 23:26:39] [Rank 0] step:7021/10000 train_time:1683359ms step_avg:239.76ms +[2025-07-17 23:26:44] [Rank 0] step:7041/10000 train_time:1688373ms step_avg:239.79ms +[2025-07-17 23:26:44] [Rank 0] step:7041/10000 train_time:1688373ms step_avg:239.79ms +[2025-07-17 23:26:49] [Rank 0] step:7061/10000 train_time:1693382ms step_avg:239.82ms +[2025-07-17 23:26:49] [Rank 0] step:7061/10000 train_time:1693382ms step_avg:239.82ms +[2025-07-17 23:26:54] [Rank 0] step:7081/10000 train_time:1698395ms step_avg:239.85ms 
+[2025-07-17 23:26:54] [Rank 0] step:7081/10000 train_time:1698395ms step_avg:239.85ms +[2025-07-17 23:26:59] [Rank 0] step:7101/10000 train_time:1703402ms step_avg:239.88ms +[2025-07-17 23:26:59] [Rank 0] step:7101/10000 train_time:1703402ms step_avg:239.88ms +[2025-07-17 23:27:05] [Rank 0] step:7121/10000 train_time:1708900ms step_avg:239.98ms +[2025-07-17 23:27:05] [Rank 0] step:7121/10000 train_time:1708900ms step_avg:239.98ms +[2025-07-17 23:27:10] [Rank 0] PRINT: step:7125/10000 val_loss:4.4964 train_time:1710157ms step_avg:240.02ms +[2025-07-17 23:27:10] [Rank 0] PRINT: step:7125/10000 val_loss:4.4964 train_time:1710157ms step_avg:240.02ms +[2025-07-17 23:27:14] [Rank 0] step:7141/10000 train_time:1713912ms step_avg:240.01ms +[2025-07-17 23:27:14] [Rank 0] step:7141/10000 train_time:1713912ms step_avg:240.01ms +[2025-07-17 23:27:19] [Rank 0] step:7161/10000 train_time:1718929ms step_avg:240.04ms +[2025-07-17 23:27:19] [Rank 0] step:7161/10000 train_time:1718929ms step_avg:240.04ms +[2025-07-17 23:27:24] [Rank 0] step:7181/10000 train_time:1723939ms step_avg:240.07ms +[2025-07-17 23:27:24] [Rank 0] step:7181/10000 train_time:1723939ms step_avg:240.07ms +[2025-07-17 23:27:29] [Rank 0] step:7201/10000 train_time:1728964ms step_avg:240.10ms +[2025-07-17 23:27:29] [Rank 0] step:7201/10000 train_time:1728964ms step_avg:240.10ms +[2025-07-17 23:27:34] [Rank 0] step:7221/10000 train_time:1733973ms step_avg:240.13ms +[2025-07-17 23:27:34] [Rank 0] step:7221/10000 train_time:1733973ms step_avg:240.13ms +[2025-07-17 23:27:39] [Rank 0] step:7241/10000 train_time:1738987ms step_avg:240.16ms +[2025-07-17 23:27:39] [Rank 0] step:7241/10000 train_time:1738987ms step_avg:240.16ms +[2025-07-17 23:27:46] [Rank 0] PRINT: step:7250/10000 val_loss:4.4558 train_time:1741501ms step_avg:240.21ms +[2025-07-17 23:27:46] [Rank 0] PRINT: step:7250/10000 val_loss:4.4558 train_time:1741501ms step_avg:240.21ms +[2025-07-17 23:27:49] [Rank 0] step:7261/10000 train_time:1743995ms 
step_avg:240.19ms +[2025-07-17 23:27:49] [Rank 0] step:7261/10000 train_time:1743995ms step_avg:240.19ms +[2025-07-17 23:27:54] [Rank 0] step:7281/10000 train_time:1749005ms step_avg:240.21ms +[2025-07-17 23:27:54] [Rank 0] step:7281/10000 train_time:1749005ms step_avg:240.21ms +[2025-07-17 23:27:59] [Rank 0] step:7301/10000 train_time:1754015ms step_avg:240.24ms +[2025-07-17 23:27:59] [Rank 0] step:7301/10000 train_time:1754015ms step_avg:240.24ms +[2025-07-17 23:28:04] [Rank 0] step:7321/10000 train_time:1759038ms step_avg:240.27ms +[2025-07-17 23:28:04] [Rank 0] step:7321/10000 train_time:1759038ms step_avg:240.27ms +[2025-07-17 23:28:09] [Rank 0] step:7341/10000 train_time:1764050ms step_avg:240.30ms +[2025-07-17 23:28:09] [Rank 0] step:7341/10000 train_time:1764050ms step_avg:240.30ms +[2025-07-17 23:28:14] [Rank 0] step:7361/10000 train_time:1769073ms step_avg:240.33ms +[2025-07-17 23:28:14] [Rank 0] step:7361/10000 train_time:1769073ms step_avg:240.33ms +[2025-07-17 23:28:22] [Rank 0] PRINT: step:7375/10000 val_loss:4.4748 train_time:1772840ms step_avg:240.39ms +[2025-07-17 23:28:22] [Rank 0] PRINT: step:7375/10000 val_loss:4.4748 train_time:1772840ms step_avg:240.39ms +[2025-07-17 23:28:24] [Rank 0] step:7381/10000 train_time:1774091ms step_avg:240.36ms +[2025-07-17 23:28:24] [Rank 0] step:7381/10000 train_time:1774091ms step_avg:240.36ms +[2025-07-17 23:28:29] [Rank 0] step:7401/10000 train_time:1779113ms step_avg:240.39ms +[2025-07-17 23:28:29] [Rank 0] step:7401/10000 train_time:1779113ms step_avg:240.39ms +[2025-07-17 23:28:34] [Rank 0] step:7421/10000 train_time:1784135ms step_avg:240.42ms +[2025-07-17 23:28:34] [Rank 0] step:7421/10000 train_time:1784135ms step_avg:240.42ms +[2025-07-17 23:28:39] [Rank 0] step:7441/10000 train_time:1789170ms step_avg:240.45ms +[2025-07-17 23:28:39] [Rank 0] step:7441/10000 train_time:1789170ms step_avg:240.45ms +[2025-07-17 23:28:44] [Rank 0] step:7461/10000 train_time:1794194ms step_avg:240.48ms +[2025-07-17 
23:28:44] [Rank 0] step:7461/10000 train_time:1794194ms step_avg:240.48ms +[2025-07-17 23:28:49] [Rank 0] step:7481/10000 train_time:1799226ms step_avg:240.51ms +[2025-07-17 23:28:49] [Rank 0] step:7481/10000 train_time:1799226ms step_avg:240.51ms +[2025-07-17 23:28:58] [Rank 0] PRINT: step:7500/10000 val_loss:4.6146 train_time:1804268ms step_avg:240.57ms +[2025-07-17 23:28:58] [Rank 0] PRINT: step:7500/10000 val_loss:4.6146 train_time:1804268ms step_avg:240.57ms +[2025-07-17 23:28:59] [Rank 0] step:7501/10000 train_time:1804284ms step_avg:240.54ms +[2025-07-17 23:28:59] [Rank 0] step:7501/10000 train_time:1804284ms step_avg:240.54ms +[2025-07-17 23:29:04] [Rank 0] step:7521/10000 train_time:1809302ms step_avg:240.57ms +[2025-07-17 23:29:04] [Rank 0] step:7521/10000 train_time:1809302ms step_avg:240.57ms +[2025-07-17 23:29:09] [Rank 0] step:7541/10000 train_time:1814328ms step_avg:240.60ms +[2025-07-17 23:29:09] [Rank 0] step:7541/10000 train_time:1814328ms step_avg:240.60ms +[2025-07-17 23:29:14] [Rank 0] step:7561/10000 train_time:1819360ms step_avg:240.62ms +[2025-07-17 23:29:14] [Rank 0] step:7561/10000 train_time:1819360ms step_avg:240.62ms +[2025-07-17 23:29:19] [Rank 0] step:7581/10000 train_time:1824396ms step_avg:240.65ms +[2025-07-17 23:29:19] [Rank 0] step:7581/10000 train_time:1824396ms step_avg:240.65ms +[2025-07-17 23:29:24] [Rank 0] step:7601/10000 train_time:1829443ms step_avg:240.68ms +[2025-07-17 23:29:24] [Rank 0] step:7601/10000 train_time:1829443ms step_avg:240.68ms +[2025-07-17 23:29:29] [Rank 0] step:7621/10000 train_time:1834493ms step_avg:240.72ms +[2025-07-17 23:29:29] [Rank 0] step:7621/10000 train_time:1834493ms step_avg:240.72ms +[2025-07-17 23:29:35] [Rank 0] PRINT: step:7625/10000 val_loss:4.6479 train_time:1836268ms step_avg:240.82ms +[2025-07-17 23:29:35] [Rank 0] PRINT: step:7625/10000 val_loss:4.6479 train_time:1836268ms step_avg:240.82ms +[2025-07-17 23:29:39] [Rank 0] step:7641/10000 train_time:1840035ms step_avg:240.81ms 
+[2025-07-17 23:29:39] [Rank 0] step:7641/10000 train_time:1840035ms step_avg:240.81ms +[2025-07-17 23:29:44] [Rank 0] step:7661/10000 train_time:1845073ms step_avg:240.84ms +[2025-07-17 23:29:44] [Rank 0] step:7661/10000 train_time:1845073ms step_avg:240.84ms +[2025-07-17 23:29:49] [Rank 0] step:7681/10000 train_time:1850123ms step_avg:240.87ms +[2025-07-17 23:29:49] [Rank 0] step:7681/10000 train_time:1850123ms step_avg:240.87ms +[2025-07-17 23:29:54] [Rank 0] step:7701/10000 train_time:1855154ms step_avg:240.90ms +[2025-07-17 23:29:54] [Rank 0] step:7701/10000 train_time:1855154ms step_avg:240.90ms +[2025-07-17 23:29:59] [Rank 0] step:7721/10000 train_time:1860188ms step_avg:240.93ms +[2025-07-17 23:29:59] [Rank 0] step:7721/10000 train_time:1860188ms step_avg:240.93ms +[2025-07-17 23:30:04] [Rank 0] step:7741/10000 train_time:1865225ms step_avg:240.95ms +[2025-07-17 23:30:04] [Rank 0] step:7741/10000 train_time:1865225ms step_avg:240.95ms +[2025-07-17 23:30:11] [Rank 0] PRINT: step:7750/10000 val_loss:4.5676 train_time:1867760ms step_avg:241.00ms +[2025-07-17 23:30:11] [Rank 0] PRINT: step:7750/10000 val_loss:4.5676 train_time:1867760ms step_avg:241.00ms +[2025-07-17 23:30:14] [Rank 0] step:7761/10000 train_time:1870276ms step_avg:240.98ms +[2025-07-17 23:30:14] [Rank 0] step:7761/10000 train_time:1870276ms step_avg:240.98ms +[2025-07-17 23:30:19] [Rank 0] step:7781/10000 train_time:1875313ms step_avg:241.01ms +[2025-07-17 23:30:19] [Rank 0] step:7781/10000 train_time:1875313ms step_avg:241.01ms +[2025-07-17 23:30:24] [Rank 0] step:7801/10000 train_time:1880353ms step_avg:241.04ms +[2025-07-17 23:30:24] [Rank 0] step:7801/10000 train_time:1880353ms step_avg:241.04ms +[2025-07-17 23:30:29] [Rank 0] step:7821/10000 train_time:1885385ms step_avg:241.07ms +[2025-07-17 23:30:29] [Rank 0] step:7821/10000 train_time:1885385ms step_avg:241.07ms +[2025-07-17 23:30:34] [Rank 0] step:7841/10000 train_time:1890418ms step_avg:241.09ms +[2025-07-17 23:30:34] [Rank 0] 
step:7841/10000 train_time:1890418ms step_avg:241.09ms +[2025-07-17 23:30:39] [Rank 0] step:7861/10000 train_time:1895434ms step_avg:241.12ms +[2025-07-17 23:30:39] [Rank 0] step:7861/10000 train_time:1895434ms step_avg:241.12ms +[2025-07-17 23:30:47] [Rank 0] PRINT: step:7875/10000 val_loss:4.5593 train_time:1899201ms step_avg:241.17ms +[2025-07-17 23:30:47] [Rank 0] PRINT: step:7875/10000 val_loss:4.5593 train_time:1899201ms step_avg:241.17ms +[2025-07-17 23:30:49] [Rank 0] step:7881/10000 train_time:1900445ms step_avg:241.14ms +[2025-07-17 23:30:49] [Rank 0] step:7881/10000 train_time:1900445ms step_avg:241.14ms +[2025-07-17 23:30:54] [Rank 0] step:7901/10000 train_time:1905462ms step_avg:241.17ms +[2025-07-17 23:30:54] [Rank 0] step:7901/10000 train_time:1905462ms step_avg:241.17ms +[2025-07-17 23:30:59] [Rank 0] step:7921/10000 train_time:1910480ms step_avg:241.19ms +[2025-07-17 23:30:59] [Rank 0] step:7921/10000 train_time:1910480ms step_avg:241.19ms +[2025-07-17 23:31:04] [Rank 0] step:7941/10000 train_time:1915511ms step_avg:241.22ms +[2025-07-17 23:31:04] [Rank 0] step:7941/10000 train_time:1915511ms step_avg:241.22ms +[2025-07-17 23:31:09] [Rank 0] step:7961/10000 train_time:1920545ms step_avg:241.24ms +[2025-07-17 23:31:09] [Rank 0] step:7961/10000 train_time:1920545ms step_avg:241.24ms +[2025-07-17 23:31:14] [Rank 0] step:7981/10000 train_time:1925565ms step_avg:241.27ms +[2025-07-17 23:31:14] [Rank 0] step:7981/10000 train_time:1925565ms step_avg:241.27ms +[2025-07-17 23:31:24] [Rank 0] PRINT: step:8000/10000 val_loss:4.5449 train_time:1930607ms step_avg:241.33ms +[2025-07-17 23:31:24] [Rank 0] PRINT: step:8000/10000 val_loss:4.5449 train_time:1930607ms step_avg:241.33ms +[2025-07-17 23:31:24] [Rank 0] step:8001/10000 train_time:1930623ms step_avg:241.30ms +[2025-07-17 23:31:24] [Rank 0] step:8001/10000 train_time:1930623ms step_avg:241.30ms +[2025-07-17 23:31:29] [Rank 0] step:8021/10000 train_time:1935626ms step_avg:241.32ms +[2025-07-17 23:31:29] 
[Rank 0] step:8021/10000 train_time:1935626ms step_avg:241.32ms +[2025-07-17 23:31:34] [Rank 0] step:8041/10000 train_time:1940673ms step_avg:241.35ms +[2025-07-17 23:31:34] [Rank 0] step:8041/10000 train_time:1940673ms step_avg:241.35ms +[2025-07-17 23:31:39] [Rank 0] step:8061/10000 train_time:1945697ms step_avg:241.37ms +[2025-07-17 23:31:39] [Rank 0] step:8061/10000 train_time:1945697ms step_avg:241.37ms +[2025-07-17 23:31:44] [Rank 0] step:8081/10000 train_time:1950729ms step_avg:241.40ms +[2025-07-17 23:31:44] [Rank 0] step:8081/10000 train_time:1950729ms step_avg:241.40ms +[2025-07-17 23:31:49] [Rank 0] step:8101/10000 train_time:1955753ms step_avg:241.42ms +[2025-07-17 23:31:49] [Rank 0] step:8101/10000 train_time:1955753ms step_avg:241.42ms +[2025-07-17 23:31:54] [Rank 0] step:8121/10000 train_time:1960777ms step_avg:241.45ms +[2025-07-17 23:31:54] [Rank 0] step:8121/10000 train_time:1960777ms step_avg:241.45ms +[2025-07-17 23:32:00] [Rank 0] PRINT: step:8125/10000 val_loss:4.6351 train_time:1962041ms step_avg:241.48ms +[2025-07-17 23:32:00] [Rank 0] PRINT: step:8125/10000 val_loss:4.6351 train_time:1962041ms step_avg:241.48ms +[2025-07-17 23:32:04] [Rank 0] step:8141/10000 train_time:1966287ms step_avg:241.53ms +[2025-07-17 23:32:04] [Rank 0] step:8141/10000 train_time:1966287ms step_avg:241.53ms +[2025-07-17 23:32:09] [Rank 0] step:8161/10000 train_time:1971346ms step_avg:241.56ms +[2025-07-17 23:32:09] [Rank 0] step:8161/10000 train_time:1971346ms step_avg:241.56ms +[2025-07-17 23:32:14] [Rank 0] step:8181/10000 train_time:1976436ms step_avg:241.59ms +[2025-07-17 23:32:14] [Rank 0] step:8181/10000 train_time:1976436ms step_avg:241.59ms +[2025-07-17 23:32:19] [Rank 0] step:8201/10000 train_time:1981506ms step_avg:241.62ms +[2025-07-17 23:32:19] [Rank 0] step:8201/10000 train_time:1981506ms step_avg:241.62ms +[2025-07-17 23:32:25] [Rank 0] step:8221/10000 train_time:1986590ms step_avg:241.65ms +[2025-07-17 23:32:25] [Rank 0] step:8221/10000 
train_time:1986590ms step_avg:241.65ms +[2025-07-17 23:32:30] [Rank 0] step:8241/10000 train_time:1991675ms step_avg:241.68ms +[2025-07-17 23:32:30] [Rank 0] step:8241/10000 train_time:1991675ms step_avg:241.68ms +[2025-07-17 23:32:37] [Rank 0] PRINT: step:8250/10000 val_loss:4.6708 train_time:1994227ms step_avg:241.72ms +[2025-07-17 23:32:37] [Rank 0] PRINT: step:8250/10000 val_loss:4.6708 train_time:1994227ms step_avg:241.72ms +[2025-07-17 23:32:39] [Rank 0] step:8261/10000 train_time:1996764ms step_avg:241.71ms +[2025-07-17 23:32:39] [Rank 0] step:8261/10000 train_time:1996764ms step_avg:241.71ms +[2025-07-17 23:32:45] [Rank 0] step:8281/10000 train_time:2001874ms step_avg:241.74ms +[2025-07-17 23:32:45] [Rank 0] step:8281/10000 train_time:2001874ms step_avg:241.74ms +[2025-07-17 23:32:50] [Rank 0] step:8301/10000 train_time:2006957ms step_avg:241.77ms +[2025-07-17 23:32:50] [Rank 0] step:8301/10000 train_time:2006957ms step_avg:241.77ms +[2025-07-17 23:32:55] [Rank 0] step:8321/10000 train_time:2012054ms step_avg:241.80ms +[2025-07-17 23:32:55] [Rank 0] step:8321/10000 train_time:2012054ms step_avg:241.80ms +[2025-07-17 23:33:00] [Rank 0] step:8341/10000 train_time:2017154ms step_avg:241.84ms +[2025-07-17 23:33:00] [Rank 0] step:8341/10000 train_time:2017154ms step_avg:241.84ms +[2025-07-17 23:33:05] [Rank 0] step:8361/10000 train_time:2022233ms step_avg:241.87ms +[2025-07-17 23:33:05] [Rank 0] step:8361/10000 train_time:2022233ms step_avg:241.87ms +[2025-07-17 23:33:13] [Rank 0] PRINT: step:8375/10000 val_loss:4.5963 train_time:2026053ms step_avg:241.92ms +[2025-07-17 23:33:13] [Rank 0] PRINT: step:8375/10000 val_loss:4.5963 train_time:2026053ms step_avg:241.92ms +[2025-07-17 23:33:15] [Rank 0] step:8381/10000 train_time:2027308ms step_avg:241.89ms +[2025-07-17 23:33:15] [Rank 0] step:8381/10000 train_time:2027308ms step_avg:241.89ms +[2025-07-17 23:33:20] [Rank 0] step:8401/10000 train_time:2032380ms step_avg:241.92ms +[2025-07-17 23:33:20] [Rank 0] 
step:8401/10000 train_time:2032380ms step_avg:241.92ms +[2025-07-17 23:33:25] [Rank 0] step:8421/10000 train_time:2037469ms step_avg:241.95ms +[2025-07-17 23:33:25] [Rank 0] step:8421/10000 train_time:2037469ms step_avg:241.95ms +[2025-07-17 23:33:30] [Rank 0] step:8441/10000 train_time:2042562ms step_avg:241.98ms +[2025-07-17 23:33:30] [Rank 0] step:8441/10000 train_time:2042562ms step_avg:241.98ms +[2025-07-17 23:33:35] [Rank 0] step:8461/10000 train_time:2047666ms step_avg:242.01ms +[2025-07-17 23:33:35] [Rank 0] step:8461/10000 train_time:2047666ms step_avg:242.01ms +[2025-07-17 23:33:40] [Rank 0] step:8481/10000 train_time:2052750ms step_avg:242.04ms +[2025-07-17 23:33:40] [Rank 0] step:8481/10000 train_time:2052750ms step_avg:242.04ms +[2025-07-17 23:33:50] [Rank 0] PRINT: step:8500/10000 val_loss:4.7491 train_time:2057858ms step_avg:242.10ms +[2025-07-17 23:33:50] [Rank 0] PRINT: step:8500/10000 val_loss:4.7491 train_time:2057858ms step_avg:242.10ms +[2025-07-17 23:33:50] [Rank 0] step:8501/10000 train_time:2057875ms step_avg:242.07ms +[2025-07-17 23:33:50] [Rank 0] step:8501/10000 train_time:2057875ms step_avg:242.07ms +[2025-07-17 23:33:55] [Rank 0] step:8521/10000 train_time:2062949ms step_avg:242.10ms +[2025-07-17 23:33:55] [Rank 0] step:8521/10000 train_time:2062949ms step_avg:242.10ms +[2025-07-17 23:34:00] [Rank 0] step:8541/10000 train_time:2068066ms step_avg:242.13ms +[2025-07-17 23:34:00] [Rank 0] step:8541/10000 train_time:2068066ms step_avg:242.13ms +[2025-07-17 23:34:05] [Rank 0] step:8561/10000 train_time:2073160ms step_avg:242.16ms +[2025-07-17 23:34:05] [Rank 0] step:8561/10000 train_time:2073160ms step_avg:242.16ms +[2025-07-17 23:34:11] [Rank 0] step:8581/10000 train_time:2078259ms step_avg:242.19ms +[2025-07-17 23:34:11] [Rank 0] step:8581/10000 train_time:2078259ms step_avg:242.19ms +[2025-07-17 23:34:16] [Rank 0] step:8601/10000 train_time:2083336ms step_avg:242.22ms +[2025-07-17 23:34:16] [Rank 0] step:8601/10000 train_time:2083336ms 
step_avg:242.22ms +[2025-07-17 23:34:21] [Rank 0] step:8621/10000 train_time:2088425ms step_avg:242.25ms +[2025-07-17 23:34:21] [Rank 0] step:8621/10000 train_time:2088425ms step_avg:242.25ms +[2025-07-17 23:34:26] [Rank 0] PRINT: step:8625/10000 val_loss:4.7252 train_time:2089704ms step_avg:242.28ms +[2025-07-17 23:34:26] [Rank 0] PRINT: step:8625/10000 val_loss:4.7252 train_time:2089704ms step_avg:242.28ms +[2025-07-17 23:34:31] [Rank 0] step:8641/10000 train_time:2094029ms step_avg:242.34ms +[2025-07-17 23:34:31] [Rank 0] step:8641/10000 train_time:2094029ms step_avg:242.34ms +[2025-07-17 23:34:36] [Rank 0] step:8661/10000 train_time:2099117ms step_avg:242.36ms +[2025-07-17 23:34:36] [Rank 0] step:8661/10000 train_time:2099117ms step_avg:242.36ms +[2025-07-17 23:34:41] [Rank 0] step:8681/10000 train_time:2104203ms step_avg:242.39ms +[2025-07-17 23:34:41] [Rank 0] step:8681/10000 train_time:2104203ms step_avg:242.39ms +[2025-07-17 23:34:46] [Rank 0] step:8701/10000 train_time:2109295ms step_avg:242.42ms +[2025-07-17 23:34:46] [Rank 0] step:8701/10000 train_time:2109295ms step_avg:242.42ms +[2025-07-17 23:34:51] [Rank 0] step:8721/10000 train_time:2114381ms step_avg:242.45ms +[2025-07-17 23:34:51] [Rank 0] step:8721/10000 train_time:2114381ms step_avg:242.45ms +[2025-07-17 23:34:56] [Rank 0] step:8741/10000 train_time:2119466ms step_avg:242.47ms +[2025-07-17 23:34:56] [Rank 0] step:8741/10000 train_time:2119466ms step_avg:242.47ms +[2025-07-17 23:35:03] [Rank 0] PRINT: step:8750/10000 val_loss:4.7319 train_time:2122008ms step_avg:242.52ms +[2025-07-17 23:35:03] [Rank 0] PRINT: step:8750/10000 val_loss:4.7319 train_time:2122008ms step_avg:242.52ms +[2025-07-17 23:35:06] [Rank 0] step:8761/10000 train_time:2124538ms step_avg:242.50ms +[2025-07-17 23:35:06] [Rank 0] step:8761/10000 train_time:2124538ms step_avg:242.50ms +[2025-07-17 23:35:11] [Rank 0] step:8781/10000 train_time:2129617ms step_avg:242.53ms +[2025-07-17 23:35:11] [Rank 0] step:8781/10000 
train_time:2129617ms step_avg:242.53ms +[2025-07-17 23:35:16] [Rank 0] step:8801/10000 train_time:2134704ms step_avg:242.55ms +[2025-07-17 23:35:16] [Rank 0] step:8801/10000 train_time:2134704ms step_avg:242.55ms +[2025-07-17 23:35:21] [Rank 0] step:8821/10000 train_time:2139780ms step_avg:242.58ms +[2025-07-17 23:35:21] [Rank 0] step:8821/10000 train_time:2139780ms step_avg:242.58ms +[2025-07-17 23:35:26] [Rank 0] step:8841/10000 train_time:2144876ms step_avg:242.61ms +[2025-07-17 23:35:26] [Rank 0] step:8841/10000 train_time:2144876ms step_avg:242.61ms +[2025-07-17 23:35:31] [Rank 0] step:8861/10000 train_time:2149974ms step_avg:242.63ms +[2025-07-17 23:35:31] [Rank 0] step:8861/10000 train_time:2149974ms step_avg:242.63ms +[2025-07-17 23:35:40] [Rank 0] PRINT: step:8875/10000 val_loss:4.7184 train_time:2153776ms step_avg:242.68ms +[2025-07-17 23:35:40] [Rank 0] PRINT: step:8875/10000 val_loss:4.7184 train_time:2153776ms step_avg:242.68ms +[2025-07-17 23:35:41] [Rank 0] step:8881/10000 train_time:2155039ms step_avg:242.66ms +[2025-07-17 23:35:41] [Rank 0] step:8881/10000 train_time:2155039ms step_avg:242.66ms +[2025-07-17 23:35:46] [Rank 0] step:8901/10000 train_time:2160112ms step_avg:242.68ms +[2025-07-17 23:35:46] [Rank 0] step:8901/10000 train_time:2160112ms step_avg:242.68ms +[2025-07-17 23:35:51] [Rank 0] step:8921/10000 train_time:2165190ms step_avg:242.71ms +[2025-07-17 23:35:51] [Rank 0] step:8921/10000 train_time:2165190ms step_avg:242.71ms +[2025-07-17 23:35:57] [Rank 0] step:8941/10000 train_time:2170275ms step_avg:242.73ms +[2025-07-17 23:35:57] [Rank 0] step:8941/10000 train_time:2170275ms step_avg:242.73ms +[2025-07-17 23:36:02] [Rank 0] step:8961/10000 train_time:2175361ms step_avg:242.76ms +[2025-07-17 23:36:02] [Rank 0] step:8961/10000 train_time:2175361ms step_avg:242.76ms +[2025-07-17 23:36:07] [Rank 0] step:8981/10000 train_time:2180450ms step_avg:242.78ms +[2025-07-17 23:36:07] [Rank 0] step:8981/10000 train_time:2180450ms step_avg:242.78ms 
+[2025-07-17 23:36:16] [Rank 0] PRINT: step:9000/10000 val_loss:4.6955 train_time:2185546ms step_avg:242.84ms +[2025-07-17 23:36:16] [Rank 0] PRINT: step:9000/10000 val_loss:4.6955 train_time:2185546ms step_avg:242.84ms +[2025-07-17 23:36:16] [Rank 0] step:9001/10000 train_time:2185563ms step_avg:242.81ms +[2025-07-17 23:36:16] [Rank 0] step:9001/10000 train_time:2185563ms step_avg:242.81ms +[2025-07-17 23:36:21] [Rank 0] step:9021/10000 train_time:2190631ms step_avg:242.84ms +[2025-07-17 23:36:21] [Rank 0] step:9021/10000 train_time:2190631ms step_avg:242.84ms +[2025-07-17 23:36:26] [Rank 0] step:9041/10000 train_time:2195747ms step_avg:242.87ms +[2025-07-17 23:36:26] [Rank 0] step:9041/10000 train_time:2195747ms step_avg:242.87ms +[2025-07-17 23:36:31] [Rank 0] step:9061/10000 train_time:2200840ms step_avg:242.89ms +[2025-07-17 23:36:31] [Rank 0] step:9061/10000 train_time:2200840ms step_avg:242.89ms +[2025-07-17 23:36:37] [Rank 0] step:9081/10000 train_time:2205953ms step_avg:242.92ms +[2025-07-17 23:36:37] [Rank 0] step:9081/10000 train_time:2205953ms step_avg:242.92ms +[2025-07-17 23:36:42] [Rank 0] step:9101/10000 train_time:2211064ms step_avg:242.95ms +[2025-07-17 23:36:42] [Rank 0] step:9101/10000 train_time:2211064ms step_avg:242.95ms +[2025-07-17 23:36:47] [Rank 0] step:9121/10000 train_time:2216173ms step_avg:242.97ms +[2025-07-17 23:36:47] [Rank 0] step:9121/10000 train_time:2216173ms step_avg:242.97ms +[2025-07-17 23:36:53] [Rank 0] PRINT: step:9125/10000 val_loss:4.7649 train_time:2217448ms step_avg:243.01ms +[2025-07-17 23:36:53] [Rank 0] PRINT: step:9125/10000 val_loss:4.7649 train_time:2217448ms step_avg:243.01ms +[2025-07-17 23:36:57] [Rank 0] step:9141/10000 train_time:2221249ms step_avg:243.00ms +[2025-07-17 23:36:57] [Rank 0] step:9141/10000 train_time:2221249ms step_avg:243.00ms +[2025-07-17 23:37:02] [Rank 0] step:9161/10000 train_time:2226867ms step_avg:243.08ms +[2025-07-17 23:37:02] [Rank 0] step:9161/10000 train_time:2226867ms 
step_avg:243.08ms +[2025-07-17 23:37:07] [Rank 0] step:9181/10000 train_time:2231959ms step_avg:243.11ms +[2025-07-17 23:37:07] [Rank 0] step:9181/10000 train_time:2231959ms step_avg:243.11ms +[2025-07-17 23:37:12] [Rank 0] step:9201/10000 train_time:2237049ms step_avg:243.13ms +[2025-07-17 23:37:12] [Rank 0] step:9201/10000 train_time:2237049ms step_avg:243.13ms +[2025-07-17 23:37:18] [Rank 0] step:9221/10000 train_time:2242173ms step_avg:243.16ms +[2025-07-17 23:37:18] [Rank 0] step:9221/10000 train_time:2242173ms step_avg:243.16ms +[2025-07-17 23:37:23] [Rank 0] step:9241/10000 train_time:2247271ms step_avg:243.18ms +[2025-07-17 23:37:23] [Rank 0] step:9241/10000 train_time:2247271ms step_avg:243.18ms +[2025-07-17 23:37:30] [Rank 0] PRINT: step:9250/10000 val_loss:4.7105 train_time:2249826ms step_avg:243.22ms +[2025-07-17 23:37:30] [Rank 0] PRINT: step:9250/10000 val_loss:4.7105 train_time:2249826ms step_avg:243.22ms +[2025-07-17 23:37:33] [Rank 0] step:9261/10000 train_time:2252373ms step_avg:243.21ms +[2025-07-17 23:37:33] [Rank 0] step:9261/10000 train_time:2252373ms step_avg:243.21ms +[2025-07-17 23:37:38] [Rank 0] step:9281/10000 train_time:2257442ms step_avg:243.23ms +[2025-07-17 23:37:38] [Rank 0] step:9281/10000 train_time:2257442ms step_avg:243.23ms +[2025-07-17 23:37:43] [Rank 0] step:9301/10000 train_time:2262537ms step_avg:243.26ms +[2025-07-17 23:37:43] [Rank 0] step:9301/10000 train_time:2262537ms step_avg:243.26ms +[2025-07-17 23:37:48] [Rank 0] step:9321/10000 train_time:2267648ms step_avg:243.28ms +[2025-07-17 23:37:48] [Rank 0] step:9321/10000 train_time:2267648ms step_avg:243.28ms +[2025-07-17 23:37:53] [Rank 0] step:9341/10000 train_time:2272743ms step_avg:243.31ms +[2025-07-17 23:37:53] [Rank 0] step:9341/10000 train_time:2272743ms step_avg:243.31ms +[2025-07-17 23:37:58] [Rank 0] step:9361/10000 train_time:2277839ms step_avg:243.33ms +[2025-07-17 23:37:58] [Rank 0] step:9361/10000 train_time:2277839ms step_avg:243.33ms +[2025-07-17 
23:38:06] [Rank 0] PRINT: step:9375/10000 val_loss:4.6555 train_time:2281667ms step_avg:243.38ms +[2025-07-17 23:38:06] [Rank 0] PRINT: step:9375/10000 val_loss:4.6555 train_time:2281667ms step_avg:243.38ms +[2025-07-17 23:38:08] [Rank 0] step:9381/10000 train_time:2282932ms step_avg:243.36ms +[2025-07-17 23:38:08] [Rank 0] step:9381/10000 train_time:2282932ms step_avg:243.36ms +[2025-07-17 23:38:13] [Rank 0] step:9401/10000 train_time:2288009ms step_avg:243.38ms +[2025-07-17 23:38:13] [Rank 0] step:9401/10000 train_time:2288009ms step_avg:243.38ms +[2025-07-17 23:38:18] [Rank 0] step:9421/10000 train_time:2293103ms step_avg:243.40ms +[2025-07-17 23:38:18] [Rank 0] step:9421/10000 train_time:2293103ms step_avg:243.40ms +[2025-07-17 23:38:23] [Rank 0] step:9441/10000 train_time:2298206ms step_avg:243.43ms +[2025-07-17 23:38:23] [Rank 0] step:9441/10000 train_time:2298206ms step_avg:243.43ms +[2025-07-17 23:38:28] [Rank 0] step:9461/10000 train_time:2303316ms step_avg:243.45ms +[2025-07-17 23:38:28] [Rank 0] step:9461/10000 train_time:2303316ms step_avg:243.45ms +[2025-07-17 23:38:33] [Rank 0] step:9481/10000 train_time:2308421ms step_avg:243.48ms +[2025-07-17 23:38:33] [Rank 0] step:9481/10000 train_time:2308421ms step_avg:243.48ms +[2025-07-17 23:38:43] [Rank 0] PRINT: step:9500/10000 val_loss:4.6707 train_time:2313561ms step_avg:243.53ms +[2025-07-17 23:38:43] [Rank 0] PRINT: step:9500/10000 val_loss:4.6707 train_time:2313561ms step_avg:243.53ms +[2025-07-17 23:38:43] [Rank 0] step:9501/10000 train_time:2313577ms step_avg:243.51ms +[2025-07-17 23:38:43] [Rank 0] step:9501/10000 train_time:2313577ms step_avg:243.51ms +[2025-07-17 23:38:48] [Rank 0] step:9521/10000 train_time:2318662ms step_avg:243.53ms +[2025-07-17 23:38:48] [Rank 0] step:9521/10000 train_time:2318662ms step_avg:243.53ms +[2025-07-17 23:38:53] [Rank 0] step:9541/10000 train_time:2323772ms step_avg:243.56ms +[2025-07-17 23:38:53] [Rank 0] step:9541/10000 train_time:2323772ms step_avg:243.56ms 
+[2025-07-17 23:38:59] [Rank 0] step:9561/10000 train_time:2328869ms step_avg:243.58ms +[2025-07-17 23:38:59] [Rank 0] step:9561/10000 train_time:2328869ms step_avg:243.58ms +[2025-07-17 23:39:04] [Rank 0] step:9581/10000 train_time:2333957ms step_avg:243.60ms +[2025-07-17 23:39:04] [Rank 0] step:9581/10000 train_time:2333957ms step_avg:243.60ms +[2025-07-17 23:39:09] [Rank 0] step:9601/10000 train_time:2339051ms step_avg:243.63ms +[2025-07-17 23:39:09] [Rank 0] step:9601/10000 train_time:2339051ms step_avg:243.63ms +[2025-07-17 23:39:14] [Rank 0] step:9621/10000 train_time:2344177ms step_avg:243.65ms +[2025-07-17 23:39:14] [Rank 0] step:9621/10000 train_time:2344177ms step_avg:243.65ms +[2025-07-17 23:39:20] [Rank 0] PRINT: step:9625/10000 val_loss:4.7649 train_time:2345457ms step_avg:243.68ms +[2025-07-17 23:39:20] [Rank 0] PRINT: step:9625/10000 val_loss:4.7649 train_time:2345457ms step_avg:243.68ms +[2025-07-17 23:39:24] [Rank 0] step:9641/10000 train_time:2349299ms step_avg:243.68ms +[2025-07-17 23:39:24] [Rank 0] step:9641/10000 train_time:2349299ms step_avg:243.68ms +[2025-07-17 23:39:29] [Rank 0] step:9661/10000 train_time:2354966ms step_avg:243.76ms +[2025-07-17 23:39:29] [Rank 0] step:9661/10000 train_time:2354966ms step_avg:243.76ms +[2025-07-17 23:39:35] [Rank 0] step:9681/10000 train_time:2360124ms step_avg:243.79ms +[2025-07-17 23:39:35] [Rank 0] step:9681/10000 train_time:2360124ms step_avg:243.79ms +[2025-07-17 23:39:40] [Rank 0] step:9701/10000 train_time:2365280ms step_avg:243.82ms +[2025-07-17 23:39:40] [Rank 0] step:9701/10000 train_time:2365280ms step_avg:243.82ms +[2025-07-17 23:39:45] [Rank 0] step:9721/10000 train_time:2370415ms step_avg:243.84ms +[2025-07-17 23:39:45] [Rank 0] step:9721/10000 train_time:2370415ms step_avg:243.84ms +[2025-07-17 23:39:50] [Rank 0] step:9741/10000 train_time:2375571ms step_avg:243.87ms +[2025-07-17 23:39:50] [Rank 0] step:9741/10000 train_time:2375571ms step_avg:243.87ms +[2025-07-17 23:39:57] [Rank 0] PRINT: 
step:9750/10000 val_loss:4.5639 train_time:2378140ms step_avg:243.91ms +[2025-07-17 23:39:57] [Rank 0] PRINT: step:9750/10000 val_loss:4.5639 train_time:2378140ms step_avg:243.91ms +[2025-07-17 23:40:00] [Rank 0] step:9761/10000 train_time:2380698ms step_avg:243.90ms +[2025-07-17 23:40:00] [Rank 0] step:9761/10000 train_time:2380698ms step_avg:243.90ms +[2025-07-17 23:40:05] [Rank 0] step:9781/10000 train_time:2385844ms step_avg:243.93ms +[2025-07-17 23:40:05] [Rank 0] step:9781/10000 train_time:2385844ms step_avg:243.93ms +[2025-07-17 23:40:10] [Rank 0] step:9801/10000 train_time:2390978ms step_avg:243.95ms +[2025-07-17 23:40:10] [Rank 0] step:9801/10000 train_time:2390978ms step_avg:243.95ms +[2025-07-17 23:40:15] [Rank 0] step:9821/10000 train_time:2396123ms step_avg:243.98ms +[2025-07-17 23:40:15] [Rank 0] step:9821/10000 train_time:2396123ms step_avg:243.98ms +[2025-07-17 23:40:21] [Rank 0] step:9841/10000 train_time:2401252ms step_avg:244.00ms +[2025-07-17 23:40:21] [Rank 0] step:9841/10000 train_time:2401252ms step_avg:244.00ms +[2025-07-17 23:40:26] [Rank 0] step:9861/10000 train_time:2406382ms step_avg:244.03ms +[2025-07-17 23:40:26] [Rank 0] step:9861/10000 train_time:2406382ms step_avg:244.03ms +[2025-07-17 23:40:34] [Rank 0] PRINT: step:9875/10000 val_loss:4.5766 train_time:2410236ms step_avg:244.07ms +[2025-07-17 23:40:34] [Rank 0] PRINT: step:9875/10000 val_loss:4.5766 train_time:2410236ms step_avg:244.07ms +[2025-07-17 23:40:35] [Rank 0] step:9881/10000 train_time:2411518ms step_avg:244.06ms +[2025-07-17 23:40:35] [Rank 0] step:9881/10000 train_time:2411518ms step_avg:244.06ms +[2025-07-17 23:40:40] [Rank 0] step:9901/10000 train_time:2416660ms step_avg:244.08ms +[2025-07-17 23:40:40] [Rank 0] step:9901/10000 train_time:2416660ms step_avg:244.08ms +[2025-07-17 23:40:46] [Rank 0] step:9921/10000 train_time:2421815ms step_avg:244.11ms +[2025-07-17 23:40:46] [Rank 0] step:9921/10000 train_time:2421815ms step_avg:244.11ms +[2025-07-17 23:40:51] [Rank 0] 
step:9941/10000 train_time:2426995ms step_avg:244.14ms +[2025-07-17 23:40:51] [Rank 0] step:9941/10000 train_time:2426995ms step_avg:244.14ms +[2025-07-17 23:40:56] [Rank 0] step:9961/10000 train_time:2432161ms step_avg:244.17ms +[2025-07-17 23:40:56] [Rank 0] step:9961/10000 train_time:2432161ms step_avg:244.17ms +[2025-07-17 23:41:01] [Rank 0] step:9981/10000 train_time:2437353ms step_avg:244.20ms +[2025-07-17 23:41:01] [Rank 0] step:9981/10000 train_time:2437353ms step_avg:244.20ms +[2025-07-17 23:41:06] [Rank 0] step:10000/10000 train_time:2442236ms step_avg:244.22ms +[2025-07-17 23:41:06] [Rank 0] step:10000/10000 train_time:2442236ms step_avg:244.22ms +[2025-07-17 23:41:10] [Rank 0] PRINT: step:10000/10000 val_loss:4.5978 train_time:2442501ms step_avg:244.25ms +[2025-07-17 23:41:10] [Rank 0] PRINT: step:10000/10000 val_loss:4.5978 train_time:2442501ms step_avg:244.25ms +[2025-07-17 23:41:10] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 23:41:10 2025 --- +[2025-07-17 23:41:10] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 23:41:10 2025 --- +[2025-07-17 23:41:10] [Rank 0] PRINT: Peak memory allocated: 30775 MiB reserved: 31136 MiB +[2025-07-17 23:41:10] [Rank 0] PRINT: Peak memory allocated: 30775 MiB reserved: 31136 MiB diff --git a/logs_norope/diff_modes/mode_8_param_norope_seed_42/config.json b/logs_norope/diff_modes/mode_8_param_norope_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..7a0a038b8946c89865dd7fad2b40f1a34969d004 --- /dev/null +++ b/logs_norope/diff_modes/mode_8_param_norope_seed_42/config.json @@ -0,0 +1,22 @@ +{ + "cli_args": { + "unet": false, + "seed": 42, + "optimizer_mode": 8, + "model_parameterization": "norope" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 10485760, + "train_seq_len": 49152, + 
"val_seq_len": 262144, + "num_iterations": 10000, + "cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 125, + "save_checkpoint": false + }, + "run_uuid_for_log": "25e29705-829c-4cfb-ad30-4ac811d8b705", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_norope/diff_modes/mode_8_param_norope_seed_42/training_log_25e29705-829c-4cfb-ad30-4ac811d8b705.txt b/logs_norope/diff_modes/mode_8_param_norope_seed_42/training_log_25e29705-829c-4cfb-ad30-4ac811d8b705.txt new file mode 100644 index 0000000000000000000000000000000000000000..85293e34ea949684953fd8febb09cbc6c1eb6e16 --- /dev/null +++ b/logs_norope/diff_modes/mode_8_param_norope_seed_42/training_log_25e29705-829c-4cfb-ad30-4ac811d8b705.txt @@ -0,0 +1,2360 @@ +[2025-07-17 16:29:36] [Rank 0] PRINT: --- Script Start: Thu Jul 17 16:29:36 2025 --- +[2025-07-17 16:29:36] [Rank 0] PRINT: --- Script Start: Thu Jul 17 16:29:36 2025 --- +[2025-07-17 16:29:36] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=8, model_parameterization='norope') +[2025-07-17 16:29:36] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=8, model_parameterization='norope') +[2025-07-17 16:29:36] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 16:29:36] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-17 16:29:36] [Rank 0] PRINT: Using fixed seed: 42 +[2025-07-17 16:29:36] [Rank 0] PRINT: Using fixed seed: 42 +[2025-07-17 16:29:36] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_8_param_norope_seed_42 +[2025-07-17 16:29:36] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_8_param_norope_seed_42 +[2025-07-17 16:29:36] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path 
+import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * 
num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used 
for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + 
config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 16:29:36] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + 
num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + 
run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-17 16:29:36] [Rank 0] PRINT: Constructing model... +[2025-07-17 16:29:36] [Rank 0] PRINT: Constructing model... +[2025-07-17 16:29:39] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 16:29:39] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-17 16:29:39] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 16:29:39] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-17 16:29:39] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 16:29:39] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-17 16:29:39] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 16:29:39] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-17 16:29:39] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 8 +[2025-07-17 16:29:39] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 8 +[2025-07-17 16:29:39] [Rank 0] PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: 0.001). +[2025-07-17 16:29:39] [Rank 0] PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: 0.001). +[2025-07-17 16:29:39] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-07-17 16:29:39] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-07-17 16:29:39] [Rank 0] PRINT: Muon optimizer is active with 34 parameters. +[2025-07-17 16:29:39] [Rank 0] PRINT: Muon optimizer is active with 34 parameters. +[2025-07-17 16:29:39] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 16:29:39] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-17 16:29:39] [Rank 0] PRINT: Model compilation complete. +[2025-07-17 16:29:39] [Rank 0] PRINT: Model compilation complete. +[2025-07-17 16:29:39] [Rank 0] PRINT: Starting warmup... 
+[2025-07-17 16:29:39] [Rank 0] PRINT: Starting warmup... +[2025-07-17 16:30:47] [Rank 0] PRINT: Warmup complete. +[2025-07-17 16:30:47] [Rank 0] PRINT: Warmup complete. +[2025-07-17 16:30:47] [Rank 0] PRINT: Starting training... +[2025-07-17 16:30:47] [Rank 0] PRINT: Starting training... +[2025-07-17 16:30:58] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 16:30:58] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-17 16:31:02] [Rank 0] step:21/10000 train_time:4535ms step_avg:215.94ms +[2025-07-17 16:31:02] [Rank 0] step:21/10000 train_time:4535ms step_avg:215.94ms +[2025-07-17 16:31:07] [Rank 0] step:41/10000 train_time:9027ms step_avg:220.18ms +[2025-07-17 16:31:07] [Rank 0] step:41/10000 train_time:9027ms step_avg:220.18ms +[2025-07-17 16:31:11] [Rank 0] step:61/10000 train_time:13530ms step_avg:221.80ms +[2025-07-17 16:31:11] [Rank 0] step:61/10000 train_time:13530ms step_avg:221.80ms +[2025-07-17 16:31:16] [Rank 0] step:81/10000 train_time:18028ms step_avg:222.57ms +[2025-07-17 16:31:16] [Rank 0] step:81/10000 train_time:18028ms step_avg:222.57ms +[2025-07-17 16:31:20] [Rank 0] step:101/10000 train_time:22531ms step_avg:223.08ms +[2025-07-17 16:31:20] [Rank 0] step:101/10000 train_time:22531ms step_avg:223.08ms +[2025-07-17 16:31:25] [Rank 0] step:121/10000 train_time:27037ms step_avg:223.45ms +[2025-07-17 16:31:25] [Rank 0] step:121/10000 train_time:27037ms step_avg:223.45ms +[2025-07-17 16:31:30] [Rank 0] PRINT: step:125/10000 val_loss:5.1240 train_time:28220ms step_avg:225.76ms +[2025-07-17 16:31:30] [Rank 0] PRINT: step:125/10000 val_loss:5.1240 train_time:28220ms step_avg:225.76ms +[2025-07-17 16:31:34] [Rank 0] step:141/10000 train_time:31540ms step_avg:223.69ms +[2025-07-17 16:31:34] [Rank 0] step:141/10000 train_time:31540ms step_avg:223.69ms +[2025-07-17 16:31:38] [Rank 0] step:161/10000 train_time:36048ms step_avg:223.90ms +[2025-07-17 16:31:38] [Rank 0] step:161/10000 
train_time:36048ms step_avg:223.90ms +[2025-07-17 16:31:43] [Rank 0] step:181/10000 train_time:40554ms step_avg:224.06ms +[2025-07-17 16:31:43] [Rank 0] step:181/10000 train_time:40554ms step_avg:224.06ms +[2025-07-17 16:31:47] [Rank 0] step:201/10000 train_time:45059ms step_avg:224.17ms +[2025-07-17 16:31:47] [Rank 0] step:201/10000 train_time:45059ms step_avg:224.17ms +[2025-07-17 16:31:52] [Rank 0] step:221/10000 train_time:49564ms step_avg:224.27ms +[2025-07-17 16:31:52] [Rank 0] step:221/10000 train_time:49564ms step_avg:224.27ms +[2025-07-17 16:31:56] [Rank 0] step:241/10000 train_time:54069ms step_avg:224.35ms +[2025-07-17 16:31:56] [Rank 0] step:241/10000 train_time:54069ms step_avg:224.35ms +[2025-07-17 16:32:03] [Rank 0] PRINT: step:250/10000 val_loss:4.7179 train_time:56379ms step_avg:225.52ms +[2025-07-17 16:32:03] [Rank 0] PRINT: step:250/10000 val_loss:4.7179 train_time:56379ms step_avg:225.52ms +[2025-07-17 16:32:05] [Rank 0] step:261/10000 train_time:58574ms step_avg:224.42ms +[2025-07-17 16:32:05] [Rank 0] step:261/10000 train_time:58574ms step_avg:224.42ms +[2025-07-17 16:32:10] [Rank 0] step:281/10000 train_time:63076ms step_avg:224.47ms +[2025-07-17 16:32:10] [Rank 0] step:281/10000 train_time:63076ms step_avg:224.47ms +[2025-07-17 16:32:14] [Rank 0] step:301/10000 train_time:67583ms step_avg:224.53ms +[2025-07-17 16:32:14] [Rank 0] step:301/10000 train_time:67583ms step_avg:224.53ms +[2025-07-17 16:32:19] [Rank 0] step:321/10000 train_time:72087ms step_avg:224.57ms +[2025-07-17 16:32:19] [Rank 0] step:321/10000 train_time:72087ms step_avg:224.57ms +[2025-07-17 16:32:23] [Rank 0] step:341/10000 train_time:76597ms step_avg:224.63ms +[2025-07-17 16:32:23] [Rank 0] step:341/10000 train_time:76597ms step_avg:224.63ms +[2025-07-17 16:32:28] [Rank 0] step:361/10000 train_time:81105ms step_avg:224.67ms +[2025-07-17 16:32:28] [Rank 0] step:361/10000 train_time:81105ms step_avg:224.67ms +[2025-07-17 16:32:35] [Rank 0] PRINT: step:375/10000 
val_loss:4.5141 train_time:84539ms step_avg:225.44ms +[2025-07-17 16:32:35] [Rank 0] PRINT: step:375/10000 val_loss:4.5141 train_time:84539ms step_avg:225.44ms +[2025-07-17 16:32:37] [Rank 0] step:381/10000 train_time:85613ms step_avg:224.71ms +[2025-07-17 16:32:37] [Rank 0] step:381/10000 train_time:85613ms step_avg:224.71ms +[2025-07-17 16:32:41] [Rank 0] step:401/10000 train_time:90117ms step_avg:224.73ms +[2025-07-17 16:32:41] [Rank 0] step:401/10000 train_time:90117ms step_avg:224.73ms +[2025-07-17 16:32:46] [Rank 0] step:421/10000 train_time:94631ms step_avg:224.78ms +[2025-07-17 16:32:46] [Rank 0] step:421/10000 train_time:94631ms step_avg:224.78ms +[2025-07-17 16:32:50] [Rank 0] step:441/10000 train_time:99141ms step_avg:224.81ms +[2025-07-17 16:32:50] [Rank 0] step:441/10000 train_time:99141ms step_avg:224.81ms +[2025-07-17 16:32:55] [Rank 0] step:461/10000 train_time:103656ms step_avg:224.85ms +[2025-07-17 16:32:55] [Rank 0] step:461/10000 train_time:103656ms step_avg:224.85ms +[2025-07-17 16:32:59] [Rank 0] step:481/10000 train_time:108169ms step_avg:224.88ms +[2025-07-17 16:32:59] [Rank 0] step:481/10000 train_time:108169ms step_avg:224.88ms +[2025-07-17 16:33:08] [Rank 0] PRINT: step:500/10000 val_loss:4.4522 train_time:112735ms step_avg:225.47ms +[2025-07-17 16:33:08] [Rank 0] PRINT: step:500/10000 val_loss:4.4522 train_time:112735ms step_avg:225.47ms +[2025-07-17 16:33:08] [Rank 0] step:501/10000 train_time:112751ms step_avg:225.05ms +[2025-07-17 16:33:08] [Rank 0] step:501/10000 train_time:112751ms step_avg:225.05ms +[2025-07-17 16:33:13] [Rank 0] step:521/10000 train_time:117194ms step_avg:224.94ms +[2025-07-17 16:33:13] [Rank 0] step:521/10000 train_time:117194ms step_avg:224.94ms +[2025-07-17 16:33:17] [Rank 0] step:541/10000 train_time:121712ms step_avg:224.98ms +[2025-07-17 16:33:17] [Rank 0] step:541/10000 train_time:121712ms step_avg:224.98ms +[2025-07-17 16:33:22] [Rank 0] step:561/10000 train_time:126230ms step_avg:225.01ms +[2025-07-17 
16:33:22] [Rank 0] step:561/10000 train_time:126230ms step_avg:225.01ms +[2025-07-17 16:33:26] [Rank 0] step:581/10000 train_time:130746ms step_avg:225.04ms +[2025-07-17 16:33:26] [Rank 0] step:581/10000 train_time:130746ms step_avg:225.04ms +[2025-07-17 16:33:31] [Rank 0] step:601/10000 train_time:135263ms step_avg:225.06ms +[2025-07-17 16:33:31] [Rank 0] step:601/10000 train_time:135263ms step_avg:225.06ms +[2025-07-17 16:33:35] [Rank 0] step:621/10000 train_time:139782ms step_avg:225.09ms +[2025-07-17 16:33:35] [Rank 0] step:621/10000 train_time:139782ms step_avg:225.09ms +[2025-07-17 16:33:41] [Rank 0] PRINT: step:625/10000 val_loss:4.4844 train_time:140971ms step_avg:225.55ms +[2025-07-17 16:33:41] [Rank 0] PRINT: step:625/10000 val_loss:4.4844 train_time:140971ms step_avg:225.55ms +[2025-07-17 16:33:44] [Rank 0] step:641/10000 train_time:144297ms step_avg:225.11ms +[2025-07-17 16:33:44] [Rank 0] step:641/10000 train_time:144297ms step_avg:225.11ms +[2025-07-17 16:33:49] [Rank 0] step:661/10000 train_time:148815ms step_avg:225.14ms +[2025-07-17 16:33:49] [Rank 0] step:661/10000 train_time:148815ms step_avg:225.14ms +[2025-07-17 16:33:53] [Rank 0] step:681/10000 train_time:153329ms step_avg:225.15ms +[2025-07-17 16:33:53] [Rank 0] step:681/10000 train_time:153329ms step_avg:225.15ms +[2025-07-17 16:33:58] [Rank 0] step:701/10000 train_time:157851ms step_avg:225.18ms +[2025-07-17 16:33:58] [Rank 0] step:701/10000 train_time:157851ms step_avg:225.18ms +[2025-07-17 16:34:02] [Rank 0] step:721/10000 train_time:162368ms step_avg:225.20ms +[2025-07-17 16:34:02] [Rank 0] step:721/10000 train_time:162368ms step_avg:225.20ms +[2025-07-17 16:34:07] [Rank 0] step:741/10000 train_time:166888ms step_avg:225.22ms +[2025-07-17 16:34:07] [Rank 0] step:741/10000 train_time:166888ms step_avg:225.22ms +[2025-07-17 16:34:14] [Rank 0] PRINT: step:750/10000 val_loss:4.6065 train_time:169222ms step_avg:225.63ms +[2025-07-17 16:34:14] [Rank 0] PRINT: step:750/10000 val_loss:4.6065 
train_time:169222ms step_avg:225.63ms +[2025-07-17 16:34:16] [Rank 0] step:761/10000 train_time:171440ms step_avg:225.28ms +[2025-07-17 16:34:16] [Rank 0] step:761/10000 train_time:171440ms step_avg:225.28ms +[2025-07-17 16:34:21] [Rank 0] step:781/10000 train_time:175995ms step_avg:225.35ms +[2025-07-17 16:34:21] [Rank 0] step:781/10000 train_time:175995ms step_avg:225.35ms +[2025-07-17 16:34:25] [Rank 0] step:801/10000 train_time:180553ms step_avg:225.41ms +[2025-07-17 16:34:25] [Rank 0] step:801/10000 train_time:180553ms step_avg:225.41ms +[2025-07-17 16:34:30] [Rank 0] step:821/10000 train_time:185109ms step_avg:225.47ms +[2025-07-17 16:34:30] [Rank 0] step:821/10000 train_time:185109ms step_avg:225.47ms +[2025-07-17 16:34:34] [Rank 0] step:841/10000 train_time:189666ms step_avg:225.52ms +[2025-07-17 16:34:34] [Rank 0] step:841/10000 train_time:189666ms step_avg:225.52ms +[2025-07-17 16:34:39] [Rank 0] step:861/10000 train_time:194220ms step_avg:225.58ms +[2025-07-17 16:34:39] [Rank 0] step:861/10000 train_time:194220ms step_avg:225.58ms +[2025-07-17 16:34:46] [Rank 0] PRINT: step:875/10000 val_loss:4.6640 train_time:197694ms step_avg:225.94ms +[2025-07-17 16:34:46] [Rank 0] PRINT: step:875/10000 val_loss:4.6640 train_time:197694ms step_avg:225.94ms +[2025-07-17 16:34:48] [Rank 0] step:881/10000 train_time:198777ms step_avg:225.63ms +[2025-07-17 16:34:48] [Rank 0] step:881/10000 train_time:198777ms step_avg:225.63ms +[2025-07-17 16:34:52] [Rank 0] step:901/10000 train_time:203333ms step_avg:225.67ms +[2025-07-17 16:34:52] [Rank 0] step:901/10000 train_time:203333ms step_avg:225.67ms +[2025-07-17 16:34:57] [Rank 0] step:921/10000 train_time:207892ms step_avg:225.72ms +[2025-07-17 16:34:57] [Rank 0] step:921/10000 train_time:207892ms step_avg:225.72ms +[2025-07-17 16:35:01] [Rank 0] step:941/10000 train_time:212450ms step_avg:225.77ms +[2025-07-17 16:35:01] [Rank 0] step:941/10000 train_time:212450ms step_avg:225.77ms +[2025-07-17 16:35:06] [Rank 0] 
step:961/10000 train_time:217012ms step_avg:225.82ms +[2025-07-17 16:35:06] [Rank 0] step:961/10000 train_time:217012ms step_avg:225.82ms +[2025-07-17 16:35:11] [Rank 0] step:981/10000 train_time:221569ms step_avg:225.86ms +[2025-07-17 16:35:11] [Rank 0] step:981/10000 train_time:221569ms step_avg:225.86ms +[2025-07-17 16:35:19] [Rank 0] PRINT: step:1000/10000 val_loss:4.6370 train_time:226184ms step_avg:226.18ms +[2025-07-17 16:35:19] [Rank 0] PRINT: step:1000/10000 val_loss:4.6370 train_time:226184ms step_avg:226.18ms +[2025-07-17 16:35:19] [Rank 0] step:1001/10000 train_time:226201ms step_avg:225.98ms +[2025-07-17 16:35:19] [Rank 0] step:1001/10000 train_time:226201ms step_avg:225.98ms +[2025-07-17 16:35:24] [Rank 0] step:1021/10000 train_time:230693ms step_avg:225.95ms +[2025-07-17 16:35:24] [Rank 0] step:1021/10000 train_time:230693ms step_avg:225.95ms +[2025-07-17 16:35:28] [Rank 0] step:1041/10000 train_time:235258ms step_avg:225.99ms +[2025-07-17 16:35:28] [Rank 0] step:1041/10000 train_time:235258ms step_avg:225.99ms +[2025-07-17 16:35:33] [Rank 0] step:1061/10000 train_time:239824ms step_avg:226.04ms +[2025-07-17 16:35:33] [Rank 0] step:1061/10000 train_time:239824ms step_avg:226.04ms +[2025-07-17 16:35:37] [Rank 0] step:1081/10000 train_time:244390ms step_avg:226.08ms +[2025-07-17 16:35:37] [Rank 0] step:1081/10000 train_time:244390ms step_avg:226.08ms +[2025-07-17 16:35:42] [Rank 0] step:1101/10000 train_time:248959ms step_avg:226.12ms +[2025-07-17 16:35:42] [Rank 0] step:1101/10000 train_time:248959ms step_avg:226.12ms +[2025-07-17 16:35:47] [Rank 0] step:1121/10000 train_time:253525ms step_avg:226.16ms +[2025-07-17 16:35:47] [Rank 0] step:1121/10000 train_time:253525ms step_avg:226.16ms +[2025-07-17 16:35:52] [Rank 0] PRINT: step:1125/10000 val_loss:4.6244 train_time:254726ms step_avg:226.42ms +[2025-07-17 16:35:52] [Rank 0] PRINT: step:1125/10000 val_loss:4.6244 train_time:254726ms step_avg:226.42ms +[2025-07-17 16:35:56] [Rank 0] step:1141/10000 
train_time:258090ms step_avg:226.20ms +[2025-07-17 16:35:56] [Rank 0] step:1141/10000 train_time:258090ms step_avg:226.20ms +[2025-07-17 16:36:00] [Rank 0] step:1161/10000 train_time:262654ms step_avg:226.23ms +[2025-07-17 16:36:00] [Rank 0] step:1161/10000 train_time:262654ms step_avg:226.23ms +[2025-07-17 16:36:05] [Rank 0] step:1181/10000 train_time:267220ms step_avg:226.27ms +[2025-07-17 16:36:05] [Rank 0] step:1181/10000 train_time:267220ms step_avg:226.27ms +[2025-07-17 16:36:09] [Rank 0] step:1201/10000 train_time:271792ms step_avg:226.30ms +[2025-07-17 16:36:09] [Rank 0] step:1201/10000 train_time:271792ms step_avg:226.30ms +[2025-07-17 16:36:14] [Rank 0] step:1221/10000 train_time:276360ms step_avg:226.34ms +[2025-07-17 16:36:14] [Rank 0] step:1221/10000 train_time:276360ms step_avg:226.34ms +[2025-07-17 16:36:18] [Rank 0] step:1241/10000 train_time:280932ms step_avg:226.38ms +[2025-07-17 16:36:18] [Rank 0] step:1241/10000 train_time:280932ms step_avg:226.38ms +[2025-07-17 16:36:25] [Rank 0] PRINT: step:1250/10000 val_loss:4.6933 train_time:283274ms step_avg:226.62ms +[2025-07-17 16:36:25] [Rank 0] PRINT: step:1250/10000 val_loss:4.6933 train_time:283274ms step_avg:226.62ms +[2025-07-17 16:36:27] [Rank 0] step:1261/10000 train_time:285500ms step_avg:226.41ms +[2025-07-17 16:36:27] [Rank 0] step:1261/10000 train_time:285500ms step_avg:226.41ms +[2025-07-17 16:36:32] [Rank 0] step:1281/10000 train_time:290073ms step_avg:226.44ms +[2025-07-17 16:36:32] [Rank 0] step:1281/10000 train_time:290073ms step_avg:226.44ms +[2025-07-17 16:36:36] [Rank 0] step:1301/10000 train_time:294647ms step_avg:226.48ms +[2025-07-17 16:36:36] [Rank 0] step:1301/10000 train_time:294647ms step_avg:226.48ms +[2025-07-17 16:36:41] [Rank 0] step:1321/10000 train_time:299216ms step_avg:226.51ms +[2025-07-17 16:36:41] [Rank 0] step:1321/10000 train_time:299216ms step_avg:226.51ms +[2025-07-17 16:36:45] [Rank 0] step:1341/10000 train_time:303789ms step_avg:226.54ms +[2025-07-17 16:36:45] 
[Rank 0] step:1341/10000 train_time:303789ms step_avg:226.54ms +[2025-07-17 16:36:50] [Rank 0] step:1361/10000 train_time:308361ms step_avg:226.57ms +[2025-07-17 16:36:50] [Rank 0] step:1361/10000 train_time:308361ms step_avg:226.57ms +[2025-07-17 16:36:58] [Rank 0] PRINT: step:1375/10000 val_loss:4.6495 train_time:311846ms step_avg:226.80ms +[2025-07-17 16:36:58] [Rank 0] PRINT: step:1375/10000 val_loss:4.6495 train_time:311846ms step_avg:226.80ms +[2025-07-17 16:36:59] [Rank 0] step:1381/10000 train_time:312931ms step_avg:226.60ms +[2025-07-17 16:36:59] [Rank 0] step:1381/10000 train_time:312931ms step_avg:226.60ms +[2025-07-17 16:37:04] [Rank 0] step:1401/10000 train_time:317505ms step_avg:226.63ms +[2025-07-17 16:37:04] [Rank 0] step:1401/10000 train_time:317505ms step_avg:226.63ms +[2025-07-17 16:37:08] [Rank 0] step:1421/10000 train_time:322079ms step_avg:226.66ms +[2025-07-17 16:37:08] [Rank 0] step:1421/10000 train_time:322079ms step_avg:226.66ms +[2025-07-17 16:37:13] [Rank 0] step:1441/10000 train_time:326651ms step_avg:226.68ms +[2025-07-17 16:37:13] [Rank 0] step:1441/10000 train_time:326651ms step_avg:226.68ms +[2025-07-17 16:37:17] [Rank 0] step:1461/10000 train_time:331222ms step_avg:226.71ms +[2025-07-17 16:37:17] [Rank 0] step:1461/10000 train_time:331222ms step_avg:226.71ms +[2025-07-17 16:37:22] [Rank 0] step:1481/10000 train_time:335794ms step_avg:226.73ms +[2025-07-17 16:37:22] [Rank 0] step:1481/10000 train_time:335794ms step_avg:226.73ms +[2025-07-17 16:37:31] [Rank 0] PRINT: step:1500/10000 val_loss:4.5721 train_time:340448ms step_avg:226.97ms +[2025-07-17 16:37:31] [Rank 0] PRINT: step:1500/10000 val_loss:4.5721 train_time:340448ms step_avg:226.97ms +[2025-07-17 16:37:31] [Rank 0] step:1501/10000 train_time:340464ms step_avg:226.82ms +[2025-07-17 16:37:31] [Rank 0] step:1501/10000 train_time:340464ms step_avg:226.82ms +[2025-07-17 16:37:35] [Rank 0] step:1521/10000 train_time:344988ms step_avg:226.82ms +[2025-07-17 16:37:35] [Rank 0] 
step:1521/10000 train_time:344988ms step_avg:226.82ms +[2025-07-17 16:37:40] [Rank 0] step:1541/10000 train_time:349581ms step_avg:226.85ms +[2025-07-17 16:37:40] [Rank 0] step:1541/10000 train_time:349581ms step_avg:226.85ms +[2025-07-17 16:37:45] [Rank 0] step:1561/10000 train_time:354172ms step_avg:226.89ms +[2025-07-17 16:37:45] [Rank 0] step:1561/10000 train_time:354172ms step_avg:226.89ms +[2025-07-17 16:37:49] [Rank 0] step:1581/10000 train_time:358761ms step_avg:226.92ms +[2025-07-17 16:37:49] [Rank 0] step:1581/10000 train_time:358761ms step_avg:226.92ms +[2025-07-17 16:37:54] [Rank 0] step:1601/10000 train_time:363353ms step_avg:226.95ms +[2025-07-17 16:37:54] [Rank 0] step:1601/10000 train_time:363353ms step_avg:226.95ms +[2025-07-17 16:37:58] [Rank 0] step:1621/10000 train_time:367946ms step_avg:226.99ms +[2025-07-17 16:37:58] [Rank 0] step:1621/10000 train_time:367946ms step_avg:226.99ms +[2025-07-17 16:38:04] [Rank 0] PRINT: step:1625/10000 val_loss:4.6129 train_time:369152ms step_avg:227.17ms +[2025-07-17 16:38:04] [Rank 0] PRINT: step:1625/10000 val_loss:4.6129 train_time:369152ms step_avg:227.17ms +[2025-07-17 16:38:08] [Rank 0] step:1641/10000 train_time:372531ms step_avg:227.01ms +[2025-07-17 16:38:08] [Rank 0] step:1641/10000 train_time:372531ms step_avg:227.01ms +[2025-07-17 16:38:12] [Rank 0] step:1661/10000 train_time:377120ms step_avg:227.04ms +[2025-07-17 16:38:12] [Rank 0] step:1661/10000 train_time:377120ms step_avg:227.04ms +[2025-07-17 16:38:17] [Rank 0] step:1681/10000 train_time:381711ms step_avg:227.07ms +[2025-07-17 16:38:17] [Rank 0] step:1681/10000 train_time:381711ms step_avg:227.07ms +[2025-07-17 16:38:21] [Rank 0] step:1701/10000 train_time:386303ms step_avg:227.10ms +[2025-07-17 16:38:21] [Rank 0] step:1701/10000 train_time:386303ms step_avg:227.10ms +[2025-07-17 16:38:26] [Rank 0] step:1721/10000 train_time:390896ms step_avg:227.13ms +[2025-07-17 16:38:26] [Rank 0] step:1721/10000 train_time:390896ms step_avg:227.13ms 
+[2025-07-17 16:38:30] [Rank 0] step:1741/10000 train_time:395491ms step_avg:227.16ms +[2025-07-17 16:38:30] [Rank 0] step:1741/10000 train_time:395491ms step_avg:227.16ms +[2025-07-17 16:38:37] [Rank 0] PRINT: step:1750/10000 val_loss:4.6085 train_time:397846ms step_avg:227.34ms +[2025-07-17 16:38:37] [Rank 0] PRINT: step:1750/10000 val_loss:4.6085 train_time:397846ms step_avg:227.34ms +[2025-07-17 16:38:40] [Rank 0] step:1761/10000 train_time:400081ms step_avg:227.19ms +[2025-07-17 16:38:40] [Rank 0] step:1761/10000 train_time:400081ms step_avg:227.19ms +[2025-07-17 16:38:44] [Rank 0] step:1781/10000 train_time:404676ms step_avg:227.22ms +[2025-07-17 16:38:44] [Rank 0] step:1781/10000 train_time:404676ms step_avg:227.22ms +[2025-07-17 16:38:49] [Rank 0] step:1801/10000 train_time:409271ms step_avg:227.25ms +[2025-07-17 16:38:49] [Rank 0] step:1801/10000 train_time:409271ms step_avg:227.25ms +[2025-07-17 16:38:53] [Rank 0] step:1821/10000 train_time:413869ms step_avg:227.28ms +[2025-07-17 16:38:53] [Rank 0] step:1821/10000 train_time:413869ms step_avg:227.28ms +[2025-07-17 16:38:58] [Rank 0] step:1841/10000 train_time:418463ms step_avg:227.30ms +[2025-07-17 16:38:58] [Rank 0] step:1841/10000 train_time:418463ms step_avg:227.30ms +[2025-07-17 16:39:03] [Rank 0] step:1861/10000 train_time:423055ms step_avg:227.33ms +[2025-07-17 16:39:03] [Rank 0] step:1861/10000 train_time:423055ms step_avg:227.33ms +[2025-07-17 16:39:10] [Rank 0] PRINT: step:1875/10000 val_loss:4.6462 train_time:426559ms step_avg:227.50ms +[2025-07-17 16:39:10] [Rank 0] PRINT: step:1875/10000 val_loss:4.6462 train_time:426559ms step_avg:227.50ms +[2025-07-17 16:39:12] [Rank 0] step:1881/10000 train_time:427653ms step_avg:227.35ms +[2025-07-17 16:39:12] [Rank 0] step:1881/10000 train_time:427653ms step_avg:227.35ms +[2025-07-17 16:39:16] [Rank 0] step:1901/10000 train_time:432249ms step_avg:227.38ms +[2025-07-17 16:39:16] [Rank 0] step:1901/10000 train_time:432249ms step_avg:227.38ms +[2025-07-17 
16:39:21] [Rank 0] step:1921/10000 train_time:436846ms step_avg:227.41ms +[2025-07-17 16:39:21] [Rank 0] step:1921/10000 train_time:436846ms step_avg:227.41ms +[2025-07-17 16:39:25] [Rank 0] step:1941/10000 train_time:441439ms step_avg:227.43ms +[2025-07-17 16:39:25] [Rank 0] step:1941/10000 train_time:441439ms step_avg:227.43ms +[2025-07-17 16:39:30] [Rank 0] step:1961/10000 train_time:446035ms step_avg:227.45ms +[2025-07-17 16:39:30] [Rank 0] step:1961/10000 train_time:446035ms step_avg:227.45ms +[2025-07-17 16:39:35] [Rank 0] step:1981/10000 train_time:450633ms step_avg:227.48ms +[2025-07-17 16:39:35] [Rank 0] step:1981/10000 train_time:450633ms step_avg:227.48ms +[2025-07-17 16:39:43] [Rank 0] PRINT: step:2000/10000 val_loss:4.6244 train_time:455283ms step_avg:227.64ms +[2025-07-17 16:39:43] [Rank 0] PRINT: step:2000/10000 val_loss:4.6244 train_time:455283ms step_avg:227.64ms +[2025-07-17 16:39:44] [Rank 0] step:2001/10000 train_time:455299ms step_avg:227.54ms +[2025-07-17 16:39:44] [Rank 0] step:2001/10000 train_time:455299ms step_avg:227.54ms +[2025-07-17 16:39:48] [Rank 0] step:2021/10000 train_time:459823ms step_avg:227.52ms +[2025-07-17 16:39:48] [Rank 0] step:2021/10000 train_time:459823ms step_avg:227.52ms +[2025-07-17 16:39:53] [Rank 0] step:2041/10000 train_time:464419ms step_avg:227.54ms +[2025-07-17 16:39:53] [Rank 0] step:2041/10000 train_time:464419ms step_avg:227.54ms +[2025-07-17 16:39:57] [Rank 0] step:2061/10000 train_time:469018ms step_avg:227.57ms +[2025-07-17 16:39:57] [Rank 0] step:2061/10000 train_time:469018ms step_avg:227.57ms +[2025-07-17 16:40:02] [Rank 0] step:2081/10000 train_time:473619ms step_avg:227.59ms +[2025-07-17 16:40:02] [Rank 0] step:2081/10000 train_time:473619ms step_avg:227.59ms +[2025-07-17 16:40:07] [Rank 0] step:2101/10000 train_time:478220ms step_avg:227.62ms +[2025-07-17 16:40:07] [Rank 0] step:2101/10000 train_time:478220ms step_avg:227.62ms +[2025-07-17 16:40:11] [Rank 0] step:2121/10000 train_time:482820ms 
step_avg:227.64ms +[2025-07-17 16:40:11] [Rank 0] step:2121/10000 train_time:482820ms step_avg:227.64ms +[2025-07-17 16:40:17] [Rank 0] PRINT: step:2125/10000 val_loss:4.6261 train_time:484029ms step_avg:227.78ms +[2025-07-17 16:40:17] [Rank 0] PRINT: step:2125/10000 val_loss:4.6261 train_time:484029ms step_avg:227.78ms +[2025-07-17 16:40:20] [Rank 0] step:2141/10000 train_time:487420ms step_avg:227.66ms +[2025-07-17 16:40:20] [Rank 0] step:2141/10000 train_time:487420ms step_avg:227.66ms +[2025-07-17 16:40:25] [Rank 0] step:2161/10000 train_time:492019ms step_avg:227.68ms +[2025-07-17 16:40:25] [Rank 0] step:2161/10000 train_time:492019ms step_avg:227.68ms +[2025-07-17 16:40:30] [Rank 0] step:2181/10000 train_time:496620ms step_avg:227.70ms +[2025-07-17 16:40:30] [Rank 0] step:2181/10000 train_time:496620ms step_avg:227.70ms +[2025-07-17 16:40:34] [Rank 0] step:2201/10000 train_time:501219ms step_avg:227.72ms +[2025-07-17 16:40:34] [Rank 0] step:2201/10000 train_time:501219ms step_avg:227.72ms +[2025-07-17 16:40:39] [Rank 0] step:2221/10000 train_time:505818ms step_avg:227.74ms +[2025-07-17 16:40:39] [Rank 0] step:2221/10000 train_time:505818ms step_avg:227.74ms +[2025-07-17 16:40:43] [Rank 0] step:2241/10000 train_time:510512ms step_avg:227.81ms +[2025-07-17 16:40:43] [Rank 0] step:2241/10000 train_time:510512ms step_avg:227.81ms +[2025-07-17 16:40:50] [Rank 0] PRINT: step:2250/10000 val_loss:4.2612 train_time:512928ms step_avg:227.97ms +[2025-07-17 16:40:50] [Rank 0] PRINT: step:2250/10000 val_loss:4.2612 train_time:512928ms step_avg:227.97ms +[2025-07-17 16:40:53] [Rank 0] step:2261/10000 train_time:515230ms step_avg:227.88ms +[2025-07-17 16:40:53] [Rank 0] step:2261/10000 train_time:515230ms step_avg:227.88ms +[2025-07-17 16:40:57] [Rank 0] step:2281/10000 train_time:519948ms step_avg:227.95ms +[2025-07-17 16:40:57] [Rank 0] step:2281/10000 train_time:519948ms step_avg:227.95ms +[2025-07-17 16:41:02] [Rank 0] step:2301/10000 train_time:524663ms 
step_avg:228.02ms +[2025-07-17 16:41:02] [Rank 0] step:2301/10000 train_time:524663ms step_avg:228.02ms +[2025-07-17 16:41:07] [Rank 0] step:2321/10000 train_time:529380ms step_avg:228.08ms +[2025-07-17 16:41:07] [Rank 0] step:2321/10000 train_time:529380ms step_avg:228.08ms +[2025-07-17 16:41:12] [Rank 0] step:2341/10000 train_time:534098ms step_avg:228.15ms +[2025-07-17 16:41:12] [Rank 0] step:2341/10000 train_time:534098ms step_avg:228.15ms +[2025-07-17 16:41:16] [Rank 0] step:2361/10000 train_time:538819ms step_avg:228.22ms +[2025-07-17 16:41:16] [Rank 0] step:2361/10000 train_time:538819ms step_avg:228.22ms +[2025-07-17 16:41:24] [Rank 0] PRINT: step:2375/10000 val_loss:4.2346 train_time:542416ms step_avg:228.39ms +[2025-07-17 16:41:24] [Rank 0] PRINT: step:2375/10000 val_loss:4.2346 train_time:542416ms step_avg:228.39ms +[2025-07-17 16:41:25] [Rank 0] step:2381/10000 train_time:543538ms step_avg:228.28ms +[2025-07-17 16:41:25] [Rank 0] step:2381/10000 train_time:543538ms step_avg:228.28ms +[2025-07-17 16:41:30] [Rank 0] step:2401/10000 train_time:548254ms step_avg:228.34ms +[2025-07-17 16:41:30] [Rank 0] step:2401/10000 train_time:548254ms step_avg:228.34ms +[2025-07-17 16:41:35] [Rank 0] step:2421/10000 train_time:552972ms step_avg:228.41ms +[2025-07-17 16:41:35] [Rank 0] step:2421/10000 train_time:552972ms step_avg:228.41ms +[2025-07-17 16:41:39] [Rank 0] step:2441/10000 train_time:557689ms step_avg:228.47ms +[2025-07-17 16:41:39] [Rank 0] step:2441/10000 train_time:557689ms step_avg:228.47ms +[2025-07-17 16:41:44] [Rank 0] step:2461/10000 train_time:562405ms step_avg:228.53ms +[2025-07-17 16:41:44] [Rank 0] step:2461/10000 train_time:562405ms step_avg:228.53ms +[2025-07-17 16:41:49] [Rank 0] step:2481/10000 train_time:567125ms step_avg:228.59ms +[2025-07-17 16:41:49] [Rank 0] step:2481/10000 train_time:567125ms step_avg:228.59ms +[2025-07-17 16:41:57] [Rank 0] PRINT: step:2500/10000 val_loss:4.2233 train_time:571900ms step_avg:228.76ms +[2025-07-17 
16:41:57] [Rank 0] PRINT: step:2500/10000 val_loss:4.2233 train_time:571900ms step_avg:228.76ms +[2025-07-17 16:41:58] [Rank 0] step:2501/10000 train_time:571916ms step_avg:228.67ms +[2025-07-17 16:41:58] [Rank 0] step:2501/10000 train_time:571916ms step_avg:228.67ms +[2025-07-17 16:42:02] [Rank 0] step:2521/10000 train_time:576564ms step_avg:228.70ms +[2025-07-17 16:42:02] [Rank 0] step:2521/10000 train_time:576564ms step_avg:228.70ms +[2025-07-17 16:42:07] [Rank 0] step:2541/10000 train_time:581284ms step_avg:228.76ms +[2025-07-17 16:42:07] [Rank 0] step:2541/10000 train_time:581284ms step_avg:228.76ms +[2025-07-17 16:42:12] [Rank 0] step:2561/10000 train_time:586007ms step_avg:228.82ms +[2025-07-17 16:42:12] [Rank 0] step:2561/10000 train_time:586007ms step_avg:228.82ms +[2025-07-17 16:42:17] [Rank 0] step:2581/10000 train_time:590730ms step_avg:228.88ms +[2025-07-17 16:42:17] [Rank 0] step:2581/10000 train_time:590730ms step_avg:228.88ms +[2025-07-17 16:42:21] [Rank 0] step:2601/10000 train_time:595447ms step_avg:228.93ms +[2025-07-17 16:42:21] [Rank 0] step:2601/10000 train_time:595447ms step_avg:228.93ms +[2025-07-17 16:42:26] [Rank 0] step:2621/10000 train_time:600169ms step_avg:228.98ms +[2025-07-17 16:42:26] [Rank 0] step:2621/10000 train_time:600169ms step_avg:228.98ms +[2025-07-17 16:42:31] [Rank 0] PRINT: step:2625/10000 val_loss:4.1986 train_time:601409ms step_avg:229.11ms +[2025-07-17 16:42:31] [Rank 0] PRINT: step:2625/10000 val_loss:4.1986 train_time:601409ms step_avg:229.11ms +[2025-07-17 16:42:35] [Rank 0] step:2641/10000 train_time:604889ms step_avg:229.04ms +[2025-07-17 16:42:35] [Rank 0] step:2641/10000 train_time:604889ms step_avg:229.04ms +[2025-07-17 16:42:40] [Rank 0] step:2661/10000 train_time:609610ms step_avg:229.09ms +[2025-07-17 16:42:40] [Rank 0] step:2661/10000 train_time:609610ms step_avg:229.09ms +[2025-07-17 16:42:44] [Rank 0] step:2681/10000 train_time:614329ms step_avg:229.14ms +[2025-07-17 16:42:44] [Rank 0] step:2681/10000 
train_time:614329ms step_avg:229.14ms +[2025-07-17 16:42:49] [Rank 0] step:2701/10000 train_time:619052ms step_avg:229.19ms +[2025-07-17 16:42:49] [Rank 0] step:2701/10000 train_time:619052ms step_avg:229.19ms +[2025-07-17 16:42:54] [Rank 0] step:2721/10000 train_time:623773ms step_avg:229.24ms +[2025-07-17 16:42:54] [Rank 0] step:2721/10000 train_time:623773ms step_avg:229.24ms +[2025-07-17 16:42:59] [Rank 0] step:2741/10000 train_time:628494ms step_avg:229.29ms +[2025-07-17 16:42:59] [Rank 0] step:2741/10000 train_time:628494ms step_avg:229.29ms +[2025-07-17 16:43:05] [Rank 0] PRINT: step:2750/10000 val_loss:4.3042 train_time:630915ms step_avg:229.42ms +[2025-07-17 16:43:05] [Rank 0] PRINT: step:2750/10000 val_loss:4.3042 train_time:630915ms step_avg:229.42ms +[2025-07-17 16:43:08] [Rank 0] step:2761/10000 train_time:633213ms step_avg:229.34ms +[2025-07-17 16:43:08] [Rank 0] step:2761/10000 train_time:633213ms step_avg:229.34ms +[2025-07-17 16:43:13] [Rank 0] step:2781/10000 train_time:637933ms step_avg:229.39ms +[2025-07-17 16:43:13] [Rank 0] step:2781/10000 train_time:637933ms step_avg:229.39ms +[2025-07-17 16:43:17] [Rank 0] step:2801/10000 train_time:642654ms step_avg:229.44ms +[2025-07-17 16:43:17] [Rank 0] step:2801/10000 train_time:642654ms step_avg:229.44ms +[2025-07-17 16:43:22] [Rank 0] step:2821/10000 train_time:647375ms step_avg:229.48ms +[2025-07-17 16:43:22] [Rank 0] step:2821/10000 train_time:647375ms step_avg:229.48ms +[2025-07-17 16:43:27] [Rank 0] step:2841/10000 train_time:652096ms step_avg:229.53ms +[2025-07-17 16:43:27] [Rank 0] step:2841/10000 train_time:652096ms step_avg:229.53ms +[2025-07-17 16:43:31] [Rank 0] step:2861/10000 train_time:656814ms step_avg:229.58ms +[2025-07-17 16:43:31] [Rank 0] step:2861/10000 train_time:656814ms step_avg:229.58ms +[2025-07-17 16:43:39] [Rank 0] PRINT: step:2875/10000 val_loss:4.2982 train_time:660410ms step_avg:229.71ms +[2025-07-17 16:43:39] [Rank 0] PRINT: step:2875/10000 val_loss:4.2982 
train_time:660410ms step_avg:229.71ms +[2025-07-17 16:43:41] [Rank 0] step:2881/10000 train_time:661534ms step_avg:229.62ms +[2025-07-17 16:43:41] [Rank 0] step:2881/10000 train_time:661534ms step_avg:229.62ms +[2025-07-17 16:43:45] [Rank 0] step:2901/10000 train_time:666251ms step_avg:229.66ms +[2025-07-17 16:43:45] [Rank 0] step:2901/10000 train_time:666251ms step_avg:229.66ms +[2025-07-17 16:43:50] [Rank 0] step:2921/10000 train_time:670968ms step_avg:229.71ms +[2025-07-17 16:43:50] [Rank 0] step:2921/10000 train_time:670968ms step_avg:229.71ms +[2025-07-17 16:43:55] [Rank 0] step:2941/10000 train_time:675689ms step_avg:229.75ms +[2025-07-17 16:43:55] [Rank 0] step:2941/10000 train_time:675689ms step_avg:229.75ms +[2025-07-17 16:44:00] [Rank 0] step:2961/10000 train_time:680411ms step_avg:229.79ms +[2025-07-17 16:44:00] [Rank 0] step:2961/10000 train_time:680411ms step_avg:229.79ms +[2025-07-17 16:44:04] [Rank 0] step:2981/10000 train_time:685145ms step_avg:229.84ms +[2025-07-17 16:44:04] [Rank 0] step:2981/10000 train_time:685145ms step_avg:229.84ms +[2025-07-17 16:44:13] [Rank 0] PRINT: step:3000/10000 val_loss:4.2703 train_time:689941ms step_avg:229.98ms +[2025-07-17 16:44:13] [Rank 0] PRINT: step:3000/10000 val_loss:4.2703 train_time:689941ms step_avg:229.98ms +[2025-07-17 16:44:14] [Rank 0] step:3001/10000 train_time:689957ms step_avg:229.91ms +[2025-07-17 16:44:14] [Rank 0] step:3001/10000 train_time:689957ms step_avg:229.91ms +[2025-07-17 16:44:18] [Rank 0] step:3021/10000 train_time:694627ms step_avg:229.93ms +[2025-07-17 16:44:18] [Rank 0] step:3021/10000 train_time:694627ms step_avg:229.93ms +[2025-07-17 16:44:23] [Rank 0] step:3041/10000 train_time:699370ms step_avg:229.98ms +[2025-07-17 16:44:23] [Rank 0] step:3041/10000 train_time:699370ms step_avg:229.98ms +[2025-07-17 16:44:28] [Rank 0] step:3061/10000 train_time:704109ms step_avg:230.03ms +[2025-07-17 16:44:28] [Rank 0] step:3061/10000 train_time:704109ms step_avg:230.03ms +[2025-07-17 16:44:33] 
[Rank 0] step:3081/10000 train_time:708850ms step_avg:230.07ms +[2025-07-17 16:44:33] [Rank 0] step:3081/10000 train_time:708850ms step_avg:230.07ms +[2025-07-17 16:44:37] [Rank 0] step:3101/10000 train_time:713593ms step_avg:230.12ms +[2025-07-17 16:44:37] [Rank 0] step:3101/10000 train_time:713593ms step_avg:230.12ms +[2025-07-17 16:44:42] [Rank 0] step:3121/10000 train_time:718334ms step_avg:230.16ms +[2025-07-17 16:44:42] [Rank 0] step:3121/10000 train_time:718334ms step_avg:230.16ms +[2025-07-17 16:44:47] [Rank 0] PRINT: step:3125/10000 val_loss:4.3444 train_time:719579ms step_avg:230.27ms +[2025-07-17 16:44:47] [Rank 0] PRINT: step:3125/10000 val_loss:4.3444 train_time:719579ms step_avg:230.27ms +[2025-07-17 16:44:51] [Rank 0] step:3141/10000 train_time:723075ms step_avg:230.21ms +[2025-07-17 16:44:51] [Rank 0] step:3141/10000 train_time:723075ms step_avg:230.21ms +[2025-07-17 16:44:56] [Rank 0] step:3161/10000 train_time:727819ms step_avg:230.25ms +[2025-07-17 16:44:56] [Rank 0] step:3161/10000 train_time:727819ms step_avg:230.25ms +[2025-07-17 16:45:01] [Rank 0] step:3181/10000 train_time:732563ms step_avg:230.29ms +[2025-07-17 16:45:01] [Rank 0] step:3181/10000 train_time:732563ms step_avg:230.29ms +[2025-07-17 16:45:05] [Rank 0] step:3201/10000 train_time:737309ms step_avg:230.34ms +[2025-07-17 16:45:05] [Rank 0] step:3201/10000 train_time:737309ms step_avg:230.34ms +[2025-07-17 16:45:10] [Rank 0] step:3221/10000 train_time:742055ms step_avg:230.38ms +[2025-07-17 16:45:10] [Rank 0] step:3221/10000 train_time:742055ms step_avg:230.38ms +[2025-07-17 16:45:15] [Rank 0] step:3241/10000 train_time:746803ms step_avg:230.42ms +[2025-07-17 16:45:15] [Rank 0] step:3241/10000 train_time:746803ms step_avg:230.42ms +[2025-07-17 16:45:22] [Rank 0] PRINT: step:3250/10000 val_loss:4.2072 train_time:749234ms step_avg:230.53ms +[2025-07-17 16:45:22] [Rank 0] PRINT: step:3250/10000 val_loss:4.2072 train_time:749234ms step_avg:230.53ms +[2025-07-17 16:45:24] [Rank 0] 
step:3261/10000 train_time:751542ms step_avg:230.46ms +[2025-07-17 16:45:24] [Rank 0] step:3261/10000 train_time:751542ms step_avg:230.46ms +[2025-07-17 16:45:29] [Rank 0] step:3281/10000 train_time:756286ms step_avg:230.50ms +[2025-07-17 16:45:29] [Rank 0] step:3281/10000 train_time:756286ms step_avg:230.50ms +[2025-07-17 16:45:34] [Rank 0] step:3301/10000 train_time:761029ms step_avg:230.55ms +[2025-07-17 16:45:34] [Rank 0] step:3301/10000 train_time:761029ms step_avg:230.55ms +[2025-07-17 16:45:38] [Rank 0] step:3321/10000 train_time:765775ms step_avg:230.59ms +[2025-07-17 16:45:38] [Rank 0] step:3321/10000 train_time:765775ms step_avg:230.59ms +[2025-07-17 16:45:43] [Rank 0] step:3341/10000 train_time:770521ms step_avg:230.63ms +[2025-07-17 16:45:43] [Rank 0] step:3341/10000 train_time:770521ms step_avg:230.63ms +[2025-07-17 16:45:48] [Rank 0] step:3361/10000 train_time:775266ms step_avg:230.67ms +[2025-07-17 16:45:48] [Rank 0] step:3361/10000 train_time:775266ms step_avg:230.67ms +[2025-07-17 16:45:55] [Rank 0] PRINT: step:3375/10000 val_loss:4.3360 train_time:778881ms step_avg:230.78ms +[2025-07-17 16:45:55] [Rank 0] PRINT: step:3375/10000 val_loss:4.3360 train_time:778881ms step_avg:230.78ms +[2025-07-17 16:45:57] [Rank 0] step:3381/10000 train_time:780006ms step_avg:230.70ms +[2025-07-17 16:45:57] [Rank 0] step:3381/10000 train_time:780006ms step_avg:230.70ms +[2025-07-17 16:46:02] [Rank 0] step:3401/10000 train_time:784753ms step_avg:230.74ms +[2025-07-17 16:46:02] [Rank 0] step:3401/10000 train_time:784753ms step_avg:230.74ms +[2025-07-17 16:46:06] [Rank 0] step:3421/10000 train_time:789503ms step_avg:230.78ms +[2025-07-17 16:46:06] [Rank 0] step:3421/10000 train_time:789503ms step_avg:230.78ms +[2025-07-17 16:46:11] [Rank 0] step:3441/10000 train_time:794250ms step_avg:230.82ms +[2025-07-17 16:46:11] [Rank 0] step:3441/10000 train_time:794250ms step_avg:230.82ms +[2025-07-17 16:46:16] [Rank 0] step:3461/10000 train_time:798997ms step_avg:230.86ms 
+[2025-07-17 16:46:16] [Rank 0] step:3461/10000 train_time:798997ms step_avg:230.86ms +[2025-07-17 16:46:21] [Rank 0] step:3481/10000 train_time:803744ms step_avg:230.89ms +[2025-07-17 16:46:21] [Rank 0] step:3481/10000 train_time:803744ms step_avg:230.89ms +[2025-07-17 16:46:29] [Rank 0] PRINT: step:3500/10000 val_loss:4.3769 train_time:808547ms step_avg:231.01ms +[2025-07-17 16:46:29] [Rank 0] PRINT: step:3500/10000 val_loss:4.3769 train_time:808547ms step_avg:231.01ms +[2025-07-17 16:46:29] [Rank 0] step:3501/10000 train_time:808563ms step_avg:230.95ms +[2025-07-17 16:46:29] [Rank 0] step:3501/10000 train_time:808563ms step_avg:230.95ms +[2025-07-17 16:46:34] [Rank 0] step:3521/10000 train_time:813229ms step_avg:230.97ms +[2025-07-17 16:46:34] [Rank 0] step:3521/10000 train_time:813229ms step_avg:230.97ms +[2025-07-17 16:46:39] [Rank 0] step:3541/10000 train_time:817973ms step_avg:231.00ms +[2025-07-17 16:46:39] [Rank 0] step:3541/10000 train_time:817973ms step_avg:231.00ms +[2025-07-17 16:46:44] [Rank 0] step:3561/10000 train_time:822719ms step_avg:231.04ms +[2025-07-17 16:46:44] [Rank 0] step:3561/10000 train_time:822719ms step_avg:231.04ms +[2025-07-17 16:46:48] [Rank 0] step:3581/10000 train_time:827461ms step_avg:231.07ms +[2025-07-17 16:46:48] [Rank 0] step:3581/10000 train_time:827461ms step_avg:231.07ms +[2025-07-17 16:46:53] [Rank 0] step:3601/10000 train_time:832200ms step_avg:231.10ms +[2025-07-17 16:46:53] [Rank 0] step:3601/10000 train_time:832200ms step_avg:231.10ms +[2025-07-17 16:46:58] [Rank 0] step:3621/10000 train_time:836938ms step_avg:231.13ms +[2025-07-17 16:46:58] [Rank 0] step:3621/10000 train_time:836938ms step_avg:231.13ms +[2025-07-17 16:47:03] [Rank 0] PRINT: step:3625/10000 val_loss:4.3945 train_time:838182ms step_avg:231.22ms +[2025-07-17 16:47:03] [Rank 0] PRINT: step:3625/10000 val_loss:4.3945 train_time:838182ms step_avg:231.22ms +[2025-07-17 16:47:07] [Rank 0] step:3641/10000 train_time:841678ms step_avg:231.17ms +[2025-07-17 
16:47:07] [Rank 0] step:3641/10000 train_time:841678ms step_avg:231.17ms +[2025-07-17 16:47:12] [Rank 0] step:3661/10000 train_time:846422ms step_avg:231.20ms +[2025-07-17 16:47:12] [Rank 0] step:3661/10000 train_time:846422ms step_avg:231.20ms +[2025-07-17 16:47:17] [Rank 0] step:3681/10000 train_time:851164ms step_avg:231.23ms +[2025-07-17 16:47:17] [Rank 0] step:3681/10000 train_time:851164ms step_avg:231.23ms +[2025-07-17 16:47:21] [Rank 0] step:3701/10000 train_time:855907ms step_avg:231.26ms +[2025-07-17 16:47:21] [Rank 0] step:3701/10000 train_time:855907ms step_avg:231.26ms +[2025-07-17 16:47:26] [Rank 0] step:3721/10000 train_time:860722ms step_avg:231.31ms +[2025-07-17 16:47:26] [Rank 0] step:3721/10000 train_time:860722ms step_avg:231.31ms +[2025-07-17 16:47:31] [Rank 0] step:3741/10000 train_time:865551ms step_avg:231.37ms +[2025-07-17 16:47:31] [Rank 0] step:3741/10000 train_time:865551ms step_avg:231.37ms +[2025-07-17 16:47:38] [Rank 0] PRINT: step:3750/10000 val_loss:4.3847 train_time:868024ms step_avg:231.47ms +[2025-07-17 16:47:38] [Rank 0] PRINT: step:3750/10000 val_loss:4.3847 train_time:868024ms step_avg:231.47ms +[2025-07-17 16:47:40] [Rank 0] step:3761/10000 train_time:870374ms step_avg:231.42ms +[2025-07-17 16:47:40] [Rank 0] step:3761/10000 train_time:870374ms step_avg:231.42ms +[2025-07-17 16:47:45] [Rank 0] step:3781/10000 train_time:875203ms step_avg:231.47ms +[2025-07-17 16:47:45] [Rank 0] step:3781/10000 train_time:875203ms step_avg:231.47ms +[2025-07-17 16:47:50] [Rank 0] step:3801/10000 train_time:880032ms step_avg:231.53ms +[2025-07-17 16:47:50] [Rank 0] step:3801/10000 train_time:880032ms step_avg:231.53ms +[2025-07-17 16:47:55] [Rank 0] step:3821/10000 train_time:884863ms step_avg:231.58ms +[2025-07-17 16:47:55] [Rank 0] step:3821/10000 train_time:884863ms step_avg:231.58ms +[2025-07-17 16:48:00] [Rank 0] step:3841/10000 train_time:889690ms step_avg:231.63ms +[2025-07-17 16:48:00] [Rank 0] step:3841/10000 train_time:889690ms 
step_avg:231.63ms +[2025-07-17 16:48:04] [Rank 0] step:3861/10000 train_time:894518ms step_avg:231.68ms +[2025-07-17 16:48:04] [Rank 0] step:3861/10000 train_time:894518ms step_avg:231.68ms +[2025-07-17 16:48:12] [Rank 0] PRINT: step:3875/10000 val_loss:4.4420 train_time:898200ms step_avg:231.79ms +[2025-07-17 16:48:12] [Rank 0] PRINT: step:3875/10000 val_loss:4.4420 train_time:898200ms step_avg:231.79ms +[2025-07-17 16:48:14] [Rank 0] step:3881/10000 train_time:899347ms step_avg:231.73ms +[2025-07-17 16:48:14] [Rank 0] step:3881/10000 train_time:899347ms step_avg:231.73ms +[2025-07-17 16:48:19] [Rank 0] step:3901/10000 train_time:904209ms step_avg:231.79ms +[2025-07-17 16:48:19] [Rank 0] step:3901/10000 train_time:904209ms step_avg:231.79ms +[2025-07-17 16:48:23] [Rank 0] step:3921/10000 train_time:909034ms step_avg:231.84ms +[2025-07-17 16:48:23] [Rank 0] step:3921/10000 train_time:909034ms step_avg:231.84ms +[2025-07-17 16:48:28] [Rank 0] step:3941/10000 train_time:913858ms step_avg:231.88ms +[2025-07-17 16:48:28] [Rank 0] step:3941/10000 train_time:913858ms step_avg:231.88ms +[2025-07-17 16:48:33] [Rank 0] step:3961/10000 train_time:918683ms step_avg:231.93ms +[2025-07-17 16:48:33] [Rank 0] step:3961/10000 train_time:918683ms step_avg:231.93ms +[2025-07-17 16:48:38] [Rank 0] step:3981/10000 train_time:923507ms step_avg:231.98ms +[2025-07-17 16:48:38] [Rank 0] step:3981/10000 train_time:923507ms step_avg:231.98ms +[2025-07-17 16:48:47] [Rank 0] PRINT: step:4000/10000 val_loss:4.3822 train_time:928386ms step_avg:232.10ms +[2025-07-17 16:48:47] [Rank 0] PRINT: step:4000/10000 val_loss:4.3822 train_time:928386ms step_avg:232.10ms +[2025-07-17 16:48:47] [Rank 0] step:4001/10000 train_time:928402ms step_avg:232.04ms +[2025-07-17 16:48:47] [Rank 0] step:4001/10000 train_time:928402ms step_avg:232.04ms +[2025-07-17 16:48:52] [Rank 0] step:4021/10000 train_time:933150ms step_avg:232.07ms +[2025-07-17 16:48:52] [Rank 0] step:4021/10000 train_time:933150ms 
step_avg:232.07ms +[2025-07-17 16:48:57] [Rank 0] step:4041/10000 train_time:937972ms step_avg:232.11ms +[2025-07-17 16:48:57] [Rank 0] step:4041/10000 train_time:937972ms step_avg:232.11ms +[2025-07-17 16:49:02] [Rank 0] step:4061/10000 train_time:942793ms step_avg:232.16ms +[2025-07-17 16:49:02] [Rank 0] step:4061/10000 train_time:942793ms step_avg:232.16ms +[2025-07-17 16:49:07] [Rank 0] step:4081/10000 train_time:947613ms step_avg:232.20ms +[2025-07-17 16:49:07] [Rank 0] step:4081/10000 train_time:947613ms step_avg:232.20ms +[2025-07-17 16:49:12] [Rank 0] step:4101/10000 train_time:952437ms step_avg:232.25ms +[2025-07-17 16:49:12] [Rank 0] step:4101/10000 train_time:952437ms step_avg:232.25ms +[2025-07-17 16:49:16] [Rank 0] step:4121/10000 train_time:957259ms step_avg:232.29ms +[2025-07-17 16:49:16] [Rank 0] step:4121/10000 train_time:957259ms step_avg:232.29ms +[2025-07-17 16:49:21] [Rank 0] PRINT: step:4125/10000 val_loss:4.3811 train_time:958526ms step_avg:232.37ms +[2025-07-17 16:49:21] [Rank 0] PRINT: step:4125/10000 val_loss:4.3811 train_time:958526ms step_avg:232.37ms +[2025-07-17 16:49:25] [Rank 0] step:4141/10000 train_time:962082ms step_avg:232.33ms +[2025-07-17 16:49:25] [Rank 0] step:4141/10000 train_time:962082ms step_avg:232.33ms +[2025-07-17 16:49:30] [Rank 0] step:4161/10000 train_time:966905ms step_avg:232.37ms +[2025-07-17 16:49:30] [Rank 0] step:4161/10000 train_time:966905ms step_avg:232.37ms +[2025-07-17 16:49:35] [Rank 0] step:4181/10000 train_time:971727ms step_avg:232.42ms +[2025-07-17 16:49:35] [Rank 0] step:4181/10000 train_time:971727ms step_avg:232.42ms +[2025-07-17 16:49:40] [Rank 0] step:4201/10000 train_time:976552ms step_avg:232.46ms +[2025-07-17 16:49:40] [Rank 0] step:4201/10000 train_time:976552ms step_avg:232.46ms +[2025-07-17 16:49:45] [Rank 0] step:4221/10000 train_time:981375ms step_avg:232.50ms +[2025-07-17 16:49:45] [Rank 0] step:4221/10000 train_time:981375ms step_avg:232.50ms +[2025-07-17 16:49:49] [Rank 0] 
step:4241/10000 train_time:986202ms step_avg:232.54ms +[2025-07-17 16:49:49] [Rank 0] step:4241/10000 train_time:986202ms step_avg:232.54ms +[2025-07-17 16:49:56] [Rank 0] PRINT: step:4250/10000 val_loss:4.4524 train_time:988677ms step_avg:232.63ms +[2025-07-17 16:49:56] [Rank 0] PRINT: step:4250/10000 val_loss:4.4524 train_time:988677ms step_avg:232.63ms +[2025-07-17 16:49:59] [Rank 0] step:4261/10000 train_time:991025ms step_avg:232.58ms +[2025-07-17 16:49:59] [Rank 0] step:4261/10000 train_time:991025ms step_avg:232.58ms +[2025-07-17 16:50:04] [Rank 0] step:4281/10000 train_time:995851ms step_avg:232.62ms +[2025-07-17 16:50:04] [Rank 0] step:4281/10000 train_time:995851ms step_avg:232.62ms +[2025-07-17 16:50:08] [Rank 0] step:4301/10000 train_time:1000676ms step_avg:232.66ms +[2025-07-17 16:50:08] [Rank 0] step:4301/10000 train_time:1000676ms step_avg:232.66ms +[2025-07-17 16:50:13] [Rank 0] step:4321/10000 train_time:1005504ms step_avg:232.70ms +[2025-07-17 16:50:13] [Rank 0] step:4321/10000 train_time:1005504ms step_avg:232.70ms +[2025-07-17 16:50:18] [Rank 0] step:4341/10000 train_time:1010327ms step_avg:232.74ms +[2025-07-17 16:50:18] [Rank 0] step:4341/10000 train_time:1010327ms step_avg:232.74ms +[2025-07-17 16:50:23] [Rank 0] step:4361/10000 train_time:1015149ms step_avg:232.78ms +[2025-07-17 16:50:23] [Rank 0] step:4361/10000 train_time:1015149ms step_avg:232.78ms +[2025-07-17 16:50:31] [Rank 0] PRINT: step:4375/10000 val_loss:4.3982 train_time:1018829ms step_avg:232.88ms +[2025-07-17 16:50:31] [Rank 0] PRINT: step:4375/10000 val_loss:4.3982 train_time:1018829ms step_avg:232.88ms +[2025-07-17 16:50:32] [Rank 0] step:4381/10000 train_time:1019977ms step_avg:232.82ms +[2025-07-17 16:50:32] [Rank 0] step:4381/10000 train_time:1019977ms step_avg:232.82ms +[2025-07-17 16:50:37] [Rank 0] step:4401/10000 train_time:1024802ms step_avg:232.86ms +[2025-07-17 16:50:37] [Rank 0] step:4401/10000 train_time:1024802ms step_avg:232.86ms +[2025-07-17 16:50:42] [Rank 0] 
step:4421/10000 train_time:1029624ms step_avg:232.89ms +[2025-07-17 16:50:42] [Rank 0] step:4421/10000 train_time:1029624ms step_avg:232.89ms +[2025-07-17 16:50:47] [Rank 0] step:4441/10000 train_time:1034448ms step_avg:232.93ms +[2025-07-17 16:50:47] [Rank 0] step:4441/10000 train_time:1034448ms step_avg:232.93ms +[2025-07-17 16:50:51] [Rank 0] step:4461/10000 train_time:1039281ms step_avg:232.97ms +[2025-07-17 16:50:51] [Rank 0] step:4461/10000 train_time:1039281ms step_avg:232.97ms +[2025-07-17 16:50:56] [Rank 0] step:4481/10000 train_time:1044121ms step_avg:233.01ms +[2025-07-17 16:50:56] [Rank 0] step:4481/10000 train_time:1044121ms step_avg:233.01ms +[2025-07-17 16:51:05] [Rank 0] PRINT: step:4500/10000 val_loss:4.4143 train_time:1049019ms step_avg:233.12ms +[2025-07-17 16:51:05] [Rank 0] PRINT: step:4500/10000 val_loss:4.4143 train_time:1049019ms step_avg:233.12ms +[2025-07-17 16:51:06] [Rank 0] step:4501/10000 train_time:1049035ms step_avg:233.07ms +[2025-07-17 16:51:06] [Rank 0] step:4501/10000 train_time:1049035ms step_avg:233.07ms +[2025-07-17 16:51:11] [Rank 0] step:4521/10000 train_time:1053796ms step_avg:233.09ms +[2025-07-17 16:51:11] [Rank 0] step:4521/10000 train_time:1053796ms step_avg:233.09ms +[2025-07-17 16:51:15] [Rank 0] step:4541/10000 train_time:1058633ms step_avg:233.13ms +[2025-07-17 16:51:15] [Rank 0] step:4541/10000 train_time:1058633ms step_avg:233.13ms +[2025-07-17 16:51:20] [Rank 0] step:4561/10000 train_time:1063467ms step_avg:233.17ms +[2025-07-17 16:51:20] [Rank 0] step:4561/10000 train_time:1063467ms step_avg:233.17ms +[2025-07-17 16:51:25] [Rank 0] step:4581/10000 train_time:1068307ms step_avg:233.20ms +[2025-07-17 16:51:25] [Rank 0] step:4581/10000 train_time:1068307ms step_avg:233.20ms +[2025-07-17 16:51:30] [Rank 0] step:4601/10000 train_time:1073150ms step_avg:233.24ms +[2025-07-17 16:51:30] [Rank 0] step:4601/10000 train_time:1073150ms step_avg:233.24ms +[2025-07-17 16:51:35] [Rank 0] step:4621/10000 train_time:1077988ms 
step_avg:233.28ms +[2025-07-17 16:51:35] [Rank 0] step:4621/10000 train_time:1077988ms step_avg:233.28ms +[2025-07-17 16:51:40] [Rank 0] PRINT: step:4625/10000 val_loss:4.4157 train_time:1079258ms step_avg:233.35ms +[2025-07-17 16:51:40] [Rank 0] PRINT: step:4625/10000 val_loss:4.4157 train_time:1079258ms step_avg:233.35ms +[2025-07-17 16:51:44] [Rank 0] step:4641/10000 train_time:1082820ms step_avg:233.32ms +[2025-07-17 16:51:44] [Rank 0] step:4641/10000 train_time:1082820ms step_avg:233.32ms +[2025-07-17 16:51:49] [Rank 0] step:4661/10000 train_time:1087657ms step_avg:233.35ms +[2025-07-17 16:51:49] [Rank 0] step:4661/10000 train_time:1087657ms step_avg:233.35ms +[2025-07-17 16:51:54] [Rank 0] step:4681/10000 train_time:1092491ms step_avg:233.39ms +[2025-07-17 16:51:54] [Rank 0] step:4681/10000 train_time:1092491ms step_avg:233.39ms +[2025-07-17 16:51:59] [Rank 0] step:4701/10000 train_time:1097322ms step_avg:233.42ms +[2025-07-17 16:51:59] [Rank 0] step:4701/10000 train_time:1097322ms step_avg:233.42ms +[2025-07-17 16:52:03] [Rank 0] step:4721/10000 train_time:1102147ms step_avg:233.46ms +[2025-07-17 16:52:03] [Rank 0] step:4721/10000 train_time:1102147ms step_avg:233.46ms +[2025-07-17 16:52:08] [Rank 0] step:4741/10000 train_time:1106973ms step_avg:233.49ms +[2025-07-17 16:52:08] [Rank 0] step:4741/10000 train_time:1106973ms step_avg:233.49ms +[2025-07-17 16:52:15] [Rank 0] PRINT: step:4750/10000 val_loss:4.4143 train_time:1109447ms step_avg:233.57ms +[2025-07-17 16:52:15] [Rank 0] PRINT: step:4750/10000 val_loss:4.4143 train_time:1109447ms step_avg:233.57ms +[2025-07-17 16:52:17] [Rank 0] step:4761/10000 train_time:1111803ms step_avg:233.52ms +[2025-07-17 16:52:17] [Rank 0] step:4761/10000 train_time:1111803ms step_avg:233.52ms +[2025-07-17 16:52:22] [Rank 0] step:4781/10000 train_time:1116631ms step_avg:233.56ms +[2025-07-17 16:52:22] [Rank 0] step:4781/10000 train_time:1116631ms step_avg:233.56ms +[2025-07-17 16:52:27] [Rank 0] step:4801/10000 
train_time:1121458ms step_avg:233.59ms +[2025-07-17 16:52:27] [Rank 0] step:4801/10000 train_time:1121458ms step_avg:233.59ms +[2025-07-17 16:52:32] [Rank 0] step:4821/10000 train_time:1126292ms step_avg:233.62ms +[2025-07-17 16:52:32] [Rank 0] step:4821/10000 train_time:1126292ms step_avg:233.62ms +[2025-07-17 16:52:37] [Rank 0] step:4841/10000 train_time:1131128ms step_avg:233.66ms +[2025-07-17 16:52:37] [Rank 0] step:4841/10000 train_time:1131128ms step_avg:233.66ms +[2025-07-17 16:52:42] [Rank 0] step:4861/10000 train_time:1135963ms step_avg:233.69ms +[2025-07-17 16:52:42] [Rank 0] step:4861/10000 train_time:1135963ms step_avg:233.69ms +[2025-07-17 16:52:50] [Rank 0] PRINT: step:4875/10000 val_loss:4.4491 train_time:1139653ms step_avg:233.78ms +[2025-07-17 16:52:50] [Rank 0] PRINT: step:4875/10000 val_loss:4.4491 train_time:1139653ms step_avg:233.78ms +[2025-07-17 16:52:51] [Rank 0] step:4881/10000 train_time:1140807ms step_avg:233.72ms +[2025-07-17 16:52:51] [Rank 0] step:4881/10000 train_time:1140807ms step_avg:233.72ms +[2025-07-17 16:52:56] [Rank 0] step:4901/10000 train_time:1145647ms step_avg:233.76ms +[2025-07-17 16:52:56] [Rank 0] step:4901/10000 train_time:1145647ms step_avg:233.76ms +[2025-07-17 16:53:01] [Rank 0] step:4921/10000 train_time:1150483ms step_avg:233.79ms +[2025-07-17 16:53:01] [Rank 0] step:4921/10000 train_time:1150483ms step_avg:233.79ms +[2025-07-17 16:53:06] [Rank 0] step:4941/10000 train_time:1155322ms step_avg:233.82ms +[2025-07-17 16:53:06] [Rank 0] step:4941/10000 train_time:1155322ms step_avg:233.82ms +[2025-07-17 16:53:10] [Rank 0] step:4961/10000 train_time:1160158ms step_avg:233.86ms +[2025-07-17 16:53:10] [Rank 0] step:4961/10000 train_time:1160158ms step_avg:233.86ms +[2025-07-17 16:53:15] [Rank 0] step:4981/10000 train_time:1164995ms step_avg:233.89ms +[2025-07-17 16:53:15] [Rank 0] step:4981/10000 train_time:1164995ms step_avg:233.89ms +[2025-07-17 16:53:24] [Rank 0] PRINT: step:5000/10000 val_loss:4.4774 
train_time:1169899ms step_avg:233.98ms +[2025-07-17 16:53:24] [Rank 0] PRINT: step:5000/10000 val_loss:4.4774 train_time:1169899ms step_avg:233.98ms +[2025-07-17 16:53:25] [Rank 0] step:5001/10000 train_time:1169915ms step_avg:233.94ms +[2025-07-17 16:53:25] [Rank 0] step:5001/10000 train_time:1169915ms step_avg:233.94ms +[2025-07-17 16:53:30] [Rank 0] step:5021/10000 train_time:1174673ms step_avg:233.95ms +[2025-07-17 16:53:30] [Rank 0] step:5021/10000 train_time:1174673ms step_avg:233.95ms +[2025-07-17 16:53:34] [Rank 0] step:5041/10000 train_time:1179509ms step_avg:233.98ms +[2025-07-17 16:53:34] [Rank 0] step:5041/10000 train_time:1179509ms step_avg:233.98ms +[2025-07-17 16:53:39] [Rank 0] step:5061/10000 train_time:1184339ms step_avg:234.01ms +[2025-07-17 16:53:39] [Rank 0] step:5061/10000 train_time:1184339ms step_avg:234.01ms +[2025-07-17 16:53:44] [Rank 0] step:5081/10000 train_time:1189165ms step_avg:234.04ms +[2025-07-17 16:53:44] [Rank 0] step:5081/10000 train_time:1189165ms step_avg:234.04ms +[2025-07-17 16:53:49] [Rank 0] step:5101/10000 train_time:1193992ms step_avg:234.07ms +[2025-07-17 16:53:49] [Rank 0] step:5101/10000 train_time:1193992ms step_avg:234.07ms +[2025-07-17 16:53:54] [Rank 0] step:5121/10000 train_time:1198817ms step_avg:234.10ms +[2025-07-17 16:53:54] [Rank 0] step:5121/10000 train_time:1198817ms step_avg:234.10ms +[2025-07-17 16:53:59] [Rank 0] PRINT: step:5125/10000 val_loss:4.4382 train_time:1200083ms step_avg:234.16ms +[2025-07-17 16:53:59] [Rank 0] PRINT: step:5125/10000 val_loss:4.4382 train_time:1200083ms step_avg:234.16ms +[2025-07-17 16:54:03] [Rank 0] step:5141/10000 train_time:1203633ms step_avg:234.12ms +[2025-07-17 16:54:03] [Rank 0] step:5141/10000 train_time:1203633ms step_avg:234.12ms +[2025-07-17 16:54:08] [Rank 0] step:5161/10000 train_time:1208453ms step_avg:234.15ms +[2025-07-17 16:54:08] [Rank 0] step:5161/10000 train_time:1208453ms step_avg:234.15ms +[2025-07-17 16:54:13] [Rank 0] step:5181/10000 
train_time:1213275ms step_avg:234.18ms +[2025-07-17 16:54:13] [Rank 0] step:5181/10000 train_time:1213275ms step_avg:234.18ms +[2025-07-17 16:54:18] [Rank 0] step:5201/10000 train_time:1218145ms step_avg:234.21ms +[2025-07-17 16:54:18] [Rank 0] step:5201/10000 train_time:1218145ms step_avg:234.21ms +[2025-07-17 16:54:23] [Rank 0] step:5221/10000 train_time:1223048ms step_avg:234.26ms +[2025-07-17 16:54:23] [Rank 0] step:5221/10000 train_time:1223048ms step_avg:234.26ms +[2025-07-17 16:54:27] [Rank 0] step:5241/10000 train_time:1227941ms step_avg:234.30ms +[2025-07-17 16:54:27] [Rank 0] step:5241/10000 train_time:1227941ms step_avg:234.30ms +[2025-07-17 16:54:34] [Rank 0] PRINT: step:5250/10000 val_loss:4.3462 train_time:1230449ms step_avg:234.37ms +[2025-07-17 16:54:34] [Rank 0] PRINT: step:5250/10000 val_loss:4.3462 train_time:1230449ms step_avg:234.37ms +[2025-07-17 16:54:37] [Rank 0] step:5261/10000 train_time:1232835ms step_avg:234.33ms +[2025-07-17 16:54:37] [Rank 0] step:5261/10000 train_time:1232835ms step_avg:234.33ms +[2025-07-17 16:54:42] [Rank 0] step:5281/10000 train_time:1237741ms step_avg:234.38ms +[2025-07-17 16:54:42] [Rank 0] step:5281/10000 train_time:1237741ms step_avg:234.38ms +[2025-07-17 16:54:46] [Rank 0] step:5301/10000 train_time:1242643ms step_avg:234.42ms +[2025-07-17 16:54:46] [Rank 0] step:5301/10000 train_time:1242643ms step_avg:234.42ms +[2025-07-17 16:54:51] [Rank 0] step:5321/10000 train_time:1247543ms step_avg:234.46ms +[2025-07-17 16:54:51] [Rank 0] step:5321/10000 train_time:1247543ms step_avg:234.46ms +[2025-07-17 16:54:56] [Rank 0] step:5341/10000 train_time:1252456ms step_avg:234.50ms +[2025-07-17 16:54:56] [Rank 0] step:5341/10000 train_time:1252456ms step_avg:234.50ms +[2025-07-17 16:55:01] [Rank 0] step:5361/10000 train_time:1257363ms step_avg:234.54ms +[2025-07-17 16:55:01] [Rank 0] step:5361/10000 train_time:1257363ms step_avg:234.54ms +[2025-07-17 16:55:09] [Rank 0] PRINT: step:5375/10000 val_loss:4.2752 
train_time:1261105ms step_avg:234.62ms +[2025-07-17 16:55:09] [Rank 0] PRINT: step:5375/10000 val_loss:4.2752 train_time:1261105ms step_avg:234.62ms +[2025-07-17 16:55:11] [Rank 0] step:5381/10000 train_time:1262270ms step_avg:234.58ms +[2025-07-17 16:55:11] [Rank 0] step:5381/10000 train_time:1262270ms step_avg:234.58ms +[2025-07-17 16:55:16] [Rank 0] step:5401/10000 train_time:1267177ms step_avg:234.62ms +[2025-07-17 16:55:16] [Rank 0] step:5401/10000 train_time:1267177ms step_avg:234.62ms +[2025-07-17 16:55:21] [Rank 0] step:5421/10000 train_time:1272093ms step_avg:234.66ms +[2025-07-17 16:55:21] [Rank 0] step:5421/10000 train_time:1272093ms step_avg:234.66ms +[2025-07-17 16:55:26] [Rank 0] step:5441/10000 train_time:1276993ms step_avg:234.70ms +[2025-07-17 16:55:26] [Rank 0] step:5441/10000 train_time:1276993ms step_avg:234.70ms +[2025-07-17 16:55:30] [Rank 0] step:5461/10000 train_time:1281908ms step_avg:234.74ms +[2025-07-17 16:55:30] [Rank 0] step:5461/10000 train_time:1281908ms step_avg:234.74ms +[2025-07-17 16:55:35] [Rank 0] step:5481/10000 train_time:1286820ms step_avg:234.78ms +[2025-07-17 16:55:35] [Rank 0] step:5481/10000 train_time:1286820ms step_avg:234.78ms +[2025-07-17 16:55:45] [Rank 0] PRINT: step:5500/10000 val_loss:4.3829 train_time:1291788ms step_avg:234.87ms +[2025-07-17 16:55:45] [Rank 0] PRINT: step:5500/10000 val_loss:4.3829 train_time:1291788ms step_avg:234.87ms +[2025-07-17 16:55:45] [Rank 0] step:5501/10000 train_time:1291804ms step_avg:234.83ms +[2025-07-17 16:55:45] [Rank 0] step:5501/10000 train_time:1291804ms step_avg:234.83ms +[2025-07-17 16:55:50] [Rank 0] step:5521/10000 train_time:1296635ms step_avg:234.86ms +[2025-07-17 16:55:50] [Rank 0] step:5521/10000 train_time:1296635ms step_avg:234.86ms +[2025-07-17 16:55:55] [Rank 0] step:5541/10000 train_time:1301547ms step_avg:234.89ms +[2025-07-17 16:55:55] [Rank 0] step:5541/10000 train_time:1301547ms step_avg:234.89ms +[2025-07-17 16:56:00] [Rank 0] step:5561/10000 
train_time:1306456ms step_avg:234.93ms +[2025-07-17 16:56:00] [Rank 0] step:5561/10000 train_time:1306456ms step_avg:234.93ms +[2025-07-17 16:56:05] [Rank 0] step:5581/10000 train_time:1311364ms step_avg:234.97ms +[2025-07-17 16:56:05] [Rank 0] step:5581/10000 train_time:1311364ms step_avg:234.97ms +[2025-07-17 16:56:09] [Rank 0] step:5601/10000 train_time:1316276ms step_avg:235.01ms +[2025-07-17 16:56:09] [Rank 0] step:5601/10000 train_time:1316276ms step_avg:235.01ms +[2025-07-17 16:56:14] [Rank 0] step:5621/10000 train_time:1321189ms step_avg:235.05ms +[2025-07-17 16:56:14] [Rank 0] step:5621/10000 train_time:1321189ms step_avg:235.05ms +[2025-07-17 16:56:20] [Rank 0] PRINT: step:5625/10000 val_loss:4.4720 train_time:1322476ms step_avg:235.11ms +[2025-07-17 16:56:20] [Rank 0] PRINT: step:5625/10000 val_loss:4.4720 train_time:1322476ms step_avg:235.11ms +[2025-07-17 16:56:24] [Rank 0] step:5641/10000 train_time:1326096ms step_avg:235.08ms +[2025-07-17 16:56:24] [Rank 0] step:5641/10000 train_time:1326096ms step_avg:235.08ms +[2025-07-17 16:56:29] [Rank 0] step:5661/10000 train_time:1331010ms step_avg:235.12ms +[2025-07-17 16:56:29] [Rank 0] step:5661/10000 train_time:1331010ms step_avg:235.12ms +[2025-07-17 16:56:33] [Rank 0] step:5681/10000 train_time:1335925ms step_avg:235.16ms +[2025-07-17 16:56:33] [Rank 0] step:5681/10000 train_time:1335925ms step_avg:235.16ms +[2025-07-17 16:56:38] [Rank 0] step:5701/10000 train_time:1340836ms step_avg:235.19ms +[2025-07-17 16:56:38] [Rank 0] step:5701/10000 train_time:1340836ms step_avg:235.19ms +[2025-07-17 16:56:43] [Rank 0] step:5721/10000 train_time:1345739ms step_avg:235.23ms +[2025-07-17 16:56:43] [Rank 0] step:5721/10000 train_time:1345739ms step_avg:235.23ms +[2025-07-17 16:56:48] [Rank 0] step:5741/10000 train_time:1350649ms step_avg:235.26ms +[2025-07-17 16:56:48] [Rank 0] step:5741/10000 train_time:1350649ms step_avg:235.26ms +[2025-07-17 16:56:55] [Rank 0] PRINT: step:5750/10000 val_loss:4.4504 
train_time:1353162ms step_avg:235.33ms +[2025-07-17 16:56:55] [Rank 0] PRINT: step:5750/10000 val_loss:4.4504 train_time:1353162ms step_avg:235.33ms +[2025-07-17 16:56:58] [Rank 0] step:5761/10000 train_time:1355554ms step_avg:235.30ms +[2025-07-17 16:56:58] [Rank 0] step:5761/10000 train_time:1355554ms step_avg:235.30ms +[2025-07-17 16:57:03] [Rank 0] step:5781/10000 train_time:1360453ms step_avg:235.33ms +[2025-07-17 16:57:03] [Rank 0] step:5781/10000 train_time:1360453ms step_avg:235.33ms +[2025-07-17 16:57:08] [Rank 0] step:5801/10000 train_time:1365348ms step_avg:235.36ms +[2025-07-17 16:57:08] [Rank 0] step:5801/10000 train_time:1365348ms step_avg:235.36ms +[2025-07-17 16:57:12] [Rank 0] step:5821/10000 train_time:1370241ms step_avg:235.40ms +[2025-07-17 16:57:12] [Rank 0] step:5821/10000 train_time:1370241ms step_avg:235.40ms +[2025-07-17 16:57:17] [Rank 0] step:5841/10000 train_time:1375144ms step_avg:235.43ms +[2025-07-17 16:57:17] [Rank 0] step:5841/10000 train_time:1375144ms step_avg:235.43ms +[2025-07-17 16:57:22] [Rank 0] step:5861/10000 train_time:1380039ms step_avg:235.46ms +[2025-07-17 16:57:22] [Rank 0] step:5861/10000 train_time:1380039ms step_avg:235.46ms +[2025-07-17 16:57:30] [Rank 0] PRINT: step:5875/10000 val_loss:4.4166 train_time:1383765ms step_avg:235.53ms +[2025-07-17 16:57:30] [Rank 0] PRINT: step:5875/10000 val_loss:4.4166 train_time:1383765ms step_avg:235.53ms +[2025-07-17 16:57:31] [Rank 0] step:5881/10000 train_time:1384930ms step_avg:235.49ms +[2025-07-17 16:57:31] [Rank 0] step:5881/10000 train_time:1384930ms step_avg:235.49ms +[2025-07-17 16:57:36] [Rank 0] step:5901/10000 train_time:1389841ms step_avg:235.53ms +[2025-07-17 16:57:36] [Rank 0] step:5901/10000 train_time:1389841ms step_avg:235.53ms +[2025-07-17 16:57:41] [Rank 0] step:5921/10000 train_time:1394742ms step_avg:235.56ms +[2025-07-17 16:57:41] [Rank 0] step:5921/10000 train_time:1394742ms step_avg:235.56ms +[2025-07-17 16:57:46] [Rank 0] step:5941/10000 
train_time:1399660ms step_avg:235.59ms +[2025-07-17 16:57:46] [Rank 0] step:5941/10000 train_time:1399660ms step_avg:235.59ms +[2025-07-17 16:57:51] [Rank 0] step:5961/10000 train_time:1404577ms step_avg:235.63ms +[2025-07-17 16:57:51] [Rank 0] step:5961/10000 train_time:1404577ms step_avg:235.63ms +[2025-07-17 16:57:56] [Rank 0] step:5981/10000 train_time:1409495ms step_avg:235.66ms +[2025-07-17 16:57:56] [Rank 0] step:5981/10000 train_time:1409495ms step_avg:235.66ms +[2025-07-17 16:58:05] [Rank 0] PRINT: step:6000/10000 val_loss:4.3167 train_time:1414469ms step_avg:235.74ms +[2025-07-17 16:58:05] [Rank 0] PRINT: step:6000/10000 val_loss:4.3167 train_time:1414469ms step_avg:235.74ms +[2025-07-17 16:58:05] [Rank 0] step:6001/10000 train_time:1414485ms step_avg:235.71ms +[2025-07-17 16:58:05] [Rank 0] step:6001/10000 train_time:1414485ms step_avg:235.71ms +[2025-07-17 16:58:10] [Rank 0] step:6021/10000 train_time:1419323ms step_avg:235.73ms +[2025-07-17 16:58:10] [Rank 0] step:6021/10000 train_time:1419323ms step_avg:235.73ms +[2025-07-17 16:58:15] [Rank 0] step:6041/10000 train_time:1424246ms step_avg:235.76ms +[2025-07-17 16:58:15] [Rank 0] step:6041/10000 train_time:1424246ms step_avg:235.76ms +[2025-07-17 16:58:20] [Rank 0] step:6061/10000 train_time:1429159ms step_avg:235.80ms +[2025-07-17 16:58:20] [Rank 0] step:6061/10000 train_time:1429159ms step_avg:235.80ms +[2025-07-17 16:58:25] [Rank 0] step:6081/10000 train_time:1434078ms step_avg:235.83ms +[2025-07-17 16:58:25] [Rank 0] step:6081/10000 train_time:1434078ms step_avg:235.83ms +[2025-07-17 16:58:30] [Rank 0] step:6101/10000 train_time:1438997ms step_avg:235.86ms +[2025-07-17 16:58:30] [Rank 0] step:6101/10000 train_time:1438997ms step_avg:235.86ms +[2025-07-17 16:58:35] [Rank 0] step:6121/10000 train_time:1443921ms step_avg:235.90ms +[2025-07-17 16:58:35] [Rank 0] step:6121/10000 train_time:1443921ms step_avg:235.90ms +[2025-07-17 16:58:40] [Rank 0] PRINT: step:6125/10000 val_loss:4.3153 
train_time:1445212ms step_avg:235.95ms +[2025-07-17 16:58:40] [Rank 0] PRINT: step:6125/10000 val_loss:4.3153 train_time:1445212ms step_avg:235.95ms +[2025-07-17 16:58:44] [Rank 0] step:6141/10000 train_time:1448841ms step_avg:235.93ms +[2025-07-17 16:58:44] [Rank 0] step:6141/10000 train_time:1448841ms step_avg:235.93ms +[2025-07-17 16:58:49] [Rank 0] step:6161/10000 train_time:1453759ms step_avg:235.96ms +[2025-07-17 16:58:49] [Rank 0] step:6161/10000 train_time:1453759ms step_avg:235.96ms +[2025-07-17 16:58:54] [Rank 0] step:6181/10000 train_time:1458689ms step_avg:236.00ms +[2025-07-17 16:58:54] [Rank 0] step:6181/10000 train_time:1458689ms step_avg:236.00ms +[2025-07-17 16:58:59] [Rank 0] step:6201/10000 train_time:1463621ms step_avg:236.03ms +[2025-07-17 16:58:59] [Rank 0] step:6201/10000 train_time:1463621ms step_avg:236.03ms +[2025-07-17 16:59:04] [Rank 0] step:6221/10000 train_time:1468549ms step_avg:236.06ms +[2025-07-17 16:59:04] [Rank 0] step:6221/10000 train_time:1468549ms step_avg:236.06ms +[2025-07-17 16:59:09] [Rank 0] step:6241/10000 train_time:1473481ms step_avg:236.10ms +[2025-07-17 16:59:09] [Rank 0] step:6241/10000 train_time:1473481ms step_avg:236.10ms +[2025-07-17 16:59:16] [Rank 0] PRINT: step:6250/10000 val_loss:4.3602 train_time:1476007ms step_avg:236.16ms +[2025-07-17 16:59:16] [Rank 0] PRINT: step:6250/10000 val_loss:4.3602 train_time:1476007ms step_avg:236.16ms +[2025-07-17 16:59:19] [Rank 0] step:6261/10000 train_time:1478407ms step_avg:236.13ms +[2025-07-17 16:59:19] [Rank 0] step:6261/10000 train_time:1478407ms step_avg:236.13ms +[2025-07-17 16:59:24] [Rank 0] step:6281/10000 train_time:1483346ms step_avg:236.16ms +[2025-07-17 16:59:24] [Rank 0] step:6281/10000 train_time:1483346ms step_avg:236.16ms +[2025-07-17 16:59:29] [Rank 0] step:6301/10000 train_time:1488276ms step_avg:236.20ms +[2025-07-17 16:59:29] [Rank 0] step:6301/10000 train_time:1488276ms step_avg:236.20ms +[2025-07-17 16:59:34] [Rank 0] step:6321/10000 
train_time:1493207ms step_avg:236.23ms +[2025-07-17 16:59:34] [Rank 0] step:6321/10000 train_time:1493207ms step_avg:236.23ms +[2025-07-17 16:59:38] [Rank 0] step:6341/10000 train_time:1498146ms step_avg:236.26ms +[2025-07-17 16:59:38] [Rank 0] step:6341/10000 train_time:1498146ms step_avg:236.26ms +[2025-07-17 16:59:43] [Rank 0] step:6361/10000 train_time:1503075ms step_avg:236.30ms +[2025-07-17 16:59:43] [Rank 0] step:6361/10000 train_time:1503075ms step_avg:236.30ms +[2025-07-17 16:59:52] [Rank 0] PRINT: step:6375/10000 val_loss:4.4395 train_time:1506827ms step_avg:236.36ms +[2025-07-17 16:59:52] [Rank 0] PRINT: step:6375/10000 val_loss:4.4395 train_time:1506827ms step_avg:236.36ms +[2025-07-17 16:59:53] [Rank 0] step:6381/10000 train_time:1507997ms step_avg:236.33ms +[2025-07-17 16:59:53] [Rank 0] step:6381/10000 train_time:1507997ms step_avg:236.33ms +[2025-07-17 16:59:58] [Rank 0] step:6401/10000 train_time:1512920ms step_avg:236.36ms +[2025-07-17 16:59:58] [Rank 0] step:6401/10000 train_time:1512920ms step_avg:236.36ms +[2025-07-17 17:00:03] [Rank 0] step:6421/10000 train_time:1517845ms step_avg:236.39ms +[2025-07-17 17:00:03] [Rank 0] step:6421/10000 train_time:1517845ms step_avg:236.39ms +[2025-07-17 17:00:08] [Rank 0] step:6441/10000 train_time:1522777ms step_avg:236.42ms +[2025-07-17 17:00:08] [Rank 0] step:6441/10000 train_time:1522777ms step_avg:236.42ms +[2025-07-17 17:00:13] [Rank 0] step:6461/10000 train_time:1527715ms step_avg:236.45ms +[2025-07-17 17:00:13] [Rank 0] step:6461/10000 train_time:1527715ms step_avg:236.45ms +[2025-07-17 17:00:18] [Rank 0] step:6481/10000 train_time:1532644ms step_avg:236.48ms +[2025-07-17 17:00:18] [Rank 0] step:6481/10000 train_time:1532644ms step_avg:236.48ms +[2025-07-17 17:00:27] [Rank 0] PRINT: step:6500/10000 val_loss:4.3951 train_time:1537629ms step_avg:236.56ms +[2025-07-17 17:00:27] [Rank 0] PRINT: step:6500/10000 val_loss:4.3951 train_time:1537629ms step_avg:236.56ms +[2025-07-17 17:00:27] [Rank 0] 
step:6501/10000 train_time:1537645ms step_avg:236.52ms +[2025-07-17 17:00:27] [Rank 0] step:6501/10000 train_time:1537645ms step_avg:236.52ms +[2025-07-17 17:00:32] [Rank 0] step:6521/10000 train_time:1542494ms step_avg:236.54ms +[2025-07-17 17:00:32] [Rank 0] step:6521/10000 train_time:1542494ms step_avg:236.54ms +[2025-07-17 17:00:37] [Rank 0] step:6541/10000 train_time:1547422ms step_avg:236.57ms +[2025-07-17 17:00:37] [Rank 0] step:6541/10000 train_time:1547422ms step_avg:236.57ms +[2025-07-17 17:00:42] [Rank 0] step:6561/10000 train_time:1552358ms step_avg:236.60ms +[2025-07-17 17:00:42] [Rank 0] step:6561/10000 train_time:1552358ms step_avg:236.60ms +[2025-07-17 17:00:47] [Rank 0] step:6581/10000 train_time:1557290ms step_avg:236.63ms +[2025-07-17 17:00:47] [Rank 0] step:6581/10000 train_time:1557290ms step_avg:236.63ms +[2025-07-17 17:00:52] [Rank 0] step:6601/10000 train_time:1562222ms step_avg:236.66ms +[2025-07-17 17:00:52] [Rank 0] step:6601/10000 train_time:1562222ms step_avg:236.66ms +[2025-07-17 17:00:57] [Rank 0] step:6621/10000 train_time:1567146ms step_avg:236.69ms +[2025-07-17 17:00:57] [Rank 0] step:6621/10000 train_time:1567146ms step_avg:236.69ms +[2025-07-17 17:01:03] [Rank 0] PRINT: step:6625/10000 val_loss:4.3274 train_time:1568440ms step_avg:236.75ms +[2025-07-17 17:01:03] [Rank 0] PRINT: step:6625/10000 val_loss:4.3274 train_time:1568440ms step_avg:236.75ms +[2025-07-17 17:01:07] [Rank 0] step:6641/10000 train_time:1572067ms step_avg:236.72ms +[2025-07-17 17:01:07] [Rank 0] step:6641/10000 train_time:1572067ms step_avg:236.72ms +[2025-07-17 17:01:11] [Rank 0] step:6661/10000 train_time:1576991ms step_avg:236.75ms +[2025-07-17 17:01:11] [Rank 0] step:6661/10000 train_time:1576991ms step_avg:236.75ms +[2025-07-17 17:01:16] [Rank 0] step:6681/10000 train_time:1581968ms step_avg:236.79ms +[2025-07-17 17:01:16] [Rank 0] step:6681/10000 train_time:1581968ms step_avg:236.79ms +[2025-07-17 17:01:21] [Rank 0] step:6701/10000 train_time:1586958ms 
step_avg:236.82ms +[2025-07-17 17:01:21] [Rank 0] step:6701/10000 train_time:1586958ms step_avg:236.82ms +[2025-07-17 17:01:26] [Rank 0] step:6721/10000 train_time:1591963ms step_avg:236.86ms +[2025-07-17 17:01:26] [Rank 0] step:6721/10000 train_time:1591963ms step_avg:236.86ms +[2025-07-17 17:01:31] [Rank 0] step:6741/10000 train_time:1596963ms step_avg:236.90ms +[2025-07-17 17:01:31] [Rank 0] step:6741/10000 train_time:1596963ms step_avg:236.90ms +[2025-07-17 17:01:38] [Rank 0] PRINT: step:6750/10000 val_loss:4.4638 train_time:1599517ms step_avg:236.97ms +[2025-07-17 17:01:38] [Rank 0] PRINT: step:6750/10000 val_loss:4.4638 train_time:1599517ms step_avg:236.97ms +[2025-07-17 17:01:41] [Rank 0] step:6761/10000 train_time:1601954ms step_avg:236.94ms +[2025-07-17 17:01:41] [Rank 0] step:6761/10000 train_time:1601954ms step_avg:236.94ms +[2025-07-17 17:01:46] [Rank 0] step:6781/10000 train_time:1606948ms step_avg:236.98ms +[2025-07-17 17:01:46] [Rank 0] step:6781/10000 train_time:1606948ms step_avg:236.98ms +[2025-07-17 17:01:51] [Rank 0] step:6801/10000 train_time:1611946ms step_avg:237.02ms +[2025-07-17 17:01:51] [Rank 0] step:6801/10000 train_time:1611946ms step_avg:237.02ms +[2025-07-17 17:01:56] [Rank 0] step:6821/10000 train_time:1616934ms step_avg:237.05ms +[2025-07-17 17:01:56] [Rank 0] step:6821/10000 train_time:1616934ms step_avg:237.05ms +[2025-07-17 17:02:01] [Rank 0] step:6841/10000 train_time:1621929ms step_avg:237.09ms +[2025-07-17 17:02:01] [Rank 0] step:6841/10000 train_time:1621929ms step_avg:237.09ms +[2025-07-17 17:02:06] [Rank 0] step:6861/10000 train_time:1626916ms step_avg:237.13ms +[2025-07-17 17:02:06] [Rank 0] step:6861/10000 train_time:1626916ms step_avg:237.13ms +[2025-07-17 17:02:14] [Rank 0] PRINT: step:6875/10000 val_loss:4.4539 train_time:1630710ms step_avg:237.19ms +[2025-07-17 17:02:14] [Rank 0] PRINT: step:6875/10000 val_loss:4.4539 train_time:1630710ms step_avg:237.19ms +[2025-07-17 17:02:16] [Rank 0] step:6881/10000 
train_time:1631897ms step_avg:237.16ms +[2025-07-17 17:02:16] [Rank 0] step:6881/10000 train_time:1631897ms step_avg:237.16ms +[2025-07-17 17:02:21] [Rank 0] step:6901/10000 train_time:1636881ms step_avg:237.19ms +[2025-07-17 17:02:21] [Rank 0] step:6901/10000 train_time:1636881ms step_avg:237.19ms +[2025-07-17 17:02:26] [Rank 0] step:6921/10000 train_time:1641866ms step_avg:237.23ms +[2025-07-17 17:02:26] [Rank 0] step:6921/10000 train_time:1641866ms step_avg:237.23ms +[2025-07-17 17:02:31] [Rank 0] step:6941/10000 train_time:1646862ms step_avg:237.27ms +[2025-07-17 17:02:31] [Rank 0] step:6941/10000 train_time:1646862ms step_avg:237.27ms +[2025-07-17 17:02:36] [Rank 0] step:6961/10000 train_time:1651853ms step_avg:237.30ms +[2025-07-17 17:02:36] [Rank 0] step:6961/10000 train_time:1651853ms step_avg:237.30ms +[2025-07-17 17:02:41] [Rank 0] step:6981/10000 train_time:1656846ms step_avg:237.34ms +[2025-07-17 17:02:41] [Rank 0] step:6981/10000 train_time:1656846ms step_avg:237.34ms +[2025-07-17 17:02:50] [Rank 0] PRINT: step:7000/10000 val_loss:4.3967 train_time:1661896ms step_avg:237.41ms +[2025-07-17 17:02:50] [Rank 0] PRINT: step:7000/10000 val_loss:4.3967 train_time:1661896ms step_avg:237.41ms +[2025-07-17 17:02:50] [Rank 0] step:7001/10000 train_time:1661913ms step_avg:237.38ms +[2025-07-17 17:02:50] [Rank 0] step:7001/10000 train_time:1661913ms step_avg:237.38ms +[2025-07-17 17:02:55] [Rank 0] step:7021/10000 train_time:1666829ms step_avg:237.41ms +[2025-07-17 17:02:55] [Rank 0] step:7021/10000 train_time:1666829ms step_avg:237.41ms +[2025-07-17 17:03:00] [Rank 0] step:7041/10000 train_time:1671814ms step_avg:237.44ms +[2025-07-17 17:03:00] [Rank 0] step:7041/10000 train_time:1671814ms step_avg:237.44ms +[2025-07-17 17:03:05] [Rank 0] step:7061/10000 train_time:1676797ms step_avg:237.47ms +[2025-07-17 17:03:05] [Rank 0] step:7061/10000 train_time:1676797ms step_avg:237.47ms +[2025-07-17 17:03:10] [Rank 0] step:7081/10000 train_time:1681783ms step_avg:237.51ms 
+[2025-07-17 17:03:10] [Rank 0] step:7081/10000 train_time:1681783ms step_avg:237.51ms +[2025-07-17 17:03:15] [Rank 0] step:7101/10000 train_time:1686764ms step_avg:237.54ms +[2025-07-17 17:03:15] [Rank 0] step:7101/10000 train_time:1686764ms step_avg:237.54ms +[2025-07-17 17:03:20] [Rank 0] step:7121/10000 train_time:1691751ms step_avg:237.57ms +[2025-07-17 17:03:20] [Rank 0] step:7121/10000 train_time:1691751ms step_avg:237.57ms +[2025-07-17 17:03:26] [Rank 0] PRINT: step:7125/10000 val_loss:4.3464 train_time:1693057ms step_avg:237.62ms +[2025-07-17 17:03:26] [Rank 0] PRINT: step:7125/10000 val_loss:4.3464 train_time:1693057ms step_avg:237.62ms +[2025-07-17 17:03:30] [Rank 0] step:7141/10000 train_time:1696738ms step_avg:237.61ms +[2025-07-17 17:03:30] [Rank 0] step:7141/10000 train_time:1696738ms step_avg:237.61ms +[2025-07-17 17:03:35] [Rank 0] step:7161/10000 train_time:1701725ms step_avg:237.64ms +[2025-07-17 17:03:35] [Rank 0] step:7161/10000 train_time:1701725ms step_avg:237.64ms +[2025-07-17 17:03:40] [Rank 0] step:7181/10000 train_time:1706703ms step_avg:237.67ms +[2025-07-17 17:03:40] [Rank 0] step:7181/10000 train_time:1706703ms step_avg:237.67ms +[2025-07-17 17:03:45] [Rank 0] step:7201/10000 train_time:1711705ms step_avg:237.70ms +[2025-07-17 17:03:45] [Rank 0] step:7201/10000 train_time:1711705ms step_avg:237.70ms +[2025-07-17 17:03:50] [Rank 0] step:7221/10000 train_time:1716692ms step_avg:237.74ms +[2025-07-17 17:03:50] [Rank 0] step:7221/10000 train_time:1716692ms step_avg:237.74ms +[2025-07-17 17:03:55] [Rank 0] step:7241/10000 train_time:1721681ms step_avg:237.77ms +[2025-07-17 17:03:55] [Rank 0] step:7241/10000 train_time:1721681ms step_avg:237.77ms +[2025-07-17 17:04:02] [Rank 0] PRINT: step:7250/10000 val_loss:4.4190 train_time:1724237ms step_avg:237.83ms +[2025-07-17 17:04:02] [Rank 0] PRINT: step:7250/10000 val_loss:4.4190 train_time:1724237ms step_avg:237.83ms +[2025-07-17 17:04:04] [Rank 0] step:7261/10000 train_time:1726665ms 
step_avg:237.80ms +[2025-07-17 17:04:04] [Rank 0] step:7261/10000 train_time:1726665ms step_avg:237.80ms +[2025-07-17 17:04:09] [Rank 0] step:7281/10000 train_time:1731646ms step_avg:237.83ms +[2025-07-17 17:04:09] [Rank 0] step:7281/10000 train_time:1731646ms step_avg:237.83ms +[2025-07-17 17:04:14] [Rank 0] step:7301/10000 train_time:1736635ms step_avg:237.86ms +[2025-07-17 17:04:14] [Rank 0] step:7301/10000 train_time:1736635ms step_avg:237.86ms +[2025-07-17 17:04:19] [Rank 0] step:7321/10000 train_time:1741634ms step_avg:237.90ms +[2025-07-17 17:04:19] [Rank 0] step:7321/10000 train_time:1741634ms step_avg:237.90ms +[2025-07-17 17:04:24] [Rank 0] step:7341/10000 train_time:1746616ms step_avg:237.93ms +[2025-07-17 17:04:24] [Rank 0] step:7341/10000 train_time:1746616ms step_avg:237.93ms +[2025-07-17 17:04:29] [Rank 0] step:7361/10000 train_time:1751611ms step_avg:237.96ms +[2025-07-17 17:04:29] [Rank 0] step:7361/10000 train_time:1751611ms step_avg:237.96ms +[2025-07-17 17:04:38] [Rank 0] PRINT: step:7375/10000 val_loss:4.3562 train_time:1755413ms step_avg:238.02ms +[2025-07-17 17:04:38] [Rank 0] PRINT: step:7375/10000 val_loss:4.3562 train_time:1755413ms step_avg:238.02ms +[2025-07-17 17:04:39] [Rank 0] step:7381/10000 train_time:1756597ms step_avg:237.99ms +[2025-07-17 17:04:39] [Rank 0] step:7381/10000 train_time:1756597ms step_avg:237.99ms +[2025-07-17 17:04:44] [Rank 0] step:7401/10000 train_time:1761589ms step_avg:238.02ms +[2025-07-17 17:04:44] [Rank 0] step:7401/10000 train_time:1761589ms step_avg:238.02ms +[2025-07-17 17:04:49] [Rank 0] step:7421/10000 train_time:1766577ms step_avg:238.05ms +[2025-07-17 17:04:49] [Rank 0] step:7421/10000 train_time:1766577ms step_avg:238.05ms +[2025-07-17 17:04:54] [Rank 0] step:7441/10000 train_time:1771582ms step_avg:238.08ms +[2025-07-17 17:04:54] [Rank 0] step:7441/10000 train_time:1771582ms step_avg:238.08ms +[2025-07-17 17:04:59] [Rank 0] step:7461/10000 train_time:1776573ms step_avg:238.11ms +[2025-07-17 
17:04:59] [Rank 0] step:7461/10000 train_time:1776573ms step_avg:238.11ms +[2025-07-17 17:05:04] [Rank 0] step:7481/10000 train_time:1781579ms step_avg:238.15ms +[2025-07-17 17:05:04] [Rank 0] step:7481/10000 train_time:1781579ms step_avg:238.15ms +[2025-07-17 17:05:14] [Rank 0] PRINT: step:7500/10000 val_loss:4.4073 train_time:1786651ms step_avg:238.22ms +[2025-07-17 17:05:14] [Rank 0] PRINT: step:7500/10000 val_loss:4.4073 train_time:1786651ms step_avg:238.22ms +[2025-07-17 17:05:14] [Rank 0] step:7501/10000 train_time:1786667ms step_avg:238.19ms +[2025-07-17 17:05:14] [Rank 0] step:7501/10000 train_time:1786667ms step_avg:238.19ms +[2025-07-17 17:05:19] [Rank 0] step:7521/10000 train_time:1791595ms step_avg:238.21ms +[2025-07-17 17:05:19] [Rank 0] step:7521/10000 train_time:1791595ms step_avg:238.21ms +[2025-07-17 17:05:24] [Rank 0] step:7541/10000 train_time:1796590ms step_avg:238.24ms +[2025-07-17 17:05:24] [Rank 0] step:7541/10000 train_time:1796590ms step_avg:238.24ms +[2025-07-17 17:05:29] [Rank 0] step:7561/10000 train_time:1801583ms step_avg:238.27ms +[2025-07-17 17:05:29] [Rank 0] step:7561/10000 train_time:1801583ms step_avg:238.27ms +[2025-07-17 17:05:34] [Rank 0] step:7581/10000 train_time:1806588ms step_avg:238.30ms +[2025-07-17 17:05:34] [Rank 0] step:7581/10000 train_time:1806588ms step_avg:238.30ms +[2025-07-17 17:05:39] [Rank 0] step:7601/10000 train_time:1811598ms step_avg:238.34ms +[2025-07-17 17:05:39] [Rank 0] step:7601/10000 train_time:1811598ms step_avg:238.34ms +[2025-07-17 17:05:44] [Rank 0] step:7621/10000 train_time:1816619ms step_avg:238.37ms +[2025-07-17 17:05:44] [Rank 0] step:7621/10000 train_time:1816619ms step_avg:238.37ms +[2025-07-17 17:05:50] [Rank 0] PRINT: step:7625/10000 val_loss:4.4114 train_time:1817932ms step_avg:238.42ms +[2025-07-17 17:05:50] [Rank 0] PRINT: step:7625/10000 val_loss:4.4114 train_time:1817932ms step_avg:238.42ms +[2025-07-17 17:05:54] [Rank 0] step:7641/10000 train_time:1821619ms step_avg:238.40ms 
+[2025-07-17 17:05:54] [Rank 0] step:7641/10000 train_time:1821619ms step_avg:238.40ms +[2025-07-17 17:05:59] [Rank 0] step:7661/10000 train_time:1826629ms step_avg:238.43ms +[2025-07-17 17:05:59] [Rank 0] step:7661/10000 train_time:1826629ms step_avg:238.43ms +[2025-07-17 17:06:04] [Rank 0] step:7681/10000 train_time:1831653ms step_avg:238.47ms +[2025-07-17 17:06:04] [Rank 0] step:7681/10000 train_time:1831653ms step_avg:238.47ms +[2025-07-17 17:06:09] [Rank 0] step:7701/10000 train_time:1836657ms step_avg:238.50ms +[2025-07-17 17:06:09] [Rank 0] step:7701/10000 train_time:1836657ms step_avg:238.50ms +[2025-07-17 17:06:14] [Rank 0] step:7721/10000 train_time:1841670ms step_avg:238.53ms +[2025-07-17 17:06:14] [Rank 0] step:7721/10000 train_time:1841670ms step_avg:238.53ms +[2025-07-17 17:06:19] [Rank 0] step:7741/10000 train_time:1846673ms step_avg:238.56ms +[2025-07-17 17:06:19] [Rank 0] step:7741/10000 train_time:1846673ms step_avg:238.56ms +[2025-07-17 17:06:26] [Rank 0] PRINT: step:7750/10000 val_loss:4.3932 train_time:1849250ms step_avg:238.61ms +[2025-07-17 17:06:26] [Rank 0] PRINT: step:7750/10000 val_loss:4.3932 train_time:1849250ms step_avg:238.61ms +[2025-07-17 17:06:28] [Rank 0] step:7761/10000 train_time:1851686ms step_avg:238.59ms +[2025-07-17 17:06:28] [Rank 0] step:7761/10000 train_time:1851686ms step_avg:238.59ms +[2025-07-17 17:06:33] [Rank 0] step:7781/10000 train_time:1856699ms step_avg:238.62ms +[2025-07-17 17:06:33] [Rank 0] step:7781/10000 train_time:1856699ms step_avg:238.62ms +[2025-07-17 17:06:38] [Rank 0] step:7801/10000 train_time:1861708ms step_avg:238.65ms +[2025-07-17 17:06:38] [Rank 0] step:7801/10000 train_time:1861708ms step_avg:238.65ms +[2025-07-17 17:06:44] [Rank 0] step:7821/10000 train_time:1866714ms step_avg:238.68ms +[2025-07-17 17:06:44] [Rank 0] step:7821/10000 train_time:1866714ms step_avg:238.68ms +[2025-07-17 17:06:49] [Rank 0] step:7841/10000 train_time:1871721ms step_avg:238.71ms +[2025-07-17 17:06:49] [Rank 0] 
step:7841/10000 train_time:1871721ms step_avg:238.71ms +[2025-07-17 17:06:53] [Rank 0] step:7861/10000 train_time:1876713ms step_avg:238.74ms +[2025-07-17 17:06:53] [Rank 0] step:7861/10000 train_time:1876713ms step_avg:238.74ms +[2025-07-17 17:07:02] [Rank 0] PRINT: step:7875/10000 val_loss:4.4758 train_time:1880519ms step_avg:238.80ms +[2025-07-17 17:07:02] [Rank 0] PRINT: step:7875/10000 val_loss:4.4758 train_time:1880519ms step_avg:238.80ms +[2025-07-17 17:07:03] [Rank 0] step:7881/10000 train_time:1881706ms step_avg:238.76ms +[2025-07-17 17:07:03] [Rank 0] step:7881/10000 train_time:1881706ms step_avg:238.76ms +[2025-07-17 17:07:08] [Rank 0] step:7901/10000 train_time:1886707ms step_avg:238.79ms +[2025-07-17 17:07:08] [Rank 0] step:7901/10000 train_time:1886707ms step_avg:238.79ms +[2025-07-17 17:07:13] [Rank 0] step:7921/10000 train_time:1891705ms step_avg:238.82ms +[2025-07-17 17:07:13] [Rank 0] step:7921/10000 train_time:1891705ms step_avg:238.82ms +[2025-07-17 17:07:18] [Rank 0] step:7941/10000 train_time:1896708ms step_avg:238.85ms +[2025-07-17 17:07:18] [Rank 0] step:7941/10000 train_time:1896708ms step_avg:238.85ms +[2025-07-17 17:07:23] [Rank 0] step:7961/10000 train_time:1901721ms step_avg:238.88ms +[2025-07-17 17:07:23] [Rank 0] step:7961/10000 train_time:1901721ms step_avg:238.88ms +[2025-07-17 17:07:28] [Rank 0] step:7981/10000 train_time:1906717ms step_avg:238.91ms +[2025-07-17 17:07:28] [Rank 0] step:7981/10000 train_time:1906717ms step_avg:238.91ms +[2025-07-17 17:07:38] [Rank 0] PRINT: step:8000/10000 val_loss:4.5152 train_time:1911788ms step_avg:238.97ms +[2025-07-17 17:07:38] [Rank 0] PRINT: step:8000/10000 val_loss:4.5152 train_time:1911788ms step_avg:238.97ms +[2025-07-17 17:07:38] [Rank 0] step:8001/10000 train_time:1911804ms step_avg:238.95ms +[2025-07-17 17:07:38] [Rank 0] step:8001/10000 train_time:1911804ms step_avg:238.95ms +[2025-07-17 17:07:43] [Rank 0] step:8021/10000 train_time:1916722ms step_avg:238.96ms +[2025-07-17 17:07:43] 
[Rank 0] step:8021/10000 train_time:1916722ms step_avg:238.96ms +[2025-07-17 17:07:48] [Rank 0] step:8041/10000 train_time:1921743ms step_avg:238.99ms +[2025-07-17 17:07:48] [Rank 0] step:8041/10000 train_time:1921743ms step_avg:238.99ms +[2025-07-17 17:07:53] [Rank 0] step:8061/10000 train_time:1926738ms step_avg:239.02ms +[2025-07-17 17:07:53] [Rank 0] step:8061/10000 train_time:1926738ms step_avg:239.02ms +[2025-07-17 17:07:58] [Rank 0] step:8081/10000 train_time:1931744ms step_avg:239.05ms +[2025-07-17 17:07:58] [Rank 0] step:8081/10000 train_time:1931744ms step_avg:239.05ms +[2025-07-17 17:08:03] [Rank 0] step:8101/10000 train_time:1936742ms step_avg:239.07ms +[2025-07-17 17:08:03] [Rank 0] step:8101/10000 train_time:1936742ms step_avg:239.07ms +[2025-07-17 17:08:08] [Rank 0] step:8121/10000 train_time:1941736ms step_avg:239.10ms +[2025-07-17 17:08:08] [Rank 0] step:8121/10000 train_time:1941736ms step_avg:239.10ms +[2025-07-17 17:08:14] [Rank 0] PRINT: step:8125/10000 val_loss:4.4586 train_time:1943047ms step_avg:239.14ms +[2025-07-17 17:08:14] [Rank 0] PRINT: step:8125/10000 val_loss:4.4586 train_time:1943047ms step_avg:239.14ms +[2025-07-17 17:08:18] [Rank 0] step:8141/10000 train_time:1946740ms step_avg:239.13ms +[2025-07-17 17:08:18] [Rank 0] step:8141/10000 train_time:1946740ms step_avg:239.13ms +[2025-07-17 17:08:23] [Rank 0] step:8161/10000 train_time:1951773ms step_avg:239.16ms +[2025-07-17 17:08:23] [Rank 0] step:8161/10000 train_time:1951773ms step_avg:239.16ms +[2025-07-17 17:08:28] [Rank 0] step:8181/10000 train_time:1956840ms step_avg:239.19ms +[2025-07-17 17:08:28] [Rank 0] step:8181/10000 train_time:1956840ms step_avg:239.19ms +[2025-07-17 17:08:33] [Rank 0] step:8201/10000 train_time:1961887ms step_avg:239.23ms +[2025-07-17 17:08:33] [Rank 0] step:8201/10000 train_time:1961887ms step_avg:239.23ms +[2025-07-17 17:08:38] [Rank 0] step:8221/10000 train_time:1966950ms step_avg:239.26ms +[2025-07-17 17:08:38] [Rank 0] step:8221/10000 
train_time:1966950ms step_avg:239.26ms +[2025-07-17 17:08:43] [Rank 0] step:8241/10000 train_time:1972007ms step_avg:239.29ms +[2025-07-17 17:08:43] [Rank 0] step:8241/10000 train_time:1972007ms step_avg:239.29ms +[2025-07-17 17:08:50] [Rank 0] PRINT: step:8250/10000 val_loss:4.4388 train_time:1974602ms step_avg:239.35ms +[2025-07-17 17:08:50] [Rank 0] PRINT: step:8250/10000 val_loss:4.4388 train_time:1974602ms step_avg:239.35ms +[2025-07-17 17:08:53] [Rank 0] step:8261/10000 train_time:1977070ms step_avg:239.33ms +[2025-07-17 17:08:53] [Rank 0] step:8261/10000 train_time:1977070ms step_avg:239.33ms +[2025-07-17 17:08:58] [Rank 0] step:8281/10000 train_time:1982149ms step_avg:239.36ms +[2025-07-17 17:08:58] [Rank 0] step:8281/10000 train_time:1982149ms step_avg:239.36ms +[2025-07-17 17:09:03] [Rank 0] step:8301/10000 train_time:1987203ms step_avg:239.39ms +[2025-07-17 17:09:03] [Rank 0] step:8301/10000 train_time:1987203ms step_avg:239.39ms +[2025-07-17 17:09:08] [Rank 0] step:8321/10000 train_time:1992266ms step_avg:239.43ms +[2025-07-17 17:09:08] [Rank 0] step:8321/10000 train_time:1992266ms step_avg:239.43ms +[2025-07-17 17:09:13] [Rank 0] step:8341/10000 train_time:1997337ms step_avg:239.46ms +[2025-07-17 17:09:13] [Rank 0] step:8341/10000 train_time:1997337ms step_avg:239.46ms +[2025-07-17 17:09:18] [Rank 0] step:8361/10000 train_time:2002389ms step_avg:239.49ms +[2025-07-17 17:09:18] [Rank 0] step:8361/10000 train_time:2002389ms step_avg:239.49ms +[2025-07-17 17:09:27] [Rank 0] PRINT: step:8375/10000 val_loss:4.4420 train_time:2006249ms step_avg:239.55ms +[2025-07-17 17:09:27] [Rank 0] PRINT: step:8375/10000 val_loss:4.4420 train_time:2006249ms step_avg:239.55ms +[2025-07-17 17:09:28] [Rank 0] step:8381/10000 train_time:2007443ms step_avg:239.52ms +[2025-07-17 17:09:28] [Rank 0] step:8381/10000 train_time:2007443ms step_avg:239.52ms +[2025-07-17 17:09:33] [Rank 0] step:8401/10000 train_time:2012484ms step_avg:239.55ms +[2025-07-17 17:09:33] [Rank 0] 
step:8401/10000 train_time:2012484ms step_avg:239.55ms +[2025-07-17 17:09:38] [Rank 0] step:8421/10000 train_time:2017553ms step_avg:239.59ms +[2025-07-17 17:09:38] [Rank 0] step:8421/10000 train_time:2017553ms step_avg:239.59ms +[2025-07-17 17:09:43] [Rank 0] step:8441/10000 train_time:2022617ms step_avg:239.62ms +[2025-07-17 17:09:43] [Rank 0] step:8441/10000 train_time:2022617ms step_avg:239.62ms +[2025-07-17 17:09:48] [Rank 0] step:8461/10000 train_time:2027696ms step_avg:239.65ms +[2025-07-17 17:09:48] [Rank 0] step:8461/10000 train_time:2027696ms step_avg:239.65ms +[2025-07-17 17:09:53] [Rank 0] step:8481/10000 train_time:2032752ms step_avg:239.68ms +[2025-07-17 17:09:53] [Rank 0] step:8481/10000 train_time:2032752ms step_avg:239.68ms +[2025-07-17 17:10:03] [Rank 0] PRINT: step:8500/10000 val_loss:4.4648 train_time:2037876ms step_avg:239.75ms +[2025-07-17 17:10:03] [Rank 0] PRINT: step:8500/10000 val_loss:4.4648 train_time:2037876ms step_avg:239.75ms +[2025-07-17 17:10:03] [Rank 0] step:8501/10000 train_time:2037893ms step_avg:239.72ms +[2025-07-17 17:10:03] [Rank 0] step:8501/10000 train_time:2037893ms step_avg:239.72ms +[2025-07-17 17:10:08] [Rank 0] step:8521/10000 train_time:2042877ms step_avg:239.75ms +[2025-07-17 17:10:08] [Rank 0] step:8521/10000 train_time:2042877ms step_avg:239.75ms +[2025-07-17 17:10:13] [Rank 0] step:8541/10000 train_time:2047961ms step_avg:239.78ms +[2025-07-17 17:10:13] [Rank 0] step:8541/10000 train_time:2047961ms step_avg:239.78ms +[2025-07-17 17:10:18] [Rank 0] step:8561/10000 train_time:2053018ms step_avg:239.81ms +[2025-07-17 17:10:18] [Rank 0] step:8561/10000 train_time:2053018ms step_avg:239.81ms +[2025-07-17 17:10:23] [Rank 0] step:8581/10000 train_time:2058083ms step_avg:239.84ms +[2025-07-17 17:10:23] [Rank 0] step:8581/10000 train_time:2058083ms step_avg:239.84ms +[2025-07-17 17:10:28] [Rank 0] step:8601/10000 train_time:2063134ms step_avg:239.87ms +[2025-07-17 17:10:28] [Rank 0] step:8601/10000 train_time:2063134ms 
step_avg:239.87ms +[2025-07-17 17:10:33] [Rank 0] step:8621/10000 train_time:2068191ms step_avg:239.90ms +[2025-07-17 17:10:33] [Rank 0] step:8621/10000 train_time:2068191ms step_avg:239.90ms +[2025-07-17 17:10:39] [Rank 0] PRINT: step:8625/10000 val_loss:4.4640 train_time:2069519ms step_avg:239.94ms +[2025-07-17 17:10:39] [Rank 0] PRINT: step:8625/10000 val_loss:4.4640 train_time:2069519ms step_avg:239.94ms +[2025-07-17 17:10:43] [Rank 0] step:8641/10000 train_time:2073268ms step_avg:239.93ms +[2025-07-17 17:10:43] [Rank 0] step:8641/10000 train_time:2073268ms step_avg:239.93ms +[2025-07-17 17:10:48] [Rank 0] step:8661/10000 train_time:2078328ms step_avg:239.96ms +[2025-07-17 17:10:48] [Rank 0] step:8661/10000 train_time:2078328ms step_avg:239.96ms +[2025-07-17 17:10:53] [Rank 0] step:8681/10000 train_time:2083386ms step_avg:239.99ms +[2025-07-17 17:10:53] [Rank 0] step:8681/10000 train_time:2083386ms step_avg:239.99ms +[2025-07-17 17:10:58] [Rank 0] step:8701/10000 train_time:2088453ms step_avg:240.02ms +[2025-07-17 17:10:58] [Rank 0] step:8701/10000 train_time:2088453ms step_avg:240.02ms +[2025-07-17 17:11:03] [Rank 0] step:8721/10000 train_time:2093519ms step_avg:240.05ms +[2025-07-17 17:11:03] [Rank 0] step:8721/10000 train_time:2093519ms step_avg:240.05ms +[2025-07-17 17:11:08] [Rank 0] step:8741/10000 train_time:2098584ms step_avg:240.09ms +[2025-07-17 17:11:08] [Rank 0] step:8741/10000 train_time:2098584ms step_avg:240.09ms +[2025-07-17 17:11:15] [Rank 0] PRINT: step:8750/10000 val_loss:4.4659 train_time:2101174ms step_avg:240.13ms +[2025-07-17 17:11:15] [Rank 0] PRINT: step:8750/10000 val_loss:4.4659 train_time:2101174ms step_avg:240.13ms +[2025-07-17 17:11:18] [Rank 0] step:8761/10000 train_time:2103647ms step_avg:240.11ms +[2025-07-17 17:11:18] [Rank 0] step:8761/10000 train_time:2103647ms step_avg:240.11ms +[2025-07-17 17:11:23] [Rank 0] step:8781/10000 train_time:2108709ms step_avg:240.14ms +[2025-07-17 17:11:23] [Rank 0] step:8781/10000 
train_time:2108709ms step_avg:240.14ms +[2025-07-17 17:11:28] [Rank 0] step:8801/10000 train_time:2113777ms step_avg:240.17ms +[2025-07-17 17:11:28] [Rank 0] step:8801/10000 train_time:2113777ms step_avg:240.17ms +[2025-07-17 17:11:33] [Rank 0] step:8821/10000 train_time:2118838ms step_avg:240.20ms +[2025-07-17 17:11:33] [Rank 0] step:8821/10000 train_time:2118838ms step_avg:240.20ms +[2025-07-17 17:11:38] [Rank 0] step:8841/10000 train_time:2123920ms step_avg:240.24ms +[2025-07-17 17:11:38] [Rank 0] step:8841/10000 train_time:2123920ms step_avg:240.24ms +[2025-07-17 17:11:43] [Rank 0] step:8861/10000 train_time:2128988ms step_avg:240.26ms +[2025-07-17 17:11:43] [Rank 0] step:8861/10000 train_time:2128988ms step_avg:240.26ms +[2025-07-17 17:11:51] [Rank 0] PRINT: step:8875/10000 val_loss:4.5346 train_time:2132847ms step_avg:240.32ms +[2025-07-17 17:11:51] [Rank 0] PRINT: step:8875/10000 val_loss:4.5346 train_time:2132847ms step_avg:240.32ms +[2025-07-17 17:11:53] [Rank 0] step:8881/10000 train_time:2134048ms step_avg:240.29ms +[2025-07-17 17:11:53] [Rank 0] step:8881/10000 train_time:2134048ms step_avg:240.29ms +[2025-07-17 17:11:58] [Rank 0] step:8901/10000 train_time:2139110ms step_avg:240.32ms +[2025-07-17 17:11:58] [Rank 0] step:8901/10000 train_time:2139110ms step_avg:240.32ms +[2025-07-17 17:12:03] [Rank 0] step:8921/10000 train_time:2144168ms step_avg:240.35ms +[2025-07-17 17:12:03] [Rank 0] step:8921/10000 train_time:2144168ms step_avg:240.35ms +[2025-07-17 17:12:08] [Rank 0] step:8941/10000 train_time:2149229ms step_avg:240.38ms +[2025-07-17 17:12:08] [Rank 0] step:8941/10000 train_time:2149229ms step_avg:240.38ms +[2025-07-17 17:12:13] [Rank 0] step:8961/10000 train_time:2154294ms step_avg:240.41ms +[2025-07-17 17:12:13] [Rank 0] step:8961/10000 train_time:2154294ms step_avg:240.41ms +[2025-07-17 17:12:18] [Rank 0] step:8981/10000 train_time:2159361ms step_avg:240.44ms +[2025-07-17 17:12:18] [Rank 0] step:8981/10000 train_time:2159361ms step_avg:240.44ms 
+[2025-07-17 17:12:28] [Rank 0] PRINT: step:9000/10000 val_loss:4.5024 train_time:2164482ms step_avg:240.50ms +[2025-07-17 17:12:28] [Rank 0] PRINT: step:9000/10000 val_loss:4.5024 train_time:2164482ms step_avg:240.50ms +[2025-07-17 17:12:28] [Rank 0] step:9001/10000 train_time:2164499ms step_avg:240.47ms +[2025-07-17 17:12:28] [Rank 0] step:9001/10000 train_time:2164499ms step_avg:240.47ms +[2025-07-17 17:12:33] [Rank 0] step:9021/10000 train_time:2169480ms step_avg:240.49ms +[2025-07-17 17:12:33] [Rank 0] step:9021/10000 train_time:2169480ms step_avg:240.49ms +[2025-07-17 17:12:38] [Rank 0] step:9041/10000 train_time:2174563ms step_avg:240.52ms +[2025-07-17 17:12:38] [Rank 0] step:9041/10000 train_time:2174563ms step_avg:240.52ms +[2025-07-17 17:12:43] [Rank 0] step:9061/10000 train_time:2179628ms step_avg:240.55ms +[2025-07-17 17:12:43] [Rank 0] step:9061/10000 train_time:2179628ms step_avg:240.55ms +[2025-07-17 17:12:48] [Rank 0] step:9081/10000 train_time:2184722ms step_avg:240.58ms +[2025-07-17 17:12:48] [Rank 0] step:9081/10000 train_time:2184722ms step_avg:240.58ms +[2025-07-17 17:12:54] [Rank 0] step:9101/10000 train_time:2189803ms step_avg:240.61ms +[2025-07-17 17:12:54] [Rank 0] step:9101/10000 train_time:2189803ms step_avg:240.61ms +[2025-07-17 17:12:59] [Rank 0] step:9121/10000 train_time:2194886ms step_avg:240.64ms +[2025-07-17 17:12:59] [Rank 0] step:9121/10000 train_time:2194886ms step_avg:240.64ms +[2025-07-17 17:13:04] [Rank 0] PRINT: step:9125/10000 val_loss:4.5031 train_time:2196214ms step_avg:240.68ms +[2025-07-17 17:13:04] [Rank 0] PRINT: step:9125/10000 val_loss:4.5031 train_time:2196214ms step_avg:240.68ms +[2025-07-17 17:13:08] [Rank 0] step:9141/10000 train_time:2199947ms step_avg:240.67ms +[2025-07-17 17:13:08] [Rank 0] step:9141/10000 train_time:2199947ms step_avg:240.67ms +[2025-07-17 17:13:14] [Rank 0] step:9161/10000 train_time:2205047ms step_avg:240.70ms +[2025-07-17 17:13:14] [Rank 0] step:9161/10000 train_time:2205047ms 
step_avg:240.70ms +[2025-07-17 17:13:19] [Rank 0] step:9181/10000 train_time:2210114ms step_avg:240.73ms +[2025-07-17 17:13:19] [Rank 0] step:9181/10000 train_time:2210114ms step_avg:240.73ms +[2025-07-17 17:13:24] [Rank 0] step:9201/10000 train_time:2215187ms step_avg:240.76ms +[2025-07-17 17:13:24] [Rank 0] step:9201/10000 train_time:2215187ms step_avg:240.76ms +[2025-07-17 17:13:29] [Rank 0] step:9221/10000 train_time:2220287ms step_avg:240.79ms +[2025-07-17 17:13:29] [Rank 0] step:9221/10000 train_time:2220287ms step_avg:240.79ms +[2025-07-17 17:13:34] [Rank 0] step:9241/10000 train_time:2225368ms step_avg:240.81ms +[2025-07-17 17:13:34] [Rank 0] step:9241/10000 train_time:2225368ms step_avg:240.81ms +[2025-07-17 17:13:41] [Rank 0] PRINT: step:9250/10000 val_loss:4.5354 train_time:2227966ms step_avg:240.86ms +[2025-07-17 17:13:41] [Rank 0] PRINT: step:9250/10000 val_loss:4.5354 train_time:2227966ms step_avg:240.86ms +[2025-07-17 17:13:44] [Rank 0] step:9261/10000 train_time:2230443ms step_avg:240.84ms +[2025-07-17 17:13:44] [Rank 0] step:9261/10000 train_time:2230443ms step_avg:240.84ms +[2025-07-17 17:13:49] [Rank 0] step:9281/10000 train_time:2235491ms step_avg:240.87ms +[2025-07-17 17:13:49] [Rank 0] step:9281/10000 train_time:2235491ms step_avg:240.87ms +[2025-07-17 17:13:54] [Rank 0] step:9301/10000 train_time:2240563ms step_avg:240.89ms +[2025-07-17 17:13:54] [Rank 0] step:9301/10000 train_time:2240563ms step_avg:240.89ms +[2025-07-17 17:13:59] [Rank 0] step:9321/10000 train_time:2245656ms step_avg:240.92ms +[2025-07-17 17:13:59] [Rank 0] step:9321/10000 train_time:2245656ms step_avg:240.92ms +[2025-07-17 17:14:04] [Rank 0] step:9341/10000 train_time:2250723ms step_avg:240.95ms +[2025-07-17 17:14:04] [Rank 0] step:9341/10000 train_time:2250723ms step_avg:240.95ms +[2025-07-17 17:14:09] [Rank 0] step:9361/10000 train_time:2255788ms step_avg:240.98ms +[2025-07-17 17:14:09] [Rank 0] step:9361/10000 train_time:2255788ms step_avg:240.98ms +[2025-07-17 
17:14:17] [Rank 0] PRINT: step:9375/10000 val_loss:4.5319 train_time:2259651ms step_avg:241.03ms +[2025-07-17 17:14:17] [Rank 0] PRINT: step:9375/10000 val_loss:4.5319 train_time:2259651ms step_avg:241.03ms +[2025-07-17 17:14:19] [Rank 0] step:9381/10000 train_time:2260852ms step_avg:241.00ms +[2025-07-17 17:14:19] [Rank 0] step:9381/10000 train_time:2260852ms step_avg:241.00ms +[2025-07-17 17:14:24] [Rank 0] step:9401/10000 train_time:2265905ms step_avg:241.03ms +[2025-07-17 17:14:24] [Rank 0] step:9401/10000 train_time:2265905ms step_avg:241.03ms +[2025-07-17 17:14:29] [Rank 0] step:9421/10000 train_time:2270970ms step_avg:241.05ms +[2025-07-17 17:14:29] [Rank 0] step:9421/10000 train_time:2270970ms step_avg:241.05ms +[2025-07-17 17:14:34] [Rank 0] step:9441/10000 train_time:2276045ms step_avg:241.08ms +[2025-07-17 17:14:34] [Rank 0] step:9441/10000 train_time:2276045ms step_avg:241.08ms +[2025-07-17 17:14:39] [Rank 0] step:9461/10000 train_time:2281123ms step_avg:241.11ms +[2025-07-17 17:14:39] [Rank 0] step:9461/10000 train_time:2281123ms step_avg:241.11ms +[2025-07-17 17:14:44] [Rank 0] step:9481/10000 train_time:2286205ms step_avg:241.14ms +[2025-07-17 17:14:44] [Rank 0] step:9481/10000 train_time:2286205ms step_avg:241.14ms +[2025-07-17 17:14:54] [Rank 0] PRINT: step:9500/10000 val_loss:4.5163 train_time:2291367ms step_avg:241.20ms +[2025-07-17 17:14:54] [Rank 0] PRINT: step:9500/10000 val_loss:4.5163 train_time:2291367ms step_avg:241.20ms +[2025-07-17 17:14:54] [Rank 0] step:9501/10000 train_time:2291383ms step_avg:241.17ms +[2025-07-17 17:14:54] [Rank 0] step:9501/10000 train_time:2291383ms step_avg:241.17ms +[2025-07-17 17:14:59] [Rank 0] step:9521/10000 train_time:2296373ms step_avg:241.19ms +[2025-07-17 17:14:59] [Rank 0] step:9521/10000 train_time:2296373ms step_avg:241.19ms +[2025-07-17 17:15:04] [Rank 0] step:9541/10000 train_time:2301467ms step_avg:241.22ms +[2025-07-17 17:15:04] [Rank 0] step:9541/10000 train_time:2301467ms step_avg:241.22ms 
+[2025-07-17 17:15:09] [Rank 0] step:9561/10000 train_time:2306526ms step_avg:241.24ms +[2025-07-17 17:15:09] [Rank 0] step:9561/10000 train_time:2306526ms step_avg:241.24ms +[2025-07-17 17:15:14] [Rank 0] step:9581/10000 train_time:2311596ms step_avg:241.27ms +[2025-07-17 17:15:14] [Rank 0] step:9581/10000 train_time:2311596ms step_avg:241.27ms +[2025-07-17 17:15:19] [Rank 0] step:9601/10000 train_time:2316658ms step_avg:241.29ms +[2025-07-17 17:15:19] [Rank 0] step:9601/10000 train_time:2316658ms step_avg:241.29ms +[2025-07-17 17:15:25] [Rank 0] step:9621/10000 train_time:2321754ms step_avg:241.32ms +[2025-07-17 17:15:25] [Rank 0] step:9621/10000 train_time:2321754ms step_avg:241.32ms +[2025-07-17 17:15:30] [Rank 0] PRINT: step:9625/10000 val_loss:4.5201 train_time:2323079ms step_avg:241.36ms +[2025-07-17 17:15:30] [Rank 0] PRINT: step:9625/10000 val_loss:4.5201 train_time:2323079ms step_avg:241.36ms +[2025-07-17 17:15:34] [Rank 0] step:9641/10000 train_time:2326849ms step_avg:241.35ms +[2025-07-17 17:15:34] [Rank 0] step:9641/10000 train_time:2326849ms step_avg:241.35ms +[2025-07-17 17:15:40] [Rank 0] step:9661/10000 train_time:2331986ms step_avg:241.38ms +[2025-07-17 17:15:40] [Rank 0] step:9661/10000 train_time:2331986ms step_avg:241.38ms +[2025-07-17 17:15:45] [Rank 0] step:9681/10000 train_time:2337121ms step_avg:241.41ms +[2025-07-17 17:15:45] [Rank 0] step:9681/10000 train_time:2337121ms step_avg:241.41ms +[2025-07-17 17:15:50] [Rank 0] step:9701/10000 train_time:2342261ms step_avg:241.45ms +[2025-07-17 17:15:50] [Rank 0] step:9701/10000 train_time:2342261ms step_avg:241.45ms +[2025-07-17 17:15:55] [Rank 0] step:9721/10000 train_time:2347386ms step_avg:241.48ms +[2025-07-17 17:15:55] [Rank 0] step:9721/10000 train_time:2347386ms step_avg:241.48ms +[2025-07-17 17:16:00] [Rank 0] step:9741/10000 train_time:2352526ms step_avg:241.51ms +[2025-07-17 17:16:00] [Rank 0] step:9741/10000 train_time:2352526ms step_avg:241.51ms +[2025-07-17 17:16:07] [Rank 0] PRINT: 
step:9750/10000 val_loss:4.6150 train_time:2355146ms step_avg:241.55ms +[2025-07-17 17:16:07] [Rank 0] PRINT: step:9750/10000 val_loss:4.6150 train_time:2355146ms step_avg:241.55ms +[2025-07-17 17:16:10] [Rank 0] step:9761/10000 train_time:2357643ms step_avg:241.54ms +[2025-07-17 17:16:10] [Rank 0] step:9761/10000 train_time:2357643ms step_avg:241.54ms +[2025-07-17 17:16:15] [Rank 0] step:9781/10000 train_time:2362770ms step_avg:241.57ms +[2025-07-17 17:16:15] [Rank 0] step:9781/10000 train_time:2362770ms step_avg:241.57ms +[2025-07-17 17:16:20] [Rank 0] step:9801/10000 train_time:2367886ms step_avg:241.60ms +[2025-07-17 17:16:20] [Rank 0] step:9801/10000 train_time:2367886ms step_avg:241.60ms +[2025-07-17 17:16:25] [Rank 0] step:9821/10000 train_time:2373005ms step_avg:241.63ms +[2025-07-17 17:16:25] [Rank 0] step:9821/10000 train_time:2373005ms step_avg:241.63ms +[2025-07-17 17:16:31] [Rank 0] step:9841/10000 train_time:2378123ms step_avg:241.65ms +[2025-07-17 17:16:31] [Rank 0] step:9841/10000 train_time:2378123ms step_avg:241.65ms +[2025-07-17 17:16:36] [Rank 0] step:9861/10000 train_time:2383243ms step_avg:241.68ms +[2025-07-17 17:16:36] [Rank 0] step:9861/10000 train_time:2383243ms step_avg:241.68ms +[2025-07-17 17:16:44] [Rank 0] PRINT: step:9875/10000 val_loss:4.6064 train_time:2387142ms step_avg:241.74ms +[2025-07-17 17:16:44] [Rank 0] PRINT: step:9875/10000 val_loss:4.6064 train_time:2387142ms step_avg:241.74ms +[2025-07-17 17:16:46] [Rank 0] step:9881/10000 train_time:2388366ms step_avg:241.71ms +[2025-07-17 17:16:46] [Rank 0] step:9881/10000 train_time:2388366ms step_avg:241.71ms +[2025-07-17 17:16:51] [Rank 0] step:9901/10000 train_time:2393485ms step_avg:241.74ms +[2025-07-17 17:16:51] [Rank 0] step:9901/10000 train_time:2393485ms step_avg:241.74ms +[2025-07-17 17:16:56] [Rank 0] step:9921/10000 train_time:2398617ms step_avg:241.77ms +[2025-07-17 17:16:56] [Rank 0] step:9921/10000 train_time:2398617ms step_avg:241.77ms +[2025-07-17 17:17:01] [Rank 0] 
step:9941/10000 train_time:2403776ms step_avg:241.80ms +[2025-07-17 17:17:01] [Rank 0] step:9941/10000 train_time:2403776ms step_avg:241.80ms +[2025-07-17 17:17:06] [Rank 0] step:9961/10000 train_time:2408918ms step_avg:241.83ms +[2025-07-17 17:17:06] [Rank 0] step:9961/10000 train_time:2408918ms step_avg:241.83ms +[2025-07-17 17:17:11] [Rank 0] step:9981/10000 train_time:2414072ms step_avg:241.87ms +[2025-07-17 17:17:11] [Rank 0] step:9981/10000 train_time:2414072ms step_avg:241.87ms +[2025-07-17 17:17:16] [Rank 0] step:10000/10000 train_time:2418927ms step_avg:241.89ms +[2025-07-17 17:17:16] [Rank 0] step:10000/10000 train_time:2418927ms step_avg:241.89ms +[2025-07-17 17:17:21] [Rank 0] PRINT: step:10000/10000 val_loss:4.6250 train_time:2419252ms step_avg:241.93ms +[2025-07-17 17:17:21] [Rank 0] PRINT: step:10000/10000 val_loss:4.6250 train_time:2419252ms step_avg:241.93ms +[2025-07-17 17:17:21] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 17:17:21 2025 --- +[2025-07-17 17:17:21] [Rank 0] PRINT: --- Training Finished: Thu Jul 17 17:17:21 2025 --- +[2025-07-17 17:17:21] [Rank 0] PRINT: Peak memory allocated: 30964 MiB reserved: 31494 MiB +[2025-07-17 17:17:21] [Rank 0] PRINT: Peak memory allocated: 30964 MiB reserved: 31494 MiB diff --git a/logs_norope/diff_modes/mode_8_param_norope_seed_43/config.json b/logs_norope/diff_modes/mode_8_param_norope_seed_43/config.json new file mode 100644 index 0000000000000000000000000000000000000000..ae3c6077ec4a2c5846b8fbb68c979777761248c6 --- /dev/null +++ b/logs_norope/diff_modes/mode_8_param_norope_seed_43/config.json @@ -0,0 +1,22 @@ +{ + "cli_args": { + "unet": false, + "seed": 43, + "optimizer_mode": 8, + "model_parameterization": "norope" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 10485760, + "train_seq_len": 49152, + 
"val_seq_len": 262144, + "num_iterations": 10000, + "cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 125, + "save_checkpoint": false + }, + "run_uuid_for_log": "a844408f-c36b-438f-855e-c51df4fcc4d1", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_norope/diff_modes/mode_8_param_norope_seed_43/training_log_a844408f-c36b-438f-855e-c51df4fcc4d1.txt b/logs_norope/diff_modes/mode_8_param_norope_seed_43/training_log_a844408f-c36b-438f-855e-c51df4fcc4d1.txt new file mode 100644 index 0000000000000000000000000000000000000000..a4fcc9c5c9b4bb04e96c0b530390d925b25f81e6 --- /dev/null +++ b/logs_norope/diff_modes/mode_8_param_norope_seed_43/training_log_a844408f-c36b-438f-855e-c51df4fcc4d1.txt @@ -0,0 +1,2360 @@ +[2025-07-18 04:28:09] [Rank 0] PRINT: --- Script Start: Fri Jul 18 04:28:09 2025 --- +[2025-07-18 04:28:09] [Rank 0] PRINT: --- Script Start: Fri Jul 18 04:28:09 2025 --- +[2025-07-18 04:28:09] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=8, model_parameterization='norope') +[2025-07-18 04:28:09] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=8, model_parameterization='norope') +[2025-07-18 04:28:09] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-18 04:28:09] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-07-18 04:28:09] [Rank 0] PRINT: Using fixed seed: 43 +[2025-07-18 04:28:09] [Rank 0] PRINT: Using fixed seed: 43 +[2025-07-18 04:28:09] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_8_param_norope_seed_43 +[2025-07-18 04:28:09] [Rank 0] PRINT: Run directory: logs_norope/diff_modes/mode_8_param_norope_seed_43 +[2025-07-18 04:28:09] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path 
+import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * 
num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used 
for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + 
config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-18 04:28:09] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope"]) +# parser.add_argument("--adam_lr", type=float, default=0.001, help="Learning rate for Adam matrices") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_gpt_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + #val_tokens = 1966080 + val_tokens = 10485760 + #train_seq_len = 12*1024 + #val_seq_len = 4*16*1024 + train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + 
num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 125 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path("logs_norope/diff_modes") + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + 
run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = 0.001 # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=0.22), + dict(params=embed_params, lr=0.6), + dict(params=scalar_params, lr=0.04) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + 
seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5, rank=rank, world_size=world_size) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +######################################## +# Training and validation # +######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # 
Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. + avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-07-18 04:28:09] [Rank 0] PRINT: Constructing model... +[2025-07-18 04:28:09] [Rank 0] PRINT: Constructing model... +[2025-07-18 04:28:12] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-18 04:28:12] [Rank 0] PRINT: Broadcasting model parameters... +[2025-07-18 04:28:12] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-18 04:28:12] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-07-18 04:28:12] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-18 04:28:12] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-07-18 04:28:12] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-18 04:28:12] [Rank 0] PRINT: Warning - Parameter torch.Size([262144, 768]) ended up in scalar_params but has ndim >= 2. Check grouping. +[2025-07-18 04:28:12] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 8 +[2025-07-18 04:28:12] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 8 +[2025-07-18 04:28:12] [Rank 0] PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: 0.001). +[2025-07-18 04:28:12] [Rank 0] PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: 0.001). +[2025-07-18 04:28:12] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-07-18 04:28:12] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-07-18 04:28:12] [Rank 0] PRINT: Muon optimizer is active with 34 parameters. +[2025-07-18 04:28:12] [Rank 0] PRINT: Muon optimizer is active with 34 parameters. +[2025-07-18 04:28:12] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-18 04:28:12] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-07-18 04:28:12] [Rank 0] PRINT: Model compilation complete. +[2025-07-18 04:28:12] [Rank 0] PRINT: Model compilation complete. +[2025-07-18 04:28:12] [Rank 0] PRINT: Starting warmup... 
+[2025-07-18 04:28:12] [Rank 0] PRINT: Starting warmup... +[2025-07-18 04:42:13] [Rank 0] PRINT: Warmup complete. +[2025-07-18 04:42:13] [Rank 0] PRINT: Warmup complete. +[2025-07-18 04:42:13] [Rank 0] PRINT: Starting training... +[2025-07-18 04:42:13] [Rank 0] PRINT: Starting training... +[2025-07-18 04:44:09] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-18 04:44:09] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 train_time:0ms step_avg:0.00ms +[2025-07-18 04:44:14] [Rank 0] step:21/10000 train_time:4825ms step_avg:229.78ms +[2025-07-18 04:44:14] [Rank 0] step:21/10000 train_time:4825ms step_avg:229.78ms +[2025-07-18 04:44:19] [Rank 0] step:41/10000 train_time:9308ms step_avg:227.02ms +[2025-07-18 04:44:19] [Rank 0] step:41/10000 train_time:9308ms step_avg:227.02ms +[2025-07-18 04:44:23] [Rank 0] step:61/10000 train_time:13891ms step_avg:227.73ms +[2025-07-18 04:44:23] [Rank 0] step:61/10000 train_time:13891ms step_avg:227.73ms +[2025-07-18 04:44:28] [Rank 0] step:81/10000 train_time:18377ms step_avg:226.88ms +[2025-07-18 04:44:28] [Rank 0] step:81/10000 train_time:18377ms step_avg:226.88ms +[2025-07-18 04:44:32] [Rank 0] step:101/10000 train_time:22860ms step_avg:226.34ms +[2025-07-18 04:44:32] [Rank 0] step:101/10000 train_time:22860ms step_avg:226.34ms +[2025-07-18 04:44:37] [Rank 0] step:121/10000 train_time:27350ms step_avg:226.03ms +[2025-07-18 04:44:37] [Rank 0] step:121/10000 train_time:27350ms step_avg:226.03ms +[2025-07-18 04:44:42] [Rank 0] PRINT: step:125/10000 val_loss:5.1409 train_time:28530ms step_avg:228.24ms +[2025-07-18 04:44:42] [Rank 0] PRINT: step:125/10000 val_loss:5.1409 train_time:28530ms step_avg:228.24ms +[2025-07-18 04:44:46] [Rank 0] step:141/10000 train_time:31839ms step_avg:225.81ms +[2025-07-18 04:44:46] [Rank 0] step:141/10000 train_time:31839ms step_avg:225.81ms +[2025-07-18 04:44:50] [Rank 0] step:161/10000 train_time:36333ms step_avg:225.67ms +[2025-07-18 04:44:50] [Rank 0] step:161/10000 
train_time:36333ms step_avg:225.67ms +[2025-07-18 04:44:55] [Rank 0] step:181/10000 train_time:40828ms step_avg:225.57ms +[2025-07-18 04:44:55] [Rank 0] step:181/10000 train_time:40828ms step_avg:225.57ms +[2025-07-18 04:44:59] [Rank 0] step:201/10000 train_time:45318ms step_avg:225.46ms +[2025-07-18 04:44:59] [Rank 0] step:201/10000 train_time:45318ms step_avg:225.46ms +[2025-07-18 04:45:04] [Rank 0] step:221/10000 train_time:49809ms step_avg:225.38ms +[2025-07-18 04:45:04] [Rank 0] step:221/10000 train_time:49809ms step_avg:225.38ms +[2025-07-18 04:45:08] [Rank 0] step:241/10000 train_time:54300ms step_avg:225.31ms +[2025-07-18 04:45:08] [Rank 0] step:241/10000 train_time:54300ms step_avg:225.31ms +[2025-07-18 04:45:14] [Rank 0] PRINT: step:250/10000 val_loss:4.7078 train_time:56601ms step_avg:226.41ms +[2025-07-18 04:45:14] [Rank 0] PRINT: step:250/10000 val_loss:4.7078 train_time:56601ms step_avg:226.41ms +[2025-07-18 04:45:17] [Rank 0] step:261/10000 train_time:58793ms step_avg:225.26ms +[2025-07-18 04:45:17] [Rank 0] step:261/10000 train_time:58793ms step_avg:225.26ms +[2025-07-18 04:45:21] [Rank 0] step:281/10000 train_time:63288ms step_avg:225.23ms +[2025-07-18 04:45:21] [Rank 0] step:281/10000 train_time:63288ms step_avg:225.23ms +[2025-07-18 04:45:26] [Rank 0] step:301/10000 train_time:67782ms step_avg:225.19ms +[2025-07-18 04:45:26] [Rank 0] step:301/10000 train_time:67782ms step_avg:225.19ms +[2025-07-18 04:45:30] [Rank 0] step:321/10000 train_time:72278ms step_avg:225.17ms +[2025-07-18 04:45:30] [Rank 0] step:321/10000 train_time:72278ms step_avg:225.17ms +[2025-07-18 04:45:35] [Rank 0] step:341/10000 train_time:76775ms step_avg:225.15ms +[2025-07-18 04:45:35] [Rank 0] step:341/10000 train_time:76775ms step_avg:225.15ms +[2025-07-18 04:45:39] [Rank 0] step:361/10000 train_time:81273ms step_avg:225.13ms +[2025-07-18 04:45:39] [Rank 0] step:361/10000 train_time:81273ms step_avg:225.13ms +[2025-07-18 04:45:47] [Rank 0] PRINT: step:375/10000 
val_loss:4.5012 train_time:84696ms step_avg:225.86ms +[2025-07-18 04:45:47] [Rank 0] PRINT: step:375/10000 val_loss:4.5012 train_time:84696ms step_avg:225.86ms +[2025-07-18 04:45:48] [Rank 0] step:381/10000 train_time:85768ms step_avg:225.11ms +[2025-07-18 04:45:48] [Rank 0] step:381/10000 train_time:85768ms step_avg:225.11ms +[2025-07-18 04:45:53] [Rank 0] step:401/10000 train_time:90261ms step_avg:225.09ms +[2025-07-18 04:45:53] [Rank 0] step:401/10000 train_time:90261ms step_avg:225.09ms +[2025-07-18 04:45:57] [Rank 0] step:421/10000 train_time:94759ms step_avg:225.08ms +[2025-07-18 04:45:57] [Rank 0] step:421/10000 train_time:94759ms step_avg:225.08ms +[2025-07-18 04:46:02] [Rank 0] step:441/10000 train_time:99261ms step_avg:225.08ms +[2025-07-18 04:46:02] [Rank 0] step:441/10000 train_time:99261ms step_avg:225.08ms +[2025-07-18 04:46:06] [Rank 0] step:461/10000 train_time:103758ms step_avg:225.07ms +[2025-07-18 04:46:06] [Rank 0] step:461/10000 train_time:103758ms step_avg:225.07ms +[2025-07-18 04:46:11] [Rank 0] step:481/10000 train_time:108257ms step_avg:225.07ms +[2025-07-18 04:46:11] [Rank 0] step:481/10000 train_time:108257ms step_avg:225.07ms +[2025-07-18 04:46:19] [Rank 0] PRINT: step:500/10000 val_loss:4.4718 train_time:112806ms step_avg:225.61ms +[2025-07-18 04:46:19] [Rank 0] PRINT: step:500/10000 val_loss:4.4718 train_time:112806ms step_avg:225.61ms +[2025-07-18 04:46:19] [Rank 0] step:501/10000 train_time:112822ms step_avg:225.19ms +[2025-07-18 04:46:19] [Rank 0] step:501/10000 train_time:112822ms step_avg:225.19ms +[2025-07-18 04:46:24] [Rank 0] step:521/10000 train_time:117267ms step_avg:225.08ms +[2025-07-18 04:46:24] [Rank 0] step:521/10000 train_time:117267ms step_avg:225.08ms +[2025-07-18 04:46:28] [Rank 0] step:541/10000 train_time:121773ms step_avg:225.09ms +[2025-07-18 04:46:28] [Rank 0] step:541/10000 train_time:121773ms step_avg:225.09ms +[2025-07-18 04:46:33] [Rank 0] step:561/10000 train_time:126278ms step_avg:225.09ms +[2025-07-18 
04:46:33] [Rank 0] step:561/10000 train_time:126278ms step_avg:225.09ms +[2025-07-18 04:46:37] [Rank 0] step:581/10000 train_time:130785ms step_avg:225.10ms +[2025-07-18 04:46:37] [Rank 0] step:581/10000 train_time:130785ms step_avg:225.10ms +[2025-07-18 04:46:42] [Rank 0] step:601/10000 train_time:135294ms step_avg:225.12ms +[2025-07-18 04:46:42] [Rank 0] step:601/10000 train_time:135294ms step_avg:225.12ms +[2025-07-18 04:46:46] [Rank 0] step:621/10000 train_time:139803ms step_avg:225.13ms +[2025-07-18 04:46:46] [Rank 0] step:621/10000 train_time:139803ms step_avg:225.13ms +[2025-07-18 04:46:52] [Rank 0] PRINT: step:625/10000 val_loss:4.4695 train_time:140988ms step_avg:225.58ms +[2025-07-18 04:46:52] [Rank 0] PRINT: step:625/10000 val_loss:4.4695 train_time:140988ms step_avg:225.58ms +[2025-07-18 04:46:55] [Rank 0] step:641/10000 train_time:144311ms step_avg:225.13ms +[2025-07-18 04:46:55] [Rank 0] step:641/10000 train_time:144311ms step_avg:225.13ms +[2025-07-18 04:47:00] [Rank 0] step:661/10000 train_time:148817ms step_avg:225.14ms +[2025-07-18 04:47:00] [Rank 0] step:661/10000 train_time:148817ms step_avg:225.14ms +[2025-07-18 04:47:04] [Rank 0] step:681/10000 train_time:153326ms step_avg:225.15ms +[2025-07-18 04:47:04] [Rank 0] step:681/10000 train_time:153326ms step_avg:225.15ms +[2025-07-18 04:47:09] [Rank 0] step:701/10000 train_time:157833ms step_avg:225.15ms +[2025-07-18 04:47:09] [Rank 0] step:701/10000 train_time:157833ms step_avg:225.15ms +[2025-07-18 04:47:13] [Rank 0] step:721/10000 train_time:162342ms step_avg:225.16ms +[2025-07-18 04:47:13] [Rank 0] step:721/10000 train_time:162342ms step_avg:225.16ms +[2025-07-18 04:47:18] [Rank 0] step:741/10000 train_time:166852ms step_avg:225.17ms +[2025-07-18 04:47:18] [Rank 0] step:741/10000 train_time:166852ms step_avg:225.17ms +[2025-07-18 04:47:24] [Rank 0] PRINT: step:750/10000 val_loss:4.6644 train_time:169178ms step_avg:225.57ms +[2025-07-18 04:47:24] [Rank 0] PRINT: step:750/10000 val_loss:4.6644 
train_time:169178ms step_avg:225.57ms +[2025-07-18 04:47:27] [Rank 0] step:761/10000 train_time:171392ms step_avg:225.22ms +[2025-07-18 04:47:27] [Rank 0] step:761/10000 train_time:171392ms step_avg:225.22ms +[2025-07-18 04:47:31] [Rank 0] step:781/10000 train_time:175932ms step_avg:225.26ms +[2025-07-18 04:47:31] [Rank 0] step:781/10000 train_time:175932ms step_avg:225.26ms +[2025-07-18 04:47:36] [Rank 0] step:801/10000 train_time:180484ms step_avg:225.32ms +[2025-07-18 04:47:36] [Rank 0] step:801/10000 train_time:180484ms step_avg:225.32ms +[2025-07-18 04:47:41] [Rank 0] step:821/10000 train_time:185034ms step_avg:225.38ms +[2025-07-18 04:47:41] [Rank 0] step:821/10000 train_time:185034ms step_avg:225.38ms +[2025-07-18 04:47:45] [Rank 0] step:841/10000 train_time:189587ms step_avg:225.43ms +[2025-07-18 04:47:45] [Rank 0] step:841/10000 train_time:189587ms step_avg:225.43ms +[2025-07-18 04:47:50] [Rank 0] step:861/10000 train_time:194141ms step_avg:225.48ms +[2025-07-18 04:47:50] [Rank 0] step:861/10000 train_time:194141ms step_avg:225.48ms +[2025-07-18 04:47:57] [Rank 0] PRINT: step:875/10000 val_loss:4.6712 train_time:197613ms step_avg:225.84ms +[2025-07-18 04:47:57] [Rank 0] PRINT: step:875/10000 val_loss:4.6712 train_time:197613ms step_avg:225.84ms +[2025-07-18 04:47:59] [Rank 0] step:881/10000 train_time:198693ms step_avg:225.53ms +[2025-07-18 04:47:59] [Rank 0] step:881/10000 train_time:198693ms step_avg:225.53ms +[2025-07-18 04:48:03] [Rank 0] step:901/10000 train_time:203239ms step_avg:225.57ms +[2025-07-18 04:48:03] [Rank 0] step:901/10000 train_time:203239ms step_avg:225.57ms +[2025-07-18 04:48:08] [Rank 0] step:921/10000 train_time:207788ms step_avg:225.61ms +[2025-07-18 04:48:08] [Rank 0] step:921/10000 train_time:207788ms step_avg:225.61ms +[2025-07-18 04:48:12] [Rank 0] step:941/10000 train_time:212343ms step_avg:225.66ms +[2025-07-18 04:48:12] [Rank 0] step:941/10000 train_time:212343ms step_avg:225.66ms +[2025-07-18 04:48:17] [Rank 0] 
step:961/10000 train_time:216892ms step_avg:225.69ms +[2025-07-18 04:48:17] [Rank 0] step:961/10000 train_time:216892ms step_avg:225.69ms +[2025-07-18 04:48:21] [Rank 0] step:981/10000 train_time:221442ms step_avg:225.73ms +[2025-07-18 04:48:21] [Rank 0] step:981/10000 train_time:221442ms step_avg:225.73ms +[2025-07-18 04:48:30] [Rank 0] PRINT: step:1000/10000 val_loss:4.6805 train_time:226045ms step_avg:226.04ms +[2025-07-18 04:48:30] [Rank 0] PRINT: step:1000/10000 val_loss:4.6805 train_time:226045ms step_avg:226.04ms +[2025-07-18 04:48:30] [Rank 0] step:1001/10000 train_time:226060ms step_avg:225.83ms +[2025-07-18 04:48:30] [Rank 0] step:1001/10000 train_time:226060ms step_avg:225.83ms +[2025-07-18 04:48:35] [Rank 0] step:1021/10000 train_time:230646ms step_avg:225.90ms +[2025-07-18 04:48:35] [Rank 0] step:1021/10000 train_time:230646ms step_avg:225.90ms +[2025-07-18 04:48:40] [Rank 0] step:1041/10000 train_time:235194ms step_avg:225.93ms +[2025-07-18 04:48:40] [Rank 0] step:1041/10000 train_time:235194ms step_avg:225.93ms +[2025-07-18 04:48:44] [Rank 0] step:1061/10000 train_time:239740ms step_avg:225.96ms +[2025-07-18 04:48:44] [Rank 0] step:1061/10000 train_time:239740ms step_avg:225.96ms +[2025-07-18 04:48:49] [Rank 0] step:1081/10000 train_time:244291ms step_avg:225.99ms +[2025-07-18 04:48:49] [Rank 0] step:1081/10000 train_time:244291ms step_avg:225.99ms +[2025-07-18 04:48:53] [Rank 0] step:1101/10000 train_time:248843ms step_avg:226.02ms +[2025-07-18 04:48:53] [Rank 0] step:1101/10000 train_time:248843ms step_avg:226.02ms +[2025-07-18 04:48:58] [Rank 0] step:1121/10000 train_time:253397ms step_avg:226.05ms +[2025-07-18 04:48:58] [Rank 0] step:1121/10000 train_time:253397ms step_avg:226.05ms +[2025-07-18 04:49:03] [Rank 0] PRINT: step:1125/10000 val_loss:4.6722 train_time:254593ms step_avg:226.30ms +[2025-07-18 04:49:03] [Rank 0] PRINT: step:1125/10000 val_loss:4.6722 train_time:254593ms step_avg:226.30ms +[2025-07-18 04:49:07] [Rank 0] step:1141/10000 
train_time:257949ms step_avg:226.07ms +[2025-07-18 04:49:07] [Rank 0] step:1141/10000 train_time:257949ms step_avg:226.07ms +[2025-07-18 04:49:11] [Rank 0] step:1161/10000 train_time:262504ms step_avg:226.10ms +[2025-07-18 04:49:11] [Rank 0] step:1161/10000 train_time:262504ms step_avg:226.10ms +[2025-07-18 04:49:16] [Rank 0] step:1181/10000 train_time:267060ms step_avg:226.13ms +[2025-07-18 04:49:16] [Rank 0] step:1181/10000 train_time:267060ms step_avg:226.13ms +[2025-07-18 04:49:21] [Rank 0] step:1201/10000 train_time:271614ms step_avg:226.16ms +[2025-07-18 04:49:21] [Rank 0] step:1201/10000 train_time:271614ms step_avg:226.16ms +[2025-07-18 04:49:25] [Rank 0] step:1221/10000 train_time:276169ms step_avg:226.18ms +[2025-07-18 04:49:25] [Rank 0] step:1221/10000 train_time:276169ms step_avg:226.18ms +[2025-07-18 04:49:30] [Rank 0] step:1241/10000 train_time:280725ms step_avg:226.21ms +[2025-07-18 04:49:30] [Rank 0] step:1241/10000 train_time:280725ms step_avg:226.21ms +[2025-07-18 04:49:36] [Rank 0] PRINT: step:1250/10000 val_loss:4.7439 train_time:283058ms step_avg:226.45ms +[2025-07-18 04:49:36] [Rank 0] PRINT: step:1250/10000 val_loss:4.7439 train_time:283058ms step_avg:226.45ms +[2025-07-18 04:49:39] [Rank 0] step:1261/10000 train_time:285276ms step_avg:226.23ms +[2025-07-18 04:49:39] [Rank 0] step:1261/10000 train_time:285276ms step_avg:226.23ms +[2025-07-18 04:49:43] [Rank 0] step:1281/10000 train_time:289836ms step_avg:226.26ms +[2025-07-18 04:49:43] [Rank 0] step:1281/10000 train_time:289836ms step_avg:226.26ms +[2025-07-18 04:49:48] [Rank 0] step:1301/10000 train_time:294399ms step_avg:226.29ms +[2025-07-18 04:49:48] [Rank 0] step:1301/10000 train_time:294399ms step_avg:226.29ms +[2025-07-18 04:49:52] [Rank 0] step:1321/10000 train_time:298962ms step_avg:226.32ms +[2025-07-18 04:49:52] [Rank 0] step:1321/10000 train_time:298962ms step_avg:226.32ms +[2025-07-18 04:49:57] [Rank 0] step:1341/10000 train_time:303522ms step_avg:226.34ms +[2025-07-18 04:49:57] 
[Rank 0] step:1341/10000 train_time:303522ms step_avg:226.34ms +[2025-07-18 04:50:01] [Rank 0] step:1361/10000 train_time:308081ms step_avg:226.36ms +[2025-07-18 04:50:01] [Rank 0] step:1361/10000 train_time:308081ms step_avg:226.36ms +[2025-07-18 04:50:09] [Rank 0] PRINT: step:1375/10000 val_loss:4.7394 train_time:311555ms step_avg:226.59ms +[2025-07-18 04:50:09] [Rank 0] PRINT: step:1375/10000 val_loss:4.7394 train_time:311555ms step_avg:226.59ms +[2025-07-18 04:50:10] [Rank 0] step:1381/10000 train_time:312637ms step_avg:226.38ms +[2025-07-18 04:50:10] [Rank 0] step:1381/10000 train_time:312637ms step_avg:226.38ms +[2025-07-18 04:50:15] [Rank 0] step:1401/10000 train_time:317191ms step_avg:226.40ms +[2025-07-18 04:50:15] [Rank 0] step:1401/10000 train_time:317191ms step_avg:226.40ms +[2025-07-18 04:50:20] [Rank 0] step:1421/10000 train_time:321749ms step_avg:226.42ms +[2025-07-18 04:50:20] [Rank 0] step:1421/10000 train_time:321749ms step_avg:226.42ms +[2025-07-18 04:50:24] [Rank 0] step:1441/10000 train_time:326306ms step_avg:226.44ms +[2025-07-18 04:50:24] [Rank 0] step:1441/10000 train_time:326306ms step_avg:226.44ms +[2025-07-18 04:50:29] [Rank 0] step:1461/10000 train_time:330863ms step_avg:226.46ms +[2025-07-18 04:50:29] [Rank 0] step:1461/10000 train_time:330863ms step_avg:226.46ms +[2025-07-18 04:50:33] [Rank 0] step:1481/10000 train_time:335418ms step_avg:226.48ms +[2025-07-18 04:50:33] [Rank 0] step:1481/10000 train_time:335418ms step_avg:226.48ms +[2025-07-18 04:50:42] [Rank 0] PRINT: step:1500/10000 val_loss:4.6501 train_time:340052ms step_avg:226.70ms +[2025-07-18 04:50:42] [Rank 0] PRINT: step:1500/10000 val_loss:4.6501 train_time:340052ms step_avg:226.70ms +[2025-07-18 04:50:42] [Rank 0] step:1501/10000 train_time:340067ms step_avg:226.56ms +[2025-07-18 04:50:42] [Rank 0] step:1501/10000 train_time:340067ms step_avg:226.56ms +[2025-07-18 04:50:47] [Rank 0] step:1521/10000 train_time:344585ms step_avg:226.55ms +[2025-07-18 04:50:47] [Rank 0] 
step:1521/10000 train_time:344585ms step_avg:226.55ms +[2025-07-18 04:50:52] [Rank 0] step:1541/10000 train_time:349289ms step_avg:226.66ms +[2025-07-18 04:50:52] [Rank 0] step:1541/10000 train_time:349289ms step_avg:226.66ms +[2025-07-18 04:50:56] [Rank 0] step:1561/10000 train_time:353873ms step_avg:226.70ms +[2025-07-18 04:50:56] [Rank 0] step:1561/10000 train_time:353873ms step_avg:226.70ms +[2025-07-18 04:51:01] [Rank 0] step:1581/10000 train_time:358458ms step_avg:226.73ms +[2025-07-18 04:51:01] [Rank 0] step:1581/10000 train_time:358458ms step_avg:226.73ms +[2025-07-18 04:51:05] [Rank 0] step:1601/10000 train_time:363046ms step_avg:226.76ms +[2025-07-18 04:51:05] [Rank 0] step:1601/10000 train_time:363046ms step_avg:226.76ms +[2025-07-18 04:51:10] [Rank 0] step:1621/10000 train_time:367634ms step_avg:226.79ms +[2025-07-18 04:51:10] [Rank 0] step:1621/10000 train_time:367634ms step_avg:226.79ms +[2025-07-18 04:51:15] [Rank 0] PRINT: step:1625/10000 val_loss:4.7268 train_time:368839ms step_avg:226.98ms +[2025-07-18 04:51:15] [Rank 0] PRINT: step:1625/10000 val_loss:4.7268 train_time:368839ms step_avg:226.98ms +[2025-07-18 04:51:19] [Rank 0] step:1641/10000 train_time:372221ms step_avg:226.83ms +[2025-07-18 04:51:19] [Rank 0] step:1641/10000 train_time:372221ms step_avg:226.83ms +[2025-07-18 04:51:24] [Rank 0] step:1661/10000 train_time:376809ms step_avg:226.86ms +[2025-07-18 04:51:24] [Rank 0] step:1661/10000 train_time:376809ms step_avg:226.86ms +[2025-07-18 04:51:28] [Rank 0] step:1681/10000 train_time:381397ms step_avg:226.89ms +[2025-07-18 04:51:28] [Rank 0] step:1681/10000 train_time:381397ms step_avg:226.89ms +[2025-07-18 04:51:33] [Rank 0] step:1701/10000 train_time:385985ms step_avg:226.92ms +[2025-07-18 04:51:33] [Rank 0] step:1701/10000 train_time:385985ms step_avg:226.92ms +[2025-07-18 04:51:37] [Rank 0] step:1721/10000 train_time:390672ms step_avg:227.00ms +[2025-07-18 04:51:37] [Rank 0] step:1721/10000 train_time:390672ms step_avg:227.00ms 
+[2025-07-18 04:51:42] [Rank 0] step:1741/10000 train_time:395271ms step_avg:227.04ms +[2025-07-18 04:51:42] [Rank 0] step:1741/10000 train_time:395271ms step_avg:227.04ms +[2025-07-18 04:51:49] [Rank 0] PRINT: step:1750/10000 val_loss:4.7007 train_time:397623ms step_avg:227.21ms +[2025-07-18 04:51:49] [Rank 0] PRINT: step:1750/10000 val_loss:4.7007 train_time:397623ms step_avg:227.21ms +[2025-07-18 04:51:51] [Rank 0] step:1761/10000 train_time:399857ms step_avg:227.06ms +[2025-07-18 04:51:51] [Rank 0] step:1761/10000 train_time:399857ms step_avg:227.06ms +[2025-07-18 04:51:56] [Rank 0] step:1781/10000 train_time:404446ms step_avg:227.09ms +[2025-07-18 04:51:56] [Rank 0] step:1781/10000 train_time:404446ms step_avg:227.09ms +[2025-07-18 04:52:00] [Rank 0] step:1801/10000 train_time:409036ms step_avg:227.12ms +[2025-07-18 04:52:00] [Rank 0] step:1801/10000 train_time:409036ms step_avg:227.12ms +[2025-07-18 04:52:05] [Rank 0] step:1821/10000 train_time:413625ms step_avg:227.14ms +[2025-07-18 04:52:05] [Rank 0] step:1821/10000 train_time:413625ms step_avg:227.14ms +[2025-07-18 04:52:10] [Rank 0] step:1841/10000 train_time:418217ms step_avg:227.17ms +[2025-07-18 04:52:10] [Rank 0] step:1841/10000 train_time:418217ms step_avg:227.17ms +[2025-07-18 04:52:14] [Rank 0] step:1861/10000 train_time:422808ms step_avg:227.19ms +[2025-07-18 04:52:14] [Rank 0] step:1861/10000 train_time:422808ms step_avg:227.19ms +[2025-07-18 04:52:22] [Rank 0] PRINT: step:1875/10000 val_loss:4.6886 train_time:426308ms step_avg:227.36ms +[2025-07-18 04:52:22] [Rank 0] PRINT: step:1875/10000 val_loss:4.6886 train_time:426308ms step_avg:227.36ms +[2025-07-18 04:52:23] [Rank 0] step:1881/10000 train_time:427401ms step_avg:227.22ms +[2025-07-18 04:52:23] [Rank 0] step:1881/10000 train_time:427401ms step_avg:227.22ms +[2025-07-18 04:52:28] [Rank 0] step:1901/10000 train_time:431992ms step_avg:227.24ms +[2025-07-18 04:52:28] [Rank 0] step:1901/10000 train_time:431992ms step_avg:227.24ms +[2025-07-18 
04:52:32] [Rank 0] step:1921/10000 train_time:436580ms step_avg:227.27ms +[2025-07-18 04:52:32] [Rank 0] step:1921/10000 train_time:436580ms step_avg:227.27ms +[2025-07-18 04:52:37] [Rank 0] step:1941/10000 train_time:441171ms step_avg:227.29ms +[2025-07-18 04:52:37] [Rank 0] step:1941/10000 train_time:441171ms step_avg:227.29ms +[2025-07-18 04:52:42] [Rank 0] step:1961/10000 train_time:445763ms step_avg:227.31ms +[2025-07-18 04:52:42] [Rank 0] step:1961/10000 train_time:445763ms step_avg:227.31ms +[2025-07-18 04:52:46] [Rank 0] step:1981/10000 train_time:450353ms step_avg:227.34ms +[2025-07-18 04:52:46] [Rank 0] step:1981/10000 train_time:450353ms step_avg:227.34ms +[2025-07-18 04:52:55] [Rank 0] PRINT: step:2000/10000 val_loss:4.6682 train_time:454996ms step_avg:227.50ms +[2025-07-18 04:52:55] [Rank 0] PRINT: step:2000/10000 val_loss:4.6682 train_time:454996ms step_avg:227.50ms +[2025-07-18 04:52:55] [Rank 0] step:2001/10000 train_time:455012ms step_avg:227.39ms +[2025-07-18 04:52:55] [Rank 0] step:2001/10000 train_time:455012ms step_avg:227.39ms +[2025-07-18 04:53:00] [Rank 0] step:2021/10000 train_time:459534ms step_avg:227.38ms +[2025-07-18 04:53:00] [Rank 0] step:2021/10000 train_time:459534ms step_avg:227.38ms +[2025-07-18 04:53:04] [Rank 0] step:2041/10000 train_time:464129ms step_avg:227.40ms +[2025-07-18 04:53:04] [Rank 0] step:2041/10000 train_time:464129ms step_avg:227.40ms +[2025-07-18 04:53:09] [Rank 0] step:2061/10000 train_time:468723ms step_avg:227.42ms +[2025-07-18 04:53:09] [Rank 0] step:2061/10000 train_time:468723ms step_avg:227.42ms +[2025-07-18 04:53:14] [Rank 0] step:2081/10000 train_time:473319ms step_avg:227.45ms +[2025-07-18 04:53:14] [Rank 0] step:2081/10000 train_time:473319ms step_avg:227.45ms +[2025-07-18 04:53:18] [Rank 0] step:2101/10000 train_time:477911ms step_avg:227.47ms +[2025-07-18 04:53:18] [Rank 0] step:2101/10000 train_time:477911ms step_avg:227.47ms +[2025-07-18 04:53:23] [Rank 0] step:2121/10000 train_time:482504ms 
step_avg:227.49ms +[2025-07-18 04:53:23] [Rank 0] step:2121/10000 train_time:482504ms step_avg:227.49ms +[2025-07-18 04:53:28] [Rank 0] PRINT: step:2125/10000 val_loss:4.7274 train_time:483710ms step_avg:227.63ms +[2025-07-18 04:53:28] [Rank 0] PRINT: step:2125/10000 val_loss:4.7274 train_time:483710ms step_avg:227.63ms +[2025-07-18 04:53:32] [Rank 0] step:2141/10000 train_time:487096ms step_avg:227.51ms +[2025-07-18 04:53:32] [Rank 0] step:2141/10000 train_time:487096ms step_avg:227.51ms +[2025-07-18 04:53:36] [Rank 0] step:2161/10000 train_time:491689ms step_avg:227.53ms +[2025-07-18 04:53:36] [Rank 0] step:2161/10000 train_time:491689ms step_avg:227.53ms +[2025-07-18 04:53:41] [Rank 0] step:2181/10000 train_time:496284ms step_avg:227.55ms +[2025-07-18 04:53:41] [Rank 0] step:2181/10000 train_time:496284ms step_avg:227.55ms +[2025-07-18 04:53:46] [Rank 0] step:2201/10000 train_time:500877ms step_avg:227.57ms +[2025-07-18 04:53:46] [Rank 0] step:2201/10000 train_time:500877ms step_avg:227.57ms +[2025-07-18 04:53:50] [Rank 0] step:2221/10000 train_time:505472ms step_avg:227.59ms +[2025-07-18 04:53:50] [Rank 0] step:2221/10000 train_time:505472ms step_avg:227.59ms +[2025-07-18 04:53:55] [Rank 0] step:2241/10000 train_time:510160ms step_avg:227.65ms +[2025-07-18 04:53:55] [Rank 0] step:2241/10000 train_time:510160ms step_avg:227.65ms +[2025-07-18 04:54:01] [Rank 0] PRINT: step:2250/10000 val_loss:4.2773 train_time:512674ms step_avg:227.86ms +[2025-07-18 04:54:01] [Rank 0] PRINT: step:2250/10000 val_loss:4.2773 train_time:512674ms step_avg:227.86ms +[2025-07-18 04:54:04] [Rank 0] step:2261/10000 train_time:514972ms step_avg:227.76ms +[2025-07-18 04:54:04] [Rank 0] step:2261/10000 train_time:514972ms step_avg:227.76ms +[2025-07-18 04:54:09] [Rank 0] step:2281/10000 train_time:519778ms step_avg:227.87ms +[2025-07-18 04:54:09] [Rank 0] step:2281/10000 train_time:519778ms step_avg:227.87ms +[2025-07-18 04:54:14] [Rank 0] step:2301/10000 train_time:524586ms 
step_avg:227.98ms +[2025-07-18 04:54:14] [Rank 0] step:2301/10000 train_time:524586ms step_avg:227.98ms +[2025-07-18 04:54:18] [Rank 0] step:2321/10000 train_time:529391ms step_avg:228.09ms +[2025-07-18 04:54:18] [Rank 0] step:2321/10000 train_time:529391ms step_avg:228.09ms +[2025-07-18 04:54:23] [Rank 0] step:2341/10000 train_time:534098ms step_avg:228.15ms +[2025-07-18 04:54:23] [Rank 0] step:2341/10000 train_time:534098ms step_avg:228.15ms +[2025-07-18 04:54:28] [Rank 0] step:2361/10000 train_time:538806ms step_avg:228.21ms +[2025-07-18 04:54:28] [Rank 0] step:2361/10000 train_time:538806ms step_avg:228.21ms +[2025-07-18 04:54:36] [Rank 0] PRINT: step:2375/10000 val_loss:4.2331 train_time:542392ms step_avg:228.38ms +[2025-07-18 04:54:36] [Rank 0] PRINT: step:2375/10000 val_loss:4.2331 train_time:542392ms step_avg:228.38ms +[2025-07-18 04:54:37] [Rank 0] step:2381/10000 train_time:543512ms step_avg:228.27ms +[2025-07-18 04:54:37] [Rank 0] step:2381/10000 train_time:543512ms step_avg:228.27ms +[2025-07-18 04:54:42] [Rank 0] step:2401/10000 train_time:548221ms step_avg:228.33ms +[2025-07-18 04:54:42] [Rank 0] step:2401/10000 train_time:548221ms step_avg:228.33ms +[2025-07-18 04:54:47] [Rank 0] step:2421/10000 train_time:553027ms step_avg:228.43ms +[2025-07-18 04:54:47] [Rank 0] step:2421/10000 train_time:553027ms step_avg:228.43ms +[2025-07-18 04:54:51] [Rank 0] step:2441/10000 train_time:557836ms step_avg:228.53ms +[2025-07-18 04:54:51] [Rank 0] step:2441/10000 train_time:557836ms step_avg:228.53ms +[2025-07-18 04:54:56] [Rank 0] step:2461/10000 train_time:562642ms step_avg:228.62ms +[2025-07-18 04:54:56] [Rank 0] step:2461/10000 train_time:562642ms step_avg:228.62ms +[2025-07-18 04:55:01] [Rank 0] step:2481/10000 train_time:567349ms step_avg:228.68ms +[2025-07-18 04:55:01] [Rank 0] step:2481/10000 train_time:567349ms step_avg:228.68ms +[2025-07-18 04:55:10] [Rank 0] PRINT: step:2500/10000 val_loss:4.1959 train_time:572210ms step_avg:228.88ms +[2025-07-18 
04:55:10] [Rank 0] PRINT: step:2500/10000 val_loss:4.1959 train_time:572210ms step_avg:228.88ms +[2025-07-18 04:55:10] [Rank 0] step:2501/10000 train_time:572227ms step_avg:228.80ms +[2025-07-18 04:55:10] [Rank 0] step:2501/10000 train_time:572227ms step_avg:228.80ms +[2025-07-18 04:55:15] [Rank 0] step:2521/10000 train_time:576857ms step_avg:228.82ms +[2025-07-18 04:55:15] [Rank 0] step:2521/10000 train_time:576857ms step_avg:228.82ms +[2025-07-18 04:55:20] [Rank 0] step:2541/10000 train_time:581621ms step_avg:228.89ms +[2025-07-18 04:55:20] [Rank 0] step:2541/10000 train_time:581621ms step_avg:228.89ms +[2025-07-18 04:55:25] [Rank 0] step:2561/10000 train_time:586368ms step_avg:228.96ms +[2025-07-18 04:55:25] [Rank 0] step:2561/10000 train_time:586368ms step_avg:228.96ms +[2025-07-18 04:55:29] [Rank 0] step:2581/10000 train_time:591075ms step_avg:229.01ms +[2025-07-18 04:55:29] [Rank 0] step:2581/10000 train_time:591075ms step_avg:229.01ms +[2025-07-18 04:55:34] [Rank 0] step:2601/10000 train_time:595783ms step_avg:229.06ms +[2025-07-18 04:55:34] [Rank 0] step:2601/10000 train_time:595783ms step_avg:229.06ms +[2025-07-18 04:55:39] [Rank 0] step:2621/10000 train_time:600491ms step_avg:229.11ms +[2025-07-18 04:55:39] [Rank 0] step:2621/10000 train_time:600491ms step_avg:229.11ms +[2025-07-18 04:55:44] [Rank 0] PRINT: step:2625/10000 val_loss:4.2022 train_time:601726ms step_avg:229.23ms +[2025-07-18 04:55:44] [Rank 0] PRINT: step:2625/10000 val_loss:4.2022 train_time:601726ms step_avg:229.23ms +[2025-07-18 04:55:48] [Rank 0] step:2641/10000 train_time:605198ms step_avg:229.15ms +[2025-07-18 04:55:48] [Rank 0] step:2641/10000 train_time:605198ms step_avg:229.15ms +[2025-07-18 04:55:53] [Rank 0] step:2661/10000 train_time:609906ms step_avg:229.20ms +[2025-07-18 04:55:53] [Rank 0] step:2661/10000 train_time:609906ms step_avg:229.20ms +[2025-07-18 04:55:57] [Rank 0] step:2681/10000 train_time:614612ms step_avg:229.25ms +[2025-07-18 04:55:57] [Rank 0] step:2681/10000 
train_time:614612ms step_avg:229.25ms +[2025-07-18 04:56:02] [Rank 0] step:2701/10000 train_time:619319ms step_avg:229.29ms +[2025-07-18 04:56:02] [Rank 0] step:2701/10000 train_time:619319ms step_avg:229.29ms +[2025-07-18 04:56:07] [Rank 0] step:2721/10000 train_time:624027ms step_avg:229.34ms +[2025-07-18 04:56:07] [Rank 0] step:2721/10000 train_time:624027ms step_avg:229.34ms +[2025-07-18 04:56:12] [Rank 0] step:2741/10000 train_time:628741ms step_avg:229.38ms +[2025-07-18 04:56:12] [Rank 0] step:2741/10000 train_time:628741ms step_avg:229.38ms +[2025-07-18 04:56:18] [Rank 0] PRINT: step:2750/10000 val_loss:4.2274 train_time:631150ms step_avg:229.51ms +[2025-07-18 04:56:18] [Rank 0] PRINT: step:2750/10000 val_loss:4.2274 train_time:631150ms step_avg:229.51ms +[2025-07-18 04:56:21] [Rank 0] step:2761/10000 train_time:633441ms step_avg:229.42ms +[2025-07-18 04:56:21] [Rank 0] step:2761/10000 train_time:633441ms step_avg:229.42ms +[2025-07-18 04:56:26] [Rank 0] step:2781/10000 train_time:638142ms step_avg:229.46ms +[2025-07-18 04:56:26] [Rank 0] step:2781/10000 train_time:638142ms step_avg:229.46ms +[2025-07-18 04:56:30] [Rank 0] step:2801/10000 train_time:642837ms step_avg:229.50ms +[2025-07-18 04:56:30] [Rank 0] step:2801/10000 train_time:642837ms step_avg:229.50ms +[2025-07-18 04:56:35] [Rank 0] step:2821/10000 train_time:647530ms step_avg:229.54ms +[2025-07-18 04:56:35] [Rank 0] step:2821/10000 train_time:647530ms step_avg:229.54ms +[2025-07-18 04:56:40] [Rank 0] step:2841/10000 train_time:652223ms step_avg:229.58ms +[2025-07-18 04:56:40] [Rank 0] step:2841/10000 train_time:652223ms step_avg:229.58ms +[2025-07-18 04:56:44] [Rank 0] step:2861/10000 train_time:656914ms step_avg:229.61ms +[2025-07-18 04:56:44] [Rank 0] step:2861/10000 train_time:656914ms step_avg:229.61ms +[2025-07-18 04:56:52] [Rank 0] PRINT: step:2875/10000 val_loss:4.2580 train_time:660489ms step_avg:229.74ms +[2025-07-18 04:56:52] [Rank 0] PRINT: step:2875/10000 val_loss:4.2580 
train_time:660489ms step_avg:229.74ms +[2025-07-18 04:56:53] [Rank 0] step:2881/10000 train_time:661605ms step_avg:229.64ms +[2025-07-18 04:56:53] [Rank 0] step:2881/10000 train_time:661605ms step_avg:229.64ms +[2025-07-18 04:56:58] [Rank 0] step:2901/10000 train_time:666292ms step_avg:229.68ms +[2025-07-18 04:56:58] [Rank 0] step:2901/10000 train_time:666292ms step_avg:229.68ms +[2025-07-18 04:57:03] [Rank 0] step:2921/10000 train_time:670984ms step_avg:229.71ms +[2025-07-18 04:57:03] [Rank 0] step:2921/10000 train_time:670984ms step_avg:229.71ms +[2025-07-18 04:57:07] [Rank 0] step:2941/10000 train_time:675677ms step_avg:229.74ms +[2025-07-18 04:57:07] [Rank 0] step:2941/10000 train_time:675677ms step_avg:229.74ms +[2025-07-18 04:57:12] [Rank 0] step:2961/10000 train_time:680371ms step_avg:229.78ms +[2025-07-18 04:57:12] [Rank 0] step:2961/10000 train_time:680371ms step_avg:229.78ms +[2025-07-18 04:57:17] [Rank 0] step:2981/10000 train_time:685078ms step_avg:229.81ms +[2025-07-18 04:57:17] [Rank 0] step:2981/10000 train_time:685078ms step_avg:229.81ms +[2025-07-18 04:57:26] [Rank 0] PRINT: step:3000/10000 val_loss:4.2465 train_time:689843ms step_avg:229.95ms +[2025-07-18 04:57:26] [Rank 0] PRINT: step:3000/10000 val_loss:4.2465 train_time:689843ms step_avg:229.95ms +[2025-07-18 04:57:26] [Rank 0] step:3001/10000 train_time:689864ms step_avg:229.88ms +[2025-07-18 04:57:26] [Rank 0] step:3001/10000 train_time:689864ms step_avg:229.88ms +[2025-07-18 04:57:31] [Rank 0] step:3021/10000 train_time:694496ms step_avg:229.89ms +[2025-07-18 04:57:31] [Rank 0] step:3021/10000 train_time:694496ms step_avg:229.89ms +[2025-07-18 04:57:35] [Rank 0] step:3041/10000 train_time:699207ms step_avg:229.93ms +[2025-07-18 04:57:35] [Rank 0] step:3041/10000 train_time:699207ms step_avg:229.93ms +[2025-07-18 04:57:40] [Rank 0] step:3061/10000 train_time:704009ms step_avg:229.99ms +[2025-07-18 04:57:40] [Rank 0] step:3061/10000 train_time:704009ms step_avg:229.99ms +[2025-07-18 04:57:45] 
[Rank 0] step:3081/10000 train_time:708719ms step_avg:230.03ms +[2025-07-18 04:57:45] [Rank 0] step:3081/10000 train_time:708719ms step_avg:230.03ms +[2025-07-18 04:57:50] [Rank 0] step:3101/10000 train_time:713429ms step_avg:230.06ms +[2025-07-18 04:57:50] [Rank 0] step:3101/10000 train_time:713429ms step_avg:230.06ms +[2025-07-18 04:57:54] [Rank 0] step:3121/10000 train_time:718136ms step_avg:230.10ms +[2025-07-18 04:57:54] [Rank 0] step:3121/10000 train_time:718136ms step_avg:230.10ms +[2025-07-18 04:57:59] [Rank 0] PRINT: step:3125/10000 val_loss:4.3551 train_time:719373ms step_avg:230.20ms +[2025-07-18 04:57:59] [Rank 0] PRINT: step:3125/10000 val_loss:4.3551 train_time:719373ms step_avg:230.20ms +[2025-07-18 04:58:03] [Rank 0] step:3141/10000 train_time:722848ms step_avg:230.13ms +[2025-07-18 04:58:03] [Rank 0] step:3141/10000 train_time:722848ms step_avg:230.13ms +[2025-07-18 04:58:08] [Rank 0] step:3161/10000 train_time:727561ms step_avg:230.17ms +[2025-07-18 04:58:08] [Rank 0] step:3161/10000 train_time:727561ms step_avg:230.17ms +[2025-07-18 04:58:13] [Rank 0] step:3181/10000 train_time:732274ms step_avg:230.20ms +[2025-07-18 04:58:13] [Rank 0] step:3181/10000 train_time:732274ms step_avg:230.20ms +[2025-07-18 04:58:17] [Rank 0] step:3201/10000 train_time:736991ms step_avg:230.24ms +[2025-07-18 04:58:17] [Rank 0] step:3201/10000 train_time:736991ms step_avg:230.24ms +[2025-07-18 04:58:22] [Rank 0] step:3221/10000 train_time:741707ms step_avg:230.27ms +[2025-07-18 04:58:22] [Rank 0] step:3221/10000 train_time:741707ms step_avg:230.27ms +[2025-07-18 04:58:27] [Rank 0] step:3241/10000 train_time:746424ms step_avg:230.31ms +[2025-07-18 04:58:27] [Rank 0] step:3241/10000 train_time:746424ms step_avg:230.31ms +[2025-07-18 04:58:34] [Rank 0] PRINT: step:3250/10000 val_loss:4.2794 train_time:748840ms step_avg:230.41ms +[2025-07-18 04:58:34] [Rank 0] PRINT: step:3250/10000 val_loss:4.2794 train_time:748840ms step_avg:230.41ms +[2025-07-18 04:58:36] [Rank 0] 
step:3261/10000 train_time:751139ms step_avg:230.34ms +[2025-07-18 04:58:36] [Rank 0] step:3261/10000 train_time:751139ms step_avg:230.34ms +[2025-07-18 04:58:41] [Rank 0] step:3281/10000 train_time:755853ms step_avg:230.37ms +[2025-07-18 04:58:41] [Rank 0] step:3281/10000 train_time:755853ms step_avg:230.37ms +[2025-07-18 04:58:46] [Rank 0] step:3301/10000 train_time:760570ms step_avg:230.41ms +[2025-07-18 04:58:46] [Rank 0] step:3301/10000 train_time:760570ms step_avg:230.41ms +[2025-07-18 04:58:50] [Rank 0] step:3321/10000 train_time:765285ms step_avg:230.44ms +[2025-07-18 04:58:50] [Rank 0] step:3321/10000 train_time:765285ms step_avg:230.44ms +[2025-07-18 04:58:55] [Rank 0] step:3341/10000 train_time:769999ms step_avg:230.47ms +[2025-07-18 04:58:55] [Rank 0] step:3341/10000 train_time:769999ms step_avg:230.47ms +[2025-07-18 04:59:00] [Rank 0] step:3361/10000 train_time:774713ms step_avg:230.50ms +[2025-07-18 04:59:00] [Rank 0] step:3361/10000 train_time:774713ms step_avg:230.50ms +[2025-07-18 04:59:07] [Rank 0] PRINT: step:3375/10000 val_loss:4.3300 train_time:778307ms step_avg:230.61ms +[2025-07-18 04:59:07] [Rank 0] PRINT: step:3375/10000 val_loss:4.3300 train_time:778307ms step_avg:230.61ms +[2025-07-18 04:59:09] [Rank 0] step:3381/10000 train_time:779431ms step_avg:230.53ms +[2025-07-18 04:59:09] [Rank 0] step:3381/10000 train_time:779431ms step_avg:230.53ms +[2025-07-18 04:59:13] [Rank 0] step:3401/10000 train_time:784151ms step_avg:230.56ms +[2025-07-18 04:59:13] [Rank 0] step:3401/10000 train_time:784151ms step_avg:230.56ms +[2025-07-18 04:59:18] [Rank 0] step:3421/10000 train_time:788869ms step_avg:230.60ms +[2025-07-18 04:59:18] [Rank 0] step:3421/10000 train_time:788869ms step_avg:230.60ms +[2025-07-18 04:59:23] [Rank 0] step:3441/10000 train_time:793588ms step_avg:230.63ms +[2025-07-18 04:59:23] [Rank 0] step:3441/10000 train_time:793588ms step_avg:230.63ms +[2025-07-18 04:59:27] [Rank 0] step:3461/10000 train_time:798307ms step_avg:230.66ms 
+[2025-07-18 04:59:27] [Rank 0] step:3461/10000 train_time:798307ms step_avg:230.66ms +[2025-07-18 04:59:32] [Rank 0] step:3481/10000 train_time:803025ms step_avg:230.69ms +[2025-07-18 04:59:32] [Rank 0] step:3481/10000 train_time:803025ms step_avg:230.69ms +[2025-07-18 04:59:41] [Rank 0] PRINT: step:3500/10000 val_loss:4.3400 train_time:807797ms step_avg:230.80ms +[2025-07-18 04:59:41] [Rank 0] PRINT: step:3500/10000 val_loss:4.3400 train_time:807797ms step_avg:230.80ms +[2025-07-18 04:59:41] [Rank 0] step:3501/10000 train_time:807815ms step_avg:230.74ms +[2025-07-18 04:59:41] [Rank 0] step:3501/10000 train_time:807815ms step_avg:230.74ms +[2025-07-18 04:59:46] [Rank 0] step:3521/10000 train_time:812461ms step_avg:230.75ms +[2025-07-18 04:59:46] [Rank 0] step:3521/10000 train_time:812461ms step_avg:230.75ms +[2025-07-18 04:59:51] [Rank 0] step:3541/10000 train_time:817182ms step_avg:230.78ms +[2025-07-18 04:59:51] [Rank 0] step:3541/10000 train_time:817182ms step_avg:230.78ms +[2025-07-18 04:59:56] [Rank 0] step:3561/10000 train_time:821904ms step_avg:230.81ms +[2025-07-18 04:59:56] [Rank 0] step:3561/10000 train_time:821904ms step_avg:230.81ms +[2025-07-18 05:00:00] [Rank 0] step:3581/10000 train_time:826626ms step_avg:230.84ms +[2025-07-18 05:00:00] [Rank 0] step:3581/10000 train_time:826626ms step_avg:230.84ms +[2025-07-18 05:00:05] [Rank 0] step:3601/10000 train_time:831347ms step_avg:230.87ms +[2025-07-18 05:00:05] [Rank 0] step:3601/10000 train_time:831347ms step_avg:230.87ms +[2025-07-18 05:00:10] [Rank 0] step:3621/10000 train_time:836067ms step_avg:230.89ms +[2025-07-18 05:00:10] [Rank 0] step:3621/10000 train_time:836067ms step_avg:230.89ms +[2025-07-18 05:00:15] [Rank 0] PRINT: step:3625/10000 val_loss:4.3706 train_time:837307ms step_avg:230.98ms +[2025-07-18 05:00:15] [Rank 0] PRINT: step:3625/10000 val_loss:4.3706 train_time:837307ms step_avg:230.98ms +[2025-07-18 05:00:19] [Rank 0] step:3641/10000 train_time:840787ms step_avg:230.92ms +[2025-07-18 
05:00:19] [Rank 0] step:3641/10000 train_time:840787ms step_avg:230.92ms +[2025-07-18 05:00:24] [Rank 0] step:3661/10000 train_time:845509ms step_avg:230.95ms +[2025-07-18 05:00:24] [Rank 0] step:3661/10000 train_time:845509ms step_avg:230.95ms +[2025-07-18 05:00:29] [Rank 0] step:3681/10000 train_time:850330ms step_avg:231.01ms +[2025-07-18 05:00:29] [Rank 0] step:3681/10000 train_time:850330ms step_avg:231.01ms +[2025-07-18 05:00:34] [Rank 0] step:3701/10000 train_time:855153ms step_avg:231.06ms +[2025-07-18 05:00:34] [Rank 0] step:3701/10000 train_time:855153ms step_avg:231.06ms +[2025-07-18 05:00:38] [Rank 0] step:3721/10000 train_time:859946ms step_avg:231.11ms +[2025-07-18 05:00:38] [Rank 0] step:3721/10000 train_time:859946ms step_avg:231.11ms +[2025-07-18 05:00:43] [Rank 0] step:3741/10000 train_time:864755ms step_avg:231.16ms +[2025-07-18 05:00:43] [Rank 0] step:3741/10000 train_time:864755ms step_avg:231.16ms +[2025-07-18 05:00:50] [Rank 0] PRINT: step:3750/10000 val_loss:4.2805 train_time:867219ms step_avg:231.26ms +[2025-07-18 05:00:50] [Rank 0] PRINT: step:3750/10000 val_loss:4.2805 train_time:867219ms step_avg:231.26ms +[2025-07-18 05:00:53] [Rank 0] step:3761/10000 train_time:869562ms step_avg:231.21ms +[2025-07-18 05:00:53] [Rank 0] step:3761/10000 train_time:869562ms step_avg:231.21ms +[2025-07-18 05:00:57] [Rank 0] step:3781/10000 train_time:874374ms step_avg:231.25ms +[2025-07-18 05:00:57] [Rank 0] step:3781/10000 train_time:874374ms step_avg:231.25ms +[2025-07-18 05:01:02] [Rank 0] step:3801/10000 train_time:879186ms step_avg:231.30ms +[2025-07-18 05:01:02] [Rank 0] step:3801/10000 train_time:879186ms step_avg:231.30ms +[2025-07-18 05:01:07] [Rank 0] step:3821/10000 train_time:884004ms step_avg:231.35ms +[2025-07-18 05:01:07] [Rank 0] step:3821/10000 train_time:884004ms step_avg:231.35ms +[2025-07-18 05:01:12] [Rank 0] step:3841/10000 train_time:888818ms step_avg:231.40ms +[2025-07-18 05:01:12] [Rank 0] step:3841/10000 train_time:888818ms 
step_avg:231.40ms +[2025-07-18 05:01:17] [Rank 0] step:3861/10000 train_time:893630ms step_avg:231.45ms +[2025-07-18 05:01:17] [Rank 0] step:3861/10000 train_time:893630ms step_avg:231.45ms +[2025-07-18 05:01:25] [Rank 0] PRINT: step:3875/10000 val_loss:4.3960 train_time:897301ms step_avg:231.56ms +[2025-07-18 05:01:25] [Rank 0] PRINT: step:3875/10000 val_loss:4.3960 train_time:897301ms step_avg:231.56ms +[2025-07-18 05:01:26] [Rank 0] step:3881/10000 train_time:898448ms step_avg:231.50ms +[2025-07-18 05:01:26] [Rank 0] step:3881/10000 train_time:898448ms step_avg:231.50ms +[2025-07-18 05:01:31] [Rank 0] step:3901/10000 train_time:903269ms step_avg:231.55ms +[2025-07-18 05:01:31] [Rank 0] step:3901/10000 train_time:903269ms step_avg:231.55ms +[2025-07-18 05:01:36] [Rank 0] step:3921/10000 train_time:908084ms step_avg:231.59ms +[2025-07-18 05:01:36] [Rank 0] step:3921/10000 train_time:908084ms step_avg:231.59ms +[2025-07-18 05:01:41] [Rank 0] step:3941/10000 train_time:912899ms step_avg:231.64ms +[2025-07-18 05:01:41] [Rank 0] step:3941/10000 train_time:912899ms step_avg:231.64ms +[2025-07-18 05:01:45] [Rank 0] step:3961/10000 train_time:917716ms step_avg:231.69ms +[2025-07-18 05:01:45] [Rank 0] step:3961/10000 train_time:917716ms step_avg:231.69ms +[2025-07-18 05:01:50] [Rank 0] step:3981/10000 train_time:922532ms step_avg:231.73ms +[2025-07-18 05:01:50] [Rank 0] step:3981/10000 train_time:922532ms step_avg:231.73ms +[2025-07-18 05:01:59] [Rank 0] PRINT: step:4000/10000 val_loss:4.3843 train_time:927399ms step_avg:231.85ms +[2025-07-18 05:01:59] [Rank 0] PRINT: step:4000/10000 val_loss:4.3843 train_time:927399ms step_avg:231.85ms +[2025-07-18 05:02:00] [Rank 0] step:4001/10000 train_time:927418ms step_avg:231.80ms +[2025-07-18 05:02:00] [Rank 0] step:4001/10000 train_time:927418ms step_avg:231.80ms +[2025-07-18 05:02:04] [Rank 0] step:4021/10000 train_time:932155ms step_avg:231.82ms +[2025-07-18 05:02:04] [Rank 0] step:4021/10000 train_time:932155ms 
step_avg:231.82ms +[2025-07-18 05:02:09] [Rank 0] step:4041/10000 train_time:936969ms step_avg:231.87ms +[2025-07-18 05:02:09] [Rank 0] step:4041/10000 train_time:936969ms step_avg:231.87ms +[2025-07-18 05:02:14] [Rank 0] step:4061/10000 train_time:941782ms step_avg:231.91ms +[2025-07-18 05:02:14] [Rank 0] step:4061/10000 train_time:941782ms step_avg:231.91ms +[2025-07-18 05:02:19] [Rank 0] step:4081/10000 train_time:946598ms step_avg:231.95ms +[2025-07-18 05:02:19] [Rank 0] step:4081/10000 train_time:946598ms step_avg:231.95ms +[2025-07-18 05:02:24] [Rank 0] step:4101/10000 train_time:951415ms step_avg:232.00ms +[2025-07-18 05:02:24] [Rank 0] step:4101/10000 train_time:951415ms step_avg:232.00ms +[2025-07-18 05:02:29] [Rank 0] step:4121/10000 train_time:956232ms step_avg:232.04ms +[2025-07-18 05:02:29] [Rank 0] step:4121/10000 train_time:956232ms step_avg:232.04ms +[2025-07-18 05:02:34] [Rank 0] PRINT: step:4125/10000 val_loss:4.4243 train_time:957495ms step_avg:232.12ms +[2025-07-18 05:02:34] [Rank 0] PRINT: step:4125/10000 val_loss:4.4243 train_time:957495ms step_avg:232.12ms +[2025-07-18 05:02:38] [Rank 0] step:4141/10000 train_time:961042ms step_avg:232.08ms +[2025-07-18 05:02:38] [Rank 0] step:4141/10000 train_time:961042ms step_avg:232.08ms +[2025-07-18 05:02:43] [Rank 0] step:4161/10000 train_time:965858ms step_avg:232.12ms +[2025-07-18 05:02:43] [Rank 0] step:4161/10000 train_time:965858ms step_avg:232.12ms +[2025-07-18 05:02:48] [Rank 0] step:4181/10000 train_time:970669ms step_avg:232.16ms +[2025-07-18 05:02:48] [Rank 0] step:4181/10000 train_time:970669ms step_avg:232.16ms +[2025-07-18 05:02:52] [Rank 0] step:4201/10000 train_time:975482ms step_avg:232.20ms +[2025-07-18 05:02:52] [Rank 0] step:4201/10000 train_time:975482ms step_avg:232.20ms +[2025-07-18 05:02:57] [Rank 0] step:4221/10000 train_time:980296ms step_avg:232.24ms +[2025-07-18 05:02:57] [Rank 0] step:4221/10000 train_time:980296ms step_avg:232.24ms +[2025-07-18 05:03:02] [Rank 0] 
step:4241/10000 train_time:985110ms step_avg:232.28ms +[2025-07-18 05:03:02] [Rank 0] step:4241/10000 train_time:985110ms step_avg:232.28ms +[2025-07-18 05:03:09] [Rank 0] PRINT: step:4250/10000 val_loss:4.3855 train_time:987576ms step_avg:232.37ms +[2025-07-18 05:03:09] [Rank 0] PRINT: step:4250/10000 val_loss:4.3855 train_time:987576ms step_avg:232.37ms +[2025-07-18 05:03:12] [Rank 0] step:4261/10000 train_time:989922ms step_avg:232.32ms +[2025-07-18 05:03:12] [Rank 0] step:4261/10000 train_time:989922ms step_avg:232.32ms +[2025-07-18 05:03:16] [Rank 0] step:4281/10000 train_time:994738ms step_avg:232.36ms +[2025-07-18 05:03:16] [Rank 0] step:4281/10000 train_time:994738ms step_avg:232.36ms +[2025-07-18 05:03:21] [Rank 0] step:4301/10000 train_time:999553ms step_avg:232.40ms +[2025-07-18 05:03:21] [Rank 0] step:4301/10000 train_time:999553ms step_avg:232.40ms +[2025-07-18 05:03:26] [Rank 0] step:4321/10000 train_time:1004375ms step_avg:232.44ms +[2025-07-18 05:03:26] [Rank 0] step:4321/10000 train_time:1004375ms step_avg:232.44ms +[2025-07-18 05:03:31] [Rank 0] step:4341/10000 train_time:1009194ms step_avg:232.48ms +[2025-07-18 05:03:31] [Rank 0] step:4341/10000 train_time:1009194ms step_avg:232.48ms +[2025-07-18 05:03:36] [Rank 0] step:4361/10000 train_time:1014010ms step_avg:232.52ms +[2025-07-18 05:03:36] [Rank 0] step:4361/10000 train_time:1014010ms step_avg:232.52ms +[2025-07-18 05:03:44] [Rank 0] PRINT: step:4375/10000 val_loss:4.4403 train_time:1017678ms step_avg:232.61ms +[2025-07-18 05:03:44] [Rank 0] PRINT: step:4375/10000 val_loss:4.4403 train_time:1017678ms step_avg:232.61ms +[2025-07-18 05:03:45] [Rank 0] step:4381/10000 train_time:1018828ms step_avg:232.56ms +[2025-07-18 05:03:45] [Rank 0] step:4381/10000 train_time:1018828ms step_avg:232.56ms +[2025-07-18 05:03:50] [Rank 0] step:4401/10000 train_time:1023641ms step_avg:232.59ms +[2025-07-18 05:03:50] [Rank 0] step:4401/10000 train_time:1023641ms step_avg:232.59ms +[2025-07-18 05:03:55] [Rank 0] 
step:4421/10000 train_time:1028556ms step_avg:232.65ms +[2025-07-18 05:03:55] [Rank 0] step:4421/10000 train_time:1028556ms step_avg:232.65ms +[2025-07-18 05:04:00] [Rank 0] step:4441/10000 train_time:1033372ms step_avg:232.69ms +[2025-07-18 05:04:00] [Rank 0] step:4441/10000 train_time:1033372ms step_avg:232.69ms +[2025-07-18 05:04:04] [Rank 0] step:4461/10000 train_time:1038202ms step_avg:232.73ms +[2025-07-18 05:04:04] [Rank 0] step:4461/10000 train_time:1038202ms step_avg:232.73ms +[2025-07-18 05:04:09] [Rank 0] step:4481/10000 train_time:1043034ms step_avg:232.77ms +[2025-07-18 05:04:09] [Rank 0] step:4481/10000 train_time:1043034ms step_avg:232.77ms +[2025-07-18 05:04:19] [Rank 0] PRINT: step:4500/10000 val_loss:4.3427 train_time:1047927ms step_avg:232.87ms +[2025-07-18 05:04:19] [Rank 0] PRINT: step:4500/10000 val_loss:4.3427 train_time:1047927ms step_avg:232.87ms +[2025-07-18 05:04:19] [Rank 0] step:4501/10000 train_time:1047945ms step_avg:232.82ms +[2025-07-18 05:04:19] [Rank 0] step:4501/10000 train_time:1047945ms step_avg:232.82ms +[2025-07-18 05:04:24] [Rank 0] step:4521/10000 train_time:1052704ms step_avg:232.85ms +[2025-07-18 05:04:24] [Rank 0] step:4521/10000 train_time:1052704ms step_avg:232.85ms +[2025-07-18 05:04:28] [Rank 0] step:4541/10000 train_time:1057535ms step_avg:232.89ms +[2025-07-18 05:04:28] [Rank 0] step:4541/10000 train_time:1057535ms step_avg:232.89ms +[2025-07-18 05:04:33] [Rank 0] step:4561/10000 train_time:1062366ms step_avg:232.92ms +[2025-07-18 05:04:33] [Rank 0] step:4561/10000 train_time:1062366ms step_avg:232.92ms +[2025-07-18 05:04:38] [Rank 0] step:4581/10000 train_time:1067236ms step_avg:232.97ms +[2025-07-18 05:04:38] [Rank 0] step:4581/10000 train_time:1067236ms step_avg:232.97ms +[2025-07-18 05:04:43] [Rank 0] step:4601/10000 train_time:1072072ms step_avg:233.01ms +[2025-07-18 05:04:43] [Rank 0] step:4601/10000 train_time:1072072ms step_avg:233.01ms +[2025-07-18 05:04:48] [Rank 0] step:4621/10000 train_time:1076901ms 
step_avg:233.05ms +[2025-07-18 05:04:48] [Rank 0] step:4621/10000 train_time:1076901ms step_avg:233.05ms +[2025-07-18 05:04:53] [Rank 0] PRINT: step:4625/10000 val_loss:4.3745 train_time:1078171ms step_avg:233.12ms +[2025-07-18 05:04:53] [Rank 0] PRINT: step:4625/10000 val_loss:4.3745 train_time:1078171ms step_avg:233.12ms +[2025-07-18 05:04:57] [Rank 0] step:4641/10000 train_time:1081730ms step_avg:233.08ms +[2025-07-18 05:04:57] [Rank 0] step:4641/10000 train_time:1081730ms step_avg:233.08ms +[2025-07-18 05:05:02] [Rank 0] step:4661/10000 train_time:1086567ms step_avg:233.12ms +[2025-07-18 05:05:02] [Rank 0] step:4661/10000 train_time:1086567ms step_avg:233.12ms +[2025-07-18 05:05:07] [Rank 0] step:4681/10000 train_time:1091399ms step_avg:233.16ms +[2025-07-18 05:05:07] [Rank 0] step:4681/10000 train_time:1091399ms step_avg:233.16ms +[2025-07-18 05:05:12] [Rank 0] step:4701/10000 train_time:1096230ms step_avg:233.19ms +[2025-07-18 05:05:12] [Rank 0] step:4701/10000 train_time:1096230ms step_avg:233.19ms +[2025-07-18 05:05:17] [Rank 0] step:4721/10000 train_time:1101059ms step_avg:233.23ms +[2025-07-18 05:05:17] [Rank 0] step:4721/10000 train_time:1101059ms step_avg:233.23ms +[2025-07-18 05:05:22] [Rank 0] step:4741/10000 train_time:1105889ms step_avg:233.26ms +[2025-07-18 05:05:22] [Rank 0] step:4741/10000 train_time:1105889ms step_avg:233.26ms +[2025-07-18 05:05:28] [Rank 0] PRINT: step:4750/10000 val_loss:4.3823 train_time:1108369ms step_avg:233.34ms +[2025-07-18 05:05:28] [Rank 0] PRINT: step:4750/10000 val_loss:4.3823 train_time:1108369ms step_avg:233.34ms +[2025-07-18 05:05:31] [Rank 0] step:4761/10000 train_time:1110723ms step_avg:233.30ms +[2025-07-18 05:05:31] [Rank 0] step:4761/10000 train_time:1110723ms step_avg:233.30ms +[2025-07-18 05:05:35] [Rank 0] step:4781/10000 train_time:1115553ms step_avg:233.33ms +[2025-07-18 05:05:35] [Rank 0] step:4781/10000 train_time:1115553ms step_avg:233.33ms +[2025-07-18 05:05:40] [Rank 0] step:4801/10000 
train_time:1120380ms step_avg:233.36ms +[2025-07-18 05:05:40] [Rank 0] step:4801/10000 train_time:1120380ms step_avg:233.36ms +[2025-07-18 05:05:45] [Rank 0] step:4821/10000 train_time:1125213ms step_avg:233.40ms +[2025-07-18 05:05:45] [Rank 0] step:4821/10000 train_time:1125213ms step_avg:233.40ms +[2025-07-18 05:05:50] [Rank 0] step:4841/10000 train_time:1130051ms step_avg:233.43ms +[2025-07-18 05:05:50] [Rank 0] step:4841/10000 train_time:1130051ms step_avg:233.43ms +[2025-07-18 05:05:55] [Rank 0] step:4861/10000 train_time:1134886ms step_avg:233.47ms +[2025-07-18 05:05:55] [Rank 0] step:4861/10000 train_time:1134886ms step_avg:233.47ms +[2025-07-18 05:06:03] [Rank 0] PRINT: step:4875/10000 val_loss:4.4513 train_time:1138572ms step_avg:233.55ms +[2025-07-18 05:06:03] [Rank 0] PRINT: step:4875/10000 val_loss:4.4513 train_time:1138572ms step_avg:233.55ms +[2025-07-18 05:06:04] [Rank 0] step:4881/10000 train_time:1139726ms step_avg:233.50ms +[2025-07-18 05:06:04] [Rank 0] step:4881/10000 train_time:1139726ms step_avg:233.50ms +[2025-07-18 05:06:09] [Rank 0] step:4901/10000 train_time:1144561ms step_avg:233.54ms +[2025-07-18 05:06:09] [Rank 0] step:4901/10000 train_time:1144561ms step_avg:233.54ms +[2025-07-18 05:06:14] [Rank 0] step:4921/10000 train_time:1149394ms step_avg:233.57ms +[2025-07-18 05:06:14] [Rank 0] step:4921/10000 train_time:1149394ms step_avg:233.57ms +[2025-07-18 05:06:19] [Rank 0] step:4941/10000 train_time:1154231ms step_avg:233.60ms +[2025-07-18 05:06:19] [Rank 0] step:4941/10000 train_time:1154231ms step_avg:233.60ms +[2025-07-18 05:06:24] [Rank 0] step:4961/10000 train_time:1159063ms step_avg:233.63ms +[2025-07-18 05:06:24] [Rank 0] step:4961/10000 train_time:1159063ms step_avg:233.63ms +[2025-07-18 05:06:28] [Rank 0] step:4981/10000 train_time:1163895ms step_avg:233.67ms +[2025-07-18 05:06:28] [Rank 0] step:4981/10000 train_time:1163895ms step_avg:233.67ms +[2025-07-18 05:06:38] [Rank 0] PRINT: step:5000/10000 val_loss:4.3675 
train_time:1168790ms step_avg:233.76ms +[2025-07-18 05:06:38] [Rank 0] PRINT: step:5000/10000 val_loss:4.3675 train_time:1168790ms step_avg:233.76ms +[2025-07-18 05:06:38] [Rank 0] step:5001/10000 train_time:1168807ms step_avg:233.71ms +[2025-07-18 05:06:38] [Rank 0] step:5001/10000 train_time:1168807ms step_avg:233.71ms +[2025-07-18 05:06:43] [Rank 0] step:5021/10000 train_time:1173564ms step_avg:233.73ms +[2025-07-18 05:06:43] [Rank 0] step:5021/10000 train_time:1173564ms step_avg:233.73ms +[2025-07-18 05:06:48] [Rank 0] step:5041/10000 train_time:1178393ms step_avg:233.76ms +[2025-07-18 05:06:48] [Rank 0] step:5041/10000 train_time:1178393ms step_avg:233.76ms +[2025-07-18 05:06:52] [Rank 0] step:5061/10000 train_time:1183223ms step_avg:233.79ms +[2025-07-18 05:06:52] [Rank 0] step:5061/10000 train_time:1183223ms step_avg:233.79ms +[2025-07-18 05:06:57] [Rank 0] step:5081/10000 train_time:1188075ms step_avg:233.83ms +[2025-07-18 05:06:57] [Rank 0] step:5081/10000 train_time:1188075ms step_avg:233.83ms +[2025-07-18 05:07:02] [Rank 0] step:5101/10000 train_time:1192879ms step_avg:233.85ms +[2025-07-18 05:07:02] [Rank 0] step:5101/10000 train_time:1192879ms step_avg:233.85ms +[2025-07-18 05:07:07] [Rank 0] step:5121/10000 train_time:1197706ms step_avg:233.88ms +[2025-07-18 05:07:07] [Rank 0] step:5121/10000 train_time:1197706ms step_avg:233.88ms +[2025-07-18 05:07:12] [Rank 0] PRINT: step:5125/10000 val_loss:4.4355 train_time:1198972ms step_avg:233.95ms +[2025-07-18 05:07:12] [Rank 0] PRINT: step:5125/10000 val_loss:4.4355 train_time:1198972ms step_avg:233.95ms +[2025-07-18 05:07:16] [Rank 0] step:5141/10000 train_time:1202533ms step_avg:233.91ms +[2025-07-18 05:07:16] [Rank 0] step:5141/10000 train_time:1202533ms step_avg:233.91ms +[2025-07-18 05:07:21] [Rank 0] step:5161/10000 train_time:1207359ms step_avg:233.94ms +[2025-07-18 05:07:21] [Rank 0] step:5161/10000 train_time:1207359ms step_avg:233.94ms +[2025-07-18 05:07:26] [Rank 0] step:5181/10000 
train_time:1212192ms step_avg:233.97ms +[2025-07-18 05:07:26] [Rank 0] step:5181/10000 train_time:1212192ms step_avg:233.97ms +[2025-07-18 05:07:31] [Rank 0] step:5201/10000 train_time:1217069ms step_avg:234.01ms +[2025-07-18 05:07:31] [Rank 0] step:5201/10000 train_time:1217069ms step_avg:234.01ms +[2025-07-18 05:07:35] [Rank 0] step:5221/10000 train_time:1221976ms step_avg:234.05ms +[2025-07-18 05:07:35] [Rank 0] step:5221/10000 train_time:1221976ms step_avg:234.05ms +[2025-07-18 05:07:40] [Rank 0] step:5241/10000 train_time:1226975ms step_avg:234.11ms +[2025-07-18 05:07:40] [Rank 0] step:5241/10000 train_time:1226975ms step_avg:234.11ms +[2025-07-18 05:07:47] [Rank 0] PRINT: step:5250/10000 val_loss:4.2134 train_time:1229487ms step_avg:234.19ms +[2025-07-18 05:07:47] [Rank 0] PRINT: step:5250/10000 val_loss:4.2134 train_time:1229487ms step_avg:234.19ms +[2025-07-18 05:07:50] [Rank 0] step:5261/10000 train_time:1231875ms step_avg:234.15ms +[2025-07-18 05:07:50] [Rank 0] step:5261/10000 train_time:1231875ms step_avg:234.15ms +[2025-07-18 05:07:55] [Rank 0] step:5281/10000 train_time:1236780ms step_avg:234.19ms +[2025-07-18 05:07:55] [Rank 0] step:5281/10000 train_time:1236780ms step_avg:234.19ms +[2025-07-18 05:07:59] [Rank 0] step:5301/10000 train_time:1241681ms step_avg:234.24ms +[2025-07-18 05:07:59] [Rank 0] step:5301/10000 train_time:1241681ms step_avg:234.24ms +[2025-07-18 05:08:04] [Rank 0] step:5321/10000 train_time:1246584ms step_avg:234.28ms +[2025-07-18 05:08:04] [Rank 0] step:5321/10000 train_time:1246584ms step_avg:234.28ms +[2025-07-18 05:08:09] [Rank 0] step:5341/10000 train_time:1251493ms step_avg:234.32ms +[2025-07-18 05:08:09] [Rank 0] step:5341/10000 train_time:1251493ms step_avg:234.32ms +[2025-07-18 05:08:14] [Rank 0] step:5361/10000 train_time:1256392ms step_avg:234.36ms +[2025-07-18 05:08:14] [Rank 0] step:5361/10000 train_time:1256392ms step_avg:234.36ms +[2025-07-18 05:08:22] [Rank 0] PRINT: step:5375/10000 val_loss:4.2473 
train_time:1260134ms step_avg:234.44ms +[2025-07-18 05:08:22] [Rank 0] PRINT: step:5375/10000 val_loss:4.2473 train_time:1260134ms step_avg:234.44ms +[2025-07-18 05:08:24] [Rank 0] step:5381/10000 train_time:1261302ms step_avg:234.40ms +[2025-07-18 05:08:24] [Rank 0] step:5381/10000 train_time:1261302ms step_avg:234.40ms +[2025-07-18 05:08:29] [Rank 0] step:5401/10000 train_time:1266207ms step_avg:234.44ms +[2025-07-18 05:08:29] [Rank 0] step:5401/10000 train_time:1266207ms step_avg:234.44ms +[2025-07-18 05:08:34] [Rank 0] step:5421/10000 train_time:1271127ms step_avg:234.48ms +[2025-07-18 05:08:34] [Rank 0] step:5421/10000 train_time:1271127ms step_avg:234.48ms +[2025-07-18 05:08:39] [Rank 0] step:5441/10000 train_time:1276028ms step_avg:234.52ms +[2025-07-18 05:08:39] [Rank 0] step:5441/10000 train_time:1276028ms step_avg:234.52ms +[2025-07-18 05:08:43] [Rank 0] step:5461/10000 train_time:1280940ms step_avg:234.56ms +[2025-07-18 05:08:43] [Rank 0] step:5461/10000 train_time:1280940ms step_avg:234.56ms +[2025-07-18 05:08:48] [Rank 0] step:5481/10000 train_time:1285853ms step_avg:234.60ms +[2025-07-18 05:08:48] [Rank 0] step:5481/10000 train_time:1285853ms step_avg:234.60ms +[2025-07-18 05:08:58] [Rank 0] PRINT: step:5500/10000 val_loss:4.3245 train_time:1290815ms step_avg:234.69ms +[2025-07-18 05:08:58] [Rank 0] PRINT: step:5500/10000 val_loss:4.3245 train_time:1290815ms step_avg:234.69ms +[2025-07-18 05:08:58] [Rank 0] step:5501/10000 train_time:1290837ms step_avg:234.65ms +[2025-07-18 05:08:58] [Rank 0] step:5501/10000 train_time:1290837ms step_avg:234.65ms +[2025-07-18 05:09:03] [Rank 0] step:5521/10000 train_time:1295655ms step_avg:234.68ms +[2025-07-18 05:09:03] [Rank 0] step:5521/10000 train_time:1295655ms step_avg:234.68ms +[2025-07-18 05:09:08] [Rank 0] step:5541/10000 train_time:1300559ms step_avg:234.72ms +[2025-07-18 05:09:08] [Rank 0] step:5541/10000 train_time:1300559ms step_avg:234.72ms +[2025-07-18 05:09:13] [Rank 0] step:5561/10000 
train_time:1305465ms step_avg:234.75ms +[2025-07-18 05:09:13] [Rank 0] step:5561/10000 train_time:1305465ms step_avg:234.75ms +[2025-07-18 05:09:18] [Rank 0] step:5581/10000 train_time:1310365ms step_avg:234.79ms +[2025-07-18 05:09:18] [Rank 0] step:5581/10000 train_time:1310365ms step_avg:234.79ms +[2025-07-18 05:09:23] [Rank 0] step:5601/10000 train_time:1315745ms step_avg:234.91ms +[2025-07-18 05:09:23] [Rank 0] step:5601/10000 train_time:1315745ms step_avg:234.91ms +[2025-07-18 05:09:28] [Rank 0] step:5621/10000 train_time:1320656ms step_avg:234.95ms +[2025-07-18 05:09:28] [Rank 0] step:5621/10000 train_time:1320656ms step_avg:234.95ms +[2025-07-18 05:09:34] [Rank 0] PRINT: step:5625/10000 val_loss:4.3939 train_time:1321943ms step_avg:235.01ms +[2025-07-18 05:09:34] [Rank 0] PRINT: step:5625/10000 val_loss:4.3939 train_time:1321943ms step_avg:235.01ms +[2025-07-18 05:09:37] [Rank 0] step:5641/10000 train_time:1325562ms step_avg:234.99ms +[2025-07-18 05:09:37] [Rank 0] step:5641/10000 train_time:1325562ms step_avg:234.99ms +[2025-07-18 05:09:42] [Rank 0] step:5661/10000 train_time:1330470ms step_avg:235.02ms +[2025-07-18 05:09:42] [Rank 0] step:5661/10000 train_time:1330470ms step_avg:235.02ms +[2025-07-18 05:09:47] [Rank 0] step:5681/10000 train_time:1335376ms step_avg:235.06ms +[2025-07-18 05:09:47] [Rank 0] step:5681/10000 train_time:1335376ms step_avg:235.06ms +[2025-07-18 05:09:52] [Rank 0] step:5701/10000 train_time:1340281ms step_avg:235.10ms +[2025-07-18 05:09:52] [Rank 0] step:5701/10000 train_time:1340281ms step_avg:235.10ms +[2025-07-18 05:09:57] [Rank 0] step:5721/10000 train_time:1345184ms step_avg:235.13ms +[2025-07-18 05:09:57] [Rank 0] step:5721/10000 train_time:1345184ms step_avg:235.13ms +[2025-07-18 05:10:02] [Rank 0] step:5741/10000 train_time:1350092ms step_avg:235.17ms +[2025-07-18 05:10:02] [Rank 0] step:5741/10000 train_time:1350092ms step_avg:235.17ms +[2025-07-18 05:10:09] [Rank 0] PRINT: step:5750/10000 val_loss:4.3625 
train_time:1352601ms step_avg:235.23ms +[2025-07-18 05:10:09] [Rank 0] PRINT: step:5750/10000 val_loss:4.3625 train_time:1352601ms step_avg:235.23ms +[2025-07-18 05:10:12] [Rank 0] step:5761/10000 train_time:1354994ms step_avg:235.20ms +[2025-07-18 05:10:12] [Rank 0] step:5761/10000 train_time:1354994ms step_avg:235.20ms +[2025-07-18 05:10:17] [Rank 0] step:5781/10000 train_time:1359885ms step_avg:235.23ms +[2025-07-18 05:10:17] [Rank 0] step:5781/10000 train_time:1359885ms step_avg:235.23ms +[2025-07-18 05:10:21] [Rank 0] step:5801/10000 train_time:1364777ms step_avg:235.27ms +[2025-07-18 05:10:21] [Rank 0] step:5801/10000 train_time:1364777ms step_avg:235.27ms +[2025-07-18 05:10:26] [Rank 0] step:5821/10000 train_time:1369664ms step_avg:235.30ms +[2025-07-18 05:10:26] [Rank 0] step:5821/10000 train_time:1369664ms step_avg:235.30ms +[2025-07-18 05:10:31] [Rank 0] step:5841/10000 train_time:1374558ms step_avg:235.33ms +[2025-07-18 05:10:31] [Rank 0] step:5841/10000 train_time:1374558ms step_avg:235.33ms +[2025-07-18 05:10:36] [Rank 0] step:5861/10000 train_time:1379446ms step_avg:235.36ms +[2025-07-18 05:10:36] [Rank 0] step:5861/10000 train_time:1379446ms step_avg:235.36ms +[2025-07-18 05:10:44] [Rank 0] PRINT: step:5875/10000 val_loss:4.3714 train_time:1383164ms step_avg:235.43ms +[2025-07-18 05:10:44] [Rank 0] PRINT: step:5875/10000 val_loss:4.3714 train_time:1383164ms step_avg:235.43ms +[2025-07-18 05:10:46] [Rank 0] step:5881/10000 train_time:1384327ms step_avg:235.39ms +[2025-07-18 05:10:46] [Rank 0] step:5881/10000 train_time:1384327ms step_avg:235.39ms +[2025-07-18 05:10:51] [Rank 0] step:5901/10000 train_time:1389213ms step_avg:235.42ms +[2025-07-18 05:10:51] [Rank 0] step:5901/10000 train_time:1389213ms step_avg:235.42ms +[2025-07-18 05:10:55] [Rank 0] step:5921/10000 train_time:1394093ms step_avg:235.45ms +[2025-07-18 05:10:55] [Rank 0] step:5921/10000 train_time:1394093ms step_avg:235.45ms +[2025-07-18 05:11:00] [Rank 0] step:5941/10000 
train_time:1398985ms step_avg:235.48ms +[2025-07-18 05:11:00] [Rank 0] step:5941/10000 train_time:1398985ms step_avg:235.48ms +[2025-07-18 05:11:05] [Rank 0] step:5961/10000 train_time:1403884ms step_avg:235.51ms +[2025-07-18 05:11:05] [Rank 0] step:5961/10000 train_time:1403884ms step_avg:235.51ms +[2025-07-18 05:11:10] [Rank 0] step:5981/10000 train_time:1408776ms step_avg:235.54ms +[2025-07-18 05:11:10] [Rank 0] step:5981/10000 train_time:1408776ms step_avg:235.54ms +[2025-07-18 05:11:19] [Rank 0] PRINT: step:6000/10000 val_loss:4.3606 train_time:1413730ms step_avg:235.62ms +[2025-07-18 05:11:19] [Rank 0] PRINT: step:6000/10000 val_loss:4.3606 train_time:1413730ms step_avg:235.62ms +[2025-07-18 05:11:19] [Rank 0] step:6001/10000 train_time:1413751ms step_avg:235.59ms +[2025-07-18 05:11:19] [Rank 0] step:6001/10000 train_time:1413751ms step_avg:235.59ms +[2025-07-18 05:11:24] [Rank 0] step:6021/10000 train_time:1418565ms step_avg:235.60ms +[2025-07-18 05:11:24] [Rank 0] step:6021/10000 train_time:1418565ms step_avg:235.60ms +[2025-07-18 05:11:29] [Rank 0] step:6041/10000 train_time:1423458ms step_avg:235.63ms +[2025-07-18 05:11:29] [Rank 0] step:6041/10000 train_time:1423458ms step_avg:235.63ms +[2025-07-18 05:11:34] [Rank 0] step:6061/10000 train_time:1428344ms step_avg:235.66ms +[2025-07-18 05:11:34] [Rank 0] step:6061/10000 train_time:1428344ms step_avg:235.66ms +[2025-07-18 05:11:39] [Rank 0] step:6081/10000 train_time:1433239ms step_avg:235.69ms +[2025-07-18 05:11:39] [Rank 0] step:6081/10000 train_time:1433239ms step_avg:235.69ms +[2025-07-18 05:11:44] [Rank 0] step:6101/10000 train_time:1438173ms step_avg:235.73ms +[2025-07-18 05:11:44] [Rank 0] step:6101/10000 train_time:1438173ms step_avg:235.73ms +[2025-07-18 05:11:49] [Rank 0] step:6121/10000 train_time:1443073ms step_avg:235.76ms +[2025-07-18 05:11:49] [Rank 0] step:6121/10000 train_time:1443073ms step_avg:235.76ms +[2025-07-18 05:11:54] [Rank 0] PRINT: step:6125/10000 val_loss:4.4618 
train_time:1444356ms step_avg:235.81ms +[2025-07-18 05:11:54] [Rank 0] PRINT: step:6125/10000 val_loss:4.4618 train_time:1444356ms step_avg:235.81ms +[2025-07-18 05:11:58] [Rank 0] step:6141/10000 train_time:1447964ms step_avg:235.79ms +[2025-07-18 05:11:58] [Rank 0] step:6141/10000 train_time:1447964ms step_avg:235.79ms +[2025-07-18 05:12:03] [Rank 0] step:6161/10000 train_time:1452854ms step_avg:235.81ms +[2025-07-18 05:12:03] [Rank 0] step:6161/10000 train_time:1452854ms step_avg:235.81ms +[2025-07-18 05:12:08] [Rank 0] step:6181/10000 train_time:1457760ms step_avg:235.85ms +[2025-07-18 05:12:08] [Rank 0] step:6181/10000 train_time:1457760ms step_avg:235.85ms +[2025-07-18 05:12:13] [Rank 0] step:6201/10000 train_time:1462669ms step_avg:235.88ms +[2025-07-18 05:12:13] [Rank 0] step:6201/10000 train_time:1462669ms step_avg:235.88ms +[2025-07-18 05:12:18] [Rank 0] step:6221/10000 train_time:1467578ms step_avg:235.91ms +[2025-07-18 05:12:18] [Rank 0] step:6221/10000 train_time:1467578ms step_avg:235.91ms +[2025-07-18 05:12:23] [Rank 0] step:6241/10000 train_time:1472483ms step_avg:235.94ms +[2025-07-18 05:12:23] [Rank 0] step:6241/10000 train_time:1472483ms step_avg:235.94ms +[2025-07-18 05:12:30] [Rank 0] PRINT: step:6250/10000 val_loss:4.4434 train_time:1474995ms step_avg:236.00ms +[2025-07-18 05:12:30] [Rank 0] PRINT: step:6250/10000 val_loss:4.4434 train_time:1474995ms step_avg:236.00ms +[2025-07-18 05:12:32] [Rank 0] step:6261/10000 train_time:1477384ms step_avg:235.97ms +[2025-07-18 05:12:32] [Rank 0] step:6261/10000 train_time:1477384ms step_avg:235.97ms +[2025-07-18 05:12:37] [Rank 0] step:6281/10000 train_time:1482291ms step_avg:236.00ms +[2025-07-18 05:12:37] [Rank 0] step:6281/10000 train_time:1482291ms step_avg:236.00ms +[2025-07-18 05:12:42] [Rank 0] step:6301/10000 train_time:1487189ms step_avg:236.02ms +[2025-07-18 05:12:42] [Rank 0] step:6301/10000 train_time:1487189ms step_avg:236.02ms +[2025-07-18 05:12:47] [Rank 0] step:6321/10000 
train_time:1492087ms step_avg:236.05ms +[2025-07-18 05:12:47] [Rank 0] step:6321/10000 train_time:1492087ms step_avg:236.05ms +[2025-07-18 05:12:52] [Rank 0] step:6341/10000 train_time:1496990ms step_avg:236.08ms +[2025-07-18 05:12:52] [Rank 0] step:6341/10000 train_time:1496990ms step_avg:236.08ms +[2025-07-18 05:12:57] [Rank 0] step:6361/10000 train_time:1501885ms step_avg:236.11ms +[2025-07-18 05:12:57] [Rank 0] step:6361/10000 train_time:1501885ms step_avg:236.11ms +[2025-07-18 05:13:05] [Rank 0] PRINT: step:6375/10000 val_loss:4.3966 train_time:1505611ms step_avg:236.17ms +[2025-07-18 05:13:05] [Rank 0] PRINT: step:6375/10000 val_loss:4.3966 train_time:1505611ms step_avg:236.17ms +[2025-07-18 05:13:06] [Rank 0] step:6381/10000 train_time:1506779ms step_avg:236.14ms +[2025-07-18 05:13:06] [Rank 0] step:6381/10000 train_time:1506779ms step_avg:236.14ms +[2025-07-18 05:13:11] [Rank 0] step:6401/10000 train_time:1511669ms step_avg:236.16ms +[2025-07-18 05:13:11] [Rank 0] step:6401/10000 train_time:1511669ms step_avg:236.16ms +[2025-07-18 05:13:16] [Rank 0] step:6421/10000 train_time:1516572ms step_avg:236.19ms +[2025-07-18 05:13:16] [Rank 0] step:6421/10000 train_time:1516572ms step_avg:236.19ms +[2025-07-18 05:13:21] [Rank 0] step:6441/10000 train_time:1521473ms step_avg:236.22ms +[2025-07-18 05:13:21] [Rank 0] step:6441/10000 train_time:1521473ms step_avg:236.22ms +[2025-07-18 05:13:26] [Rank 0] step:6461/10000 train_time:1526382ms step_avg:236.25ms +[2025-07-18 05:13:26] [Rank 0] step:6461/10000 train_time:1526382ms step_avg:236.25ms +[2025-07-18 05:13:31] [Rank 0] step:6481/10000 train_time:1531287ms step_avg:236.27ms +[2025-07-18 05:13:31] [Rank 0] step:6481/10000 train_time:1531287ms step_avg:236.27ms +[2025-07-18 05:13:40] [Rank 0] PRINT: step:6500/10000 val_loss:4.4143 train_time:1536252ms step_avg:236.35ms +[2025-07-18 05:13:40] [Rank 0] PRINT: step:6500/10000 val_loss:4.4143 train_time:1536252ms step_avg:236.35ms +[2025-07-18 05:13:41] [Rank 0] 
step:6501/10000 train_time:1536273ms step_avg:236.31ms +[2025-07-18 05:13:41] [Rank 0] step:6501/10000 train_time:1536273ms step_avg:236.31ms +[2025-07-18 05:13:46] [Rank 0] step:6521/10000 train_time:1541096ms step_avg:236.33ms +[2025-07-18 05:13:46] [Rank 0] step:6521/10000 train_time:1541096ms step_avg:236.33ms +[2025-07-18 05:13:50] [Rank 0] step:6541/10000 train_time:1546004ms step_avg:236.36ms +[2025-07-18 05:13:50] [Rank 0] step:6541/10000 train_time:1546004ms step_avg:236.36ms +[2025-07-18 05:13:55] [Rank 0] step:6561/10000 train_time:1550921ms step_avg:236.38ms +[2025-07-18 05:13:55] [Rank 0] step:6561/10000 train_time:1550921ms step_avg:236.38ms +[2025-07-18 05:14:00] [Rank 0] step:6581/10000 train_time:1555833ms step_avg:236.41ms +[2025-07-18 05:14:00] [Rank 0] step:6581/10000 train_time:1555833ms step_avg:236.41ms +[2025-07-18 05:14:05] [Rank 0] step:6601/10000 train_time:1560751ms step_avg:236.44ms +[2025-07-18 05:14:05] [Rank 0] step:6601/10000 train_time:1560751ms step_avg:236.44ms +[2025-07-18 05:14:11] [Rank 0] step:6621/10000 train_time:1566138ms step_avg:236.54ms +[2025-07-18 05:14:11] [Rank 0] step:6621/10000 train_time:1566138ms step_avg:236.54ms +[2025-07-18 05:14:16] [Rank 0] PRINT: step:6625/10000 val_loss:4.3268 train_time:1567429ms step_avg:236.59ms +[2025-07-18 05:14:16] [Rank 0] PRINT: step:6625/10000 val_loss:4.3268 train_time:1567429ms step_avg:236.59ms +[2025-07-18 05:14:20] [Rank 0] step:6641/10000 train_time:1571044ms step_avg:236.57ms +[2025-07-18 05:14:20] [Rank 0] step:6641/10000 train_time:1571044ms step_avg:236.57ms +[2025-07-18 05:14:25] [Rank 0] step:6661/10000 train_time:1575947ms step_avg:236.59ms +[2025-07-18 05:14:25] [Rank 0] step:6661/10000 train_time:1575947ms step_avg:236.59ms +[2025-07-18 05:14:30] [Rank 0] step:6681/10000 train_time:1580905ms step_avg:236.63ms +[2025-07-18 05:14:30] [Rank 0] step:6681/10000 train_time:1580905ms step_avg:236.63ms +[2025-07-18 05:14:35] [Rank 0] step:6701/10000 train_time:1585874ms 
step_avg:236.66ms +[2025-07-18 05:14:35] [Rank 0] step:6701/10000 train_time:1585874ms step_avg:236.66ms +[2025-07-18 05:14:40] [Rank 0] step:6721/10000 train_time:1590863ms step_avg:236.70ms +[2025-07-18 05:14:40] [Rank 0] step:6721/10000 train_time:1590863ms step_avg:236.70ms +[2025-07-18 05:14:45] [Rank 0] step:6741/10000 train_time:1595847ms step_avg:236.74ms +[2025-07-18 05:14:45] [Rank 0] step:6741/10000 train_time:1595847ms step_avg:236.74ms +[2025-07-18 05:14:52] [Rank 0] PRINT: step:6750/10000 val_loss:4.4189 train_time:1598397ms step_avg:236.80ms +[2025-07-18 05:14:52] [Rank 0] PRINT: step:6750/10000 val_loss:4.4189 train_time:1598397ms step_avg:236.80ms +[2025-07-18 05:14:55] [Rank 0] step:6761/10000 train_time:1600825ms step_avg:236.77ms +[2025-07-18 05:14:55] [Rank 0] step:6761/10000 train_time:1600825ms step_avg:236.77ms +[2025-07-18 05:15:00] [Rank 0] step:6781/10000 train_time:1605803ms step_avg:236.81ms +[2025-07-18 05:15:00] [Rank 0] step:6781/10000 train_time:1605803ms step_avg:236.81ms +[2025-07-18 05:15:05] [Rank 0] step:6801/10000 train_time:1610778ms step_avg:236.84ms +[2025-07-18 05:15:05] [Rank 0] step:6801/10000 train_time:1610778ms step_avg:236.84ms +[2025-07-18 05:15:10] [Rank 0] step:6821/10000 train_time:1615748ms step_avg:236.88ms +[2025-07-18 05:15:10] [Rank 0] step:6821/10000 train_time:1615748ms step_avg:236.88ms +[2025-07-18 05:15:15] [Rank 0] step:6841/10000 train_time:1620725ms step_avg:236.91ms +[2025-07-18 05:15:15] [Rank 0] step:6841/10000 train_time:1620725ms step_avg:236.91ms +[2025-07-18 05:15:20] [Rank 0] step:6861/10000 train_time:1625692ms step_avg:236.95ms +[2025-07-18 05:15:20] [Rank 0] step:6861/10000 train_time:1625692ms step_avg:236.95ms +[2025-07-18 05:15:28] [Rank 0] PRINT: step:6875/10000 val_loss:4.4754 train_time:1629480ms step_avg:237.02ms +[2025-07-18 05:15:28] [Rank 0] PRINT: step:6875/10000 val_loss:4.4754 train_time:1629480ms step_avg:237.02ms +[2025-07-18 05:15:29] [Rank 0] step:6881/10000 
train_time:1630664ms step_avg:236.98ms +[2025-07-18 05:15:29] [Rank 0] step:6881/10000 train_time:1630664ms step_avg:236.98ms +[2025-07-18 05:15:34] [Rank 0] step:6901/10000 train_time:1635631ms step_avg:237.01ms +[2025-07-18 05:15:34] [Rank 0] step:6901/10000 train_time:1635631ms step_avg:237.01ms +[2025-07-18 05:15:39] [Rank 0] step:6921/10000 train_time:1640599ms step_avg:237.05ms +[2025-07-18 05:15:39] [Rank 0] step:6921/10000 train_time:1640599ms step_avg:237.05ms +[2025-07-18 05:15:44] [Rank 0] step:6941/10000 train_time:1645590ms step_avg:237.08ms +[2025-07-18 05:15:44] [Rank 0] step:6941/10000 train_time:1645590ms step_avg:237.08ms +[2025-07-18 05:15:49] [Rank 0] step:6961/10000 train_time:1650567ms step_avg:237.12ms +[2025-07-18 05:15:49] [Rank 0] step:6961/10000 train_time:1650567ms step_avg:237.12ms +[2025-07-18 05:15:54] [Rank 0] step:6981/10000 train_time:1655541ms step_avg:237.15ms +[2025-07-18 05:15:54] [Rank 0] step:6981/10000 train_time:1655541ms step_avg:237.15ms +[2025-07-18 05:16:04] [Rank 0] PRINT: step:7000/10000 val_loss:4.3264 train_time:1660576ms step_avg:237.23ms +[2025-07-18 05:16:04] [Rank 0] PRINT: step:7000/10000 val_loss:4.3264 train_time:1660576ms step_avg:237.23ms +[2025-07-18 05:16:04] [Rank 0] step:7001/10000 train_time:1660594ms step_avg:237.19ms +[2025-07-18 05:16:04] [Rank 0] step:7001/10000 train_time:1660594ms step_avg:237.19ms +[2025-07-18 05:16:09] [Rank 0] step:7021/10000 train_time:1665491ms step_avg:237.22ms +[2025-07-18 05:16:09] [Rank 0] step:7021/10000 train_time:1665491ms step_avg:237.22ms +[2025-07-18 05:16:14] [Rank 0] step:7041/10000 train_time:1670467ms step_avg:237.25ms +[2025-07-18 05:16:14] [Rank 0] step:7041/10000 train_time:1670467ms step_avg:237.25ms +[2025-07-18 05:16:19] [Rank 0] step:7061/10000 train_time:1675434ms step_avg:237.28ms +[2025-07-18 05:16:19] [Rank 0] step:7061/10000 train_time:1675434ms step_avg:237.28ms +[2025-07-18 05:16:24] [Rank 0] step:7081/10000 train_time:1680415ms step_avg:237.31ms 
+[2025-07-18 05:16:24] [Rank 0] step:7081/10000 train_time:1680415ms step_avg:237.31ms +[2025-07-18 05:16:29] [Rank 0] step:7101/10000 train_time:1685390ms step_avg:237.35ms +[2025-07-18 05:16:29] [Rank 0] step:7101/10000 train_time:1685390ms step_avg:237.35ms +[2025-07-18 05:16:34] [Rank 0] step:7121/10000 train_time:1690365ms step_avg:237.38ms +[2025-07-18 05:16:34] [Rank 0] step:7121/10000 train_time:1690365ms step_avg:237.38ms +[2025-07-18 05:16:39] [Rank 0] PRINT: step:7125/10000 val_loss:4.4333 train_time:1691672ms step_avg:237.43ms +[2025-07-18 05:16:39] [Rank 0] PRINT: step:7125/10000 val_loss:4.4333 train_time:1691672ms step_avg:237.43ms +[2025-07-18 05:16:43] [Rank 0] step:7141/10000 train_time:1695347ms step_avg:237.41ms +[2025-07-18 05:16:43] [Rank 0] step:7141/10000 train_time:1695347ms step_avg:237.41ms +[2025-07-18 05:16:48] [Rank 0] step:7161/10000 train_time:1700328ms step_avg:237.44ms +[2025-07-18 05:16:48] [Rank 0] step:7161/10000 train_time:1700328ms step_avg:237.44ms +[2025-07-18 05:16:53] [Rank 0] step:7181/10000 train_time:1705298ms step_avg:237.47ms +[2025-07-18 05:16:53] [Rank 0] step:7181/10000 train_time:1705298ms step_avg:237.47ms +[2025-07-18 05:16:58] [Rank 0] step:7201/10000 train_time:1710289ms step_avg:237.51ms +[2025-07-18 05:16:58] [Rank 0] step:7201/10000 train_time:1710289ms step_avg:237.51ms +[2025-07-18 05:17:03] [Rank 0] step:7221/10000 train_time:1715273ms step_avg:237.54ms +[2025-07-18 05:17:03] [Rank 0] step:7221/10000 train_time:1715273ms step_avg:237.54ms +[2025-07-18 05:17:08] [Rank 0] step:7241/10000 train_time:1720248ms step_avg:237.57ms +[2025-07-18 05:17:08] [Rank 0] step:7241/10000 train_time:1720248ms step_avg:237.57ms +[2025-07-18 05:17:15] [Rank 0] PRINT: step:7250/10000 val_loss:4.4295 train_time:1722798ms step_avg:237.63ms +[2025-07-18 05:17:15] [Rank 0] PRINT: step:7250/10000 val_loss:4.4295 train_time:1722798ms step_avg:237.63ms +[2025-07-18 05:17:18] [Rank 0] step:7261/10000 train_time:1725222ms 
step_avg:237.60ms +[2025-07-18 05:17:18] [Rank 0] step:7261/10000 train_time:1725222ms step_avg:237.60ms +[2025-07-18 05:17:23] [Rank 0] step:7281/10000 train_time:1730199ms step_avg:237.63ms +[2025-07-18 05:17:23] [Rank 0] step:7281/10000 train_time:1730199ms step_avg:237.63ms +[2025-07-18 05:17:28] [Rank 0] step:7301/10000 train_time:1735174ms step_avg:237.66ms +[2025-07-18 05:17:28] [Rank 0] step:7301/10000 train_time:1735174ms step_avg:237.66ms +[2025-07-18 05:17:33] [Rank 0] step:7321/10000 train_time:1740165ms step_avg:237.69ms +[2025-07-18 05:17:33] [Rank 0] step:7321/10000 train_time:1740165ms step_avg:237.69ms +[2025-07-18 05:17:38] [Rank 0] step:7341/10000 train_time:1745141ms step_avg:237.73ms +[2025-07-18 05:17:38] [Rank 0] step:7341/10000 train_time:1745141ms step_avg:237.73ms +[2025-07-18 05:17:43] [Rank 0] step:7361/10000 train_time:1750130ms step_avg:237.76ms +[2025-07-18 05:17:43] [Rank 0] step:7361/10000 train_time:1750130ms step_avg:237.76ms +[2025-07-18 05:17:51] [Rank 0] PRINT: step:7375/10000 val_loss:4.5543 train_time:1753926ms step_avg:237.82ms +[2025-07-18 05:17:51] [Rank 0] PRINT: step:7375/10000 val_loss:4.5543 train_time:1753926ms step_avg:237.82ms +[2025-07-18 05:17:53] [Rank 0] step:7381/10000 train_time:1755110ms step_avg:237.79ms +[2025-07-18 05:17:53] [Rank 0] step:7381/10000 train_time:1755110ms step_avg:237.79ms +[2025-07-18 05:17:58] [Rank 0] step:7401/10000 train_time:1760090ms step_avg:237.82ms +[2025-07-18 05:17:58] [Rank 0] step:7401/10000 train_time:1760090ms step_avg:237.82ms +[2025-07-18 05:18:03] [Rank 0] step:7421/10000 train_time:1765069ms step_avg:237.85ms +[2025-07-18 05:18:03] [Rank 0] step:7421/10000 train_time:1765069ms step_avg:237.85ms +[2025-07-18 05:18:08] [Rank 0] step:7441/10000 train_time:1770062ms step_avg:237.88ms +[2025-07-18 05:18:08] [Rank 0] step:7441/10000 train_time:1770062ms step_avg:237.88ms +[2025-07-18 05:18:13] [Rank 0] step:7461/10000 train_time:1775043ms step_avg:237.91ms +[2025-07-18 
05:18:13] [Rank 0] step:7461/10000 train_time:1775043ms step_avg:237.91ms +[2025-07-18 05:18:18] [Rank 0] step:7481/10000 train_time:1780129ms step_avg:237.95ms +[2025-07-18 05:18:18] [Rank 0] step:7481/10000 train_time:1780129ms step_avg:237.95ms +[2025-07-18 05:18:27] [Rank 0] PRINT: step:7500/10000 val_loss:4.5361 train_time:1785182ms step_avg:238.02ms +[2025-07-18 05:18:27] [Rank 0] PRINT: step:7500/10000 val_loss:4.5361 train_time:1785182ms step_avg:238.02ms +[2025-07-18 05:18:28] [Rank 0] step:7501/10000 train_time:1785200ms step_avg:237.99ms +[2025-07-18 05:18:28] [Rank 0] step:7501/10000 train_time:1785200ms step_avg:237.99ms +[2025-07-18 05:18:33] [Rank 0] step:7521/10000 train_time:1790111ms step_avg:238.02ms +[2025-07-18 05:18:33] [Rank 0] step:7521/10000 train_time:1790111ms step_avg:238.02ms +[2025-07-18 05:18:38] [Rank 0] step:7541/10000 train_time:1795099ms step_avg:238.05ms +[2025-07-18 05:18:38] [Rank 0] step:7541/10000 train_time:1795099ms step_avg:238.05ms +[2025-07-18 05:18:43] [Rank 0] step:7561/10000 train_time:1800086ms step_avg:238.08ms +[2025-07-18 05:18:43] [Rank 0] step:7561/10000 train_time:1800086ms step_avg:238.08ms +[2025-07-18 05:18:48] [Rank 0] step:7581/10000 train_time:1805087ms step_avg:238.11ms +[2025-07-18 05:18:48] [Rank 0] step:7581/10000 train_time:1805087ms step_avg:238.11ms +[2025-07-18 05:18:53] [Rank 0] step:7601/10000 train_time:1810090ms step_avg:238.14ms +[2025-07-18 05:18:53] [Rank 0] step:7601/10000 train_time:1810090ms step_avg:238.14ms +[2025-07-18 05:18:58] [Rank 0] step:7621/10000 train_time:1815249ms step_avg:238.19ms +[2025-07-18 05:18:58] [Rank 0] step:7621/10000 train_time:1815249ms step_avg:238.19ms +[2025-07-18 05:19:03] [Rank 0] PRINT: step:7625/10000 val_loss:4.5885 train_time:1816485ms step_avg:238.23ms +[2025-07-18 05:19:03] [Rank 0] PRINT: step:7625/10000 val_loss:4.5885 train_time:1816485ms step_avg:238.23ms +[2025-07-18 05:19:07] [Rank 0] step:7641/10000 train_time:1820164ms step_avg:238.21ms 
+[2025-07-18 05:19:07] [Rank 0] step:7641/10000 train_time:1820164ms step_avg:238.21ms +[2025-07-18 05:19:12] [Rank 0] step:7661/10000 train_time:1825167ms step_avg:238.24ms +[2025-07-18 05:19:12] [Rank 0] step:7661/10000 train_time:1825167ms step_avg:238.24ms +[2025-07-18 05:19:17] [Rank 0] step:7681/10000 train_time:1830185ms step_avg:238.27ms +[2025-07-18 05:19:17] [Rank 0] step:7681/10000 train_time:1830185ms step_avg:238.27ms +[2025-07-18 05:19:22] [Rank 0] step:7701/10000 train_time:1835178ms step_avg:238.30ms +[2025-07-18 05:19:22] [Rank 0] step:7701/10000 train_time:1835178ms step_avg:238.30ms +[2025-07-18 05:19:27] [Rank 0] step:7721/10000 train_time:1840177ms step_avg:238.33ms +[2025-07-18 05:19:27] [Rank 0] step:7721/10000 train_time:1840177ms step_avg:238.33ms +[2025-07-18 05:19:32] [Rank 0] step:7741/10000 train_time:1845166ms step_avg:238.36ms +[2025-07-18 05:19:32] [Rank 0] step:7741/10000 train_time:1845166ms step_avg:238.36ms +[2025-07-18 05:19:39] [Rank 0] PRINT: step:7750/10000 val_loss:4.5776 train_time:1847738ms step_avg:238.42ms +[2025-07-18 05:19:39] [Rank 0] PRINT: step:7750/10000 val_loss:4.5776 train_time:1847738ms step_avg:238.42ms +[2025-07-18 05:19:42] [Rank 0] step:7761/10000 train_time:1850172ms step_avg:238.39ms +[2025-07-18 05:19:42] [Rank 0] step:7761/10000 train_time:1850172ms step_avg:238.39ms +[2025-07-18 05:19:47] [Rank 0] step:7781/10000 train_time:1855164ms step_avg:238.42ms +[2025-07-18 05:19:47] [Rank 0] step:7781/10000 train_time:1855164ms step_avg:238.42ms +[2025-07-18 05:19:52] [Rank 0] step:7801/10000 train_time:1860159ms step_avg:238.45ms +[2025-07-18 05:19:52] [Rank 0] step:7801/10000 train_time:1860159ms step_avg:238.45ms +[2025-07-18 05:19:57] [Rank 0] step:7821/10000 train_time:1865156ms step_avg:238.48ms +[2025-07-18 05:19:57] [Rank 0] step:7821/10000 train_time:1865156ms step_avg:238.48ms +[2025-07-18 05:20:02] [Rank 0] step:7841/10000 train_time:1870148ms step_avg:238.51ms +[2025-07-18 05:20:02] [Rank 0] 
step:7841/10000 train_time:1870148ms step_avg:238.51ms +[2025-07-18 05:20:07] [Rank 0] step:7861/10000 train_time:1875137ms step_avg:238.54ms +[2025-07-18 05:20:07] [Rank 0] step:7861/10000 train_time:1875137ms step_avg:238.54ms +[2025-07-18 05:20:15] [Rank 0] PRINT: step:7875/10000 val_loss:4.5705 train_time:1878940ms step_avg:238.60ms +[2025-07-18 05:20:15] [Rank 0] PRINT: step:7875/10000 val_loss:4.5705 train_time:1878940ms step_avg:238.60ms +[2025-07-18 05:20:17] [Rank 0] step:7881/10000 train_time:1880126ms step_avg:238.56ms +[2025-07-18 05:20:17] [Rank 0] step:7881/10000 train_time:1880126ms step_avg:238.56ms +[2025-07-18 05:20:22] [Rank 0] step:7901/10000 train_time:1885113ms step_avg:238.59ms +[2025-07-18 05:20:22] [Rank 0] step:7901/10000 train_time:1885113ms step_avg:238.59ms +[2025-07-18 05:20:27] [Rank 0] step:7921/10000 train_time:1890104ms step_avg:238.62ms +[2025-07-18 05:20:27] [Rank 0] step:7921/10000 train_time:1890104ms step_avg:238.62ms +[2025-07-18 05:20:32] [Rank 0] step:7941/10000 train_time:1895095ms step_avg:238.65ms +[2025-07-18 05:20:32] [Rank 0] step:7941/10000 train_time:1895095ms step_avg:238.65ms +[2025-07-18 05:20:37] [Rank 0] step:7961/10000 train_time:1900108ms step_avg:238.68ms +[2025-07-18 05:20:37] [Rank 0] step:7961/10000 train_time:1900108ms step_avg:238.68ms +[2025-07-18 05:20:42] [Rank 0] step:7981/10000 train_time:1905094ms step_avg:238.70ms +[2025-07-18 05:20:42] [Rank 0] step:7981/10000 train_time:1905094ms step_avg:238.70ms +[2025-07-18 05:20:51] [Rank 0] PRINT: step:8000/10000 val_loss:4.5253 train_time:1910152ms step_avg:238.77ms +[2025-07-18 05:20:51] [Rank 0] PRINT: step:8000/10000 val_loss:4.5253 train_time:1910152ms step_avg:238.77ms +[2025-07-18 05:20:52] [Rank 0] step:8001/10000 train_time:1910174ms step_avg:238.74ms +[2025-07-18 05:20:52] [Rank 0] step:8001/10000 train_time:1910174ms step_avg:238.74ms +[2025-07-18 05:20:57] [Rank 0] step:8021/10000 train_time:1915082ms step_avg:238.76ms +[2025-07-18 05:20:57] 
[Rank 0] step:8021/10000 train_time:1915082ms step_avg:238.76ms +[2025-07-18 05:21:02] [Rank 0] step:8041/10000 train_time:1920097ms step_avg:238.79ms +[2025-07-18 05:21:02] [Rank 0] step:8041/10000 train_time:1920097ms step_avg:238.79ms +[2025-07-18 05:21:07] [Rank 0] step:8061/10000 train_time:1925080ms step_avg:238.81ms +[2025-07-18 05:21:07] [Rank 0] step:8061/10000 train_time:1925080ms step_avg:238.81ms +[2025-07-18 05:21:12] [Rank 0] step:8081/10000 train_time:1930075ms step_avg:238.84ms +[2025-07-18 05:21:12] [Rank 0] step:8081/10000 train_time:1930075ms step_avg:238.84ms +[2025-07-18 05:21:16] [Rank 0] step:8101/10000 train_time:1935072ms step_avg:238.87ms +[2025-07-18 05:21:16] [Rank 0] step:8101/10000 train_time:1935072ms step_avg:238.87ms +[2025-07-18 05:21:21] [Rank 0] step:8121/10000 train_time:1940059ms step_avg:238.89ms +[2025-07-18 05:21:21] [Rank 0] step:8121/10000 train_time:1940059ms step_avg:238.89ms +[2025-07-18 05:21:27] [Rank 0] PRINT: step:8125/10000 val_loss:4.4907 train_time:1941369ms step_avg:238.94ms +[2025-07-18 05:21:27] [Rank 0] PRINT: step:8125/10000 val_loss:4.4907 train_time:1941369ms step_avg:238.94ms +[2025-07-18 05:21:31] [Rank 0] step:8141/10000 train_time:1945060ms step_avg:238.92ms +[2025-07-18 05:21:31] [Rank 0] step:8141/10000 train_time:1945060ms step_avg:238.92ms +[2025-07-18 05:21:36] [Rank 0] step:8161/10000 train_time:1950089ms step_avg:238.95ms +[2025-07-18 05:21:36] [Rank 0] step:8161/10000 train_time:1950089ms step_avg:238.95ms +[2025-07-18 05:21:41] [Rank 0] step:8181/10000 train_time:1955160ms step_avg:238.99ms +[2025-07-18 05:21:41] [Rank 0] step:8181/10000 train_time:1955160ms step_avg:238.99ms +[2025-07-18 05:21:46] [Rank 0] step:8201/10000 train_time:1960202ms step_avg:239.02ms +[2025-07-18 05:21:46] [Rank 0] step:8201/10000 train_time:1960202ms step_avg:239.02ms +[2025-07-18 05:21:51] [Rank 0] step:8221/10000 train_time:1965262ms step_avg:239.05ms +[2025-07-18 05:21:51] [Rank 0] step:8221/10000 
train_time:1965262ms step_avg:239.05ms +[2025-07-18 05:21:56] [Rank 0] step:8241/10000 train_time:1970313ms step_avg:239.09ms +[2025-07-18 05:21:56] [Rank 0] step:8241/10000 train_time:1970313ms step_avg:239.09ms +[2025-07-18 05:22:03] [Rank 0] PRINT: step:8250/10000 val_loss:4.4011 train_time:1972907ms step_avg:239.14ms +[2025-07-18 05:22:03] [Rank 0] PRINT: step:8250/10000 val_loss:4.4011 train_time:1972907ms step_avg:239.14ms +[2025-07-18 05:22:06] [Rank 0] step:8261/10000 train_time:1975380ms step_avg:239.12ms +[2025-07-18 05:22:06] [Rank 0] step:8261/10000 train_time:1975380ms step_avg:239.12ms +[2025-07-18 05:22:11] [Rank 0] step:8281/10000 train_time:1980462ms step_avg:239.16ms +[2025-07-18 05:22:11] [Rank 0] step:8281/10000 train_time:1980462ms step_avg:239.16ms +[2025-07-18 05:22:16] [Rank 0] step:8301/10000 train_time:1985515ms step_avg:239.19ms +[2025-07-18 05:22:16] [Rank 0] step:8301/10000 train_time:1985515ms step_avg:239.19ms +[2025-07-18 05:22:21] [Rank 0] step:8321/10000 train_time:1990579ms step_avg:239.22ms +[2025-07-18 05:22:21] [Rank 0] step:8321/10000 train_time:1990579ms step_avg:239.22ms +[2025-07-18 05:22:26] [Rank 0] step:8341/10000 train_time:1995653ms step_avg:239.26ms +[2025-07-18 05:22:26] [Rank 0] step:8341/10000 train_time:1995653ms step_avg:239.26ms +[2025-07-18 05:22:31] [Rank 0] step:8361/10000 train_time:2000703ms step_avg:239.29ms +[2025-07-18 05:22:31] [Rank 0] step:8361/10000 train_time:2000703ms step_avg:239.29ms +[2025-07-18 05:22:39] [Rank 0] PRINT: step:8375/10000 val_loss:4.4522 train_time:2004552ms step_avg:239.35ms +[2025-07-18 05:22:39] [Rank 0] PRINT: step:8375/10000 val_loss:4.4522 train_time:2004552ms step_avg:239.35ms +[2025-07-18 05:22:41] [Rank 0] step:8381/10000 train_time:2005746ms step_avg:239.32ms +[2025-07-18 05:22:41] [Rank 0] step:8381/10000 train_time:2005746ms step_avg:239.32ms +[2025-07-18 05:22:46] [Rank 0] step:8401/10000 train_time:2010781ms step_avg:239.35ms +[2025-07-18 05:22:46] [Rank 0] 
step:8401/10000 train_time:2010781ms step_avg:239.35ms +[2025-07-18 05:22:51] [Rank 0] step:8421/10000 train_time:2015845ms step_avg:239.38ms +[2025-07-18 05:22:51] [Rank 0] step:8421/10000 train_time:2015845ms step_avg:239.38ms +[2025-07-18 05:22:56] [Rank 0] step:8441/10000 train_time:2020899ms step_avg:239.41ms +[2025-07-18 05:22:56] [Rank 0] step:8441/10000 train_time:2020899ms step_avg:239.41ms +[2025-07-18 05:23:01] [Rank 0] step:8461/10000 train_time:2025969ms step_avg:239.45ms +[2025-07-18 05:23:01] [Rank 0] step:8461/10000 train_time:2025969ms step_avg:239.45ms +[2025-07-18 05:23:06] [Rank 0] step:8481/10000 train_time:2031017ms step_avg:239.48ms +[2025-07-18 05:23:06] [Rank 0] step:8481/10000 train_time:2031017ms step_avg:239.48ms +[2025-07-18 05:23:16] [Rank 0] PRINT: step:8500/10000 val_loss:4.6042 train_time:2036138ms step_avg:239.55ms +[2025-07-18 05:23:16] [Rank 0] PRINT: step:8500/10000 val_loss:4.6042 train_time:2036138ms step_avg:239.55ms +[2025-07-18 05:23:16] [Rank 0] step:8501/10000 train_time:2036154ms step_avg:239.52ms +[2025-07-18 05:23:16] [Rank 0] step:8501/10000 train_time:2036154ms step_avg:239.52ms +[2025-07-18 05:23:21] [Rank 0] step:8521/10000 train_time:2041128ms step_avg:239.54ms +[2025-07-18 05:23:21] [Rank 0] step:8521/10000 train_time:2041128ms step_avg:239.54ms +[2025-07-18 05:23:26] [Rank 0] step:8541/10000 train_time:2046202ms step_avg:239.57ms +[2025-07-18 05:23:26] [Rank 0] step:8541/10000 train_time:2046202ms step_avg:239.57ms +[2025-07-18 05:23:31] [Rank 0] step:8561/10000 train_time:2051248ms step_avg:239.60ms +[2025-07-18 05:23:31] [Rank 0] step:8561/10000 train_time:2051248ms step_avg:239.60ms +[2025-07-18 05:23:36] [Rank 0] step:8581/10000 train_time:2056301ms step_avg:239.63ms +[2025-07-18 05:23:36] [Rank 0] step:8581/10000 train_time:2056301ms step_avg:239.63ms +[2025-07-18 05:23:41] [Rank 0] step:8601/10000 train_time:2061339ms step_avg:239.66ms +[2025-07-18 05:23:41] [Rank 0] step:8601/10000 train_time:2061339ms 
step_avg:239.66ms +[2025-07-18 05:23:46] [Rank 0] step:8621/10000 train_time:2066388ms step_avg:239.69ms +[2025-07-18 05:23:46] [Rank 0] step:8621/10000 train_time:2066388ms step_avg:239.69ms +[2025-07-18 05:23:52] [Rank 0] PRINT: step:8625/10000 val_loss:4.6335 train_time:2067714ms step_avg:239.73ms +[2025-07-18 05:23:52] [Rank 0] PRINT: step:8625/10000 val_loss:4.6335 train_time:2067714ms step_avg:239.73ms +[2025-07-18 05:23:56] [Rank 0] step:8641/10000 train_time:2071462ms step_avg:239.72ms +[2025-07-18 05:23:56] [Rank 0] step:8641/10000 train_time:2071462ms step_avg:239.72ms +[2025-07-18 05:24:01] [Rank 0] step:8661/10000 train_time:2076522ms step_avg:239.76ms +[2025-07-18 05:24:01] [Rank 0] step:8661/10000 train_time:2076522ms step_avg:239.76ms +[2025-07-18 05:24:06] [Rank 0] step:8681/10000 train_time:2081579ms step_avg:239.79ms +[2025-07-18 05:24:06] [Rank 0] step:8681/10000 train_time:2081579ms step_avg:239.79ms +[2025-07-18 05:24:11] [Rank 0] step:8701/10000 train_time:2086644ms step_avg:239.82ms +[2025-07-18 05:24:11] [Rank 0] step:8701/10000 train_time:2086644ms step_avg:239.82ms +[2025-07-18 05:24:16] [Rank 0] step:8721/10000 train_time:2091707ms step_avg:239.85ms +[2025-07-18 05:24:16] [Rank 0] step:8721/10000 train_time:2091707ms step_avg:239.85ms +[2025-07-18 05:24:22] [Rank 0] step:8741/10000 train_time:2096764ms step_avg:239.88ms +[2025-07-18 05:24:22] [Rank 0] step:8741/10000 train_time:2096764ms step_avg:239.88ms +[2025-07-18 05:24:29] [Rank 0] PRINT: step:8750/10000 val_loss:4.6319 train_time:2099347ms step_avg:239.93ms +[2025-07-18 05:24:29] [Rank 0] PRINT: step:8750/10000 val_loss:4.6319 train_time:2099347ms step_avg:239.93ms +[2025-07-18 05:24:31] [Rank 0] step:8761/10000 train_time:2101812ms step_avg:239.91ms +[2025-07-18 05:24:31] [Rank 0] step:8761/10000 train_time:2101812ms step_avg:239.91ms +[2025-07-18 05:24:36] [Rank 0] step:8781/10000 train_time:2106862ms step_avg:239.93ms +[2025-07-18 05:24:36] [Rank 0] step:8781/10000 
train_time:2106862ms step_avg:239.93ms +[2025-07-18 05:24:42] [Rank 0] step:8801/10000 train_time:2111929ms step_avg:239.96ms +[2025-07-18 05:24:42] [Rank 0] step:8801/10000 train_time:2111929ms step_avg:239.96ms +[2025-07-18 05:24:47] [Rank 0] step:8821/10000 train_time:2116985ms step_avg:239.99ms +[2025-07-18 05:24:47] [Rank 0] step:8821/10000 train_time:2116985ms step_avg:239.99ms +[2025-07-18 05:24:52] [Rank 0] step:8841/10000 train_time:2122059ms step_avg:240.02ms +[2025-07-18 05:24:52] [Rank 0] step:8841/10000 train_time:2122059ms step_avg:240.02ms +[2025-07-18 05:24:57] [Rank 0] step:8861/10000 train_time:2127127ms step_avg:240.05ms +[2025-07-18 05:24:57] [Rank 0] step:8861/10000 train_time:2127127ms step_avg:240.05ms +[2025-07-18 05:25:05] [Rank 0] PRINT: step:8875/10000 val_loss:4.6559 train_time:2130979ms step_avg:240.11ms +[2025-07-18 05:25:05] [Rank 0] PRINT: step:8875/10000 val_loss:4.6559 train_time:2130979ms step_avg:240.11ms +[2025-07-18 05:25:06] [Rank 0] step:8881/10000 train_time:2132179ms step_avg:240.08ms +[2025-07-18 05:25:06] [Rank 0] step:8881/10000 train_time:2132179ms step_avg:240.08ms +[2025-07-18 05:25:11] [Rank 0] step:8901/10000 train_time:2137231ms step_avg:240.11ms +[2025-07-18 05:25:11] [Rank 0] step:8901/10000 train_time:2137231ms step_avg:240.11ms +[2025-07-18 05:25:16] [Rank 0] step:8921/10000 train_time:2142290ms step_avg:240.14ms +[2025-07-18 05:25:16] [Rank 0] step:8921/10000 train_time:2142290ms step_avg:240.14ms +[2025-07-18 05:25:21] [Rank 0] step:8941/10000 train_time:2147353ms step_avg:240.17ms +[2025-07-18 05:25:21] [Rank 0] step:8941/10000 train_time:2147353ms step_avg:240.17ms +[2025-07-18 05:25:26] [Rank 0] step:8961/10000 train_time:2152418ms step_avg:240.20ms +[2025-07-18 05:25:26] [Rank 0] step:8961/10000 train_time:2152418ms step_avg:240.20ms +[2025-07-18 05:25:31] [Rank 0] step:8981/10000 train_time:2157485ms step_avg:240.23ms +[2025-07-18 05:25:31] [Rank 0] step:8981/10000 train_time:2157485ms step_avg:240.23ms 
+[2025-07-18 05:25:41] [Rank 0] PRINT: step:9000/10000 val_loss:4.6199 train_time:2162604ms step_avg:240.29ms +[2025-07-18 05:25:41] [Rank 0] PRINT: step:9000/10000 val_loss:4.6199 train_time:2162604ms step_avg:240.29ms +[2025-07-18 05:25:41] [Rank 0] step:9001/10000 train_time:2162620ms step_avg:240.26ms +[2025-07-18 05:25:41] [Rank 0] step:9001/10000 train_time:2162620ms step_avg:240.26ms +[2025-07-18 05:25:46] [Rank 0] step:9021/10000 train_time:2167596ms step_avg:240.28ms +[2025-07-18 05:25:46] [Rank 0] step:9021/10000 train_time:2167596ms step_avg:240.28ms +[2025-07-18 05:25:51] [Rank 0] step:9041/10000 train_time:2172681ms step_avg:240.31ms +[2025-07-18 05:25:51] [Rank 0] step:9041/10000 train_time:2172681ms step_avg:240.31ms +[2025-07-18 05:25:57] [Rank 0] step:9061/10000 train_time:2177741ms step_avg:240.34ms +[2025-07-18 05:25:57] [Rank 0] step:9061/10000 train_time:2177741ms step_avg:240.34ms +[2025-07-18 05:26:02] [Rank 0] step:9081/10000 train_time:2182825ms step_avg:240.37ms +[2025-07-18 05:26:02] [Rank 0] step:9081/10000 train_time:2182825ms step_avg:240.37ms +[2025-07-18 05:26:07] [Rank 0] step:9101/10000 train_time:2187900ms step_avg:240.40ms +[2025-07-18 05:26:07] [Rank 0] step:9101/10000 train_time:2187900ms step_avg:240.40ms +[2025-07-18 05:26:12] [Rank 0] step:9121/10000 train_time:2192974ms step_avg:240.43ms +[2025-07-18 05:26:12] [Rank 0] step:9121/10000 train_time:2192974ms step_avg:240.43ms +[2025-07-18 05:26:18] [Rank 0] PRINT: step:9125/10000 val_loss:4.6768 train_time:2194299ms step_avg:240.47ms +[2025-07-18 05:26:18] [Rank 0] PRINT: step:9125/10000 val_loss:4.6768 train_time:2194299ms step_avg:240.47ms +[2025-07-18 05:26:22] [Rank 0] step:9141/10000 train_time:2198022ms step_avg:240.46ms +[2025-07-18 05:26:22] [Rank 0] step:9141/10000 train_time:2198022ms step_avg:240.46ms +[2025-07-18 05:26:27] [Rank 0] step:9161/10000 train_time:2203119ms step_avg:240.49ms +[2025-07-18 05:26:27] [Rank 0] step:9161/10000 train_time:2203119ms 
step_avg:240.49ms +[2025-07-18 05:26:32] [Rank 0] step:9181/10000 train_time:2208183ms step_avg:240.52ms +[2025-07-18 05:26:32] [Rank 0] step:9181/10000 train_time:2208183ms step_avg:240.52ms +[2025-07-18 05:26:37] [Rank 0] step:9201/10000 train_time:2213244ms step_avg:240.54ms +[2025-07-18 05:26:37] [Rank 0] step:9201/10000 train_time:2213244ms step_avg:240.54ms +[2025-07-18 05:26:42] [Rank 0] step:9221/10000 train_time:2218335ms step_avg:240.57ms +[2025-07-18 05:26:42] [Rank 0] step:9221/10000 train_time:2218335ms step_avg:240.57ms +[2025-07-18 05:26:47] [Rank 0] step:9241/10000 train_time:2223409ms step_avg:240.60ms +[2025-07-18 05:26:47] [Rank 0] step:9241/10000 train_time:2223409ms step_avg:240.60ms +[2025-07-18 05:26:54] [Rank 0] PRINT: step:9250/10000 val_loss:4.6663 train_time:2226002ms step_avg:240.65ms +[2025-07-18 05:26:54] [Rank 0] PRINT: step:9250/10000 val_loss:4.6663 train_time:2226002ms step_avg:240.65ms +[2025-07-18 05:26:57] [Rank 0] step:9261/10000 train_time:2228481ms step_avg:240.63ms +[2025-07-18 05:26:57] [Rank 0] step:9261/10000 train_time:2228481ms step_avg:240.63ms +[2025-07-18 05:27:02] [Rank 0] step:9281/10000 train_time:2233526ms step_avg:240.66ms +[2025-07-18 05:27:02] [Rank 0] step:9281/10000 train_time:2233526ms step_avg:240.66ms +[2025-07-18 05:27:07] [Rank 0] step:9301/10000 train_time:2238592ms step_avg:240.68ms +[2025-07-18 05:27:07] [Rank 0] step:9301/10000 train_time:2238592ms step_avg:240.68ms +[2025-07-18 05:27:12] [Rank 0] step:9321/10000 train_time:2243683ms step_avg:240.71ms +[2025-07-18 05:27:12] [Rank 0] step:9321/10000 train_time:2243683ms step_avg:240.71ms +[2025-07-18 05:27:17] [Rank 0] step:9341/10000 train_time:2248748ms step_avg:240.74ms +[2025-07-18 05:27:17] [Rank 0] step:9341/10000 train_time:2248748ms step_avg:240.74ms +[2025-07-18 05:27:22] [Rank 0] step:9361/10000 train_time:2253816ms step_avg:240.77ms +[2025-07-18 05:27:22] [Rank 0] step:9361/10000 train_time:2253816ms step_avg:240.77ms +[2025-07-18 
05:27:31] [Rank 0] PRINT: step:9375/10000 val_loss:4.6809 train_time:2257679ms step_avg:240.82ms +[2025-07-18 05:27:31] [Rank 0] PRINT: step:9375/10000 val_loss:4.6809 train_time:2257679ms step_avg:240.82ms +[2025-07-18 05:27:32] [Rank 0] step:9381/10000 train_time:2258883ms step_avg:240.79ms +[2025-07-18 05:27:32] [Rank 0] step:9381/10000 train_time:2258883ms step_avg:240.79ms +[2025-07-18 05:27:37] [Rank 0] step:9401/10000 train_time:2263927ms step_avg:240.82ms +[2025-07-18 05:27:37] [Rank 0] step:9401/10000 train_time:2263927ms step_avg:240.82ms +[2025-07-18 05:27:42] [Rank 0] step:9421/10000 train_time:2268985ms step_avg:240.84ms +[2025-07-18 05:27:42] [Rank 0] step:9421/10000 train_time:2268985ms step_avg:240.84ms +[2025-07-18 05:27:47] [Rank 0] step:9441/10000 train_time:2274053ms step_avg:240.87ms +[2025-07-18 05:27:47] [Rank 0] step:9441/10000 train_time:2274053ms step_avg:240.87ms +[2025-07-18 05:27:52] [Rank 0] step:9461/10000 train_time:2279127ms step_avg:240.90ms +[2025-07-18 05:27:52] [Rank 0] step:9461/10000 train_time:2279127ms step_avg:240.90ms +[2025-07-18 05:27:57] [Rank 0] step:9481/10000 train_time:2284204ms step_avg:240.92ms +[2025-07-18 05:27:57] [Rank 0] step:9481/10000 train_time:2284204ms step_avg:240.92ms +[2025-07-18 05:28:07] [Rank 0] PRINT: step:9500/10000 val_loss:4.6452 train_time:2289359ms step_avg:240.99ms +[2025-07-18 05:28:07] [Rank 0] PRINT: step:9500/10000 val_loss:4.6452 train_time:2289359ms step_avg:240.99ms +[2025-07-18 05:28:07] [Rank 0] step:9501/10000 train_time:2289379ms step_avg:240.96ms +[2025-07-18 05:28:07] [Rank 0] step:9501/10000 train_time:2289379ms step_avg:240.96ms +[2025-07-18 05:28:12] [Rank 0] step:9521/10000 train_time:2294364ms step_avg:240.98ms +[2025-07-18 05:28:12] [Rank 0] step:9521/10000 train_time:2294364ms step_avg:240.98ms +[2025-07-18 05:28:17] [Rank 0] step:9541/10000 train_time:2299447ms step_avg:241.01ms +[2025-07-18 05:28:17] [Rank 0] step:9541/10000 train_time:2299447ms step_avg:241.01ms 
+[2025-07-18 05:28:23] [Rank 0] step:9561/10000 train_time:2304500ms step_avg:241.03ms +[2025-07-18 05:28:23] [Rank 0] step:9561/10000 train_time:2304500ms step_avg:241.03ms +[2025-07-18 05:28:28] [Rank 0] step:9581/10000 train_time:2309559ms step_avg:241.06ms +[2025-07-18 05:28:28] [Rank 0] step:9581/10000 train_time:2309559ms step_avg:241.06ms +[2025-07-18 05:28:33] [Rank 0] step:9601/10000 train_time:2314617ms step_avg:241.08ms +[2025-07-18 05:28:33] [Rank 0] step:9601/10000 train_time:2314617ms step_avg:241.08ms +[2025-07-18 05:28:38] [Rank 0] step:9621/10000 train_time:2319710ms step_avg:241.11ms +[2025-07-18 05:28:38] [Rank 0] step:9621/10000 train_time:2319710ms step_avg:241.11ms +[2025-07-18 05:28:44] [Rank 0] PRINT: step:9625/10000 val_loss:4.6839 train_time:2321034ms step_avg:241.15ms +[2025-07-18 05:28:44] [Rank 0] PRINT: step:9625/10000 val_loss:4.6839 train_time:2321034ms step_avg:241.15ms +[2025-07-18 05:28:48] [Rank 0] step:9641/10000 train_time:2324798ms step_avg:241.14ms +[2025-07-18 05:28:48] [Rank 0] step:9641/10000 train_time:2324798ms step_avg:241.14ms +[2025-07-18 05:28:53] [Rank 0] step:9661/10000 train_time:2329933ms step_avg:241.17ms +[2025-07-18 05:28:53] [Rank 0] step:9661/10000 train_time:2329933ms step_avg:241.17ms +[2025-07-18 05:28:58] [Rank 0] step:9681/10000 train_time:2335064ms step_avg:241.20ms +[2025-07-18 05:28:58] [Rank 0] step:9681/10000 train_time:2335064ms step_avg:241.20ms +[2025-07-18 05:29:03] [Rank 0] step:9701/10000 train_time:2340193ms step_avg:241.23ms +[2025-07-18 05:29:03] [Rank 0] step:9701/10000 train_time:2340193ms step_avg:241.23ms +[2025-07-18 05:29:08] [Rank 0] step:9721/10000 train_time:2345308ms step_avg:241.26ms +[2025-07-18 05:29:08] [Rank 0] step:9721/10000 train_time:2345308ms step_avg:241.26ms +[2025-07-18 05:29:13] [Rank 0] step:9741/10000 train_time:2350439ms step_avg:241.29ms +[2025-07-18 05:29:13] [Rank 0] step:9741/10000 train_time:2350439ms step_avg:241.29ms +[2025-07-18 05:29:20] [Rank 0] PRINT: 
step:9750/10000 val_loss:4.6905 train_time:2353060ms step_avg:241.34ms +[2025-07-18 05:29:20] [Rank 0] PRINT: step:9750/10000 val_loss:4.6905 train_time:2353060ms step_avg:241.34ms +[2025-07-18 05:29:23] [Rank 0] step:9761/10000 train_time:2355549ms step_avg:241.32ms +[2025-07-18 05:29:23] [Rank 0] step:9761/10000 train_time:2355549ms step_avg:241.32ms +[2025-07-18 05:29:28] [Rank 0] step:9781/10000 train_time:2360667ms step_avg:241.35ms +[2025-07-18 05:29:28] [Rank 0] step:9781/10000 train_time:2360667ms step_avg:241.35ms +[2025-07-18 05:29:33] [Rank 0] step:9801/10000 train_time:2365767ms step_avg:241.38ms +[2025-07-18 05:29:33] [Rank 0] step:9801/10000 train_time:2365767ms step_avg:241.38ms +[2025-07-18 05:29:39] [Rank 0] step:9821/10000 train_time:2370868ms step_avg:241.41ms +[2025-07-18 05:29:39] [Rank 0] step:9821/10000 train_time:2370868ms step_avg:241.41ms +[2025-07-18 05:29:44] [Rank 0] step:9841/10000 train_time:2375964ms step_avg:241.44ms +[2025-07-18 05:29:44] [Rank 0] step:9841/10000 train_time:2375964ms step_avg:241.44ms +[2025-07-18 05:29:49] [Rank 0] step:9861/10000 train_time:2381063ms step_avg:241.46ms +[2025-07-18 05:29:49] [Rank 0] step:9861/10000 train_time:2381063ms step_avg:241.46ms +[2025-07-18 05:29:57] [Rank 0] PRINT: step:9875/10000 val_loss:4.6865 train_time:2384944ms step_avg:241.51ms +[2025-07-18 05:29:57] [Rank 0] PRINT: step:9875/10000 val_loss:4.6865 train_time:2384944ms step_avg:241.51ms +[2025-07-18 05:29:59] [Rank 0] step:9881/10000 train_time:2386162ms step_avg:241.49ms +[2025-07-18 05:29:59] [Rank 0] step:9881/10000 train_time:2386162ms step_avg:241.49ms +[2025-07-18 05:30:04] [Rank 0] step:9901/10000 train_time:2391259ms step_avg:241.52ms +[2025-07-18 05:30:04] [Rank 0] step:9901/10000 train_time:2391259ms step_avg:241.52ms +[2025-07-18 05:30:09] [Rank 0] step:9921/10000 train_time:2396373ms step_avg:241.55ms +[2025-07-18 05:30:09] [Rank 0] step:9921/10000 train_time:2396373ms step_avg:241.55ms +[2025-07-18 05:30:14] [Rank 0] 
step:9941/10000 train_time:2401512ms step_avg:241.58ms +[2025-07-18 05:30:14] [Rank 0] step:9941/10000 train_time:2401512ms step_avg:241.58ms +[2025-07-18 05:30:19] [Rank 0] step:9961/10000 train_time:2406634ms step_avg:241.61ms +[2025-07-18 05:30:19] [Rank 0] step:9961/10000 train_time:2406634ms step_avg:241.61ms +[2025-07-18 05:30:24] [Rank 0] step:9981/10000 train_time:2411772ms step_avg:241.64ms +[2025-07-18 05:30:24] [Rank 0] step:9981/10000 train_time:2411772ms step_avg:241.64ms +[2025-07-18 05:30:29] [Rank 0] step:10000/10000 train_time:2416610ms step_avg:241.66ms +[2025-07-18 05:30:29] [Rank 0] step:10000/10000 train_time:2416610ms step_avg:241.66ms +[2025-07-18 05:30:33] [Rank 0] PRINT: step:10000/10000 val_loss:4.7147 train_time:2416933ms step_avg:241.69ms +[2025-07-18 05:30:33] [Rank 0] PRINT: step:10000/10000 val_loss:4.7147 train_time:2416933ms step_avg:241.69ms +[2025-07-18 05:30:33] [Rank 0] PRINT: --- Training Finished: Fri Jul 18 05:30:33 2025 --- +[2025-07-18 05:30:33] [Rank 0] PRINT: --- Training Finished: Fri Jul 18 05:30:33 2025 --- +[2025-07-18 05:30:33] [Rank 0] PRINT: Peak memory allocated: 30964 MiB reserved: 34336 MiB +[2025-07-18 05:30:33] [Rank 0] PRINT: Peak memory allocated: 30964 MiB reserved: 34336 MiB