{
    "dataset": {
        "repo_id": "godnpeter/aopoli-lv-libero_combined_no_noops_lerobot_v21",
        "use_all_local_repos": false,
        "root": null,
        "episodes": null,
        "image_transforms": {
            "enable": false,
            "max_num_transforms": 3,
            "random_order": false,
            "tfs": {
                "brightness": {
                    "weight": 1.0,
                    "type": "ColorJitter",
                    "kwargs": {
                        "brightness": [
                            0.8,
                            1.2
                        ]
                    }
                },
                "contrast": {
                    "weight": 1.0,
                    "type": "ColorJitter",
                    "kwargs": {
                        "contrast": [
                            0.8,
                            1.2
                        ]
                    }
                },
                "saturation": {
                    "weight": 1.0,
                    "type": "ColorJitter",
                    "kwargs": {
                        "saturation": [
                            0.5,
                            1.5
                        ]
                    }
                },
                "hue": {
                    "weight": 1.0,
                    "type": "ColorJitter",
                    "kwargs": {
                        "hue": [
                            -0.05,
                            0.05
                        ]
                    }
                },
                "sharpness": {
                    "weight": 1.0,
                    "type": "SharpnessJitter",
                    "kwargs": {
                        "sharpness": [
                            0.5,
                            1.5
                        ]
                    }
                },
                "shift": {
                    "weight": 0.0,
                    "type": "RandomShift",
                    "kwargs": {
                        "max_shift": 8,
                        "padding_mode": "edge"
                    }
                }
            }
        },
        "revision": null,
        "use_imagenet_stats": true,
        "video_backend": "torchcodec",
        "only_robot_type": "so100",
        "exclude_tasks": null,
        "report_task_stats": true
    },
    "env": null,
    "policy": {
        "type": "smolvla",
        "n_obs_steps": 1,
        "normalization_mapping": {
            "VISUAL": "IDENTITY",
            "STATE": "MEAN_STD",
            "ACTION": "MEAN_STD"
        },
        "input_features": {
            "observation.images.wrist_image": {
                "type": "VISUAL",
                "shape": [
                    256,
                    256,
                    3
                ]
            },
            "observation.images.image": {
                "type": "VISUAL",
                "shape": [
                    256,
                    256,
                    3
                ]
            },
            "observation.state": {
                "type": "STATE",
                "shape": [
                    8
                ]
            }
        },
        "output_features": {
            "action": {
                "type": "ACTION",
                "shape": [
                    7
                ]
            }
        },
        "device": "cuda",
        "use_amp": false,
        "use_peft": false,
        "push_to_hub": true,
        "repo_id": "combined_frozen_chunk8_yesproprio_fullvlm_1003",
        "private": null,
        "tags": null,
        "license": null,
        "use_proprio": true,
        "chunk_size": 8,
        "n_action_steps": 8,
        "normalize_visual": "identity",
        "normalize_state": "mean_std",
        "normalize_action": "mean_std",
        "max_state_dim": 32,
        "max_action_dim": 32,
        "resize_imgs_with_padding": [
            512,
            512
        ],
        "empty_cameras": 0,
        "adapt_to_pi_aloha": false,
        "use_delta_joint_actions_aloha": false,
        "tokenizer_max_length": 48,
        "num_steps": 10,
        "use_cache": true,
        "freeze_vision_encoder": true,
        "train_expert_only": true,
        "train_state_proj": true,
        "optimizer_lr": 0.0001,
        "optimizer_betas": [
            0.9,
            0.95
        ],
        "optimizer_eps": 1e-08,
        "optimizer_weight_decay": 1e-10,
        "optimizer_grad_clip_norm": 10,
        "scheduler_warmup_steps": 1000,
        "scheduler_decay_steps": 30000,
        "scheduler_decay_lr": 2.5e-06,
        "vlm_model_name": "HuggingFaceTB/SmolVLM2-500M-Video-Instruct",
        "load_vlm_weights": true,
        "vlm_model_dtype": "bfloat16",
        "add_image_special_tokens": false,
        "attention_mode": "cross_attn",
        "prefix_length": -1,
        "pad_language_to": "longest",
        "num_expert_layers": -1,
        "num_vlm_layers": -1,
        "self_attn_every_n_layers": 2,
        "expert_width_multiplier": 0.75,
        "min_period": 0.004,
        "max_period": 4.0
    },
    "output_dir": "outputs/combined_frozen_fullvlm_1003/combined_frozen_chunk8_yesproprio_fullvlm_1003/2025-10-04/09-46-27",
    "exp_name": "combined_frozen_chunk8_yesproprio_fullvlm_1003/2025-10-04/09-46-27",
    "group_name": "combined_frozen_fullvlm_1003",
    "resume": false,
    "seed": 1000,
    "num_workers": 8,
    "batch_size": 64,
    "update_steps": 100000,
    "eval_freq": 20000,
    "log_freq": 200,
    "save_checkpoint": true,
    "save_freq": 10000,
    "use_policy_training_preset": true,
    "optimizer": {
        "type": "adamw",
        "lr": 0.0001,
        "weight_decay": 1e-10,
        "grad_clip_norm": 10,
        "betas": [
            0.9,
            0.95
        ],
        "eps": 1e-08
    },
    "scheduler": {
        "type": "cosine_decay_with_warmup",
        "num_warmup_steps": 1000,
        "num_decay_steps": 30000,
        "peak_lr": 0.0001,
        "decay_lr": 2.5e-06
    },
    "eval": {
        "n_episodes": 50,
        "batch_size": 50,
        "use_async_envs": false
    },
    "log_with": "wandb",
    "wandb": {
        "enable": false,
        "disable_artifact": false,
        "project": "lerobot",
        "entity": null,
        "notes": null,
        "run_id": "qonzm53y",
        "mode": null
    },
    "gradient_accumulation_steps": 1,
    "use_peft": false,
    "autocast_adapter_dtype": true,
    "peft": {
        "target_modules": null,
        "modules_to_save": null,
        "method_type": "LORA",
        "init_type": null,
        "r": 64,
        "lora_alpha": 128,
        "fullfinetune_vlm_patch_embeddings": false,
        "fullfinetune_vlm_vision_model": false
    }
}