Add llama_finetune_tokenized_r16_alpha=32_dropout=0.05_lr0.0002_data_size1000_max_steps=100_seed=123 LoRA model 4a878c3 verified mciccone committed on Jun 10, 2025
Add llama_finetune_tokenized_r16_alpha=32_dropout=0.05_lr0.0003_data_size1000_max_steps=100_seed=123 LoRA model 0fdc3ad verified mciccone committed on Jun 10, 2025
Add llama_finetune_tokenized_r16_alpha=32_dropout=0.05_lr0.0001_data_size1000_max_steps=500_seed=123 LoRA model 55c5c2f verified mciccone committed on Jun 10, 2025
Add llama_finetune_tokenized_r16_alpha=32_dropout=0.05_lr5e-05_data_size1000_max_steps=500_seed=123 LoRA model b6ff8be verified mciccone committed on Jun 10, 2025
Add llama_finetune_tokenized_r16_alpha=32_dropout=0.05_lr0.0001_data_size1000_max_steps=100_seed=123 LoRA model 5369101 verified mciccone committed on Jun 10, 2025
Add llama_finetune_tokenized_r16_alpha=32_dropout=0.05_lr0.0003_data_size1000_max_steps=500_seed=123 LoRA model ffc024e verified mciccone committed on Jun 10, 2025
Add llama_finetune_tokenized_r16_alpha=32_dropout=0.05_lr0.0002_data_size1000_max_steps=500_seed=123 LoRA model f904fa3 verified mciccone committed on Jun 10, 2025
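Each adapter name above encodes the LoRA and training hyperparameters of one sweep run: rank r=16, alpha=32, dropout 0.05, a learning rate between 5e-05 and 0.0003, 1000 training examples, 100 or 500 optimizer steps, and seed 123. The snippet below is a minimal sketch of how such a run could be configured and how one of the committed adapters could be loaded with Hugging Face PEFT; the base Llama checkpoint and repository path are assumptions, since neither is stated in the commit log.

```python
# Sketch only: base model and repo path are placeholders, not taken from the commits.
from transformers import AutoModelForCausalLM, TrainingArguments
from peft import LoraConfig, PeftModel, get_peft_model

BASE_MODEL = "meta-llama/Llama-2-7b-hf"  # assumption: actual base Llama checkpoint not stated
ADAPTER_NAME = (
    "llama_finetune_tokenized_r16_alpha=32_dropout=0.05_"
    "lr0.0002_data_size1000_max_steps=100_seed=123"
)

base = AutoModelForCausalLM.from_pretrained(BASE_MODEL)

# LoRA settings mirrored from the adapter names: r=16, alpha=32, dropout=0.05.
lora_config = LoraConfig(
    r=16,
    lora_alpha=32,
    lora_dropout=0.05,
    task_type="CAUSAL_LM",
)
model = get_peft_model(base, lora_config)

# Training settings mirrored from the adapter names: lr, max_steps, seed.
training_args = TrainingArguments(
    output_dir=ADAPTER_NAME,
    learning_rate=2e-4,
    max_steps=100,
    seed=123,
)

# Loading one committed adapter back onto the base model (repo path is hypothetical).
finetuned = PeftModel.from_pretrained(base, f"path/to/repo/{ADAPTER_NAME}")
```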