{
  "language": "Python",
  "license": "apache-2.0",
  "library_name": "transformers",
  "tags": [
    "tinyllama",
    "lora",
    "peft",
    "code",
    "python",
    "fine-tuning",
    "mps"
  ],
  "model_type": "causal-lm",
  "pipeline_tag": "text-generation",
  "base_model": "TinyLlama/TinyLlama-1.1B-Chat-v1.0",
  "datasets": [
    "codeparrot/codeparrot-clean-valid"
  ],
  "trained_on": "Apple M3 Pro (MPS)",
  "adapter_type": "lora",
  "num_train_samples": 1000,
  "num_epochs": 1,
  "gradient_accumulation_steps": 4,
  "per_device_batch_size": 1,
"prompt_format": "<|python|>\\n{code}", | |
"inference_prompt": "<|python|>\\ndef fibonacci(n):", | |
"example_output": "def fibonacci(n):\n if n <= 1:\n return n\n return fibonacci(n-1) + fibonacci(n-2)" | |
} | |
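
A minimal inference sketch based on the metadata above: it loads the TinyLlama base model, attaches the LoRA adapter with `peft`, and generates from the documented `<|python|>` prompt. The adapter repo id (`your-username/tinyllama-python-lora`) is a hypothetical placeholder, not a published model.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base_id = "TinyLlama/TinyLlama-1.1B-Chat-v1.0"
adapter_id = "your-username/tinyllama-python-lora"  # hypothetical adapter repo id

# Use the MPS backend when available (Apple silicon), otherwise fall back to CPU.
device = "mps" if torch.backends.mps.is_available() else "cpu"

tokenizer = AutoTokenizer.from_pretrained(base_id)
base_model = AutoModelForCausalLM.from_pretrained(base_id)
model = PeftModel.from_pretrained(base_model, adapter_id).to(device)
model.eval()

# Prompt follows the "<|python|>\n{code}" format from the config.
prompt = "<|python|>\ndef fibonacci(n):"
inputs = tokenizer(prompt, return_tensors="pt").to(device)

with torch.no_grad():
    output = model.generate(**inputs, max_new_tokens=128, do_sample=False)

print(tokenizer.decode(output[0], skip_special_tokens=True))
```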
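
The training setup recorded above (1,000 samples, 1 epoch, batch size 1 with 4 gradient-accumulation steps, LoRA adapter, MPS device) could be reproduced roughly as sketched below. The LoRA hyperparameters (rank, alpha, dropout, target modules), the dataset split and column names, and the 512-token truncation are assumptions that are not recorded in the metadata; recent versions of `Trainer` pick up the MPS device automatically on Apple silicon.

```python
from datasets import load_dataset
from transformers import (AutoModelForCausalLM, AutoTokenizer,
                          DataCollatorForLanguageModeling, Trainer, TrainingArguments)
from peft import LoraConfig, get_peft_model

base_id = "TinyLlama/TinyLlama-1.1B-Chat-v1.0"
tokenizer = AutoTokenizer.from_pretrained(base_id)
tokenizer.pad_token = tokenizer.eos_token

model = AutoModelForCausalLM.from_pretrained(base_id)

# LoRA settings here are assumptions, not values recorded in the config above.
lora_config = LoraConfig(
    r=8,
    lora_alpha=16,
    lora_dropout=0.05,
    target_modules=["q_proj", "v_proj"],
    task_type="CAUSAL_LM",
)
model = get_peft_model(model, lora_config)

# 1,000 samples, formatted with the "<|python|>\n{code}" training prompt.
# Split and column names ("train", "content") are assumed for this dataset.
dataset = load_dataset("codeparrot/codeparrot-clean-valid", split="train").select(range(1000))

def tokenize(example):
    text = f"<|python|>\n{example['content']}"
    return tokenizer(text, truncation=True, max_length=512)

tokenized = dataset.map(tokenize, remove_columns=dataset.column_names)

args = TrainingArguments(
    output_dir="tinyllama-python-lora",
    per_device_train_batch_size=1,
    gradient_accumulation_steps=4,
    num_train_epochs=1,
    logging_steps=10,
)

trainer = Trainer(
    model=model,
    args=args,
    train_dataset=tokenized,
    data_collator=DataCollatorForLanguageModeling(tokenizer, mlm=False),
)
trainer.train()
```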