Lunar-Abyss-12B / mergekit_config.yml
models:
  - model: Vortex5/LunaMaid-12B
    parameters:
      weight:
        # Per-module weights; each list is a layer-wise gradient interpolated
        # across the model's layers. The entry without a filter is the default
        # for all remaining parameters.
        - filter: self_attn
          value: [0.35, 0.4, 0.6, 0.8, 1.0, 0.9, 0.6, 0.3]
        - filter: mlp
          value: [0.20, 0.25, 0.35, 0.45, 0.45, 0.40, 0.30, 0.20]
        - value: [0.25, 0.3, 0.35, 0.4, 0.4, 0.35, 0.3, 0.25]
      density: 0.55   # fraction of delta weights retained before merging
      epsilon: 0.3    # how much drop probability varies with parameter magnitude (DELLA)
  - model: Vortex5/Abyssal-Seraph-12B
    parameters:
      weight:
        - filter: mlp
          value: [0.3, 0.5, 0.8, 1.0, 1.0, 0.9, 0.7, 0.4]
        - value: [0.2, 0.3, 0.4, 0.5, 0.5, 0.4, 0.3, 0.2]
      density: 0.5
      epsilon: 0.4
merge_method: della                        # DELLA: magnitude-sampled pruning and rescaling of deltas
base_model: Vortex5/MegaMoon-Karcher-12B   # reference model the deltas are taken against
parameters:
  lambda: 1.0        # scaling factor applied to the merged deltas
  normalize: true    # normalize weights so per-parameter contributions sum to 1
dtype: bfloat16
tokenizer:
  source: Vortex5/Abyssal-Seraph-12B       # merged model inherits this tokenizer
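
A minimal sketch of how a config like this could be applied with mergekit's Python entry points (MergeConfiguration, MergeOptions, run_merge). The file and output paths and the MergeOptions values below are illustrative placeholders, not part of the original config; the usual alternative is the mergekit-yaml command-line tool.

# Sketch: load mergekit_config.yml and run the DELLA merge it describes.
# Paths and options are placeholders chosen for illustration.
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_YML = "mergekit_config.yml"   # this file
OUTPUT_PATH = "./Lunar-Abyss-12B"    # directory to write the merged model to

with open(CONFIG_YML, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use GPU if one is present
        copy_tokenizer=True,             # honor the tokenizer source setting
        lazy_unpickle=True,              # reduce peak memory while loading shards
    ),
)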