Merge Details
Merge Method
This model was merged using the Multi-SLERP merge method.
Models Merged
The following models were included in the merge:

* yamatazen/EtherealAurora-12B-Lorablated
* Retreatcost/Shisa-K-sakurization
* ./impish-ink-long
* Retreatcost/Lorablated-w2bb-psy-della
* Retreatcost/Forgotten-directive-Neon-stock
* Retreatcost/Irix-mpf-stock
* ./retokenized_NB
Configuration
The following YAML configuration was used to produce this model:
# mergekit configuration: blends seven source models with multislerp.
# Each `weight` list is a per-layer schedule (one entry per slot; the ramps
# 0.030 -> 0.256 -> 0.744 -> 0.968 -> 1.000 cross-fade adjacent models over
# depth) — presumably one entry per transformer layer; confirm against the
# base architecture's layer count.
merge_method: multislerp
models:
  - model: yamatazen/EtherealAurora-12B-Lorablated
    parameters:
      weight: [1.000, 1.000, 1.000, 1.000, 0.968, 0.744, 0.256, 0.030, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.0225, 0.1275, 0.15]
  - model: Retreatcost/Shisa-K-sakurization
    parameters:
      weight: [0.000, 0.000, 0.000, 0.030, 0.256, 0.744, 0.968, 1.000, 1.000, 1.000, 1.000, 0.968, 0.744, 0.256, 0.030, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.015, 0.085, 0.10]
  # local checkpoint — must exist relative to the working directory at merge time
  - model: ./impish-ink-long
    parameters:
      weight: [0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.030, 0.256, 0.744, 0.968, 1.000, 1.000, 1.000, 0.968, 0.744, 0.256, 0.030, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.0075, 0.0425, 0.05]
  - model: Retreatcost/Lorablated-w2bb-psy-della
    parameters:
      weight: [0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.030, 0.256, 0.744, 0.968, 1.000, 1.000, 1.000, 0.968, 0.744, 0.256, 0.030, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000]
  - model: Retreatcost/Forgotten-directive-Neon-stock
    parameters:
      weight: [0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.030, 0.256, 0.744, 0.968, 1.000, 1.000, 1.000, 0.968, 0.744, 0.256, 0.030, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000]
  - model: Retreatcost/Irix-mpf-stock
    parameters:
      weight: [0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.030, 0.256, 0.744, 0.968, 1.000, 1.000, 1.000, 1.000, 1.000, 0.850, 0.150, 0.000]
  # local checkpoint — must exist relative to the working directory at merge time
  - model: ./retokenized_NB
    parameters:
      weight: [0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.105, 0.595, 0.700]
dtype: bfloat16
parameters:
  # renormalize per-layer weights so each slot's contributions sum to 1
  normalize: true
tokenizer_source: Retreatcost/KansenSakura-Radiance-RP-12b