merge_method: della_linear
dtype: bfloat16
parameters:
  epsilon: 0.1
  lambda: 1.0
  normalize: false
base_model: arcee-ai/Llama-3.1-SuperNova-Lite+grimjim/Llama-3-Instruct-abliteration-LoRA-8B
models:
  - model: hf-100/Llama-3-Spellbound-Instruct-8B-0.3
    parameters:
      weight: 0.18
      density: 0.54
  - model: djuna/L3.1-ForStHS+Blackroot/Llama-3-8B-Abomination-LORA
    parameters:
      weight: 0.22
      density: 0.5
  - model: djuna/L3.1-Suze-Vume-calc
    parameters:
      weight: 0.13
      density: 0.49
  - model: THUDM/LongWriter-llama3.1-8b+ResplendentAI/Smarts_Llama3
    parameters:
      weight: 0.18
      density: 0.55
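For reference, a config like this is run with mergekit's `mergekit-yaml` entry point: save the recipe to a file and pass it as the first argument, followed by an output directory. In mergekit's model-reference syntax, the `+` in entries such as `djuna/L3.1-ForStHS+Blackroot/Llama-3-8B-Abomination-LORA` applies a LoRA adapter on top of the named base model before merging. A minimal invocation sketch, assuming mergekit is installed; the file name `config.yml` and the output path are placeholders:

    mergekit-yaml config.yml ./merged-model --cuda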