```yaml
models:
  - model: cognitivecomputations/dolphin-2.8-mistral-7b-v02
    parameters:
      density: 0.5   # density/weight are read by ties-style merges; slerp interpolates with t below
      weight: 0.5
  - model: meta-llama/Meta-Llama-3-8B
    parameters:
      density: 0.5
      weight: 0.5
merge_method: slerp
base_model: meta-llama/Meta-Llama-3-8B
parameters:
  normalize: false
  t: [0.0, 0.5, 1.0, 0.5, 0.0]   # per-layer-group interpolation factor: 0.0 = base model, 1.0 = the other model
dtype: float16
```
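To apply a configuration like this, one option is mergekit's Python entry point. The sketch below is a minimal example, assuming the YAML above is saved as `config.yaml` (a hypothetical filename) and following the `MergeConfiguration` / `run_merge` / `MergeOptions` interface shown in mergekit's README; the CLI equivalent would be along the lines of `mergekit-yaml config.yaml ./merged`.

```python
# Sketch: run the merge described by the YAML config above using mergekit's Python API.
# Assumes mergekit is installed (pip install mergekit) and config.yaml holds the config above.
import torch
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_PATH = "config.yaml"   # hypothetical path to the merge config
OUTPUT_PATH = "./merged"      # directory where the merged model will be written

# Parse and validate the YAML into a mergekit MergeConfiguration object.
with open(CONFIG_PATH, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Execute the merge and write the resulting model (weights + tokenizer) to OUTPUT_PATH.
run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use a GPU if one is available
        copy_tokenizer=True,             # copy the base model's tokenizer into the output
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)
```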