# DARE-TIES merge of three source models; deltas are taken against the base model below.
base_model: darkc0de/XortronCriminalComputingConfig
chat_template: auto
merge_method: dare_ties
modules:
  default:
    slices:
    - sources:
      # density: fraction of each model's delta parameters kept (DARE drops the rest);
      # weight: relative contribution of that model to the merged weights.
      - layer_range: [0, 40]
        model: Entropicengine/DarkTriad-24b
        parameters:
          density: 0.5
          weight: 0.3
      - layer_range: [0, 40]
        model: darkc0de/XortronCriminalComputingConfig
        parameters:
          density: 0.8
          weight: 0.8
      - layer_range: [0, 40]
        model: Entropicengine/Trifecta-Max-24b
        parameters:
          density: 0.5
          weight: 0.1
out_dtype: bfloat16
tokenizer: {}
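This is a mergekit-style merge configuration. For reference, below is a minimal sketch of how such a file could be applied with mergekit's Python API; the file name config.yaml, the output path, and the hardware options are assumptions and not part of the original file, and the mergekit-yaml command-line tool is an equally valid entry point. Exact option names may vary across mergekit versions.

# Minimal sketch, assuming mergekit and torch are installed.
# CONFIG_PATH and OUTPUT_PATH are hypothetical placeholders.
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_PATH = "config.yaml"   # path to the YAML shown above (hypothetical name)
OUTPUT_PATH = "./merged"      # directory to write the merged model into

with open(CONFIG_PATH, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # run the merge on GPU if one is available
        copy_tokenizer=True,             # copy the base model's tokenizer into the output
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)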