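# mergekit configuration: a DELLA merge of four Llama-3.1-8B fine-tunes onto the
# vicgalle/Configurable-Hermes-3-Llama-3.1-8B base model.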
merge_method: della
dtype: bfloat16
tokenizer_source: union
parameters:
  lambda: 0.78      # scaling factor applied to the merged delta (task-vector) parameters
  epsilon: 0.1      # magnitude-based spread of per-parameter drop probabilities around the density target
  normalize: false  # keep the per-model weights as listed instead of rescaling them to sum to 1
base_model: vicgalle/Configurable-Hermes-3-Llama-3.1-8B
models:
  - model: ValiantLabs/Llama3.1-8B-ShiningValiant2
    parameters:
      weight: 0.2   # relative contribution of this model's delta to the merge
      density: 0.5  # fraction of this model's delta parameters retained after pruning
  - model: djuna/L3.1-Romes-Ninomos
    parameters:
      weight: 0.15
      density: 0.55
  - model: DreadPoor/Aurora_faustus-8B-LINEAR+grimjim/Llama-3-Instruct-abliteration-LoRA-8B
    parameters:
      weight: 0.14
      density: 0.56
  - model: v000000/L3.1-Storniitova-8B
    parameters:
      weight: 0.2
      density: 0.5
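# A minimal sketch of how this merge could be run, assuming the file is saved as
# della-merge.yaml and mergekit (https://github.com/arcee-ai/mergekit) is installed;
# the output directory name is a placeholder:
#
#   mergekit-yaml della-merge.yaml ./merged-model --cuda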