dtype: bfloat16
merge_method: passthrough
slices:
- sources:
  - layer_range: [0, 8]
    model: nvidia/Mistral-NeMo-Minitron-8B-Base
- sources:
  - layer_range: [8, 16]
    model: nvidia/Mistral-NeMo-Minitron-8B-Base
    parameters:
      scale:
      - filter: input_layernorm
        value: 0.75
      - value: 1.0
- sources:
  - layer_range: [8, 16]
    model: nvidia/Mistral-NeMo-Minitron-8B-Base
    parameters:
      scale:
      - filter: input_layernorm
        value: 0.75
      - filter: v_proj
        value: 0.5
      - filter: post_attention_layernorm
        value: 0.5
      - value: 1.0
- sources:
  - layer_range: [16, 17]
    model: nvidia/Mistral-NeMo-Minitron-8B-Base
- sources:
  - layer_range: [17, 24]
    model: nvidia/Mistral-NeMo-Minitron-8B-Base
    parameters:
      scale:
      - filter: input_layernorm
        value: 0.75
      - value: 1.0
- sources:
  - layer_range: [17, 24]
    model: nvidia/Mistral-NeMo-Minitron-8B-Base
    parameters:
      scale:
      - filter: input_layernorm
        value: 0.75
      - filter: v_proj
        value: 0.5
      - filter: post_attention_layernorm
        value: 0.5
      - value: 1.0
- sources:
  - layer_range: [24, 25]
    model: nvidia/Mistral-NeMo-Minitron-8B-Base
- sources:
  - layer_range: [25, 32]
    model: nvidia/Mistral-NeMo-Minitron-8B-Base
    parameters:
      scale:
      - filter: input_layernorm
        value: 0.75
      - filter: v_proj
        value: 0.5
      - filter: post_attention_layernorm
        value: 0.5
      - value: 1.0
- sources:
  - layer_range: [25, 40]
    model: nvidia/Mistral-NeMo-Minitron-8B-Base
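
This is a passthrough self-merge: some layer ranges of the base model are repeated, with `scale` filters down-weighting `input_layernorm`, `v_proj`, and `post_attention_layernorm` in the duplicated blocks to soften the effect of the repeated layers. As a minimal sketch of how such a recipe can be run, the snippet below uses mergekit's Python entry point; the config path, output directory, and option values are placeholders rather than the exact settings used to build this model.

```python
# Sketch only: apply the YAML recipe above with mergekit's Python API.
# "config.yaml" and "./merged-minitron" are assumed paths, not the originals.
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_YAML = "config.yaml"        # the recipe above, saved to disk
OUTPUT_PATH = "./merged-minitron"  # where the merged checkpoint is written

# Parse and validate the merge configuration.
with open(CONFIG_YAML, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Execute the merge; option values here are illustrative defaults.
run_merge(
    merge_config,
    OUTPUT_PATH,
    options=MergeOptions(
        cuda=True,            # set False to merge on CPU
        copy_tokenizer=True,  # copy the base model's tokenizer into the output
        lazy_unpickle=True,   # reduce peak RAM while loading shards
    ),
)
```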