BigLlama-2-120B / mergekit_config.yml
slices:
  - sources:
      - layer_range: [0, 20]
        model: meta-llama/Llama-2-70b-hf
  - sources:
      - layer_range: [10, 30]
        model: meta-llama/Llama-2-70b-hf
  - sources:
      - layer_range: [20, 40]
        model: meta-llama/Llama-2-70b-hf
  - sources:
      - layer_range: [30, 50]
        model: meta-llama/Llama-2-70b-hf
  - sources:
      - layer_range: [40, 60]
        model: meta-llama/Llama-2-70b-hf
  - sources:
      - layer_range: [50, 70]
        model: meta-llama/Llama-2-70b-hf
  - sources:
      - layer_range: [60, 80]
        model: meta-llama/Llama-2-70b-hf
merge_method: passthrough
dtype: float16
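
This is a passthrough ("frankenmerge") configuration: seven 20-layer slices of the 80-layer meta-llama/Llama-2-70b-hf, each overlapping its neighbor by 10 layers, are concatenated into a single 140-layer stack of roughly 120B parameters. No weights are averaged; layers are copied as-is in float16.

Below is a minimal sketch of running this file through mergekit's Python API (the MergeConfiguration / run_merge / MergeOptions pattern shown in the mergekit README); the output path and option values are placeholder assumptions, not part of this repo.

# run_merge_sketch.py -- minimal sketch, assuming mergekit's Python API
# (MergeConfiguration / run_merge / MergeOptions). Paths are placeholders.
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_YML = "mergekit_config.yml"  # the YAML above
OUTPUT_PATH = "./BigLlama-2-120B"   # placeholder output directory

# Parse the YAML into a validated merge configuration.
with open(CONFIG_YML, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Execute the passthrough merge: each slice's layers are copied into the
# output model in order, so a GPU is optional for this merge method.
run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),
        copy_tokenizer=True,  # reuse the base model's tokenizer
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)

Equivalently, the same file can be run from the command line with mergekit-yaml mergekit_config.yml ./BigLlama-2-120B.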