# GemOmniscien-ties / mergekit_config.yml
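# Active configuration: a TIES merge of Warit2/GemOmniscien (also the
# base model) with google/gemma-2b-it. The commented blocks further
# down are earlier experiments kept for reference.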
models:
  - model: Warit2/GemOmniscien
    parameters:
      density: 0.5
      weight: 0.5
  - model: google/gemma-2b-it
    parameters:
      density: 0.5
      weight: 0.5  # scalar weight; mergekit also accepts a per-layer gradient list
merge_method: ties
base_model: Warit2/GemOmniscien
parameters:
  normalize: true
  int8_mask: true
dtype: bfloat16
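# Usage sketch (an assumption, not part of this config): a file like
# this is normally passed to the mergekit CLI, e.g.
#   mergekit-yaml mergekit_config.yml ./merged-model --copy-tokenizer
# where ./merged-model is a hypothetical output directory.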
# slices:
#   - sources:
#       - model: unsloth/gemma-7b-bnb-4bit
#         layer_range: [0, 32]
#   - sources:
#       - model: mistralai/Mistral-7B-v0.1
#         layer_range: [24, 32]
# merge_method: passthrough
# # base_model: unsloth/gemma-7b-bnb-4bit
# parameters:
#   normalize: true
#   int8_mask: true
# dtype: float16
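# Note: passthrough merges are defined with a slices list; base_model
# and ties-style options such as normalize/int8_mask are not used by
# passthrough. Stacking Gemma-7B and Mistral-7B layers also assumes
# matching hidden sizes, which these two architectures do not share,
# so this block is kept for reference only.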
# slices:
#   - sources:
#       - model: unsloth/gemma-2b-bnb-4bit
#         layer_range: [0, 16]
#   - sources:
#       - model: NousResearch/Nous-Hermes-llama-2-7b
#         layer_range: [0, 22]
# merge_method: passthrough
# dtype: bfloat16
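# Note: the slice merge above is a frankenmerge sketch that stacks
# Gemma-2B layers [0, 16] under Nous-Hermes Llama-2-7B layers [0, 22];
# like the previous block, it requires the two sources to share a
# hidden size, which Gemma-2B (2048) and Llama-2-7B (4096) do not.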
# models:
#   - model: unsloth/gemma-2b-bnb-4bit
#     parameters:
#       density: 0.53
#       weight: 0.45
#   - model: TinyLlama/TinyLlama-1.1B-Chat-v1.0
#     parameters:
#       weight: 0.5
# merge_method: ties
# base_model: unsloth/gemma-2b-bnb-4bit
# parameters:
#   int8_mask: true
# dtype: bfloat16
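# Note: in this ties variant the TinyLlama entry has no density, so
# mergekit falls back to its default for that model; ties also assumes
# the models share an architecture, which Gemma-2B and TinyLlama (a
# Llama architecture) do not, so this block is reference-only.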