{ "base_model_name_or_path": "/root/.cache/huggingface/hub/models--Qwen--Qwen2.5-72B-Instruct/snapshots/495f39366efef23836d0cfae4fbe635880d2be31", "r": 32, "target_modules": [ "k_proj", "v_proj", "up_proj", "q_proj", "gate_proj", "o_proj", "down_proj" ], "lora_alpha": 32, "lora_dropout": 0.0, "fan_in_fan_out": false, "bias": "none", "use_rslora": false, "init_lora_weights": true, "megatron_core": "megatron.core", "use_dora": false, "lora_bias": false, "task_type": "CAUSAL_LM", "peft_type": "LORA" }