250 steps; wandb run: https://wandb.ai/quirky_lats_at_mats/sae-unlearning/runs/wslbvi8w?nw=nwuserphilliphguo
commit: 2f629c2
verified
{
  "alpha_pattern": {},
  "auto_mapping": {
    "base_model_class": "Gemma2ForCausalLM",
    "parent_library": "transformers.models.gemma2.modeling_gemma2"
  },
  "base_model_name_or_path": "google/gemma-2-9b",
  "bias": "none",
  "eva_config": null,
  "exclude_modules": null,
  "fan_in_fan_out": false,
  "inference_mode": true,
  "init_lora_weights": true,
  "layer_replication": null,
  "layers_pattern": null,
  "layers_to_transform": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21],
  "loftq_config": {},
  "lora_alpha": 16,
  "lora_bias": false,
  "lora_dropout": 0.05,
  "megatron_config": null,
  "megatron_core": "megatron.core",
  "modules_to_save": null,
  "peft_type": "LORA",
  "r": 16,
  "rank_pattern": {},
  "revision": null,
  "target_modules": ["up_proj", "o_proj", "k_proj", "v_proj", "q_proj", "gate_proj", "down_proj"],
  "task_type": null,
  "use_dora": false,
  "use_rslora": false
}
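
For reference, a minimal sketch of recreating this LoRA setup and loading the saved adapter with PEFT. This is not from the original run: the adapter path "path/to/adapter" is a placeholder, and the bfloat16/device-map settings are assumptions.

import torch
from transformers import AutoModelForCausalLM
from peft import LoraConfig, PeftModel

# LoraConfig equivalent to the adapter_config.json above:
# rank 16, alpha 16, dropout 0.05, all attention and MLP projections,
# applied to layers 0-21 only.
lora_config = LoraConfig(
    r=16,
    lora_alpha=16,
    lora_dropout=0.05,
    bias="none",
    target_modules=["up_proj", "o_proj", "k_proj", "v_proj",
                    "q_proj", "gate_proj", "down_proj"],
    layers_to_transform=list(range(22)),  # layers 0-21
)

# Load the base model, then attach the trained adapter weights.
base = AutoModelForCausalLM.from_pretrained(
    "google/gemma-2-9b",
    torch_dtype=torch.bfloat16,  # assumed dtype, not recorded in the config
    device_map="auto",
)
model = PeftModel.from_pretrained(base, "path/to/adapter")  # placeholder path
model.eval()

If standalone weights are wanted, model.merge_and_unload() would fold the LoRA deltas into the base model.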