frcp committed
Commit dd9187e · verified · 1 Parent(s): e476a23

Upload gemma2_train_dev_test_qlora_test2.yml

gemma2_train_dev_test_qlora_test2.yml ADDED
@@ -0,0 +1,67 @@
+ base_model: google/gemma-2-9b-it
+ model_type: AutoModelForCausalLM
+ tokenizer_type: AutoTokenizer
+
+ load_in_8bit: false
+ load_in_4bit: true
+ strict: false
+
+ chat_template: gemma
+ datasets:
+   - path: frcp/malpyung_yoyak_train_dev_test
+     type: alpaca
+     chat_template: gemma
+     drop_system_message: true
+ val_set_size: 0.0
+ output_dir: ./outputs/gemma2_train_dev_test_qlora_test2
+
+ adapter: qlora
+ lora_r: 64
+ lora_alpha: 64
+ lora_dropout: 0.05
+ lora_target_linear: true
+
+ sequence_len: 4096
+ sample_packing: false
+ eval_sample_packing: false
+ pad_to_sequence_len: true
+
+ wandb_project:
+ wandb_entity:
+ wandb_watch:
+ wandb_name:
+ wandb_log_model:
+
+
+ gradient_accumulation_steps: 2
+ micro_batch_size: 4
+ num_epochs: 10
+ optimizer: adamw_bnb_8bit
+ lr_scheduler: cosine
+ learning_rate: 2e-5
+
+ train_on_inputs: false
+ group_by_length: false
+ bf16: auto
+ fp16:
+ tf32: true
+
+ gradient_checkpointing: true
+ early_stopping_patience:
+ resume_from_checkpoint:
+ local_rank:
+ logging_steps: 1
+ xformers_attention:
+ flash_attention: true
+
+ warmup_ratio: 0
+ evals_per_epoch:
+ eval_table_size:
+ eval_max_new_tokens: 0
+ saves_per_epoch: 0
+ debug:
+ deepspeed:
+ weight_decay: 0.0
+ fsdp:
+ fsdp_config:
+ special_tokens:
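
For reference, a config in this format is consumed by axolotl's training CLI. A minimal launch sketch, assuming axolotl and accelerate are installed and the frcp/malpyung_yoyak_train_dev_test dataset is accessible from the Hub:

    accelerate launch -m axolotl.cli.train gemma2_train_dev_test_qlora_test2.yml

With load_in_4bit: true and adapter: qlora, the base model is loaded in 4-bit and only the LoRA adapter weights (rank 64, alpha 64, applied to all linear layers via lora_target_linear) are trained, which is what keeps the 9B model trainable on a single GPU at sequence_len 4096.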