Guilherme34 committed
Commit 5775bfa · verified · 1 Parent(s): 82f23cd

Update adapter_config.json

Files changed (1):
  1. adapter_config.json +5 -3
adapter_config.json CHANGED
@@ -6,16 +6,17 @@
   "fan_in_fan_out": false,
   "inference_mode": true,
   "init_lora_weights": true,
+  "layer_replication": null,
   "layers_pattern": null,
   "layers_to_transform": null,
   "loftq_config": {},
-  "lora_alpha": 256,
-  "lora_dropout": 0.05,
+  "lora_alpha": 16,
+  "lora_dropout": 0,
   "megatron_config": null,
   "megatron_core": "megatron.core",
   "modules_to_save": null,
   "peft_type": "LORA",
-  "r": 128,
+  "r": 8,
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
@@ -28,5 +29,6 @@
     "k_proj"
   ],
   "task_type": "CAUSAL_LM",
+  "use_dora": false,
   "use_rslora": false
 }
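
For context, the change lowers the LoRA rank from 128 to 8 and the scaling alpha from 256 to 16 (keeping the effective scaling factor alpha/r at 2 in both configs), disables dropout, and adds the newer layer_replication and use_dora fields. Below is a minimal sketch, assuming peft >= 0.10.0 (the first release with both use_dora and layer_replication), of how the updated values map onto PEFT's LoraConfig; the target_modules list is incomplete because the diff shows only its last entry.

```python
# Minimal sketch reconstructing the updated adapter settings with
# peft.LoraConfig. The target_modules list is partial: the diff hunk
# only shows "k_proj", and the remaining entries are elided.
from peft import LoraConfig, TaskType

lora_config = LoraConfig(
    task_type=TaskType.CAUSAL_LM,
    r=8,                        # rank, lowered from 128
    lora_alpha=16,              # scaling alpha, lowered from 256
    lora_dropout=0.0,           # dropout disabled (previously 0.05)
    target_modules=["k_proj"],  # partial list; other entries elided in the diff
    use_rslora=False,           # plain alpha/r scaling, not rank-stabilized LoRA
    use_dora=False,             # standard LoRA, no weight decomposition (DoRA)
)
```

Since the effective LoRA scaling is lora_alpha / r, both the old (256/128) and new (16/8) configs apply the same scaling of 2; the update mainly shrinks the adapter's parameter count.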