gpol13 committed on
Commit
a9f5dbc
·
verified ·
1 Parent(s): 02de28b

Upload folder using huggingface_hub

Browse files
adapter_config.json CHANGED
@@ -20,10 +20,10 @@
20
  "rank_pattern": {},
21
  "revision": null,
22
  "target_modules": [
23
- "k_proj",
24
  "o_proj",
25
  "v_proj",
26
- "q_proj"
27
  ],
28
  "task_type": "CAUSAL_LM",
29
  "use_dora": false,
 
20
  "rank_pattern": {},
21
  "revision": null,
22
  "target_modules": [
23
+ "q_proj",
24
  "o_proj",
25
  "v_proj",
26
+ "k_proj"
27
  ],
28
  "task_type": "CAUSAL_LM",
29
  "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:a18c90947aa52d7cc66a70fb0a9df35d476e14d6689603cf5b540c5a629d554a
3
  size 54560368
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e687d33c6ed62abaeffa4a96ff4c4a662c23295f3a691e41ce02288209173ab4
3
  size 54560368
tokenizer_config.json CHANGED
@@ -32,6 +32,7 @@
32
  "bos_token": "<s>",
33
  "clean_up_tokenization_spaces": false,
34
  "eos_token": "</s>",
 
35
  "legacy": false,
36
  "max_length": 256,
37
  "model_max_length": 1000000000000000019884624838656,
 
32
  "bos_token": "<s>",
33
  "clean_up_tokenization_spaces": false,
34
  "eos_token": "</s>",
35
+ "extra_special_tokens": {},
36
  "legacy": false,
37
  "max_length": 256,
38
  "model_max_length": 1000000000000000019884624838656,