alpindale committed on
Commit
fef27e0
1 Parent(s): e2aa265

fix BOS and EOS

Browse files
Files changed (1) hide show
  1. config.json +2 -2
config.json CHANGED
@@ -4,8 +4,8 @@
4
  "Qwen2ForCausalLM"
5
  ],
6
  "attention_dropout": 0.0,
7
- "eos_token_id": 151643,
8
- "bos_token_id": 151645,
9
  "hidden_act": "silu",
10
  "hidden_size": 8192,
11
  "initializer_range": 0.02,
 
4
  "Qwen2ForCausalLM"
5
  ],
6
  "attention_dropout": 0.0,
7
+ "bos_token_id": 151643,
8
+ "eos_token_id": 151645,
9
  "hidden_act": "silu",
10
  "hidden_size": 8192,
11
  "initializer_range": 0.02,