lanking committed on
Commit f8af7ce
Parent: bebc822

Update the rope scaling settings for the GPTQ model

Files changed (1)
  1. config.json +7 -1
config.json CHANGED
@@ -36,7 +36,13 @@
     "true_sequential": true
   },
   "rms_norm_eps": 1e-05,
-  "rope_scaling": null,
+  "rope_scaling": {
+    "factor": 8.0,
+    "low_freq_factor": 1.0,
+    "high_freq_factor": 4.0,
+    "original_max_position_embeddings": 8192,
+    "rope_type": "llama3"
+  },
   "rope_theta": 500000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "float16",