Upload FP8Qwen2ForCausalLM

#9
opened by Xihc20
Files changed (2)
  1. config.json +2 -2
  2. generation_config.json +1 -1
config.json CHANGED
@@ -12,6 +12,7 @@
     "AutoModelForTokenClassification": "modeling_fp8_qwen2.FP8Qwen2ForTokenClassification"
   },
   "bos_token_id": 151643,
+  "dtype": "bfloat16",
   "eos_token_id": 151645,
   "fp8_config": {
     "act_block_size": 16,
@@ -67,8 +68,7 @@
   "rope_theta": 1000000.0,
   "sliding_window": null,
   "tie_word_embeddings": false,
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.54.1",
+  "transformers_version": "4.57.0",
   "use_cache": true,
   "use_sliding_window": false,
   "vocab_size": 152064
generation_config.json CHANGED
@@ -2,5 +2,5 @@
   "_from_model_config": true,
   "bos_token_id": 151643,
   "eos_token_id": 151645,
-  "transformers_version": "4.54.1"
+  "transformers_version": "4.57.0"
 }
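
Note: the substantive change is that the files were re-saved with transformers 4.57.0, which serializes the model dtype under the `dtype` key rather than the older `torch_dtype` key. Below is a minimal sketch (not part of this PR) that sanity-checks the re-serialized keys against a local copy of the updated config.json; the file path is a placeholder assumption.

```python
# Minimal sketch (not part of this PR): verify the keys written by the
# newer transformers release in a local copy of the updated config.json.
# "config.json" is a placeholder path, not a file shipped with this diff.
import json

with open("config.json") as f:
    cfg = json.load(f)

assert "torch_dtype" not in cfg                 # key removed in this change
assert cfg["dtype"] == "bfloat16"               # key added in this change
assert cfg["transformers_version"] == "4.57.0"  # bumped from 4.54.1
print("config.json matches the updated serialization")
```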