zRzRzRzRzRzRzR committed
Commit 5c16395 · 1 Parent(s): d7278ee

update config for the new format of the transformers / sglang libraries

Files changed (2)
  1. config.json +10 -18
  2. generation_config.json +1 -1
config.json CHANGED
@@ -3,15 +3,16 @@
     "Glm4vMoeForConditionalGeneration"
   ],
   "model_type": "glm4v_moe",
-  "rope_scaling": {
-    "rope_type": "default",
-    "mrope_section": [
-      8,
-      12,
-      12
-    ]
-  },
+  "image_start_token_id": 151339,
+  "image_end_token_id": 151340,
+  "video_start_token_id": 151341,
+  "video_end_token_id": 151342,
+  "image_token_id": 151363,
+  "video_token_id": 151364,
+  "tie_word_embeddings": false,
+  "transformers_version": "4.57.1",
   "text_config": {
+    "model_type": "glm4v_moe_text",
     "pad_token_id": 151329,
     "vocab_size": 151552,
     "eos_token_id": [
@@ -19,9 +20,6 @@
       151336,
       151338
     ],
-    "image_end_token_id": 151340,
-    "image_start_token_id": 151339,
-    "image_token_id": 151363,
     "head_dim": 128,
     "attention_bias": true,
     "attention_dropout": 0.0,
@@ -31,7 +29,6 @@
     "initializer_range": 0.02,
     "intermediate_size": 10944,
     "max_position_embeddings": 65536,
-    "model_type": "glm4v_moe_text",
     "moe_intermediate_size": 1408,
     "n_group": 1,
     "n_routed_experts": 128,
@@ -43,7 +40,7 @@
     "num_key_value_heads": 8,
     "partial_rotary_factor": 0.5,
     "rms_norm_eps": 1e-05,
-    "torch_dtype": "bfloat16",
+    "dtype": "bfloat16",
     "rope_scaling": {
       "rope_type": "default",
       "mrope_section": [
@@ -58,11 +55,6 @@
     "use_cache": true,
     "use_qk_norm": false
   },
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.55.0.dev0",
-  "video_end_token_id": 151342,
-  "video_start_token_id": 151341,
-  "video_token_id": 151364,
   "vision_config": {
     "attention_bias": false,
     "attention_dropout": 0.0,
generation_config.json CHANGED
@@ -10,5 +10,5 @@
   "temperature": 1.0,
   "top_k": 1,
   "top_p": 0.0001,
-  "transformers_version": "4.55.0.dev"
+  "transformers_version": "4.57.1"
 }
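
Only the recorded transformers_version changes here; the decoding defaults themselves are untouched. A companion sketch in the same style (reading the committed generation_config.json directly; values in the comments come from the diff above) surfaces those defaults. Note that top_k = 1 makes sampling effectively greedy regardless of the temperature.

import json

# Load the generation_config.json from this commit (the path is illustrative).
with open("generation_config.json") as f:
    gen = json.load(f)

print(gen["transformers_version"])                     # 4.57.1 after this commit
print(gen["temperature"], gen["top_k"], gen["top_p"])  # 1.0 1 0.0001
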