Qwen3-30B-A3B-MNN / llm_config.json
{
  "hidden_size": 2048,
  "layer_nums": 48,
  "attention_mask": "float",
  "key_value_shape": [2, 1, 0, 4, 128],
  "bos": "",
  "system_prompt_template": "<|im_start|>system\n%s<|im_end|>\n",
  "user_prompt_template": "<|im_start|>user\n%s<|im_end|>\n",
  "assistant_prompt_template": "<|im_start|>assistant\n%s<|im_end|>\n",
  "is_visual": false
}
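
The three *_prompt_template fields are printf-style patterns whose %s placeholder is replaced by the message text; filled in sequence, they produce a ChatML-style prompt for Qwen3. Below is a minimal Python sketch, assuming the file is saved locally as llm_config.json and using hypothetical message strings; trimming the assistant template at %s to leave the generation turn open is an assumption about intended usage, not something the config itself specifies.

import json

# Load the MNN LLM config shown above (local path is an assumption).
with open("llm_config.json") as f:
    cfg = json.load(f)

# Fill each printf-style template with its message text.
system = cfg["system_prompt_template"] % "You are a helpful assistant."
user = cfg["user_prompt_template"] % "Hello!"

# Keep only the prefix before %s so the assistant turn is left open
# for the model to complete (assumed usage, not stated in the file).
assistant_prefix = cfg["assistant_prompt_template"].split("%s")[0]

prompt = system + user + assistant_prefix
print(prompt)
# <|im_start|>system
# You are a helpful assistant.<|im_end|>
# <|im_start|>user
# Hello!<|im_end|>
# <|im_start|>assistant

As for the other fields, key_value_shape most plausibly encodes the per-layer KV-cache layout, e.g. [K/V, batch, sequence length (0 = dynamic), KV heads, head dim], which would be consistent with 4 key-value heads of dimension 128; this reading is an inference from the values, not documented in the file.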