{
"bits": 4,
"group_size": 128,
"sym": true,
"data_type": "int",
"iters": 1000,
"nsamples": 512,
"low_gpu_mem_usage": true,
"autoround_version": "0.7.1",
"quant_method": "auto-round",
"packing_format": "auto_round:auto_gptq"
}
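This config records the arguments that were passed to AutoRound when the model was quantized. Below is a minimal sketch of how such a file could be produced with the auto-round library; the model name and output directory are placeholders, and the exact export behavior may differ between auto-round releases:

```python
# Sketch: 4-bit, group-size-128 symmetric quantization with auto-round,
# exported in a GPTQ-compatible packing format.
# Model name and output path are placeholders, not taken from the original file.
from transformers import AutoModelForCausalLM, AutoTokenizer
from auto_round import AutoRound

model_name = "Qwen/Qwen2.5-1.5B-Instruct"  # placeholder base model
model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype="auto")
tokenizer = AutoTokenizer.from_pretrained(model_name)

# These arguments correspond to the fields recorded in the config above.
autoround = AutoRound(
    model,
    tokenizer,
    bits=4,
    group_size=128,
    sym=True,
    data_type="int",
    iters=1000,
    nsamples=512,
    low_gpu_mem_usage=True,
)

autoround.quantize()
# Export; recent auto-round releases write the quantization settings and the
# packing format (e.g. "auto_round:auto_gptq") back into the saved config.
autoround.save_quantized("./model-int4-autoround", format="auto_round")
```

A checkpoint packed this way can typically be loaded back through transformers' `AutoModelForCausalLM.from_pretrained`, provided the auto-round runtime (and, for this packing format, a GPTQ-compatible kernel) is installed.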