
{
    "config": {
        "name": "pytorch-llama",
        "backend": {
            "name": "pytorch",
            "version": "2.2.0",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "text-generation",
            "library": "transformers",
            "model": "meta-llama/Llama-2-7b-hf",
            "processor": "meta-llama/Llama-2-7b-hf",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "hub_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": "float16",
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": "awq",
            "quantization_config": {
                "bits": 4,
                "version": "gemm"
            },
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "tensor_parallel": false,
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 10,
            "duration": 10,
            "warmup_runs": 10,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 128
            },
            "new_tokens": null,
            "latency": true,
            "memory": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 32,
                "min_new_tokens": 32
            },
            "call_kwargs": {}
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "warn",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7742 64-Core Processor",
            "cpu_count": 128,
            "cpu_ram_mb": 540671.627264,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.4.0-166-generic-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "NVIDIA A100-SXM4-80GB",
                "NVIDIA A100-SXM4-80GB",
                "NVIDIA A100-SXM4-80GB",
                "NVIDIA DGX Display",
                "NVIDIA A100-SXM4-80GB"
            ],
            "gpu_count": 5,
            "gpu_vram_mb": 347892350976,
            "optimum_benchmark_version": "0.2.1",
            "optimum_benchmark_commit": "347e13ca9f7f904f55669603cfb9f0b6c7e8672c",
            "transformers_version": "4.41.1",
            "transformers_commit": null,
            "accelerate_version": "0.30.1",
            "accelerate_commit": null,
            "diffusers_version": null,
            "diffusers_commit": null,
            "optimum_version": "1.20.0",
            "optimum_commit": null,
            "timm_version": null,
            "timm_commit": null,
            "peft_version": "0.11.1",
            "peft_commit": null
        }
    },
    "report": {
        "prefill": {
            "memory": {
                "unit": "MB",
                "max_ram": 2074.35776,
                "max_global_vram": 5790.171136,
                "max_process_vram": 4863.295488,
                "max_reserved": 4330.61888,
                "max_allocated": 4048.193024
            },
            "latency": {
                "unit": "s",
                "count": 10,
                "total": 0.41605302047729487,
                "mean": 0.041605302047729484,
                "stdev": 0.00020399218803153726,
                "p50": 0.04154483032226562,
                "p90": 0.041704954147338866,
                "p95": 0.041949580192565915,
                "p99": 0.04214528102874756,
                "values": [
                    0.04219420623779297,
                    0.04144332885742188,
                    0.04153247833251953,
                    0.04148556900024414,
                    0.04155718231201172,
                    0.041518913269042966,
                    0.04158358383178711,
                    0.0415015983581543,
                    0.041650592803955076,
                    0.04158556747436523
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 3076.530963605522
            },
            "energy": null,
            "efficiency": null
        },
        "decode": {
            "memory": {
                "unit": "MB",
                "max_ram": 2074.640384,
                "max_global_vram": 5790.171136,
                "max_process_vram": 4863.295488,
                "max_reserved": 4330.61888,
                "max_allocated": 4129.47456
            },
            "latency": {
                "unit": "s",
                "count": 10,
                "total": 12.225400024414062,
                "mean": 1.2225400024414061,
                "stdev": 0.022245431745828787,
                "p50": 1.2144505004882813,
                "p90": 1.240745471191406,
                "p95": 1.261704034423828,
                "p99": 1.2784708850097657,
                "values": [
                    1.28266259765625,
                    1.211429443359375,
                    1.22725341796875,
                    1.211544189453125,
                    1.210598388671875,
                    1.2110260009765625,
                    1.1987266845703124,
                    1.2360880126953124,
                    1.2173568115234374,
                    1.2187144775390626
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 25.357043481680073
            },
            "energy": null,
            "efficiency": null
        },
        "per_token": {
            "memory": null,
            "latency": {
                "unit": "s",
                "count": 309,
                "total": 12.559073287963866,
                "mean": 0.040644250122860416,
                "stdev": 0.007230265843393068,
                "p50": 0.039160831451416016,
                "p90": 0.04030074920654297,
                "p95": 0.041378200531005854,
                "p99": 0.08146419494628906,
                "values": [
                    0.040097793579101565,
                    0.040379390716552735,
                    0.040237056732177735,
                    0.03997491073608399,
                    0.04155392074584961,
                    0.03958988952636719,
                    0.03933388900756836,
                    0.04033740615844727,
                    0.039723007202148435,
                    0.04019200134277344,
                    0.04077568054199219,
                    0.04030054473876953,
                    0.04030156707763672,
                    0.04001587295532227,
                    0.03991654586791992,
                    0.04090982437133789,
                    0.04145151901245117,
                    0.040599552154541016,
                    0.05827686309814453,
                    0.05269401550292969,
                    0.04028006362915039,
                    0.038847488403320314,
                    0.03914035034179687,
                    0.03944857788085938,
                    0.05122150421142578,
                    0.039021568298339845,
                    0.039126014709472655,
                    0.03942604827880859,
                    0.03931033706665039,
                    0.03940249633789063,
                    0.08254259490966796,
                    0.03973324966430664,
                    0.03926732635498047,
                    0.039300094604492186,
                    0.03943526458740235,
                    0.03904307174682617,
                    0.039067649841308595,
                    0.038768638610839845,
                    0.03854950332641602,
                    0.038593536376953126,
                    0.03858943939208984,
                    0.03856281661987305,
                    0.038558719635009765,
                    0.038809600830078124,
                    0.03847577667236328,
                    0.03876761627197266,
                    0.038965248107910154,
                    0.0388485107421875,
                    0.03898470306396484,
                    0.039169025421142575,
                    0.039218177795410154,
                    0.0393994255065918,
                    0.03936665725708008,
                    0.039298046112060545,
                    0.03921612930297851,
                    0.03929190444946289,
                    0.03927552032470703,
                    0.03899699020385742,
                    0.03897446441650391,
                    0.03901235198974609,
                    0.03908915328979492,
                    0.08097996520996094,
                    0.03890585708618164,
                    0.03935334396362305,
                    0.039087104797363284,
                    0.038833152770996096,
                    0.03916697692871094,
                    0.039564289093017575,
                    0.04012543869018555,
                    0.03950387191772461,
                    0.03857920074462891,
                    0.038435840606689455,
                    0.03873484802246094,
                    0.03887820816040039,
                    0.046458881378173826,
                    0.04090777587890625,
                    0.03990835189819336,
                    0.03919257736206055,
                    0.039518207550048826,
                    0.03941068649291992,
                    0.03938611221313477,
                    0.04009062576293945,
                    0.03932364654541016,
                    0.03936460876464844,
                    0.039669761657714846,
                    0.039365631103515625,
                    0.03944140625,
                    0.039347198486328124,
                    0.03936358261108398,
                    0.03938611221313477,
                    0.03933491134643555,
                    0.03919462585449219,
                    0.08150630187988281,
                    0.039300094604492186,
                    0.03904307174682617,
                    0.03904204940795898,
                    0.038847488403320314,
                    0.03904409790039062,
                    0.038825984954833984,
                    0.03873177719116211,
                    0.03882700729370117,
                    0.038575103759765625,
                    0.03860172653198242,
                    0.03852799987792969,
                    0.0384983024597168,
                    0.03982131195068359,
                    0.039403518676757815,
                    0.03935539245605469,
                    0.03986943817138672,
                    0.040151039123535154,
                    0.038934528350830076,
                    0.03897446441650391,
                    0.038798336029052735,
                    0.038833152770996096,
                    0.03887513732910156,
                    0.03885977554321289,
                    0.03947417449951172,
                    0.03940966415405273,
                    0.03894476699829102,
                    0.03907174301147461,
                    0.03912499237060547,
                    0.0391833610534668,
                    0.03862527847290039,
                    0.08044748687744141,
                    0.03896319961547851,
                    0.038965248107910154,
                    0.03892633438110352,
                    0.03868057632446289,
                    0.038556671142578124,
                    0.039946239471435545,
                    0.03887820816040039,
                    0.040597503662109374,
                    0.0393359375,
                    0.038747135162353515,
                    0.03870003128051758,
                    0.03868467330932617,
                    0.03908812713623047,
                    0.03895603179931641,
                    0.039754753112792966,
                    0.03891302490234375,
                    0.03933695983886719,
                    0.03904819107055664,
                    0.03930624008178711,
                    0.03880652618408203,
                    0.038790145874023435,
                    0.03862835311889649,
                    0.03879731369018555,
                    0.03911679840087891,
                    0.039390209197998044,
                    0.039005184173583986,
                    0.03882905578613281,
                    0.038623233795166016,
                    0.03920588684082031,
                    0.03913011169433594,
                    0.08065331268310547,
                    0.03888025665283203,
                    0.03804569625854492,
                    0.03861708831787109,
                    0.03830988693237305,
                    0.038312961578369144,
                    0.03845017623901367,
                    0.03885158538818359,
                    0.03849932861328125,
                    0.03851366424560547,
                    0.038435840606689455,
                    0.03857612609863281,
                    0.0387583999633789,
                    0.039613441467285154,
                    0.04010496139526367,
                    0.04012646484375,
                    0.03987046432495117,
                    0.039779327392578126,
                    0.0401448974609375,
                    0.039801856994628904,
                    0.039772159576416014,
                    0.03974758529663086,
                    0.039056385040283206,
                    0.03899699020385742,
                    0.039300094604492186,
                    0.03900620651245117,
                    0.038967296600341796,
                    0.038983680725097655,
                    0.03874611282348633,
                    0.038934528350830076,
                    0.03876761627197266,
                    0.08088166046142578,
                    0.03881267166137695,
                    0.03869388961791992,
                    0.03849728012084961,
                    0.038509567260742186,
                    0.038553600311279294,
                    0.03885465621948242,
                    0.03897753524780274,
                    0.038776832580566405,
                    0.03892326354980469,
                    0.038563838958740236,
                    0.03803955078125,
                    0.03827916717529297,
                    0.038558719635009765,
                    0.03851366424560547,
                    0.03882905578613281,
                    0.03846041488647461,
                    0.038591487884521485,
                    0.03845632171630859,
                    0.03849216079711914,
                    0.03863347244262695,
                    0.03860275268554687,
                    0.038735870361328126,
                    0.03874816131591797,
                    0.03880652618408203,
                    0.0387512321472168,
                    0.03876249694824219,
                    0.03867443084716797,
                    0.03872153472900391,
                    0.03903078460693359,
                    0.03861094284057617,
                    0.082123779296875,
                    0.03978854370117187,
                    0.03969023895263672,
                    0.039820289611816405,
                    0.04088729476928711,
                    0.04037529754638672,
                    0.03975270462036133,
                    0.04003839874267578,
                    0.04004044723510742,
                    0.03998720169067383,
                    0.03993907165527344,
                    0.039918590545654296,
                    0.040243198394775394,
                    0.04006195068359375,
                    0.04001484680175781,
                    0.03905945587158203,
                    0.03921612930297851,
                    0.0389119987487793,
                    0.03877171325683594,
                    0.04006399917602539,
                    0.04054937744140625,
                    0.04061798477172852,
                    0.03925299072265625,
                    0.038809600830078124,
                    0.03885055923461914,
                    0.03897139358520508,
                    0.03886796951293945,
                    0.04036608123779297,
                    0.042103809356689455,
                    0.04126822280883789,
                    0.03923660659790039,
                    0.08081715393066406,
                    0.038866943359375,
                    0.03971993637084961,
                    0.040049663543701174,
                    0.04010905456542969,
                    0.038809600830078124,
                    0.03877785491943359,
                    0.03899801635742187,
                    0.039065601348876954,
                    0.03908505630493164,
                    0.039444480895996094,
                    0.039613441467285154,
                    0.03912396621704101,
                    0.03907276916503906,
                    0.03905228805541992,
                    0.03907379150390625,
                    0.039277568817138675,
                    0.03960115051269531,
                    0.03954585647583008,
                    0.039605247497558595,
                    0.03908607864379883,
                    0.03913216018676758,
                    0.039550975799560545,
                    0.03923660659790039,
                    0.0390830078125,
                    0.039228416442871096,
                    0.039242752075195314,
                    0.039147518157958985,
                    0.03908505630493164,
                    0.039339038848876955,
                    0.039217121124267576,
                    0.08159232330322265,
                    0.039373825073242184,
                    0.03952025604248047,
                    0.039314430236816404,
                    0.03924787139892578,
                    0.03940249633789063,
                    0.03920588684082031,
                    0.03970048141479492,
                    0.03929702377319336,
                    0.03963187026977539,
                    0.03956531143188476,
                    0.039419902801513675,
                    0.03931033706665039,
                    0.03904204940795898,
                    0.03904512023925781,
                    0.039190528869628906,
                    0.04031180953979492,
                    0.04006399917602539,
                    0.039695358276367186,
                    0.038749183654785156,
                    0.03926630401611328,
                    0.03908198547363281,
                    0.03882291030883789,
                    0.03906867218017578,
                    0.03866419219970703,
                    0.039093246459960936,
                    0.03965542221069336,
                    0.03919462585449219,
                    0.038816768646240236,
                    0.038836223602294925,
                    0.039160831451416016
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 24.603726159965454
            },
            "energy": null,
            "efficiency": null
        }
    }
}