CultriX committed
Commit f4a418a · verified · 1 Parent(s): 73a3a55

Update app.py

Files changed (1):
  1. app.py +996 -383

app.py CHANGED
@@ -320,571 +320,823 @@ def download_all_data():
320
  benchmark_data = [
321
  {
322
  "rank": 1,
323
- "name": "sometimesanotion/Lamarck-14B-v0.7-rc4",
324
  "scores": {
325
- "average": 41.22,
326
- "IFEval": 72.11,
327
- "BBH": 49.85,
328
- "MATH": 36.86,
329
- "GPQA": 18.57,
330
- "MUSR": 21.07,
331
- "MMLU_PRO": 48.89,
332
  "Architecture": "Qwen2ForCausalLM",
333
  "Parameters": "14.766B",
334
- "Chat_Template": "No"
335
  },
336
- "hf_url": "https://huggingface.co/sometimesanotion/Lamarck-14B-v0.7-rc4",
337
- "known_config": None
338
  },
339
  {
340
  "rank": 2,
341
- "name": "arcee-ai/Virtuoso-Small-v2",
342
  "scores": {
343
- "average": 41.08,
344
- "IFEval": 82.73,
345
- "BBH": 50.95,
346
- "MATH": 38.22,
347
- "GPQA": 13.76,
348
- "MUSR": 14.28,
349
- "MMLU_PRO": 46.53,
350
  "Architecture": "Qwen2ForCausalLM",
351
  "Parameters": "14.766B",
352
  "Chat_Template": "Yes"
353
  },
354
- "hf_url": "https://huggingface.co/arcee-ai/Virtuoso-Small-v2",
355
- "known_config": None
356
  },
357
  {
358
  "rank": 3,
359
- "name": "sometimesanotion/Qwenvergence-14B-v12-Prose-DS",
360
- "scores": {
361
- "average": 41.08,
362
- "IFEval": 61.73,
363
- "BBH": 49.87,
364
- "MATH": 42.30,
365
- "GPQA": 19.24,
366
- "MUSR": 24.78,
367
- "MMLU_PRO": 48.54,
368
  "Architecture": "Qwen2ForCausalLM",
369
- "Parameters": "14.766B",
370
- "Chat_Template": "No"
371
  },
372
- "hf_url": "https://huggingface.co/sometimesanotion/Qwenvergence-14B-v12-Prose-DS",
373
- "known_config": None
374
  },
375
  {
376
  "rank": 4,
377
- "name": "bunnycore/Phi-4-Model-Stock-v4",
378
  "scores": {
379
- "average": 41.03,
380
- "IFEval": 71.10,
381
- "BBH": 55.90,
382
- "MATH": 37.16,
383
- "GPQA": 15.88,
384
- "MUSR": 17.30,
385
- "MMLU_PRO": 48.82,
386
- "Architecture": "LlamaForCausalLM",
387
- "Parameters": "14.66B",
388
  "Chat_Template": "Yes"
389
  },
390
- "hf_url": "https://huggingface.co/bunnycore/Phi-4-Model-Stock-v4",
391
- "known_config": None
392
  },
393
  {
394
  "rank": 5,
395
- "name": "sthenno/tempesthenno-nuslerp-0124",
396
  "scores": {
397
- "average": 40.97,
398
- "IFEval": 70.04,
399
- "BBH": 49.28,
400
- "MATH": 39.27,
401
- "GPQA": 18.68,
402
- "MUSR": 20.21,
403
- "MMLU_PRO": 48.36,
404
  "Architecture": "Qwen2ForCausalLM",
405
- "Parameters": "14.766B",
406
- "Chat_Template": "No"
407
  },
408
- "hf_url": "https://huggingface.co/sthenno/tempesthenno-nuslerp-0124",
409
- "known_config": None
410
  },
411
  {
412
  "rank": 6,
413
- "name": "bunnycore/Phi-4-RR-Shoup",
414
  "scores": {
415
- "average": 40.95,
416
- "IFEval": 65.87,
417
- "BBH": 56.11,
418
- "MATH": 47.96,
419
- "GPQA": 11.63,
420
- "MUSR": 14.94,
421
- "MMLU_PRO": 49.21,
422
- "Architecture": "LlamaForCausalLM",
423
- "Parameters": "14.66B",
424
  "Chat_Template": "Yes"
425
  },
426
- "hf_url": "https://huggingface.co/bunnycore/Phi-4-RR-Shoup",
427
- "known_config": None
428
  },
429
  {
430
  "rank": 7,
431
- "name": "sometimesanotion/Qwenvergence-14B-v10",
432
  "scores": {
433
- "average": 40.86,
434
- "IFEval": 67.57,
435
- "BBH": 46.75,
436
- "MATH": 44.18,
437
- "GPQA": 17.23,
438
- "MUSR": 22.33,
439
- "MMLU_PRO": 47.10,
440
  "Architecture": "Qwen2ForCausalLM",
441
  "Parameters": "14.766B",
442
- "Chat_Template": "No"
443
  },
444
- "hf_url": "https://huggingface.co/sometimesanotion/Qwenvergence-14B-v10",
445
- "known_config": None
446
  },
447
  {
448
  "rank": 8,
449
- "name": "bunnycore/Phi-4-RStock-v0.1",
450
  "scores": {
451
- "average": 40.84,
452
- "IFEval": 70.03,
453
- "BBH": 55.98,
454
- "MATH": 38.07,
455
- "GPQA": 15.32,
456
- "MUSR": 16.73,
457
- "MMLU_PRO": 48.90,
458
- "Architecture": "LlamaForCausalLM",
459
  "Parameters": "14.66B",
460
  "Chat_Template": "Yes"
461
  },
462
- "hf_url": "https://huggingface.co/bunnycore/Phi-4-RStock-v0.1",
463
- "known_config": None
464
  },
465
  {
466
  "rank": 9,
467
- "name": "jpacifico/Chocolatine-2-14B-Instruct-v2.0b3",
468
  "scores": {
469
- "average": 40.74,
470
- "IFEval": 73.23,
471
- "BBH": 49.57,
472
- "MATH": 36.93,
473
- "GPQA": 17.23,
474
- "MUSR": 19.30,
475
- "MMLU_PRO": 48.19,
476
  "Architecture": "Qwen2ForCausalLM",
477
- "Parameters": "14.766B",
478
- "Chat_Template": "No"
479
  },
480
- "hf_url": "https://huggingface.co/jpacifico/Chocolatine-2-14B-Instruct-v2.0b3",
481
- "known_config": None
482
  },
483
  {
484
  "rank": 10,
485
- "name": "bunnycore/Phi-4-ReasoningRP",
486
  "scores": {
487
- "average": 40.73,
488
- "IFEval": 67.36,
489
- "BBH": 55.88,
490
- "MATH": 44.34,
491
- "GPQA": 12.53,
492
- "MUSR": 15.14,
493
- "MMLU_PRO": 49.12,
494
- "Architecture": "LlamaForCausalLM",
495
- "Parameters": "14.66B",
496
  "Chat_Template": "Yes"
497
  },
498
- "hf_url": "https://huggingface.co/bunnycore/Phi-4-ReasoningRP",
499
- "known_config": None
500
  },
501
  {
502
  "rank": 11,
503
- "name": "sometimesanotion/Lamarck-14B-v0.7-rc1",
504
  "scores": {
505
- "average": 40.69,
506
- "IFEval": 73.05,
507
- "BBH": 49.51,
508
- "MATH": 35.80,
509
- "GPQA": 18.57,
510
- "MUSR": 18.13,
511
- "MMLU_PRO": 49.06,
512
  "Architecture": "Qwen2ForCausalLM",
513
  "Parameters": "14.766B",
514
- "Chat_Template": "No"
515
  },
516
- "hf_url": "https://huggingface.co/sometimesanotion/Lamarck-14B-v0.7-rc1",
517
- "known_config": None
518
  },
519
  {
520
  "rank": 12,
521
- "name": "jpacifico/Chocolatine-2-14B-Instruct-v2.0b2",
522
  "scores": {
523
- "average": 40.62,
524
- "IFEval": 72.41,
525
- "BBH": 49.58,
526
- "MATH": 35.73,
527
- "GPQA": 17.79,
528
- "MUSR": 19.66,
529
- "MMLU_PRO": 48.54,
530
  "Architecture": "Qwen2ForCausalLM",
531
- "Parameters": "14.766B",
532
- "Chat_Template": "No"
533
  },
534
- "hf_url": "https://huggingface.co/jpacifico/Chocolatine-2-14B-Instruct-v2.0b2",
535
- "known_config": None
536
  },
537
  {
538
  "rank": 13,
539
- "name": "sometimesanotion/Qwenvergence-14B-v11",
540
  "scores": {
541
- "average": 40.56,
542
- "IFEval": 71.92,
543
- "BBH": 47.55,
544
- "MATH": 40.71,
545
- "GPQA": 16.33,
546
- "MUSR": 18.76,
547
- "MMLU_PRO": 48.08,
548
  "Architecture": "Qwen2ForCausalLM",
549
- "Parameters": "14.766B",
550
- "Chat_Template": "No"
551
  },
552
- "hf_url": "https://huggingface.co/sometimesanotion/Qwenvergence-14B-v11",
553
- "known_config": None
554
  },
555
  {
556
  "rank": 14,
557
- "name": "sthenno/tempesthenno-ppo-ckpt40",
558
  "scores": {
559
- "average": 40.55,
560
- "IFEval": 79.23,
561
- "BBH": 50.57,
562
- "MATH": 34.21,
563
- "GPQA": 17.00,
564
- "MUSR": 14.56,
565
- "MMLU_PRO": 47.69,
566
  "Architecture": "Qwen2ForCausalLM",
567
  "Parameters": "14.766B",
568
- "Chat_Template": "Yes"
569
  },
570
- "hf_url": "https://huggingface.co/sthenno/tempesthenno-ppo-ckpt40",
571
- "known_config": None
572
  },
573
  {
574
  "rank": 15,
575
- "name": "tensopolis/virtuoso-small-v2-tensopolis-v1",
576
  "scores": {
577
- "average": 40.38,
578
- "IFEval": 80.94,
579
- "BBH": 50.46,
580
- "MATH": 35.88,
581
- "GPQA": 13.65,
582
- "MUSR": 15.82,
583
- "MMLU_PRO": 45.52,
584
  "Architecture": "Qwen2ForCausalLM",
585
- "Parameters": "14.766B",
586
  "Chat_Template": "Yes"
587
  },
588
- "hf_url": "https://huggingface.co/tensopolis/virtuoso-small-v2-tensopolis-v1",
589
- "known_config": None
590
  },
591
  {
592
  "rank": 16,
593
- "name": "sometimesanotion/Lamarck-14B-v0.6",
594
  "scores": {
595
- "average": 40.37,
596
- "IFEval": 69.73,
597
- "BBH": 49.30,
598
- "MATH": 35.65,
599
- "GPQA": 18.57,
600
- "MUSR": 20.12,
601
- "MMLU_PRO": 48.89,
602
  "Architecture": "Qwen2ForCausalLM",
603
  "Parameters": "14.766B",
604
  "Chat_Template": "No"
605
  },
606
- "hf_url": "https://huggingface.co/sometimesanotion/Lamarck-14B-v0.6",
607
- "known_config": None
608
  },
609
  {
610
  "rank": 17,
611
- "name": "sthenno-com/miscii-14b-0130",
612
  "scores": {
613
- "average": 40.29,
614
- "IFEval": 66.47,
615
- "BBH": 49.84,
616
- "MATH": 38.44,
617
- "GPQA": 17.56,
618
- "MUSR": 20.96,
619
- "MMLU_PRO": 48.48,
620
  "Architecture": "Qwen2ForCausalLM",
621
  "Parameters": "14.766B",
622
  "Chat_Template": "No"
623
  },
624
- "hf_url": "https://huggingface.co/sthenno-com/miscii-14b-0130",
625
- "known_config": None
626
  },
627
  {
628
  "rank": 18,
629
- "name": "hotmailuser/QwenSlerp2-14B",
630
  "scores": {
631
- "average": 40.21,
632
- "IFEval": 70.37,
633
- "BBH": 49.68,
634
- "MATH": 35.73,
635
- "GPQA": 17.45,
636
- "MUSR": 19.35,
637
- "MMLU_PRO": 48.66,
638
  "Architecture": "Qwen2ForCausalLM",
639
- "Parameters": "14.766B",
640
- "Chat_Template": "No"
641
  },
642
- "hf_url": "https://huggingface.co/hotmailuser/QwenSlerp2-14B",
643
- "known_config": None
644
  },
645
  {
646
  "rank": 19,
647
- "name": "Sakalti/ultiima-14B-v0.2",
648
  "scores": {
649
- "average": 40.18,
650
- "IFEval": 70.70,
651
- "BBH": 49.51,
652
- "MATH": 35.27,
653
- "GPQA": 17.67,
654
- "MUSR": 19.19,
655
- "MMLU_PRO": 48.75,
656
- "Architecture": "Qwen2ForCausalLM",
657
- "Parameters": "14.766B",
658
- "Chat_Template": "No"
659
  },
660
- "hf_url": "https://huggingface.co/Sakalti/ultiima-14B-v0.2",
661
- "known_config": None
662
  },
663
  {
664
  "rank": 20,
665
- "name": "pankajmathur/orca_mini_phi-4",
666
  "scores": {
667
- "average": 40.16,
668
- "IFEval": 77.81,
669
- "BBH": 54.63,
670
- "MATH": 26.44,
671
- "GPQA": 16.55,
672
- "MUSR": 18.25,
673
- "MMLU_PRO": 47.28,
674
- "Architecture": "LlamaForCausalLM",
675
- "Parameters": "14.66B",
676
  "Chat_Template": "Yes"
677
  },
678
- "hf_url": "https://huggingface.co/pankajmathur/orca_mini_phi-4",
679
- "known_config": None
680
  },
681
  {
682
  "rank": 21,
683
- "name": "pankajmathur/orca_mini_v9_2_14B",
684
  "scores": {
685
- "average": 40.16,
686
- "IFEval": 77.81,
687
- "BBH": 54.63,
688
- "MATH": 26.44,
689
- "GPQA": 16.55,
690
- "MUSR": 18.25,
691
- "MMLU_PRO": 47.28,
692
- "Architecture": "LlamaForCausalLM",
693
- "Parameters": "14.66B",
694
  "Chat_Template": "Yes"
695
  },
696
- "hf_url": "https://huggingface.co/pankajmathur/orca_mini_v9_2_14B",
697
- "known_config": None
698
  },
699
  {
700
- "rank": 22,
701
- "name": "sometimesanotion/Qwen2.5-14B-Vimarckoso-v3",
702
- "scores": {
703
- "average": 40.10,
704
- "IFEval": 72.57,
705
- "BBH": 48.58,
706
- "MATH": 34.44,
707
- "GPQA": 17.34,
708
- "MUSR": 19.39,
709
- "MMLU_PRO": 48.26,
710
  "Architecture": "Qwen2ForCausalLM",
711
- "Parameters": "14B",
712
- "Chat_Template": "No"
713
  },
714
- "hf_url": "https://huggingface.co/sometimesanotion/Qwen2.5-14B-Vimarckoso-v3",
715
- "known_config": None
716
  },
717
  {
718
- "rank": 23,
719
- "name": "sthenno-com/miscii-14b-1225",
720
  "scores": {
721
- "average": 40.08,
722
- "IFEval": 78.78,
723
- "BBH": 50.91,
724
- "MATH": 31.57,
725
- "GPQA": 17.00,
726
- "MUSR": 14.77,
727
- "MMLU_PRO": 47.46,
728
  "Architecture": "Qwen2ForCausalLM",
729
- "Parameters": "14.766B",
730
  "Chat_Template": "Yes"
731
  },
732
- "hf_url": "https://huggingface.co/sthenno-com/miscii-14b-1225",
733
- "known_config": None
734
  },
735
  {
736
  "rank": 24,
737
- "name": "bunnycore/Phi-4-Model-Stock",
738
  "scores": {
739
- "average": 40.06,
740
- "IFEval": 68.79,
741
- "BBH": 55.32,
742
- "MATH": 38.60,
743
- "GPQA": 13.98,
744
- "MUSR": 15.12,
745
- "MMLU_PRO": 48.54,
746
- "Architecture": "LlamaForCausalLM",
747
- "Parameters": "14.66B",
748
- "Chat_Template": "Yes"
749
  },
750
- "hf_url": "https://huggingface.co/bunnycore/Phi-4-Model-Stock",
751
- "known_config": None
752
  },
753
  {
754
  "rank": 25,
755
- "name": "djuna/Q2.5-Veltha-14B-0.5",
756
  "scores": {
757
- "average": 39.96,
758
- "IFEval": 77.96,
759
- "BBH": 50.32,
760
- "MATH": 33.84,
761
- "GPQA": 15.77,
762
- "MUSR": 14.17,
763
- "MMLU_PRO": 47.72,
764
  "Architecture": "Qwen2ForCausalLM",
765
  "Parameters": "14.766B",
766
- "Chat_Template": "Yes"
767
  },
768
- "hf_url": "https://huggingface.co/djuna/Q2.5-Veltha-14B-0.5",
769
- "known_config": None
770
  },
771
  {
772
  "rank": 26,
773
- "name": "ehristoforu/fp4-14b-v1-fix",
774
  "scores": {
775
- "average": 39.96,
776
- "IFEval": 67.42,
777
- "BBH": 54.33,
778
- "MATH": 39.58,
779
- "GPQA": 13.87,
780
- "MUSR": 16.18,
781
- "MMLU_PRO": 48.37,
782
- "Architecture": "LlamaForCausalLM",
783
- "Parameters": "14.66B",
784
  "Chat_Template": "Yes"
785
  },
786
- "hf_url": "https://huggingface.co/ehristoforu/fp4-14b-v1-fix",
787
- "known_config": None
788
  },
789
  {
790
  "rank": 27,
791
- "name": "sthenno/tempesthenno-nuslerp-001",
792
  "scores": {
793
- "average": 39.94,
794
- "IFEval": 79.26,
795
- "BBH": 51.04,
796
- "MATH": 31.72,
797
- "GPQA": 16.44,
798
- "MUSR": 13.88,
799
- "MMLU_PRO": 47.30,
800
  "Architecture": "Qwen2ForCausalLM",
801
- "Parameters": "14.766B",
802
  "Chat_Template": "Yes"
803
  },
804
- "hf_url": "https://huggingface.co/sthenno/tempesthenno-nuslerp-001",
805
- "known_config": None
806
  },
807
  {
808
  "rank": 28,
809
- "name": "bunnycore/Phi-4-Stock-Ex",
810
  "scores": {
811
- "average": 39.93,
812
- "IFEval": 65.75,
813
- "BBH": 55.20,
814
- "MATH": 39.12,
815
- "GPQA": 13.42,
816
- "MUSR": 17.46,
817
- "MMLU_PRO": 48.61,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
818
  "Architecture": "LlamaForCausalLM",
819
  "Parameters": "14.66B",
820
  "Chat_Template": "Yes"
821
  },
822
- "hf_url": "https://huggingface.co/bunnycore/Phi-4-Stock-Ex",
823
- "known_config": None
824
  },
825
  {
826
- "rank": 29,
827
- "name": "hotmailuser/QwenSlerp-14B",
828
  "scores": {
829
- "average": 39.87,
830
- "IFEval": 70.25,
831
- "BBH": 49.42,
832
- "MATH": 35.50,
833
- "GPQA": 18.34,
834
- "MUSR": 16.83,
835
- "MMLU_PRO": 48.89,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
836
  "Architecture": "Qwen2ForCausalLM",
837
  "Parameters": "14.766B",
838
  "Chat_Template": "No"
839
  },
840
- "hf_url": "https://huggingface.co/hotmailuser/QwenSlerp-14B",
841
- "known_config": None
842
  },
843
  {
844
- "rank": 30,
845
- "name": "sometimesanotion/Qwen2.5-14B-Vimarckoso-v3-model_stock",
846
- "scores": {
847
- "average": 39.81,
848
- "IFEval": 71.62,
849
- "BBH": 48.76,
850
- "MATH": 33.99,
851
- "GPQA": 17.34,
852
- "MUSR": 19.23,
853
- "MMLU_PRO": 47.95,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
854
  "Architecture": "Qwen2ForCausalLM",
855
- "Parameters": "14B",
856
- "Chat_Template": "No"
857
  },
858
- "hf_url": "https://huggingface.co/sometimesanotion/Qwen2.5-14B-Vimarckoso-v3-model_stock",
859
- "known_config": None
860
  },
861
  {
862
- "rank": 31,
863
- "name": "tensopolis/virtuoso-small-tensopolis-v1",
864
  "scores": {
865
- "average": 39.69,
866
- "IFEval": 79.50,
867
- "BBH": 50.70,
868
- "MATH": 36.03,
869
- "GPQA": 10.85,
870
- "MUSR": 14.70,
871
- "MMLU_PRO": 46.36,
872
  "Architecture": "Qwen2ForCausalLM",
873
  "Parameters": "14.77B",
874
  "Chat_Template": "Yes"
875
  },
876
- "hf_url": "https://huggingface.co/tensopolis/virtuoso-small-tensopolis-v1",
877
- "known_config": None
878
  },
879
  {
880
- "rank": 32,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
881
  "name": "sometimesanotion/Lamarck-14B-v0.6-model_stock",
882
  "scores": {
883
- "average": 39.58,
884
- "IFEval": 67.90,
885
  "BBH": 46.49,
886
- "MATH": 35.88,
887
- "GPQA": 17.90,
888
  "MUSR": 22.68,
889
  "MMLU_PRO": 46.64,
890
  "Architecture": "Qwen2ForCausalLM",
@@ -892,34 +1144,142 @@ benchmark_data = [
892
  "Chat_Template": "No"
893
  },
894
  "hf_url": "https://huggingface.co/sometimesanotion/Lamarck-14B-v0.6-model_stock",
895
- "known_config": None
896
  },
897
  {
898
- "rank": 33,
899
- "name": "tensopolis/virtuoso-small-tensopolis-v2",
900
  "scores": {
901
- "average": 39.53,
902
- "IFEval": 80.20,
903
- "BBH": 50.23,
904
- "MATH": 35.27,
905
- "GPQA": 10.51,
906
- "MUSR": 14.84,
907
- "MMLU_PRO": 46.15,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
908
  "Architecture": "Qwen2ForCausalLM",
909
  "Parameters": "14.77B",
910
  "Chat_Template": "Yes"
911
  },
912
- "hf_url": "https://huggingface.co/tensopolis/virtuoso-small-tensopolis-v2",
913
- "known_config": None
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
914
  },
915
  {
916
- "rank": 34,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
917
  "name": "Sakalti/ultiima-14B-v0.3",
918
  "scores": {
919
- "average": 39.53,
920
- "IFEval": 70.40,
921
  "BBH": 48.45,
922
- "MATH": 34.52,
923
  "GPQA": 16.89,
924
  "MUSR": 18.73,
925
  "MMLU_PRO": 48.18,
@@ -928,10 +1288,263 @@ benchmark_data = [
928
  "Chat_Template": "No"
929
  },
930
  "hf_url": "https://huggingface.co/Sakalti/ultiima-14B-v0.3",
931
- "known_config": None
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
932
  }
933
  ]
934
 
 
935
  def snippet_scrape_model_page(url):
936
  """
937
  Equivalent scraping function for the larger dataset
 
320
  benchmark_data = [
321
  {
322
  "rank": 1,
323
+ "name": "suayptalha/Lamarckvergence-14B",
324
  "scores": {
325
+ "average": 43.32,
326
+ "IFEval": 76.56,
327
+ "BBH": 50.33,
328
+ "MATH": 54.0,
329
+ "GPQA": 15.1,
330
+ "MUSR": 16.34,
331
+ "MMLU_PRO": 47.59,
332
  "Architecture": "Qwen2ForCausalLM",
333
  "Parameters": "14.766B",
334
+ "Chat_Template": "Yes"
335
  },
336
+ "hf_url": "https://huggingface.co/suayptalha/Lamarckvergence-14B",
337
+ "known_config": null
338
  },
339
  {
340
  "rank": 2,
341
+ "name": "sthenno/tempesthenno-ppo-ckpt40",
342
  "scores": {
343
+ "average": 42.74,
344
+ "IFEval": 79.23,
345
+ "BBH": 50.57,
346
+ "MATH": 47.36,
347
+ "GPQA": 17.0,
348
+ "MUSR": 14.56,
349
+ "MMLU_PRO": 47.69,
350
  "Architecture": "Qwen2ForCausalLM",
351
  "Parameters": "14.766B",
352
  "Chat_Template": "Yes"
353
  },
354
+ "hf_url": "https://huggingface.co/sthenno/tempesthenno-ppo-ckpt40",
355
+ "known_config": null
356
  },
357
  {
358
  "rank": 3,
359
+ "name": "tanliboy/lambda-qwen2.5-14b-dpo-test",
360
+ "scores": {
361
+ "average": 42.62,
362
+ "IFEval": 82.31,
363
+ "BBH": 48.45,
364
+ "MATH": 54.61,
365
+ "GPQA": 14.99,
366
+ "MUSR": 12.59,
367
+ "MMLU_PRO": 42.75,
368
  "Architecture": "Qwen2ForCausalLM",
369
+ "Parameters": "14.77B",
370
+ "Chat_Template": "Yes"
371
  },
372
+ "hf_url": "https://huggingface.co/tanliboy/lambda-qwen2.5-14b-dpo-test",
373
+ "known_config": null
374
  },
375
  {
376
  "rank": 4,
377
+ "name": "sthenno/tempesthenno-nuslerp-001",
378
  "scores": {
379
+ "average": 42.59,
380
+ "IFEval": 79.26,
381
+ "BBH": 51.04,
382
+ "MATH": 47.58,
383
+ "GPQA": 16.44,
384
+ "MUSR": 13.88,
385
+ "MMLU_PRO": 47.3,
386
+ "Architecture": "Qwen2ForCausalLM",
387
+ "Parameters": "14.766B",
388
  "Chat_Template": "Yes"
389
  },
390
+ "hf_url": "https://huggingface.co/sthenno/tempesthenno-nuslerp-001",
391
+ "known_config": null
392
  },
393
  {
394
  "rank": 5,
395
+ "name": "Goekdeniz-Guelmez/Josiefied-Qwen2.5-14B-Instruct-abliterated-v4",
396
  "scores": {
397
+ "average": 42.55,
398
+ "IFEval": 82.92,
399
+ "BBH": 48.05,
400
+ "MATH": 54.23,
401
+ "GPQA": 12.3,
402
+ "MUSR": 13.15,
403
+ "MMLU_PRO": 44.65,
404
  "Architecture": "Qwen2ForCausalLM",
405
+ "Parameters": "14.77B",
406
+ "Chat_Template": "Yes"
407
  },
408
+ "hf_url": "https://huggingface.co/Goekdeniz-Guelmez/Josiefied-Qwen2.5-14B-Instruct-abliterated-v4",
409
+ "known_config": null
410
  },
411
  {
412
  "rank": 6,
413
+ "name": "djuna/Q2.5-Veltha-14B",
414
  "scores": {
415
+ "average": 42.52,
416
+ "IFEval": 82.92,
417
+ "BBH": 49.75,
418
+ "MATH": 47.89,
419
+ "GPQA": 14.54,
420
+ "MUSR": 12.26,
421
+ "MMLU_PRO": 47.76,
422
+ "Architecture": "Qwen2ForCausalLM",
423
+ "Parameters": "14.766B",
424
  "Chat_Template": "Yes"
425
  },
426
+ "hf_url": "https://huggingface.co/djuna/Q2.5-Veltha-14B",
427
+ "known_config": null
428
  },
429
  {
430
  "rank": 7,
431
+ "name": "arcee-ai/Virtuoso-Small-v2",
432
  "scores": {
433
+ "average": 42.48,
434
+ "IFEval": 82.73,
435
+ "BBH": 50.95,
436
+ "MATH": 46.6,
437
+ "GPQA": 13.76,
438
+ "MUSR": 14.28,
439
+ "MMLU_PRO": 46.53,
440
  "Architecture": "Qwen2ForCausalLM",
441
  "Parameters": "14.766B",
442
+ "Chat_Template": "Yes"
443
  },
444
+ "hf_url": "https://huggingface.co/arcee-ai/Virtuoso-Small-v2",
445
+ "known_config": null
446
  },
447
  {
448
  "rank": 8,
449
+ "name": "jpacifico/Chocolatine-14B-Instruct-DPO-v1.3",
450
  "scores": {
451
+ "average": 42.42,
452
+ "IFEval": 70.4,
453
+ "BBH": 54.85,
454
+ "MATH": 56.19,
455
+ "GPQA": 12.19,
456
+ "MUSR": 12.29,
457
+ "MMLU_PRO": 48.6,
458
+ "Architecture": "Phi3ForCausalLM",
459
  "Parameters": "14.66B",
460
  "Chat_Template": "Yes"
461
  },
462
+ "hf_url": "https://huggingface.co/jpacifico/Chocolatine-14B-Instruct-DPO-v1.3",
463
+ "known_config": null
464
  },
465
  {
466
  "rank": 9,
467
+ "name": "sthenno-com/miscii-14b-1028",
468
  "scores": {
469
+ "average": 42.38,
470
+ "IFEval": 82.37,
471
+ "BBH": 49.26,
472
+ "MATH": 50.3,
473
+ "GPQA": 14.21,
474
+ "MUSR": 12.0,
475
+ "MMLU_PRO": 46.14,
476
  "Architecture": "Qwen2ForCausalLM",
477
+ "Parameters": "14.77B",
478
+ "Chat_Template": "Yes"
479
  },
480
+ "hf_url": "https://huggingface.co/sthenno-com/miscii-14b-1028",
481
+ "known_config": null
482
  },
483
  {
484
  "rank": 10,
485
+ "name": "sthenno-com/miscii-14b-1225",
486
  "scores": {
487
+ "average": 42.35,
488
+ "IFEval": 78.78,
489
+ "BBH": 50.91,
490
+ "MATH": 45.17,
491
+ "GPQA": 17.0,
492
+ "MUSR": 14.77,
493
+ "MMLU_PRO": 47.46,
494
+ "Architecture": "Qwen2ForCausalLM",
495
+ "Parameters": "14.766B",
496
  "Chat_Template": "Yes"
497
  },
498
+ "hf_url": "https://huggingface.co/sthenno-com/miscii-14b-1225",
499
+ "known_config": null
500
  },
501
  {
502
  "rank": 11,
503
+ "name": "tensopolis/virtuoso-small-v2-tensopolis-v1",
504
  "scores": {
505
+ "average": 42.34,
506
+ "IFEval": 83.4,
507
+ "BBH": 50.99,
508
+ "MATH": 46.6,
509
+ "GPQA": 12.98,
510
+ "MUSR": 13.38,
511
+ "MMLU_PRO": 46.67,
512
  "Architecture": "Qwen2ForCausalLM",
513
  "Parameters": "14.766B",
514
+ "Chat_Template": "Yes"
515
  },
516
+ "hf_url": "https://huggingface.co/tensopolis/virtuoso-small-v2-tensopolis-v1",
517
+ "known_config": null
518
  },
519
  {
520
  "rank": 12,
521
+ "name": "rombodawg/Rombos-LLM-V2.6-Qwen-14b",
522
  "scores": {
523
+ "average": 42.2,
524
+ "IFEval": 84.32,
525
+ "BBH": 49.28,
526
+ "MATH": 52.11,
527
+ "GPQA": 11.19,
528
+ "MUSR": 12.29,
529
+ "MMLU_PRO": 44.01,
530
  "Architecture": "Qwen2ForCausalLM",
531
+ "Parameters": "14.77B",
532
+ "Chat_Template": "Yes"
533
  },
534
+ "hf_url": "https://huggingface.co/rombodawg/Rombos-LLM-V2.6-Qwen-14b",
535
+ "known_config": null
536
  },
537
  {
538
  "rank": 13,
539
+ "name": "1024m/QWEN-14B-B100",
540
  "scores": {
541
+ "average": 41.92,
542
+ "IFEval": 77.62,
543
+ "BBH": 49.78,
544
+ "MATH": 54.38,
545
+ "GPQA": 13.42,
546
+ "MUSR": 9.88,
547
+ "MMLU_PRO": 46.43,
548
  "Architecture": "Qwen2ForCausalLM",
549
+ "Parameters": "14.77B",
550
+ "Chat_Template": "Yes"
551
  },
552
+ "hf_url": "https://huggingface.co/1024m/QWEN-14B-B100",
553
+ "known_config": null
554
  },
555
  {
556
  "rank": 14,
557
+ "name": "Sakalti/Saka-14B",
558
  "scores": {
559
+ "average": 41.91,
560
+ "IFEval": 71.74,
561
+ "BBH": 49.72,
562
+ "MATH": 40.94,
563
+ "GPQA": 19.46,
564
+ "MUSR": 20.74,
565
+ "MMLU_PRO": 48.84,
566
  "Architecture": "Qwen2ForCausalLM",
567
  "Parameters": "14.766B",
568
+ "Chat_Template": "No"
569
  },
570
+ "hf_url": "https://huggingface.co/Sakalti/Saka-14B",
571
+ "known_config": null
572
  },
573
  {
574
  "rank": 15,
575
+ "name": "Tsunami-th/Tsunami-1.0-14B-Instruct",
576
  "scores": {
577
+ "average": 41.84,
578
+ "IFEval": 78.29,
579
+ "BBH": 49.15,
580
+ "MATH": 45.85,
581
+ "GPQA": 14.21,
582
+ "MUSR": 16.34,
583
+ "MMLU_PRO": 47.21,
584
  "Architecture": "Qwen2ForCausalLM",
585
+ "Parameters": "14.77B",
586
  "Chat_Template": "Yes"
587
  },
588
+ "hf_url": "https://huggingface.co/Tsunami-th/Tsunami-1.0-14B-Instruct",
589
+ "known_config": null
590
  },
591
  {
592
  "rank": 16,
593
+ "name": "sthenno/tempesthenno-kto-0205-ckpt80",
594
  "scores": {
595
+ "average": 41.79,
596
+ "IFEval": 80.54,
597
+ "BBH": 50.64,
598
+ "MATH": 45.92,
599
+ "GPQA": 13.09,
600
+ "MUSR": 12.93,
601
+ "MMLU_PRO": 47.62,
602
  "Architecture": "Qwen2ForCausalLM",
603
  "Parameters": "14.766B",
604
  "Chat_Template": "No"
605
  },
606
+ "hf_url": "https://huggingface.co/sthenno/tempesthenno-kto-0205-ckpt80",
607
+ "known_config": null
608
  },
609
  {
610
  "rank": 17,
611
+ "name": "sometimesanotion/Lamarck-14B-v0.7-rc4",
612
  "scores": {
613
+ "average": 41.79,
614
+ "IFEval": 72.11,
615
+ "BBH": 49.85,
616
+ "MATH": 40.26,
617
+ "GPQA": 18.57,
618
+ "MUSR": 21.07,
619
+ "MMLU_PRO": 48.89,
620
  "Architecture": "Qwen2ForCausalLM",
621
  "Parameters": "14.766B",
622
  "Chat_Template": "No"
623
  },
624
+ "hf_url": "https://huggingface.co/sometimesanotion/Lamarck-14B-v0.7-rc4",
625
+ "known_config": null
626
  },
627
  {
628
  "rank": 18,
629
+ "name": "CombinHorizon/Josiefied-abliteratedV4-Qwen2.5-14B-Inst-BaseMerge-TIES",
630
  "scores": {
631
+ "average": 41.77,
632
+ "IFEval": 82.4,
633
+ "BBH": 48.2,
634
+ "MATH": 53.17,
635
+ "GPQA": 9.96,
636
+ "MUSR": 12.65,
637
+ "MMLU_PRO": 44.21,
638
  "Architecture": "Qwen2ForCausalLM",
639
+ "Parameters": "14.77B",
640
+ "Chat_Template": "Yes"
641
  },
642
+ "hf_url": "https://huggingface.co/CombinHorizon/Josiefied-abliteratedV4-Qwen2.5-14B-Inst-BaseMerge-TIES",
643
+ "known_config": null
644
  },
645
  {
646
  "rank": 19,
647
+ "name": "suayptalha/Luminis-phi-4",
648
  "scores": {
649
+ "average": 41.76,
650
+ "IFEval": 69.0,
651
+ "BBH": 55.8,
652
+ "MATH": 46.37,
653
+ "GPQA": 13.53,
654
+ "MUSR": 16.68,
655
+ "MMLU_PRO": 49.15,
656
+ "Architecture": "LlamaForCausalLM",
657
+ "Parameters": "14.66B",
658
+ "Chat_Template": "Yes"
659
  },
660
+ "hf_url": "https://huggingface.co/suayptalha/Luminis-phi-4",
661
+ "known_config": null
662
  },
663
  {
664
  "rank": 20,
665
+ "name": "huihui-ai/Qwen2.5-14B-Instruct-abliterated-v2",
666
  "scores": {
667
+ "average": 41.75,
668
+ "IFEval": 83.28,
669
+ "BBH": 47.41,
670
+ "MATH": 53.02,
671
+ "GPQA": 11.19,
672
+ "MUSR": 11.58,
673
+ "MMLU_PRO": 44.02,
674
+ "Architecture": "Qwen2ForCausalLM",
675
+ "Parameters": "14.77B",
676
  "Chat_Template": "Yes"
677
  },
678
+ "hf_url": "https://huggingface.co/huihui-ai/Qwen2.5-14B-Instruct-abliterated-v2",
679
+ "known_config": null
680
  },
681
  {
682
  "rank": 21,
683
+ "name": "djuna/Q2.5-Veltha-14B-0.5",
684
  "scores": {
685
+ "average": 41.61,
686
+ "IFEval": 77.96,
687
+ "BBH": 50.32,
688
+ "MATH": 43.73,
689
+ "GPQA": 15.77,
690
+ "MUSR": 14.17,
691
+ "MMLU_PRO": 47.72,
692
+ "Architecture": "Qwen2ForCausalLM",
693
+ "Parameters": "14.766B",
694
  "Chat_Template": "Yes"
695
  },
696
+ "hf_url": "https://huggingface.co/djuna/Q2.5-Veltha-14B-0.5",
697
+ "known_config": null
698
  },
699
  {
700
+ "rank": 23,
701
+ "name": "Qwen/Qwen2.5-14B-Instruct-1M",
702
+ "scores": {
703
+ "average": 41.56,
704
+ "IFEval": 84.14,
705
+ "BBH": 45.66,
706
+ "MATH": 53.02,
707
+ "GPQA": 12.42,
708
+ "MUSR": 11.35,
709
+ "MMLU_PRO": 42.77,
710
  "Architecture": "Qwen2ForCausalLM",
711
+ "Parameters": "14.77B",
712
+ "Chat_Template": "Yes"
713
  },
714
+ "hf_url": "https://huggingface.co/Qwen/Qwen2.5-14B-Instruct-1M",
715
+ "known_config": null
716
  },
717
  {
718
+ "rank": 22,
719
+ "name": "notbdq/Qwen2.5-14B-Instruct-1M-GRPO-Reasoning",
720
  "scores": {
721
+ "average": 41.56,
722
+ "IFEval": 84.14,
723
+ "BBH": 45.66,
724
+ "MATH": 53.02,
725
+ "GPQA": 12.42,
726
+ "MUSR": 11.35,
727
+ "MMLU_PRO": 42.77,
728
  "Architecture": "Qwen2ForCausalLM",
729
+ "Parameters": "14.77B",
730
  "Chat_Template": "Yes"
731
  },
732
+ "hf_url": "https://huggingface.co/notbdq/Qwen2.5-14B-Instruct-1M-GRPO-Reasoning",
733
+ "known_config": null
734
  },
735
  {
736
  "rank": 24,
737
+ "name": "sometimesanotion/Qwenvergence-14B-v11",
738
  "scores": {
739
+ "average": 41.52,
740
+ "IFEval": 71.92,
741
+ "BBH": 47.55,
742
+ "MATH": 46.45,
743
+ "GPQA": 16.33,
744
+ "MUSR": 18.76,
745
+ "MMLU_PRO": 48.08,
746
+ "Architecture": "Qwen2ForCausalLM",
747
+ "Parameters": "14.766B",
748
+ "Chat_Template": "No"
749
  },
750
+ "hf_url": "https://huggingface.co/sometimesanotion/Qwenvergence-14B-v11",
751
+ "known_config": null
752
  },
753
  {
754
  "rank": 25,
755
+ "name": "sometimesanotion/Qwenvergence-14B-v10",
756
  "scores": {
757
+ "average": 41.48,
758
+ "IFEval": 67.57,
759
+ "BBH": 46.75,
760
+ "MATH": 47.89,
761
+ "GPQA": 17.23,
762
+ "MUSR": 22.33,
763
+ "MMLU_PRO": 47.1,
764
  "Architecture": "Qwen2ForCausalLM",
765
  "Parameters": "14.766B",
766
+ "Chat_Template": "No"
767
  },
768
+ "hf_url": "https://huggingface.co/sometimesanotion/Qwenvergence-14B-v10",
769
+ "known_config": null
770
  },
771
  {
772
  "rank": 26,
773
+ "name": "CombinHorizon/huihui-ai-abliteratedV2-Qwen2.5-14B-Inst-BaseMerge-TIES",
774
  "scores": {
775
+ "average": 41.47,
776
+ "IFEval": 81.76,
777
+ "BBH": 47.77,
778
+ "MATH": 54.76,
779
+ "GPQA": 8.61,
780
+ "MUSR": 12.45,
781
+ "MMLU_PRO": 43.45,
782
+ "Architecture": "Qwen2ForCausalLM",
783
+ "Parameters": "14.77B",
784
  "Chat_Template": "Yes"
785
  },
786
+ "hf_url": "https://huggingface.co/CombinHorizon/huihui-ai-abliteratedV2-Qwen2.5-14B-Inst-BaseMerge-TIES",
787
+ "known_config": null
788
  },
789
  {
790
  "rank": 27,
791
+ "name": "RDson/WomboCombo-R1-Coder-14B-Preview",
792
  "scores": {
793
+ "average": 41.46,
794
+ "IFEval": 62.86,
795
+ "BBH": 48.15,
796
+ "MATH": 59.89,
797
+ "GPQA": 9.51,
798
+ "MUSR": 22.01,
799
+ "MMLU_PRO": 46.31,
800
  "Architecture": "Qwen2ForCausalLM",
801
+ "Parameters": "14.77B",
802
  "Chat_Template": "Yes"
803
  },
804
+ "hf_url": "https://huggingface.co/RDson/WomboCombo-R1-Coder-14B-Preview",
805
+ "known_config": null
806
  },
807
  {
808
  "rank": 28,
809
+ "name": "jpacifico/Chocolatine-2-14B-Instruct-v2.0b3",
810
  "scores": {
811
+ "average": 41.43,
812
+ "IFEval": 73.23,
813
+ "BBH": 49.57,
814
+ "MATH": 41.09,
815
+ "GPQA": 17.23,
816
+ "MUSR": 19.3,
817
+ "MMLU_PRO": 48.19,
818
+ "Architecture": "Qwen2ForCausalLM",
819
+ "Parameters": "14.766B",
820
+ "Chat_Template": "No"
821
+ },
822
+ "hf_url": "https://huggingface.co/jpacifico/Chocolatine-2-14B-Instruct-v2.0b3",
823
+ "known_config": null
824
+ },
825
+ {
826
+ "rank": 29,
827
+ "name": "Quazim0t0/Nova-14b-sce",
828
+ "scores": {
829
+ "average": 41.41,
830
+ "IFEval": 70.22,
831
+ "BBH": 56.03,
832
+ "MATH": 41.62,
833
+ "GPQA": 15.1,
834
+ "MUSR": 16.43,
835
+ "MMLU_PRO": 49.03,
836
  "Architecture": "LlamaForCausalLM",
837
  "Parameters": "14.66B",
838
  "Chat_Template": "Yes"
839
  },
840
+ "hf_url": "https://huggingface.co/Quazim0t0/Nova-14b-sce",
841
+ "known_config": null
842
  },
843
  {
844
+ "rank": 30,
845
+ "name": "v000000/Qwen2.5-14B-Gutenberg-Instruct-Slerpeno",
846
  "scores": {
847
+ "average": 41.36,
848
+ "IFEval": 81.97,
849
+ "BBH": 48.45,
850
+ "MATH": 53.25,
851
+ "GPQA": 10.85,
852
+ "MUSR": 10.05,
853
+ "MMLU_PRO": 43.59,
854
+ "Architecture": "Qwen2ForCausalLM",
855
+ "Parameters": "14.77B",
856
+ "Chat_Template": "Yes"
857
+ },
858
+ "hf_url": "https://huggingface.co/v000000/Qwen2.5-14B-Gutenberg-Instruct-Slerpeno",
859
+ "known_config": null
860
+ },
861
+ {
862
+ "rank": 31,
863
+ "name": "Quazim0t0/NovaScotia-14b-stock",
864
+ "scores": {
865
+ "average": 41.35,
866
+ "IFEval": 67.87,
867
+ "BBH": 56.03,
868
+ "MATH": 46.3,
869
+ "GPQA": 13.2,
870
+ "MUSR": 15.7,
871
+ "MMLU_PRO": 48.99,
872
+ "Architecture": "LlamaForCausalLM",
873
+ "Parameters": "14.66B",
874
+ "Chat_Template": "Yes"
875
+ },
876
+ "hf_url": "https://huggingface.co/Quazim0t0/NovaScotia-14b-stock",
877
+ "known_config": null
878
+ },
879
+ {
880
+ "rank": 32,
881
+ "name": "Quazim0t0/ODB-14b-sce",
882
+ "scores": {
883
+ "average": 41.34,
884
+ "IFEval": 70.16,
885
+ "BBH": 56.19,
886
+ "MATH": 41.16,
887
+ "GPQA": 14.99,
888
+ "MUSR": 16.5,
889
+ "MMLU_PRO": 49.02,
890
+ "Architecture": "LlamaForCausalLM",
891
+ "Parameters": "14.66B",
892
+ "Chat_Template": "Yes"
893
+ },
894
+ "hf_url": "https://huggingface.co/Quazim0t0/ODB-14b-sce",
895
+ "known_config": null
896
+ },
897
+ {
898
+ "rank": 54,
899
+ "name": "LightningRodLabs/Flashlight-v1.1",
900
+ "scores": {
901
+ "average": 40.99,
902
+ "IFEval": 67.21,
903
+ "BBH": 55.43,
904
+ "MATH": 53.25,
905
+ "GPQA": 11.97,
906
+ "MUSR": 9.0,
907
+ "MMLU_PRO": 49.06,
908
+ "Architecture": "Phi3ForCausalLM",
909
+ "Parameters": "14.66B",
910
+ "Chat_Template": "Yes"
911
+ },
912
+ "hf_url": "https://huggingface.co/LightningRodLabs/Flashlight-v1.1",
913
+ "known_config": null
914
+ },
915
+ {
916
+ "rank": 55,
917
+ "name": "Quazim0t0/Mithril-14B-sce",
918
+ "scores": {
919
+ "average": 40.98,
920
+ "IFEval": 69.58,
921
+ "BBH": 55.93,
922
+ "MATH": 38.22,
923
+ "GPQA": 15.88,
924
+ "MUSR": 17.37,
925
+ "MMLU_PRO": 48.92,
926
+ "Architecture": "LlamaForCausalLM",
927
+ "Parameters": "14.66B",
928
+ "Chat_Template": "Yes"
929
+ },
930
+ "hf_url": "https://huggingface.co/Quazim0t0/Mithril-14B-sce",
931
+ "known_config": null
932
+ },
933
+ {
934
+ "rank": 56,
935
+ "name": "Sakalti/ultiima-14B-v0.2",
936
+ "scores": {
937
+ "average": 40.96,
938
+ "IFEval": 70.7,
939
+ "BBH": 49.51,
940
+ "MATH": 39.95,
941
+ "GPQA": 17.67,
942
+ "MUSR": 19.19,
943
+ "MMLU_PRO": 48.75,
944
  "Architecture": "Qwen2ForCausalLM",
945
  "Parameters": "14.766B",
946
  "Chat_Template": "No"
947
  },
948
+ "hf_url": "https://huggingface.co/Sakalti/ultiima-14B-v0.2",
949
+ "known_config": null
950
  },
951
  {
952
+ "rank": 57,
953
+ "name": "bunnycore/Phi-4-ReasoningRP",
954
+ "scores": {
955
+ "average": 40.95,
956
+ "IFEval": 67.36,
957
+ "BBH": 55.88,
958
+ "MATH": 45.69,
959
+ "GPQA": 12.53,
960
+ "MUSR": 15.14,
961
+ "MMLU_PRO": 49.12,
962
+ "Architecture": "LlamaForCausalLM",
963
+ "Parameters": "14.66B",
964
+ "Chat_Template": "Yes"
965
+ },
966
+ "hf_url": "https://huggingface.co/bunnycore/Phi-4-ReasoningRP",
967
+ "known_config": null
968
+ },
969
+ {
970
+ "rank": 58,
971
+ "name": "dwikitheduck/gen-inst-1",
972
+ "scores": {
973
+ "average": 40.88,
974
+ "IFEval": 77.5,
975
+ "BBH": 48.32,
976
+ "MATH": 45.54,
977
+ "GPQA": 16.22,
978
+ "MUSR": 12.27,
979
+ "MMLU_PRO": 45.43,
980
  "Architecture": "Qwen2ForCausalLM",
981
+ "Parameters": "14.77B",
982
+ "Chat_Template": "Yes"
983
  },
984
+ "hf_url": "https://huggingface.co/dwikitheduck/gen-inst-1",
985
+ "known_config": null
986
  },
987
  {
988
+ "rank": 59,
989
+ "name": "v000000/Qwen2.5-14B-Gutenberg-1e-Delta",
990
  "scores": {
991
+ "average": 40.88,
992
+ "IFEval": 80.45,
993
+ "BBH": 48.62,
994
+ "MATH": 52.64,
995
+ "GPQA": 10.51,
996
+ "MUSR": 9.38,
997
+ "MMLU_PRO": 43.67,
998
  "Architecture": "Qwen2ForCausalLM",
999
  "Parameters": "14.77B",
1000
  "Chat_Template": "Yes"
1001
  },
1002
+ "hf_url": "https://huggingface.co/v000000/Qwen2.5-14B-Gutenberg-1e-Delta",
1003
+ "known_config": null
1004
  },
1005
  {
1006
+ "rank": 60,
1007
+ "name": "hotmailuser/QwenSlerp2-14B",
1008
+ "scores": {
1009
+ "average": 40.86,
1010
+ "IFEval": 70.37,
1011
+ "BBH": 49.68,
1012
+ "MATH": 39.65,
1013
+ "GPQA": 17.45,
1014
+ "MUSR": 19.35,
1015
+ "MMLU_PRO": 48.66,
1016
+ "Architecture": "Qwen2ForCausalLM",
1017
+ "Parameters": "14.766B",
1018
+ "Chat_Template": "No"
1019
+ },
1020
+ "hf_url": "https://huggingface.co/hotmailuser/QwenSlerp2-14B",
1021
+ "known_config": null
1022
+ },
1023
+ {
1024
+ "rank": 61,
1025
+ "name": "Quazim0t0/Loke-14B-sce",
1026
+ "scores": {
1027
+ "average": 40.86,
1028
+ "IFEval": 68.48,
1029
+ "BBH": 55.83,
1030
+ "MATH": 39.05,
1031
+ "GPQA": 15.32,
1032
+ "MUSR": 17.56,
1033
+ "MMLU_PRO": 48.9,
1034
+ "Architecture": "LlamaForCausalLM",
1035
+ "Parameters": "14.66B",
1036
+ "Chat_Template": "Yes"
1037
+ },
1038
+ "hf_url": "https://huggingface.co/Quazim0t0/Loke-14B-sce",
1039
+ "known_config": null
1040
+ },
1041
+ {
1042
+ "rank": 62,
1043
+ "name": "Quazim0t0/mosaic-14b-sce",
1044
+ "scores": {
1045
+ "average": 40.83,
1046
+ "IFEval": 68.76,
1047
+ "BBH": 55.69,
1048
+ "MATH": 40.26,
1049
+ "GPQA": 14.99,
1050
+ "MUSR": 16.44,
1051
+ "MMLU_PRO": 48.85,
1052
+ "Architecture": "LlamaForCausalLM",
1053
+ "Parameters": "14.66B",
1054
+ "Chat_Template": "Yes"
1055
+ },
1056
+ "hf_url": "https://huggingface.co/Quazim0t0/mosaic-14b-sce",
1057
+ "known_config": null
1058
+ },
1059
+ {
1060
+ "rank": 63,
1061
+ "name": "bunnycore/Phi-4-Model-Stock",
1062
+ "scores": {
1063
+ "average": 40.79,
1064
+ "IFEval": 68.79,
1065
+ "BBH": 55.32,
1066
+ "MATH": 42.98,
1067
+ "GPQA": 13.98,
1068
+ "MUSR": 15.12,
1069
+ "MMLU_PRO": 48.54,
1070
+ "Architecture": "LlamaForCausalLM",
1071
+ "Parameters": "14.66B",
1072
+ "Chat_Template": "Yes"
1073
+ },
1074
+ "hf_url": "https://huggingface.co/bunnycore/Phi-4-Model-Stock",
1075
+ "known_config": null
1076
+ },
1077
+ {
1078
+ "rank": 64,
1079
+ "name": "unsloth/phi-4",
1080
+ "scores": {
1081
+ "average": 40.73,
1082
+ "IFEval": 68.82,
1083
+ "BBH": 55.25,
1084
+ "MATH": 50.0,
1085
+ "GPQA": 11.52,
1086
+ "MUSR": 10.13,
1087
+ "MMLU_PRO": 48.65,
1088
+ "Architecture": "LlamaForCausalLM",
1089
+ "Parameters": "14.66B",
1090
+ "Chat_Template": "Yes"
1091
+ },
1092
+ "hf_url": "https://huggingface.co/unsloth/phi-4",
1093
+ "known_config": null
1094
+ },
1095
+ {
1096
+ "rank": 65,
1097
+ "name": "pankajmathur/orca_mini_phi-4",
1098
+ "scores": {
1099
+ "average": 40.68,
1100
+ "IFEval": 77.81,
1101
+ "BBH": 54.63,
1102
+ "MATH": 29.53,
1103
+ "GPQA": 16.55,
1104
+ "MUSR": 18.25,
1105
+ "MMLU_PRO": 47.28,
1106
+ "Architecture": "LlamaForCausalLM",
1107
+ "Parameters": "14.66B",
1108
+ "Chat_Template": "Yes"
1109
+ },
1110
+ "hf_url": "https://huggingface.co/pankajmathur/orca_mini_phi-4",
1111
+ "known_config": null
1112
+ },
1113
+ {
1114
+ "rank": 66,
1115
+ "name": "pankajmathur/orca_mini_v9_2_14B",
1116
+ "scores": {
1117
+ "average": 40.68,
1118
+ "IFEval": 77.81,
1119
+ "BBH": 54.63,
1120
+ "MATH": 29.53,
1121
+ "GPQA": 16.55,
1122
+ "MUSR": 18.25,
1123
+ "MMLU_PRO": 47.28,
1124
+ "Architecture": "LlamaForCausalLM",
1125
+ "Parameters": "14.66B",
1126
+ "Chat_Template": "Yes"
1127
+ },
1128
+ "hf_url": "https://huggingface.co/pankajmathur/orca_mini_v9_2_14B",
1129
+ "known_config": null
1130
+ },
1131
+ {
1132
+ "rank": 67,
1133
  "name": "sometimesanotion/Lamarck-14B-v0.6-model_stock",
1134
  "scores": {
1135
+ "average": 40.68,
1136
+ "IFEval": 67.9,
1137
  "BBH": 46.49,
1138
+ "MATH": 42.45,
1139
+ "GPQA": 17.9,
1140
  "MUSR": 22.68,
1141
  "MMLU_PRO": 46.64,
1142
  "Architecture": "Qwen2ForCausalLM",
 
1144
  "Chat_Template": "No"
1145
  },
1146
  "hf_url": "https://huggingface.co/sometimesanotion/Lamarck-14B-v0.6-model_stock",
1147
+ "known_config": null
1148
  },
1149
  {
1150
+ "rank": 68,
1151
+ "name": "sometimesanotion/Qwenvergence-14B-v0.6-004-model_stock",
1152
  "scores": {
1153
+ "average": 40.6,
1154
+ "IFEval": 68.6,
1155
+ "BBH": 46.37,
1156
+ "MATH": 40.94,
1157
+ "GPQA": 17.79,
1158
+ "MUSR": 23.35,
1159
+ "MMLU_PRO": 46.59,
1160
+ "Architecture": "Qwen2ForCausalLM",
1161
+ "Parameters": "14B",
1162
+ "Chat_Template": "No"
1163
+ },
1164
+ "hf_url": "https://huggingface.co/sometimesanotion/Qwenvergence-14B-v0.6-004-model_stock",
1165
+ "known_config": null
1166
+ },
1167
+ {
1168
+ "rank": 69,
1169
+ "name": "Quazim0t0/Oasis-14B-ties",
1170
+ "scores": {
1171
+ "average": 40.59,
1172
+ "IFEval": 69.37,
1173
+ "BBH": 55.75,
1174
+ "MATH": 37.54,
1175
+ "GPQA": 15.32,
1176
+ "MUSR": 16.63,
1177
+ "MMLU_PRO": 48.94,
1178
+ "Architecture": "LlamaForCausalLM",
1179
+ "Parameters": "14.66B",
1180
+ "Chat_Template": "Yes"
1181
+ },
1182
+ "hf_url": "https://huggingface.co/Quazim0t0/Oasis-14B-ties",
1183
+ "known_config": null
1184
+ },
1185
+ {
1186
+ "rank": 70,
1187
+ "name": "LightningRodLabs/Flashlight-v1.0",
1188
+ "scores": {
1189
+ "average": 40.57,
1190
+ "IFEval": 67.45,
1191
+ "BBH": 55.15,
1192
+ "MATH": 49.7,
1193
+ "GPQA": 12.3,
1194
+ "MUSR": 9.93,
1195
+ "MMLU_PRO": 48.91,
1196
+ "Architecture": "LlamaForCausalLM",
1197
+ "Parameters": "14.66B",
1198
+ "Chat_Template": "Yes"
1199
+ },
1200
+ "hf_url": "https://huggingface.co/LightningRodLabs/Flashlight-v1.0",
1201
+ "known_config": null
1202
+ },
1203
+ {
1204
+ "rank": 71,
1205
+ "name": "arcee-ai/Virtuoso-Small",
1206
+ "scores": {
1207
+ "average": 40.54,
1208
+ "IFEval": 79.35,
1209
+ "BBH": 50.4,
1210
+ "MATH": 40.94,
1211
+ "GPQA": 11.52,
1212
+ "MUSR": 14.44,
1213
+ "MMLU_PRO": 46.57,
1214
  "Architecture": "Qwen2ForCausalLM",
1215
  "Parameters": "14.77B",
1216
  "Chat_Template": "Yes"
1217
  },
1218
+ "hf_url": "https://huggingface.co/arcee-ai/Virtuoso-Small",
1219
+ "known_config": null
1220
+ },
1221
+ {
1222
+ "rank": 72,
1223
+ "name": "Quazim0t0/GuiltySpark-14B-ties",
1224
+ "scores": {
1225
+ "average": 40.52,
1226
+ "IFEval": 68.54,
1227
+ "BBH": 55.72,
1228
+ "MATH": 38.37,
1229
+ "GPQA": 15.32,
1230
+ "MUSR": 16.3,
1231
+ "MMLU_PRO": 48.89,
1232
+ "Architecture": "LlamaForCausalLM",
1233
+ "Parameters": "14.66B",
1234
+ "Chat_Template": "Yes"
1235
+ },
1236
+ "hf_url": "https://huggingface.co/Quazim0t0/GuiltySpark-14B-ties",
1237
+ "known_config": null
1238
  },
1239
  {
1240
+ "rank": 73,
1241
+ "name": "ozone-ai/0x-lite",
1242
+ "scores": {
1243
+ "average": 40.48,
1244
+ "IFEval": 77.4,
1245
+ "BBH": 47.53,
1246
+ "MATH": 50.45,
1247
+ "GPQA": 9.28,
1248
+ "MUSR": 11.76,
1249
+ "MMLU_PRO": 46.49,
1250
+ "Architecture": "Qwen2ForCausalLM",
1251
+ "Parameters": "14.77B",
1252
+ "Chat_Template": "Yes"
1253
+ },
1254
+ "hf_url": "https://huggingface.co/ozone-ai/0x-lite",
1255
+ "known_config": null
1256
+ },
1257
+ {
1258
+ "rank": 74,
1259
+ "name": "Quazim0t0/Casa-14b-sce",
1260
+ "scores": {
1261
+ "average": 40.41,
1262
+ "IFEval": 66.54,
1263
+ "BBH": 55.4,
1264
+ "MATH": 46.98,
1265
+ "GPQA": 11.07,
1266
+ "MUSR": 13.31,
1267
+ "MMLU_PRO": 49.17,
1268
+ "Architecture": "LlamaForCausalLM",
1269
+ "Parameters": "14.66B",
1270
+ "Chat_Template": "Yes"
1271
+ },
1272
+ "hf_url": "https://huggingface.co/Quazim0t0/Casa-14b-sce",
1273
+ "known_config": null
1274
+ },
1275
+ {
1276
+ "rank": 75,
1277
  "name": "Sakalti/ultiima-14B-v0.3",
1278
  "scores": {
1279
+ "average": 40.38,
1280
+ "IFEval": 70.4,
1281
  "BBH": 48.45,
1282
+ "MATH": 39.65,
1283
  "GPQA": 16.89,
1284
  "MUSR": 18.73,
1285
  "MMLU_PRO": 48.18,
 
1288
  "Chat_Template": "No"
1289
  },
1290
  "hf_url": "https://huggingface.co/Sakalti/ultiima-14B-v0.3",
1291
+ "known_config": null
1292
+ },
1293
+ {
1294
+ "rank": 76,
1295
+ "name": "ehristoforu/fp4-14b-v1-fix",
1296
+ "scores": {
1297
+ "average": 40.37,
1298
+ "IFEval": 67.42,
1299
+ "BBH": 54.33,
1300
+ "MATH": 42.07,
1301
+ "GPQA": 13.87,
1302
+ "MUSR": 16.18,
1303
+ "MMLU_PRO": 48.37,
1304
+ "Architecture": "LlamaForCausalLM",
1305
+ "Parameters": "14.66B",
1306
+ "Chat_Template": "Yes"
1307
+ },
1308
+ "hf_url": "https://huggingface.co/ehristoforu/fp4-14b-v1-fix",
1309
+ "known_config": null
1310
+ },
1311
+ {
1312
+ "rank": 77,
1313
+ "name": "FINGU-AI/Chocolatine-Fusion-14B",
1314
+ "scores": {
1315
+ "average": 40.36,
1316
+ "IFEval": 69.49,
1317
+ "BBH": 48.6,
1318
+ "MATH": 38.52,
1319
+ "GPQA": 16.22,
1320
+ "MUSR": 21.99,
1321
+ "MMLU_PRO": 47.35,
1322
+ "Architecture": "Qwen2ForCausalLM",
1323
+ "Parameters": "8.367B",
1324
+ "Chat_Template": "No"
1325
+ },
1326
+ "hf_url": "https://huggingface.co/FINGU-AI/Chocolatine-Fusion-14B",
1327
+ "known_config": null
1328
+ },
1329
+ {
1330
+ "rank": 78,
1331
+ "name": "hotmailuser/QwenSlerp-14B",
1332
+ "scores": {
1333
+ "average": 40.35,
1334
+ "IFEval": 70.25,
1335
+ "BBH": 49.42,
1336
+ "MATH": 38.37,
1337
+ "GPQA": 18.34,
1338
+ "MUSR": 16.83,
1339
+ "MMLU_PRO": 48.89,
1340
+ "Architecture": "Qwen2ForCausalLM",
1341
+ "Parameters": "14.766B",
1342
+ "Chat_Template": "No"
1343
+ },
1344
+ "hf_url": "https://huggingface.co/hotmailuser/QwenSlerp-14B",
1345
+ "known_config": null
1346
+ },
1347
+ {
1348
+ "rank": 79,
1349
+ "name": "Triangle104/Robo-Gutenberg_V1.0",
1350
+ "scores": {
1351
+ "average": 40.35,
1352
+ "IFEval": 60.08,
1353
+ "BBH": 50.29,
1354
+ "MATH": 45.62,
1355
+ "GPQA": 18.12,
1356
+ "MUSR": 19.2,
1357
+ "MMLU_PRO": 48.79,
1358
+ "Architecture": "Qwen2ForCausalLM",
1359
+ "Parameters": "14.77B",
1360
+ "Chat_Template": "No"
1361
+ },
1362
+ "hf_url": "https://huggingface.co/Triangle104/Robo-Gutenberg_V1.0",
1363
+ "known_config": null
1364
+ },
1365
+ {
1366
+ "rank": 80,
1367
+ "name": "Quazim0t0/Adamant-14B-sce",
1368
+ "scores": {
1369
+ "average": 40.32,
1370
+ "IFEval": 68.58,
1371
+ "BBH": 54.97,
1372
+ "MATH": 39.88,
1373
+ "GPQA": 13.42,
1374
+ "MUSR": 16.51,
1375
+ "MMLU_PRO": 48.57,
1376
+ "Architecture": "LlamaForCausalLM",
1377
+ "Parameters": "14.66B",
1378
+ "Chat_Template": "Yes"
1379
+ },
1380
+ "hf_url": "https://huggingface.co/Quazim0t0/Adamant-14B-sce",
1381
+ "known_config": null
1382
+ },
1383
+ {
1384
+ "rank": 81,
1385
+ "name": "Quazim0t0/Phi4Basis-14B-sce",
1386
+ "scores": {
1387
+ "average": 40.31,
1388
+ "IFEval": 65.02,
1389
+ "BBH": 55.67,
1390
+ "MATH": 47.89,
1391
+ "GPQA": 10.51,
1392
+ "MUSR": 14.02,
1393
+ "MMLU_PRO": 48.78,
1394
+ "Architecture": "LlamaForCausalLM",
1395
+ "Parameters": "14.66B",
1396
+ "Chat_Template": "Yes"
1397
+ },
1398
+ "hf_url": "https://huggingface.co/Quazim0t0/Phi4Basis-14B-sce",
1399
+ "known_config": null
1400
+ },
1401
+ {
1402
+ "rank": 82,
1403
+ "name": "Quazim0t0/bloom-14b-stock",
1404
+ "scores": {
1405
+ "average": 40.29,
1406
+ "IFEval": 65.75,
1407
+ "BBH": 55.27,
1408
+ "MATH": 48.11,
1409
+ "GPQA": 10.85,
1410
+ "MUSR": 13.17,
1411
+ "MMLU_PRO": 48.59,
1412
+ "Architecture": "LlamaForCausalLM",
1413
+ "Parameters": "14.66B",
1414
+ "Chat_Template": "Yes"
1415
+ },
1416
+ "hf_url": "https://huggingface.co/Quazim0t0/bloom-14b-stock",
1417
+ "known_config": null
1418
+ },
1419
+ {
1420
+ "rank": 83,
1421
+ "name": "sometimesanotion/Qwen2.5-14B-Vimarckoso-v3-Prose01",
1422
+ "scores": {
1423
+ "average": 40.28,
1424
+ "IFEval": 68.72,
1425
+ "BBH": 47.71,
1426
+ "MATH": 39.95,
1427
+ "GPQA": 18.23,
1428
+ "MUSR": 19.56,
1429
+ "MMLU_PRO": 47.5,
1430
+ "Architecture": "Qwen2ForCausalLM",
1431
+ "Parameters": "14B",
1432
+ "Chat_Template": "No"
1433
+ },
1434
+ "hf_url": "https://huggingface.co/sometimesanotion/Qwen2.5-14B-Vimarckoso-v3-Prose01",
1435
+ "known_config": null
1436
+ },
1437
+ {
1438
+ "rank": 84,
1439
+ "name": "Quazim0t0/Halo-14B-sce",
1440
+ "scores": {
1441
+ "average": 40.26,
1442
+ "IFEval": 67.54,
1443
+ "BBH": 55.27,
1444
+ "MATH": 42.9,
1445
+ "GPQA": 12.98,
1446
+ "MUSR": 14.24,
1447
+ "MMLU_PRO": 48.63,
1448
+ "Architecture": "LlamaForCausalLM",
1449
+ "Parameters": "14.66B",
1450
+ "Chat_Template": "Yes"
1451
+ },
1452
+ "hf_url": "https://huggingface.co/Quazim0t0/Halo-14B-sce",
1453
+ "known_config": null
1454
+ },
1455
+ {
1456
+ "rank": 85,
1457
+ "name": "prithivMLmods/Calcium-Opus-14B-Elite2",
1458
+ "scores": {
1459
+ "average": 40.25,
1460
+ "IFEval": 61.76,
1461
+ "BBH": 46.81,
1462
+ "MATH": 46.9,
1463
+ "GPQA": 16.0,
1464
+ "MUSR": 22.24,
1465
+ "MMLU_PRO": 47.79,
1466
+ "Architecture": "Qwen2ForCausalLM",
1467
+ "Parameters": "14.766B",
1468
+ "Chat_Template": "No"
1469
+ },
1470
+ "hf_url": "https://huggingface.co/prithivMLmods/Calcium-Opus-14B-Elite2",
1471
+ "known_config": null
1472
+ },
1473
+ {
1474
+ "rank": 86,
1475
+ "name": "SicariusSicariiStuff/Impish_QWEN_14B-1M",
1476
+ "scores": {
1477
+ "average": 40.24,
1478
+ "IFEval": 78.68,
1479
+ "BBH": 47.22,
1480
+ "MATH": 39.65,
1481
+ "GPQA": 13.42,
1482
+ "MUSR": 17.52,
1483
+ "MMLU_PRO": 44.93,
1484
+ "Architecture": "Qwen2ForCausalLM",
1485
+ "Parameters": "14.77B",
1486
+ "Chat_Template": "Yes"
1487
+ },
1488
+ "hf_url": "https://huggingface.co/SicariusSicariiStuff/Impish_QWEN_14B-1M",
1489
+ "known_config": null
1490
+ },
1491
+ {
1492
+ "rank": 87,
1493
+ "name": "bunnycore/Phi-4-Stock-Ex",
1494
+ "scores": {
1495
+ "average": 40.22,
1496
+ "IFEval": 65.75,
1497
+ "BBH": 55.2,
1498
+ "MATH": 40.86,
1499
+ "GPQA": 13.42,
1500
+ "MUSR": 17.46,
1501
+ "MMLU_PRO": 48.61,
1502
+ "Architecture": "LlamaForCausalLM",
1503
+ "Parameters": "14.66B",
1504
+ "Chat_Template": "Yes"
1505
+ },
1506
+ "hf_url": "https://huggingface.co/bunnycore/Phi-4-Stock-Ex",
1507
+ "known_config": null
1508
+ },
1509
+ {
1510
+ "rank": 88,
1511
+ "name": "sometimesanotion/Qwenvergence-14B-qv256",
1512
+ "scores": {
1513
+ "average": 40.12,
1514
+ "IFEval": 70.06,
1515
+ "BBH": 47.08,
1516
+ "MATH": 38.97,
1517
+ "GPQA": 17.11,
1518
+ "MUSR": 21.07,
1519
+ "MMLU_PRO": 46.42,
1520
+ "Architecture": "Qwen2ForCausalLM",
1521
+ "Parameters": "14B",
1522
+ "Chat_Template": "No"
1523
+ },
1524
+ "hf_url": "https://huggingface.co/sometimesanotion/Qwenvergence-14B-qv256",
1525
+ "known_config": null
1526
+ },
1527
+ {
1528
+ "rank": 89,
1529
+ "name": "tensopolis/virtuoso-small-tensopolis-v2",
1530
+ "scores": {
1531
+ "average": 40.11,
1532
+ "IFEval": 80.2,
1533
+ "BBH": 50.23,
1534
+ "MATH": 38.75,
1535
+ "GPQA": 10.51,
1536
+ "MUSR": 14.84,
1537
+ "MMLU_PRO": 46.15,
1538
+ "Architecture": "Qwen2ForCausalLM",
1539
+ "Parameters": "14.77B",
1540
+ "Chat_Template": "Yes"
1541
+ },
1542
+ "hf_url": "https://huggingface.co/tensopolis/virtuoso-small-tensopolis-v2",
1543
+ "known_config": null
1544
  }
1545
  ]
1546
 
1547
+
1548
  def snippet_scrape_model_page(url):
1549
  """
1550
  Equivalent scraping function for the larger dataset
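
As a quick illustration of how the entries updated in this commit can be consumed, here is a minimal sketch in plain Python. It assumes only the entry layout shown in the diff ("rank", "name", "scores", "hf_url"); the top_models helper is illustrative and is not part of app.py.

# Illustrative only: a small sanity check over benchmark_data entries with the
# shape shown in this diff. Assumes each entry carries "rank", "name",
# "scores", and "hf_url"; this helper is not part of app.py.

def top_models(benchmark_data, n=5, metric="average"):
    """Return the n entries with the highest value for the given score key."""
    ranked = sorted(
        benchmark_data,
        key=lambda entry: entry["scores"].get(metric, 0.0),
        reverse=True,
    )
    return [
        (entry["rank"], entry["name"], entry["scores"][metric], entry["hf_url"])
        for entry in ranked[:n]
    ]


if __name__ == "__main__":
    # Two entries copied from the updated data above, trimmed to a few scores.
    sample = [
        {
            "rank": 1,
            "name": "suayptalha/Lamarckvergence-14B",
            "scores": {"average": 43.32, "IFEval": 76.56},
            "hf_url": "https://huggingface.co/suayptalha/Lamarckvergence-14B",
        },
        {
            "rank": 2,
            "name": "sthenno/tempesthenno-ppo-ckpt40",
            "scores": {"average": 42.74, "IFEval": 79.23},
            "hf_url": "https://huggingface.co/sthenno/tempesthenno-ppo-ckpt40",
        },
    ]
    for rank, name, avg, url in top_models(sample, n=2):
        print(f"#{rank} {name}: average={avg} ({url})")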