lvkaokao committed
Commit · 124d34b
1 Parent(s): 7be779d

add Qwen2.
Intel/results_2024-06-06-00-18-35.json
ADDED
@@ -0,0 +1,598 @@
{
  "config_general": {
    "lighteval_sha": "1.4",
    "num_few_shot_default": null,
    "num_fewshot_seeds": null,
    "override_batch_size": null,
    "max_samples": null,
    "job_id": -1,
    "start_time": null,
    "end_time": "2024-06-06-00-18-35",
    "total_evaluation_time_secondes": "",
    "model_name": "Intel/Qwen2-7B-int4-inc",
    "model_sha": "",
    "model_dtype": "4bit",
    "model_size": 5.2,
    "model_params": 7,
    "quant_type": "AutoRound",
    "precision": "4bit"
  },
  "results": {
    "harness|truthfulqa:mc2|0": {
      "acc,none": 0.5428341738734119,
      "acc_stderr,none": 0.014866609073167824,
      "alias": "truthfulqa_mc2"
    },
    "harness|arc:easy|0": {
      "acc,none": 0.7638888888888888,
      "acc_stderr,none": 0.008714480491711292,
      "acc_norm,none": 0.7117003367003367,
      "acc_norm_stderr,none": 0.00929477425202962,
      "alias": "arc_easy"
    },
    "harness|lambada:openai|0": {
      "perplexity,none": 3.5641183746208984,
      "perplexity_stderr,none": 0.07387640175498134,
      "acc,none": 0.7252086163399961,
      "acc_stderr,none": 0.006219351548299035,
      "alias": "lambada_openai"
    },
    "harness|arc:challenge|0": {
      "acc,none": 0.4616040955631399,
      "acc_stderr,none": 0.01456824555029636,
      "acc_norm,none": 0.4761092150170648,
      "acc_norm_stderr,none": 0.014594701798071654,
      "alias": "arc_challenge"
    },
    "harness|boolq|0": {
      "acc,none": 0.8159021406727829,
      "acc_stderr,none": 0.006778536599685005,
      "alias": "boolq"
    },
    "harness|truthfulqa:mc1|0": {
      "acc,none": 0.36107711138310894,
      "acc_stderr,none": 0.016814312844836886,
      "alias": "truthfulqa_mc1"
    },
    "harness|openbookqa|0": {
      "acc,none": 0.34,
      "acc_stderr,none": 0.021206117013673066,
      "acc_norm,none": 0.426,
      "acc_norm_stderr,none": 0.022136577335085637,
      "alias": "openbookqa"
    },
    "harness|winogrande|0": {
      "acc,none": 0.7261247040252565,
      "acc_stderr,none": 0.012533292732620296,
      "alias": "winogrande"
    },
    "harness|hellaswag|0": {
      "acc,none": 0.5762796255725952,
      "acc_stderr,none": 0.004931372657129809,
      "acc_norm,none": 0.7721569408484366,
      "acc_norm_stderr,none": 0.0041858359906645996,
      "alias": "hellaswag"
    },
    "harness|piqa|0": {
      "acc,none": 0.7916213275299239,
      "acc_stderr,none": 0.009476125383049452,
      "acc_norm,none": 0.8025027203482046,
      "acc_norm_stderr,none": 0.009288578108523267,
      "alias": "piqa"
    },
    "harness|mmlu|0": {
      "acc,none": 0.6652186298248113,
      "acc_stderr,none": 0.003715571902177435,
      "alias": "mmlu"
    },
    "harness|mmlu_humanities|0": {
      "alias": " - humanities",
      "acc,none": 0.5846971307120085,
      "acc_stderr,none": 0.006515055624404063
    },
    "harness|mmlu_formal_logic|0": {
      "alias": " - formal_logic",
      "acc,none": 0.5555555555555556,
      "acc_stderr,none": 0.04444444444444449
    },
    "harness|mmlu_high_school_european_history|0": {
      "alias": " - high_school_european_history",
      "acc,none": 0.7757575757575758,
      "acc_stderr,none": 0.03256866661681102
    },
    "harness|mmlu_high_school_us_history|0": {
      "alias": " - high_school_us_history",
      "acc,none": 0.8431372549019608,
      "acc_stderr,none": 0.025524722324553318
    },
    "harness|mmlu_high_school_world_history|0": {
      "alias": " - high_school_world_history",
      "acc,none": 0.810126582278481,
      "acc_stderr,none": 0.025530100460233494
    },
    "harness|mmlu_international_law|0": {
      "alias": " - international_law",
      "acc,none": 0.8347107438016529,
      "acc_stderr,none": 0.03390780612972776
    },
    "harness|mmlu_jurisprudence|0": {
      "alias": " - jurisprudence",
      "acc,none": 0.7962962962962963,
      "acc_stderr,none": 0.03893542518824849
    },
    "harness|mmlu_logical_fallacies|0": {
      "alias": " - logical_fallacies",
      "acc,none": 0.7975460122699386,
      "acc_stderr,none": 0.03157065078911899
    },
    "harness|mmlu_moral_disputes|0": {
      "alias": " - moral_disputes",
      "acc,none": 0.7658959537572254,
      "acc_stderr,none": 0.02279711027807113
    },
    "harness|mmlu_moral_scenarios|0": {
      "alias": " - moral_scenarios",
      "acc,none": 0.2424581005586592,
      "acc_stderr,none": 0.014333522059217887
    },
    "harness|mmlu_philosophy|0": {
      "alias": " - philosophy",
      "acc,none": 0.7395498392282959,
      "acc_stderr,none": 0.02492672322484555
    },
    "harness|mmlu_prehistory|0": {
      "alias": " - prehistory",
      "acc,none": 0.7623456790123457,
      "acc_stderr,none": 0.023683591837008557
    },
    "harness|mmlu_professional_law|0": {
      "alias": " - professional_law",
      "acc,none": 0.5032594524119948,
      "acc_stderr,none": 0.012769964760343314
    },
    "harness|mmlu_world_religions|0": {
      "alias": " - world_religions",
      "acc,none": 0.8245614035087719,
      "acc_stderr,none": 0.02917088550072766
    },
    "harness|mmlu_other|0": {
      "alias": " - other",
      "acc,none": 0.7347924042484711,
      "acc_stderr,none": 0.007602922006730982
    },
    "harness|mmlu_business_ethics|0": {
      "alias": " - business_ethics",
      "acc,none": 0.75,
      "acc_stderr,none": 0.04351941398892446
    },
    "harness|mmlu_clinical_knowledge|0": {
      "alias": " - clinical_knowledge",
      "acc,none": 0.7584905660377359,
      "acc_stderr,none": 0.026341480371118352
    },
    "harness|mmlu_college_medicine|0": {
      "alias": " - college_medicine",
      "acc,none": 0.6936416184971098,
      "acc_stderr,none": 0.03514942551267438
    },
    "harness|mmlu_global_facts|0": {
      "alias": " - global_facts",
      "acc,none": 0.37,
      "acc_stderr,none": 0.04852365870939099
    },
    "harness|mmlu_human_aging|0": {
      "alias": " - human_aging",
      "acc,none": 0.7219730941704036,
      "acc_stderr,none": 0.030069584874494033
    },
    "harness|mmlu_management|0": {
      "alias": " - management",
      "acc,none": 0.8543689320388349,
      "acc_stderr,none": 0.0349260647662379
    },
    "harness|mmlu_marketing|0": {
      "alias": " - marketing",
      "acc,none": 0.9273504273504274,
      "acc_stderr,none": 0.017004368568132356
    },
    "harness|mmlu_medical_genetics|0": {
      "alias": " - medical_genetics",
      "acc,none": 0.78,
      "acc_stderr,none": 0.04163331998932263
    },
    "harness|mmlu_miscellaneous|0": {
      "alias": " - miscellaneous",
      "acc,none": 0.8250319284802043,
      "acc_stderr,none": 0.013586619219903341
    },
    "harness|mmlu_nutrition|0": {
      "alias": " - nutrition",
      "acc,none": 0.7581699346405228,
      "acc_stderr,none": 0.024518195641879334
    },
    "harness|mmlu_professional_accounting|0": {
      "alias": " - professional_accounting",
      "acc,none": 0.5460992907801419,
      "acc_stderr,none": 0.02970045324729148
    },
    "harness|mmlu_professional_medicine|0": {
      "alias": " - professional_medicine",
      "acc,none": 0.6985294117647058,
      "acc_stderr,none": 0.027875982114273168
    },
    "harness|mmlu_virology|0": {
      "alias": " - virology",
      "acc,none": 0.5060240963855421,
      "acc_stderr,none": 0.03892212195333045
    },
    "harness|mmlu_social_sciences|0": {
      "alias": " - social_sciences",
      "acc,none": 0.7747806304842378,
      "acc_stderr,none": 0.007405038078278177
    },
    "harness|mmlu_econometrics|0": {
      "alias": " - econometrics",
      "acc,none": 0.5263157894736842,
      "acc_stderr,none": 0.046970851366478626
    },
    "harness|mmlu_high_school_geography|0": {
      "alias": " - high_school_geography",
      "acc,none": 0.8636363636363636,
      "acc_stderr,none": 0.024450155973189835
    },
    "harness|mmlu_high_school_government_and_politics|0": {
      "alias": " - high_school_government_and_politics",
      "acc,none": 0.8860103626943006,
      "acc_stderr,none": 0.022935144053919426
    },
    "harness|mmlu_high_school_macroeconomics|0": {
      "alias": " - high_school_macroeconomics",
      "acc,none": 0.7230769230769231,
      "acc_stderr,none": 0.022688042352424994
    },
    "harness|mmlu_high_school_microeconomics|0": {
      "alias": " - high_school_microeconomics",
      "acc,none": 0.7857142857142857,
      "acc_stderr,none": 0.026653531596715473
    },
    "harness|mmlu_high_school_psychology|0": {
      "alias": " - high_school_psychology",
      "acc,none": 0.8532110091743119,
      "acc_stderr,none": 0.01517314184512626
    },
    "harness|mmlu_human_sexuality|0": {
      "alias": " - human_sexuality",
      "acc,none": 0.7862595419847328,
      "acc_stderr,none": 0.0359546161177469
    },
    "harness|mmlu_professional_psychology|0": {
      "alias": " - professional_psychology",
      "acc,none": 0.704248366013072,
      "acc_stderr,none": 0.01846315413263282
    },
    "harness|mmlu_public_relations|0": {
      "alias": " - public_relations",
      "acc,none": 0.7,
      "acc_stderr,none": 0.04389311454644287
    },
    "harness|mmlu_security_studies|0": {
      "alias": " - security_studies",
      "acc,none": 0.7755102040816326,
      "acc_stderr,none": 0.02671143055553841
    },
    "harness|mmlu_sociology|0": {
      "alias": " - sociology",
      "acc,none": 0.8109452736318408,
      "acc_stderr,none": 0.027686913588013024
    },
    "harness|mmlu_us_foreign_policy|0": {
      "alias": " - us_foreign_policy",
      "acc,none": 0.84,
      "acc_stderr,none": 0.03684529491774708
    },
    "harness|mmlu_stem|0": {
      "alias": " - stem",
      "acc,none": 0.609895337773549,
      "acc_stderr,none": 0.008423044034949694
    },
    "harness|mmlu_abstract_algebra|0": {
      "alias": " - abstract_algebra",
      "acc,none": 0.39,
      "acc_stderr,none": 0.04902071300001975
    },
    "harness|mmlu_anatomy|0": {
      "alias": " - anatomy",
      "acc,none": 0.6,
      "acc_stderr,none": 0.04232073695151589
    },
    "harness|mmlu_astronomy|0": {
      "alias": " - astronomy",
      "acc,none": 0.7302631578947368,
      "acc_stderr,none": 0.03611780560284898
    },
    "harness|mmlu_college_biology|0": {
      "alias": " - college_biology",
      "acc,none": 0.7569444444444444,
      "acc_stderr,none": 0.035868792800803406
    },
    "harness|mmlu_college_chemistry|0": {
      "alias": " - college_chemistry",
      "acc,none": 0.47,
      "acc_stderr,none": 0.05016135580465919
    },
    "harness|mmlu_college_computer_science|0": {
      "alias": " - college_computer_science",
      "acc,none": 0.58,
      "acc_stderr,none": 0.04960449637488583
    },
    "harness|mmlu_college_mathematics|0": {
      "alias": " - college_mathematics",
      "acc,none": 0.37,
      "acc_stderr,none": 0.04852365870939099
    },
    "harness|mmlu_college_physics|0": {
      "alias": " - college_physics",
      "acc,none": 0.4411764705882353,
      "acc_stderr,none": 0.049406356306056595
    },
    "harness|mmlu_computer_security|0": {
      "alias": " - computer_security",
      "acc,none": 0.69,
      "acc_stderr,none": 0.04648231987117316
    },
    "harness|mmlu_conceptual_physics|0": {
      "alias": " - conceptual_physics",
      "acc,none": 0.6382978723404256,
      "acc_stderr,none": 0.0314108219759624
    },
    "harness|mmlu_electrical_engineering|0": {
      "alias": " - electrical_engineering",
      "acc,none": 0.6482758620689655,
      "acc_stderr,none": 0.0397923663749741
    },
    "harness|mmlu_elementary_mathematics|0": {
      "alias": " - elementary_mathematics",
      "acc,none": 0.5978835978835979,
      "acc_stderr,none": 0.025253032554997692
    },
    "harness|mmlu_high_school_biology|0": {
      "alias": " - high_school_biology",
      "acc,none": 0.8387096774193549,
      "acc_stderr,none": 0.020923327006423294
    },
    "harness|mmlu_high_school_chemistry|0": {
      "alias": " - high_school_chemistry",
      "acc,none": 0.5665024630541872,
      "acc_stderr,none": 0.034867317274198714
    },
    "harness|mmlu_high_school_computer_science|0": {
      "alias": " - high_school_computer_science",
      "acc,none": 0.79,
      "acc_stderr,none": 0.040936018074033256
    },
    "harness|mmlu_high_school_mathematics|0": {
      "alias": " - high_school_mathematics",
      "acc,none": 0.5,
      "acc_stderr,none": 0.030485538042484616
    },
    "harness|mmlu_high_school_physics|0": {
      "alias": " - high_school_physics",
      "acc,none": 0.45695364238410596,
      "acc_stderr,none": 0.04067325174247443
    },
    "harness|mmlu_high_school_statistics|0": {
      "alias": " - high_school_statistics",
      "acc,none": 0.6342592592592593,
      "acc_stderr,none": 0.032847388576472056
    },
    "harness|mmlu_machine_learning|0": {
      "alias": " - machine_learning",
      "acc,none": 0.5535714285714286,
      "acc_stderr,none": 0.04718471485219587
    }
  },
  "task_info": {
    "model": "Intel/Qwen2-7B-int4-inc",
    "local": true,
    "revision": "main",
    "private": false,
    "params": 7,
    "architectures": "QwenForCausalLM",
    "quant_type": "AutoRound",
    "precision": "4bit",
    "model_params": 7,
    "model_size": 5.2,
    "weight_dtype": "int4",
    "compute_dtype": "float16",
    "gguf_ftype": "*Q4_0.gguf",
    "hardware": "gpu",
    "status": "Finished",
    "submitted_time": "2024-04-23T15:44:22Z",
    "model_type": "quantization",
    "job_id": -1,
    "job_start_time": null,
    "scripts": "ITREX"
  },
  "quantization_config": {
    "amp": true,
    "autoround_version": "0.2.1.dev",
    "backend": "autoround:exllamav2",
    "bits": 4,
    "data_type": "int",
    "dataset": "NeelNanda/pile-10k",
    "enable_minmax_tuning": true,
    "enable_quanted_input": true,
    "gradient_accumulate_steps": 1,
    "group_size": 128,
    "iters": 1000,
    "low_gpu_mem_usage": false,
    "lr": 0.001,
    "minmax_lr": 0.001,
    "n_samples": 512,
    "quant_method": "intel/auto-round",
    "scale_dtype": "torch.float16",
    "seqlen": 2048,
    "sym": false,
    "train_bs": 8
  },
  "versions": {
    "harness|truthfulqa:mc2|0": 2.0,
    "harness|arc:easy|0": 1.0,
    "harness|lambada:openai|0": 1.0,
    "harness|arc:challenge|0": 1.0,
    "harness|boolq|0": 2.0,
    "harness|truthfulqa:mc1|0": 2.0,
    "harness|openbookqa|0": 1.0,
    "harness|winogrande|0": 1.0,
    "harness|hellaswag|0": 1.0,
    "harness|piqa|0": 1.0,
    "harness|mmlu|0": null,
    "harness|mmlu_humanities|0": null,
    "harness|mmlu_formal_logic|0": 0.0,
    "harness|mmlu_high_school_european_history|0": 0.0,
    "harness|mmlu_high_school_us_history|0": 0.0,
    "harness|mmlu_high_school_world_history|0": 0.0,
    "harness|mmlu_international_law|0": 0.0,
    "harness|mmlu_jurisprudence|0": 0.0,
    "harness|mmlu_logical_fallacies|0": 0.0,
    "harness|mmlu_moral_disputes|0": 0.0,
    "harness|mmlu_moral_scenarios|0": 0.0,
    "harness|mmlu_philosophy|0": 0.0,
    "harness|mmlu_prehistory|0": 0.0,
    "harness|mmlu_professional_law|0": 0.0,
    "harness|mmlu_world_religions|0": 0.0,
    "harness|mmlu_other|0": null,
    "harness|mmlu_business_ethics|0": 0.0,
    "harness|mmlu_clinical_knowledge|0": 0.0,
    "harness|mmlu_college_medicine|0": 0.0,
    "harness|mmlu_global_facts|0": 0.0,
    "harness|mmlu_human_aging|0": 0.0,
    "harness|mmlu_management|0": 0.0,
    "harness|mmlu_marketing|0": 0.0,
    "harness|mmlu_medical_genetics|0": 0.0,
    "harness|mmlu_miscellaneous|0": 0.0,
    "harness|mmlu_nutrition|0": 0.0,
    "harness|mmlu_professional_accounting|0": 0.0,
    "harness|mmlu_professional_medicine|0": 0.0,
    "harness|mmlu_virology|0": 0.0,
    "harness|mmlu_social_sciences|0": null,
    "harness|mmlu_econometrics|0": 0.0,
    "harness|mmlu_high_school_geography|0": 0.0,
    "harness|mmlu_high_school_government_and_politics|0": 0.0,
    "harness|mmlu_high_school_macroeconomics|0": 0.0,
    "harness|mmlu_high_school_microeconomics|0": 0.0,
    "harness|mmlu_high_school_psychology|0": 0.0,
    "harness|mmlu_human_sexuality|0": 0.0,
    "harness|mmlu_professional_psychology|0": 0.0,
    "harness|mmlu_public_relations|0": 0.0,
    "harness|mmlu_security_studies|0": 0.0,
    "harness|mmlu_sociology|0": 0.0,
    "harness|mmlu_us_foreign_policy|0": 0.0,
    "harness|mmlu_stem|0": null,
    "harness|mmlu_abstract_algebra|0": 0.0,
    "harness|mmlu_anatomy|0": 0.0,
    "harness|mmlu_astronomy|0": 0.0,
    "harness|mmlu_college_biology|0": 0.0,
    "harness|mmlu_college_chemistry|0": 0.0,
    "harness|mmlu_college_computer_science|0": 0.0,
    "harness|mmlu_college_mathematics|0": 0.0,
    "harness|mmlu_college_physics|0": 0.0,
    "harness|mmlu_computer_security|0": 0.0,
    "harness|mmlu_conceptual_physics|0": 0.0,
    "harness|mmlu_electrical_engineering|0": 0.0,
    "harness|mmlu_elementary_mathematics|0": 0.0,
    "harness|mmlu_high_school_biology|0": 0.0,
    "harness|mmlu_high_school_chemistry|0": 0.0,
    "harness|mmlu_high_school_computer_science|0": 0.0,
    "harness|mmlu_high_school_mathematics|0": 0.0,
    "harness|mmlu_high_school_physics|0": 0.0,
    "harness|mmlu_high_school_statistics|0": 0.0,
    "harness|mmlu_machine_learning|0": 0.0
  },
  "n-shot": {
    "arc_challenge": 0,
    "arc_easy": 0,
    "boolq": 0,
    "hellaswag": 0,
    "lambada_openai": 0,
    "mmlu": 0,
    "mmlu_abstract_algebra": 0,
    "mmlu_anatomy": 0,
    "mmlu_astronomy": 0,
    "mmlu_business_ethics": 0,
    "mmlu_clinical_knowledge": 0,
    "mmlu_college_biology": 0,
    "mmlu_college_chemistry": 0,
    "mmlu_college_computer_science": 0,
    "mmlu_college_mathematics": 0,
    "mmlu_college_medicine": 0,
    "mmlu_college_physics": 0,
    "mmlu_computer_security": 0,
    "mmlu_conceptual_physics": 0,
    "mmlu_econometrics": 0,
    "mmlu_electrical_engineering": 0,
    "mmlu_elementary_mathematics": 0,
    "mmlu_formal_logic": 0,
    "mmlu_global_facts": 0,
    "mmlu_high_school_biology": 0,
    "mmlu_high_school_chemistry": 0,
    "mmlu_high_school_computer_science": 0,
    "mmlu_high_school_european_history": 0,
    "mmlu_high_school_geography": 0,
    "mmlu_high_school_government_and_politics": 0,
    "mmlu_high_school_macroeconomics": 0,
    "mmlu_high_school_mathematics": 0,
    "mmlu_high_school_microeconomics": 0,
    "mmlu_high_school_physics": 0,
    "mmlu_high_school_psychology": 0,
    "mmlu_high_school_statistics": 0,
    "mmlu_high_school_us_history": 0,
    "mmlu_high_school_world_history": 0,
    "mmlu_human_aging": 0,
    "mmlu_human_sexuality": 0,
    "mmlu_humanities": 0,
    "mmlu_international_law": 0,
    "mmlu_jurisprudence": 0,
    "mmlu_logical_fallacies": 0,
    "mmlu_machine_learning": 0,
    "mmlu_management": 0,
    "mmlu_marketing": 0,
    "mmlu_medical_genetics": 0,
    "mmlu_miscellaneous": 0,
    "mmlu_moral_disputes": 0,
    "mmlu_moral_scenarios": 0,
    "mmlu_nutrition": 0,
    "mmlu_other": 0,
    "mmlu_philosophy": 0,
    "mmlu_prehistory": 0,
    "mmlu_professional_accounting": 0,
    "mmlu_professional_law": 0,
    "mmlu_professional_medicine": 0,
    "mmlu_professional_psychology": 0,
    "mmlu_public_relations": 0,
    "mmlu_security_studies": 0,
    "mmlu_social_sciences": 0,
    "mmlu_sociology": 0,
    "mmlu_stem": 0,
    "mmlu_us_foreign_policy": 0,
    "mmlu_virology": 0,
    "mmlu_world_religions": 0,
    "openbookqa": 0,
    "piqa": 0,
    "truthfulqa_mc1": 0,
    "truthfulqa_mc2": 0,
    "winogrande": 0
  },
  "date": 1717599011.729154,
  "config": {
    "model": "hf",
    "model_args": "pretrained=Intel/Qwen2-7B-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main",
    "batch_size": 2,
    "batch_sizes": [],
    "device": "cuda",
    "use_cache": null,
    "limit": null,
    "bootstrap_iters": 100000,
    "gen_kwargs": null
  }
}
Intel/results_2024-06-06-09-32-36.json
ADDED
@@ -0,0 +1,596 @@
{
  "config_general": {
    "lighteval_sha": "1.4",
    "num_few_shot_default": null,
    "num_fewshot_seeds": null,
    "override_batch_size": null,
    "max_samples": null,
    "job_id": -1,
    "start_time": null,
    "end_time": "2024-06-06-09-32-36",
    "total_evaluation_time_secondes": "",
    "model_name": "Intel/Qwen2-1.5B-Instuct-int4-inc",
    "model_sha": "",
    "model_dtype": "4bit",
    "model_size": 1.6,
    "model_params": 1.5,
    "quant_type": "AutoRound",
    "precision": "4bit"
  },
  "results": {
    "harness|lambada:openai|0": {
      "perplexity,none": 5.285488721585025,
      "perplexity_stderr,none": 0.1356474096089869,
      "acc,none": 0.6411798952066757,
      "acc_stderr,none": 0.006682522124089319,
      "alias": "lambada_openai"
    },
    "harness|openbookqa|0": {
      "acc,none": 0.272,
      "acc_stderr,none": 0.01992048320956607,
      "acc_norm,none": 0.36,
      "acc_norm_stderr,none": 0.02148775108972052,
      "alias": "openbookqa"
    },
    "harness|mmlu|0": {
      "acc,none": 0.5502777382139297,
      "acc_stderr,none": 0.004026718834956645,
      "alias": "mmlu"
    },
    "harness|mmlu_humanities|0": {
      "alias": " - humanities",
      "acc,none": 0.5030818278427205,
      "acc_stderr,none": 0.006947472831518005
    },
    "harness|mmlu_formal_logic|0": {
      "alias": " - formal_logic",
      "acc,none": 0.3888888888888889,
      "acc_stderr,none": 0.04360314860077459
    },
    "harness|mmlu_high_school_european_history|0": {
      "alias": " - high_school_european_history",
      "acc,none": 0.6848484848484848,
      "acc_stderr,none": 0.0362773057502241
    },
    "harness|mmlu_high_school_us_history|0": {
      "alias": " - high_school_us_history",
      "acc,none": 0.6715686274509803,
      "acc_stderr,none": 0.032962451101722294
    },
    "harness|mmlu_high_school_world_history|0": {
      "alias": " - high_school_world_history",
      "acc,none": 0.7341772151898734,
      "acc_stderr,none": 0.02875679962965834
    },
    "harness|mmlu_international_law|0": {
      "alias": " - international_law",
      "acc,none": 0.7272727272727273,
      "acc_stderr,none": 0.04065578140908705
    },
    "harness|mmlu_jurisprudence|0": {
      "alias": " - jurisprudence",
      "acc,none": 0.6481481481481481,
      "acc_stderr,none": 0.046166311118017146
    },
    "harness|mmlu_logical_fallacies|0": {
      "alias": " - logical_fallacies",
      "acc,none": 0.6993865030674846,
      "acc_stderr,none": 0.03602511318806771
    },
    "harness|mmlu_moral_disputes|0": {
      "alias": " - moral_disputes",
      "acc,none": 0.6242774566473989,
      "acc_stderr,none": 0.026074314851657083
    },
    "harness|mmlu_moral_scenarios|0": {
      "alias": " - moral_scenarios",
      "acc,none": 0.2837988826815642,
      "acc_stderr,none": 0.01507835897075178
    },
    "harness|mmlu_philosophy|0": {
      "alias": " - philosophy",
      "acc,none": 0.6334405144694534,
      "acc_stderr,none": 0.027368078243971628
    },
    "harness|mmlu_prehistory|0": {
      "alias": " - prehistory",
      "acc,none": 0.5555555555555556,
      "acc_stderr,none": 0.02764847787741332
    },
    "harness|mmlu_professional_law|0": {
      "alias": " - professional_law",
      "acc,none": 0.42633637548891784,
      "acc_stderr,none": 0.012630884771599698
    },
    "harness|mmlu_world_religions|0": {
      "alias": " - world_religions",
      "acc,none": 0.7076023391812866,
      "acc_stderr,none": 0.034886477134579215
    },
    "harness|mmlu_other|0": {
      "alias": " - other",
      "acc,none": 0.6195687158030254,
      "acc_stderr,none": 0.00842476546352716
    },
    "harness|mmlu_business_ethics|0": {
      "alias": " - business_ethics",
      "acc,none": 0.65,
      "acc_stderr,none": 0.047937248544110196
    },
    "harness|mmlu_clinical_knowledge|0": {
      "alias": " - clinical_knowledge",
      "acc,none": 0.6075471698113207,
      "acc_stderr,none": 0.03005258057955784
    },
    "harness|mmlu_college_medicine|0": {
      "alias": " - college_medicine",
      "acc,none": 0.5606936416184971,
      "acc_stderr,none": 0.03784271932887467
    },
    "harness|mmlu_global_facts|0": {
      "alias": " - global_facts",
      "acc,none": 0.23,
      "acc_stderr,none": 0.04229525846816506
    },
    "harness|mmlu_human_aging|0": {
      "alias": " - human_aging",
      "acc,none": 0.6143497757847534,
      "acc_stderr,none": 0.03266842214289202
    },
    "harness|mmlu_management|0": {
      "alias": " - management",
      "acc,none": 0.7864077669902912,
      "acc_stderr,none": 0.040580420156460344
    },
    "harness|mmlu_marketing|0": {
      "alias": " - marketing",
      "acc,none": 0.811965811965812,
      "acc_stderr,none": 0.025598193686652244
    },
    "harness|mmlu_medical_genetics|0": {
      "alias": " - medical_genetics",
      "acc,none": 0.62,
      "acc_stderr,none": 0.048783173121456316
    },
    "harness|mmlu_miscellaneous|0": {
      "alias": " - miscellaneous",
      "acc,none": 0.7075351213282248,
      "acc_stderr,none": 0.016267000684598642
    },
    "harness|mmlu_nutrition|0": {
      "alias": " - nutrition",
      "acc,none": 0.6830065359477124,
      "acc_stderr,none": 0.026643278474508755
    },
    "harness|mmlu_professional_accounting|0": {
      "alias": " - professional_accounting",
      "acc,none": 0.44680851063829785,
      "acc_stderr,none": 0.02965823509766691
    },
    "harness|mmlu_professional_medicine|0": {
      "alias": " - professional_medicine",
      "acc,none": 0.5367647058823529,
      "acc_stderr,none": 0.030290619180485697
    },
    "harness|mmlu_virology|0": {
      "alias": " - virology",
      "acc,none": 0.4457831325301205,
      "acc_stderr,none": 0.03869543323472101
    },
    "harness|mmlu_social_sciences|0": {
      "alias": " - social_sciences",
      "acc,none": 0.6379590510237244,
      "acc_stderr,none": 0.008481043207876597
    },
    "harness|mmlu_econometrics|0": {
      "alias": " - econometrics",
      "acc,none": 0.3508771929824561,
      "acc_stderr,none": 0.04489539350270701
    },
    "harness|mmlu_high_school_geography|0": {
      "alias": " - high_school_geography",
      "acc,none": 0.7121212121212122,
      "acc_stderr,none": 0.03225883512300992
    },
    "harness|mmlu_high_school_government_and_politics|0": {
      "alias": " - high_school_government_and_politics",
      "acc,none": 0.7512953367875648,
      "acc_stderr,none": 0.031195840877700293
    },
    "harness|mmlu_high_school_macroeconomics|0": {
      "alias": " - high_school_macroeconomics",
      "acc,none": 0.5743589743589743,
      "acc_stderr,none": 0.025069094387296535
    },
    "harness|mmlu_high_school_microeconomics|0": {
      "alias": " - high_school_microeconomics",
      "acc,none": 0.5798319327731093,
      "acc_stderr,none": 0.03206183783236152
    },
    "harness|mmlu_high_school_psychology|0": {
      "alias": " - high_school_psychology",
      "acc,none": 0.7339449541284404,
      "acc_stderr,none": 0.018946022322225593
    },
    "harness|mmlu_human_sexuality|0": {
      "alias": " - human_sexuality",
      "acc,none": 0.6412213740458015,
      "acc_stderr,none": 0.04206739313864908
    },
    "harness|mmlu_professional_psychology|0": {
      "alias": " - professional_psychology",
      "acc,none": 0.5359477124183006,
      "acc_stderr,none": 0.020175488765484043
    },
    "harness|mmlu_public_relations|0": {
      "alias": " - public_relations",
      "acc,none": 0.5909090909090909,
      "acc_stderr,none": 0.04709306978661896
    },
    "harness|mmlu_security_studies|0": {
      "alias": " - security_studies",
      "acc,none": 0.6857142857142857,
      "acc_stderr,none": 0.029719329422417468
    },
    "harness|mmlu_sociology|0": {
      "alias": " - sociology",
      "acc,none": 0.7661691542288557,
      "acc_stderr,none": 0.029929415408348387
    },
    "harness|mmlu_us_foreign_policy|0": {
      "alias": " - us_foreign_policy",
      "acc,none": 0.76,
      "acc_stderr,none": 0.04292346959909282
    },
    "harness|mmlu_stem|0": {
      "alias": " - stem",
      "acc,none": 0.4668569616238503,
      "acc_stderr,none": 0.008757565781117154
    },
    "harness|mmlu_abstract_algebra|0": {
      "alias": " - abstract_algebra",
      "acc,none": 0.35,
      "acc_stderr,none": 0.0479372485441102
    },
    "harness|mmlu_anatomy|0": {
      "alias": " - anatomy",
      "acc,none": 0.48148148148148145,
      "acc_stderr,none": 0.043163785995113245
    },
    "harness|mmlu_astronomy|0": {
      "alias": " - astronomy",
      "acc,none": 0.5789473684210527,
      "acc_stderr,none": 0.040179012759817494
    },
    "harness|mmlu_college_biology|0": {
      "alias": " - college_biology",
      "acc,none": 0.4930555555555556,
      "acc_stderr,none": 0.04180806750294938
    },
    "harness|mmlu_college_chemistry|0": {
      "alias": " - college_chemistry",
      "acc,none": 0.42,
      "acc_stderr,none": 0.049604496374885836
    },
    "harness|mmlu_college_computer_science|0": {
      "alias": " - college_computer_science",
      "acc,none": 0.48,
      "acc_stderr,none": 0.050211673156867795
    },
    "harness|mmlu_college_mathematics|0": {
      "alias": " - college_mathematics",
      "acc,none": 0.31,
      "acc_stderr,none": 0.04648231987117316
    },
    "harness|mmlu_college_physics|0": {
      "alias": " - college_physics",
      "acc,none": 0.3137254901960784,
      "acc_stderr,none": 0.04617034827006718
    },
    "harness|mmlu_computer_security|0": {
      "alias": " - computer_security",
      "acc,none": 0.65,
      "acc_stderr,none": 0.0479372485441102
    },
    "harness|mmlu_conceptual_physics|0": {
      "alias": " - conceptual_physics",
      "acc,none": 0.4553191489361702,
      "acc_stderr,none": 0.03255525359340355
    },
    "harness|mmlu_electrical_engineering|0": {
      "alias": " - electrical_engineering",
      "acc,none": 0.5655172413793104,
      "acc_stderr,none": 0.04130740879555498
    },
    "harness|mmlu_elementary_mathematics|0": {
      "alias": " - elementary_mathematics",
      "acc,none": 0.4444444444444444,
      "acc_stderr,none": 0.025591857761382182
    },
    "harness|mmlu_high_school_biology|0": {
      "alias": " - high_school_biology",
      "acc,none": 0.6096774193548387,
      "acc_stderr,none": 0.027751256636969576
    },
    "harness|mmlu_high_school_chemistry|0": {
      "alias": " - high_school_chemistry",
      "acc,none": 0.5073891625615764,
      "acc_stderr,none": 0.035176035403610105
    },
    "harness|mmlu_high_school_computer_science|0": {
      "alias": " - high_school_computer_science",
      "acc,none": 0.57,
      "acc_stderr,none": 0.049756985195624284
    },
    "harness|mmlu_high_school_mathematics|0": {
      "alias": " - high_school_mathematics",
      "acc,none": 0.37777777777777777,
      "acc_stderr,none": 0.029560707392465718
    },
    "harness|mmlu_high_school_physics|0": {
      "alias": " - high_school_physics",
      "acc,none": 0.33112582781456956,
      "acc_stderr,none": 0.038425817186598696
    },
    "harness|mmlu_high_school_statistics|0": {
      "alias": " - high_school_statistics",
      "acc,none": 0.4212962962962963,
      "acc_stderr,none": 0.03367462138896078
    },
    "harness|mmlu_machine_learning|0": {
      "alias": " - machine_learning",
      "acc,none": 0.4107142857142857,
      "acc_stderr,none": 0.04669510663875191
    },
    "harness|arc:challenge|0": {
      "acc,none": 0.36945392491467577,
      "acc_stderr,none": 0.01410457836649189,
      "acc_norm,none": 0.39761092150170646,
      "acc_norm_stderr,none": 0.014301752223279535,
      "alias": "arc_challenge"
    },
    "harness|winogrande|0": {
      "acc,none": 0.6606156274664562,
      "acc_stderr,none": 0.01330771492894175,
      "alias": "winogrande"
    },
    "harness|boolq|0": {
      "acc,none": 0.7629969418960245,
      "acc_stderr,none": 0.007437567381277126,
      "alias": "boolq"
    },
    "harness|arc:easy|0": {
      "acc,none": 0.6957070707070707,
      "acc_stderr,none": 0.009441202922359185,
      "acc_norm,none": 0.6620370370370371,
      "acc_norm_stderr,none": 0.00970608053863286,
      "alias": "arc_easy"
    },
    "harness|hellaswag|0": {
      "acc,none": 0.4856602270464051,
      "acc_stderr,none": 0.004987728900897589,
      "acc_norm,none": 0.6493726349332802,
      "acc_norm_stderr,none": 0.004761912511707522,
      "alias": "hellaswag"
    },
    "harness|truthfulqa:mc1|0": {
      "acc,none": 0.27539779681762544,
      "acc_stderr,none": 0.01563813566777552,
      "alias": "truthfulqa_mc1"
    },
    "harness|truthfulqa:mc2|0": {
      "acc,none": 0.4316038703653941,
      "acc_stderr,none": 0.014556182158523336,
      "alias": "truthfulqa_mc2"
    },
    "harness|piqa|0": {
      "acc,none": 0.7562568008705114,
      "acc_stderr,none": 0.010017199471500612,
      "acc_norm,none": 0.7535364526659413,
      "acc_norm_stderr,none": 0.01005481078967182,
      "alias": "piqa"
    }
  },
  "task_info": {
    "model": "Intel/Qwen2-1.5B-Instuct-int4-inc",
    "local": true,
    "revision": "main",
    "private": false,
    "params": 1.5,
    "architectures": "QwenForCausalLM",
    "quant_type": "AutoRound",
    "precision": "4bit",
    "model_params": 1.5,
    "model_size": 1.6,
    "weight_dtype": "int4",
    "compute_dtype": "float16",
    "gguf_ftype": "*Q4_0.gguf",
    "hardware": "gpu",
    "status": "Finished",
    "submitted_time": "2024-04-23T15:44:22Z",
    "model_type": "quantization",
    "job_id": -1,
    "job_start_time": null,
    "scripts": "ITREX"
  },
  "quantization_config": {
    "autoround_version": "0.2.1.dev",
    "bits": 4,
    "damp_percent": 0.01,
    "desc_act": false,
    "enable_minmax_tuning": true,
    "enable_quanted_input": true,
    "group_size": 32,
    "is_marlin_format": false,
    "iters": 1000,
    "lr": 0.001,
    "minmax_lr": 0.001,
    "model_file_base_name": "model",
    "model_name_or_path": null,
    "quant_method": "gptq",
    "scale_dtype": "float16",
    "static_groups": false,
    "sym": true,
    "true_sequential": false
  },
  "versions": {
    "harness|lambada:openai|0": 1.0,
    "harness|openbookqa|0": 1.0,
    "harness|mmlu|0": null,
    "harness|mmlu_humanities|0": null,
    "harness|mmlu_formal_logic|0": 0.0,
    "harness|mmlu_high_school_european_history|0": 0.0,
    "harness|mmlu_high_school_us_history|0": 0.0,
    "harness|mmlu_high_school_world_history|0": 0.0,
    "harness|mmlu_international_law|0": 0.0,
    "harness|mmlu_jurisprudence|0": 0.0,
    "harness|mmlu_logical_fallacies|0": 0.0,
    "harness|mmlu_moral_disputes|0": 0.0,
    "harness|mmlu_moral_scenarios|0": 0.0,
    "harness|mmlu_philosophy|0": 0.0,
    "harness|mmlu_prehistory|0": 0.0,
    "harness|mmlu_professional_law|0": 0.0,
    "harness|mmlu_world_religions|0": 0.0,
    "harness|mmlu_other|0": null,
    "harness|mmlu_business_ethics|0": 0.0,
    "harness|mmlu_clinical_knowledge|0": 0.0,
    "harness|mmlu_college_medicine|0": 0.0,
    "harness|mmlu_global_facts|0": 0.0,
    "harness|mmlu_human_aging|0": 0.0,
    "harness|mmlu_management|0": 0.0,
    "harness|mmlu_marketing|0": 0.0,
    "harness|mmlu_medical_genetics|0": 0.0,
    "harness|mmlu_miscellaneous|0": 0.0,
    "harness|mmlu_nutrition|0": 0.0,
    "harness|mmlu_professional_accounting|0": 0.0,
    "harness|mmlu_professional_medicine|0": 0.0,
    "harness|mmlu_virology|0": 0.0,
    "harness|mmlu_social_sciences|0": null,
    "harness|mmlu_econometrics|0": 0.0,
    "harness|mmlu_high_school_geography|0": 0.0,
    "harness|mmlu_high_school_government_and_politics|0": 0.0,
    "harness|mmlu_high_school_macroeconomics|0": 0.0,
    "harness|mmlu_high_school_microeconomics|0": 0.0,
    "harness|mmlu_high_school_psychology|0": 0.0,
    "harness|mmlu_human_sexuality|0": 0.0,
    "harness|mmlu_professional_psychology|0": 0.0,
    "harness|mmlu_public_relations|0": 0.0,
    "harness|mmlu_security_studies|0": 0.0,
    "harness|mmlu_sociology|0": 0.0,
    "harness|mmlu_us_foreign_policy|0": 0.0,
    "harness|mmlu_stem|0": null,
    "harness|mmlu_abstract_algebra|0": 0.0,
    "harness|mmlu_anatomy|0": 0.0,
    "harness|mmlu_astronomy|0": 0.0,
    "harness|mmlu_college_biology|0": 0.0,
    "harness|mmlu_college_chemistry|0": 0.0,
    "harness|mmlu_college_computer_science|0": 0.0,
    "harness|mmlu_college_mathematics|0": 0.0,
    "harness|mmlu_college_physics|0": 0.0,
    "harness|mmlu_computer_security|0": 0.0,
    "harness|mmlu_conceptual_physics|0": 0.0,
    "harness|mmlu_electrical_engineering|0": 0.0,
    "harness|mmlu_elementary_mathematics|0": 0.0,
    "harness|mmlu_high_school_biology|0": 0.0,
    "harness|mmlu_high_school_chemistry|0": 0.0,
    "harness|mmlu_high_school_computer_science|0": 0.0,
    "harness|mmlu_high_school_mathematics|0": 0.0,
    "harness|mmlu_high_school_physics|0": 0.0,
    "harness|mmlu_high_school_statistics|0": 0.0,
    "harness|mmlu_machine_learning|0": 0.0,
    "harness|arc:challenge|0": 1.0,
    "harness|winogrande|0": 1.0,
    "harness|boolq|0": 2.0,
    "harness|arc:easy|0": 1.0,
    "harness|hellaswag|0": 1.0,
    "harness|truthfulqa:mc1|0": 2.0,
    "harness|truthfulqa:mc2|0": 2.0,
    "harness|piqa|0": 1.0
  },
  "n-shot": {
    "arc_challenge": 0,
    "arc_easy": 0,
    "boolq": 0,
    "hellaswag": 0,
    "lambada_openai": 0,
    "mmlu": 0,
    "mmlu_abstract_algebra": 0,
    "mmlu_anatomy": 0,
    "mmlu_astronomy": 0,
    "mmlu_business_ethics": 0,
    "mmlu_clinical_knowledge": 0,
    "mmlu_college_biology": 0,
    "mmlu_college_chemistry": 0,
    "mmlu_college_computer_science": 0,
    "mmlu_college_mathematics": 0,
    "mmlu_college_medicine": 0,
    "mmlu_college_physics": 0,
    "mmlu_computer_security": 0,
    "mmlu_conceptual_physics": 0,
    "mmlu_econometrics": 0,
    "mmlu_electrical_engineering": 0,
    "mmlu_elementary_mathematics": 0,
    "mmlu_formal_logic": 0,
    "mmlu_global_facts": 0,
    "mmlu_high_school_biology": 0,
    "mmlu_high_school_chemistry": 0,
    "mmlu_high_school_computer_science": 0,
    "mmlu_high_school_european_history": 0,
    "mmlu_high_school_geography": 0,
    "mmlu_high_school_government_and_politics": 0,
    "mmlu_high_school_macroeconomics": 0,
    "mmlu_high_school_mathematics": 0,
    "mmlu_high_school_microeconomics": 0,
    "mmlu_high_school_physics": 0,
    "mmlu_high_school_psychology": 0,
    "mmlu_high_school_statistics": 0,
    "mmlu_high_school_us_history": 0,
    "mmlu_high_school_world_history": 0,
    "mmlu_human_aging": 0,
    "mmlu_human_sexuality": 0,
    "mmlu_humanities": 0,
    "mmlu_international_law": 0,
    "mmlu_jurisprudence": 0,
    "mmlu_logical_fallacies": 0,
    "mmlu_machine_learning": 0,
    "mmlu_management": 0,
    "mmlu_marketing": 0,
    "mmlu_medical_genetics": 0,
    "mmlu_miscellaneous": 0,
    "mmlu_moral_disputes": 0,
    "mmlu_moral_scenarios": 0,
    "mmlu_nutrition": 0,
    "mmlu_other": 0,
    "mmlu_philosophy": 0,
    "mmlu_prehistory": 0,
    "mmlu_professional_accounting": 0,
    "mmlu_professional_law": 0,
    "mmlu_professional_medicine": 0,
    "mmlu_professional_psychology": 0,
    "mmlu_public_relations": 0,
    "mmlu_security_studies": 0,
    "mmlu_social_sciences": 0,
    "mmlu_sociology": 0,
    "mmlu_stem": 0,
    "mmlu_us_foreign_policy": 0,
    "mmlu_virology": 0,
    "mmlu_world_religions": 0,
    "openbookqa": 0,
    "piqa": 0,
    "truthfulqa_mc1": 0,
    "truthfulqa_mc2": 0,
    "winogrande": 0
  },
  "date": 1717635926.568774,
  "config": {
    "model": "hf",
    "model_args": "pretrained=Intel/Qwen2-1.5B-Instuct-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main",
    "batch_size": 4,
    "batch_sizes": [],
    "device": "cuda",
    "use_cache": null,
    "limit": null,
    "bootstrap_iters": 100000,
    "gen_kwargs": null
  }
}
Intel/results_2024-06-06-10-10-10.json
ADDED
|
@@ -0,0 +1,596 @@
+{
+  "config_general": {
+    "lighteval_sha": "1.4",
+    "num_few_shot_default": null,
+    "num_fewshot_seeds": null,
+    "override_batch_size": null,
+    "max_samples": null,
+    "job_id": -1,
+    "start_time": null,
+    "end_time": "2024-06-06-10-10-10",
+    "total_evaluation_time_secondes": "",
+    "model_name": "Intel/Qwen2-0.5B-Instuct-int4-inc",
+    "model_sha": "",
+    "model_dtype": "4bit",
+    "model_size": 0.719,
+    "model_params": 0.5,
+    "quant_type": "AutoRound",
+    "precision": "4bit"
+  },
+  "results": {
+    "harness|hellaswag|0": {
+      "acc,none": 0.3846843258315077,
+      "acc_stderr,none": 0.004855262903270792,
+      "acc_norm,none": 0.4843656642103167,
+      "acc_norm_stderr,none": 0.00498734148585666,
+      "alias": "hellaswag"
+    },
+    "harness|arc:easy|0": {
+      "acc,none": 0.5862794612794613,
+      "acc_stderr,none": 0.01010587853023814,
+      "acc_norm,none": 0.5395622895622896,
+      "acc_norm_stderr,none": 0.010227616386289008,
+      "alias": "arc_easy"
+    },
+    "harness|truthfulqa:mc1|0": {
+      "acc,none": 0.24724602203182375,
+      "acc_stderr,none": 0.01510240479735965,
+      "alias": "truthfulqa_mc1"
+    },
+    "harness|mmlu|0": {
+      "acc,none": 0.4223045150263495,
+      "acc_stderr,none": 0.004088092420326436,
+      "alias": "mmlu"
+    },
+    "harness|mmlu_humanities|0": {
+      "alias": " - humanities",
+      "acc,none": 0.3980871413390011,
+      "acc_stderr,none": 0.006959278348932478
+    },
+    "harness|mmlu_formal_logic|0": {
+      "alias": " - formal_logic",
+      "acc,none": 0.2619047619047619,
+      "acc_stderr,none": 0.039325376803928704
+    },
+    "harness|mmlu_high_school_european_history|0": {
+      "alias": " - high_school_european_history",
+      "acc,none": 0.5393939393939394,
+      "acc_stderr,none": 0.03892207016552013
+    },
+    "harness|mmlu_high_school_us_history|0": {
+      "alias": " - high_school_us_history",
+      "acc,none": 0.5392156862745098,
+      "acc_stderr,none": 0.03498501649369527
+    },
+    "harness|mmlu_high_school_world_history|0": {
+      "alias": " - high_school_world_history",
+      "acc,none": 0.5611814345991561,
+      "acc_stderr,none": 0.032302649315470375
+    },
+    "harness|mmlu_international_law|0": {
+      "alias": " - international_law",
+      "acc,none": 0.6528925619834711,
+      "acc_stderr,none": 0.04345724570292534
+    },
+    "harness|mmlu_jurisprudence|0": {
+      "alias": " - jurisprudence",
+      "acc,none": 0.5370370370370371,
+      "acc_stderr,none": 0.04820403072760628
+    },
+    "harness|mmlu_logical_fallacies|0": {
+      "alias": " - logical_fallacies",
+      "acc,none": 0.44785276073619634,
+      "acc_stderr,none": 0.03906947479456601
+    },
+    "harness|mmlu_moral_disputes|0": {
+      "alias": " - moral_disputes",
+      "acc,none": 0.5028901734104047,
+      "acc_stderr,none": 0.026918645383239004
+    },
+    "harness|mmlu_moral_scenarios|0": {
+      "alias": " - moral_scenarios",
+      "acc,none": 0.2424581005586592,
+      "acc_stderr,none": 0.014333522059217887
+    },
+    "harness|mmlu_philosophy|0": {
+      "alias": " - philosophy",
+      "acc,none": 0.4694533762057878,
+      "acc_stderr,none": 0.028345045864840684
+    },
+    "harness|mmlu_prehistory|0": {
+      "alias": " - prehistory",
+      "acc,none": 0.42592592592592593,
+      "acc_stderr,none": 0.027513747284379428
+    },
+    "harness|mmlu_professional_law|0": {
+      "alias": " - professional_law",
+      "acc,none": 0.3520208604954368,
+      "acc_stderr,none": 0.012198140605353609
+    },
+    "harness|mmlu_world_religions|0": {
+      "alias": " - world_religions",
+      "acc,none": 0.4853801169590643,
+      "acc_stderr,none": 0.038331852752130205
+    },
+    "harness|mmlu_other|0": {
+      "alias": " - other",
+      "acc,none": 0.467653685226907,
+      "acc_stderr,none": 0.008853315632851513
+    },
+    "harness|mmlu_business_ethics|0": {
+      "alias": " - business_ethics",
+      "acc,none": 0.54,
+      "acc_stderr,none": 0.05009082659620333
+    },
+    "harness|mmlu_clinical_knowledge|0": {
+      "alias": " - clinical_knowledge",
+      "acc,none": 0.4716981132075472,
+      "acc_stderr,none": 0.030723535249006107
+    },
+    "harness|mmlu_college_medicine|0": {
+      "alias": " - college_medicine",
+      "acc,none": 0.43352601156069365,
+      "acc_stderr,none": 0.03778621079092056
+    },
+    "harness|mmlu_global_facts|0": {
+      "alias": " - global_facts",
+      "acc,none": 0.36,
+      "acc_stderr,none": 0.04824181513244218
+    },
+    "harness|mmlu_human_aging|0": {
+      "alias": " - human_aging",
+      "acc,none": 0.4125560538116592,
+      "acc_stderr,none": 0.03304062175449296
+    },
+    "harness|mmlu_management|0": {
+      "alias": " - management",
+      "acc,none": 0.5825242718446602,
+      "acc_stderr,none": 0.048828405482122375
+    },
+    "harness|mmlu_marketing|0": {
+      "alias": " - marketing",
+      "acc,none": 0.6367521367521367,
+      "acc_stderr,none": 0.03150712523091264
+    },
+    "harness|mmlu_medical_genetics|0": {
+      "alias": " - medical_genetics",
+      "acc,none": 0.44,
+      "acc_stderr,none": 0.04988876515698589
+    },
+    "harness|mmlu_miscellaneous|0": {
+      "alias": " - miscellaneous",
+      "acc,none": 0.5019157088122606,
+      "acc_stderr,none": 0.017879832259026673
+    },
+    "harness|mmlu_nutrition|0": {
+      "alias": " - nutrition",
+      "acc,none": 0.5261437908496732,
+      "acc_stderr,none": 0.028590752958852394
+    },
+    "harness|mmlu_professional_accounting|0": {
+      "alias": " - professional_accounting",
+      "acc,none": 0.3723404255319149,
+      "acc_stderr,none": 0.02883892147125146
+    },
+    "harness|mmlu_professional_medicine|0": {
+      "alias": " - professional_medicine",
+      "acc,none": 0.3639705882352941,
+      "acc_stderr,none": 0.02922719246003203
+    },
+    "harness|mmlu_virology|0": {
+      "alias": " - virology",
+      "acc,none": 0.3614457831325301,
+      "acc_stderr,none": 0.037400593820293204
+    },
+    "harness|mmlu_social_sciences|0": {
+      "alias": " - social_sciences",
+      "acc,none": 0.48033799155021123,
+      "acc_stderr,none": 0.008913902203243567
+    },
+    "harness|mmlu_econometrics|0": {
+      "alias": " - econometrics",
+      "acc,none": 0.34210526315789475,
+      "acc_stderr,none": 0.04462917535336936
+    },
+    "harness|mmlu_high_school_geography|0": {
+      "alias": " - high_school_geography",
+      "acc,none": 0.5,
+      "acc_stderr,none": 0.035623524993954825
+    },
+    "harness|mmlu_high_school_government_and_politics|0": {
+      "alias": " - high_school_government_and_politics",
+      "acc,none": 0.5440414507772021,
+      "acc_stderr,none": 0.03594413711272435
+    },
+    "harness|mmlu_high_school_macroeconomics|0": {
+      "alias": " - high_school_macroeconomics",
+      "acc,none": 0.43333333333333335,
+      "acc_stderr,none": 0.025124653525885124
+    },
+    "harness|mmlu_high_school_microeconomics|0": {
+      "alias": " - high_school_microeconomics",
+      "acc,none": 0.42436974789915966,
+      "acc_stderr,none": 0.032104790510157764
+    },
+    "harness|mmlu_high_school_psychology|0": {
+      "alias": " - high_school_psychology",
+      "acc,none": 0.5467889908256881,
+      "acc_stderr,none": 0.021343255165546034
+    },
+    "harness|mmlu_human_sexuality|0": {
+      "alias": " - human_sexuality",
+      "acc,none": 0.4732824427480916,
+      "acc_stderr,none": 0.04379024936553894
+    },
+    "harness|mmlu_professional_psychology|0": {
+      "alias": " - professional_psychology",
+      "acc,none": 0.39052287581699346,
+      "acc_stderr,none": 0.019737008998094604
+    },
+    "harness|mmlu_public_relations|0": {
+      "alias": " - public_relations",
+      "acc,none": 0.5363636363636364,
+      "acc_stderr,none": 0.04776449162396197
+    },
+    "harness|mmlu_security_studies|0": {
+      "alias": " - security_studies",
+      "acc,none": 0.49795918367346936,
+      "acc_stderr,none": 0.0320089533497105
+    },
+    "harness|mmlu_sociology|0": {
+      "alias": " - sociology",
+      "acc,none": 0.5771144278606966,
+      "acc_stderr,none": 0.034932317774212816
+    },
+    "harness|mmlu_us_foreign_policy|0": {
+      "alias": " - us_foreign_policy",
+      "acc,none": 0.69,
+      "acc_stderr,none": 0.04648231987117316
+    },
+    "harness|mmlu_stem|0": {
+      "alias": " - stem",
+      "acc,none": 0.35712020298128766,
+      "acc_stderr,none": 0.008476247679652571
+    },
+    "harness|mmlu_abstract_algebra|0": {
+      "alias": " - abstract_algebra",
+      "acc,none": 0.29,
+      "acc_stderr,none": 0.04560480215720683
+    },
+    "harness|mmlu_anatomy|0": {
+      "alias": " - anatomy",
+      "acc,none": 0.4222222222222222,
+      "acc_stderr,none": 0.04266763404099582
+    },
+    "harness|mmlu_astronomy|0": {
+      "alias": " - astronomy",
+      "acc,none": 0.39473684210526316,
+      "acc_stderr,none": 0.039777499346220734
+    },
+    "harness|mmlu_college_biology|0": {
+      "alias": " - college_biology",
+      "acc,none": 0.3819444444444444,
+      "acc_stderr,none": 0.040629907841466674
+    },
+    "harness|mmlu_college_chemistry|0": {
+      "alias": " - college_chemistry",
+      "acc,none": 0.27,
+      "acc_stderr,none": 0.0446196043338474
+    },
+    "harness|mmlu_college_computer_science|0": {
+      "alias": " - college_computer_science",
+      "acc,none": 0.32,
+      "acc_stderr,none": 0.046882617226215034
+    },
+    "harness|mmlu_college_mathematics|0": {
+      "alias": " - college_mathematics",
+      "acc,none": 0.33,
+      "acc_stderr,none": 0.04725815626252604
+    },
+    "harness|mmlu_college_physics|0": {
+      "alias": " - college_physics",
+      "acc,none": 0.29411764705882354,
+      "acc_stderr,none": 0.045338381959297736
+    },
+    "harness|mmlu_computer_security|0": {
+      "alias": " - computer_security",
+      "acc,none": 0.41,
+      "acc_stderr,none": 0.049431107042371025
+    },
+    "harness|mmlu_conceptual_physics|0": {
+      "alias": " - conceptual_physics",
+      "acc,none": 0.40425531914893614,
+      "acc_stderr,none": 0.03208115750788684
+    },
+    "harness|mmlu_electrical_engineering|0": {
+      "alias": " - electrical_engineering",
+      "acc,none": 0.43448275862068964,
+      "acc_stderr,none": 0.04130740879555497
+    },
+    "harness|mmlu_elementary_mathematics|0": {
+      "alias": " - elementary_mathematics",
+      "acc,none": 0.3253968253968254,
+      "acc_stderr,none": 0.024130158299762602
+    },
+    "harness|mmlu_high_school_biology|0": {
+      "alias": " - high_school_biology",
+      "acc,none": 0.4612903225806452,
+      "acc_stderr,none": 0.028358634859836935
+    },
+    "harness|mmlu_high_school_chemistry|0": {
+      "alias": " - high_school_chemistry",
+      "acc,none": 0.3645320197044335,
+      "acc_stderr,none": 0.0338640574606209
+    },
+    "harness|mmlu_high_school_computer_science|0": {
+      "alias": " - high_school_computer_science",
+      "acc,none": 0.49,
+      "acc_stderr,none": 0.05024183937956911
+    },
+    "harness|mmlu_high_school_mathematics|0": {
+      "alias": " - high_school_mathematics",
+      "acc,none": 0.2740740740740741,
+      "acc_stderr,none": 0.027195934804085626
+    },
+    "harness|mmlu_high_school_physics|0": {
+      "alias": " - high_school_physics",
+      "acc,none": 0.2582781456953642,
+      "acc_stderr,none": 0.035737053147634576
+    },
+    "harness|mmlu_high_school_statistics|0": {
+      "alias": " - high_school_statistics",
+      "acc,none": 0.3101851851851852,
+      "acc_stderr,none": 0.03154696285656629
+    },
+    "harness|mmlu_machine_learning|0": {
+      "alias": " - machine_learning",
+      "acc,none": 0.3125,
+      "acc_stderr,none": 0.043994650575715215
+    },
+    "harness|piqa|0": {
+      "acc,none": 0.6855277475516867,
+      "acc_stderr,none": 0.010833009065106569,
+      "acc_norm,none": 0.6833514689880305,
+      "acc_norm_stderr,none": 0.010853160531978483,
+      "alias": "piqa"
+    },
+    "harness|boolq|0": {
+      "acc,none": 0.6281345565749236,
+      "acc_stderr,none": 0.00845301800735403,
+      "alias": "boolq"
+    },
+    "harness|openbookqa|0": {
+      "acc,none": 0.25,
+      "acc_stderr,none": 0.019384310743640384,
+      "acc_norm,none": 0.334,
+      "acc_norm_stderr,none": 0.021113492347743734,
+      "alias": "openbookqa"
+    },
+    "harness|arc:challenge|0": {
+      "acc,none": 0.2619453924914676,
+      "acc_stderr,none": 0.012849054826858108,
+      "acc_norm,none": 0.2883959044368601,
+      "acc_norm_stderr,none": 0.01323839442242817,
+      "alias": "arc_challenge"
+    },
+    "harness|truthfulqa:mc2|0": {
+      "acc,none": 0.39131629337724455,
+      "acc_stderr,none": 0.013943043500103466,
+      "alias": "truthfulqa_mc2"
+    },
+    "harness|winogrande|0": {
+      "acc,none": 0.5651144435674822,
+      "acc_stderr,none": 0.013932814110418027,
+      "alias": "winogrande"
+    },
+    "harness|lambada:openai|0": {
+      "perplexity,none": 12.554918626547046,
+      "perplexity_stderr,none": 0.4194570823476025,
+      "acc,none": 0.4867067727537357,
+      "acc_stderr,none": 0.006963515307693607,
+      "alias": "lambada_openai"
+    }
+  },
+  "task_info": {
+    "model": "Intel/Qwen2-0.5B-Instuct-int4-inc",
+    "local": true,
+    "revision": "main",
+    "private": false,
+    "params": 0.5,
+    "architectures": "QwenForCausalLM",
+    "quant_type": "AutoRound",
+    "precision": "4bit",
+    "model_params": 0.5,
+    "model_size": 0.719,
+    "weight_dtype": "int4",
+    "compute_dtype": "float16",
+    "gguf_ftype": "*Q4_0.gguf",
+    "hardware": "gpu",
+    "status": "Finished",
+    "submitted_time": "2024-04-23T15:44:22Z",
+    "model_type": "quantization",
+    "job_id": -1,
+    "job_start_time": null,
+    "scripts": "ITREX"
+  },
+  "quantization_config": {
+    "autoround_version": "0.2.1.dev",
+    "bits": 4,
+    "damp_percent": 0.01,
+    "desc_act": false,
+    "enable_minmax_tuning": true,
+    "enable_quanted_input": true,
+    "group_size": 32,
+    "is_marlin_format": false,
+    "iters": 1000,
+    "lr": 0.001,
+    "minmax_lr": 0.002,
+    "model_file_base_name": "model",
+    "model_name_or_path": null,
+    "quant_method": "gptq",
+    "scale_dtype": "float16",
+    "static_groups": false,
+    "sym": true,
+    "true_sequential": false
+  },
+  "versions": {
+    "harness|hellaswag|0": 1.0,
+    "harness|arc:easy|0": 1.0,
+    "harness|truthfulqa:mc1|0": 2.0,
+    "harness|mmlu|0": null,
+    "harness|mmlu_humanities|0": null,
+    "harness|mmlu_formal_logic|0": 0.0,
+    "harness|mmlu_high_school_european_history|0": 0.0,
+    "harness|mmlu_high_school_us_history|0": 0.0,
+    "harness|mmlu_high_school_world_history|0": 0.0,
+    "harness|mmlu_international_law|0": 0.0,
+    "harness|mmlu_jurisprudence|0": 0.0,
+    "harness|mmlu_logical_fallacies|0": 0.0,
+    "harness|mmlu_moral_disputes|0": 0.0,
+    "harness|mmlu_moral_scenarios|0": 0.0,
+    "harness|mmlu_philosophy|0": 0.0,
+    "harness|mmlu_prehistory|0": 0.0,
+    "harness|mmlu_professional_law|0": 0.0,
+    "harness|mmlu_world_religions|0": 0.0,
+    "harness|mmlu_other|0": null,
+    "harness|mmlu_business_ethics|0": 0.0,
+    "harness|mmlu_clinical_knowledge|0": 0.0,
+    "harness|mmlu_college_medicine|0": 0.0,
+    "harness|mmlu_global_facts|0": 0.0,
+    "harness|mmlu_human_aging|0": 0.0,
+    "harness|mmlu_management|0": 0.0,
+    "harness|mmlu_marketing|0": 0.0,
+    "harness|mmlu_medical_genetics|0": 0.0,
+    "harness|mmlu_miscellaneous|0": 0.0,
+    "harness|mmlu_nutrition|0": 0.0,
+    "harness|mmlu_professional_accounting|0": 0.0,
+    "harness|mmlu_professional_medicine|0": 0.0,
+    "harness|mmlu_virology|0": 0.0,
+    "harness|mmlu_social_sciences|0": null,
+    "harness|mmlu_econometrics|0": 0.0,
+    "harness|mmlu_high_school_geography|0": 0.0,
+    "harness|mmlu_high_school_government_and_politics|0": 0.0,
+    "harness|mmlu_high_school_macroeconomics|0": 0.0,
+    "harness|mmlu_high_school_microeconomics|0": 0.0,
+    "harness|mmlu_high_school_psychology|0": 0.0,
+    "harness|mmlu_human_sexuality|0": 0.0,
+    "harness|mmlu_professional_psychology|0": 0.0,
+    "harness|mmlu_public_relations|0": 0.0,
+    "harness|mmlu_security_studies|0": 0.0,
+    "harness|mmlu_sociology|0": 0.0,
+    "harness|mmlu_us_foreign_policy|0": 0.0,
+    "harness|mmlu_stem|0": null,
+    "harness|mmlu_abstract_algebra|0": 0.0,
+    "harness|mmlu_anatomy|0": 0.0,
+    "harness|mmlu_astronomy|0": 0.0,
+    "harness|mmlu_college_biology|0": 0.0,
+    "harness|mmlu_college_chemistry|0": 0.0,
+    "harness|mmlu_college_computer_science|0": 0.0,
+    "harness|mmlu_college_mathematics|0": 0.0,
+    "harness|mmlu_college_physics|0": 0.0,
+    "harness|mmlu_computer_security|0": 0.0,
+    "harness|mmlu_conceptual_physics|0": 0.0,
+    "harness|mmlu_electrical_engineering|0": 0.0,
+    "harness|mmlu_elementary_mathematics|0": 0.0,
+    "harness|mmlu_high_school_biology|0": 0.0,
+    "harness|mmlu_high_school_chemistry|0": 0.0,
+    "harness|mmlu_high_school_computer_science|0": 0.0,
+    "harness|mmlu_high_school_mathematics|0": 0.0,
+    "harness|mmlu_high_school_physics|0": 0.0,
+    "harness|mmlu_high_school_statistics|0": 0.0,
+    "harness|mmlu_machine_learning|0": 0.0,
+    "harness|piqa|0": 1.0,
+    "harness|boolq|0": 2.0,
+    "harness|openbookqa|0": 1.0,
+    "harness|arc:challenge|0": 1.0,
+    "harness|truthfulqa:mc2|0": 2.0,
+    "harness|winogrande|0": 1.0,
+    "harness|lambada:openai|0": 1.0
+  },
+  "n-shot": {
+    "arc_challenge": 0,
+    "arc_easy": 0,
+    "boolq": 0,
+    "hellaswag": 0,
+    "lambada_openai": 0,
+    "mmlu": 0,
+    "mmlu_abstract_algebra": 0,
+    "mmlu_anatomy": 0,
+    "mmlu_astronomy": 0,
+    "mmlu_business_ethics": 0,
+    "mmlu_clinical_knowledge": 0,
+    "mmlu_college_biology": 0,
+    "mmlu_college_chemistry": 0,
+    "mmlu_college_computer_science": 0,
+    "mmlu_college_mathematics": 0,
+    "mmlu_college_medicine": 0,
+    "mmlu_college_physics": 0,
+    "mmlu_computer_security": 0,
+    "mmlu_conceptual_physics": 0,
+    "mmlu_econometrics": 0,
+    "mmlu_electrical_engineering": 0,
+    "mmlu_elementary_mathematics": 0,
+    "mmlu_formal_logic": 0,
+    "mmlu_global_facts": 0,
+    "mmlu_high_school_biology": 0,
+    "mmlu_high_school_chemistry": 0,
+    "mmlu_high_school_computer_science": 0,
+    "mmlu_high_school_european_history": 0,
+    "mmlu_high_school_geography": 0,
+    "mmlu_high_school_government_and_politics": 0,
+    "mmlu_high_school_macroeconomics": 0,
+    "mmlu_high_school_mathematics": 0,
+    "mmlu_high_school_microeconomics": 0,
+    "mmlu_high_school_physics": 0,
+    "mmlu_high_school_psychology": 0,
+    "mmlu_high_school_statistics": 0,
+    "mmlu_high_school_us_history": 0,
+    "mmlu_high_school_world_history": 0,
+    "mmlu_human_aging": 0,
+    "mmlu_human_sexuality": 0,
+    "mmlu_humanities": 0,
+    "mmlu_international_law": 0,
+    "mmlu_jurisprudence": 0,
+    "mmlu_logical_fallacies": 0,
+    "mmlu_machine_learning": 0,
+    "mmlu_management": 0,
+    "mmlu_marketing": 0,
+    "mmlu_medical_genetics": 0,
+    "mmlu_miscellaneous": 0,
+    "mmlu_moral_disputes": 0,
+    "mmlu_moral_scenarios": 0,
+    "mmlu_nutrition": 0,
+    "mmlu_other": 0,
+    "mmlu_philosophy": 0,
+    "mmlu_prehistory": 0,
+    "mmlu_professional_accounting": 0,
+    "mmlu_professional_law": 0,
+    "mmlu_professional_medicine": 0,
+    "mmlu_professional_psychology": 0,
+    "mmlu_public_relations": 0,
+    "mmlu_security_studies": 0,
+    "mmlu_social_sciences": 0,
+    "mmlu_sociology": 0,
+    "mmlu_stem": 0,
+    "mmlu_us_foreign_policy": 0,
+    "mmlu_virology": 0,
+    "mmlu_world_religions": 0,
+    "openbookqa": 0,
+    "piqa": 0,
+    "truthfulqa_mc1": 0,
+    "truthfulqa_mc2": 0,
+    "winogrande": 0
+  },
+  "date": 1717638292.2679868,
+  "config": {
+    "model": "hf",
+    "model_args": "pretrained=Intel/Qwen2-0.5B-Instuct-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main",
+    "batch_size": 4,
+    "batch_sizes": [],
+    "device": "cuda",
+    "use_cache": null,
+    "limit": null,
+    "bootstrap_iters": 100000,
+    "gen_kwargs": null
+  }
+}
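The `quantization_config` above is GPTQ-format metadata emitted by AutoRound (`"quant_method": "gptq"`, 4-bit symmetric weights with group size 32), so the checkpoint can be consumed like any GPTQ model. A minimal loading sketch, assuming a CUDA environment with `transformers` and its GPTQ kernel dependencies installed; the prompt and generation settings are illustrative, not taken from these files:

```python
# Sketch only: load the int4 checkpoint described by task_info /
# quantization_config. Assumes transformers with GPTQ support is installed;
# the prompt and max_new_tokens below are illustrative assumptions.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "Intel/Qwen2-0.5B-Instuct-int4-inc"  # repo name kept verbatim

tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    device_map="auto",          # "hardware": "gpu" in task_info
    torch_dtype=torch.float16,  # "compute_dtype": "float16"
    trust_remote_code=True,     # mirrors the recorded model_args
)

inputs = tokenizer("The capital of France is", return_tensors="pt").to(model.device)
print(tokenizer.decode(model.generate(**inputs, max_new_tokens=16)[0]))
```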