{
  "config_general": {
    "lighteval_sha": "1.4",
    "num_few_shot_default": null,
    "num_fewshot_seeds": null,
    "override_batch_size": null,
    "max_samples": null,
    "job_id": -1,
    "start_time": null,
    "end_time": "2024-04-28-11-43-27",
    "total_evaluation_time_secondes": "",
    "model_name": "solidrust/Meta-Llama-3-8B-Instruct-hf-AWQ",
    "model_sha": "",
    "model_dtype": "4bit",
    "model_size": 5.73,
    "model_params": 7.03,
    "quant_type": "AWQ",
    "precision": "4bit"
  },
| "results": { | |
| "harness|lambada:openai|0": { | |
| "perplexity,none": 3.1935073590428087, | |
| "perplexity_stderr,none": 0.07777440049727834, | |
| "acc,none": 0.720551135261013, | |
| "acc_stderr,none": 0.006251664323978085, | |
| "alias": "lambada_openai" | |
| }, | |
| "harness|boolq|0": { | |
| "acc,none": 0.828440366972477, | |
| "acc_stderr,none": 0.0065937233273874515, | |
| "alias": "boolq" | |
| }, | |
| "harness|mmlu|0": { | |
| "acc,none": 0.6159379005839624, | |
| "acc_stderr,none": 0.003891283978010192, | |
| "alias": "mmlu" | |
| }, | |
| "harness|mmlu_humanities|0": { | |
| "alias": " - humanities", | |
| "acc,none": 0.5636556854410202, | |
| "acc_stderr,none": 0.006801499050906265 | |
| }, | |
| "harness|mmlu_formal_logic|0": { | |
| "alias": " - formal_logic", | |
| "acc,none": 0.4523809523809524, | |
| "acc_stderr,none": 0.044518079590553275 | |
| }, | |
| "harness|mmlu_high_school_european_history|0": { | |
| "alias": " - high_school_european_history", | |
| "acc,none": 0.7393939393939394, | |
| "acc_stderr,none": 0.034277431758165236 | |
| }, | |
| "harness|mmlu_high_school_us_history|0": { | |
| "alias": " - high_school_us_history", | |
| "acc,none": 0.8284313725490197, | |
| "acc_stderr,none": 0.026460569561240658 | |
| }, | |
| "harness|mmlu_high_school_world_history|0": { | |
| "alias": " - high_school_world_history", | |
| "acc,none": 0.8059071729957806, | |
| "acc_stderr,none": 0.025744902532290934 | |
| }, | |
| "harness|mmlu_international_law|0": { | |
| "alias": " - international_law", | |
| "acc,none": 0.7355371900826446, | |
| "acc_stderr,none": 0.04026187527591206 | |
| }, | |
| "harness|mmlu_jurisprudence|0": { | |
| "alias": " - jurisprudence", | |
| "acc,none": 0.7592592592592593, | |
| "acc_stderr,none": 0.041331194402438376 | |
| }, | |
| "harness|mmlu_logical_fallacies|0": { | |
| "alias": " - logical_fallacies", | |
| "acc,none": 0.7668711656441718, | |
| "acc_stderr,none": 0.0332201579577674 | |
| }, | |
| "harness|mmlu_moral_disputes|0": { | |
| "alias": " - moral_disputes", | |
| "acc,none": 0.6647398843930635, | |
| "acc_stderr,none": 0.02541600377316555 | |
| }, | |
| "harness|mmlu_moral_scenarios|0": { | |
| "alias": " - moral_scenarios", | |
| "acc,none": 0.3396648044692737, | |
| "acc_stderr,none": 0.015839400406212505 | |
| }, | |
| "harness|mmlu_philosophy|0": { | |
| "alias": " - philosophy", | |
| "acc,none": 0.6977491961414791, | |
| "acc_stderr,none": 0.02608270069539966 | |
| }, | |
| "harness|mmlu_prehistory|0": { | |
| "alias": " - prehistory", | |
| "acc,none": 0.7222222222222222, | |
| "acc_stderr,none": 0.02492200116888633 | |
| }, | |
| "harness|mmlu_professional_law|0": { | |
| "alias": " - professional_law", | |
| "acc,none": 0.45697522816166886, | |
| "acc_stderr,none": 0.012722869501611419 | |
| }, | |
| "harness|mmlu_world_religions|0": { | |
| "alias": " - world_religions", | |
| "acc,none": 0.7660818713450293, | |
| "acc_stderr,none": 0.03246721765117826 | |
| }, | |
| "harness|mmlu_other|0": { | |
| "alias": " - other", | |
| "acc,none": 0.6910202767943354, | |
| "acc_stderr,none": 0.007999286870057722 | |
| }, | |
| "harness|mmlu_business_ethics|0": { | |
| "alias": " - business_ethics", | |
| "acc,none": 0.64, | |
| "acc_stderr,none": 0.048241815132442176 | |
| }, | |
| "harness|mmlu_clinical_knowledge|0": { | |
| "alias": " - clinical_knowledge", | |
| "acc,none": 0.6981132075471698, | |
| "acc_stderr,none": 0.02825420034443866 | |
| }, | |
| "harness|mmlu_college_medicine|0": { | |
| "alias": " - college_medicine", | |
| "acc,none": 0.6011560693641619, | |
| "acc_stderr,none": 0.037336266553835096 | |
| }, | |
| "harness|mmlu_global_facts|0": { | |
| "alias": " - global_facts", | |
| "acc,none": 0.37, | |
| "acc_stderr,none": 0.04852365870939099 | |
| }, | |
| "harness|mmlu_human_aging|0": { | |
| "alias": " - human_aging", | |
| "acc,none": 0.6771300448430493, | |
| "acc_stderr,none": 0.03138147637575499 | |
| }, | |
| "harness|mmlu_management|0": { | |
| "alias": " - management", | |
| "acc,none": 0.8155339805825242, | |
| "acc_stderr,none": 0.03840423627288276 | |
| }, | |
| "harness|mmlu_marketing|0": { | |
| "alias": " - marketing", | |
| "acc,none": 0.8547008547008547, | |
| "acc_stderr,none": 0.02308663508684141 | |
| }, | |
| "harness|mmlu_medical_genetics|0": { | |
| "alias": " - medical_genetics", | |
| "acc,none": 0.81, | |
| "acc_stderr,none": 0.03942772444036623 | |
| }, | |
| "harness|mmlu_miscellaneous|0": { | |
| "alias": " - miscellaneous", | |
| "acc,none": 0.7982120051085568, | |
| "acc_stderr,none": 0.01435170218163687 | |
| }, | |
| "harness|mmlu_nutrition|0": { | |
| "alias": " - nutrition", | |
| "acc,none": 0.6830065359477124, | |
| "acc_stderr,none": 0.02664327847450875 | |
| }, | |
| "harness|mmlu_professional_accounting|0": { | |
| "alias": " - professional_accounting", | |
| "acc,none": 0.49645390070921985, | |
| "acc_stderr,none": 0.02982674915328092 | |
| }, | |
| "harness|mmlu_professional_medicine|0": { | |
| "alias": " - professional_medicine", | |
| "acc,none": 0.6875, | |
| "acc_stderr,none": 0.02815637344037142 | |
| }, | |
| "harness|mmlu_virology|0": { | |
| "alias": " - virology", | |
| "acc,none": 0.4819277108433735, | |
| "acc_stderr,none": 0.03889951252827215 | |
| }, | |
| "harness|mmlu_social_sciences|0": { | |
| "alias": " - social_sciences", | |
| "acc,none": 0.7149821254468638, | |
| "acc_stderr,none": 0.007978473473048482 | |
| }, | |
| "harness|mmlu_econometrics|0": { | |
| "alias": " - econometrics", | |
| "acc,none": 0.4298245614035088, | |
| "acc_stderr,none": 0.04657047260594963 | |
| }, | |
| "harness|mmlu_high_school_geography|0": { | |
| "alias": " - high_school_geography", | |
| "acc,none": 0.7676767676767676, | |
| "acc_stderr,none": 0.03008862949021749 | |
| }, | |
| "harness|mmlu_high_school_government_and_politics|0": { | |
| "alias": " - high_school_government_and_politics", | |
| "acc,none": 0.8601036269430051, | |
| "acc_stderr,none": 0.025033870583015167 | |
| }, | |
| "harness|mmlu_high_school_macroeconomics|0": { | |
| "alias": " - high_school_macroeconomics", | |
| "acc,none": 0.6333333333333333, | |
| "acc_stderr,none": 0.024433016466052455 | |
| }, | |
| "harness|mmlu_high_school_microeconomics|0": { | |
| "alias": " - high_school_microeconomics", | |
| "acc,none": 0.6596638655462185, | |
| "acc_stderr,none": 0.03077805742293167 | |
| }, | |
| "harness|mmlu_high_school_psychology|0": { | |
| "alias": " - high_school_psychology", | |
| "acc,none": 0.7981651376146789, | |
| "acc_stderr,none": 0.017208579357787572 | |
| }, | |
| "harness|mmlu_human_sexuality|0": { | |
| "alias": " - human_sexuality", | |
| "acc,none": 0.7480916030534351, | |
| "acc_stderr,none": 0.03807387116306086 | |
| }, | |
| "harness|mmlu_professional_psychology|0": { | |
| "alias": " - professional_psychology", | |
| "acc,none": 0.6699346405228758, | |
| "acc_stderr,none": 0.019023726160724553 | |
| }, | |
| "harness|mmlu_public_relations|0": { | |
| "alias": " - public_relations", | |
| "acc,none": 0.6272727272727273, | |
| "acc_stderr,none": 0.04631381319425464 | |
| }, | |
| "harness|mmlu_security_studies|0": { | |
| "alias": " - security_studies", | |
| "acc,none": 0.6938775510204082, | |
| "acc_stderr,none": 0.029504896454595968 | |
| }, | |
| "harness|mmlu_sociology|0": { | |
| "alias": " - sociology", | |
| "acc,none": 0.8258706467661692, | |
| "acc_stderr,none": 0.026814951200421603 | |
| }, | |
| "harness|mmlu_us_foreign_policy|0": { | |
| "alias": " - us_foreign_policy", | |
| "acc,none": 0.81, | |
| "acc_stderr,none": 0.03942772444036624 | |
| }, | |
| "harness|mmlu_stem|0": { | |
| "alias": " - stem", | |
| "acc,none": 0.5233111322549953, | |
| "acc_stderr,none": 0.008634698284601918 | |
| }, | |
| "harness|mmlu_abstract_algebra|0": { | |
| "alias": " - abstract_algebra", | |
| "acc,none": 0.35, | |
| "acc_stderr,none": 0.047937248544110196 | |
| }, | |
| "harness|mmlu_anatomy|0": { | |
| "alias": " - anatomy", | |
| "acc,none": 0.562962962962963, | |
| "acc_stderr,none": 0.042849586397534 | |
| }, | |
| "harness|mmlu_astronomy|0": { | |
| "alias": " - astronomy", | |
| "acc,none": 0.6644736842105263, | |
| "acc_stderr,none": 0.038424985593952694 | |
| }, | |
| "harness|mmlu_college_biology|0": { | |
| "alias": " - college_biology", | |
| "acc,none": 0.7222222222222222, | |
| "acc_stderr,none": 0.03745554791462457 | |
| }, | |
| "harness|mmlu_college_chemistry|0": { | |
| "alias": " - college_chemistry", | |
| "acc,none": 0.47, | |
| "acc_stderr,none": 0.05016135580465919 | |
| }, | |
| "harness|mmlu_college_computer_science|0": { | |
| "alias": " - college_computer_science", | |
| "acc,none": 0.51, | |
| "acc_stderr,none": 0.05024183937956912 | |
| }, | |
| "harness|mmlu_college_mathematics|0": { | |
| "alias": " - college_mathematics", | |
| "acc,none": 0.32, | |
| "acc_stderr,none": 0.046882617226215034 | |
| }, | |
| "harness|mmlu_college_physics|0": { | |
| "alias": " - college_physics", | |
| "acc,none": 0.4411764705882353, | |
| "acc_stderr,none": 0.049406356306056595 | |
| }, | |
| "harness|mmlu_computer_security|0": { | |
| "alias": " - computer_security", | |
| "acc,none": 0.72, | |
| "acc_stderr,none": 0.04512608598542129 | |
| }, | |
| "harness|mmlu_conceptual_physics|0": { | |
| "alias": " - conceptual_physics", | |
| "acc,none": 0.5531914893617021, | |
| "acc_stderr,none": 0.0325005368436584 | |
| }, | |
| "harness|mmlu_electrical_engineering|0": { | |
| "alias": " - electrical_engineering", | |
| "acc,none": 0.5793103448275863, | |
| "acc_stderr,none": 0.0411391498118926 | |
| }, | |
| "harness|mmlu_elementary_mathematics|0": { | |
| "alias": " - elementary_mathematics", | |
| "acc,none": 0.43915343915343913, | |
| "acc_stderr,none": 0.025559920550531003 | |
| }, | |
| "harness|mmlu_high_school_biology|0": { | |
| "alias": " - high_school_biology", | |
| "acc,none": 0.7322580645161291, | |
| "acc_stderr,none": 0.025189006660212374 | |
| }, | |
| "harness|mmlu_high_school_chemistry|0": { | |
| "alias": " - high_school_chemistry", | |
| "acc,none": 0.4433497536945813, | |
| "acc_stderr,none": 0.03495334582162933 | |
| }, | |
| "harness|mmlu_high_school_computer_science|0": { | |
| "alias": " - high_school_computer_science", | |
| "acc,none": 0.68, | |
| "acc_stderr,none": 0.04688261722621505 | |
| }, | |
| "harness|mmlu_high_school_mathematics|0": { | |
| "alias": " - high_school_mathematics", | |
| "acc,none": 0.36666666666666664, | |
| "acc_stderr,none": 0.029381620726465073 | |
| }, | |
| "harness|mmlu_high_school_physics|0": { | |
| "alias": " - high_school_physics", | |
| "acc,none": 0.47019867549668876, | |
| "acc_stderr,none": 0.040752249922169775 | |
| }, | |
| "harness|mmlu_high_school_statistics|0": { | |
| "alias": " - high_school_statistics", | |
| "acc,none": 0.46296296296296297, | |
| "acc_stderr,none": 0.03400603625538272 | |
| }, | |
| "harness|mmlu_machine_learning|0": { | |
| "alias": " - machine_learning", | |
| "acc,none": 0.4642857142857143, | |
| "acc_stderr,none": 0.04733667890053756 | |
| }, | |
| "harness|arc:easy|0": { | |
| "acc,none": 0.8106060606060606, | |
| "acc_stderr,none": 0.00804000196687019, | |
| "acc_norm,none": 0.7803030303030303, | |
| "acc_norm_stderr,none": 0.00849594853792877, | |
| "alias": "arc_easy" | |
| }, | |
| "harness|arc:challenge|0": { | |
| "acc,none": 0.5273037542662116, | |
| "acc_stderr,none": 0.014589589101985996, | |
| "acc_norm,none": 0.5520477815699659, | |
| "acc_norm_stderr,none": 0.014532011498211676, | |
| "alias": "arc_challenge" | |
| }, | |
| "harness|truthfulqa:mc1|0": { | |
| "acc,none": 0.3537331701346389, | |
| "acc_stderr,none": 0.016737814358846147, | |
| "alias": "truthfulqa_mc1" | |
| }, | |
| "harness|hellaswag|0": { | |
| "acc,none": 0.5732921728739295, | |
| "acc_stderr,none": 0.004935882666250482, | |
| "acc_norm,none": 0.7543318064130651, | |
| "acc_norm_stderr,none": 0.004296028885089522, | |
| "alias": "hellaswag" | |
| }, | |
| "harness|truthfulqa:mc2|0": { | |
| "acc,none": 0.5153575418054068, | |
| "acc_stderr,none": 0.015290431258948651, | |
| "alias": "truthfulqa_mc2" | |
| }, | |
| "harness|openbookqa|0": { | |
| "acc,none": 0.344, | |
| "acc_stderr,none": 0.02126575803797874, | |
| "acc_norm,none": 0.43, | |
| "acc_norm_stderr,none": 0.02216263442665284, | |
| "alias": "openbookqa" | |
| }, | |
| "harness|piqa|0": { | |
| "acc,none": 0.7736670293797606, | |
| "acc_stderr,none": 0.00976329424687942, | |
| "acc_norm,none": 0.7818280739934712, | |
| "acc_norm_stderr,none": 0.009636081958374381, | |
| "alias": "piqa" | |
| }, | |
| "harness|winogrande|0": { | |
| "acc,none": 0.734017363851618, | |
| "acc_stderr,none": 0.012418323153051043, | |
| "alias": "winogrande" | |
| } | |
| }, | |
| "task_info": { | |
| "model": "solidrust/Meta-Llama-3-8B-Instruct-hf-AWQ", | |
| "revision": "main", | |
| "private": false, | |
| "params": 7.94, | |
| "architectures": "LlamaForCausalLM", | |
| "quant_type": "AWQ", | |
| "precision": "4bit", | |
| "model_params": 15.88, | |
| "model_size": 7.94, | |
| "weight_dtype": "int4", | |
| "compute_dtype": "float16", | |
| "gguf_ftype": "*Q4_0.gguf", | |
| "hardware": "gpu", | |
| "status": "Pending", | |
| "submitted_time": "2024-04-27T15:54:35Z", | |
| "model_type": "quantization", | |
| "job_id": -1, | |
| "job_start_time": null, | |
| "scripts": "ITREX" | |
| }, | |
| "quantization_config": { | |
| "bits": 4, | |
| "group_size": 128, | |
| "modules_to_not_convert": null, | |
| "quant_method": "awq", | |
| "version": "gemm", | |
| "zero_point": true | |
| }, | |
| "versions": { | |
| "harness|lambada:openai|0": 1.0, | |
| "harness|boolq|0": 2.0, | |
| "harness|mmlu|0": null, | |
| "harness|mmlu_humanities|0": null, | |
| "harness|mmlu_formal_logic|0": 0.0, | |
| "harness|mmlu_high_school_european_history|0": 0.0, | |
| "harness|mmlu_high_school_us_history|0": 0.0, | |
| "harness|mmlu_high_school_world_history|0": 0.0, | |
| "harness|mmlu_international_law|0": 0.0, | |
| "harness|mmlu_jurisprudence|0": 0.0, | |
| "harness|mmlu_logical_fallacies|0": 0.0, | |
| "harness|mmlu_moral_disputes|0": 0.0, | |
| "harness|mmlu_moral_scenarios|0": 0.0, | |
| "harness|mmlu_philosophy|0": 0.0, | |
| "harness|mmlu_prehistory|0": 0.0, | |
| "harness|mmlu_professional_law|0": 0.0, | |
| "harness|mmlu_world_religions|0": 0.0, | |
| "harness|mmlu_other|0": null, | |
| "harness|mmlu_business_ethics|0": 0.0, | |
| "harness|mmlu_clinical_knowledge|0": 0.0, | |
| "harness|mmlu_college_medicine|0": 0.0, | |
| "harness|mmlu_global_facts|0": 0.0, | |
| "harness|mmlu_human_aging|0": 0.0, | |
| "harness|mmlu_management|0": 0.0, | |
| "harness|mmlu_marketing|0": 0.0, | |
| "harness|mmlu_medical_genetics|0": 0.0, | |
| "harness|mmlu_miscellaneous|0": 0.0, | |
| "harness|mmlu_nutrition|0": 0.0, | |
| "harness|mmlu_professional_accounting|0": 0.0, | |
| "harness|mmlu_professional_medicine|0": 0.0, | |
| "harness|mmlu_virology|0": 0.0, | |
| "harness|mmlu_social_sciences|0": null, | |
| "harness|mmlu_econometrics|0": 0.0, | |
| "harness|mmlu_high_school_geography|0": 0.0, | |
| "harness|mmlu_high_school_government_and_politics|0": 0.0, | |
| "harness|mmlu_high_school_macroeconomics|0": 0.0, | |
| "harness|mmlu_high_school_microeconomics|0": 0.0, | |
| "harness|mmlu_high_school_psychology|0": 0.0, | |
| "harness|mmlu_human_sexuality|0": 0.0, | |
| "harness|mmlu_professional_psychology|0": 0.0, | |
| "harness|mmlu_public_relations|0": 0.0, | |
| "harness|mmlu_security_studies|0": 0.0, | |
| "harness|mmlu_sociology|0": 0.0, | |
| "harness|mmlu_us_foreign_policy|0": 0.0, | |
| "harness|mmlu_stem|0": null, | |
| "harness|mmlu_abstract_algebra|0": 0.0, | |
| "harness|mmlu_anatomy|0": 0.0, | |
| "harness|mmlu_astronomy|0": 0.0, | |
| "harness|mmlu_college_biology|0": 0.0, | |
| "harness|mmlu_college_chemistry|0": 0.0, | |
| "harness|mmlu_college_computer_science|0": 0.0, | |
| "harness|mmlu_college_mathematics|0": 0.0, | |
| "harness|mmlu_college_physics|0": 0.0, | |
| "harness|mmlu_computer_security|0": 0.0, | |
| "harness|mmlu_conceptual_physics|0": 0.0, | |
| "harness|mmlu_electrical_engineering|0": 0.0, | |
| "harness|mmlu_elementary_mathematics|0": 0.0, | |
| "harness|mmlu_high_school_biology|0": 0.0, | |
| "harness|mmlu_high_school_chemistry|0": 0.0, | |
| "harness|mmlu_high_school_computer_science|0": 0.0, | |
| "harness|mmlu_high_school_mathematics|0": 0.0, | |
| "harness|mmlu_high_school_physics|0": 0.0, | |
| "harness|mmlu_high_school_statistics|0": 0.0, | |
| "harness|mmlu_machine_learning|0": 0.0, | |
| "harness|arc:easy|0": 1.0, | |
| "harness|arc:challenge|0": 1.0, | |
| "harness|truthfulqa:mc1|0": 2.0, | |
| "harness|hellaswag|0": 1.0, | |
| "harness|truthfulqa:mc2|0": 2.0, | |
| "harness|openbookqa|0": 1.0, | |
| "harness|piqa|0": 1.0, | |
| "harness|winogrande|0": 1.0 | |
| }, | |
| "n-shot": { | |
| "arc_challenge": 0, | |
| "arc_easy": 0, | |
| "boolq": 0, | |
| "hellaswag": 0, | |
| "lambada_openai": 0, | |
| "mmlu": 0, | |
| "mmlu_abstract_algebra": 0, | |
| "mmlu_anatomy": 0, | |
| "mmlu_astronomy": 0, | |
| "mmlu_business_ethics": 0, | |
| "mmlu_clinical_knowledge": 0, | |
| "mmlu_college_biology": 0, | |
| "mmlu_college_chemistry": 0, | |
| "mmlu_college_computer_science": 0, | |
| "mmlu_college_mathematics": 0, | |
| "mmlu_college_medicine": 0, | |
| "mmlu_college_physics": 0, | |
| "mmlu_computer_security": 0, | |
| "mmlu_conceptual_physics": 0, | |
| "mmlu_econometrics": 0, | |
| "mmlu_electrical_engineering": 0, | |
| "mmlu_elementary_mathematics": 0, | |
| "mmlu_formal_logic": 0, | |
| "mmlu_global_facts": 0, | |
| "mmlu_high_school_biology": 0, | |
| "mmlu_high_school_chemistry": 0, | |
| "mmlu_high_school_computer_science": 0, | |
| "mmlu_high_school_european_history": 0, | |
| "mmlu_high_school_geography": 0, | |
| "mmlu_high_school_government_and_politics": 0, | |
| "mmlu_high_school_macroeconomics": 0, | |
| "mmlu_high_school_mathematics": 0, | |
| "mmlu_high_school_microeconomics": 0, | |
| "mmlu_high_school_physics": 0, | |
| "mmlu_high_school_psychology": 0, | |
| "mmlu_high_school_statistics": 0, | |
| "mmlu_high_school_us_history": 0, | |
| "mmlu_high_school_world_history": 0, | |
| "mmlu_human_aging": 0, | |
| "mmlu_human_sexuality": 0, | |
| "mmlu_humanities": 0, | |
| "mmlu_international_law": 0, | |
| "mmlu_jurisprudence": 0, | |
| "mmlu_logical_fallacies": 0, | |
| "mmlu_machine_learning": 0, | |
| "mmlu_management": 0, | |
| "mmlu_marketing": 0, | |
| "mmlu_medical_genetics": 0, | |
| "mmlu_miscellaneous": 0, | |
| "mmlu_moral_disputes": 0, | |
| "mmlu_moral_scenarios": 0, | |
| "mmlu_nutrition": 0, | |
| "mmlu_other": 0, | |
| "mmlu_philosophy": 0, | |
| "mmlu_prehistory": 0, | |
| "mmlu_professional_accounting": 0, | |
| "mmlu_professional_law": 0, | |
| "mmlu_professional_medicine": 0, | |
| "mmlu_professional_psychology": 0, | |
| "mmlu_public_relations": 0, | |
| "mmlu_security_studies": 0, | |
| "mmlu_social_sciences": 0, | |
| "mmlu_sociology": 0, | |
| "mmlu_stem": 0, | |
| "mmlu_us_foreign_policy": 0, | |
| "mmlu_virology": 0, | |
| "mmlu_world_religions": 0, | |
| "openbookqa": 0, | |
| "piqa": 0, | |
| "truthfulqa_mc1": 0, | |
| "truthfulqa_mc2": 0, | |
| "winogrande": 0 | |
| }, | |
| "date": 1714252073.7108362, | |
| "config": { | |
| "model": "hf", | |
| "model_args": "pretrained=solidrust/Meta-Llama-3-8B-Instruct-hf-AWQ,dtype=float16,_commit_hash=main", | |
| "batch_size": 2, | |
| "batch_sizes": [], | |
| "device": "cuda", | |
| "use_cache": null, | |
| "limit": null, | |
| "bootstrap_iters": 100000, | |
| "gen_kwargs": null | |
| } | |
| } |