From 5ee14c144fc59579a6c77e553acb2b6b56a54d7d Mon Sep 17 00:00:00 2001 From: WangRongsheng Date: Fri, 2 Feb 2024 10:53:05 +0800 Subject: [PATCH] update ESFP and ISTP --- results/ESFP.csv | 7 +- results/ISTP.csv | 507 ++++++++++++++++++++++++----------------------- 2 files changed, 260 insertions(+), 254 deletions(-) diff --git a/results/ESFP.csv b/results/ESFP.csv index 9a02289..cd0f079 100644 --- a/results/ESFP.csv +++ b/results/ESFP.csv @@ -172,7 +172,7 @@ cmmlu-virology,8925bf,accuracy,gen,4.14 cmmlu-world_history,57c97c,accuracy,gen,5.59 cmmlu-world_religions,1d0f4b,accuracy,gen,7.5 gsm8k,-,-,-,- -math,-,-,-,- +math,265cce,accuracy,gen,2.44 lukaemon_mmlu_college_biology,8c2e29,accuracy,gen,0.69 lukaemon_mmlu_college_chemistry,0afccd,accuracy,gen,4 lukaemon_mmlu_college_computer_science,c1c1b4,accuracy,gen,1 @@ -194,7 +194,7 @@ lukaemon_mmlu_international_law,408d4e,accuracy,gen,0 lukaemon_mmlu_moral_scenarios,9f30a6,accuracy,gen,0 lukaemon_mmlu_computer_security,2753c1,accuracy,gen,1 lukaemon_mmlu_high_school_microeconomics,af9eae,accuracy,gen,1.26 -lukaemon_mmlu_professional_law,-,-,-,- +lukaemon_mmlu_professional_law,7c7a62,accuracy,gen,7.3 lukaemon_mmlu_medical_genetics,b1a3a7,accuracy,gen,1 lukaemon_mmlu_professional_psychology,c6b790,accuracy,gen,4.08 lukaemon_mmlu_jurisprudence,f41074,accuracy,gen,1.85 @@ -234,9 +234,12 @@ agieval-chinese,-,naive_average,gen,12.53 agieval-english,-,naive_average,gen,3.52 agieval-gaokao,-,naive_average,gen,11.51 agieval,-,naive_average,gen,8.67 +mmlu-humanities,-,naive_average,gen,3.82 mmlu-stem,-,naive_average,gen,7.09 mmlu-social-science,-,naive_average,gen,3.59 mmlu-other,-,naive_average,gen,4.25 +mmlu,-,naive_average,gen,4.96 +mmlu-weighted,-,weighted_average,gen,4.89 cmmlu-humanities,-,naive_average,gen,7.52 cmmlu-stem,-,naive_average,gen,6.95 cmmlu-social-science,-,naive_average,gen,6.73 diff --git a/results/ISTP.csv b/results/ISTP.csv index 3065fb1..83c1915 100644 --- a/results/ISTP.csv +++ b/results/ISTP.csv @@ -1,252 +1,255 @@ -dataset,version,metric,mode,opencompass.models.huggingface.HuggingFace_FarReelAILab_Machine_Mindset_en_ISTP -GaokaoBench_2010-2022_Math_II_MCQs,5b0b29,score,gen,10.55 -GaokaoBench_2010-2022_Math_I_MCQs,5b0b29,score,gen,5.14 -GaokaoBench_2010-2022_History_MCQs,3613b5,score,gen,14.98 -GaokaoBench_2010-2022_Biology_MCQs,d26e80,score,gen,14.67 -GaokaoBench_2010-2022_Political_Science_MCQs,70fce9,score,gen,9.06 -GaokaoBench_2010-2022_Physics_MCQs,8a0c30,score,gen,3.12 -GaokaoBench_2010-2022_Chemistry_MCQs,852bbd,score,gen,6.45 -GaokaoBench_2010-2013_English_MCQs,01b50f,score,gen,19.05 -GaokaoBench_2010-2022_Chinese_Modern_Lit,e19c31,score,gen,0 -GaokaoBench_2010-2022_English_Fill_in_Blanks,924021,score,gen,14 -GaokaoBench_2012-2022_English_Cloze_Test,11f6ce,score,gen,0 -GaokaoBench_2010-2022_Geography_MCQs,862192,score,gen,9.47 -GaokaoBench_2010-2022_English_Reading_Comp,ffdef4,score,gen,3.19 -GaokaoBench_2010-2022_Chinese_Lang_and_Usage_MCQs,ba10b2,score,gen,15 -GaokaoBench_2010-2022_Math_I_Fill-in-the-Blank,9dd6c7,score,gen,0 -GaokaoBench_2010-2022_Math_II_Fill-in-the-Blank,9dd6c7,score,gen,0 -GaokaoBench_2010-2022_Chinese_Language_Famous_Passages_and_Sentences_Dictation,58053d,score,gen,0 -GaokaoBench_2014-2022_English_Language_Cloze_Passage,d431f7,score,gen,0 -GaokaoBench_2010-2022_Geography_Open-ended_Questions,475d13,score,gen,0 -GaokaoBench_2010-2022_Chemistry_Open-ended_Questions,0bccc3,score,gen,0 -GaokaoBench_2010-2022_Math_I_Open-ended_Questions,8a0cf5,score,gen,0 
-GaokaoBench_2010-2022_History_Open-ended_Questions,9fbad8,score,gen,0 -GaokaoBench_2010-2022_Biology_Open-ended_Questions,e0cb29,score,gen,0 -GaokaoBench_2010-2022_Math_II_Open-ended_Questions,8a0cf5,score,gen,0 -GaokaoBench_2010-2022_Physics_Open-ended_Questions,423d19,score,gen,0 -GaokaoBench_2010-2022_Political_Science_Open-ended_Questions,0eee0a,score,gen,0 -GaokaoBench_2012-2022_English_Language_Error_Correction,6a3cae,score,gen,0 -GaokaoBench_2010-2022_Chinese_Language_Ancient_Poetry_Reading,ee6cc7,score,gen,0 -GaokaoBench_2010-2022_Chinese_Language_Practical_Text_Reading,4dea5a,score,gen,0 -GaokaoBench_2010-2022_Chinese_Language_Literary_Text_Reading,979d8b,score,gen,0 -GaokaoBench_2010-2022_Chinese_Language_Classical_Chinese_Reading,9de717,score,gen,0 -GaokaoBench_2010-2022_Chinese_Language_Language_and_Writing_Skills_Open-ended_Questions,d2ed84,score,gen,0 -agieval-gaokao-chinese,774562,accuracy,gen,2.03 -agieval-gaokao-english,cb5bc6,accuracy,gen,6.54 -agieval-gaokao-geography,2ca56f,accuracy,gen,5.03 -agieval-gaokao-history,9c3ae0,accuracy,gen,5.11 -agieval-gaokao-biology,277c85,accuracy,gen,10 -agieval-gaokao-chemistry,d62fd4,accuracy,gen,6.28 -agieval-gaokao-mathqa,e2ea74,accuracy,gen,3.13 -agieval-logiqa-zh,03474d,accuracy,gen,13.06 -agieval-lsat-ar,ed1edf,accuracy,gen,0 -agieval-lsat-lr,ec6882,accuracy,gen,2.55 -agieval-lsat-rc,33077d,accuracy,gen,1.86 -agieval-logiqa-en,c6ee60,accuracy,gen,0.92 -agieval-sat-math,6c970d,accuracy,gen,2.27 -agieval-sat-en,4e3fef,accuracy,gen,13.59 -agieval-sat-en-without-passage,4e3fef,accuracy,gen,2.91 -agieval-aqua-rat,c090ca,accuracy,gen,0 -agieval-gaokao-physics,7.70E+06,accuracy,gen,7.5 -agieval-jec-qa-kd,b1e586,accuracy,gen,5.4 -agieval-jec-qa-ca,47bc29,accuracy,gen,6.1 -agieval-gaokao-mathcloze,b2c3c3,score,gen,1.69 -agieval-math,e0c6d9,score,gen,0.1 -ceval-computer_network,db9ce2,accuracy,gen,21.05 -ceval-operating_system,1c2571,accuracy,gen,47.37 -ceval-computer_architecture,a74dad,accuracy,gen,33.33 -ceval-college_programming,4ca32a,accuracy,gen,29.73 -ceval-college_physics,963fa8,accuracy,gen,15.79 -ceval-college_chemistry,e78857,accuracy,gen,12.5 -ceval-advanced_mathematics,ce03e2,accuracy,gen,5.26 -ceval-probability_and_statistics,65e812,accuracy,gen,33.33 -ceval-discrete_mathematics,e894ae,accuracy,gen,37.5 -ceval-electrical_engineer,ae42b9,accuracy,gen,35.14 -ceval-metrology_engineer,ee34ea,accuracy,gen,29.17 -ceval-high_school_mathematics,1dc5bf,accuracy,gen,16.67 -ceval-high_school_physics,adf25f,accuracy,gen,36.84 -ceval-high_school_chemistry,2ed27f,accuracy,gen,36.84 -ceval-high_school_biology,8e2b9a,accuracy,gen,15.79 -ceval-middle_school_mathematics,bee8d5,accuracy,gen,15.79 -ceval-middle_school_biology,86817c,accuracy,gen,28.57 -ceval-middle_school_physics,8accf6,accuracy,gen,47.37 -ceval-middle_school_chemistry,167a15,accuracy,gen,15 -ceval-veterinary_medicine,b4e08d,accuracy,gen,21.74 -ceval-college_economics,f3f4e6,accuracy,gen,25.45 -ceval-business_administration,c1614e,accuracy,gen,45.45 -ceval-marxism,cf874c,accuracy,gen,31.58 -ceval-mao_zedong_thought,51c7a4,accuracy,gen,29.17 -ceval-education_science,591fee,accuracy,gen,37.93 -ceval-teacher_qualification,4e4ced,accuracy,gen,27.27 -ceval-high_school_politics,5c0de2,accuracy,gen,21.05 -ceval-high_school_geography,865461,accuracy,gen,47.37 -ceval-middle_school_politics,5be3e7,accuracy,gen,42.86 -ceval-middle_school_geography,8a63be,accuracy,gen,33.33 -ceval-modern_chinese_history,fc01af,accuracy,gen,30.43 
-ceval-ideological_and_moral_cultivation,a2aa4a,accuracy,gen,31.58 -ceval-logic,f5b022,accuracy,gen,31.82 -ceval-law,a110a1,accuracy,gen,12.5 -ceval-chinese_language_and_literature,0f8b68,accuracy,gen,34.78 -ceval-art_studies,2a1300,accuracy,gen,21.21 -ceval-professional_tour_guide,4e673e,accuracy,gen,31.03 -ceval-legal_professional,ce8787,accuracy,gen,39.13 -ceval-high_school_chinese,315705,accuracy,gen,10.53 -ceval-high_school_history,7eb30a,accuracy,gen,35 -ceval-middle_school_history,48ab4a,accuracy,gen,31.82 -ceval-civil_servant,87d061,accuracy,gen,21.28 -ceval-sports_science,70f27b,accuracy,gen,26.32 -ceval-plant_protection,8941f9,accuracy,gen,40.91 -ceval-basic_medicine,c409d6,accuracy,gen,63.16 -ceval-clinical_medicine,49e82d,accuracy,gen,27.27 -ceval-urban_and_rural_planner,95b885,accuracy,gen,36.96 -ceval-accountant,2837,accuracy,gen,22.45 -ceval-fire_engineer,bc23f5,accuracy,gen,38.71 -ceval-environmental_impact_assessment_engineer,c64e2d,accuracy,gen,29.03 -ceval-tax_accountant,3a5e3c,accuracy,gen,20.41 -ceval-physician,6e277d,accuracy,gen,22.45 -cmmlu-agronomy,4c7f2c,accuracy,gen,11.24 -cmmlu-anatomy,ea09bf,accuracy,gen,17.57 -cmmlu-ancient_chinese,f7c97f,accuracy,gen,17.68 -cmmlu-arts,dd77b8,accuracy,gen,8.12 -cmmlu-astronomy,1e49db,accuracy,gen,6.67 -cmmlu-business_ethics,dc78cb,accuracy,gen,18.18 -cmmlu-chinese_civil_service_exam,1de82c,accuracy,gen,10.62 -cmmlu-chinese_driving_rule,b8a42b,accuracy,gen,18.32 -cmmlu-chinese_food_culture,2d568a,accuracy,gen,8.09 -cmmlu-chinese_foreign_policy,dc2427,accuracy,gen,14.02 -cmmlu-chinese_history,4cc7ed,accuracy,gen,7.43 -cmmlu-chinese_literature,af3c41,accuracy,gen,6.37 -cmmlu-chinese_teacher_qualification,87de11,accuracy,gen,7.26 -cmmlu-clinical_knowledge,c55b1d,accuracy,gen,8.86 -cmmlu-college_actuarial_science,d3c360,accuracy,gen,0.94 -cmmlu-college_education,df8790,accuracy,gen,19.63 -cmmlu-college_engineering_hydrology,673f23,accuracy,gen,12.26 -cmmlu-college_law,524c3a,accuracy,gen,10.19 -cmmlu-college_mathematics,e4ebad,accuracy,gen,8.57 -cmmlu-college_medical_statistics,55af35,accuracy,gen,17.92 -cmmlu-college_medicine,702f48,accuracy,gen,15.75 -cmmlu-computer_science,637007,accuracy,gen,18.63 -cmmlu-computer_security,932b6b,accuracy,gen,12.87 -cmmlu-conceptual_physics,cfc077,accuracy,gen,16.33 -cmmlu-construction_project_management,968a4a,accuracy,gen,10.79 -cmmlu-economics,ddaf7c,accuracy,gen,8.81 -cmmlu-education,c35963,accuracy,gen,15.34 -cmmlu-electrical_engineering,70e98a,accuracy,gen,10.47 -cmmlu-elementary_chinese,cbcd6a,accuracy,gen,8.33 -cmmlu-elementary_commonsense,a67f37,accuracy,gen,19.19 -cmmlu-elementary_information_and_technology,d34d2a,accuracy,gen,15.13 -cmmlu-elementary_mathematics,a9d403,accuracy,gen,14.35 -cmmlu-ethnology,31955f,accuracy,gen,13.33 -cmmlu-food_science,741d8e,accuracy,gen,6.99 -cmmlu-genetics,c326f7,accuracy,gen,13.07 -cmmlu-global_facts,0a1236,accuracy,gen,12.75 -cmmlu-high_school_biology,2be811,accuracy,gen,20.12 -cmmlu-high_school_chemistry,d63c05,accuracy,gen,14.39 -cmmlu-high_school_geography,5cd489,accuracy,gen,11.86 -cmmlu-high_school_mathematics,6b2087,accuracy,gen,17.68 -cmmlu-high_school_physics,3df353,accuracy,gen,16.36 -cmmlu-high_school_politics,7a88d8,accuracy,gen,16.08 -cmmlu-human_sexuality,54ac98,accuracy,gen,6.35 -cmmlu-international_law,0f5d40,accuracy,gen,12.43 -cmmlu-journalism,a4f6a0,accuracy,gen,12.21 -cmmlu-jurisprudence,7843da,accuracy,gen,18.25 -cmmlu-legal_and_moral_basis,f906b0,accuracy,gen,11.21 -cmmlu-logical,15a71b,accuracy,gen,13.01 
-cmmlu-machine_learning,bc6ad4,accuracy,gen,3.28 -cmmlu-management,e5e8db,accuracy,gen,10.95 -cmmlu-marketing,8b4c18,accuracy,gen,2.78 -cmmlu-marxist_theory,75eb79,accuracy,gen,22.75 -cmmlu-modern_chinese,83a9b7,accuracy,gen,9.48 -cmmlu-nutrition,adfff7,accuracy,gen,11.72 -cmmlu-philosophy,75e22d,accuracy,gen,18.1 -cmmlu-professional_accounting,0edc91,accuracy,gen,12 -cmmlu-professional_law,d24af5,accuracy,gen,10.9 -cmmlu-professional_medicine,134139,accuracy,gen,15.16 -cmmlu-professional_psychology,ec920e,accuracy,gen,8.62 -cmmlu-public_relations,70ee06,accuracy,gen,7.47 -cmmlu-security_study,45f96f,accuracy,gen,13.33 -cmmlu-sociology,485285,accuracy,gen,8.85 -cmmlu-sports_science,838cfe,accuracy,gen,13.94 -cmmlu-traditional_chinese_medicine,3bbf64,accuracy,gen,8.11 -cmmlu-virology,8925bf,accuracy,gen,11.83 -cmmlu-world_history,57c97c,accuracy,gen,14.91 -cmmlu-world_religions,1d0f4b,accuracy,gen,6.25 -gsm8k,-,-,-,- -math,-,-,-,- -lukaemon_mmlu_college_biology,8c2e29,accuracy,gen,0 -lukaemon_mmlu_college_chemistry,0afccd,accuracy,gen,7 -lukaemon_mmlu_college_computer_science,c1c1b4,accuracy,gen,2 -lukaemon_mmlu_college_mathematics,9deed0,accuracy,gen,2 -lukaemon_mmlu_college_physics,f5cf5e,accuracy,gen,0 -lukaemon_mmlu_electrical_engineering,3d694d,accuracy,gen,1.38 -lukaemon_mmlu_astronomy,7ef16f,accuracy,gen,0 -lukaemon_mmlu_anatomy,2d597d,accuracy,gen,0.74 -lukaemon_mmlu_abstract_algebra,ec092c,accuracy,gen,2 -lukaemon_mmlu_machine_learning,d489ae,accuracy,gen,0 -lukaemon_mmlu_clinical_knowledge,af10df,accuracy,gen,0.38 -lukaemon_mmlu_global_facts,cad9e0,accuracy,gen,5 -lukaemon_mmlu_management,65f310,accuracy,gen,0 -lukaemon_mmlu_nutrition,80bf96,accuracy,gen,0 -lukaemon_mmlu_marketing,9a98c0,accuracy,gen,0.43 -lukaemon_mmlu_professional_accounting,9cc7e2,accuracy,gen,1.77 -lukaemon_mmlu_high_school_geography,c28a4c,accuracy,gen,0 -lukaemon_mmlu_international_law,408d4e,accuracy,gen,0 -lukaemon_mmlu_moral_scenarios,9f30a6,accuracy,gen,0 -lukaemon_mmlu_computer_security,2753c1,accuracy,gen,1 -lukaemon_mmlu_high_school_microeconomics,af9eae,accuracy,gen,1.26 -lukaemon_mmlu_professional_law,-,-,-,- -lukaemon_mmlu_medical_genetics,b1a3a7,accuracy,gen,0 -lukaemon_mmlu_professional_psychology,c6b790,accuracy,gen,0.16 -lukaemon_mmlu_jurisprudence,f41074,accuracy,gen,0 -lukaemon_mmlu_world_religions,d44a95,accuracy,gen,0 -lukaemon_mmlu_philosophy,d36ef3,accuracy,gen,0 -lukaemon_mmlu_virology,0a5f8e,accuracy,gen,0 -lukaemon_mmlu_high_school_chemistry,5b2ef9,accuracy,gen,14.78 -lukaemon_mmlu_public_relations,4c7898,accuracy,gen,0 -lukaemon_mmlu_high_school_macroeconomics,3f841b,accuracy,gen,0.51 -lukaemon_mmlu_human_sexuality,4d1f3e,accuracy,gen,0.76 -lukaemon_mmlu_elementary_mathematics,0f5d3a,accuracy,gen,0.53 -lukaemon_mmlu_high_school_physics,0dd929,accuracy,gen,3.31 -lukaemon_mmlu_high_school_computer_science,bf31fd,accuracy,gen,3 -lukaemon_mmlu_high_school_european_history,d1b67e,accuracy,gen,3.03 -lukaemon_mmlu_business_ethics,af53f3,accuracy,gen,0 -lukaemon_mmlu_moral_disputes,48239e,accuracy,gen,0.87 -lukaemon_mmlu_high_school_statistics,47e18e,accuracy,gen,0.93 -lukaemon_mmlu_miscellaneous,573569,accuracy,gen,2.04 -lukaemon_mmlu_formal_logic,7a0414,accuracy,gen,7.14 -lukaemon_mmlu_high_school_government_and_politics,d907eb,accuracy,gen,0 -lukaemon_mmlu_prehistory,65aa94,accuracy,gen,0.62 -lukaemon_mmlu_security_studies,9ea7d3,accuracy,gen,0.82 -lukaemon_mmlu_high_school_biology,775183,accuracy,gen,0 -lukaemon_mmlu_logical_fallacies,19746a,accuracy,gen,1.23 
-lukaemon_mmlu_high_school_world_history,6665dc,accuracy,gen,1.27 -lukaemon_mmlu_professional_medicine,a05bab,accuracy,gen,0.37 -lukaemon_mmlu_high_school_mathematics,0e6a7e,accuracy,gen,2.22 -lukaemon_mmlu_college_medicine,5215f1,accuracy,gen,0 -lukaemon_mmlu_high_school_us_history,b5f235,accuracy,gen,3.92 -lukaemon_mmlu_sociology,4980ec,accuracy,gen,0.5 -lukaemon_mmlu_econometrics,4d590b,accuracy,gen,4.39 -lukaemon_mmlu_high_school_psychology,4.40E+98,accuracy,gen,0 -lukaemon_mmlu_human_aging,d0a8e1,accuracy,gen,0.45 -lukaemon_mmlu_us_foreign_policy,adcc88,accuracy,gen,0 -lukaemon_mmlu_conceptual_physics,a111d3,accuracy,gen,0.43 -agieval-chinese,-,naive_average,gen,5.99 -agieval-english,-,naive_average,gen,2.69 -agieval-gaokao,-,naive_average,gen,5.26 -agieval,-,naive_average,gen,4.57 -mmlu-stem,-,naive_average,gen,2.17 -mmlu-social-science,-,naive_average,gen,0.7 -mmlu-other,-,naive_average,gen,0.8 -cmmlu-humanities,-,naive_average,gen,12.42 -cmmlu-stem,-,naive_average,gen,12.97 -cmmlu-social-science,-,naive_average,gen,11.59 -cmmlu-other,-,naive_average,gen,12.38 -cmmlu-china-specific,-,naive_average,gen,11.68 -cmmlu,-,naive_average,gen,12.28 -ceval-stem,-,naive_average,gen,26.74 -ceval-social-science,-,naive_average,gen,34.15 -ceval-humanities,-,naive_average,gen,28.17 -ceval-other,-,naive_average,gen,31.72 -ceval-hard,-,naive_average,gen,24.34 -ceval,-,naive_average,gen,29.52 -GaokaoBench,-,weighted_average,gen,9.26 +dataset,version,metric,mode,opencompass.models.huggingface.HuggingFace_FarReelAILab_Machine_Mindset_en_ISTP +GaokaoBench_2010-2022_Math_II_MCQs,5b0b29,score,gen,10.55 +GaokaoBench_2010-2022_Math_I_MCQs,5b0b29,score,gen,5.14 +GaokaoBench_2010-2022_History_MCQs,3613b5,score,gen,14.98 +GaokaoBench_2010-2022_Biology_MCQs,d26e80,score,gen,14.67 +GaokaoBench_2010-2022_Political_Science_MCQs,70fce9,score,gen,9.06 +GaokaoBench_2010-2022_Physics_MCQs,8a0c30,score,gen,3.12 +GaokaoBench_2010-2022_Chemistry_MCQs,852bbd,score,gen,6.45 +GaokaoBench_2010-2013_English_MCQs,01b50f,score,gen,19.05 +GaokaoBench_2010-2022_Chinese_Modern_Lit,e19c31,score,gen,0.00 +GaokaoBench_2010-2022_English_Fill_in_Blanks,924021,score,gen,14.00 +GaokaoBench_2012-2022_English_Cloze_Test,11f6ce,score,gen,0.00 +GaokaoBench_2010-2022_Geography_MCQs,862192,score,gen,9.47 +GaokaoBench_2010-2022_English_Reading_Comp,ffdef4,score,gen,3.19 +GaokaoBench_2010-2022_Chinese_Lang_and_Usage_MCQs,ba10b2,score,gen,15.00 +GaokaoBench_2010-2022_Math_I_Fill-in-the-Blank,9dd6c7,score,gen,0.00 +GaokaoBench_2010-2022_Math_II_Fill-in-the-Blank,9dd6c7,score,gen,0.00 +GaokaoBench_2010-2022_Chinese_Language_Famous_Passages_and_Sentences_Dictation,58053d,score,gen,0.00 +GaokaoBench_2014-2022_English_Language_Cloze_Passage,d431f7,score,gen,0.00 +GaokaoBench_2010-2022_Geography_Open-ended_Questions,475d13,score,gen,0.00 +GaokaoBench_2010-2022_Chemistry_Open-ended_Questions,0bccc3,score,gen,0.00 +GaokaoBench_2010-2022_Math_I_Open-ended_Questions,8a0cf5,score,gen,0.00 +GaokaoBench_2010-2022_History_Open-ended_Questions,9fbad8,score,gen,0.00 +GaokaoBench_2010-2022_Biology_Open-ended_Questions,e0cb29,score,gen,0.00 +GaokaoBench_2010-2022_Math_II_Open-ended_Questions,8a0cf5,score,gen,0.00 +GaokaoBench_2010-2022_Physics_Open-ended_Questions,423d19,score,gen,0.00 +GaokaoBench_2010-2022_Political_Science_Open-ended_Questions,0eee0a,score,gen,0.00 +GaokaoBench_2012-2022_English_Language_Error_Correction,6a3cae,score,gen,0.00 +GaokaoBench_2010-2022_Chinese_Language_Ancient_Poetry_Reading,ee6cc7,score,gen,0.00 
+GaokaoBench_2010-2022_Chinese_Language_Practical_Text_Reading,4dea5a,score,gen,0.00 +GaokaoBench_2010-2022_Chinese_Language_Literary_Text_Reading,979d8b,score,gen,0.00 +GaokaoBench_2010-2022_Chinese_Language_Classical_Chinese_Reading,9de717,score,gen,0.00 +GaokaoBench_2010-2022_Chinese_Language_Language_and_Writing_Skills_Open-ended_Questions,d2ed84,score,gen,0.00 +agieval-gaokao-chinese,774562,accuracy,gen,2.03 +agieval-gaokao-english,cb5bc6,accuracy,gen,6.54 +agieval-gaokao-geography,2ca56f,accuracy,gen,5.03 +agieval-gaokao-history,9c3ae0,accuracy,gen,5.11 +agieval-gaokao-biology,277c85,accuracy,gen,10.00 +agieval-gaokao-chemistry,d62fd4,accuracy,gen,6.28 +agieval-gaokao-mathqa,e2ea74,accuracy,gen,3.13 +agieval-logiqa-zh,03474d,accuracy,gen,13.06 +agieval-lsat-ar,ed1edf,accuracy,gen,0.00 +agieval-lsat-lr,ec6882,accuracy,gen,2.55 +agieval-lsat-rc,33077d,accuracy,gen,1.86 +agieval-logiqa-en,c6ee60,accuracy,gen,0.92 +agieval-sat-math,6c970d,accuracy,gen,2.27 +agieval-sat-en,4e3fef,accuracy,gen,13.59 +agieval-sat-en-without-passage,4e3fef,accuracy,gen,2.91 +agieval-aqua-rat,c090ca,accuracy,gen,0.00 +agieval-gaokao-physics,7704e3,accuracy,gen,7.50 +agieval-jec-qa-kd,b1e586,accuracy,gen,5.40 +agieval-jec-qa-ca,47bc29,accuracy,gen,6.10 +agieval-gaokao-mathcloze,b2c3c3,score,gen,1.69 +agieval-math,e0c6d9,score,gen,0.10 +ceval-computer_network,db9ce2,accuracy,gen,21.05 +ceval-operating_system,1c2571,accuracy,gen,47.37 +ceval-computer_architecture,a74dad,accuracy,gen,33.33 +ceval-college_programming,4ca32a,accuracy,gen,29.73 +ceval-college_physics,963fa8,accuracy,gen,15.79 +ceval-college_chemistry,e78857,accuracy,gen,12.50 +ceval-advanced_mathematics,ce03e2,accuracy,gen,5.26 +ceval-probability_and_statistics,65e812,accuracy,gen,33.33 +ceval-discrete_mathematics,e894ae,accuracy,gen,37.50 +ceval-electrical_engineer,ae42b9,accuracy,gen,35.14 +ceval-metrology_engineer,ee34ea,accuracy,gen,29.17 +ceval-high_school_mathematics,1dc5bf,accuracy,gen,16.67 +ceval-high_school_physics,adf25f,accuracy,gen,36.84 +ceval-high_school_chemistry,2ed27f,accuracy,gen,36.84 +ceval-high_school_biology,8e2b9a,accuracy,gen,15.79 +ceval-middle_school_mathematics,bee8d5,accuracy,gen,15.79 +ceval-middle_school_biology,86817c,accuracy,gen,28.57 +ceval-middle_school_physics,8accf6,accuracy,gen,47.37 +ceval-middle_school_chemistry,167a15,accuracy,gen,15.00 +ceval-veterinary_medicine,b4e08d,accuracy,gen,21.74 +ceval-college_economics,f3f4e6,accuracy,gen,25.45 +ceval-business_administration,c1614e,accuracy,gen,45.45 +ceval-marxism,cf874c,accuracy,gen,31.58 +ceval-mao_zedong_thought,51c7a4,accuracy,gen,29.17 +ceval-education_science,591fee,accuracy,gen,37.93 +ceval-teacher_qualification,4e4ced,accuracy,gen,27.27 +ceval-high_school_politics,5c0de2,accuracy,gen,21.05 +ceval-high_school_geography,865461,accuracy,gen,47.37 +ceval-middle_school_politics,5be3e7,accuracy,gen,42.86 +ceval-middle_school_geography,8a63be,accuracy,gen,33.33 +ceval-modern_chinese_history,fc01af,accuracy,gen,30.43 +ceval-ideological_and_moral_cultivation,a2aa4a,accuracy,gen,31.58 +ceval-logic,f5b022,accuracy,gen,31.82 +ceval-law,a110a1,accuracy,gen,12.50 +ceval-chinese_language_and_literature,0f8b68,accuracy,gen,34.78 +ceval-art_studies,2a1300,accuracy,gen,21.21 +ceval-professional_tour_guide,4e673e,accuracy,gen,31.03 +ceval-legal_professional,ce8787,accuracy,gen,39.13 +ceval-high_school_chinese,315705,accuracy,gen,10.53 +ceval-high_school_history,7eb30a,accuracy,gen,35.00 +ceval-middle_school_history,48ab4a,accuracy,gen,31.82 
+ceval-civil_servant,87d061,accuracy,gen,21.28 +ceval-sports_science,70f27b,accuracy,gen,26.32 +ceval-plant_protection,8941f9,accuracy,gen,40.91 +ceval-basic_medicine,c409d6,accuracy,gen,63.16 +ceval-clinical_medicine,49e82d,accuracy,gen,27.27 +ceval-urban_and_rural_planner,95b885,accuracy,gen,36.96 +ceval-accountant,002837,accuracy,gen,22.45 +ceval-fire_engineer,bc23f5,accuracy,gen,38.71 +ceval-environmental_impact_assessment_engineer,c64e2d,accuracy,gen,29.03 +ceval-tax_accountant,3a5e3c,accuracy,gen,20.41 +ceval-physician,6e277d,accuracy,gen,22.45 +cmmlu-agronomy,4c7f2c,accuracy,gen,11.24 +cmmlu-anatomy,ea09bf,accuracy,gen,17.57 +cmmlu-ancient_chinese,f7c97f,accuracy,gen,17.68 +cmmlu-arts,dd77b8,accuracy,gen,8.12 +cmmlu-astronomy,1e49db,accuracy,gen,6.67 +cmmlu-business_ethics,dc78cb,accuracy,gen,18.18 +cmmlu-chinese_civil_service_exam,1de82c,accuracy,gen,10.62 +cmmlu-chinese_driving_rule,b8a42b,accuracy,gen,18.32 +cmmlu-chinese_food_culture,2d568a,accuracy,gen,8.09 +cmmlu-chinese_foreign_policy,dc2427,accuracy,gen,14.02 +cmmlu-chinese_history,4cc7ed,accuracy,gen,7.43 +cmmlu-chinese_literature,af3c41,accuracy,gen,6.37 +cmmlu-chinese_teacher_qualification,87de11,accuracy,gen,7.26 +cmmlu-clinical_knowledge,c55b1d,accuracy,gen,8.86 +cmmlu-college_actuarial_science,d3c360,accuracy,gen,0.94 +cmmlu-college_education,df8790,accuracy,gen,19.63 +cmmlu-college_engineering_hydrology,673f23,accuracy,gen,12.26 +cmmlu-college_law,524c3a,accuracy,gen,10.19 +cmmlu-college_mathematics,e4ebad,accuracy,gen,8.57 +cmmlu-college_medical_statistics,55af35,accuracy,gen,17.92 +cmmlu-college_medicine,702f48,accuracy,gen,15.75 +cmmlu-computer_science,637007,accuracy,gen,18.63 +cmmlu-computer_security,932b6b,accuracy,gen,12.87 +cmmlu-conceptual_physics,cfc077,accuracy,gen,16.33 +cmmlu-construction_project_management,968a4a,accuracy,gen,10.79 +cmmlu-economics,ddaf7c,accuracy,gen,8.81 +cmmlu-education,c35963,accuracy,gen,15.34 +cmmlu-electrical_engineering,70e98a,accuracy,gen,10.47 +cmmlu-elementary_chinese,cbcd6a,accuracy,gen,8.33 +cmmlu-elementary_commonsense,a67f37,accuracy,gen,19.19 +cmmlu-elementary_information_and_technology,d34d2a,accuracy,gen,15.13 +cmmlu-elementary_mathematics,a9d403,accuracy,gen,14.35 +cmmlu-ethnology,31955f,accuracy,gen,13.33 +cmmlu-food_science,741d8e,accuracy,gen,6.99 +cmmlu-genetics,c326f7,accuracy,gen,13.07 +cmmlu-global_facts,0a1236,accuracy,gen,12.75 +cmmlu-high_school_biology,2be811,accuracy,gen,20.12 +cmmlu-high_school_chemistry,d63c05,accuracy,gen,14.39 +cmmlu-high_school_geography,5cd489,accuracy,gen,11.86 +cmmlu-high_school_mathematics,6b2087,accuracy,gen,17.68 +cmmlu-high_school_physics,3df353,accuracy,gen,16.36 +cmmlu-high_school_politics,7a88d8,accuracy,gen,16.08 +cmmlu-human_sexuality,54ac98,accuracy,gen,6.35 +cmmlu-international_law,0f5d40,accuracy,gen,12.43 +cmmlu-journalism,a4f6a0,accuracy,gen,12.21 +cmmlu-jurisprudence,7843da,accuracy,gen,18.25 +cmmlu-legal_and_moral_basis,f906b0,accuracy,gen,11.21 +cmmlu-logical,15a71b,accuracy,gen,13.01 +cmmlu-machine_learning,bc6ad4,accuracy,gen,3.28 +cmmlu-management,e5e8db,accuracy,gen,10.95 +cmmlu-marketing,8b4c18,accuracy,gen,2.78 +cmmlu-marxist_theory,75eb79,accuracy,gen,22.75 +cmmlu-modern_chinese,83a9b7,accuracy,gen,9.48 +cmmlu-nutrition,adfff7,accuracy,gen,11.72 +cmmlu-philosophy,75e22d,accuracy,gen,18.10 +cmmlu-professional_accounting,0edc91,accuracy,gen,12.00 +cmmlu-professional_law,d24af5,accuracy,gen,10.90 +cmmlu-professional_medicine,134139,accuracy,gen,15.16 +cmmlu-professional_psychology,ec920e,accuracy,gen,8.62 
+cmmlu-public_relations,70ee06,accuracy,gen,7.47 +cmmlu-security_study,45f96f,accuracy,gen,13.33 +cmmlu-sociology,485285,accuracy,gen,8.85 +cmmlu-sports_science,838cfe,accuracy,gen,13.94 +cmmlu-traditional_chinese_medicine,3bbf64,accuracy,gen,8.11 +cmmlu-virology,8925bf,accuracy,gen,11.83 +cmmlu-world_history,57c97c,accuracy,gen,14.91 +cmmlu-world_religions,1d0f4b,accuracy,gen,6.25 +gsm8k,-,-,-,- +math,265cce,accuracy,gen,0.24 +lukaemon_mmlu_college_biology,8c2e29,accuracy,gen,0.00 +lukaemon_mmlu_college_chemistry,0afccd,accuracy,gen,7.00 +lukaemon_mmlu_college_computer_science,c1c1b4,accuracy,gen,2.00 +lukaemon_mmlu_college_mathematics,9deed0,accuracy,gen,2.00 +lukaemon_mmlu_college_physics,f5cf5e,accuracy,gen,0.00 +lukaemon_mmlu_electrical_engineering,3d694d,accuracy,gen,1.38 +lukaemon_mmlu_astronomy,7ef16f,accuracy,gen,0.00 +lukaemon_mmlu_anatomy,2d597d,accuracy,gen,0.74 +lukaemon_mmlu_abstract_algebra,ec092c,accuracy,gen,2.00 +lukaemon_mmlu_machine_learning,d489ae,accuracy,gen,0.00 +lukaemon_mmlu_clinical_knowledge,af10df,accuracy,gen,0.38 +lukaemon_mmlu_global_facts,cad9e0,accuracy,gen,5.00 +lukaemon_mmlu_management,65f310,accuracy,gen,0.00 +lukaemon_mmlu_nutrition,80bf96,accuracy,gen,0.00 +lukaemon_mmlu_marketing,9a98c0,accuracy,gen,0.43 +lukaemon_mmlu_professional_accounting,9cc7e2,accuracy,gen,1.77 +lukaemon_mmlu_high_school_geography,c28a4c,accuracy,gen,0.00 +lukaemon_mmlu_international_law,408d4e,accuracy,gen,0.00 +lukaemon_mmlu_moral_scenarios,9f30a6,accuracy,gen,0.00 +lukaemon_mmlu_computer_security,2753c1,accuracy,gen,1.00 +lukaemon_mmlu_high_school_microeconomics,af9eae,accuracy,gen,1.26 +lukaemon_mmlu_professional_law,7c7a62,accuracy,gen,6.91 +lukaemon_mmlu_medical_genetics,b1a3a7,accuracy,gen,0.00 +lukaemon_mmlu_professional_psychology,c6b790,accuracy,gen,0.16 +lukaemon_mmlu_jurisprudence,f41074,accuracy,gen,0.00 +lukaemon_mmlu_world_religions,d44a95,accuracy,gen,0.00 +lukaemon_mmlu_philosophy,d36ef3,accuracy,gen,0.00 +lukaemon_mmlu_virology,0a5f8e,accuracy,gen,0.00 +lukaemon_mmlu_high_school_chemistry,5b2ef9,accuracy,gen,14.78 +lukaemon_mmlu_public_relations,4c7898,accuracy,gen,0.00 +lukaemon_mmlu_high_school_macroeconomics,3f841b,accuracy,gen,0.51 +lukaemon_mmlu_human_sexuality,4d1f3e,accuracy,gen,0.76 +lukaemon_mmlu_elementary_mathematics,0f5d3a,accuracy,gen,0.53 +lukaemon_mmlu_high_school_physics,0dd929,accuracy,gen,3.31 +lukaemon_mmlu_high_school_computer_science,bf31fd,accuracy,gen,3.00 +lukaemon_mmlu_high_school_european_history,d1b67e,accuracy,gen,3.03 +lukaemon_mmlu_business_ethics,af53f3,accuracy,gen,0.00 +lukaemon_mmlu_moral_disputes,48239e,accuracy,gen,0.87 +lukaemon_mmlu_high_school_statistics,47e18e,accuracy,gen,0.93 +lukaemon_mmlu_miscellaneous,573569,accuracy,gen,2.04 +lukaemon_mmlu_formal_logic,7a0414,accuracy,gen,7.14 +lukaemon_mmlu_high_school_government_and_politics,d907eb,accuracy,gen,0.00 +lukaemon_mmlu_prehistory,65aa94,accuracy,gen,0.62 +lukaemon_mmlu_security_studies,9ea7d3,accuracy,gen,0.82 +lukaemon_mmlu_high_school_biology,775183,accuracy,gen,0.00 +lukaemon_mmlu_logical_fallacies,19746a,accuracy,gen,1.23 +lukaemon_mmlu_high_school_world_history,6665dc,accuracy,gen,1.27 +lukaemon_mmlu_professional_medicine,a05bab,accuracy,gen,0.37 +lukaemon_mmlu_high_school_mathematics,0e6a7e,accuracy,gen,2.22 +lukaemon_mmlu_college_medicine,5215f1,accuracy,gen,0.00 +lukaemon_mmlu_high_school_us_history,b5f235,accuracy,gen,3.92 +lukaemon_mmlu_sociology,4980ec,accuracy,gen,0.50 +lukaemon_mmlu_econometrics,4d590b,accuracy,gen,4.39 
+lukaemon_mmlu_high_school_psychology,440e96,accuracy,gen,0.00 +lukaemon_mmlu_human_aging,d0a8e1,accuracy,gen,0.45 +lukaemon_mmlu_us_foreign_policy,adcc88,accuracy,gen,0.00 +lukaemon_mmlu_conceptual_physics,a111d3,accuracy,gen,0.43 +agieval-chinese,-,naive_average,gen,5.99 +agieval-english,-,naive_average,gen,2.69 +agieval-gaokao,-,naive_average,gen,5.26 +agieval,-,naive_average,gen,4.57 +mmlu-humanities,-,naive_average,gen,1.92 +mmlu-stem,-,naive_average,gen,2.17 +mmlu-social-science,-,naive_average,gen,0.70 +mmlu-other,-,naive_average,gen,0.80 +mmlu,-,naive_average,gen,1.49 +mmlu-weighted,-,weighted_average,gen,1.77 +cmmlu-humanities,-,naive_average,gen,12.42 +cmmlu-stem,-,naive_average,gen,12.97 +cmmlu-social-science,-,naive_average,gen,11.59 +cmmlu-other,-,naive_average,gen,12.38 +cmmlu-china-specific,-,naive_average,gen,11.68 +cmmlu,-,naive_average,gen,12.28 +ceval-stem,-,naive_average,gen,26.74 +ceval-social-science,-,naive_average,gen,34.15 +ceval-humanities,-,naive_average,gen,28.17 +ceval-other,-,naive_average,gen,31.72 +ceval-hard,-,naive_average,gen,24.34 +ceval,-,naive_average,gen,29.52 +GaokaoBench,-,weighted_average,gen,9.26
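
As context for the aggregate rows this patch adds (mmlu-humanities, mmlu, mmlu-weighted): a minimal sketch, assuming the naive_average aggregate is the unweighted mean over the individual lukaemon_mmlu_* scores and that rows recorded as "-" (e.g. gsm8k, or lukaemon_mmlu_professional_law before this patch) are skipped. The file path and function name are illustrative, not part of the evaluation code.

    # Sketch: re-derive a naive_average aggregate from the per-subject rows in a results CSV.
    import csv

    def naive_average(csv_path: str, prefix: str = "lukaemon_mmlu_") -> float:
        """Unweighted mean of the scores of all rows whose dataset name starts
        with `prefix`, skipping rows whose score column is '-'."""
        scores = []
        with open(csv_path, newline="") as f:
            for row in csv.reader(f):
                if not row or not row[0].startswith(prefix):
                    continue
                value = row[-1]          # last column holds the score
                if value != "-":
                    scores.append(float(value))
        return sum(scores) / len(scores) if scores else float("nan")

    if __name__ == "__main__":
        # Expected to land near the "mmlu,-,naive_average,gen,..." value in the same file.
        print(round(naive_average("results/ISTP.csv"), 2))

The weighted_average rows presumably weight each subject by its number of questions; those counts are not recorded in these CSVs, so that aggregate cannot be re-derived from this file alone.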