ipex-llm/python/llm/test/benchmark/harness_nightly/golden_results.json
{
  "stablelm-3b-4e1t": {
    "xpu": {
      "mixed_fp4": {
        "truthfulqa_mc": {
          "mc1": 0.24357405140758873,
          "mc1_stderr": 0.015026354824910782,
          "mc2": 0.37399115063281224,
          "mc2_stderr": 0.013684003173581748
        },
        "arc_challenge": {
          "acc": 0.40102389078498296,
          "acc_stderr": 0.014322255790719869,
          "acc_norm": 0.44283276450511944,
          "acc_norm_stderr": 0.014515573873348897
        }
      },
      "fp8": {
        "truthfulqa_mc": {
          "mc1": 0.24479804161566707,
          "mc1_stderr": 0.01505186948671501,
          "mc2": 0.3747170112957169,
          "mc2_stderr": 0.013516983188729865
        },
        "arc_challenge": {
          "acc": 0.41552901023890787,
          "acc_stderr": 0.014401366641216377,
          "acc_norm": 0.46245733788395904,
          "acc_norm_stderr": 0.014570144495075581
        }
      }
    }
  },
  "Mistral-7B-v0.1": {
    "xpu": {
      "mixed_fp4": {
        "truthfulqa_mc": {
          "mc1": 0.27539779681762544,
          "mc1_stderr": 0.01563813566777552,
          "mc2": 0.41062756399348693,
          "mc2_stderr": 0.014067612078490615
        },
        "arc_challenge": {
          "acc": 0.5674061433447098,
          "acc_stderr": 0.014478005694182528,
          "acc_norm": 0.5989761092150171,
          "acc_norm_stderr": 0.014322255790719867
        }
      },
      "fp8": {
        "truthfulqa_mc": {
          "mc1": 0.2778457772337821,
          "mc1_stderr": 0.015680929364024643,
          "mc2": 0.42125519016651203,
          "mc2_stderr": 0.014145367212406432
        },
        "arc_challenge": {
          "acc": 0.5639931740614335,
          "acc_stderr": 0.014491225699230916,
          "acc_norm": 0.5989761092150171,
          "acc_norm_stderr": 0.014322255790719867
        }
      }
    }
  }
}