{
  "results": {
    "truthfulqa_mc": {
      "mc1": 0.20563035495716034,
      "mc1_stderr": 0.014148482219460978,
      "mc2": 0.3321670658810331,
      "mc2_stderr": 0.01308219195188674
    }
  },
  "versions": {
    "truthfulqa_mc": 1
  },
  "config": {
    "model": "hf-causal",
    "model_args": "pretrained=workdir_7b/ckpt_349",
    "num_fewshot": 0,
    "batch_size": "32",
    "batch_sizes": [],
    "device": null,
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}