UncheatableEval/data/2025-12/Falcon-H1-1.5B-Base-UncheatableEval-2025-12-wikipedia_english-2025-12-26_12-19-50.json
Jellyfish042's picture
fix and update
14bf234
{
"neg_log_prob_sum": 1605.888,
"avg tokens": 843.83,
"avg character count": 3043.39,
"parameters count": 1.554859392,
"avg bytes": 3062.292,
"sample_count": 500,
"model_name_or_path": "tiiuae/Falcon-H1-1.5B-Base",
"tokenizer_name": "tiiuae/Falcon-H1-1.5B-Base",
"data_path": "UncheatableEval-2025-12-wikipedia_english",
"chunk_size": 4000,
"bos_mode": "add_default_eos",
"model_args": {
"device_map": "auto",
"trust_remote_code": true,
"attn_implementation": "flash_attention_2",
"torch_dtype": "torch.bfloat16"
},
"tokenizer_args": {
"trust_remote_code": true
},
"requirements": [],
"batch_size": 1,
"enable_chunking": true,
"bpc": 0.7612585484683514,
"bpb": 0.7565596794241358,
"compression_rate": 9.456995992801698,
"track_byte_wise_data": false
}