UncheatableEval/data/2025-12/Falcon-H1-0.5B-Base-UncheatableEval-2025-12-wikipedia_english-2025-12-26_11-17-19.json
{
    "neg_log_prob_sum": 1816.556,
    "avg tokens": 865.132,
    "avg character count": 3043.39,
    "parameters count": 0.521411104,
    "avg bytes": 3062.292,
    "sample_count": 500,
    "model_name_or_path": "tiiuae/Falcon-H1-0.5B-Base",
    "tokenizer_name": "tiiuae/Falcon-H1-0.5B-Base",
    "data_path": "UncheatableEval-2025-12-wikipedia_english",
    "chunk_size": 4000,
    "bos_mode": "add_default_eos",
    "model_args": {
        "device_map": "auto",
        "trust_remote_code": true,
        "torch_dtype": "torch.bfloat16"
    },
    "tokenizer_args": {
        "trust_remote_code": true
    },
    "requirements": [],
    "batch_size": 1,
    "enable_chunking": true,
    "bpc": 0.8611240533408772,
    "bpb": 0.8558087643820681,
    "compression_rate": 10.697609554775852,
    "track_byte_wise_data": false
}
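
The derived fields in this record are mutually consistent if "neg_log_prob_sum" is read as the mean negative log-likelihood per sample in nats and the "avg ..." fields as per-sample means: "bpc" and "bpb" divide the bit-converted cross-entropy by "avg character count" and "avg bytes", and "compression_rate" expresses it as a percentage of the raw UTF-8 size (bpb / 8 × 100). The sketch below recomputes the three derived fields from the recorded values; this reading of the fields is inferred from the numbers, not taken from the harness source.

```python
import math

# Values copied from the record above; their interpretation (per-sample means,
# neg_log_prob_sum in nats) is an assumption inferred from the arithmetic.
neg_log_prob_sum = 1816.556
avg_character_count = 3043.39
avg_bytes = 3062.292

bits = neg_log_prob_sum / math.log(2)   # cross-entropy converted from nats to bits

bpc = bits / avg_character_count        # bits per character
bpb = bits / avg_bytes                  # bits per byte
compression_rate = 100 * bpb / 8        # compressed size as a percentage of raw UTF-8 bytes

# Agrees with the recorded 0.8611..., 0.8558..., and 10.6976... up to the
# rounding of the summary fields above.
print(f"bpc={bpc:.6f} bpb={bpb:.6f} compression_rate={compression_rate:.6f}")
```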
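
For context, here is a minimal sketch of how a model could be loaded with the "model_args" and "tokenizer_args" recorded above and scored on a single document. It is not the UncheatableEval harness itself: the chunking (chunk_size 4000, enable_chunking), the "add_default_eos" bos_mode, and batching are not reproduced, and `score_document` is a hypothetical helper name used only for illustration.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

MODEL = "tiiuae/Falcon-H1-0.5B-Base"

# Mirrors tokenizer_args and model_args from the record above.
tokenizer = AutoTokenizer.from_pretrained(MODEL, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    MODEL,
    device_map="auto",
    trust_remote_code=True,
    torch_dtype=torch.bfloat16,
)
model.eval()

@torch.no_grad()
def score_document(text: str) -> float:
    """Return the summed negative log-likelihood of `text` in nats."""
    ids = tokenizer(text, return_tensors="pt").input_ids.to(model.device)
    # The model's cross-entropy loss is the mean NLL over predicted positions,
    # so multiply by the number of predicted tokens to recover the sum.
    loss = model(ids, labels=ids).loss
    return loss.item() * (ids.shape[1] - 1)
```

Averaging such per-document sums over the 500 samples would yield a quantity comparable to "neg_log_prob_sum", up to the harness's chunking and BOS handling, which this sketch omits.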