PyTorch
llama
alignment-handbook
Generated from Trainer
File size: 769 Bytes
Commit: e5a6ffa
{
    "epoch": 1.0,
    "eval_logits/chosen": 0.3841487765312195,
    "eval_logits/rejected": 0.3401743471622467,
    "eval_logps/chosen": -458.9393005371094,
    "eval_logps/rejected": -584.5836791992188,
    "eval_loss": 0.4761734902858734,
    "eval_rewards/accuracies": 0.7946428656578064,
    "eval_rewards/chosen": -1.4026715755462646,
    "eval_rewards/margins": 1.1953023672103882,
    "eval_rewards/rejected": -2.5979740619659424,
    "eval_runtime": 172.3438,
    "eval_samples": 4461,
    "eval_samples_per_second": 25.884,
    "eval_steps_per_second": 0.406,
    "total_flos": 0.0,
    "train_loss": 0.5273771832863338,
    "train_runtime": 14157.4064,
    "train_samples": 133368,
    "train_samples_per_second": 9.42,
    "train_steps_per_second": 0.294
}
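
The eval_rewards/* and eval_logps/* keys match the metrics logged for preference-optimization runs (e.g. TRL's DPOTrainer as used in alignment-handbook recipes). Below is a minimal sketch for loading and sanity-checking this file; the filename all_results.json is an assumption, based on the Trainer's convention of merging train and eval metrics into that file.

import json

# Load the metrics file (the name all_results.json is an assumption;
# the Hugging Face Trainer conventionally writes combined train/eval metrics there).
with open("all_results.json") as f:
    results = json.load(f)

# The logged reward margin should roughly equal chosen minus rejected
# (exact equality is not guaranteed, since metrics are averaged per batch).
recomputed = results["eval_rewards/chosen"] - results["eval_rewards/rejected"]
print(f"recomputed margin: {recomputed:.4f}")                       # ~1.1953
print(f"logged margin:     {results['eval_rewards/margins']:.4f}")  # 1.1953

# Throughput cross-check: samples divided by runtime matches samples_per_second.
print(f"eval throughput:   {results['eval_samples'] / results['eval_runtime']:.3f}")  # ~25.884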