krisliu committed
Commit 019a55a · 1 Parent(s): 4fe54f8

original finetuned effiLLM

finetune/sg/effiLLaMA/alpaca_llamaPeft_normBiasLora_r512_original_7B/epoch3/consolidated.00-of-01.model.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:229d4e091a6b1091d5e5b786d36add54a9abc50943f1f2d2b98587c74f9bc034
+ size 14739226267
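The checkpoint is committed as a Git LFS pointer: the repository stores only the object's sha256 and byte size, while the ~14.7 GB weight file itself lives in LFS storage. Below is a minimal sketch of how a downloaded copy could be checked against this pointer; the expected oid and size are taken from the diff above, everything else (function name, paths) is illustrative.

```python
import hashlib
from pathlib import Path

# Values copied from the LFS pointer in the diff above.
EXPECTED_OID = "229d4e091a6b1091d5e5b786d36add54a9abc50943f1f2d2b98587c74f9bc034"
EXPECTED_SIZE = 14739226267  # bytes, ~14.7 GB

def verify_lfs_object(path: str) -> bool:
    """Check a local file against the pointer's size and sha256 oid."""
    p = Path(path)
    if p.stat().st_size != EXPECTED_SIZE:
        return False
    h = hashlib.sha256()
    with p.open("rb") as f:
        # Stream in 1 MiB chunks so the 14.7 GB file is never held in memory.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest() == EXPECTED_OID

# Example, using the path as laid out in this commit:
# verify_lfs_object(
#     "finetune/sg/effiLLaMA/alpaca_llamaPeft_normBiasLora_r512_original_7B/"
#     "epoch3/consolidated.00-of-01.model.pth"
# )
```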
finetune/sg/effiLLaMA/alpaca_llamaPeft_normBiasLora_r512_original_7B/log.txt ADDED
@@ -0,0 +1,4 @@
+ {"train_lr": 4.993842364532024e-05, "train_closs": 0.9504148324591961, "train_grad_norm": 3.1888711442207467, "epoch": 0, "val_lr": 4.993842364532024e-05, "val_closs": 0.9504148324591961, "val_grad_norm": 3.1888711442207467}
+ {"train_lr": 9.179680273851965e-05, "train_closs": 0.8506759000712578, "train_grad_norm": 2.4111930516553044, "epoch": 1, "val_lr": 9.179680273851965e-05, "val_closs": 0.8506759000712578, "val_grad_norm": 2.4111930516553044}
+ {"train_lr": 5.252924876847267e-05, "train_closs": 0.5249739473277711, "train_grad_norm": 2.111732730577732, "epoch": 2, "val_lr": 5.252924876847267e-05, "val_closs": 0.5249739473277711, "val_grad_norm": 2.111732730577732}
+ {"train_lr": 1.3232446029953387e-05, "train_closs": 0.23211085524535507, "train_grad_norm": 1.792379817351919, "epoch": 3, "val_lr": 1.3232446029953387e-05, "val_closs": 0.23211085524535507, "val_grad_norm": 1.792379817351919}
finetune/sg/effiLLaMA/alpaca_llamaPeft_normBiasLora_r512_original_7B/output.log ADDED
The diff for this file is too large to render.