Add llama_finetune_overruling_r16_alpha=32_dropout=0.05_lr0.0002_data_size1000_max_steps=100_seed=123 LoRA model 864f736 verified mciccone committed on Jun 10
Add llama_finetune_overruling_r16_alpha=32_dropout=0.05_lr5e-05_data_size1000_max_steps=500_seed=123 LoRA model a50024c verified mciccone committed on Jun 10
Add llama_finetune_overruling_r16_alpha=32_dropout=0.05_lr0.0003_data_size1000_max_steps=500_seed=123 LoRA model eeb03ec verified mciccone committed on Jun 10
Add llama_finetune_overruling_r16_alpha=32_dropout=0.05_lr0.0001_data_size1000_max_steps=500_seed=123 LoRA model c912722 verified mciccone committed on Jun 10
Add llama_finetune_overruling_r16_alpha=32_dropout=0.05_lr0.0001_data_size1000_max_steps=100_seed=123 LoRA model fc98a3e verified mciccone committed on Jun 10
Add llama_finetune_overruling_r16_alpha=32_dropout=0.05_lr0.0002_data_size1000_max_steps=500_seed=123 LoRA model 923e7d5 verified mciccone committed on Jun 10
Add llama_finetune_overruling_r16_alpha=32_dropout=0.05_lr0.0003_data_size1000_max_steps=100_seed=123 LoRA model 4278b5a verified mciccone committed on Jun 10