Add llama_finetune_mrpc_r16_alpha=32_dropout=0.05_lr0.0003_data_size1000_max_steps=500_seed=123 LoRA model 38577cb verified mciccone committed on Jun 10
Add llama_finetune_mrpc_r16_alpha=32_dropout=0.05_lr5e-05_data_size1000_max_steps=500_seed=123 LoRA model 7199995 verified mciccone committed on Jun 10
Add llama_finetune_mrpc_r16_alpha=32_dropout=0.05_lr0.0002_data_size1000_max_steps=100_seed=123 LoRA model f3270a1 verified mciccone committed on Jun 10
Add llama_finetune_mrpc_r16_alpha=32_dropout=0.05_lr0.0002_data_size1000_max_steps=500_seed=123 LoRA model ec01b3f verified mciccone committed on Jun 10
Add llama_finetune_mrpc_r16_alpha=32_dropout=0.05_lr0.0003_data_size1000_max_steps=100_seed=123 LoRA model 226dada verified mciccone committed on Jun 10
Add llama_finetune_mrpc_r16_alpha=32_dropout=0.05_lr0.0001_data_size1000_max_steps=100_seed=123 LoRA model 6cdf25e verified mciccone committed on Jun 10
Add llama_finetune_mrpc_r16_alpha=32_dropout=0.05_lr0.0001_data_size1000_max_steps=500_seed=123 LoRA model 47be5fc verified mciccone committed on Jun 10