final_llama_idk / training_metadata.json
{
  "guidance_tokens": [
    "<guidance>",
    "</guidance>"
  ],
  "guidance_ids": [
    128256,
    128257
  ],
  "base_model": "meta-llama/Llama-3.1-8B-Instruct",
  "lora_config": {
    "r": 64,
    "lora_alpha": 128,
    "target_modules": [
      "q_proj",
      "v_proj",
      "o_proj",
      "gate_proj",
      "up_proj",
      "k_proj",
      "down_proj"
    ],
    "lora_dropout": 0.05,
    "bias": "none",
    "task_type": "CAUSAL_LM"
  }
}
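
A minimal sketch of how this metadata might be consumed when reloading the adapter; it is not part of the repo, and the file path, variable names, and dtype choice are assumptions. It registers the two guidance markers as special tokens, resizes the embeddings so their ids match "guidance_ids" (128256 and 128257 extend Llama 3.1's 128,256-token vocabulary), and rebuilds the LoRA configuration from the stored "lora_config" dict.

import json

import torch
from peft import LoraConfig, get_peft_model
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the stored training metadata (path is an assumption).
with open("training_metadata.json") as f:
    meta = json.load(f)

tokenizer = AutoTokenizer.from_pretrained(meta["base_model"])
model = AutoModelForCausalLM.from_pretrained(
    meta["base_model"], torch_dtype=torch.bfloat16
)

# Add <guidance> / </guidance> as special tokens and grow the embedding
# matrix so the new rows line up with the recorded guidance_ids.
tokenizer.add_special_tokens(
    {"additional_special_tokens": meta["guidance_tokens"]}
)
model.resize_token_embeddings(len(tokenizer))
assert tokenizer.convert_tokens_to_ids(meta["guidance_tokens"]) == meta["guidance_ids"]

# Rebuild the LoRA adapter configuration directly from the stored dict
# (r=64, lora_alpha=128, dropout=0.05, seven target projection modules).
lora_config = LoraConfig(**meta["lora_config"])
model = get_peft_model(model, lora_config)
model.print_trainable_parameters()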