MixLoRA 80GB optimized version (commit c99c191, verified)
{
  "add_noise": false,
  "attribute_dim": 32,
  "auto_mapping": null,
  "base_model_name_or_path": "./output/continuous_training_80gb",
  "bias": "none",
  "cond_type": "input",
  "fan_in_fan_out": false,
  "ifs_weight": 1,
  "independent_rank": true,
  "inference_mode": true,
  "init_lora_weights": true,
  "layers_pattern": null,
  "layers_to_transform": null,
  "lora_alpha": 16,
  "lora_dropout": 0.05,
  "mix_loraA": true,
  "mix_start_layer": 0,
  "modules_to_save": null,
  "n_experts": 8,
  "output_selection": false,
  "peft_type": "LORA",
  "r": 2,
  "revision": null,
  "target_modules": [
    "k_proj",
    "up_proj",
    "v_proj",
    "o_proj",
    "gate_proj",
    "q_proj",
    "down_proj",
    "mm_projector"
  ],
  "task_type": "CAUSAL_LM",
  "use_dora": false,
  "use_label_based_selection": true,
  "used_scored_weight": false
}
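
This adapter_config.json is a standard PEFT LoRA configuration extended with MixLoRA-specific fields (n_experts, mix_loraA, mix_start_layer, use_label_based_selection, and similar). The sketch below shows how an adapter directory containing a config like this is typically loaded with Hugging Face PEFT; the adapter path is hypothetical, base_model_name_or_path points to a local directory from the author's training run, and stock PEFT may ignore or reject the MixLoRA-specific keys, so the MixLoRA codebase is likely required for full functionality.

# Minimal sketch, assuming the adapter directory is locally available and
# compatible with standard PEFT loading. "./output/mixlora_adapter" is a
# hypothetical path; the MixLoRA-specific keys in adapter_config.json are
# not part of stock PEFT and are only honored by the MixLoRA implementation.
import torch
from transformers import AutoModelForCausalLM
from peft import PeftConfig, PeftModel

adapter_path = "./output/mixlora_adapter"  # hypothetical adapter directory

# Read adapter_config.json to recover the base model and LoRA hyperparameters.
config = PeftConfig.from_pretrained(adapter_path)
print(config.base_model_name_or_path, config.r, config.lora_alpha)

# Load the base model, then attach the adapter weights on top of it.
base = AutoModelForCausalLM.from_pretrained(
    config.base_model_name_or_path,
    torch_dtype=torch.float16,
)
model = PeftModel.from_pretrained(base, adapter_path)
model.eval()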