```json
{
  "alpha_pattern": {},
  "auto_mapping": null,
  "base_model_name_or_path": "/mnt/xmap_nas_alg/limingxing.lmx/workspace/code/aesthetic/qwen2vl-train/hugging_face/qwen2vl7B/qwen2vl7B",
  "bias": "none",
  "fan_in_fan_out": false,
  "inference_mode": true,
  "init_lora_weights": true,
  "layer_replication": null,
  "layers_pattern": null,
  "layers_to_transform": null,
  "loftq_config": {},
  "lora_alpha": 256,
  "lora_dropout": 0.15,
  "megatron_config": null,
  "megatron_core": "megatron.core",
  "modules_to_save": null,
  "peft_type": "LORA",
  "r": 128,
  "rank_pattern": {},
  "revision": null,
  "target_modules": "^(?!.*patch_embed).*(?:down_proj|k_proj|gate_proj|o_proj|up_proj|v_proj|q_proj|qkv|proj|fc2|fc1).*",
  "task_type": "CAUSAL_LM",
  "use_dora": false,
  "use_rslora": false
}
```
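This is a PEFT LoRA adapter config: rank 128 with `lora_alpha` 256 (an effective scaling of 2), and a `target_modules` regex that attaches LoRA to all attention and MLP projections, including the vision tower's `qkv` and `proj` layers, while the `^(?!.*patch_embed)` negative lookahead excludes the patch embedding. A minimal sketch of applying an adapter saved with this config follows; the adapter directory `./qwen2vl-lora-adapter` and the public checkpoint `Qwen/Qwen2-VL-7B-Instruct` are placeholder assumptions standing in for the local paths above.

```python
# Minimal sketch: attach a LoRA adapter trained with the config above
# to a Qwen2-VL base model. Both paths are hypothetical placeholders.
from peft import PeftModel
from transformers import Qwen2VLForConditionalGeneration

base = Qwen2VLForConditionalGeneration.from_pretrained(
    "Qwen/Qwen2-VL-7B-Instruct",  # assumed stand-in for base_model_name_or_path
    torch_dtype="auto",
    device_map="auto",
)

# from_pretrained reads adapter_config.json (the file shown above)
# from the adapter directory and injects LoRA layers into the matching modules.
model = PeftModel.from_pretrained(base, "./qwen2vl-lora-adapter")
model.eval()  # matches "inference_mode": true
```

Because `target_modules` is a string rather than a list, PEFT treats it as a regex and full-matches it against each module name, which is what lets a single pattern cover both the language model and the vision encoder here.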