# Qwen3-14B-Shadow-FT-BAAI-2k / merge_lora_config.yaml
# LoRA adapter to merge (Shadow-FT checkpoint; rank 128, lr 2e-4 per the directory name)
adapter_name_or_path: /apdcephfs_qy3/share_301069248/users/rummyyang/LLaMA-Factory/LLM-Neo/results/0526-repeat1/result-Qwen3-14B-0526-repeat1/B-2.0k-lora-rank128-ratio0.5-lr0.0002-Shadow_2k-4096-2
# Run the merge on CPU and write the merged model to export_dir
export_device: cpu
export_dir: /apdcephfs_qy3/share_301069248/users/rummyyang/LLaMA-Factory/LLM-Neo/results/0526-repeat1/result-Qwen3-14B-0526-repeat1/B-2.0k-lora-rank128-ratio0.5-lr0.0002-Shadow_2k-4096-2/merged-B2I
# Save in safetensors format rather than the legacy .bin format
export_legacy_format: false
# Maximum shard size, in GB, of the exported checkpoint
export_size: 5
finetuning_type: lora
# Base instruct model the adapter is merged into
model_name_or_path: /apdcephfs_qy3/share_301069248/users/rummyyang/minillm/checkpoints/Qwen/Qwen3_hyperlink/Qwen3-14B-Instruct
# Qwen3 chat template; trust custom model code from the checkpoint
template: qwen3
trust_remote_code: true
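
# Usage sketch (not part of the config): a LLaMA-Factory export config like this is
# typically passed to the `export` subcommand of the CLI, which loads the base model
# at model_name_or_path, applies the LoRA adapter, and writes the merged weights to
# export_dir. Assuming a recent LLaMA-Factory install with `llamafactory-cli` on PATH:
#
#   llamafactory-cli export merge_lora_config.yaml
#
# The paths above point at an internal filesystem; substitute your own adapter,
# base-model, and output locations before running.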