update args spec
commit 5453b93db0
parent 20cf27976f
@@ -20,7 +20,8 @@ class FinetuningArguments:
                   BLOOM choices: [\"24\", \"30\", \"70\"], \
                   Falcon choices: [\"32\", \"60\"], \
                   Baichuan choices: [\"32\", \"40\"] \
-                  Qwen choices: [\"32\"]"}
+                  Qwen choices: [\"32\"], \
+                  XVERSE choices: [\"40\"]"}
     )
     num_layer_trainable: Optional[int] = field(
         default=3,
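For context, a minimal sketch (not this repository's code) of how num_hidden_layers and num_layer_trainable are typically consumed in freeze fine-tuning: only the last num_layer_trainable decoder blocks keep requires_grad=True. The helper name and the ".{idx}." layer-name pattern are illustrative assumptions; real parameter naming differs per model (e.g. BLOOM prefixes its blocks with "h.").

import torch.nn as nn

def freeze_all_but_last_k(model: nn.Module, num_hidden_layers: int, num_layer_trainable: int) -> None:
    # Hypothetical helper: tags like ".29.", ".30.", ".31." match the last k blocks.
    first_trainable = num_hidden_layers - num_layer_trainable
    trainable_tags = [f".{idx}." for idx in range(first_trainable, num_hidden_layers)]
    for name, param in model.named_parameters():
        param.requires_grad = any(tag in name for tag in trainable_tags)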
@@ -32,7 +33,8 @@ class FinetuningArguments:
                   LLaMA & LLaMA-2 choices: [\"mlp\", \"self_attn\"], \
                   BLOOM & Falcon choices: [\"mlp\", \"self_attention\"], \
                   Baichuan choices: [\"mlp\", \"self_attn\"], \
-                  Qwen choices: [\"attn\", \"mlp\"]"}
+                  Qwen choices: [\"mlp\", \"attn\"], \
+                  InternLM, XVERSE choices: the same as LLaMA."}
     )
     lora_rank: Optional[int] = field(
         default=8,
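Similarly, a hedged sketch of how name_module_trainable could narrow freeze fine-tuning to specific sub-modules (e.g. only the "mlp" blocks) inside the layers left trainable above; the helper below is an illustration, not the project's implementation.

import torch.nn as nn

def restrict_to_module(model: nn.Module, name_module_trainable: str) -> None:
    # Hypothetical helper: among parameters still trainable, keep only those whose
    # qualified name contains the chosen module tag (e.g. "mlp" or "self_attn").
    for name, param in model.named_parameters():
        if param.requires_grad and name_module_trainable not in name:
            param.requires_grad = False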
@@ -49,10 +51,11 @@ class FinetuningArguments:
     lora_target: Optional[str] = field(
         default="q_proj,v_proj",
         metadata={"help": "Name(s) of target modules to apply LoRA. Use commas to separate multiple modules. \
-                  LLaMA & LLaMA-2 & InternLM choices: [\"q_proj\", \"k_proj\", \"v_proj\", \"o_proj\", \"gate_proj\", \"up_proj\", \"down_proj\"], \
+                  LLaMA & LLaMA-2 choices: [\"q_proj\", \"k_proj\", \"v_proj\", \"o_proj\", \"gate_proj\", \"up_proj\", \"down_proj\"], \
                   BLOOM & Falcon choices: [\"query_key_value\", \"self_attention.dense\", \"mlp.dense\"], \
                   Baichuan choices: [\"W_pack\", \"o_proj\", \"gate_proj\", \"up_proj\", \"down_proj\"], \
-                  Qwen choices: [\"c_attn\", \"c_proj\", \"w1\", \"w2\"]"}
+                  Qwen choices: [\"c_attn\", \"attn.c_proj\", \"w1\", \"w2\", \"mlp.c_proj\"], \
+                  InternLM, XVERSE choices: the same as LLaMA."}
     )
 
     def __post_init__(self):
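And a short sketch of how the comma-separated lora_target string plugs into PEFT. LoraConfig and get_peft_model are real peft APIs; the rank/alpha values and the model variable are placeholders.

from peft import LoraConfig, get_peft_model

lora_target = "q_proj,v_proj"  # the field's default value
lora_config = LoraConfig(
    r=8,                               # lora_rank
    lora_alpha=32.0,
    target_modules=[name.strip() for name in lora_target.split(",")],
    task_type="CAUSAL_LM",
)
# model = get_peft_model(model, lora_config)  # model: a loaded causal LM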