Fix token-embedding resize for Baichuan models (guard against non-Linear output heads)

This commit is contained in:
hiyouga 2023-12-11 20:55:50 +08:00
parent 0239d29fa0
commit f4657de7d5
1 changed file with 4 additions and 0 deletions

View File

@ -188,6 +188,10 @@ def resize_embedding_layer(model: "PreTrainedModel", tokenizer: "PreTrainedToken
r"""
Resize token embeddings.

Grows (or shrinks) the model's input/output embedding matrices to match
the tokenizer's vocabulary size, padding the new size to a multiple of 64.
"""
# NOTE(review): some models (per the commit title, e.g. Baichuan) use an output
# head that is not a plain torch.nn.Linear; resize_token_embeddings cannot handle
# those, so warn and return early instead of failing — presumably Baichuan's head
# fails isinstance here, TODO confirm against the model implementation.
if not isinstance(model.get_output_embeddings(), torch.nn.Linear):
logger.warning("Current model does not support resizing token embeddings.")
return
# Current vocabulary size is row 0 of the input embedding weight matrix.
old_vocab_size = model.get_input_embeddings().weight.size(0)
# Only resize when the tokenizer and model vocabularies disagree;
# pad_to_multiple_of=64 keeps the new embedding size hardware-friendly.
if len(tokenizer) != old_vocab_size:
model.resize_token_embeddings(len(tokenizer), pad_to_multiple_of=64)