tiny fix
parent 909af8f496
commit ce43386080
@@ -101,7 +101,7 @@ def _init_adapter(
         logger.info("Fine-tuning method: LoRA")
         lastest_checkpoint = None
 
-        assert os.path.exists(model_args.checkpoint_dir[0], CONFIG_NAME), \
+        assert os.path.exists(os.path.join(model_args.checkpoint_dir[0], CONFIG_NAME)), \
             "The given checkpoint is not a LoRA checkpoint, please specify `--finetuning_type full/freeze` instead."
 
         if model_args.checkpoint_dir is not None:
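For context, the one-line change above fixes a misuse of os.path.exists: the function takes a single path argument, so the old two-argument call raised a TypeError at runtime instead of checking whether the adapter config file is present. The sketch below reproduces the failure and the corrected check; the CONFIG_NAME value and the checkpoint directory are illustrative assumptions, not values taken from the repository.

import os

CONFIG_NAME = "adapter_config.json"       # assumption: name of the adapter config file being checked
checkpoint_dir = "output/checkpoint-100"  # hypothetical checkpoint directory for illustration

# Broken call: os.path.exists() accepts one path, so a second positional
# argument raises TypeError rather than performing the intended check.
try:
    os.path.exists(checkpoint_dir, CONFIG_NAME)
except TypeError as err:
    print(f"broken call: {err}")

# Fixed call: join the directory and filename first, then test for existence.
config_path = os.path.join(checkpoint_dir, CONFIG_NAME)
print(f"{config_path} exists: {os.path.exists(config_path)}")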