diff --git a/src/utils/common.py b/src/utils/common.py
index 02880ed8..eaf1d005 100644
--- a/src/utils/common.py
+++ b/src/utils/common.py
@@ -101,7 +101,7 @@ def _init_adapter(
         logger.info("Fine-tuning method: LoRA")
         lastest_checkpoint = None

-        assert os.path.exists(model_args.checkpoint_dir[0], CONFIG_NAME), \
+        assert os.path.exists(os.path.join(model_args.checkpoint_dir[0], CONFIG_NAME)), \
             "The given checkpoint is not a LoRA checkpoint, please specify `--finetuning_type full/freeze` instead."

         if model_args.checkpoint_dir is not None: