From e4bb846c43c087bfdd99d0c9eb0318e95b943977 Mon Sep 17 00:00:00 2001 From: hiyouga Date: Sun, 24 Dec 2023 19:20:12 +0800 Subject: [PATCH] fix inverted gradient-checkpointing support check in patcher --- src/llmtuner/model/patcher.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/llmtuner/model/patcher.py b/src/llmtuner/model/patcher.py index 16272d17..5b91cb47 100644 --- a/src/llmtuner/model/patcher.py +++ b/src/llmtuner/model/patcher.py @@ -199,7 +199,7 @@ def _prepare_model_for_training( logger.info("Upcasting layernorm weights in float32.") if not model_args.disable_gradient_checkpointing: - if getattr(model, "supports_gradient_checkpointing", False): + if not getattr(model, "supports_gradient_checkpointing", False): logger.warning("Current model does not support gradient checkpointing.") else: model.enable_input_require_grads()