From fbac6d21ca58527bde025747c330c3de8585d759 Mon Sep 17 00:00:00 2001
From: oobabooga <112222186+oobabooga@users.noreply.github.com>
Date: Fri, 20 Oct 2023 23:53:24 -0700
Subject: [PATCH] Add missing exception

---
 modules/exllamav2_hf.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/modules/exllamav2_hf.py b/modules/exllamav2_hf.py
index 952d7172..70d64200 100644
--- a/modules/exllamav2_hf.py
+++ b/modules/exllamav2_hf.py
@@ -1,4 +1,5 @@
 import os
+import traceback
 from pathlib import Path
 from typing import Any, Dict, Optional, Union
 
@@ -21,6 +22,9 @@ except ModuleNotFoundError:
         'https://github.com/Dao-AILab/flash-attention#installation-and-features'
     )
     pass
+except Exception:
+    logger.warning('Failed to load flash-attention due to the following error:\n')
+    traceback.print_exc()
 
 
 class Exllamav2HF(PreTrainedModel):
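
Note: for readers without the surrounding file, the change extends the optional-dependency import guard around flash-attention so that failures other than a missing module are surfaced. Below is a minimal, self-contained sketch of the resulting pattern. The `logger` here is a stand-in built from the standard logging module (the real file imports the project's own logger, which this hunk does not show), and the first warning message is paraphrased from the visible diff context.

import logging
import traceback

logger = logging.getLogger(__name__)

try:
    import flash_attn  # optional dependency
except ModuleNotFoundError:
    # flash-attention simply is not installed: warn and continue without it.
    logger.warning(
        'flash-attention is not installed; see '
        'https://github.com/Dao-AILab/flash-attention#installation-and-features'
    )
except Exception:
    # Added by this patch: the import can also fail for other reasons,
    # e.g. a CUDA/ABI mismatch raising ImportError or OSError. A plain
    # ImportError is not a ModuleNotFoundError, so it escapes the clause
    # above; log the full traceback instead of letting module import crash.
    logger.warning('Failed to load flash-attention due to the following error:\n')
    traceback.print_exc()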