cann : add Ascend NPU support (whisper/2336)

* enable Ascend NPU in src/whisper.cpp
* sync test-backend-ops with llama.cpp
Mengqing Cao 2024-08-09 20:21:56 +08:00 committed by Georgi Gerganov
parent 51d964a4ef
commit d2d3200b38
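For context, the first bullet above (enabling the Ascend NPU in src/whisper.cpp) amounts to registering ggml's CANN backend during backend initialization. The snippet below is only a hedged sketch of what such a hook typically looks like, not the literal patch from this commit: it assumes ggml's public CANN entry point ggml_backend_cann_init(device) and the GGML_USE_CANN compile-time flag, and the whisper_backend_init_gpu / WHISPER_LOG_* names follow whisper.cpp's internal conventions rather than being quoted from this diff.

#ifdef GGML_USE_CANN
#include "ggml-cann.h"
#endif

// Sketch: GPU backend selection inside src/whisper.cpp, extended with a CANN branch.
static ggml_backend_t whisper_backend_init_gpu(const whisper_context_params & params) {
    ggml_backend_t backend_gpu = nullptr;

#ifdef GGML_USE_CANN
    if (params.use_gpu) {
        WHISPER_LOG_INFO("%s: using CANN backend\n", __func__);
        // ggml_backend_cann_init() takes the index of the Ascend device to use
        backend_gpu = ggml_backend_cann_init(params.gpu_device);
        if (!backend_gpu) {
            WHISPER_LOG_ERROR("%s: ggml_backend_cann_init() failed\n", __func__);
        }
    }
#endif

    return backend_gpu;
}

Building with the NPU path active then comes down to enabling the corresponding CANN option in the ggml CMake configuration (so that GGML_USE_CANN is defined), with the second bullet keeping test-backend-ops in step with llama.cpp so the new backend is covered by the shared operator tests.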


@@ -32,7 +32,7 @@ DOXYFILE_ENCODING = UTF-8
 # title of most generated pages and in a few other places.
 # The default value is: My Project.
 
-PROJECT_NAME = "llama.cpp"
+PROJECT_NAME = "whisper.cpp"
 
 # The PROJECT_NUMBER tag can be used to enter a project or revision number. This
 # could be handy for archiving the generated documentation or if some version
@@ -44,7 +44,7 @@ PROJECT_NUMBER =
 # for a project that appears at the top of each page and should give viewer a
 # quick idea about the purpose of the project. Keep the description short.
 
-PROJECT_BRIEF = "llama inference engine"
+PROJECT_BRIEF = "Port of OpenAI's Whisper model in C/C++"
 
 # With the PROJECT_LOGO tag one can specify a logo or an icon that is included
 # in the documentation. The maximum height of the logo should not exceed 55