// mirror of https://github.com/ggerganov/llama.cpp.git (synced 2024-12-26)
// This file has been autogenerated by generate-variants.py, do not edit manually.
#include "../fattn-wmma-f16.cuh"
// Instantiate the WMMA (tensor-core) FlashAttention f16 kernels for the
// supported head sizes, 8 columns per block, with a half-precision KQ
// accumulator. Arguments appear to be (head_dim, cols_per_block, KQ_acc_type)
// — confirm against the DECL_FATTN_WMMA_F16_CASE definition in
// ../fattn-wmma-f16.cuh. One instantiation per head size keeps per-TU
// compile time and object size down; sibling autogenerated files cover the
// other (cols_per_block, accumulator) combinations.
DECL_FATTN_WMMA_F16_CASE(64, 8, half);
DECL_FATTN_WMMA_F16_CASE(96, 8, half);
DECL_FATTN_WMMA_F16_CASE(128, 8, half);
DECL_FATTN_WMMA_F16_CASE(256, 8, half);