rpc : fix ggml_backend_rpc_supports_buft() (#7918)
commit 172c825684
parent a55eb1bf0f
@@ -624,12 +624,12 @@ GGML_CALL static enum ggml_status ggml_backend_rpc_graph_compute(ggml_backend_t
 GGML_CALL static bool ggml_backend_rpc_supports_op(ggml_backend_t backend, const ggml_tensor * op) {
     UNUSED(backend);
     UNUSED(op);
-    GGML_ASSERT(false && "not implemented");
-    return false;
+    //TODO: call the remote backend and cache the results
+    return true;
 }
 
 GGML_CALL static bool ggml_backend_rpc_supports_buft(ggml_backend_t backend, ggml_backend_buffer_type_t buft) {
-    if (buft->iface.get_name == ggml_backend_rpc_buffer_type_name) {
+    if (buft->iface.get_name != ggml_backend_rpc_buffer_type_name) {
         return false;
     }
     ggml_backend_rpc_buffer_type_context * buft_ctx = (ggml_backend_rpc_buffer_type_context *)buft->context;
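For context: a ggml backend recognizes its own buffer types by comparing the get_name function pointer, so supports_buft should return false for any buffer type that does not belong to the RPC backend and only then inspect the type's context. The old condition was inverted and rejected exactly the RPC buffer types. Below is a minimal sketch of that pointer-comparison idiom, using simplified stand-in types (buffer_type, rpc_buffer_type_name, rpc_supports_buft are illustrative names, not the real ggml API):

#include <stdbool.h>

/* Simplified stand-ins for the ggml structs, for illustration only. */
typedef const char * (*get_name_fn)(void * buft);

struct buffer_type {
    get_name_fn get_name;   /* identifies the backend that owns this buffer type */
    void *      context;    /* backend-specific data, e.g. the RPC endpoint      */
};

static const char * rpc_buffer_type_name(void * buft) {
    (void) buft;
    return "RPC";
}

/* Mirrors the fixed check: reject anything that is not an RPC buffer type;
   the real code would then compare endpoints via the type's context. */
static bool rpc_supports_buft(struct buffer_type * buft) {
    if (buft->get_name != rpc_buffer_type_name) {
        return false;
    }
    return true;
}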