Fix CI error

This commit is contained in:
liyuhang 2025-01-26 12:53:59 +08:00
parent d9db0929b5
commit 593cc8653d
2 changed files with 0 additions and 4 deletions

View File

@@ -3068,7 +3068,6 @@ bool llama_model::load_tensors(llama_model_loader & ml) {
} break;
case LLM_ARCH_CHATGLM:
{
printf("Loading ChatGLM model...\n");
tok_embd = create_tensor(tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, 0);
// output
@@ -3077,14 +3076,12 @@ bool llama_model::load_tensors(llama_model_loader & ml) {
for (int i = 0; i < n_layer; ++i) {
auto & layer = layers[i];
printf("Loading layer %d...\n",i);
layer.attn_norm = create_tensor(tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, 0);
layer.wqkv = create_tensor(tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}, llama_model_loader::TENSOR_NOT_REQUIRED);
layer.bqkv = create_tensor(tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}, llama_model_loader::TENSOR_NOT_REQUIRED);
if(layer.wqkv == nullptr){
printf("Loading W q k v %d...\n",i);
layer.wq = create_tensor(tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd_head_k * n_head}, 0);
layer.wk = create_tensor(tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_k_gqa}, 0);
layer.wv = create_tensor(tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_v_gqa}, 0);

View File

@@ -7189,7 +7189,6 @@ struct llm_build_context {
const int64_t n_embd_head = hparams.n_embd_head_v;
const int64_t n_embd_gqa = hparams.n_embd_v_gqa();
GGML_ASSERT(n_embd_head == hparams.n_embd_head_k);
bool is_lite = (hparams.n_layer == 27);
struct ggml_tensor * cur;
struct ggml_tensor * inpL;