742 6 months ago

DeepSeek's first generation of reasoning models, with performance comparable to OpenAI-o1, including six dense models distilled from DeepSeek-R1 based on Llama and Qwen.

a030c70657da · 46GB
    Metadata
  • split.count
    5
  • split.no
    1
  • split.tensors.count
    1025
  • Tensor
    blk.14
  • blk.14.ffn_down_exps.weight
    Q2_K
    [2048, 7168, 256]
  • blk.14.ffn_down_shexp.weight
    Q6_K
    [2048, 7168]
  • blk.14.ffn_gate_exps.weight
    Q2_K
    [7168, 2048, 256]
  • blk.14.ffn_gate_inp.weight
    F32
    [7168, 256]
  • blk.14.ffn_gate_shexp.weight
    Q4_K
    [7168, 2048]
  • blk.14.ffn_norm.weight
    F32
    [7168]
  • blk.14.ffn_up_exps.weight
    Q2_K
    [7168, 2048, 256]
  • blk.14.ffn_up_shexp.weight
    Q4_K
    [7168, 2048]
  • blk.15
  • blk.15.attn_kv_a_mqa.weight
    Q6_K
    [7168, 576]
  • blk.15.attn_kv_a_norm.weight
    F32
    [512]
  • blk.15.attn_kv_b.weight
    Q6_K
    [512, 32768]
  • blk.15.attn_norm.weight
    F32
    [7168]
  • blk.15.attn_output.weight
    Q4_K
    [16384, 7168]
  • blk.15.attn_q_a.weight
    Q4_K
    [7168, 1536]
  • blk.15.attn_q_a_norm.weight
    F32
    [1536]
  • blk.15.attn_q_b.weight
    Q4_K
    [1536, 24576]
  • blk.15.exp_probs_b.bias
    F32
    [256]
  • blk.15.ffn_down_exps.weight
    Q2_K
    [2048, 7168, 256]
  • blk.15.ffn_down_shexp.weight
    Q6_K
    [2048, 7168]
  • blk.15.ffn_gate_exps.weight
    Q2_K
    [7168, 2048, 256]
  • blk.15.ffn_gate_inp.weight
    F32
    [7168, 256]
  • blk.15.ffn_gate_shexp.weight
    Q4_K
    [7168, 2048]
  • blk.15.ffn_norm.weight
    F32
    [7168]
  • blk.15.ffn_up_exps.weight
    Q2_K
    [7168, 2048, 256]
  • blk.15.ffn_up_shexp.weight
    Q4_K
    [7168, 2048]
  • blk.16
  • blk.16.attn_kv_a_mqa.weight
    Q6_K
    [7168, 576]
  • blk.16.attn_kv_a_norm.weight
    F32
    [512]
  • blk.16.attn_kv_b.weight
    Q6_K
    [512, 32768]
  • blk.16.attn_norm.weight
    F32
    [7168]
  • blk.16.attn_output.weight
    Q4_K
    [16384, 7168]
  • blk.16.attn_q_a.weight
    Q4_K
    [7168, 1536]
  • blk.16.attn_q_a_norm.weight
    F32
    [1536]
  • blk.16.attn_q_b.weight
    Q4_K
    [1536, 24576]
  • blk.16.exp_probs_b.bias
    F32
    [256]
  • blk.16.ffn_down_exps.weight
    Q2_K
    [2048, 7168, 256]
  • blk.16.ffn_down_shexp.weight
    Q6_K
    [2048, 7168]
  • blk.16.ffn_gate_exps.weight
    Q2_K
    [7168, 2048, 256]
  • blk.16.ffn_gate_inp.weight
    F32
    [7168, 256]
  • blk.16.ffn_gate_shexp.weight
    Q4_K
    [7168, 2048]
  • blk.16.ffn_norm.weight
    F32
    [7168]
  • blk.16.ffn_up_exps.weight
    Q2_K
    [7168, 2048, 256]
  • blk.16.ffn_up_shexp.weight
    Q4_K
    [7168, 2048]
  • blk.17
  • blk.17.attn_kv_a_mqa.weight
    Q6_K
    [7168, 576]
  • blk.17.attn_kv_a_norm.weight
    F32
    [512]
  • blk.17.attn_kv_b.weight
    Q6_K
    [512, 32768]
  • blk.17.attn_norm.weight
    F32
    [7168]
  • blk.17.attn_output.weight
    Q4_K
    [16384, 7168]
  • blk.17.attn_q_a.weight
    Q4_K
    [7168, 1536]
  • blk.17.attn_q_a_norm.weight
    F32
    [1536]
  • blk.17.attn_q_b.weight
    Q4_K
    [1536, 24576]
  • blk.17.exp_probs_b.bias
    F32
    [256]
  • blk.17.ffn_down_exps.weight
    Q2_K
    [2048, 7168, 256]
  • blk.17.ffn_down_shexp.weight
    Q6_K
    [2048, 7168]
  • blk.17.ffn_gate_exps.weight
    Q2_K
    [7168, 2048, 256]
  • blk.17.ffn_gate_inp.weight
    F32
    [7168, 256]
  • blk.17.ffn_gate_shexp.weight
    Q4_K
    [7168, 2048]
  • blk.17.ffn_norm.weight
    F32
    [7168]
  • blk.17.ffn_up_exps.weight
    Q2_K
    [7168, 2048, 256]
  • blk.17.ffn_up_shexp.weight
    Q4_K
    [7168, 2048]
  • blk.18
  • blk.18.attn_kv_a_mqa.weight
    Q6_K
    [7168, 576]
  • blk.18.attn_kv_a_norm.weight
    F32
    [512]
  • blk.18.attn_kv_b.weight
    Q6_K
    [512, 32768]
  • blk.18.attn_norm.weight
    F32
    [7168]
  • blk.18.attn_output.weight
    Q4_K
    [16384, 7168]
  • blk.18.attn_q_a.weight
    Q4_K
    [7168, 1536]
  • blk.18.attn_q_a_norm.weight
    F32
    [1536]
  • blk.18.attn_q_b.weight
    Q4_K
    [1536, 24576]
  • blk.18.exp_probs_b.bias
    F32
    [256]
  • blk.18.ffn_down_exps.weight
    Q2_K
    [2048, 7168, 256]
  • blk.18.ffn_down_shexp.weight
    Q6_K
    [2048, 7168]
  • blk.18.ffn_gate_exps.weight
    Q2_K
    [7168, 2048, 256]
  • blk.18.ffn_gate_inp.weight
    F32
    [7168, 256]
  • blk.18.ffn_gate_shexp.weight
    Q4_K
    [7168, 2048]
  • blk.18.ffn_norm.weight
    F32
    [7168]
  • blk.18.ffn_up_exps.weight
    Q2_K
    [7168, 2048, 256]
  • blk.18.ffn_up_shexp.weight
    Q4_K
    [7168, 2048]
  • blk.19
  • blk.19.attn_kv_a_mqa.weight
    Q6_K
    [7168, 576]
  • blk.19.attn_kv_a_norm.weight
    F32
    [512]
  • blk.19.attn_kv_b.weight
    Q6_K
    [512, 32768]
  • blk.19.attn_norm.weight
    F32
    [7168]
  • blk.19.attn_output.weight
    Q4_K
    [16384, 7168]
  • blk.19.attn_q_a.weight
    Q4_K
    [7168, 1536]
  • blk.19.attn_q_a_norm.weight
    F32
    [1536]
  • blk.19.attn_q_b.weight
    Q4_K
    [1536, 24576]
  • blk.19.exp_probs_b.bias
    F32
    [256]
  • blk.19.ffn_down_exps.weight
    Q2_K
    [2048, 7168, 256]
  • blk.19.ffn_down_shexp.weight
    Q6_K
    [2048, 7168]
  • blk.19.ffn_gate_exps.weight
    Q2_K
    [7168, 2048, 256]
  • blk.19.ffn_gate_inp.weight
    F32
    [7168, 256]
  • blk.19.ffn_gate_shexp.weight
    Q4_K
    [7168, 2048]
  • blk.19.ffn_norm.weight
    F32
    [7168]
  • blk.19.ffn_up_exps.weight
    Q2_K
    [7168, 2048, 256]
  • blk.19.ffn_up_shexp.weight
    Q4_K
    [7168, 2048]
  • blk.20
  • blk.20.attn_kv_a_mqa.weight
    Q6_K
    [7168, 576]
  • blk.20.attn_kv_a_norm.weight
    F32
    [512]
  • blk.20.attn_kv_b.weight
    Q6_K
    [512, 32768]
  • blk.20.attn_norm.weight
    F32
    [7168]
  • blk.20.attn_output.weight
    Q4_K
    [16384, 7168]
  • blk.20.attn_q_a.weight
    Q4_K
    [7168, 1536]
  • blk.20.attn_q_a_norm.weight
    F32
    [1536]
  • blk.20.attn_q_b.weight
    Q4_K
    [1536, 24576]
  • blk.20.exp_probs_b.bias
    F32
    [256]
  • blk.20.ffn_down_exps.weight
    Q2_K
    [2048, 7168, 256]
  • blk.20.ffn_down_shexp.weight
    Q6_K
    [2048, 7168]
  • blk.20.ffn_gate_exps.weight
    Q2_K
    [7168, 2048, 256]
  • blk.20.ffn_gate_inp.weight
    F32
    [7168, 256]
  • blk.20.ffn_gate_shexp.weight
    Q4_K
    [7168, 2048]
  • blk.20.ffn_norm.weight
    F32
    [7168]
  • blk.20.ffn_up_exps.weight
    Q2_K
    [7168, 2048, 256]
  • blk.20.ffn_up_shexp.weight
    Q4_K
    [7168, 2048]
  • blk.21
  • blk.21.attn_kv_a_mqa.weight
    Q6_K
    [7168, 576]
  • blk.21.attn_kv_a_norm.weight
    F32
    [512]
  • blk.21.attn_kv_b.weight
    Q6_K
    [512, 32768]
  • blk.21.attn_norm.weight
    F32
    [7168]
  • blk.21.attn_output.weight
    Q4_K
    [16384, 7168]
  • blk.21.attn_q_a.weight
    Q4_K
    [7168, 1536]
  • blk.21.attn_q_a_norm.weight
    F32
    [1536]
  • blk.21.attn_q_b.weight
    Q4_K
    [1536, 24576]
  • blk.21.exp_probs_b.bias
    F32
    [256]
  • blk.21.ffn_down_exps.weight
    Q2_K
    [2048, 7168, 256]
  • blk.21.ffn_down_shexp.weight
    Q6_K
    [2048, 7168]
  • blk.21.ffn_gate_exps.weight
    Q2_K
    [7168, 2048, 256]
  • blk.21.ffn_gate_inp.weight
    F32
    [7168, 256]
  • blk.21.ffn_gate_shexp.weight
    Q4_K
    [7168, 2048]
  • blk.21.ffn_norm.weight
    F32
    [7168]
  • blk.21.ffn_up_exps.weight
    Q2_K
    [7168, 2048, 256]
  • blk.21.ffn_up_shexp.weight
    Q4_K
    [7168, 2048]
  • blk.22
  • blk.22.attn_kv_a_mqa.weight
    Q6_K
    [7168, 576]
  • blk.22.attn_kv_a_norm.weight
    F32
    [512]
  • blk.22.attn_kv_b.weight
    Q6_K
    [512, 32768]
  • blk.22.attn_norm.weight
    F32
    [7168]
  • blk.22.attn_output.weight
    Q4_K
    [16384, 7168]
  • blk.22.attn_q_a.weight
    Q4_K
    [7168, 1536]
  • blk.22.attn_q_a_norm.weight
    F32
    [1536]
  • blk.22.attn_q_b.weight
    Q4_K
    [1536, 24576]
  • blk.22.exp_probs_b.bias
    F32
    [256]
  • blk.22.ffn_down_exps.weight
    Q2_K
    [2048, 7168, 256]
  • blk.22.ffn_down_shexp.weight
    Q6_K
    [2048, 7168]
  • blk.22.ffn_gate_exps.weight
    Q2_K
    [7168, 2048, 256]
  • blk.22.ffn_gate_inp.weight
    F32
    [7168, 256]
  • blk.22.ffn_gate_shexp.weight
    Q4_K
    [7168, 2048]
  • blk.22.ffn_norm.weight
    F32
    [7168]
  • blk.22.ffn_up_exps.weight
    Q2_K
    [7168, 2048, 256]
  • blk.22.ffn_up_shexp.weight
    Q4_K
    [7168, 2048]
  • blk.23
  • blk.23.attn_kv_a_mqa.weight
    Q6_K
    [7168, 576]
  • blk.23.attn_kv_a_norm.weight
    F32
    [512]
  • blk.23.attn_kv_b.weight
    Q6_K
    [512, 32768]
  • blk.23.attn_norm.weight
    F32
    [7168]
  • blk.23.attn_output.weight
    Q4_K
    [16384, 7168]
  • blk.23.attn_q_a.weight
    Q4_K
    [7168, 1536]
  • blk.23.attn_q_a_norm.weight
    F32
    [1536]
  • blk.23.attn_q_b.weight
    Q4_K
    [1536, 24576]
  • blk.23.exp_probs_b.bias
    F32
    [256]
  • blk.23.ffn_down_exps.weight
    Q2_K
    [2048, 7168, 256]
  • blk.23.ffn_down_shexp.weight
    Q6_K
    [2048, 7168]
  • blk.23.ffn_gate_exps.weight
    Q2_K
    [7168, 2048, 256]
  • blk.23.ffn_gate_inp.weight
    F32
    [7168, 256]
  • blk.23.ffn_gate_shexp.weight
    Q4_K
    [7168, 2048]
  • blk.23.ffn_norm.weight
    F32
    [7168]
  • blk.23.ffn_up_exps.weight
    Q2_K
    [7168, 2048, 256]
  • blk.23.ffn_up_shexp.weight
    Q4_K
    [7168, 2048]
  • blk.24
  • blk.24.attn_kv_a_mqa.weight
    Q6_K
    [7168, 576]
  • blk.24.attn_kv_a_norm.weight
    F32
    [512]
  • blk.24.attn_kv_b.weight
    Q6_K
    [512, 32768]
  • blk.24.attn_norm.weight
    F32
    [7168]
  • blk.24.attn_output.weight
    Q4_K
    [16384, 7168]
  • blk.24.attn_q_a.weight
    Q4_K
    [7168, 1536]
  • blk.24.attn_q_a_norm.weight
    F32
    [1536]
  • blk.24.attn_q_b.weight
    Q4_K
    [1536, 24576]
  • blk.24.exp_probs_b.bias
    F32
    [256]
  • blk.24.ffn_down_exps.weight
    Q2_K
    [2048, 7168, 256]
  • blk.24.ffn_down_shexp.weight
    Q6_K
    [2048, 7168]
  • blk.24.ffn_gate_exps.weight
    Q2_K
    [7168, 2048, 256]
  • blk.24.ffn_gate_inp.weight
    F32
    [7168, 256]
  • blk.24.ffn_gate_shexp.weight
    Q4_K
    [7168, 2048]
  • blk.24.ffn_norm.weight
    F32
    [7168]
  • blk.24.ffn_up_exps.weight
    Q2_K
    [7168, 2048, 256]
  • blk.24.ffn_up_shexp.weight
    Q4_K
    [7168, 2048]
  • blk.25
  • blk.25.attn_kv_a_mqa.weight
    Q6_K
    [7168, 576]
  • blk.25.attn_kv_a_norm.weight
    F32
    [512]
  • blk.25.attn_kv_b.weight
    Q6_K
    [512, 32768]
  • blk.25.attn_norm.weight
    F32
    [7168]
  • blk.25.attn_output.weight
    Q4_K
    [16384, 7168]
  • blk.25.attn_q_a.weight
    Q4_K
    [7168, 1536]
  • blk.25.attn_q_a_norm.weight
    F32
    [1536]
  • blk.25.attn_q_b.weight
    Q4_K
    [1536, 24576]
  • blk.25.exp_probs_b.bias
    F32
    [256]
  • blk.25.ffn_down_exps.weight
    Q2_K
    [2048, 7168, 256]
  • blk.25.ffn_down_shexp.weight
    Q6_K
    [2048, 7168]
  • blk.25.ffn_gate_exps.weight
    Q2_K
    [7168, 2048, 256]
  • blk.25.ffn_gate_inp.weight
    F32
    [7168, 256]
  • blk.25.ffn_gate_shexp.weight
    Q4_K
    [7168, 2048]
  • blk.25.ffn_norm.weight
    F32
    [7168]
  • blk.25.ffn_up_exps.weight
    Q2_K
    [7168, 2048, 256]
  • blk.25.ffn_up_shexp.weight
    Q4_K
    [7168, 2048]
  • blk.26
  • blk.26.attn_kv_a_mqa.weight
    Q6_K
    [7168, 576]
  • blk.26.attn_kv_a_norm.weight
    F32
    [512]
  • blk.26.attn_kv_b.weight
    Q6_K
    [512, 32768]
  • blk.26.attn_norm.weight
    F32
    [7168]
  • blk.26.attn_output.weight
    Q4_K
    [16384, 7168]
  • blk.26.attn_q_a.weight
    Q4_K
    [7168, 1536]
  • blk.26.attn_q_a_norm.weight
    F32
    [1536]
  • blk.26.attn_q_b.weight
    Q4_K
    [1536, 24576]
  • blk.26.exp_probs_b.bias
    F32
    [256]