Qwen2.5 models are pretrained on Alibaba's latest large-scale dataset, encompassing up to 18 trillion tokens. The models support context lengths of up to 128K tokens and offer multilingual support.

Available in 0.5B, 1.5B, 3B, 7B, 14B, 32B, and 72B parameter sizes, with tool use supported.

Digest: e0428e378898 · 824MB
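Once a tag of this model has been pulled (for example with `ollama pull qwen2.5:1.5b`), the local Ollama server exposes an HTTP API on port 11434. The following is a minimal sketch, assuming a default local install and that the 1.5b tag is the one you pulled; substitute whichever tag you actually use.

```python
# Minimal sketch: query a locally pulled Qwen2.5 model through Ollama's HTTP API.
# Assumes a default Ollama server on localhost:11434 and that the tag below
# exists locally; replace it with whichever qwen2.5 tag you pulled.
import json
import urllib.request

payload = {
    "model": "qwen2.5:1.5b",
    "prompt": "Explain grouped-query attention in one sentence.",
    "stream": False,
}
req = urllib.request.Request(
    "http://localhost:11434/api/generate",
    data=json.dumps(payload).encode("utf-8"),
    headers={"Content-Type": "application/json"},
)
with urllib.request.urlopen(req) as resp:
    print(json.loads(resp.read())["response"])
```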
Metadata
  • general.architecture: qwen2
  • general.file_type: Q3_K_M
  • qwen2.attention.head_count: 12
  • qwen2.attention.head_count_kv: 2
  • qwen2.attention.layer_norm_rms_epsilon: 1e-06
  • qwen2.block_count: 28
  • qwen2.context_length: 32768
  • qwen2.embedding_length: 1536
  • qwen2.feed_forward_length: 8960
  • qwen2.rope.freq_base: 1e+06
  • tokenizer.ggml.add_bos_token: false
  • tokenizer.ggml.bos_token_id: 151643
  • tokenizer.ggml.eos_token_id: 151645
  • tokenizer.ggml.merges: [Ġ Ġ ĠĠ ĠĠ i n Ġ t ĠĠĠĠ ĠĠĠĠ ...]
  • tokenizer.ggml.model: gpt2
  • tokenizer.ggml.padding_token_id: 151643
  • tokenizer.ggml.pre: qwen2
  • tokenizer.ggml.token_type: [1 1 1 1 1 ...]
  • tokenizer.ggml.tokens: [! " # $ % ...]
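A few of these hyperparameters fix the tensor shapes listed under Tensors below. A small sketch of the arithmetic (values copied from the metadata above; the per-head size is inferred as embedding_length divided by head_count):

```python
# Derive per-head and grouped-query attention (GQA) widths from the GGUF metadata above.
embedding_length = 1536  # qwen2.embedding_length
head_count       = 12    # qwen2.attention.head_count (query heads)
head_count_kv    = 2     # qwen2.attention.head_count_kv (key/value heads)

head_dim = embedding_length // head_count  # 128
q_width  = head_count * head_dim           # 1536 -> attn_q.weight shape [1536 1536]
kv_width = head_count_kv * head_dim        # 256  -> attn_k/v.weight shape [1536 256]

print(f"head_dim={head_dim}, q_width={q_width}, kv_width={kv_width}")
print(f"{head_count // head_count_kv} query heads share each key/value head")
```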
Tensors

Name                        Type    Shape
token_embd.weight           Q6_K    [1536 151936]
output_norm.weight          F32     [1536]

Each of the 28 transformer blocks (blk.0 through blk.27) contains the same twelve tensors; the only deviations from the pattern are noted in the Type column.

Name                        Type                          Shape
blk.N.attn_norm.weight      F32                           [1536]
blk.N.attn_q.weight         Q3_K                          [1536 1536]
blk.N.attn_q.bias           F32                           [1536]
blk.N.attn_k.weight         Q3_K                          [1536 256]
blk.N.attn_k.bias           F32                           [256]
blk.N.attn_v.weight         Q4_K (Q5_K in blk.0, blk.1)   [1536 256]
blk.N.attn_v.bias           F32                           [256]
blk.N.attn_output.weight    Q4_K                          [1536 1536]
blk.N.ffn_norm.weight       F32                           [1536]
blk.N.ffn_gate.weight       Q3_K                          [1536 8960]
blk.N.ffn_up.weight         Q3_K                          [1536 8960]
blk.N.ffn_down.weight       Q4_K (Q5_K in blk.0)          [8960 1536]
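As a rough cross-check against the 824MB file size shown above, the tensor table can be turned into a byte estimate. The bits-per-weight figures below are approximate averages for the llama.cpp k-quant formats, and GGUF overhead (header, tokenizer vocabulary, alignment padding) is not counted, so this is a sketch rather than an exact accounting.

```python
# Rough size estimate for this quantized GGUF, reconstructed from the tensor table above.
# Bits-per-weight values are approximate k-quant averages; file overhead is ignored.
BPW = {"F32": 32.0, "Q6_K": 6.5625, "Q5_K": 5.5, "Q4_K": 4.5, "Q3_K": 3.4375}

d_model, d_ffn, n_vocab, n_blocks, kv_width = 1536, 8960, 151936, 28, 256

def block_tensors(i):
    """(element_count, quant_type) pairs for transformer block i, per the table."""
    return [
        (d_model, "F32"),                                    # attn_norm.weight
        (d_model * d_model, "Q3_K"),                         # attn_q.weight
        (d_model, "F32"),                                    # attn_q.bias
        (d_model * kv_width, "Q3_K"),                        # attn_k.weight
        (kv_width, "F32"),                                   # attn_k.bias
        (d_model * kv_width, "Q5_K" if i <= 1 else "Q4_K"),  # attn_v.weight
        (kv_width, "F32"),                                   # attn_v.bias
        (d_model * d_model, "Q4_K"),                         # attn_output.weight
        (d_model, "F32"),                                    # ffn_norm.weight
        (d_model * d_ffn, "Q3_K"),                           # ffn_gate.weight
        (d_model * d_ffn, "Q3_K"),                           # ffn_up.weight
        (d_ffn * d_model, "Q5_K" if i == 0 else "Q4_K"),     # ffn_down.weight
    ]

tensors = [(d_model * n_vocab, "Q6_K"), (d_model, "F32")]    # token_embd, output_norm
for i in range(n_blocks):
    tensors.extend(block_tensors(i))

total_bytes = sum(n * BPW[t] for n, t in tensors) / 8
print(f"{len(tensors)} tensors, roughly {total_bytes / 1e6:.0f} MB of tensor data")
```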