Qwen2.5 models are pretrained on Alibaba's latest large-scale dataset, encompassing up to 18 trillion tokens. The models support context lengths of up to 128K tokens and are multilingual.

Qwen2.5 is available in 0.5B, 1.5B, 3B, 7B, 14B, 32B, and 72B parameter sizes and supports tool calling.


This page describes the 0.5B variant at Q5_1 quantization: 9900fca5cf9a · 419MB
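
To try this model locally, a minimal sketch using the official `ollama` Python client is shown below. The tag `qwen2.5:0.5b` is an assumption for the 0.5B build on this page; the exact tag that resolves to this Q5_1 digest may differ.

```python
# Minimal sketch: chat with a local Qwen2.5 build through the Ollama Python client
# (pip install ollama). Assumes a running Ollama server; the model tag is a guess
# for the 0.5B build and may not match this exact Q5_1 digest.
import ollama

response = ollama.chat(
    model="qwen2.5:0.5b",  # hypothetical tag; substitute whatever tag you pulled
    messages=[{"role": "user", "content": "Give me a one-sentence summary of GGUF."}],
)
print(response["message"]["content"])
```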
    Metadata
  • general.architecture: qwen2
  • general.file_type: Q5_1
  • qwen2.attention.head_count: 14
  • qwen2.attention.head_count_kv: 2
  • qwen2.attention.layer_norm_rms_epsilon: 1e-06
  • qwen2.block_count: 24
  • qwen2.context_length: 32768
  • qwen2.embedding_length: 896
  • qwen2.feed_forward_length: 4864
  • qwen2.rope.freq_base: 1e+06
  • tokenizer.ggml.add_bos_token: false
  • tokenizer.ggml.bos_token_id: 151643
  • tokenizer.ggml.eos_token_id: 151645
  • tokenizer.ggml.merges: [Ġ Ġ ĠĠ ĠĠ i n Ġ t ĠĠĠĠ ĠĠĠĠ ...]
  • tokenizer.ggml.model: gpt2
  • tokenizer.ggml.padding_token_id: 151643
  • tokenizer.ggml.pre: qwen2
  • tokenizer.ggml.token_type: [1 1 1 1 1 ...]
  • tokenizer.ggml.tokens: [! " # $ % ...]
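
The attention geometry implied by this metadata matches the tensor shapes listed below: with a 896-wide embedding and 14 query heads, each head is 64-dimensional, and the 2 KV heads (grouped-query attention) give the 128-wide K/V projections. A small sketch of that arithmetic, using only values from the metadata above:

```python
# Derived attention dimensions for this Qwen2.5 0.5B GGUF, from the metadata above.
embedding_length = 896   # qwen2.embedding_length
head_count = 14          # qwen2.attention.head_count (query heads)
head_count_kv = 2        # qwen2.attention.head_count_kv (KV heads, GQA)

head_dim = embedding_length // head_count   # 896 // 14 = 64 per head
kv_width = head_count_kv * head_dim         # 2 * 64 = 128 -> attn_k / attn_v shapes [896 128]
gqa_group = head_count // head_count_kv     # 7 query heads share each KV head

print(head_dim, kv_width, gqa_group)        # 64 128 7
```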
    Tensors

    Name                        Type   Shape
    token_embd.weight           Q8_0   [896 151936]
    blk.0.attn_norm.weight      F32    [896]
    blk.0.ffn_down.weight       Q5_1   [4864 896]
    blk.0.ffn_gate.weight       Q5_1   [896 4864]
    blk.0.ffn_up.weight         Q5_1   [896 4864]
    blk.0.ffn_norm.weight       F32    [896]
    blk.0.attn_k.bias           F32    [128]
    blk.0.attn_k.weight         Q5_1   [896 128]
    blk.0.attn_output.weight    Q5_1   [896 896]
    blk.0.attn_q.bias           F32    [896]
    blk.0.attn_q.weight         Q5_1   [896 896]
    blk.0.attn_v.bias           F32    [128]
    blk.0.attn_v.weight         Q5_1   [896 128]
    blk.1 … blk.23              (same per-block tensors, types, and shapes as blk.0)
    output_norm.weight          F32    [896]
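
Both sections above (the key/value metadata and the tensor table) can be reproduced locally from the GGUF file itself. A minimal sketch, assuming the `gguf` Python package that ships with llama.cpp (`pip install gguf`) and a placeholder local path:

```python
# Minimal sketch: dump GGUF metadata keys and tensor info with the `gguf` package
# from llama.cpp (pip install gguf). The file path below is a placeholder.
from gguf import GGUFReader

reader = GGUFReader("qwen2.5-0.5b-q5_1.gguf")  # hypothetical local path to the blob

# Key/value metadata (the general.*, qwen2.*, and tokenizer.ggml.* fields above)
for key in reader.fields:
    print(key)

# Tensor names, quantization types, and shapes (the table above)
for tensor in reader.tensors:
    print(tensor.name, tensor.tensor_type.name, list(tensor.shape))
```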