Uncensored 8x7B and 8x22B fine-tunes of the Mixtral mixture-of-experts models that excel at coding tasks. Created by Eric Hartford.

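Once pulled, the model can be queried through a locally running Ollama server's HTTP API. A minimal sketch in Python, assuming the server is on its default port and the prompt is purely illustrative:

```python
# Minimal sketch: query a local Ollama server over its HTTP API.
# Assumes `ollama pull dolphin-mixtral:8x7b` has already been run and
# the server is listening on the default port 11434.
import requests

resp = requests.post(
    "http://localhost:11434/api/generate",
    json={
        "model": "dolphin-mixtral:8x7b",   # or "dolphin-mixtral:8x22b"
        "prompt": "Write a Python function that reverses a linked list.",
        "stream": False,                   # return one JSON object, not a stream
    },
    timeout=600,
)
resp.raise_for_status()
print(resp.json()["response"])             # the generated completion
```
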
Sizes: 8x7b, 8x22b

455.9K pulls · updated yesterday

5e241ec72b81 · 27GB
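The blob above is a GGUF file, and the metadata and tensor listing that follow come straight from its header. A minimal sketch of reproducing such a dump with the `gguf` package from the llama.cpp project (the file path is illustrative, and decoding field values takes extra unpacking, so this only lists keys):

```python
# Minimal sketch: dump GGUF header contents with the `gguf` package
# from llama.cpp (pip install gguf). The path is illustrative.
from gguf import GGUFReader

reader = GGUFReader("dolphin-mixtral-8x7b.gguf")

# Metadata keys -- the list below.
for name in reader.fields:
    print(name)

# Tensor name, quantization type, and shape -- the table below.
for t in reader.tensors:
    print(t.name, t.tensor_type.name, list(t.shape))
```
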
Metadata

  • general.architecture: llama
  • general.basename: dolphin-2.6-mixtral
  • general.dataset.0.name: Dolphin
  • general.dataset.0.organization: Ehartford
  • general.dataset.0.repo_url: https://huggingface.co/ehartford/dolphin
  • general.dataset.1.name: Airoboros 2.2.1
  • general.dataset.1.organization: Jondurbin
  • general.dataset.1.repo_url: https://huggingface.co/jondurbin/airoboros-2.2.1
  • general.dataset.1.version: 2.2.1
  • general.dataset.2.name: Dolphin Coder
  • general.dataset.2.organization: Ehartford
  • general.dataset.2.repo_url: https://huggingface.co/ehartford/dolphin-coder
  • general.dataset.3.name: Openhermes
  • general.dataset.3.organization: Teknium
  • general.dataset.3.repo_url: https://huggingface.co/teknium/openhermes
  • general.dataset.4.name: Magicoder OSS Instruct 75K
  • general.dataset.4.organization: Ise Uiuc
  • general.dataset.4.repo_url: https://huggingface.co/ise-uiuc/Magicoder-OSS-Instruct-75K
  • general.dataset.5.name: Magicoder Evol Instruct 110K
  • general.dataset.5.organization: Ise Uiuc
  • general.dataset.5.repo_url: https://huggingface.co/ise-uiuc/Magicoder-Evol-Instruct-110K
  • general.dataset.6.name: Capybara
  • general.dataset.6.organization: LDJnr
  • general.dataset.6.repo_url: https://huggingface.co/LDJnr/Capybara
  • general.dataset.count: 7
  • general.file_type: 14
  • general.languages: [en]
  • general.license: apache-2.0
  • general.name: Dolphin 2.6 Mixtral 8x7b
  • general.quantization_version: 2
  • general.size_label: 8x7B
  • general.type: model
  • llama.attention.head_count: 32
  • llama.attention.head_count_kv: 8
  • llama.attention.layer_norm_rms_epsilon: 1e-05
  • llama.block_count: 32
  • llama.context_length: 32768
  • llama.embedding_length: 4096
  • llama.expert_count: 8
  • llama.expert_used_count: 2
  • llama.feed_forward_length: 14336
  • llama.rope.dimension_count: 128
  • llama.rope.freq_base: 1e+06
  • llama.vocab_size: 32002
  • tokenizer.ggml.add_bos_token: true
  • tokenizer.ggml.add_eos_token: false
  • tokenizer.ggml.bos_token_id: 1
  • tokenizer.ggml.eos_token_id: 32000
  • tokenizer.ggml.model: llama
  • tokenizer.ggml.pre: default
  • tokenizer.ggml.scores: [-1000, -1000, -1000, 0, 0, ...]
  • tokenizer.ggml.token_type: [3, 3, 3, 6, 6, ...]
  • tokenizer.ggml.tokens: [<unk>, <s>, </s>, <0x00>, <0x01>, ...]
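A few of these values determine memory behavior at inference time: llama.attention.head_count (32) versus llama.attention.head_count_kv (8) means the model uses grouped-query attention, shrinking the KV cache fourfold, and the per-token K/V width of 8 × 128 = 1024 matches the attn_k and attn_v shapes in the tensor table below. A back-of-the-envelope sketch, assuming an unquantized f16 cache:

```python
# Back-of-the-envelope KV-cache size from the metadata above,
# assuming an f16 (2-byte) cache; real runtimes may quantize it.
head_count_kv  = 8        # llama.attention.head_count_kv
head_dim       = 128      # llama.rope.dimension_count (per-head width)
block_count    = 32       # llama.block_count
context_length = 32768    # llama.context_length
bytes_per_val  = 2        # f16

kv_width = head_count_kv * head_dim            # 1024, matches attn_k/attn_v
total = 2 * block_count * context_length * kv_width * bytes_per_val  # K and V
print(f"{total / 2**30:.1f} GiB at full context")  # 4.0 GiB
```
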
Tensor

All 32 transformer blocks share the same tensor layout; the only per-block difference is ffn_down_exps.weight, which is Q5_K in blocks 0–3 and Q4_K in blocks 4–31. Identical rows are therefore collapsed into block ranges.

  Name                             Type   Shape
  token_embd.weight                Q4_K   [4096, 32002]
  blk.{0–31}.attn_k.weight         Q8_0   [4096, 1024]
  blk.{0–31}.attn_norm.weight      F32    [4096]
  blk.{0–31}.attn_output.weight    Q5_K   [4096, 4096]
  blk.{0–31}.attn_q.weight         Q4_K   [4096, 4096]
  blk.{0–31}.attn_v.weight         Q8_0   [4096, 1024]
  blk.{0–3}.ffn_down_exps.weight   Q5_K   [14336, 4096, 8]
  blk.{4–31}.ffn_down_exps.weight  Q4_K   [14336, 4096, 8]
  blk.{0–31}.ffn_gate_exps.weight  Q4_K   [4096, 14336, 8]
  blk.{0–31}.ffn_gate_inp.weight   F32    [4096, 8]
  blk.{0–31}.ffn_norm.weight       F32    [4096]
  blk.{0–31}.ffn_up_exps.weight    Q4_K   [4096, 14336, 8]
  output.weight                    Q6_K   [4096, 32002]
  output_norm.weight               F32    [4096]
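The ffn_gate_inp tensors ([4096, 8]) are the per-layer routers behind llama.expert_count = 8 and llama.expert_used_count = 2: each token's hidden state is scored against all eight experts, the top two run, and their SwiGLU outputs are blended by a softmax over the selected scores. A toy NumPy sketch with shrunken dimensions (the real model uses embedding_length 4096 and feed_forward_length 14336):

```python
# Toy sketch of the top-2 mixture-of-experts FFN implied by the tensor
# shapes above. Dimensions are shrunk for illustration; names are ours.
import numpy as np

rng = np.random.default_rng(0)
d, ff, n_experts, top_k = 8, 16, 8, 2      # real model: d=4096, ff=14336

w_router = rng.normal(size=(d, n_experts))      # ffn_gate_inp.weight
w_gate   = rng.normal(size=(n_experts, d, ff))  # ffn_gate_exps.weight
w_up     = rng.normal(size=(n_experts, d, ff))  # ffn_up_exps.weight
w_down   = rng.normal(size=(n_experts, ff, d))  # ffn_down_exps.weight

def silu(v):
    return v / (1.0 + np.exp(-v))

def moe_ffn(x):
    logits = x @ w_router                   # one router score per expert
    top = np.argsort(logits)[-top_k:]       # keep the 2 highest-scoring experts
    w = np.exp(logits[top] - logits[top].max())
    w /= w.sum()                            # softmax over the selected scores only
    out = np.zeros(d)
    for weight, i in zip(w, top):
        # SwiGLU feed-forward of expert i: down(silu(gate(x)) * up(x))
        out += weight * ((silu(x @ w_gate[i]) * (x @ w_up[i])) @ w_down[i])
    return out

print(moe_ffn(rng.normal(size=d)).shape)    # (8,)
```

Because only 2 of 8 experts run per token, roughly 13B of the 47B total parameters are active on any forward pass, which is why the model's compute cost is closer to a 13B dense model despite the 27GB download.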