MichelRosselli / GLM-4.5-Air:latest
1,344 Downloads · Updated 5 days ago

GLM-4.5-Air is a hybrid reasoning model that provides two modes: a thinking mode for complex reasoning and tool use, and a non-thinking mode for immediate responses.

Capabilities: tools, thinking

GLM-4.5-Air:latest
model    a6a5f1eb9919 · 73GB
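
The two modes described above can be toggled per request. Below is a minimal sketch of calling this tag through the Ollama Python client; it assumes a local Ollama server with the model pulled and a client/server version recent enough to expose the think option for hybrid-reasoning models. The prompts are placeholders.

# Minimal sketch: thinking vs. non-thinking mode via the Ollama Python client.
import ollama

MODEL = "MichelRosselli/GLM-4.5-Air:latest"

# Thinking mode: the model produces a reasoning trace before the final answer.
resp = ollama.chat(
    model=MODEL,
    messages=[{"role": "user", "content": "Plan the steps to parse a CSV file."}],
    think=True,
)
print(resp.message.thinking)   # intermediate reasoning (thinking mode only)
print(resp.message.content)    # final answer

# Non-thinking mode: immediate response, no reasoning trace.
resp = ollama.chat(
    model=MODEL,
    messages=[{"role": "user", "content": "Name three CSV parsing libraries."}],
    think=False,
)
print(resp.message.content)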
Metadata

general.architecture                        glm4moe
general.file_type                           Q4_K_M
glm4moe.attention.head_count                96
glm4moe.attention.head_count_kv             8
glm4moe.attention.key_length                128
glm4moe.attention.layer_norm_rms_epsilon    1e-05
glm4moe.attention.value_length              128
glm4moe.block_count                         47
glm4moe.context_length                      131072
glm4moe.embedding_length                    4096
glm4moe.expert_count                        128
glm4moe.expert_feed_forward_length          1408
glm4moe.expert_gating_func                  2
glm4moe.expert_shared_count                 1
glm4moe.expert_used_count                   8
glm4moe.expert_weights_norm                 true
glm4moe.expert_weights_scale                1
glm4moe.feed_forward_length                 10944
glm4moe.leading_dense_block_count           1
glm4moe.nextn_predict_layers                1
glm4moe.rope.dimension_count                64
glm4moe.rope.freq_base                      1e+06
tokenizer.ggml.bos_token_id                 151331
tokenizer.ggml.eom_token_id                 151338
tokenizer.ggml.eos_token_id                 151329
tokenizer.ggml.eot_token_id                 151336
tokenizer.ggml.merges                       [Ġ Ġ, Ġ ĠĠĠ, ĠĠ ĠĠ, ĠĠĠ Ġ, i n, ...]
tokenizer.ggml.model                        gpt2
tokenizer.ggml.padding_token_id             151330
tokenizer.ggml.pre                          glm4
tokenizer.ggml.token_type                   [1, 1, 1, 1, 1, ...]
tokenizer.ggml.tokens                       [!, ", #, $, %, ...]
tokenizer.ggml.unknown_token_id             151329
quantize.imatrix.chunks_count               88
quantize.imatrix.dataset                    unsloth_calibration_GLM-4.5-Air.txt
quantize.imatrix.entries_count              502
quantize.imatrix.file                       GLM-4.5-Air-GGUF/imatrix_unsloth.gguf
split.count                                 0
split.no                                    0
split.tensors.count                         803
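
These key/value pairs are standard GGUF metadata and can be read programmatically. A sketch using the gguf Python package (pip install gguf, the reader maintained alongside llama.cpp) is shown below; the file path is a placeholder for wherever the model blob lives on disk.

# Sketch: inspect GGUF metadata keys and the tensor listing.
from gguf import GGUFReader

reader = GGUFReader("GLM-4.5-Air-Q4_K_M.gguf")  # placeholder path

# Metadata keys such as glm4moe.context_length or tokenizer.ggml.model
for name in reader.fields:
    print(name)

# Name, quantization type, and shape for each of the 803 tensors
for tensor in reader.tensors:
    print(tensor.name, tensor.tensor_type.name, list(tensor.shape))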
Tensor

Name                                    Type    Shape

token_embd.weight                       Q4_K    [4096, 151552]

blk.0 (leading dense block)
blk.0.attn_k.bias                       F32     [1024]
blk.0.attn_k.weight                     Q4_K    [4096, 1024]
blk.0.attn_norm.weight                  F32     [4096]
blk.0.attn_output.weight                Q4_K    [12288, 4096]
blk.0.attn_q.bias                       F32     [12288]
blk.0.attn_q.weight                     Q4_K    [4096, 12288]
blk.0.attn_v.bias                       F32     [1024]
blk.0.attn_v.weight                     Q6_K    [4096, 1024]
blk.0.ffn_down.weight                   Q8_0    [10944, 4096]
blk.0.ffn_gate.weight                   Q4_K    [4096, 10944]
blk.0.ffn_up.weight                     Q4_K    [4096, 10944]
blk.0.post_attention_norm.weight        F32     [4096]

blk.1 through blk.46 (MoE blocks; N is the block index, per-block variants listed after the pattern)
blk.N.attn_k.bias                       F32     [1024]
blk.N.attn_k.weight                     Q4_K    [4096, 1024]
blk.N.attn_norm.weight                  F32     [4096]
blk.N.attn_output.weight                Q4_K    [12288, 4096]
blk.N.attn_q.bias                       F32     [12288]
blk.N.attn_q.weight                     Q4_K    [4096, 12288]
blk.N.attn_v.bias                       F32     [1024]
blk.N.attn_v.weight                     Q6_K or Q4_K (see list below)    [4096, 1024]
blk.N.exp_probs_b.bias                  F32     [128]
blk.N.ffn_down_exps.weight              Q8_0 or Q5_0 (see list below)    [1408, 4096, 128]
blk.N.ffn_down_shexp.weight             Q8_0 or Q5_0 (see list below)    [1408, 4096]
blk.N.ffn_gate_exps.weight              Q4_K    [4096, 1408, 128]
blk.N.ffn_gate_inp.weight               F32     [4096, 128]
blk.N.ffn_gate_shexp.weight             Q4_K    [4096, 1408]
blk.N.ffn_up_exps.weight                Q4_K    [4096, 1408, 128]
blk.N.ffn_up_shexp.weight               Q4_K    [4096, 1408]
blk.N.post_attention_norm.weight        F32     [4096]

Per-block quantization of attn_v.weight / ffn_down_exps.weight / ffn_down_shexp.weight:
Q6_K / Q8_0:  blocks 1, 2, 3, 4, 7, 10, 13, 16, 19, 22, 25, 28, 31, 34, 37, 40, 41, 42, 43, 44, 45, 46
Q4_K / Q5_0:  blocks 5, 6, 8, 9, 11, 12, 14, 15, 17, 18, 20, 21, 23, 24, 26, 27, 29, 30, 32, 33, 35, 36, 38, 39

blk.46 additionally includes the nextn tensors:
blk.46.nextn.eh_proj.weight             Q4_K    [8192, 4096]
blk.46.nextn.embed_tokens.weight        Q4_K    [4096, 151552]
blk.46.nextn.enorm.weight               F32     [4096]
blk.46.nextn.hnorm.weight               F32     [4096]
blk.46.nextn.shared_head_head.weight    Q4_K    [4096, 151552]
blk.46.nextn.shared_head_norm.weight    F32     [4096]

output.weight                           Q6_K    [4096, 151552]
output_norm.weight                      F32     [4096]
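
The tensor shapes above follow directly from the metadata: 96 query heads and 8 KV heads at a head size of 128 give the 12288-wide Q projection and the 1024-wide K/V projections, and the routed experts use the 1408-wide expert FFN across 128 experts. A small sanity-check sketch, with all values copied from the Metadata section:

# Sanity check: attention and MoE tensor shapes derived from the metadata.
head_count    = 96     # glm4moe.attention.head_count
head_count_kv = 8      # glm4moe.attention.head_count_kv
head_size     = 128    # glm4moe.attention.key_length / value_length
embedding     = 4096   # glm4moe.embedding_length
expert_ffn    = 1408   # glm4moe.expert_feed_forward_length
expert_count  = 128    # glm4moe.expert_count

assert head_count * head_size == 12288       # attn_q.weight: [4096, 12288]
assert head_count_kv * head_size == 1024     # attn_k/attn_v.weight: [4096, 1024]
assert (embedding, expert_ffn, expert_count) == (4096, 1408, 128)  # ffn_*_exps: [4096, 1408, 128]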