Models
Docs
Pricing
Sign in
Download
Models
Download
Docs
Pricing
Sign in
qwen3.6
:35b-a3b-q8_0
153
Downloads
Updated
12 hours ago
Qwen3.6 delivers substantial upgrades in agentic coding and thinking preservation compared to previous Qwen models.
Qwen3.6 delivers substantial upgrades in agentic coding and thinking preservation compared to previous Qwen models.
Cancel
vision
tools
thinking
qwen3.6:35b-a3b-q8_0
...
/
model
7d8298ddcbce · 39GB
Metadata
general.architecture
qwen35moe
qwen35moe
general.file_type
Q8_0
Q8_0
qwen35moe.attention.head_count
16
16
qwen35moe.attention.head_count_kv
[0, 0, 0, 2, 0, ...]
[0, 0, 0, 2, 0, ...]
qwen35moe.attention.key_length
256
256
qwen35moe.attention.layer_norm_rms_epsilon
1e-06
1e-06
qwen35moe.attention.value_length
256
256
qwen35moe.block_count
40
40
qwen35moe.context_length
262144
262144
qwen35moe.embedding_length
2048
2048
qwen35moe.expert_count
256
256
qwen35moe.expert_feed_forward_length
512
512
qwen35moe.expert_shared_feed_forward_length
512
512
qwen35moe.expert_used_count
8
8
qwen35moe.feed_forward_length
0
0
qwen35moe.full_attention_interval
4
4
qwen35moe.image_token_id
248056
248056
qwen35moe.mrope_sections
[11, 11, 10]
[11, 11, 10]
qwen35moe.rope.dimension_count
64
64
qwen35moe.rope.dimension_sections
[11, 11, 10]
[11, 11, 10]
qwen35moe.rope.freq_base
1e+07
1e+07
qwen35moe.rope.mrope_interleaved
true
true
qwen35moe.rope.mrope_section
[11, 11, 10]
[11, 11, 10]
qwen35moe.ssm.conv_kernel
4
4
qwen35moe.ssm.group_count
16
16
qwen35moe.ssm.inner_size
4096
4096
qwen35moe.ssm.state_size
128
128
qwen35moe.ssm.time_step_rank
32
32
qwen35moe.ssm.v_head_reordered
true
true
qwen35moe.vision.attention.head_count
16
16
qwen35moe.vision.block_count
27
27
qwen35moe.vision.deepstack_visual_indexes
[]
[]
qwen35moe.vision.embedding_length
1152
1152
qwen35moe.vision.image_mean
[0.5, 0.5, 0.5]
[0.5, 0.5, 0.5]
qwen35moe.vision.image_std
[0.5, 0.5, 0.5]
[0.5, 0.5, 0.5]
qwen35moe.vision.longest_edge
16777216
16777216
qwen35moe.vision.num_channels
3
3
qwen35moe.vision.patch_size
16
16
qwen35moe.vision.shortest_edge
65536
65536
qwen35moe.vision.spatial_merge_size
2
2
qwen35moe.vision.temporal_patch_size
2
2
qwen35moe.vision_end_token_id
248054
248054
qwen35moe.vision_start_token_id
248053
248053
tokenizer.ggml.add_eos_token
false
false
tokenizer.ggml.add_padding_token
false
false
tokenizer.ggml.eos_token_id
248046
248046
tokenizer.ggml.eos_token_ids
[248046, 248044]
[248046, 248044]
tokenizer.ggml.merges
[Ġ Ġ, ĠĠ ĠĠ, i n, Ġ t, ĠĠĠĠ ĠĠĠĠ, ...]
[Ġ Ġ, ĠĠ ĠĠ, i n, Ġ t, ĠĠĠĠ ĠĠĠĠ, ...]
tokenizer.ggml.model
gpt2
gpt2
tokenizer.ggml.padding_token_id
248044
248044
tokenizer.ggml.pre
qwen35
qwen35
tokenizer.ggml.scores
[0, 1, 2, 3, 4, ...]
[0, 1, 2, 3, 4, ...]
tokenizer.ggml.token_type
[1, 1, 1, 1, 1, ...]
[1, 1, 1, 1, 1, ...]
tokenizer.ggml.tokens
[!, ", #, $, %, ...]
[!, ", #, $, %, ...]
Tensor
Name
Type
Shape
token_embd.weight
Q8_0
Q8_0
[2048, 248320]
blk.0
blk.0.attn_gate.weight
Q8_0
Q8_0
[2048, 4096]
blk.0.attn_norm.weight
F32
F32
[2048]
blk.0.attn_qkv.weight
Q8_0
Q8_0
[2048, 8192]
blk.0.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.0.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.0.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.0.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.0.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.0.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.0.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.0.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.0.post_attention_norm.weight
F32
F32
[2048]
blk.0.ssm_a
F32
F32
[32]
blk.0.ssm_alpha.weight
Q8_0
Q8_0
[2048, 32]
blk.0.ssm_beta.weight
Q8_0
Q8_0
[2048, 32]
blk.0.ssm_conv1d.weight
F32
F32
[4, 8192]
blk.0.ssm_dt
F32
F32
[32]
blk.0.ssm_norm.weight
F32
F32
[128]
blk.0.ssm_out.weight
Q8_0
Q8_0
[4096, 2048]
blk.1
blk.1.attn_gate.weight
Q8_0
Q8_0
[2048, 4096]
blk.1.attn_norm.weight
F32
F32
[2048]
blk.1.attn_qkv.weight
Q8_0
Q8_0
[2048, 8192]
blk.1.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.1.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.1.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.1.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.1.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.1.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.1.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.1.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.1.post_attention_norm.weight
F32
F32
[2048]
blk.1.ssm_a
F32
F32
[32]
blk.1.ssm_alpha.weight
Q8_0
Q8_0
[2048, 32]
blk.1.ssm_beta.weight
Q8_0
Q8_0
[2048, 32]
blk.1.ssm_conv1d.weight
F32
F32
[4, 8192]
blk.1.ssm_dt
F32
F32
[32]
blk.1.ssm_norm.weight
F32
F32
[128]
blk.1.ssm_out.weight
Q8_0
Q8_0
[4096, 2048]
blk.2
blk.2.attn_gate.weight
Q8_0
Q8_0
[2048, 4096]
blk.2.attn_norm.weight
F32
F32
[2048]
blk.2.attn_qkv.weight
Q8_0
Q8_0
[2048, 8192]
blk.2.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.2.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.2.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.2.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.2.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.2.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.2.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.2.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.2.post_attention_norm.weight
F32
F32
[2048]
blk.2.ssm_a
F32
F32
[32]
blk.2.ssm_alpha.weight
Q8_0
Q8_0
[2048, 32]
blk.2.ssm_beta.weight
Q8_0
Q8_0
[2048, 32]
blk.2.ssm_conv1d.weight
F32
F32
[4, 8192]
blk.2.ssm_dt
F32
F32
[32]
blk.2.ssm_norm.weight
F32
F32
[128]
blk.2.ssm_out.weight
Q8_0
Q8_0
[4096, 2048]
blk.3
blk.3.attn_k.weight
Q8_0
Q8_0
[2048, 512]
blk.3.attn_k_norm.weight
F32
F32
[256]
blk.3.attn_norm.weight
F32
F32
[2048]
blk.3.attn_output.weight
Q8_0
Q8_0
[4096, 2048]
blk.3.attn_q.weight
Q8_0
Q8_0
[2048, 8192]
blk.3.attn_q_norm.weight
F32
F32
[256]
blk.3.attn_v.weight
Q8_0
Q8_0
[2048, 512]
blk.3.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.3.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.3.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.3.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.3.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.3.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.3.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.3.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.3.post_attention_norm.weight
F32
F32
[2048]
blk.4
blk.4.attn_gate.weight
Q8_0
Q8_0
[2048, 4096]
blk.4.attn_norm.weight
F32
F32
[2048]
blk.4.attn_qkv.weight
Q8_0
Q8_0
[2048, 8192]
blk.4.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.4.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.4.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.4.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.4.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.4.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.4.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.4.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.4.post_attention_norm.weight
F32
F32
[2048]
blk.4.ssm_a
F32
F32
[32]
blk.4.ssm_alpha.weight
Q8_0
Q8_0
[2048, 32]
blk.4.ssm_beta.weight
Q8_0
Q8_0
[2048, 32]
blk.4.ssm_conv1d.weight
F32
F32
[4, 8192]
blk.4.ssm_dt
F32
F32
[32]
blk.4.ssm_norm.weight
F32
F32
[128]
blk.4.ssm_out.weight
Q8_0
Q8_0
[4096, 2048]
blk.5
blk.5.attn_gate.weight
Q8_0
Q8_0
[2048, 4096]
blk.5.attn_norm.weight
F32
F32
[2048]
blk.5.attn_qkv.weight
Q8_0
Q8_0
[2048, 8192]
blk.5.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.5.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.5.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.5.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.5.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.5.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.5.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.5.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.5.post_attention_norm.weight
F32
F32
[2048]
blk.5.ssm_a
F32
F32
[32]
blk.5.ssm_alpha.weight
Q8_0
Q8_0
[2048, 32]
blk.5.ssm_beta.weight
Q8_0
Q8_0
[2048, 32]
blk.5.ssm_conv1d.weight
F32
F32
[4, 8192]
blk.5.ssm_dt
F32
F32
[32]
blk.5.ssm_norm.weight
F32
F32
[128]
blk.5.ssm_out.weight
Q8_0
Q8_0
[4096, 2048]
blk.6
blk.6.attn_gate.weight
Q8_0
Q8_0
[2048, 4096]
blk.6.attn_norm.weight
F32
F32
[2048]
blk.6.attn_qkv.weight
Q8_0
Q8_0
[2048, 8192]
blk.6.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.6.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.6.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.6.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.6.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.6.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.6.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.6.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.6.post_attention_norm.weight
F32
F32
[2048]
blk.6.ssm_a
F32
F32
[32]
blk.6.ssm_alpha.weight
Q8_0
Q8_0
[2048, 32]
blk.6.ssm_beta.weight
Q8_0
Q8_0
[2048, 32]
blk.6.ssm_conv1d.weight
F32
F32
[4, 8192]
blk.6.ssm_dt
F32
F32
[32]
blk.6.ssm_norm.weight
F32
F32
[128]
blk.6.ssm_out.weight
Q8_0
Q8_0
[4096, 2048]
blk.7
blk.7.attn_k.weight
Q8_0
Q8_0
[2048, 512]
blk.7.attn_k_norm.weight
F32
F32
[256]
blk.7.attn_norm.weight
F32
F32
[2048]
blk.7.attn_output.weight
Q8_0
Q8_0
[4096, 2048]
blk.7.attn_q.weight
Q8_0
Q8_0
[2048, 8192]
blk.7.attn_q_norm.weight
F32
F32
[256]
blk.7.attn_v.weight
Q8_0
Q8_0
[2048, 512]
blk.7.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.7.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.7.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.7.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.7.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.7.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.7.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.7.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.7.post_attention_norm.weight
F32
F32
[2048]
blk.8
blk.8.attn_gate.weight
Q8_0
Q8_0
[2048, 4096]
blk.8.attn_norm.weight
F32
F32
[2048]
blk.8.attn_qkv.weight
Q8_0
Q8_0
[2048, 8192]
blk.8.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.8.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.8.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.8.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.8.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.8.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.8.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.8.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.8.post_attention_norm.weight
F32
F32
[2048]
blk.8.ssm_a
F32
F32
[32]
blk.8.ssm_alpha.weight
Q8_0
Q8_0
[2048, 32]
blk.8.ssm_beta.weight
Q8_0
Q8_0
[2048, 32]
blk.8.ssm_conv1d.weight
F32
F32
[4, 8192]
blk.8.ssm_dt
F32
F32
[32]
blk.8.ssm_norm.weight
F32
F32
[128]
blk.8.ssm_out.weight
Q8_0
Q8_0
[4096, 2048]
blk.9
blk.9.attn_gate.weight
Q8_0
Q8_0
[2048, 4096]
blk.9.attn_norm.weight
F32
F32
[2048]
blk.9.attn_qkv.weight
Q8_0
Q8_0
[2048, 8192]
blk.9.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.9.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.9.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.9.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.9.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.9.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.9.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.9.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.9.post_attention_norm.weight
F32
F32
[2048]
blk.9.ssm_a
F32
F32
[32]
blk.9.ssm_alpha.weight
Q8_0
Q8_0
[2048, 32]
blk.9.ssm_beta.weight
Q8_0
Q8_0
[2048, 32]
blk.9.ssm_conv1d.weight
F32
F32
[4, 8192]
blk.9.ssm_dt
F32
F32
[32]
blk.9.ssm_norm.weight
F32
F32
[128]
blk.9.ssm_out.weight
Q8_0
Q8_0
[4096, 2048]
blk.10
blk.10.attn_gate.weight
Q8_0
Q8_0
[2048, 4096]
blk.10.attn_norm.weight
F32
F32
[2048]
blk.10.attn_qkv.weight
Q8_0
Q8_0
[2048, 8192]
blk.10.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.10.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.10.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.10.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.10.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.10.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.10.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.10.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.10.post_attention_norm.weight
F32
F32
[2048]
blk.10.ssm_a
F32
F32
[32]
blk.10.ssm_alpha.weight
Q8_0
Q8_0
[2048, 32]
blk.10.ssm_beta.weight
Q8_0
Q8_0
[2048, 32]
blk.10.ssm_conv1d.weight
F32
F32
[4, 8192]
blk.10.ssm_dt
F32
F32
[32]
blk.10.ssm_norm.weight
F32
F32
[128]
blk.10.ssm_out.weight
Q8_0
Q8_0
[4096, 2048]
blk.11
blk.11.attn_k.weight
Q8_0
Q8_0
[2048, 512]
blk.11.attn_k_norm.weight
F32
F32
[256]
blk.11.attn_norm.weight
F32
F32
[2048]
blk.11.attn_output.weight
Q8_0
Q8_0
[4096, 2048]
blk.11.attn_q.weight
Q8_0
Q8_0
[2048, 8192]
blk.11.attn_q_norm.weight
F32
F32
[256]
blk.11.attn_v.weight
Q8_0
Q8_0
[2048, 512]
blk.11.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.11.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.11.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.11.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.11.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.11.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.11.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.11.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.11.post_attention_norm.weight
F32
F32
[2048]
blk.12
blk.12.attn_gate.weight
Q8_0
Q8_0
[2048, 4096]
blk.12.attn_norm.weight
F32
F32
[2048]
blk.12.attn_qkv.weight
Q8_0
Q8_0
[2048, 8192]
blk.12.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.12.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.12.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.12.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.12.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.12.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.12.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.12.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.12.post_attention_norm.weight
F32
F32
[2048]
blk.12.ssm_a
F32
F32
[32]
blk.12.ssm_alpha.weight
Q8_0
Q8_0
[2048, 32]
blk.12.ssm_beta.weight
Q8_0
Q8_0
[2048, 32]
blk.12.ssm_conv1d.weight
F32
F32
[4, 8192]
blk.12.ssm_dt
F32
F32
[32]
blk.12.ssm_norm.weight
F32
F32
[128]
blk.12.ssm_out.weight
Q8_0
Q8_0
[4096, 2048]
blk.13
blk.13.attn_gate.weight
Q8_0
Q8_0
[2048, 4096]
blk.13.attn_norm.weight
F32
F32
[2048]
blk.13.attn_qkv.weight
Q8_0
Q8_0
[2048, 8192]
blk.13.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.13.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.13.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.13.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.13.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.13.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.13.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.13.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.13.post_attention_norm.weight
F32
F32
[2048]
blk.13.ssm_a
F32
F32
[32]
blk.13.ssm_alpha.weight
Q8_0
Q8_0
[2048, 32]
blk.13.ssm_beta.weight
Q8_0
Q8_0
[2048, 32]
blk.13.ssm_conv1d.weight
F32
F32
[4, 8192]
blk.13.ssm_dt
F32
F32
[32]
blk.13.ssm_norm.weight
F32
F32
[128]
blk.13.ssm_out.weight
Q8_0
Q8_0
[4096, 2048]
blk.14
blk.14.attn_gate.weight
Q8_0
Q8_0
[2048, 4096]
blk.14.attn_norm.weight
F32
F32
[2048]
blk.14.attn_qkv.weight
Q8_0
Q8_0
[2048, 8192]
blk.14.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.14.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.14.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.14.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.14.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.14.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.14.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.14.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.14.post_attention_norm.weight
F32
F32
[2048]
blk.14.ssm_a
F32
F32
[32]
blk.14.ssm_alpha.weight
Q8_0
Q8_0
[2048, 32]
blk.14.ssm_beta.weight
Q8_0
Q8_0
[2048, 32]
blk.14.ssm_conv1d.weight
F32
F32
[4, 8192]
blk.14.ssm_dt
F32
F32
[32]
blk.14.ssm_norm.weight
F32
F32
[128]
blk.14.ssm_out.weight
Q8_0
Q8_0
[4096, 2048]
blk.15
blk.15.attn_k.weight
Q8_0
Q8_0
[2048, 512]
blk.15.attn_k_norm.weight
F32
F32
[256]
blk.15.attn_norm.weight
F32
F32
[2048]
blk.15.attn_output.weight
Q8_0
Q8_0
[4096, 2048]
blk.15.attn_q.weight
Q8_0
Q8_0
[2048, 8192]
blk.15.attn_q_norm.weight
F32
F32
[256]
blk.15.attn_v.weight
Q8_0
Q8_0
[2048, 512]
blk.15.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.15.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.15.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.15.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.15.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.15.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.15.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.15.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.15.post_attention_norm.weight
F32
F32
[2048]
blk.16
blk.16.attn_gate.weight
Q8_0
Q8_0
[2048, 4096]
blk.16.attn_norm.weight
F32
F32
[2048]
blk.16.attn_qkv.weight
Q8_0
Q8_0
[2048, 8192]
blk.16.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.16.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.16.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.16.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.16.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.16.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.16.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.16.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.16.post_attention_norm.weight
F32
F32
[2048]
blk.16.ssm_a
F32
F32
[32]
blk.16.ssm_alpha.weight
Q8_0
Q8_0
[2048, 32]
blk.16.ssm_beta.weight
Q8_0
Q8_0
[2048, 32]
blk.16.ssm_conv1d.weight
F32
F32
[4, 8192]
blk.16.ssm_dt
F32
F32
[32]
blk.16.ssm_norm.weight
F32
F32
[128]
blk.16.ssm_out.weight
Q8_0
Q8_0
[4096, 2048]
blk.17
blk.17.attn_gate.weight
Q8_0
Q8_0
[2048, 4096]
blk.17.attn_norm.weight
F32
F32
[2048]
blk.17.attn_qkv.weight
Q8_0
Q8_0
[2048, 8192]
blk.17.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.17.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.17.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.17.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.17.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.17.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.17.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.17.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.17.post_attention_norm.weight
F32
F32
[2048]
blk.17.ssm_a
F32
F32
[32]
blk.17.ssm_alpha.weight
Q8_0
Q8_0
[2048, 32]
blk.17.ssm_beta.weight
Q8_0
Q8_0
[2048, 32]
blk.17.ssm_conv1d.weight
F32
F32
[4, 8192]
blk.17.ssm_dt
F32
F32
[32]
blk.17.ssm_norm.weight
F32
F32
[128]
blk.17.ssm_out.weight
Q8_0
Q8_0
[4096, 2048]
blk.18
blk.18.attn_gate.weight
Q8_0
Q8_0
[2048, 4096]
blk.18.attn_norm.weight
F32
F32
[2048]
blk.18.attn_qkv.weight
Q8_0
Q8_0
[2048, 8192]
blk.18.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.18.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.18.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.18.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.18.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.18.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.18.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.18.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.18.post_attention_norm.weight
F32
F32
[2048]
blk.18.ssm_a
F32
F32
[32]
blk.18.ssm_alpha.weight
Q8_0
Q8_0
[2048, 32]
blk.18.ssm_beta.weight
Q8_0
Q8_0
[2048, 32]
blk.18.ssm_conv1d.weight
F32
F32
[4, 8192]
blk.18.ssm_dt
F32
F32
[32]
blk.18.ssm_norm.weight
F32
F32
[128]
blk.18.ssm_out.weight
Q8_0
Q8_0
[4096, 2048]
blk.19
blk.19.attn_k.weight
Q8_0
Q8_0
[2048, 512]
blk.19.attn_k_norm.weight
F32
F32
[256]
blk.19.attn_norm.weight
F32
F32
[2048]
blk.19.attn_output.weight
Q8_0
Q8_0
[4096, 2048]
blk.19.attn_q.weight
Q8_0
Q8_0
[2048, 8192]
blk.19.attn_q_norm.weight
F32
F32
[256]
blk.19.attn_v.weight
Q8_0
Q8_0
[2048, 512]
blk.19.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.19.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.19.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.19.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.19.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.19.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.19.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.19.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.19.post_attention_norm.weight
F32
F32
[2048]
blk.20
blk.20.attn_gate.weight
Q8_0
Q8_0
[2048, 4096]
blk.20.attn_norm.weight
F32
F32
[2048]
blk.20.attn_qkv.weight
Q8_0
Q8_0
[2048, 8192]
blk.20.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.20.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.20.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.20.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.20.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.20.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.20.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.20.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.20.post_attention_norm.weight
F32
F32
[2048]
blk.20.ssm_a
F32
F32
[32]
blk.20.ssm_alpha.weight
Q8_0
Q8_0
[2048, 32]
blk.20.ssm_beta.weight
Q8_0
Q8_0
[2048, 32]
blk.20.ssm_conv1d.weight
F32
F32
[4, 8192]
blk.20.ssm_dt
F32
F32
[32]
blk.20.ssm_norm.weight
F32
F32
[128]
blk.20.ssm_out.weight
Q8_0
Q8_0
[4096, 2048]
blk.21
blk.21.attn_gate.weight
Q8_0
Q8_0
[2048, 4096]
blk.21.attn_norm.weight
F32
F32
[2048]
blk.21.attn_qkv.weight
Q8_0
Q8_0
[2048, 8192]
blk.21.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.21.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.21.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.21.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.21.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.21.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.21.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.21.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.21.post_attention_norm.weight
F32
F32
[2048]
blk.21.ssm_a
F32
F32
[32]
blk.21.ssm_alpha.weight
Q8_0
Q8_0
[2048, 32]
blk.21.ssm_beta.weight
Q8_0
Q8_0
[2048, 32]
blk.21.ssm_conv1d.weight
F32
F32
[4, 8192]
blk.21.ssm_dt
F32
F32
[32]
blk.21.ssm_norm.weight
F32
F32
[128]
blk.21.ssm_out.weight
Q8_0
Q8_0
[4096, 2048]
blk.22
blk.22.attn_gate.weight
Q8_0
Q8_0
[2048, 4096]
blk.22.attn_norm.weight
F32
F32
[2048]
blk.22.attn_qkv.weight
Q8_0
Q8_0
[2048, 8192]
blk.22.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.22.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.22.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.22.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.22.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.22.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.22.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.22.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.22.post_attention_norm.weight
F32
F32
[2048]
blk.22.ssm_a
F32
F32
[32]
blk.22.ssm_alpha.weight
Q8_0
Q8_0
[2048, 32]
blk.22.ssm_beta.weight
Q8_0
Q8_0
[2048, 32]
blk.22.ssm_conv1d.weight
F32
F32
[4, 8192]
blk.22.ssm_dt
F32
F32
[32]
blk.22.ssm_norm.weight
F32
F32
[128]
blk.22.ssm_out.weight
Q8_0
Q8_0
[4096, 2048]
blk.23
blk.23.attn_k.weight
Q8_0
Q8_0
[2048, 512]
blk.23.attn_k_norm.weight
F32
F32
[256]
blk.23.attn_norm.weight
F32
F32
[2048]
blk.23.attn_output.weight
Q8_0
Q8_0
[4096, 2048]
blk.23.attn_q.weight
Q8_0
Q8_0
[2048, 8192]
blk.23.attn_q_norm.weight
F32
F32
[256]
blk.23.attn_v.weight
Q8_0
Q8_0
[2048, 512]
blk.23.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.23.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.23.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.23.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.23.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.23.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.23.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.23.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.23.post_attention_norm.weight
F32
F32
[2048]
blk.24
blk.24.attn_gate.weight
Q8_0
Q8_0
[2048, 4096]
blk.24.attn_norm.weight
F32
F32
[2048]
blk.24.attn_qkv.weight
Q8_0
Q8_0
[2048, 8192]
blk.24.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.24.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.24.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.24.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.24.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.24.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.24.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.24.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.24.post_attention_norm.weight
F32
F32
[2048]
blk.24.ssm_a
F32
F32
[32]
blk.24.ssm_alpha.weight
Q8_0
Q8_0
[2048, 32]
blk.24.ssm_beta.weight
Q8_0
Q8_0
[2048, 32]
blk.24.ssm_conv1d.weight
F32
F32
[4, 8192]
blk.24.ssm_dt
F32
F32
[32]
blk.24.ssm_norm.weight
F32
F32
[128]
blk.24.ssm_out.weight
Q8_0
Q8_0
[4096, 2048]
blk.25
blk.25.attn_gate.weight
Q8_0
Q8_0
[2048, 4096]
blk.25.attn_norm.weight
F32
F32
[2048]
blk.25.attn_qkv.weight
Q8_0
Q8_0
[2048, 8192]
blk.25.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.25.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.25.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.25.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.25.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.25.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.25.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.25.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.25.post_attention_norm.weight
F32
F32
[2048]
blk.25.ssm_a
F32
F32
[32]
blk.25.ssm_alpha.weight
Q8_0
Q8_0
[2048, 32]
blk.25.ssm_beta.weight
Q8_0
Q8_0
[2048, 32]
blk.25.ssm_conv1d.weight
F32
F32
[4, 8192]
blk.25.ssm_dt
F32
F32
[32]
blk.25.ssm_norm.weight
F32
F32
[128]
blk.25.ssm_out.weight
Q8_0
Q8_0
[4096, 2048]
blk.26
blk.26.attn_gate.weight
Q8_0
Q8_0
[2048, 4096]
blk.26.attn_norm.weight
F32
F32
[2048]
blk.26.attn_qkv.weight
Q8_0
Q8_0
[2048, 8192]
blk.26.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.26.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.26.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.26.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.26.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.26.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.26.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.26.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.26.post_attention_norm.weight
F32
F32
[2048]
blk.26.ssm_a
F32
F32
[32]
blk.26.ssm_alpha.weight
Q8_0
Q8_0
[2048, 32]
blk.26.ssm_beta.weight
Q8_0
Q8_0
[2048, 32]
blk.26.ssm_conv1d.weight
F32
F32
[4, 8192]
blk.26.ssm_dt
F32
F32
[32]
blk.26.ssm_norm.weight
F32
F32
[128]
blk.26.ssm_out.weight
Q8_0
Q8_0
[4096, 2048]
blk.27
blk.27.attn_k.weight
Q8_0
Q8_0
[2048, 512]
blk.27.attn_k_norm.weight
F32
F32
[256]
blk.27.attn_norm.weight
F32
F32
[2048]
blk.27.attn_output.weight
Q8_0
Q8_0
[4096, 2048]
blk.27.attn_q.weight
Q8_0
Q8_0
[2048, 8192]
blk.27.attn_q_norm.weight
F32
F32
[256]
blk.27.attn_v.weight
Q8_0
Q8_0
[2048, 512]
blk.27.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.27.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.27.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.27.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.27.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.27.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.27.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.27.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.27.post_attention_norm.weight
F32
F32
[2048]
blk.28
blk.28.attn_gate.weight
Q8_0
Q8_0
[2048, 4096]
blk.28.attn_norm.weight
F32
F32
[2048]
blk.28.attn_qkv.weight
Q8_0
Q8_0
[2048, 8192]
blk.28.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.28.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.28.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.28.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.28.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.28.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.28.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.28.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.28.post_attention_norm.weight
F32
F32
[2048]
blk.28.ssm_a
F32
F32
[32]
blk.28.ssm_alpha.weight
Q8_0
Q8_0
[2048, 32]
blk.28.ssm_beta.weight
Q8_0
Q8_0
[2048, 32]
blk.28.ssm_conv1d.weight
F32
F32
[4, 8192]
blk.28.ssm_dt
F32
F32
[32]
blk.28.ssm_norm.weight
F32
F32
[128]
blk.28.ssm_out.weight
Q8_0
Q8_0
[4096, 2048]
blk.29
blk.29.attn_gate.weight
Q8_0
Q8_0
[2048, 4096]
blk.29.attn_norm.weight
F32
F32
[2048]
blk.29.attn_qkv.weight
Q8_0
Q8_0
[2048, 8192]
blk.29.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.29.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.29.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.29.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.29.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.29.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.29.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.29.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.29.post_attention_norm.weight
F32
F32
[2048]
blk.29.ssm_a
F32
F32
[32]
blk.29.ssm_alpha.weight
Q8_0
Q8_0
[2048, 32]
blk.29.ssm_beta.weight
Q8_0
Q8_0
[2048, 32]
blk.29.ssm_conv1d.weight
F32
F32
[4, 8192]
blk.29.ssm_dt
F32
F32
[32]
blk.29.ssm_norm.weight
F32
F32
[128]
blk.29.ssm_out.weight
Q8_0
Q8_0
[4096, 2048]
blk.30
blk.30.attn_gate.weight
Q8_0
Q8_0
[2048, 4096]
blk.30.attn_norm.weight
F32
F32
[2048]
blk.30.attn_qkv.weight
Q8_0
Q8_0
[2048, 8192]
blk.30.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.30.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.30.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.30.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.30.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.30.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.30.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.30.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.30.post_attention_norm.weight
F32
F32
[2048]
blk.30.ssm_a
F32
F32
[32]
blk.30.ssm_alpha.weight
Q8_0
Q8_0
[2048, 32]
blk.30.ssm_beta.weight
Q8_0
Q8_0
[2048, 32]
blk.30.ssm_conv1d.weight
F32
F32
[4, 8192]
blk.30.ssm_dt
F32
F32
[32]
blk.30.ssm_norm.weight
F32
F32
[128]
blk.30.ssm_out.weight
Q8_0
Q8_0
[4096, 2048]
blk.31
blk.31.attn_k.weight
Q8_0
Q8_0
[2048, 512]
blk.31.attn_k_norm.weight
F32
F32
[256]
blk.31.attn_norm.weight
F32
F32
[2048]
blk.31.attn_output.weight
Q8_0
Q8_0
[4096, 2048]
blk.31.attn_q.weight
Q8_0
Q8_0
[2048, 8192]
blk.31.attn_q_norm.weight
F32
F32
[256]
blk.31.attn_v.weight
Q8_0
Q8_0
[2048, 512]
blk.31.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.31.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.31.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.31.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.31.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.31.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.31.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.31.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.31.post_attention_norm.weight
F32
F32
[2048]
blk.32
blk.32.attn_gate.weight
Q8_0
Q8_0
[2048, 4096]
blk.32.attn_norm.weight
F32
F32
[2048]
blk.32.attn_qkv.weight
Q8_0
Q8_0
[2048, 8192]
blk.32.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.32.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.32.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.32.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.32.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.32.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.32.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.32.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.32.post_attention_norm.weight
F32
F32
[2048]
blk.32.ssm_a
F32
F32
[32]
blk.32.ssm_alpha.weight
Q8_0
Q8_0
[2048, 32]
blk.32.ssm_beta.weight
Q8_0
Q8_0
[2048, 32]
blk.32.ssm_conv1d.weight
F32
F32
[4, 8192]
blk.32.ssm_dt
F32
F32
[32]
blk.32.ssm_norm.weight
F32
F32
[128]
blk.32.ssm_out.weight
Q8_0
Q8_0
[4096, 2048]
blk.33
blk.33.attn_gate.weight
Q8_0
Q8_0
[2048, 4096]
blk.33.attn_norm.weight
F32
F32
[2048]
blk.33.attn_qkv.weight
Q8_0
Q8_0
[2048, 8192]
blk.33.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.33.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.33.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.33.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.33.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.33.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.33.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.33.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.33.post_attention_norm.weight
F32
F32
[2048]
blk.33.ssm_a
F32
F32
[32]
blk.33.ssm_alpha.weight
Q8_0
Q8_0
[2048, 32]
blk.33.ssm_beta.weight
Q8_0
Q8_0
[2048, 32]
blk.33.ssm_conv1d.weight
F32
F32
[4, 8192]
blk.33.ssm_dt
F32
F32
[32]
blk.33.ssm_norm.weight
F32
F32
[128]
blk.33.ssm_out.weight
Q8_0
Q8_0
[4096, 2048]
blk.34
blk.34.attn_gate.weight
Q8_0
Q8_0
[2048, 4096]
blk.34.attn_norm.weight
F32
F32
[2048]
blk.34.attn_qkv.weight
Q8_0
Q8_0
[2048, 8192]
blk.34.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.34.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.34.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.34.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.34.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.34.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.34.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.34.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.34.post_attention_norm.weight
F32
F32
[2048]
blk.34.ssm_a
F32
F32
[32]
blk.34.ssm_alpha.weight
Q8_0
Q8_0
[2048, 32]
blk.34.ssm_beta.weight
Q8_0
Q8_0
[2048, 32]
blk.34.ssm_conv1d.weight
F32
F32
[4, 8192]
blk.34.ssm_dt
F32
F32
[32]
blk.34.ssm_norm.weight
F32
F32
[128]
blk.34.ssm_out.weight
Q8_0
Q8_0
[4096, 2048]
blk.35
blk.35.attn_k.weight
Q8_0
Q8_0
[2048, 512]
blk.35.attn_k_norm.weight
F32
F32
[256]
blk.35.attn_norm.weight
F32
F32
[2048]
blk.35.attn_output.weight
Q8_0
Q8_0
[4096, 2048]
blk.35.attn_q.weight
Q8_0
Q8_0
[2048, 8192]
blk.35.attn_q_norm.weight
F32
F32
[256]
blk.35.attn_v.weight
Q8_0
Q8_0
[2048, 512]
blk.35.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.35.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.35.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.35.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.35.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.35.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.35.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.35.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.35.post_attention_norm.weight
F32
F32
[2048]
blk.36
blk.36.attn_gate.weight
Q8_0
Q8_0
[2048, 4096]
blk.36.attn_norm.weight
F32
F32
[2048]
blk.36.attn_qkv.weight
Q8_0
Q8_0
[2048, 8192]
blk.36.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.36.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.36.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.36.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.36.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.36.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.36.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.36.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.36.post_attention_norm.weight
F32
F32
[2048]
blk.36.ssm_a
F32
F32
[32]
blk.36.ssm_alpha.weight
Q8_0
Q8_0
[2048, 32]
blk.36.ssm_beta.weight
Q8_0
Q8_0
[2048, 32]
blk.36.ssm_conv1d.weight
F32
F32
[4, 8192]
blk.36.ssm_dt
F32
F32
[32]
blk.36.ssm_norm.weight
F32
F32
[128]
blk.36.ssm_out.weight
Q8_0
Q8_0
[4096, 2048]
blk.37
blk.37.attn_gate.weight
Q8_0
Q8_0
[2048, 4096]
blk.37.attn_norm.weight
F32
F32
[2048]
blk.37.attn_qkv.weight
Q8_0
Q8_0
[2048, 8192]
blk.37.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.37.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.37.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.37.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.37.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.37.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.37.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.37.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.37.post_attention_norm.weight
F32
F32
[2048]
blk.37.ssm_a
F32
F32
[32]
blk.37.ssm_alpha.weight
Q8_0
Q8_0
[2048, 32]
blk.37.ssm_beta.weight
Q8_0
Q8_0
[2048, 32]
blk.37.ssm_conv1d.weight
F32
F32
[4, 8192]
blk.37.ssm_dt
F32
F32
[32]
blk.37.ssm_norm.weight
F32
F32
[128]
blk.37.ssm_out.weight
Q8_0
Q8_0
[4096, 2048]
blk.38
blk.38.attn_gate.weight
Q8_0
Q8_0
[2048, 4096]
blk.38.attn_norm.weight
F32
F32
[2048]
blk.38.attn_qkv.weight
Q8_0
Q8_0
[2048, 8192]
blk.38.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.38.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.38.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.38.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.38.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.38.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.38.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.38.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.38.post_attention_norm.weight
F32
F32
[2048]
blk.38.ssm_a
F32
F32
[32]
blk.38.ssm_alpha.weight
Q8_0
Q8_0
[2048, 32]
blk.38.ssm_beta.weight
Q8_0
Q8_0
[2048, 32]
blk.38.ssm_conv1d.weight
F32
F32
[4, 8192]
blk.38.ssm_dt
F32
F32
[32]
blk.38.ssm_norm.weight
F32
F32
[128]
blk.38.ssm_out.weight
Q8_0
Q8_0
[4096, 2048]
blk.39
blk.39.attn_k.weight
Q8_0
Q8_0
[2048, 512]
blk.39.attn_k_norm.weight
F32
F32
[256]
blk.39.attn_norm.weight
F32
F32
[2048]
blk.39.attn_output.weight
Q8_0
Q8_0
[4096, 2048]
blk.39.attn_q.weight
Q8_0
Q8_0
[2048, 8192]
blk.39.attn_q_norm.weight
F32
F32
[256]
blk.39.attn_v.weight
Q8_0
Q8_0
[2048, 512]
blk.39.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
blk.39.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
blk.39.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.39.ffn_gate_inp.weight
F32
F32
[2048, 256]
blk.39.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
blk.39.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.39.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
blk.39.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
blk.39.post_attention_norm.weight
F32
F32
[2048]
mtp.fc.weight
Q8_0
Q8_0
[4096, 2048]
mtp.layers.0.attn_k.weight
Q8_0
Q8_0
[2048, 512]
mtp.layers.0.attn_k_norm.weight
F32
F32
[256]
mtp.layers.0.attn_norm.weight
F32
F32
[2048]
mtp.layers.0.attn_output.weight
Q8_0
Q8_0
[4096, 2048]
mtp.layers.0.attn_q.weight
Q8_0
Q8_0
[2048, 8192]
mtp.layers.0.attn_q_norm.weight
F32
F32
[256]
mtp.layers.0.attn_v.weight
Q8_0
Q8_0
[2048, 512]
mtp.layers.0.ffn_down_exps.weight
Q8_0
Q8_0
[512, 2048, 256]
mtp.layers.0.ffn_down_shexp.weight
Q8_0
Q8_0
[512, 2048]
mtp.layers.0.ffn_gate_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
mtp.layers.0.ffn_gate_inp.weight
F32
F32
[2048, 256]
mtp.layers.0.ffn_gate_inp_shexp.weight
F16
F16
[2048, 1]
mtp.layers.0.ffn_gate_shexp.weight
Q8_0
Q8_0
[2048, 512]
mtp.layers.0.ffn_up_exps.weight
Q8_0
Q8_0
[2048, 512, 256]
mtp.layers.0.ffn_up_shexp.weight
Q8_0
Q8_0
[2048, 512]
mtp.layers.0.post_attention_norm.weight
F32
F32
[2048]
mtp.norm.weight
F32
F32
[2048]
mtp.pre_fc_norm_embedding.weight
F32
F32
[2048]
mtp.pre_fc_norm_hidden.weight
F32
F32
[2048]
output.weight
Q8_0
Q8_0
[2048, 248320]
v.blk.0
v.blk.0.attn_k.bias
F32
F32
[1152]
v.blk.0.attn_k.weight
F16
F16
[1152, 1152]
v.blk.0.attn_out.bias
F32
F32
[1152]
v.blk.0.attn_out.weight
F16
F16
[1152, 1152]
v.blk.0.attn_q.bias
F32
F32
[1152]
v.blk.0.attn_q.weight
F16
F16
[1152, 1152]
v.blk.0.attn_v.bias
F32
F32
[1152]
v.blk.0.attn_v.weight
F16
F16
[1152, 1152]
v.blk.0.mlp.linear_fc1.bias
F32
F32
[4304]
v.blk.0.mlp.linear_fc1.weight
F16
F16
[1152, 4304]
v.blk.0.mlp.linear_fc2.bias
F32
F32
[1152]
v.blk.0.mlp.linear_fc2.weight
F16
F16
[4304, 1152]
v.blk.0.norm1.bias
F32
F32
[1152]
v.blk.0.norm1.weight
F32
F32
[1152]
v.blk.0.norm2.bias
F32
F32
[1152]
v.blk.0.norm2.weight
F32
F32
[1152]
v.blk.1
v.blk.1.attn_k.bias
F32
F32
[1152]
v.blk.1.attn_k.weight
F16
F16
[1152, 1152]
v.blk.1.attn_out.bias
F32
F32
[1152]
v.blk.1.attn_out.weight
F16
F16
[1152, 1152]
v.blk.1.attn_q.bias
F32
F32
[1152]
v.blk.1.attn_q.weight
F16
F16
[1152, 1152]
v.blk.1.attn_v.bias
F32
F32
[1152]
v.blk.1.attn_v.weight
F16
F16
[1152, 1152]
v.blk.1.mlp.linear_fc1.bias
F32
F32
[4304]
v.blk.1.mlp.linear_fc1.weight
F16
F16
[1152, 4304]
v.blk.1.mlp.linear_fc2.bias
F32
F32
[1152]
v.blk.1.mlp.linear_fc2.weight
F16
F16
[4304, 1152]
v.blk.1.norm1.bias
F32
F32
[1152]
v.blk.1.norm1.weight
F32
F32
[1152]
v.blk.1.norm2.bias
F32
F32
[1152]
v.blk.1.norm2.weight
F32
F32
[1152]
v.blk.2
v.blk.2.attn_k.bias
F32
F32
[1152]
v.blk.2.attn_k.weight
F16
F16
[1152, 1152]
v.blk.2.attn_out.bias
F32
F32
[1152]
v.blk.2.attn_out.weight
F16
F16
[1152, 1152]
v.blk.2.attn_q.bias
F32
F32
[1152]
v.blk.2.attn_q.weight
F16
F16
[1152, 1152]
v.blk.2.attn_v.bias
F32
F32
[1152]
v.blk.2.attn_v.weight
F16
F16
[1152, 1152]
v.blk.2.mlp.linear_fc1.bias
F32
F32
[4304]
v.blk.2.mlp.linear_fc1.weight
F16
F16
[1152, 4304]
v.blk.2.mlp.linear_fc2.bias
F32
F32
[1152]
v.blk.2.mlp.linear_fc2.weight
F16
F16
[4304, 1152]
v.blk.2.norm1.bias
F32
F32
[1152]
v.blk.2.norm1.weight
F32
F32
[1152]
v.blk.2.norm2.bias
F32
F32
[1152]
v.blk.2.norm2.weight
F32
F32
[1152]
v.blk.3
v.blk.3.attn_k.bias
F32
F32
[1152]
v.blk.3.attn_k.weight
F16
F16
[1152, 1152]
v.blk.3.attn_out.bias
F32
F32
[1152]
v.blk.3.attn_out.weight
F16
F16
[1152, 1152]
v.blk.3.attn_q.bias
F32
F32
[1152]
v.blk.3.attn_q.weight
F16
F16
[1152, 1152]
v.blk.3.attn_v.bias
F32
F32
[1152]
v.blk.3.attn_v.weight
F16
F16
[1152, 1152]
v.blk.3.mlp.linear_fc1.bias
F32
F32
[4304]
v.blk.3.mlp.linear_fc1.weight
F16
F16
[1152, 4304]
v.blk.3.mlp.linear_fc2.bias
F32
F32
[1152]
v.blk.3.mlp.linear_fc2.weight
F16
F16
[4304, 1152]
v.blk.3.norm1.bias
F32
F32
[1152]
v.blk.3.norm1.weight
F32
F32
[1152]
v.blk.3.norm2.bias
F32
F32
[1152]
v.blk.3.norm2.weight
F32
F32
[1152]
v.blk.4
v.blk.4.attn_k.bias
F32
F32
[1152]
v.blk.4.attn_k.weight
F16
F16
[1152, 1152]
v.blk.4.attn_out.bias
F32
F32
[1152]
v.blk.4.attn_out.weight
F16
F16
[1152, 1152]
v.blk.4.attn_q.bias
F32
F32
[1152]
v.blk.4.attn_q.weight
F16
F16
[1152, 1152]
v.blk.4.attn_v.bias
F32
F32
[1152]
v.blk.4.attn_v.weight
F16
F16
[1152, 1152]
v.blk.4.mlp.linear_fc1.bias
F32
F32
[4304]
v.blk.4.mlp.linear_fc1.weight
F16
F16
[1152, 4304]
v.blk.4.mlp.linear_fc2.bias
F32
F32
[1152]
v.blk.4.mlp.linear_fc2.weight
F16
F16
[4304, 1152]
v.blk.4.norm1.bias
F32
F32
[1152]
v.blk.4.norm1.weight
F32
F32
[1152]
v.blk.4.norm2.bias
F32
F32
[1152]
v.blk.4.norm2.weight
F32
F32
[1152]
v.blk.5
v.blk.5.attn_k.bias
F32
F32
[1152]
v.blk.5.attn_k.weight
F16
F16
[1152, 1152]
v.blk.5.attn_out.bias
F32
F32
[1152]
v.blk.5.attn_out.weight
F16
F16
[1152, 1152]
v.blk.5.attn_q.bias
F32
F32
[1152]
v.blk.5.attn_q.weight
F16
F16
[1152, 1152]
v.blk.5.attn_v.bias
F32
F32
[1152]
v.blk.5.attn_v.weight
F16
F16
[1152, 1152]
v.blk.5.mlp.linear_fc1.bias
F32
F32
[4304]
v.blk.5.mlp.linear_fc1.weight
F16
F16
[1152, 4304]
v.blk.5.mlp.linear_fc2.bias
F32
F32
[1152]
v.blk.5.mlp.linear_fc2.weight
F16
F16
[4304, 1152]
v.blk.5.norm1.bias
F32
F32
[1152]
v.blk.5.norm1.weight
F32
F32
[1152]
v.blk.5.norm2.bias
F32
F32
[1152]
v.blk.5.norm2.weight
F32
F32
[1152]
v.blk.6
v.blk.6.attn_k.bias
F32
F32
[1152]
v.blk.6.attn_k.weight
F16
F16
[1152, 1152]
v.blk.6.attn_out.bias
F32
F32
[1152]
v.blk.6.attn_out.weight
F16
F16
[1152, 1152]
v.blk.6.attn_q.bias
F32
F32
[1152]
v.blk.6.attn_q.weight
F16
F16
[1152, 1152]
v.blk.6.attn_v.bias
F32
F32
[1152]
v.blk.6.attn_v.weight
F16
F16
[1152, 1152]
v.blk.6.mlp.linear_fc1.bias
F32
F32
[4304]
v.blk.6.mlp.linear_fc1.weight
F16
F16
[1152, 4304]
v.blk.6.mlp.linear_fc2.bias
F32
F32
[1152]
v.blk.6.mlp.linear_fc2.weight
F16
F16
[4304, 1152]
v.blk.6.norm1.bias
F32
F32
[1152]
v.blk.6.norm1.weight
F32
F32
[1152]
v.blk.6.norm2.bias
F32
F32
[1152]
v.blk.6.norm2.weight
F32
F32
[1152]
v.blk.7
v.blk.7.attn_k.bias
F32
F32
[1152]
v.blk.7.attn_k.weight
F16
F16
[1152, 1152]
v.blk.7.attn_out.bias
F32
F32
[1152]
v.blk.7.attn_out.weight
F16
F16
[1152, 1152]
v.blk.7.attn_q.bias
F32
F32
[1152]
v.blk.7.attn_q.weight
F16
F16
[1152, 1152]
v.blk.7.attn_v.bias
F32
F32
[1152]
v.blk.7.attn_v.weight
F16
F16
[1152, 1152]
v.blk.7.mlp.linear_fc1.bias
F32
F32
[4304]
v.blk.7.mlp.linear_fc1.weight
F16
F16
[1152, 4304]
v.blk.7.mlp.linear_fc2.bias
F32
F32
[1152]
v.blk.7.mlp.linear_fc2.weight
F16
F16
[4304, 1152]
v.blk.7.norm1.bias
F32
F32
[1152]
v.blk.7.norm1.weight
F32
F32
[1152]
v.blk.7.norm2.bias
F32
F32
[1152]
v.blk.7.norm2.weight
F32
F32
[1152]
v.blk.8
v.blk.8.attn_k.bias
F32
F32
[1152]
v.blk.8.attn_k.weight
F16
F16
[1152, 1152]
v.blk.8.attn_out.bias
F32
F32
[1152]
v.blk.8.attn_out.weight
F16
F16
[1152, 1152]
v.blk.8.attn_q.bias
F32
F32
[1152]
v.blk.8.attn_q.weight
F16
F16
[1152, 1152]
v.blk.8.attn_v.bias
F32
F32
[1152]
v.blk.8.attn_v.weight
F16
F16
[1152, 1152]
v.blk.8.mlp.linear_fc1.bias
F32
F32
[4304]
v.blk.8.mlp.linear_fc1.weight
F16
F16
[1152, 4304]
v.blk.8.mlp.linear_fc2.bias
F32
F32
[1152]
v.blk.8.mlp.linear_fc2.weight
F16
F16
[4304, 1152]
v.blk.8.norm1.bias
F32
F32
[1152]
v.blk.8.norm1.weight
F32
F32
[1152]
v.blk.8.norm2.bias
F32
F32
[1152]
v.blk.8.norm2.weight
F32
F32
[1152]
v.blk.9
v.blk.9.attn_k.bias
F32
F32
[1152]
v.blk.9.attn_k.weight
F16
F16
[1152, 1152]
v.blk.9.attn_out.bias
F32
F32
[1152]
v.blk.9.attn_out.weight
F16
F16
[1152, 1152]
v.blk.9.attn_q.bias
F32
F32
[1152]
v.blk.9.attn_q.weight
F16
F16
[1152, 1152]
v.blk.9.attn_v.bias
F32
F32
[1152]
v.blk.9.attn_v.weight
F16
F16
[1152, 1152]
v.blk.9.mlp.linear_fc1.bias
F32
F32
[4304]
v.blk.9.mlp.linear_fc1.weight
F16
F16
[1152, 4304]
v.blk.9.mlp.linear_fc2.bias
F32
F32
[1152]
v.blk.9.mlp.linear_fc2.weight
F16
F16
[4304, 1152]
v.blk.9.norm1.bias
F32
F32
[1152]
v.blk.9.norm1.weight
F32
F32
[1152]
v.blk.9.norm2.bias
F32
F32
[1152]
v.blk.9.norm2.weight
F32
F32
[1152]
v.blk.10
v.blk.10.attn_k.bias
F32
F32
[1152]
v.blk.10.attn_k.weight
F16
F16
[1152, 1152]
v.blk.10.attn_out.bias
F32
F32
[1152]
v.blk.10.attn_out.weight
F16
F16
[1152, 1152]
v.blk.10.attn_q.bias
F32
F32
[1152]
v.blk.10.attn_q.weight
F16
F16
[1152, 1152]
v.blk.10.attn_v.bias
F32
F32
[1152]
v.blk.10.attn_v.weight
F16
F16
[1152, 1152]
v.blk.10.mlp.linear_fc1.bias
F32
F32
[4304]
v.blk.10.mlp.linear_fc1.weight
F16
F16
[1152, 4304]
v.blk.10.mlp.linear_fc2.bias
F32
F32
[1152]
v.blk.10.mlp.linear_fc2.weight
F16
F16
[4304, 1152]
v.blk.10.norm1.bias
F32
F32
[1152]
v.blk.10.norm1.weight
F32
F32
[1152]
v.blk.10.norm2.bias
F32
F32
[1152]
v.blk.10.norm2.weight
F32
F32
[1152]
v.blk.11
v.blk.11.attn_k.bias
F32
F32
[1152]
v.blk.11.attn_k.weight
F16
F16
[1152, 1152]
v.blk.11.attn_out.bias
F32
F32
[1152]
v.blk.11.attn_out.weight
F16
F16
[1152, 1152]
v.blk.11.attn_q.bias
F32
F32
[1152]
v.blk.11.attn_q.weight
F16
F16
[1152, 1152]
v.blk.11.attn_v.bias
F32
F32
[1152]
v.blk.11.attn_v.weight
F16
F16
[1152, 1152]
v.blk.11.mlp.linear_fc1.bias
F32
F32
[4304]
v.blk.11.mlp.linear_fc1.weight
F16
F16
[1152, 4304]
v.blk.11.mlp.linear_fc2.bias
F32
F32
[1152]
v.blk.11.mlp.linear_fc2.weight
F16
F16
[4304, 1152]
v.blk.11.norm1.bias
F32
F32
[1152]
v.blk.11.norm1.weight
F32
F32
[1152]
v.blk.11.norm2.bias
F32
F32
[1152]
v.blk.11.norm2.weight
F32
F32
[1152]
v.blk.12
v.blk.12.attn_k.bias
F32
F32
[1152]
v.blk.12.attn_k.weight
F16
F16
[1152, 1152]
v.blk.12.attn_out.bias
F32
F32
[1152]
v.blk.12.attn_out.weight
F16
F16
[1152, 1152]
v.blk.12.attn_q.bias
F32
F32
[1152]
v.blk.12.attn_q.weight
F16
F16
[1152, 1152]
v.blk.12.attn_v.bias
F32
F32
[1152]
v.blk.12.attn_v.weight
F16
F16
[1152, 1152]
v.blk.12.mlp.linear_fc1.bias
F32
F32
[4304]
v.blk.12.mlp.linear_fc1.weight
F16
F16
[1152, 4304]
v.blk.12.mlp.linear_fc2.bias
F32
F32
[1152]
v.blk.12.mlp.linear_fc2.weight
F16
F16
[4304, 1152]
v.blk.12.norm1.bias
F32
F32
[1152]
v.blk.12.norm1.weight
F32
F32
[1152]
v.blk.12.norm2.bias
F32
F32
[1152]
v.blk.12.norm2.weight
F32
F32
[1152]
v.blk.13
v.blk.13.attn_k.bias
F32
F32
[1152]
v.blk.13.attn_k.weight
F16
F16
[1152, 1152]
v.blk.13.attn_out.bias
F32
F32
[1152]
v.blk.13.attn_out.weight
F16
F16
[1152, 1152]
v.blk.13.attn_q.bias
F32
F32
[1152]
v.blk.13.attn_q.weight
F16
F16
[1152, 1152]
v.blk.13.attn_v.bias
F32
F32
[1152]
v.blk.13.attn_v.weight
F16
F16
[1152, 1152]
v.blk.13.mlp.linear_fc1.bias
F32
F32
[4304]
v.blk.13.mlp.linear_fc1.weight
F16
F16
[1152, 4304]
v.blk.13.mlp.linear_fc2.bias
F32
F32
[1152]
v.blk.13.mlp.linear_fc2.weight
F16
F16
[4304, 1152]
v.blk.13.norm1.bias
F32
F32
[1152]
v.blk.13.norm1.weight
F32
F32
[1152]
v.blk.13.norm2.bias
F32
F32
[1152]
v.blk.13.norm2.weight
F32
F32
[1152]
v.blk.14
v.blk.14.attn_k.bias
F32
F32
[1152]
v.blk.14.attn_k.weight
F16
F16
[1152, 1152]
v.blk.14.attn_out.bias
F32
F32
[1152]
v.blk.14.attn_out.weight
F16
F16
[1152, 1152]
v.blk.14.attn_q.bias
F32
F32
[1152]
v.blk.14.attn_q.weight
F16
F16
[1152, 1152]
v.blk.14.attn_v.bias
F32
F32
[1152]
v.blk.14.attn_v.weight
F16
F16
[1152, 1152]
v.blk.14.mlp.linear_fc1.bias
F32
F32
[4304]
v.blk.14.mlp.linear_fc1.weight
F16
F16
[1152, 4304]
v.blk.14.mlp.linear_fc2.bias
F32
F32
[1152]
v.blk.14.mlp.linear_fc2.weight
F16
F16
[4304, 1152]
v.blk.14.norm1.bias
F32
F32
[1152]
v.blk.14.norm1.weight
F32
F32
[1152]
v.blk.14.norm2.bias
F32
F32
[1152]
v.blk.14.norm2.weight
F32
F32
[1152]
v.blk.15
v.blk.15.attn_k.bias
F32
F32
[1152]
v.blk.15.attn_k.weight
F16
F16
[1152, 1152]
v.blk.15.attn_out.bias
F32
F32
[1152]
v.blk.15.attn_out.weight
F16
F16
[1152, 1152]
v.blk.15.attn_q.bias
F32
F32
[1152]
v.blk.15.attn_q.weight
F16
F16
[1152, 1152]
v.blk.15.attn_v.bias
F32
F32
[1152]
v.blk.15.attn_v.weight
F16
F16
[1152, 1152]
v.blk.15.mlp.linear_fc1.bias
F32
F32
[4304]
v.blk.15.mlp.linear_fc1.weight
F16
F16
[1152, 4304]
v.blk.15.mlp.linear_fc2.bias
F32
F32
[1152]
v.blk.15.mlp.linear_fc2.weight
F16
F16
[4304, 1152]
v.blk.15.norm1.bias
F32
F32
[1152]
v.blk.15.norm1.weight
F32
F32
[1152]
v.blk.15.norm2.bias
F32
F32
[1152]
v.blk.15.norm2.weight
F32
F32
[1152]
v.blk.16
v.blk.16.attn_k.bias
F32
F32
[1152]
v.blk.16.attn_k.weight
F16
F16
[1152, 1152]
v.blk.16.attn_out.bias
F32
F32
[1152]
v.blk.16.attn_out.weight
F16
F16
[1152, 1152]
v.blk.16.attn_q.bias
F32
F32
[1152]
v.blk.16.attn_q.weight
F16
F16
[1152, 1152]
v.blk.16.attn_v.bias
F32
F32
[1152]
v.blk.16.attn_v.weight
F16
F16
[1152, 1152]
v.blk.16.mlp.linear_fc1.bias
F32
F32
[4304]
v.blk.16.mlp.linear_fc1.weight
F16
F16
[1152, 4304]
v.blk.16.mlp.linear_fc2.bias
F32
F32
[1152]
v.blk.16.mlp.linear_fc2.weight
F16
F16
[4304, 1152]
v.blk.16.norm1.bias
F32
F32
[1152]
v.blk.16.norm1.weight
F32
F32
[1152]
v.blk.16.norm2.bias
F32
F32
[1152]
v.blk.16.norm2.weight
F32
F32
[1152]
v.blk.17
v.blk.17.attn_k.bias
F32
F32
[1152]
v.blk.17.attn_k.weight
F16
F16
[1152, 1152]
v.blk.17.attn_out.bias
F32
F32
[1152]
v.blk.17.attn_out.weight
F16
F16
[1152, 1152]
v.blk.17.attn_q.bias
F32
F32
[1152]
v.blk.17.attn_q.weight
F16
F16
[1152, 1152]
v.blk.17.attn_v.bias
F32
F32
[1152]
v.blk.17.attn_v.weight
F16
F16
[1152, 1152]
v.blk.17.mlp.linear_fc1.bias
F32
F32
[4304]
v.blk.17.mlp.linear_fc1.weight
F16
F16
[1152, 4304]
v.blk.17.mlp.linear_fc2.bias
F32
F32
[1152]
v.blk.17.mlp.linear_fc2.weight
F16
F16
[4304, 1152]
v.blk.17.norm1.bias
F32
F32
[1152]
v.blk.17.norm1.weight
F32
F32
[1152]
v.blk.17.norm2.bias
F32
F32
[1152]
v.blk.17.norm2.weight
F32
F32
[1152]
v.blk.18
v.blk.18.attn_k.bias
F32
F32
[1152]
v.blk.18.attn_k.weight
F16
F16
[1152, 1152]
v.blk.18.attn_out.bias
F32
F32
[1152]
v.blk.18.attn_out.weight
F16
F16
[1152, 1152]
v.blk.18.attn_q.bias
F32
F32
[1152]
v.blk.18.attn_q.weight
F16
F16
[1152, 1152]
v.blk.18.attn_v.bias
F32
F32
[1152]
v.blk.18.attn_v.weight
F16
F16
[1152, 1152]
v.blk.18.mlp.linear_fc1.bias
F32
F32
[4304]
v.blk.18.mlp.linear_fc1.weight
F16
F16
[1152, 4304]
v.blk.18.mlp.linear_fc2.bias
F32
F32
[1152]
v.blk.18.mlp.linear_fc2.weight
F16
F16
[4304, 1152]
v.blk.18.norm1.bias
F32
F32
[1152]
v.blk.18.norm1.weight
F32
F32
[1152]
v.blk.18.norm2.bias
F32
F32
[1152]
v.blk.18.norm2.weight
F32
F32
[1152]
v.blk.19
v.blk.19.attn_k.bias
F32
F32
[1152]
v.blk.19.attn_k.weight
F16
F16
[1152, 1152]
v.blk.19.attn_out.bias
F32
F32
[1152]
v.blk.19.attn_out.weight
F16
F16
[1152, 1152]
v.blk.19.attn_q.bias
F32
F32
[1152]
v.blk.19.attn_q.weight
F16
F16
[1152, 1152]
v.blk.19.attn_v.bias
F32
F32
[1152]
v.blk.19.attn_v.weight
F16
F16
[1152, 1152]
v.blk.19.mlp.linear_fc1.bias
F32
F32
[4304]
v.blk.19.mlp.linear_fc1.weight
F16
F16
[1152, 4304]
v.blk.19.mlp.linear_fc2.bias
F32
F32
[1152]
v.blk.19.mlp.linear_fc2.weight
F16
F16
[4304, 1152]
v.blk.19.norm1.bias
F32
F32
[1152]
v.blk.19.norm1.weight
F32
F32
[1152]
v.blk.19.norm2.bias
F32
F32
[1152]
v.blk.19.norm2.weight
F32
F32
[1152]
v.blk.20
v.blk.20.attn_k.bias
F32
F32
[1152]
v.blk.20.attn_k.weight
F16
F16
[1152, 1152]
v.blk.20.attn_out.bias
F32
F32
[1152]
v.blk.20.attn_out.weight
F16
F16
[1152, 1152]
v.blk.20.attn_q.bias
F32
F32
[1152]
v.blk.20.attn_q.weight
F16
F16
[1152, 1152]
v.blk.20.attn_v.bias
F32
F32
[1152]
v.blk.20.attn_v.weight
F16
F16
[1152, 1152]
v.blk.20.mlp.linear_fc1.bias
F32
F32
[4304]
v.blk.20.mlp.linear_fc1.weight
F16
F16
[1152, 4304]
v.blk.20.mlp.linear_fc2.bias
F32
F32
[1152]
v.blk.20.mlp.linear_fc2.weight
F16
F16
[4304, 1152]
v.blk.20.norm1.bias
F32
F32
[1152]
v.blk.20.norm1.weight
F32
F32
[1152]
v.blk.20.norm2.bias
F32
F32
[1152]
v.blk.20.norm2.weight
F32
F32
[1152]
v.blk.21
v.blk.21.attn_k.bias
F32
F32
[1152]
v.blk.21.attn_k.weight
F16
F16
[1152, 1152]
v.blk.21.attn_out.bias
F32
F32
[1152]
v.blk.21.attn_out.weight
F16
F16
[1152, 1152]
v.blk.21.attn_q.bias
F32
F32
[1152]
v.blk.21.attn_q.weight
F16
F16
[1152, 1152]
v.blk.21.attn_v.bias
F32
F32
[1152]
v.blk.21.attn_v.weight
F16
F16
[1152, 1152]
v.blk.21.mlp.linear_fc1.bias
F32
F32
[4304]
v.blk.21.mlp.linear_fc1.weight
F16
F16
[1152, 4304]
v.blk.21.mlp.linear_fc2.bias
F32
F32
[1152]
v.blk.21.mlp.linear_fc2.weight
F16
F16
[4304, 1152]
v.blk.21.norm1.bias
F32
F32
[1152]
v.blk.21.norm1.weight
F32
F32
[1152]
v.blk.21.norm2.bias
F32
F32
[1152]
v.blk.21.norm2.weight
F32
F32
[1152]
v.blk.22
v.blk.22.attn_k.bias
F32
F32
[1152]
v.blk.22.attn_k.weight
F16
F16
[1152, 1152]
v.blk.22.attn_out.bias
F32
F32
[1152]
v.blk.22.attn_out.weight
F16
F16
[1152, 1152]
v.blk.22.attn_q.bias
F32
F32
[1152]
v.blk.22.attn_q.weight
F16
F16
[1152, 1152]
v.blk.22.attn_v.bias
F32
F32
[1152]
v.blk.22.attn_v.weight
F16
F16
[1152, 1152]
v.blk.22.mlp.linear_fc1.bias
F32
F32
[4304]
v.blk.22.mlp.linear_fc1.weight
F16
F16
[1152, 4304]
v.blk.22.mlp.linear_fc2.bias
F32
F32
[1152]
v.blk.22.mlp.linear_fc2.weight
F16
F16
[4304, 1152]
v.blk.22.norm1.bias
F32
F32
[1152]
v.blk.22.norm1.weight
F32
F32
[1152]
v.blk.22.norm2.bias
F32
F32
[1152]
v.blk.22.norm2.weight
F32
F32
[1152]
v.blk.23
v.blk.23.attn_k.bias
F32
F32
[1152]
v.blk.23.attn_k.weight
F16
F16
[1152, 1152]
v.blk.23.attn_out.bias
F32
F32
[1152]
v.blk.23.attn_out.weight
F16
F16
[1152, 1152]
v.blk.23.attn_q.bias
F32
F32
[1152]
v.blk.23.attn_q.weight
F16
F16
[1152, 1152]
v.blk.23.attn_v.bias
F32
F32
[1152]
v.blk.23.attn_v.weight
F16
F16
[1152, 1152]
v.blk.23.mlp.linear_fc1.bias
F32
F32
[4304]
v.blk.23.mlp.linear_fc1.weight
F16
F16
[1152, 4304]
v.blk.23.mlp.linear_fc2.bias
F32
F32
[1152]
v.blk.23.mlp.linear_fc2.weight
F16
F16
[4304, 1152]
v.blk.23.norm1.bias
F32
F32
[1152]
v.blk.23.norm1.weight
F32
F32
[1152]
v.blk.23.norm2.bias
F32
F32
[1152]
v.blk.23.norm2.weight
F32
F32
[1152]
v.blk.24
v.blk.24.attn_k.bias
F32
F32
[1152]
v.blk.24.attn_k.weight
F16
F16
[1152, 1152]
v.blk.24.attn_out.bias
F32
F32
[1152]
v.blk.24.attn_out.weight
F16
F16
[1152, 1152]
v.blk.24.attn_q.bias
F32
F32
[1152]
v.blk.24.attn_q.weight
F16
F16
[1152, 1152]
v.blk.24.attn_v.bias
F32
F32
[1152]
v.blk.24.attn_v.weight
F16
F16
[1152, 1152]
v.blk.24.mlp.linear_fc1.bias
F32
F32
[4304]
v.blk.24.mlp.linear_fc1.weight
F16
F16
[1152, 4304]
v.blk.24.mlp.linear_fc2.bias
F32
F32
[1152]
v.blk.24.mlp.linear_fc2.weight
F16
F16
[4304, 1152]
v.blk.24.norm1.bias
F32
F32
[1152]
v.blk.24.norm1.weight
F32
F32
[1152]
v.blk.24.norm2.bias
F32
F32
[1152]
v.blk.24.norm2.weight
F32
F32
[1152]
v.blk.25
v.blk.25.attn_k.bias
F32
F32
[1152]
v.blk.25.attn_k.weight
F16
F16
[1152, 1152]
v.blk.25.attn_out.bias
F32
F32
[1152]
v.blk.25.attn_out.weight
F16
F16
[1152, 1152]
v.blk.25.attn_q.bias
F32
F32
[1152]
v.blk.25.attn_q.weight
F16
F16
[1152, 1152]
v.blk.25.attn_v.bias
F32
F32
[1152]
v.blk.25.attn_v.weight
F16
F16
[1152, 1152]
v.blk.25.mlp.linear_fc1.bias
F32
F32
[4304]
v.blk.25.mlp.linear_fc1.weight
F16
F16
[1152, 4304]
v.blk.25.mlp.linear_fc2.bias
F32
F32
[1152]
v.blk.25.mlp.linear_fc2.weight
F16
F16
[4304, 1152]
v.blk.25.norm1.bias
F32
F32
[1152]
v.blk.25.norm1.weight
F32
F32
[1152]
v.blk.25.norm2.bias
F32
F32
[1152]
v.blk.25.norm2.weight
F32
F32
[1152]
v.blk.26
v.blk.26.attn_k.bias
F32
F32
[1152]
v.blk.26.attn_k.weight
F16
F16
[1152, 1152]
v.blk.26.attn_out.bias
F32
F32
[1152]
v.blk.26.attn_out.weight
F16
F16
[1152, 1152]
v.blk.26.attn_q.bias
F32
F32
[1152]
v.blk.26.attn_q.weight
F16
F16
[1152, 1152]
v.blk.26.attn_v.bias
F32
F32
[1152]
v.blk.26.attn_v.weight
F16
F16
[1152, 1152]
v.blk.26.mlp.linear_fc1.bias
F32
F32
[4304]
v.blk.26.mlp.linear_fc1.weight
F16
F16
[1152, 4304]
v.blk.26.mlp.linear_fc2.bias
F32
F32
[1152]
v.blk.26.mlp.linear_fc2.weight
F16
F16
[4304, 1152]
v.blk.26.norm1.bias
F32
F32
[1152]
v.blk.26.norm1.weight
F32
F32
[1152]
v.blk.26.norm2.bias
F32
F32
[1152]
v.blk.26.norm2.weight
F32
F32
[1152]
v.merger.linear_fc1.bias
F32
F32
[4608]
v.merger.linear_fc1.weight
F16
F16
[4608, 4608]
v.merger.linear_fc2.bias
F32
F32
[2048]
v.merger.linear_fc2.weight
F16
F16
[4608, 2048]
v.merger.norm.bias
F32
F32
[1152]
v.merger.norm.weight
F32
F32
[1152]
v.patch_embed.bias
F32
F32
[1152]
v.patch_embed.weight
F16
F16
[16, 16, 2, 3456]
v.pos_embed.weight
F16
F16
[1152, 2304]
output_norm.weight
F32
F32
[2048]