Zero9Tech/qwen3.5-9b-data-science:latest
7 Downloads · Updated 3 weeks ago
Optimized for real-world data-science decisions: it delivers method selection, alternative comparison, and validation planning in a single response. Trained on 16.5K expert-insight dialogues, it favors actionable guidance over generic theory.
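This page reads like an Ollama-compatible registry listing, so as a minimal sketch — assuming the tag above can be pulled and served by a local Ollama-compatible runtime and that the ollama Python client is installed — a method-selection query might look like this (the prompt and exact tag casing are illustrative, not taken from this page):

    # Sketch only: assumes a local Ollama-compatible runtime serves this model
    # and the `ollama` Python client is installed (pip install ollama).
    # The prompt below is illustrative and is not part of this model card.
    import ollama

    response = ollama.chat(
        model="zero9tech/qwen3.5-9b-data-science:latest",
        messages=[{
            "role": "user",
            "content": (
                "Binary classification, 40k rows, heavy class imbalance. "
                "Which method would you pick, which alternatives did you reject, "
                "and how should I validate the result?"
            ),
        }],
    )
    print(response["message"]["content"])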
qwen3.5-9b-data-science:latest
model  cbc21603edbe · 5.6GB
Metadata
general.architecture  qwen35
general.file_type  Q4_K_M
qwen35.attention.head_count  16
qwen35.attention.head_count_kv  4
qwen35.attention.key_length  256
qwen35.attention.layer_norm_rms_epsilon  1e-06
qwen35.attention.value_length  256
qwen35.block_count  32
qwen35.context_length  262144
qwen35.embedding_length  4096
qwen35.feed_forward_length  12288
qwen35.full_attention_interval  4
qwen35.rope.dimension_count  64
qwen35.rope.dimension_sections  [11, 11, 10, 0]
qwen35.rope.freq_base  1e+07
qwen35.ssm.conv_kernel  4
qwen35.ssm.group_count  16
qwen35.ssm.inner_size  4096
qwen35.ssm.state_size  128
qwen35.ssm.time_step_rank  32
tokenizer.ggml.eos_token_id  248046
tokenizer.ggml.merges  [Ġ Ġ, ĠĠ ĠĠ, i n, Ġ t, ĠĠĠĠ ĠĠĠĠ, ...]
tokenizer.ggml.model  gpt2
tokenizer.ggml.padding_token_id  248055
tokenizer.ggml.pre  qwen35
tokenizer.ggml.token_type  [1, 1, 1, 1, 1, ...]
tokenizer.ggml.tokens  [!, ", #, $, %, ...]
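The metadata above pins down the hybrid layer layout and the attention-cache footprint. As a small sketch of how to read it — interpreting qwen35.full_attention_interval = 4 as "every fourth block uses full attention, the others use the SSM path", an assumption that matches the tensor list below (only blk.3, 7, 11, ... carry separate attn_q/k/v weights) — the layer split and per-token KV-cache size follow directly from these values (f16 cache entries assumed):

    # Sketch: derive layer layout and KV-cache size from the GGUF metadata above.
    # Assumption: full_attention_interval = 4 means every 4th block is a
    # full-attention block (consistent with the tensor list below); cache in f16.
    block_count             = 32
    full_attention_interval = 4
    head_count_kv           = 4        # grouped-query attention: 4 KV heads
    key_length              = 256      # per-head key dimension
    value_length            = 256      # per-head value dimension
    context_length          = 262_144

    attn_blocks = [i for i in range(block_count) if (i + 1) % full_attention_interval == 0]
    ssm_blocks  = [i for i in range(block_count) if i not in attn_blocks]

    bytes_per_token = len(attn_blocks) * head_count_kv * (key_length + value_length) * 2
    print("full-attention blocks:", attn_blocks)   # [3, 7, 11, ..., 31]
    print("SSM blocks:", len(ssm_blocks))          # 24
    print("KV cache per token: %.1f KiB" % (bytes_per_token / 2**10))           # 32.0 KiB
    print("KV cache at %d-token context: %.1f GiB"
          % (context_length, bytes_per_token * context_length / 2**30))         # 8.0 GiB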
Tensor
Name  Type  Shape
token_embd.weight  Q4_K  [4096, 248320]
blk.0
blk.0.attn_gate.weight  Q4_K  [4096, 4096]
blk.0.attn_norm.weight  F32  [4096]
blk.0.attn_qkv.weight  Q6_K  [4096, 8192]
blk.0.ffn_down.weight  Q6_K  [12288, 4096]
blk.0.ffn_gate.weight  Q4_K  [4096, 12288]
blk.0.ffn_up.weight  Q4_K  [4096, 12288]
blk.0.post_attention_norm.weight  F32  [4096]
blk.0.ssm_a  F32  [32]
blk.0.ssm_alpha.weight  Q4_K  [4096, 32]
blk.0.ssm_beta.weight  Q4_K  [4096, 32]
blk.0.ssm_conv1d.weight  F32  [4, 8192]
blk.0.ssm_dt.bias  F32  [32]
blk.0.ssm_norm.weight  F32  [128]
blk.0.ssm_out.weight  Q4_K  [4096, 4096]
blk.1
blk.1.attn_gate.weight  Q4_K  [4096, 4096]
blk.1.attn_norm.weight  F32  [4096]
blk.1.attn_qkv.weight  Q6_K  [4096, 8192]
blk.1.ffn_down.weight  Q6_K  [12288, 4096]
blk.1.ffn_gate.weight  Q4_K  [4096, 12288]
blk.1.ffn_up.weight  Q4_K  [4096, 12288]
blk.1.post_attention_norm.weight  F32  [4096]
blk.1.ssm_a  F32  [32]
blk.1.ssm_alpha.weight  Q4_K  [4096, 32]
blk.1.ssm_beta.weight  Q4_K  [4096, 32]
blk.1.ssm_conv1d.weight  F32  [4, 8192]
blk.1.ssm_dt.bias  F32  [32]
blk.1.ssm_norm.weight  F32  [128]
blk.1.ssm_out.weight  Q4_K  [4096, 4096]
blk.2
blk.2.attn_gate.weight  Q4_K  [4096, 4096]
blk.2.attn_norm.weight  F32  [4096]
blk.2.attn_qkv.weight  Q6_K  [4096, 8192]
blk.2.ffn_down.weight  Q6_K  [12288, 4096]
blk.2.ffn_gate.weight  Q4_K  [4096, 12288]
blk.2.ffn_up.weight  Q4_K  [4096, 12288]
blk.2.post_attention_norm.weight  F32  [4096]
blk.2.ssm_a  F32  [32]
blk.2.ssm_alpha.weight  Q4_K  [4096, 32]
blk.2.ssm_beta.weight  Q4_K  [4096, 32]
blk.2.ssm_conv1d.weight  F32  [4, 8192]
blk.2.ssm_dt.bias  F32  [32]
blk.2.ssm_norm.weight  F32  [128]
blk.2.ssm_out.weight  Q4_K  [4096, 4096]
blk.3
blk.3.attn_k.weight  Q4_K  [4096, 1024]
blk.3.attn_k_norm.weight  F32  [256]
blk.3.attn_norm.weight  F32  [4096]
blk.3.attn_output.weight  Q4_K  [4096, 4096]
blk.3.attn_q.weight  Q4_K  [4096, 8192]
blk.3.attn_q_norm.weight  F32  [256]
blk.3.attn_v.weight  Q6_K  [4096, 1024]
blk.3.ffn_down.weight  Q6_K  [12288, 4096]
blk.3.ffn_gate.weight  Q4_K  [4096, 12288]
blk.3.ffn_up.weight  Q4_K  [4096, 12288]
blk.3.post_attention_norm.weight  F32  [4096]
blk.4
blk.4.attn_gate.weight  Q4_K  [4096, 4096]
blk.4.attn_norm.weight  F32  [4096]
blk.4.attn_qkv.weight  Q4_K  [4096, 8192]
blk.4.ffn_down.weight  Q4_K  [12288, 4096]
blk.4.ffn_gate.weight  Q4_K  [4096, 12288]
blk.4.ffn_up.weight  Q4_K  [4096, 12288]
blk.4.post_attention_norm.weight  F32  [4096]
blk.4.ssm_a  F32  [32]
blk.4.ssm_alpha.weight  Q4_K  [4096, 32]
blk.4.ssm_beta.weight  Q4_K  [4096, 32]
blk.4.ssm_conv1d.weight  F32  [4, 8192]
blk.4.ssm_dt.bias  F32  [32]
blk.4.ssm_norm.weight  F32  [128]
blk.4.ssm_out.weight  Q4_K  [4096, 4096]
blk.5
blk.5.attn_gate.weight  Q4_K  [4096, 4096]
blk.5.attn_norm.weight  F32  [4096]
blk.5.attn_qkv.weight  Q4_K  [4096, 8192]
blk.5.ffn_down.weight  Q4_K  [12288, 4096]
blk.5.ffn_gate.weight  Q4_K  [4096, 12288]
blk.5.ffn_up.weight  Q4_K  [4096, 12288]
blk.5.post_attention_norm.weight  F32  [4096]
blk.5.ssm_a  F32  [32]
blk.5.ssm_alpha.weight  Q4_K  [4096, 32]
blk.5.ssm_beta.weight  Q4_K  [4096, 32]
blk.5.ssm_conv1d.weight  F32  [4, 8192]
blk.5.ssm_dt.bias  F32  [32]
blk.5.ssm_norm.weight  F32  [128]
blk.5.ssm_out.weight  Q4_K  [4096, 4096]
blk.6
blk.6.attn_gate.weight  Q4_K  [4096, 4096]
blk.6.attn_norm.weight  F32  [4096]
blk.6.attn_qkv.weight  Q6_K  [4096, 8192]
blk.6.ffn_down.weight  Q6_K  [12288, 4096]
blk.6.ffn_gate.weight  Q4_K  [4096, 12288]
blk.6.ffn_up.weight  Q4_K  [4096, 12288]
blk.6.post_attention_norm.weight  F32  [4096]
blk.6.ssm_a  F32  [32]
blk.6.ssm_alpha.weight  Q4_K  [4096, 32]
blk.6.ssm_beta.weight  Q4_K  [4096, 32]
blk.6.ssm_conv1d.weight  F32  [4, 8192]
blk.6.ssm_dt.bias  F32  [32]
blk.6.ssm_norm.weight  F32  [128]
blk.6.ssm_out.weight  Q4_K  [4096, 4096]
blk.7
blk.7.attn_k.weight  Q4_K  [4096, 1024]
blk.7.attn_k_norm.weight  F32  [256]
blk.7.attn_norm.weight  F32  [4096]
blk.7.attn_output.weight  Q4_K  [4096, 4096]
blk.7.attn_q.weight  Q4_K  [4096, 8192]
blk.7.attn_q_norm.weight  F32  [256]
blk.7.attn_v.weight  Q4_K  [4096, 1024]
blk.7.ffn_down.weight  Q4_K  [12288, 4096]
blk.7.ffn_gate.weight  Q4_K  [4096, 12288]
blk.7.ffn_up.weight  Q4_K  [4096, 12288]
blk.7.post_attention_norm.weight  F32  [4096]
blk.8
blk.8.attn_gate.weight  Q4_K  [4096, 4096]
blk.8.attn_norm.weight  F32  [4096]
blk.8.attn_qkv.weight  Q4_K  [4096, 8192]
blk.8.ffn_down.weight  Q4_K  [12288, 4096]
blk.8.ffn_gate.weight  Q4_K  [4096, 12288]
blk.8.ffn_up.weight  Q4_K  [4096, 12288]
blk.8.post_attention_norm.weight  F32  [4096]
blk.8.ssm_a  F32  [32]
blk.8.ssm_alpha.weight  Q4_K  [4096, 32]
blk.8.ssm_beta.weight  Q4_K  [4096, 32]
blk.8.ssm_conv1d.weight  F32  [4, 8192]
blk.8.ssm_dt.bias  F32  [32]
blk.8.ssm_norm.weight  F32  [128]
blk.8.ssm_out.weight  Q4_K  [4096, 4096]
blk.9
blk.9.attn_gate.weight  Q4_K  [4096, 4096]
blk.9.attn_norm.weight  F32  [4096]
blk.9.attn_qkv.weight  Q6_K  [4096, 8192]
blk.9.ffn_down.weight  Q6_K  [12288, 4096]
blk.9.ffn_gate.weight  Q4_K  [4096, 12288]
blk.9.ffn_up.weight  Q4_K  [4096, 12288]
blk.9.post_attention_norm.weight  F32  [4096]
blk.9.ssm_a  F32  [32]
blk.9.ssm_alpha.weight  Q4_K  [4096, 32]
blk.9.ssm_beta.weight  Q4_K  [4096, 32]
blk.9.ssm_conv1d.weight  F32  [4, 8192]
blk.9.ssm_dt.bias  F32  [32]
blk.9.ssm_norm.weight  F32  [128]
blk.9.ssm_out.weight  Q4_K  [4096, 4096]
blk.10
blk.10.attn_gate.weight  Q4_K  [4096, 4096]
blk.10.attn_norm.weight  F32  [4096]
blk.10.attn_qkv.weight  Q4_K  [4096, 8192]
blk.10.ffn_down.weight  Q4_K  [12288, 4096]
blk.10.ffn_gate.weight  Q4_K  [4096, 12288]
blk.10.ffn_up.weight  Q4_K  [4096, 12288]
blk.10.post_attention_norm.weight  F32  [4096]
blk.10.ssm_a  F32  [32]
blk.10.ssm_alpha.weight  Q4_K  [4096, 32]
blk.10.ssm_beta.weight  Q4_K  [4096, 32]
blk.10.ssm_conv1d.weight  F32  [4, 8192]
blk.10.ssm_dt.bias  F32  [32]
blk.10.ssm_norm.weight  F32  [128]
blk.10.ssm_out.weight  Q4_K  [4096, 4096]
blk.11
blk.11.attn_k.weight  Q4_K  [4096, 1024]
blk.11.attn_k_norm.weight  F32  [256]
blk.11.attn_norm.weight  F32  [4096]
blk.11.attn_output.weight  Q4_K  [4096, 4096]
blk.11.attn_q.weight  Q4_K  [4096, 8192]
blk.11.attn_q_norm.weight  F32  [256]
blk.11.attn_v.weight  Q4_K  [4096, 1024]
blk.11.ffn_down.weight  Q4_K  [12288, 4096]
blk.11.ffn_gate.weight  Q4_K  [4096, 12288]
blk.11.ffn_up.weight  Q4_K  [4096, 12288]
blk.11.post_attention_norm.weight  F32  [4096]
blk.12
blk.12.attn_gate.weight  Q4_K  [4096, 4096]
blk.12.attn_norm.weight  F32  [4096]
blk.12.attn_qkv.weight  Q6_K  [4096, 8192]
blk.12.ffn_down.weight  Q6_K  [12288, 4096]
blk.12.ffn_gate.weight  Q4_K  [4096, 12288]
blk.12.ffn_up.weight  Q4_K  [4096, 12288]
blk.12.post_attention_norm.weight  F32  [4096]
blk.12.ssm_a  F32  [32]
blk.12.ssm_alpha.weight  Q4_K  [4096, 32]
blk.12.ssm_beta.weight  Q4_K  [4096, 32]
blk.12.ssm_conv1d.weight  F32  [4, 8192]
blk.12.ssm_dt.bias  F32  [32]
blk.12.ssm_norm.weight  F32  [128]
blk.12.ssm_out.weight  Q4_K  [4096, 4096]
blk.13
blk.13.attn_gate.weight  Q4_K  [4096, 4096]
blk.13.attn_norm.weight  F32  [4096]
blk.13.attn_qkv.weight  Q4_K  [4096, 8192]
blk.13.ffn_down.weight  Q4_K  [12288, 4096]
blk.13.ffn_gate.weight  Q4_K  [4096, 12288]
blk.13.ffn_up.weight  Q4_K  [4096, 12288]
blk.13.post_attention_norm.weight  F32  [4096]
blk.13.ssm_a  F32  [32]
blk.13.ssm_alpha.weight  Q4_K  [4096, 32]
blk.13.ssm_beta.weight  Q4_K  [4096, 32]
blk.13.ssm_conv1d.weight  F32  [4, 8192]
blk.13.ssm_dt.bias  F32  [32]
blk.13.ssm_norm.weight  F32  [128]
blk.13.ssm_out.weight  Q4_K  [4096, 4096]
blk.14
blk.14.attn_gate.weight  Q4_K  [4096, 4096]
blk.14.attn_norm.weight  F32  [4096]
blk.14.attn_qkv.weight  Q4_K  [4096, 8192]
blk.14.ffn_down.weight  Q4_K  [12288, 4096]
blk.14.ffn_gate.weight  Q4_K  [4096, 12288]
blk.14.ffn_up.weight  Q4_K  [4096, 12288]
blk.14.post_attention_norm.weight  F32  [4096]
blk.14.ssm_a  F32  [32]
blk.14.ssm_alpha.weight  Q4_K  [4096, 32]
blk.14.ssm_beta.weight  Q4_K  [4096, 32]
blk.14.ssm_conv1d.weight  F32  [4, 8192]
blk.14.ssm_dt.bias  F32  [32]
blk.14.ssm_norm.weight  F32  [128]
blk.14.ssm_out.weight  Q4_K  [4096, 4096]
blk.15
blk.15.attn_k.weight  Q4_K  [4096, 1024]
blk.15.attn_k_norm.weight  F32  [256]
blk.15.attn_norm.weight  F32  [4096]
blk.15.attn_output.weight  Q4_K  [4096, 4096]
blk.15.attn_q.weight  Q4_K  [4096, 8192]
blk.15.attn_q_norm.weight  F32  [256]
blk.15.attn_v.weight  Q6_K  [4096, 1024]
blk.15.ffn_down.weight  Q6_K  [12288, 4096]
blk.15.ffn_gate.weight  Q4_K  [4096, 12288]
blk.15.ffn_up.weight  Q4_K  [4096, 12288]
blk.15.post_attention_norm.weight  F32  [4096]
blk.16
blk.16.attn_gate.weight  Q4_K  [4096, 4096]
blk.16.attn_norm.weight  F32  [4096]
blk.16.attn_qkv.weight  Q4_K  [4096, 8192]
blk.16.ffn_down.weight  Q4_K  [12288, 4096]
blk.16.ffn_gate.weight  Q4_K  [4096, 12288]
blk.16.ffn_up.weight  Q4_K  [4096, 12288]
blk.16.post_attention_norm.weight  F32  [4096]
blk.16.ssm_a  F32  [32]
blk.16.ssm_alpha.weight  Q4_K  [4096, 32]
blk.16.ssm_beta.weight  Q4_K  [4096, 32]
blk.16.ssm_conv1d.weight  F32  [4, 8192]
blk.16.ssm_dt.bias  F32  [32]
blk.16.ssm_norm.weight  F32  [128]
blk.16.ssm_out.weight  Q4_K  [4096, 4096]
blk.17
blk.17.attn_gate.weight  Q4_K  [4096, 4096]
blk.17.attn_norm.weight  F32  [4096]
blk.17.attn_qkv.weight  Q4_K  [4096, 8192]
blk.17.ffn_down.weight  Q4_K  [12288, 4096]
blk.17.ffn_gate.weight  Q4_K  [4096, 12288]
blk.17.ffn_up.weight  Q4_K  [4096, 12288]
blk.17.post_attention_norm.weight  F32  [4096]
blk.17.ssm_a  F32  [32]
blk.17.ssm_alpha.weight  Q4_K  [4096, 32]
blk.17.ssm_beta.weight  Q4_K  [4096, 32]
blk.17.ssm_conv1d.weight  F32  [4, 8192]
blk.17.ssm_dt.bias  F32  [32]
blk.17.ssm_norm.weight  F32  [128]
blk.17.ssm_out.weight  Q4_K  [4096, 4096]
blk.18
blk.18.attn_gate.weight  Q4_K  [4096, 4096]
blk.18.attn_norm.weight  F32  [4096]
blk.18.attn_qkv.weight  Q6_K  [4096, 8192]
blk.18.ffn_down.weight  Q6_K  [12288, 4096]
blk.18.ffn_gate.weight  Q4_K  [4096, 12288]
blk.18.ffn_up.weight  Q4_K  [4096, 12288]
blk.18.post_attention_norm.weight  F32  [4096]
blk.18.ssm_a  F32  [32]
blk.18.ssm_alpha.weight  Q4_K  [4096, 32]
blk.18.ssm_beta.weight  Q4_K  [4096, 32]
blk.18.ssm_conv1d.weight  F32  [4, 8192]
blk.18.ssm_dt.bias  F32  [32]
blk.18.ssm_norm.weight  F32  [128]
blk.18.ssm_out.weight  Q4_K  [4096, 4096]
blk.19
blk.19.attn_k.weight  Q4_K  [4096, 1024]
blk.19.attn_k_norm.weight  F32  [256]
blk.19.attn_norm.weight  F32  [4096]
blk.19.attn_output.weight  Q4_K  [4096, 4096]
blk.19.attn_q.weight  Q4_K  [4096, 8192]
blk.19.attn_q_norm.weight  F32  [256]
blk.19.attn_v.weight  Q4_K  [4096, 1024]
blk.19.ffn_down.weight  Q4_K  [12288, 4096]
blk.19.ffn_gate.weight  Q4_K  [4096, 12288]
blk.19.ffn_up.weight  Q4_K  [4096, 12288]
blk.19.post_attention_norm.weight  F32  [4096]
blk.20
blk.20.attn_gate.weight  Q4_K  [4096, 4096]
blk.20.attn_norm.weight  F32  [4096]
blk.20.attn_qkv.weight  Q4_K  [4096, 8192]
blk.20.ffn_down.weight  Q4_K  [12288, 4096]
blk.20.ffn_gate.weight  Q4_K  [4096, 12288]
blk.20.ffn_up.weight  Q4_K  [4096, 12288]
blk.20.post_attention_norm.weight  F32  [4096]
blk.20.ssm_a  F32  [32]
blk.20.ssm_alpha.weight  Q4_K  [4096, 32]
blk.20.ssm_beta.weight  Q4_K  [4096, 32]
blk.20.ssm_conv1d.weight  F32  [4, 8192]
blk.20.ssm_dt.bias  F32  [32]
blk.20.ssm_norm.weight  F32  [128]
blk.20.ssm_out.weight  Q4_K  [4096, 4096]
blk.21
blk.21.attn_gate.weight  Q4_K  [4096, 4096]
blk.21.attn_norm.weight  F32  [4096]
blk.21.attn_qkv.weight  Q6_K  [4096, 8192]
blk.21.ffn_down.weight  Q6_K  [12288, 4096]
blk.21.ffn_gate.weight  Q4_K  [4096, 12288]
blk.21.ffn_up.weight  Q4_K  [4096, 12288]
blk.21.post_attention_norm.weight  F32  [4096]
blk.21.ssm_a  F32  [32]
blk.21.ssm_alpha.weight  Q4_K  [4096, 32]
blk.21.ssm_beta.weight  Q4_K  [4096, 32]
blk.21.ssm_conv1d.weight  F32  [4, 8192]
blk.21.ssm_dt.bias  F32  [32]
blk.21.ssm_norm.weight  F32  [128]
blk.21.ssm_out.weight  Q4_K  [4096, 4096]
blk.22
blk.22.attn_gate.weight  Q4_K  [4096, 4096]
blk.22.attn_norm.weight  F32  [4096]
blk.22.attn_qkv.weight  Q4_K  [4096, 8192]
blk.22.ffn_down.weight  Q4_K  [12288, 4096]
blk.22.ffn_gate.weight  Q4_K  [4096, 12288]
blk.22.ffn_up.weight  Q4_K  [4096, 12288]
blk.22.post_attention_norm.weight  F32  [4096]
blk.22.ssm_a  F32  [32]
blk.22.ssm_alpha.weight  Q4_K  [4096, 32]
blk.22.ssm_beta.weight  Q4_K  [4096, 32]
blk.22.ssm_conv1d.weight  F32  [4, 8192]
blk.22.ssm_dt.bias  F32  [32]
blk.22.ssm_norm.weight  F32  [128]
blk.22.ssm_out.weight  Q4_K  [4096, 4096]
blk.23
blk.23.attn_k.weight  Q4_K  [4096, 1024]
blk.23.attn_k_norm.weight  F32  [256]
blk.23.attn_norm.weight  F32  [4096]
blk.23.attn_output.weight  Q4_K  [4096, 4096]
blk.23.attn_q.weight  Q4_K  [4096, 8192]
blk.23.attn_q_norm.weight  F32  [256]
blk.23.attn_v.weight  Q4_K  [4096, 1024]
blk.23.ffn_down.weight  Q4_K  [12288, 4096]
blk.23.ffn_gate.weight  Q4_K  [4096, 12288]
blk.23.ffn_up.weight  Q4_K  [4096, 12288]
blk.23.post_attention_norm.weight  F32  [4096]
blk.24
blk.24.attn_gate.weight  Q4_K  [4096, 4096]
blk.24.attn_norm.weight  F32  [4096]
blk.24.attn_qkv.weight  Q6_K  [4096, 8192]
blk.24.ffn_down.weight  Q6_K  [12288, 4096]
blk.24.ffn_gate.weight  Q4_K  [4096, 12288]
blk.24.ffn_up.weight  Q4_K  [4096, 12288]
blk.24.post_attention_norm.weight  F32  [4096]
blk.24.ssm_a  F32  [32]
blk.24.ssm_alpha.weight  Q4_K  [4096, 32]
blk.24.ssm_beta.weight  Q4_K  [4096, 32]
blk.24.ssm_conv1d.weight  F32  [4, 8192]
blk.24.ssm_dt.bias  F32  [32]
blk.24.ssm_norm.weight  F32  [128]
blk.24.ssm_out.weight  Q4_K  [4096, 4096]
blk.25
blk.25.attn_gate.weight  Q4_K  [4096, 4096]
blk.25.attn_norm.weight  F32  [4096]
blk.25.attn_qkv.weight  Q4_K  [4096, 8192]
blk.25.ffn_down.weight  Q4_K  [12288, 4096]
blk.25.ffn_gate.weight  Q4_K  [4096, 12288]
blk.25.ffn_up.weight  Q4_K  [4096, 12288]
blk.25.post_attention_norm.weight  F32  [4096]
blk.25.ssm_a  F32  [32]
blk.25.ssm_alpha.weight  Q4_K  [4096, 32]
blk.25.ssm_beta.weight  Q4_K  [4096, 32]
blk.25.ssm_conv1d.weight  F32  [4, 8192]
blk.25.ssm_dt.bias  F32  [32]
blk.25.ssm_norm.weight  F32  [128]
blk.25.ssm_out.weight  Q4_K  [4096, 4096]
blk.26
blk.26.attn_gate.weight  Q4_K  [4096, 4096]
blk.26.attn_norm.weight  F32  [4096]
blk.26.attn_qkv.weight  Q4_K  [4096, 8192]
blk.26.ffn_down.weight  Q4_K  [12288, 4096]
blk.26.ffn_gate.weight  Q4_K  [4096, 12288]
blk.26.ffn_up.weight  Q4_K  [4096, 12288]
blk.26.post_attention_norm.weight  F32  [4096]
blk.26.ssm_a  F32  [32]
blk.26.ssm_alpha.weight  Q4_K  [4096, 32]
blk.26.ssm_beta.weight  Q4_K  [4096, 32]
blk.26.ssm_conv1d.weight  F32  [4, 8192]
blk.26.ssm_dt.bias  F32  [32]
blk.26.ssm_norm.weight  F32  [128]
blk.26.ssm_out.weight  Q4_K  [4096, 4096]
blk.27
blk.27.attn_k.weight  Q4_K  [4096, 1024]
blk.27.attn_k_norm.weight  F32  [256]
blk.27.attn_norm.weight  F32  [4096]
blk.27.attn_output.weight  Q4_K  [4096, 4096]
blk.27.attn_q.weight  Q4_K  [4096, 8192]
blk.27.attn_q_norm.weight  F32  [256]
blk.27.attn_v.weight  Q6_K  [4096, 1024]
blk.27.ffn_down.weight  Q6_K  [12288, 4096]
blk.27.ffn_gate.weight  Q4_K  [4096, 12288]
blk.27.ffn_up.weight  Q4_K  [4096, 12288]
blk.27.post_attention_norm.weight  F32  [4096]
blk.28
blk.28.attn_gate.weight  Q4_K  [4096, 4096]
blk.28.attn_norm.weight  F32  [4096]
blk.28.attn_qkv.weight  Q6_K  [4096, 8192]
blk.28.ffn_down.weight  Q6_K  [12288, 4096]
blk.28.ffn_gate.weight  Q4_K  [4096, 12288]
blk.28.ffn_up.weight  Q4_K  [4096, 12288]
blk.28.post_attention_norm.weight  F32  [4096]
blk.28.ssm_a  F32  [32]
blk.28.ssm_alpha.weight  Q4_K  [4096, 32]
blk.28.ssm_beta.weight  Q4_K  [4096, 32]
blk.28.ssm_conv1d.weight  F32  [4, 8192]
blk.28.ssm_dt.bias  F32  [32]
blk.28.ssm_norm.weight  F32  [128]
blk.28.ssm_out.weight  Q4_K  [4096, 4096]
blk.29
blk.29.attn_gate.weight  Q4_K  [4096, 4096]
blk.29.attn_norm.weight  F32  [4096]
blk.29.attn_qkv.weight  Q6_K  [4096, 8192]
blk.29.ffn_down.weight  Q6_K  [12288, 4096]
blk.29.ffn_gate.weight  Q4_K  [4096, 12288]
blk.29.ffn_up.weight  Q4_K  [4096, 12288]
blk.29.post_attention_norm.weight  F32  [4096]
blk.29.ssm_a  F32  [32]
blk.29.ssm_alpha.weight  Q4_K  [4096, 32]
blk.29.ssm_beta.weight  Q4_K  [4096, 32]
blk.29.ssm_conv1d.weight  F32  [4, 8192]
blk.29.ssm_dt.bias  F32  [32]
blk.29.ssm_norm.weight  F32  [128]
blk.29.ssm_out.weight  Q4_K  [4096, 4096]
blk.30
blk.30.attn_gate.weight  Q4_K  [4096, 4096]
blk.30.attn_norm.weight  F32  [4096]
blk.30.attn_qkv.weight  Q6_K  [4096, 8192]
blk.30.ffn_down.weight  Q6_K  [12288, 4096]
blk.30.ffn_gate.weight  Q4_K  [4096, 12288]
blk.30.ffn_up.weight  Q4_K  [4096, 12288]
blk.30.post_attention_norm.weight  F32  [4096]
blk.30.ssm_a  F32  [32]
blk.30.ssm_alpha.weight  Q4_K  [4096, 32]
blk.30.ssm_beta.weight  Q4_K  [4096, 32]
blk.30.ssm_conv1d.weight  F32  [4, 8192]
blk.30.ssm_dt.bias  F32  [32]
blk.30.ssm_norm.weight  F32  [128]
blk.30.ssm_out.weight  Q4_K  [4096, 4096]
blk.31
blk.31.attn_k.weight  Q4_K  [4096, 1024]
blk.31.attn_k_norm.weight  F32  [256]
blk.31.attn_norm.weight  F32  [4096]
blk.31.attn_output.weight  Q4_K  [4096, 4096]
blk.31.attn_q.weight  Q4_K  [4096, 8192]
blk.31.attn_q_norm.weight  F32  [256]
blk.31.attn_v.weight  Q6_K  [4096, 1024]
blk.31.ffn_down.weight  Q6_K  [12288, 4096]
blk.31.ffn_gate.weight  Q4_K  [4096, 12288]
blk.31.ffn_up.weight  Q4_K  [4096, 12288]
blk.31.post_attention_norm.weight  F32  [4096]
output.weight  Q6_K  [4096, 248320]
output_norm.weight  F32  [4096]
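Both tables on this page can be reproduced locally from the GGUF file itself. As a minimal sketch, using the gguf-py reader that ships with llama.cpp (pip install gguf); the file path below is a placeholder, not something listed here:

    # Sketch: dump GGUF metadata keys and the tensor name/type/shape table
    # from a local copy of the model file. The path is a placeholder.
    from gguf import GGUFReader

    reader = GGUFReader("qwen3.5-9b-data-science.Q4_K_M.gguf")

    # Key-value metadata (general.*, qwen35.*, tokenizer.*)
    for key in reader.fields:
        print(key)

    # Tensor listing: name, quantization type, shape
    for t in reader.tensors:
        print(t.name, t.tensor_type.name, list(t.shape))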