santhoshj/ambedkar:qwen3-0.6b

13 Downloads · Updated 2 weeks ago
A model intended to assist users with legal queries related to Indian law, constitutional provisions, legislative procedures, and legal research.
Capabilities: tools, thinking
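For readers who want to try the model locally, the sketch below sends one legal-research question to a locally running Ollama server over its REST API. It assumes Ollama is installed, the model has been pulled under the tag santhoshj/ambedkar:qwen3-0.6b, and the server is listening on the default http://localhost:11434; the question text is only an illustration.

# Minimal sketch: ask the model one question through the local Ollama REST API.
# Assumes `ollama pull santhoshj/ambedkar:qwen3-0.6b` has already been run and
# the server is reachable at the default localhost:11434.
import requests

resp = requests.post(
    "http://localhost:11434/api/chat",
    json={
        "model": "santhoshj/ambedkar:qwen3-0.6b",
        "messages": [
            {"role": "user", "content": "Summarise Article 21 of the Indian Constitution."}
        ],
        "stream": False,  # return one JSON object instead of a token stream
    },
    timeout=120,
)
resp.raise_for_status()
print(resp.json()["message"]["content"])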
ambedkar:qwen3-0.6b
model    7f4030143c1c · 523MB
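The digest and size above identify the GGUF layer behind this tag. The metadata listed in the next section can also be retrieved programmatically from a local Ollama instance; the sketch below uses the /api/show endpoint. The "model_info" and "details" field names follow current Ollama API documentation and may differ across Ollama versions, so treat the exact keys as assumptions.

# Sketch: fetch the model's metadata (architecture, context length, etc.)
# from a local Ollama server. Field names may vary between Ollama versions.
import requests

info = requests.post(
    "http://localhost:11434/api/show",
    json={"model": "santhoshj/ambedkar:qwen3-0.6b"},
    timeout=30,
).json()

print(info["details"])                              # family, quantization, parameter size
print(info["model_info"]["general.architecture"])   # expected: "qwen3"
print(info["model_info"]["qwen3.context_length"])   # expected: 40960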
Metadata

general.architecture                      qwen3
general.file_type                         Q4_K_M
qwen3.attention.head_count                16
qwen3.attention.head_count_kv             8
qwen3.attention.key_length                128
qwen3.attention.layer_norm_rms_epsilon    1e-06
qwen3.attention.value_length              128
qwen3.block_count                         28
qwen3.context_length                      40960
qwen3.embedding_length                    1024
qwen3.feed_forward_length                 3072
qwen3.rope.freq_base                      1e+06
tokenizer.ggml.add_bos_token              false
tokenizer.ggml.bos_token_id               151643
tokenizer.ggml.eos_token_id               151645
tokenizer.ggml.merges                     [Ġ Ġ, ĠĠ ĠĠ, i n, Ġ t, ĠĠĠĠ ĠĠĠĠ, ...]
tokenizer.ggml.model                      gpt2
tokenizer.ggml.padding_token_id           151643
tokenizer.ggml.pre                        qwen2
tokenizer.ggml.token_type                 [1, 1, 1, 1, 1, ...]
tokenizer.ggml.tokens                     [!, ", #, $, %, ...]
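The attention settings above are enough to estimate how much memory the KV cache needs at the full 40,960-token context. The sketch below does that arithmetic with the values from the metadata table, assuming the cache is held in f16 (2 bytes per element); KV-cache quantization or a shorter context window would change the result.

# Back-of-the-envelope KV-cache size from the GGUF metadata above.
# Assumes an f16 cache (2 bytes per element); quantized caches differ.
block_count   = 28      # qwen3.block_count
kv_heads      = 8       # qwen3.attention.head_count_kv
key_length    = 128     # qwen3.attention.key_length
value_length  = 128     # qwen3.attention.value_length
context       = 40960   # qwen3.context_length
bytes_per_elt = 2       # f16

per_token = block_count * kv_heads * (key_length + value_length) * bytes_per_elt
total     = per_token * context
print(f"{per_token / 1024:.0f} KiB per token, {total / 2**30:.2f} GiB at full context")
# -> 112 KiB per token, about 4.38 GiB at the 40,960-token context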
Tensor

Name                          Type   Shape
token_embd.weight             Q4_K   [1024, 151936]
blk.0
blk.0.attn_k.weight           Q4_K   [1024, 1024]
blk.0.attn_k_norm.weight      F32    [128]
blk.0.attn_norm.weight        F32    [1024]
blk.0.attn_output.weight      Q4_K   [2048, 1024]
blk.0.attn_q.weight           Q4_K   [1024, 2048]
blk.0.attn_q_norm.weight      F32    [128]
blk.0.attn_v.weight           F16    [1024, 1024]
blk.0.ffn_down.weight         Q6_K   [3072, 1024]
blk.0.ffn_gate.weight         Q4_K   [1024, 3072]
blk.0.ffn_norm.weight         F32    [1024]
blk.0.ffn_up.weight           Q4_K   [1024, 3072]
blk.1
blk.1.attn_k.weight           Q4_K   [1024, 1024]
blk.1.attn_k_norm.weight      F32    [128]
blk.1.attn_norm.weight        F32    [1024]
blk.1.attn_output.weight      Q4_K   [2048, 1024]
blk.1.attn_q.weight           Q4_K   [1024, 2048]
blk.1.attn_q_norm.weight      F32    [128]
blk.1.attn_v.weight           F16    [1024, 1024]
blk.1.ffn_down.weight         Q6_K   [3072, 1024]
blk.1.ffn_gate.weight         Q4_K   [1024, 3072]
blk.1.ffn_norm.weight         F32    [1024]
blk.1.ffn_up.weight           Q4_K   [1024, 3072]
blk.2
blk.2.attn_k.weight           Q4_K   [1024, 1024]
blk.2.attn_k_norm.weight      F32    [128]
blk.2.attn_norm.weight        F32    [1024]
blk.2.attn_output.weight      Q4_K   [2048, 1024]
blk.2.attn_q.weight           Q4_K   [1024, 2048]
blk.2.attn_q_norm.weight      F32    [128]
blk.2.attn_v.weight           F16    [1024, 1024]
blk.2.ffn_down.weight         Q6_K   [3072, 1024]
blk.2.ffn_gate.weight         Q4_K   [1024, 3072]
blk.2.ffn_norm.weight         F32    [1024]
blk.2.ffn_up.weight           Q4_K   [1024, 3072]
blk.3
blk.3.attn_k.weight           Q4_K   [1024, 1024]
blk.3.attn_k_norm.weight      F32    [128]
blk.3.attn_norm.weight        F32    [1024]
blk.3.attn_output.weight      Q4_K   [2048, 1024]
blk.3.attn_q.weight           Q4_K   [1024, 2048]
blk.3.attn_q_norm.weight      F32    [128]
blk.3.attn_v.weight           F16    [1024, 1024]
blk.3.ffn_down.weight         Q4_K   [3072, 1024]
blk.3.ffn_gate.weight         Q4_K   [1024, 3072]
blk.3.ffn_norm.weight         F32    [1024]
blk.3.ffn_up.weight           Q4_K   [1024, 3072]
blk.4
blk.4.attn_k.weight           Q4_K   [1024, 1024]
blk.4.attn_k_norm.weight      F32    [128]
blk.4.attn_norm.weight        F32    [1024]
blk.4.attn_output.weight      Q4_K   [2048, 1024]
blk.4.attn_q.weight           Q4_K   [1024, 2048]
blk.4.attn_q_norm.weight      F32    [128]
blk.4.attn_v.weight           F16    [1024, 1024]
blk.4.ffn_down.weight         Q4_K   [3072, 1024]
blk.4.ffn_gate.weight         Q4_K   [1024, 3072]
blk.4.ffn_norm.weight         F32    [1024]
blk.4.ffn_up.weight           Q4_K   [1024, 3072]
blk.5
blk.5.attn_k.weight           Q4_K   [1024, 1024]
blk.5.attn_k_norm.weight      F32    [128]
blk.5.attn_norm.weight        F32    [1024]
blk.5.attn_output.weight      Q4_K   [2048, 1024]
blk.5.attn_q.weight           Q4_K   [1024, 2048]
blk.5.attn_q_norm.weight      F32    [128]
blk.5.attn_v.weight           F16    [1024, 1024]
blk.5.ffn_down.weight         Q6_K   [3072, 1024]
blk.5.ffn_gate.weight         Q4_K   [1024, 3072]
blk.5.ffn_norm.weight         F32    [1024]
blk.5.ffn_up.weight           Q4_K   [1024, 3072]
blk.6
blk.6.attn_k.weight           Q4_K   [1024, 1024]
blk.6.attn_k_norm.weight      F32    [128]
blk.6.attn_norm.weight        F32    [1024]
blk.6.attn_output.weight      Q4_K   [2048, 1024]
blk.6.attn_q.weight           Q4_K   [1024, 2048]
blk.6.attn_q_norm.weight      F32    [128]
blk.6.attn_v.weight           F16    [1024, 1024]
blk.6.ffn_down.weight         Q4_K   [3072, 1024]
blk.6.ffn_gate.weight         Q4_K   [1024, 3072]
blk.6.ffn_norm.weight         F32    [1024]
blk.6.ffn_up.weight           Q4_K   [1024, 3072]
blk.7
blk.7.attn_k.weight           Q4_K   [1024, 1024]
blk.7.attn_k_norm.weight      F32    [128]
blk.7.attn_norm.weight        F32    [1024]
blk.7.attn_output.weight      Q4_K   [2048, 1024]
blk.7.attn_q.weight           Q4_K   [1024, 2048]
blk.7.attn_q_norm.weight      F32    [128]
blk.7.attn_v.weight           F16    [1024, 1024]
blk.7.ffn_down.weight         Q4_K   [3072, 1024]
blk.7.ffn_gate.weight         Q4_K   [1024, 3072]
blk.7.ffn_norm.weight         F32    [1024]
blk.7.ffn_up.weight           Q4_K   [1024, 3072]
blk.8
blk.8.attn_k.weight           Q4_K   [1024, 1024]
blk.8.attn_k_norm.weight      F32    [128]
blk.8.attn_norm.weight        F32    [1024]
blk.8.attn_output.weight      Q4_K   [2048, 1024]
blk.8.attn_q.weight           Q4_K   [1024, 2048]
blk.8.attn_q_norm.weight      F32    [128]
blk.8.attn_v.weight           F16    [1024, 1024]
blk.8.ffn_down.weight         Q6_K   [3072, 1024]
blk.8.ffn_gate.weight         Q4_K   [1024, 3072]
blk.8.ffn_norm.weight         F32    [1024]
blk.8.ffn_up.weight           Q4_K   [1024, 3072]
blk.9
blk.9.attn_k.weight           Q4_K   [1024, 1024]
blk.9.attn_k_norm.weight      F32    [128]
blk.9.attn_norm.weight        F32    [1024]
blk.9.attn_output.weight      Q4_K   [2048, 1024]
blk.9.attn_q.weight           Q4_K   [1024, 2048]
blk.9.attn_q_norm.weight      F32    [128]
blk.9.attn_v.weight           F16    [1024, 1024]
blk.9.ffn_down.weight         Q4_K   [3072, 1024]
blk.9.ffn_gate.weight         Q4_K   [1024, 3072]
blk.9.ffn_norm.weight         F32    [1024]
blk.9.ffn_up.weight           Q4_K   [1024, 3072]
blk.10
blk.10.attn_k.weight          Q4_K   [1024, 1024]
blk.10.attn_k_norm.weight     F32    [128]
blk.10.attn_norm.weight       F32    [1024]
blk.10.attn_output.weight     Q4_K   [2048, 1024]
blk.10.attn_q.weight          Q4_K   [1024, 2048]
blk.10.attn_q_norm.weight     F32    [128]
blk.10.attn_v.weight          F16    [1024, 1024]
blk.10.ffn_down.weight        Q4_K   [3072, 1024]
blk.10.ffn_gate.weight        Q4_K   [1024, 3072]
blk.10.ffn_norm.weight        F32    [1024]
blk.10.ffn_up.weight          Q4_K   [1024, 3072]
blk.11
blk.11.attn_k.weight          Q4_K   [1024, 1024]
blk.11.attn_k_norm.weight     F32    [128]
blk.11.attn_norm.weight       F32    [1024]
blk.11.attn_output.weight     Q4_K   [2048, 1024]
blk.11.attn_q.weight          Q4_K   [1024, 2048]
blk.11.attn_q_norm.weight     F32    [128]
blk.11.attn_v.weight          F16    [1024, 1024]
blk.11.ffn_down.weight        Q6_K   [3072, 1024]
blk.11.ffn_gate.weight        Q4_K   [1024, 3072]
blk.11.ffn_norm.weight        F32    [1024]
blk.11.ffn_up.weight          Q4_K   [1024, 3072]
blk.12
blk.12.attn_k.weight          Q4_K   [1024, 1024]
blk.12.attn_k_norm.weight     F32    [128]
blk.12.attn_norm.weight       F32    [1024]
blk.12.attn_output.weight     Q4_K   [2048, 1024]
blk.12.attn_q.weight          Q4_K   [1024, 2048]
blk.12.attn_q_norm.weight     F32    [128]
blk.12.attn_v.weight          F16    [1024, 1024]
blk.12.ffn_down.weight        Q4_K   [3072, 1024]
blk.12.ffn_gate.weight        Q4_K   [1024, 3072]
blk.12.ffn_norm.weight        F32    [1024]
blk.12.ffn_up.weight          Q4_K   [1024, 3072]
blk.13
blk.13.attn_k.weight          Q4_K   [1024, 1024]
blk.13.attn_k_norm.weight     F32    [128]
blk.13.attn_norm.weight       F32    [1024]
blk.13.attn_output.weight     Q4_K   [2048, 1024]
blk.13.attn_q.weight          Q4_K   [1024, 2048]
blk.13.attn_q_norm.weight     F32    [128]
blk.13.attn_v.weight          F16    [1024, 1024]
blk.13.ffn_down.weight        Q4_K   [3072, 1024]
blk.13.ffn_gate.weight        Q4_K   [1024, 3072]
blk.13.ffn_norm.weight        F32    [1024]
blk.13.ffn_up.weight          Q4_K   [1024, 3072]
blk.14
blk.14.attn_k.weight          Q4_K   [1024, 1024]
blk.14.attn_k_norm.weight     F32    [128]
blk.14.attn_norm.weight       F32    [1024]
blk.14.attn_output.weight     Q4_K   [2048, 1024]
blk.14.attn_q.weight          Q4_K   [1024, 2048]
blk.14.attn_q_norm.weight     F32    [128]
blk.14.attn_v.weight          F16    [1024, 1024]
blk.14.ffn_down.weight        Q6_K   [3072, 1024]
blk.14.ffn_gate.weight        Q4_K   [1024, 3072]
blk.14.ffn_norm.weight        F32    [1024]
blk.14.ffn_up.weight          Q4_K   [1024, 3072]
blk.15
blk.15.attn_k.weight          Q4_K   [1024, 1024]
blk.15.attn_k_norm.weight     F32    [128]
blk.15.attn_norm.weight       F32    [1024]
blk.15.attn_output.weight     Q4_K   [2048, 1024]
blk.15.attn_q.weight          Q4_K   [1024, 2048]
blk.15.attn_q_norm.weight     F32    [128]
blk.15.attn_v.weight          F16    [1024, 1024]
blk.15.ffn_down.weight        Q4_K   [3072, 1024]
blk.15.ffn_gate.weight        Q4_K   [1024, 3072]
blk.15.ffn_norm.weight        F32    [1024]
blk.15.ffn_up.weight          Q4_K   [1024, 3072]
blk.16
blk.16.attn_k.weight          Q4_K   [1024, 1024]
blk.16.attn_k_norm.weight     F32    [128]
blk.16.attn_norm.weight       F32    [1024]
blk.16.attn_output.weight     Q4_K   [2048, 1024]
blk.16.attn_q.weight          Q4_K   [1024, 2048]
blk.16.attn_q_norm.weight     F32    [128]
blk.16.attn_v.weight          F16    [1024, 1024]
blk.16.ffn_down.weight        Q4_K   [3072, 1024]
blk.16.ffn_gate.weight        Q4_K   [1024, 3072]
blk.16.ffn_norm.weight        F32    [1024]
blk.16.ffn_up.weight          Q4_K   [1024, 3072]
blk.17
blk.17.attn_k.weight          Q4_K   [1024, 1024]
blk.17.attn_k_norm.weight     F32    [128]
blk.17.attn_norm.weight       F32    [1024]
blk.17.attn_output.weight     Q4_K   [2048, 1024]
blk.17.attn_q.weight          Q4_K   [1024, 2048]
blk.17.attn_q_norm.weight     F32    [128]
blk.17.attn_v.weight          F16    [1024, 1024]
blk.17.ffn_down.weight        Q6_K   [3072, 1024]
blk.17.ffn_gate.weight        Q4_K   [1024, 3072]
blk.17.ffn_norm.weight        F32    [1024]
blk.17.ffn_up.weight          Q4_K   [1024, 3072]
blk.18
blk.18.attn_k.weight          Q4_K   [1024, 1024]
blk.18.attn_k_norm.weight     F32    [128]
blk.18.attn_norm.weight       F32    [1024]
blk.18.attn_output.weight     Q4_K   [2048, 1024]
blk.18.attn_q.weight          Q4_K   [1024, 2048]
blk.18.attn_q_norm.weight     F32    [128]
blk.18.attn_v.weight          F16    [1024, 1024]
blk.18.ffn_down.weight        Q4_K   [3072, 1024]
blk.18.ffn_gate.weight        Q4_K   [1024, 3072]
blk.18.ffn_norm.weight        F32    [1024]
blk.18.ffn_up.weight          Q4_K   [1024, 3072]
blk.19
blk.19.attn_k.weight          Q4_K   [1024, 1024]
blk.19.attn_k_norm.weight     F32    [128]
blk.19.attn_norm.weight       F32    [1024]
blk.19.attn_output.weight     Q4_K   [2048, 1024]
blk.19.attn_q.weight          Q4_K   [1024, 2048]
blk.19.attn_q_norm.weight     F32    [128]
blk.19.attn_v.weight          F16    [1024, 1024]
blk.19.ffn_down.weight        Q4_K   [3072, 1024]
blk.19.ffn_gate.weight        Q4_K   [1024, 3072]
blk.19.ffn_norm.weight        F32    [1024]
blk.19.ffn_up.weight          Q4_K   [1024, 3072]
blk.20
blk.20.attn_k.weight          Q4_K   [1024, 1024]
blk.20.attn_k_norm.weight     F32    [128]
blk.20.attn_norm.weight       F32    [1024]
blk.20.attn_output.weight     Q4_K   [2048, 1024]
blk.20.attn_q.weight          Q4_K   [1024, 2048]
blk.20.attn_q_norm.weight     F32    [128]
blk.20.attn_v.weight          F16    [1024, 1024]
blk.20.ffn_down.weight        Q6_K   [3072, 1024]
blk.20.ffn_gate.weight        Q4_K   [1024, 3072]
blk.20.ffn_norm.weight        F32    [1024]
blk.20.ffn_up.weight          Q4_K   [1024, 3072]
blk.21
blk.21.attn_k.weight          Q4_K   [1024, 1024]
blk.21.attn_k_norm.weight     F32    [128]
blk.21.attn_norm.weight       F32    [1024]
blk.21.attn_output.weight     Q4_K   [2048, 1024]
blk.21.attn_q.weight          Q4_K   [1024, 2048]
blk.21.attn_q_norm.weight     F32    [128]
blk.21.attn_v.weight          F16    [1024, 1024]
blk.21.ffn_down.weight        Q4_K   [3072, 1024]
blk.21.ffn_gate.weight        Q4_K   [1024, 3072]
blk.21.ffn_norm.weight        F32    [1024]
blk.21.ffn_up.weight          Q4_K   [1024, 3072]
blk.22
blk.22.attn_k.weight          Q4_K   [1024, 1024]
blk.22.attn_k_norm.weight     F32    [128]
blk.22.attn_norm.weight       F32    [1024]
blk.22.attn_output.weight     Q4_K   [2048, 1024]
blk.22.attn_q.weight          Q4_K   [1024, 2048]
blk.22.attn_q_norm.weight     F32    [128]
blk.22.attn_v.weight          F16    [1024, 1024]
blk.22.ffn_down.weight        Q4_K   [3072, 1024]
blk.22.ffn_gate.weight        Q4_K   [1024, 3072]
blk.22.ffn_norm.weight        F32    [1024]
blk.22.ffn_up.weight          Q4_K   [1024, 3072]
blk.23
blk.23.attn_k.weight          Q4_K   [1024, 1024]
blk.23.attn_k_norm.weight     F32    [128]
blk.23.attn_norm.weight       F32    [1024]
blk.23.attn_output.weight     Q4_K   [2048, 1024]
blk.23.attn_q.weight          Q4_K   [1024, 2048]
blk.23.attn_q_norm.weight     F32    [128]
blk.23.attn_v.weight          F16    [1024, 1024]
blk.23.ffn_down.weight        Q6_K   [3072, 1024]
blk.23.ffn_gate.weight        Q4_K   [1024, 3072]
blk.23.ffn_norm.weight        F32    [1024]
blk.23.ffn_up.weight          Q4_K   [1024, 3072]
blk.24
blk.24.attn_k.weight          Q4_K   [1024, 1024]
blk.24.attn_k_norm.weight     F32    [128]
blk.24.attn_norm.weight       F32    [1024]
blk.24.attn_output.weight     Q4_K   [2048, 1024]
blk.24.attn_q.weight          Q4_K   [1024, 2048]
blk.24.attn_q_norm.weight     F32    [128]
blk.24.attn_v.weight          F16    [1024, 1024]
blk.24.ffn_down.weight        Q6_K   [3072, 1024]
blk.24.ffn_gate.weight        Q4_K   [1024, 3072]
blk.24.ffn_norm.weight        F32    [1024]
blk.24.ffn_up.weight          Q4_K   [1024, 3072]
blk.25
blk.25.attn_k.weight          Q4_K   [1024, 1024]
blk.25.attn_k_norm.weight     F32    [128]
blk.25.attn_norm.weight       F32    [1024]
blk.25.attn_output.weight     Q4_K   [2048, 1024]
blk.25.attn_q.weight          Q4_K   [1024, 2048]
blk.25.attn_q_norm.weight     F32    [128]
blk.25.attn_v.weight          F16    [1024, 1024]
blk.25.ffn_down.weight        Q6_K   [3072, 1024]
blk.25.ffn_gate.weight        Q4_K   [1024, 3072]
blk.25.ffn_norm.weight        F32    [1024]
blk.25.ffn_up.weight          Q4_K   [1024, 3072]
blk.26
blk.26.attn_k.weight          Q4_K   [1024, 1024]
blk.26.attn_k_norm.weight     F32    [128]
blk.26.attn_norm.weight       F32    [1024]
blk.26.attn_output.weight     Q4_K   [2048, 1024]
blk.26.attn_q.weight          Q4_K   [1024, 2048]
blk.26.attn_q_norm.weight     F32    [128]
blk.26.attn_v.weight          F16    [1024, 1024]
blk.26.ffn_down.weight        Q6_K   [3072, 1024]
blk.26.ffn_gate.weight        Q4_K   [1024, 3072]
blk.26.ffn_norm.weight        F32    [1024]
blk.26.ffn_up.weight          Q4_K   [1024, 3072]
blk.27
blk.27.attn_k.weight          Q4_K   [1024, 1024]
blk.27.attn_k_norm.weight     F32    [128]
blk.27.attn_norm.weight       F32    [1024]
blk.27.attn_output.weight     Q4_K   [2048, 1024]
blk.27.attn_q.weight          Q4_K   [1024, 2048]
blk.27.attn_q_norm.weight     F32    [128]
blk.27.attn_v.weight          F16    [1024, 1024]
blk.27.ffn_down.weight        Q6_K   [3072, 1024]
blk.27.ffn_gate.weight        Q4_K   [1024, 3072]
blk.27.ffn_norm.weight        F32    [1024]
blk.27.ffn_up.weight          Q4_K   [1024, 3072]
output.weight                 Q6_K   [1024, 151936]
output_norm.weight            F32    [1024]
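Because every tensor and its shape is listed above, the total element count can be checked with a few lines of arithmetic. The sketch below reproduces the per-block layout from the table; note that output.weight appears here as a separate tensor, so the total lands near 0.75B elements, while the "0.6b" in the tag refers to the base Qwen3-0.6B model, whose published size assumes tied input/output embeddings.

# Count the elements in every tensor listed in the table above.
# The per-block shapes are identical across all 28 blocks; only ffn_down's
# quantization type varies (Q4_K vs Q6_K), which does not change the count.
block_shapes = {
    "attn_k.weight":      (1024, 1024),
    "attn_k_norm.weight": (128,),
    "attn_norm.weight":   (1024,),
    "attn_output.weight": (2048, 1024),
    "attn_q.weight":      (1024, 2048),
    "attn_q_norm.weight": (128,),
    "attn_v.weight":      (1024, 1024),
    "ffn_down.weight":    (3072, 1024),
    "ffn_gate.weight":    (1024, 3072),
    "ffn_norm.weight":    (1024,),
    "ffn_up.weight":      (1024, 3072),
}
other_shapes = {
    "token_embd.weight":  (1024, 151936),
    "output.weight":      (1024, 151936),
    "output_norm.weight": (1024,),
}

def numel(shape):
    n = 1
    for d in shape:
        n *= d
    return n

per_block = sum(numel(s) for s in block_shapes.values())
total = 28 * per_block + sum(numel(s) for s in other_shapes.values())
print(f"{per_block:,} elements per block, {total:,} total")
# -> 15,730,944 elements per block, 751,632,384 total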