Models
GitHub
Discord
Docs
Cloud
Sign in
Download
Models
Download
GitHub
Discord
Docs
Cloud
Sign in
zongwei
/
gemma3-translator
:1b
11.5K
Downloads
Updated
5 months ago
This model, based on the gemma3 model, is specifically designed to perform high-quality translations between multiple languages.
This model, based on the gemma3 model, is specifically designed to perform high-quality translations between multiple languages.
Cancel
vision
1b
4b
gemma3-translator:1b
...
/
model
7cd4618c1faf · 815MB
Metadata
general.architecture
gemma3
gemma3
general.file_type
Q4_K_M
Q4_K_M
gemma3.attention.head_count
4
4
gemma3.attention.head_count_kv
1
1
gemma3.attention.key_length
256
256
gemma3.attention.layer_norm_rms_epsilon
1e-06
1e-06
gemma3.attention.sliding_window
512
512
gemma3.attention.value_length
256
256
gemma3.block_count
26
26
gemma3.context_length
32768
32768
gemma3.embedding_length
1152
1152
gemma3.feed_forward_length
6912
6912
gemma3.final_logit_softcapping
30
30
gemma3.rope.global.freq_base
1e+06
1e+06
gemma3.rope.local.freq_base
10000
10000
tokenizer.ggml.add_bos_token
true
true
tokenizer.ggml.add_eos_token
false
false
tokenizer.ggml.add_padding_token
false
false
tokenizer.ggml.add_unknown_token
false
false
tokenizer.ggml.bos_token_id
2
2
tokenizer.ggml.eos_token_id
1
1
tokenizer.ggml.merges
[ , ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ ▁, , , ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ ▁▁, ...]
[ , ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ ▁, , , ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ ▁▁, ...]
tokenizer.ggml.model
llama
llama
tokenizer.ggml.padding_token_id
0
0
tokenizer.ggml.pre
default
default
tokenizer.ggml.scores
[0, 0, 0, 0, 0, ...]
[0, 0, 0, 0, 0, ...]
tokenizer.ggml.token_type
[3, 3, 3, 2, 1, ...]
[3, 3, 3, 2, 1, ...]
tokenizer.ggml.tokens
[<pad>, <eos>, <bos>, <unk>, <mask>, ...]
[<pad>, <eos>, <bos>, <unk>, <mask>, ...]
tokenizer.ggml.unknown_token_id
3
3
Tensor
Name
Type
Shape
token_embd.weight
Q8_0
Q8_0
[1152, 262144]
blk.0
blk.0.attn_k.weight
Q5_0
Q5_0
[1152, 256]
blk.0.attn_k_norm.weight
F32
F32
[256]
blk.0.attn_norm.weight
F32
F32
[1152]
blk.0.attn_output.weight
Q4_K
Q4_K
[1024, 1152]
blk.0.attn_q.weight
Q5_0
Q5_0
[1152, 1024]
blk.0.attn_q_norm.weight
F32
F32
[256]
blk.0.attn_v.weight
Q8_0
Q8_0
[1152, 256]
blk.0.ffn_down.weight
Q6_K
Q6_K
[6912, 1152]
blk.0.ffn_gate.weight
Q5_0
Q5_0
[1152, 6912]
blk.0.ffn_norm.weight
F32
F32
[1152]
blk.0.ffn_up.weight
Q5_0
Q5_0
[1152, 6912]
blk.0.post_attention_norm.weight
F32
F32
[1152]
blk.0.post_ffw_norm.weight
F32
F32
[1152]
blk.1
blk.1.attn_k.weight
Q5_0
Q5_0
[1152, 256]
blk.1.attn_k_norm.weight
F32
F32
[256]
blk.1.attn_norm.weight
F32
F32
[1152]
blk.1.attn_output.weight
Q4_K
Q4_K
[1024, 1152]
blk.1.attn_q.weight
Q5_0
Q5_0
[1152, 1024]
blk.1.attn_q_norm.weight
F32
F32
[256]
blk.1.attn_v.weight
Q8_0
Q8_0
[1152, 256]
blk.1.ffn_down.weight
Q6_K
Q6_K
[6912, 1152]
blk.1.ffn_gate.weight
Q5_0
Q5_0
[1152, 6912]
blk.1.ffn_norm.weight
F32
F32
[1152]
blk.1.ffn_up.weight
Q5_0
Q5_0
[1152, 6912]
blk.1.post_attention_norm.weight
F32
F32
[1152]
blk.1.post_ffw_norm.weight
F32
F32
[1152]
blk.2
blk.2.attn_k.weight
Q5_0
Q5_0
[1152, 256]
blk.2.attn_k_norm.weight
F32
F32
[256]
blk.2.attn_norm.weight
F32
F32
[1152]
blk.2.attn_output.weight
Q4_K
Q4_K
[1024, 1152]
blk.2.attn_q.weight
Q5_0
Q5_0
[1152, 1024]
blk.2.attn_q_norm.weight
F32
F32
[256]
blk.2.attn_v.weight
Q8_0
Q8_0
[1152, 256]
blk.2.ffn_down.weight
Q6_K
Q6_K
[6912, 1152]
blk.2.ffn_gate.weight
Q5_0
Q5_0
[1152, 6912]
blk.2.ffn_norm.weight
F32
F32
[1152]
blk.2.ffn_up.weight
Q5_0
Q5_0
[1152, 6912]
blk.2.post_attention_norm.weight
F32
F32
[1152]
blk.2.post_ffw_norm.weight
F32
F32
[1152]
blk.3
blk.3.attn_k.weight
Q5_0
Q5_0
[1152, 256]
blk.3.attn_k_norm.weight
F32
F32
[256]
blk.3.attn_norm.weight
F32
F32
[1152]
blk.3.attn_output.weight
Q4_K
Q4_K
[1024, 1152]
blk.3.attn_q.weight
Q5_0
Q5_0
[1152, 1024]
blk.3.attn_q_norm.weight
F32
F32
[256]
blk.3.attn_v.weight
Q5_0
Q5_0
[1152, 256]
blk.3.ffn_down.weight
Q4_K
Q4_K
[6912, 1152]
blk.3.ffn_gate.weight
Q5_0
Q5_0
[1152, 6912]
blk.3.ffn_norm.weight
F32
F32
[1152]
blk.3.ffn_up.weight
Q5_0
Q5_0
[1152, 6912]
blk.3.post_attention_norm.weight
F32
F32
[1152]
blk.3.post_ffw_norm.weight
F32
F32
[1152]
blk.4
blk.4.attn_k.weight
Q5_0
Q5_0
[1152, 256]
blk.4.attn_k_norm.weight
F32
F32
[256]
blk.4.attn_norm.weight
F32
F32
[1152]
blk.4.attn_output.weight
Q4_K
Q4_K
[1024, 1152]
blk.4.attn_q.weight
Q5_0
Q5_0
[1152, 1024]
blk.4.attn_q_norm.weight
F32
F32
[256]
blk.4.attn_v.weight
Q5_0
Q5_0
[1152, 256]
blk.4.ffn_down.weight
Q4_K
Q4_K
[6912, 1152]
blk.4.ffn_gate.weight
Q5_0
Q5_0
[1152, 6912]
blk.4.ffn_norm.weight
F32
F32
[1152]
blk.4.ffn_up.weight
Q5_0
Q5_0
[1152, 6912]
blk.4.post_attention_norm.weight
F32
F32
[1152]
blk.4.post_ffw_norm.weight
F32
F32
[1152]
blk.5
blk.5.attn_k.weight
Q5_0
Q5_0
[1152, 256]
blk.5.attn_k_norm.weight
F32
F32
[256]
blk.5.attn_norm.weight
F32
F32
[1152]
blk.5.attn_output.weight
Q4_K
Q4_K
[1024, 1152]
blk.5.attn_q.weight
Q5_0
Q5_0
[1152, 1024]
blk.5.attn_q_norm.weight
F32
F32
[256]
blk.5.attn_v.weight
Q8_0
Q8_0
[1152, 256]
blk.5.ffn_down.weight
Q6_K
Q6_K
[6912, 1152]
blk.5.ffn_gate.weight
Q5_0
Q5_0
[1152, 6912]
blk.5.ffn_norm.weight
F32
F32
[1152]
blk.5.ffn_up.weight
Q5_0
Q5_0
[1152, 6912]
blk.5.post_attention_norm.weight
F32
F32
[1152]
blk.5.post_ffw_norm.weight
F32
F32
[1152]
blk.6
blk.6.attn_k.weight
Q5_0
Q5_0
[1152, 256]
blk.6.attn_k_norm.weight
F32
F32
[256]
blk.6.attn_norm.weight
F32
F32
[1152]
blk.6.attn_output.weight
Q4_K
Q4_K
[1024, 1152]
blk.6.attn_q.weight
Q5_0
Q5_0
[1152, 1024]
blk.6.attn_q_norm.weight
F32
F32
[256]
blk.6.attn_v.weight
Q5_0
Q5_0
[1152, 256]
blk.6.ffn_down.weight
Q4_K
Q4_K
[6912, 1152]
blk.6.ffn_gate.weight
Q5_0
Q5_0
[1152, 6912]
blk.6.ffn_norm.weight
F32
F32
[1152]
blk.6.ffn_up.weight
Q5_0
Q5_0
[1152, 6912]
blk.6.post_attention_norm.weight
F32
F32
[1152]
blk.6.post_ffw_norm.weight
F32
F32
[1152]
blk.7
blk.7.attn_k.weight
Q5_0
Q5_0
[1152, 256]
blk.7.attn_k_norm.weight
F32
F32
[256]
blk.7.attn_norm.weight
F32
F32
[1152]
blk.7.attn_output.weight
Q4_K
Q4_K
[1024, 1152]
blk.7.attn_q.weight
Q5_0
Q5_0
[1152, 1024]
blk.7.attn_q_norm.weight
F32
F32
[256]
blk.7.attn_v.weight
Q5_0
Q5_0
[1152, 256]
blk.7.ffn_down.weight
Q4_K
Q4_K
[6912, 1152]
blk.7.ffn_gate.weight
Q5_0
Q5_0
[1152, 6912]
blk.7.ffn_norm.weight
F32
F32
[1152]
blk.7.ffn_up.weight
Q5_0
Q5_0
[1152, 6912]
blk.7.post_attention_norm.weight
F32
F32
[1152]
blk.7.post_ffw_norm.weight
F32
F32
[1152]
blk.8
blk.8.attn_k.weight
Q5_0
Q5_0
[1152, 256]
blk.8.attn_k_norm.weight
F32
F32
[256]
blk.8.attn_norm.weight
F32
F32
[1152]
blk.8.attn_output.weight
Q4_K
Q4_K
[1024, 1152]
blk.8.attn_q.weight
Q5_0
Q5_0
[1152, 1024]
blk.8.attn_q_norm.weight
F32
F32
[256]
blk.8.attn_v.weight
Q8_0
Q8_0
[1152, 256]
blk.8.ffn_down.weight
Q6_K
Q6_K
[6912, 1152]
blk.8.ffn_gate.weight
Q5_0
Q5_0
[1152, 6912]
blk.8.ffn_norm.weight
F32
F32
[1152]
blk.8.ffn_up.weight
Q5_0
Q5_0
[1152, 6912]
blk.8.post_attention_norm.weight
F32
F32
[1152]
blk.8.post_ffw_norm.weight
F32
F32
[1152]
blk.9
blk.9.attn_k.weight
Q5_0
Q5_0
[1152, 256]
blk.9.attn_k_norm.weight
F32
F32
[256]
blk.9.attn_norm.weight
F32
F32
[1152]
blk.9.attn_output.weight
Q4_K
Q4_K
[1024, 1152]
blk.9.attn_q.weight
Q5_0
Q5_0
[1152, 1024]
blk.9.attn_q_norm.weight
F32
F32
[256]
blk.9.attn_v.weight
Q5_0
Q5_0
[1152, 256]
blk.9.ffn_down.weight
Q4_K
Q4_K
[6912, 1152]
blk.9.ffn_gate.weight
Q5_0
Q5_0
[1152, 6912]
blk.9.ffn_norm.weight
F32
F32
[1152]
blk.9.ffn_up.weight
Q5_0
Q5_0
[1152, 6912]
blk.9.post_attention_norm.weight
F32
F32
[1152]
blk.9.post_ffw_norm.weight
F32
F32
[1152]
blk.10
blk.10.attn_k.weight
Q5_0
Q5_0
[1152, 256]
blk.10.attn_k_norm.weight
F32
F32
[256]
blk.10.attn_norm.weight
F32
F32
[1152]
blk.10.attn_output.weight
Q4_K
Q4_K
[1024, 1152]
blk.10.attn_q.weight
Q5_0
Q5_0
[1152, 1024]
blk.10.attn_q_norm.weight
F32
F32
[256]
blk.10.attn_v.weight
Q5_0
Q5_0
[1152, 256]
blk.10.ffn_down.weight
Q4_K
Q4_K
[6912, 1152]
blk.10.ffn_gate.weight
Q5_0
Q5_0
[1152, 6912]
blk.10.ffn_norm.weight
F32
F32
[1152]
blk.10.ffn_up.weight
Q5_0
Q5_0
[1152, 6912]
blk.10.post_attention_norm.weight
F32
F32
[1152]
blk.10.post_ffw_norm.weight
F32
F32
[1152]
blk.11
blk.11.attn_k.weight
Q5_0
Q5_0
[1152, 256]
blk.11.attn_k_norm.weight
F32
F32
[256]
blk.11.attn_norm.weight
F32
F32
[1152]
blk.11.attn_output.weight
Q4_K
Q4_K
[1024, 1152]
blk.11.attn_q.weight
Q5_0
Q5_0
[1152, 1024]
blk.11.attn_q_norm.weight
F32
F32
[256]
blk.11.attn_v.weight
Q8_0
Q8_0
[1152, 256]
blk.11.ffn_down.weight
Q6_K
Q6_K
[6912, 1152]
blk.11.ffn_gate.weight
Q5_0
Q5_0
[1152, 6912]
blk.11.ffn_norm.weight
F32
F32
[1152]
blk.11.ffn_up.weight
Q5_0
Q5_0
[1152, 6912]
blk.11.post_attention_norm.weight
F32
F32
[1152]
blk.11.post_ffw_norm.weight
F32
F32
[1152]
blk.12
blk.12.attn_k.weight
Q5_0
Q5_0
[1152, 256]
blk.12.attn_k_norm.weight
F32
F32
[256]
blk.12.attn_norm.weight
F32
F32
[1152]
blk.12.attn_output.weight
Q4_K
Q4_K
[1024, 1152]
blk.12.attn_q.weight
Q5_0
Q5_0
[1152, 1024]
blk.12.attn_q_norm.weight
F32
F32
[256]
blk.12.attn_v.weight
Q5_0
Q5_0
[1152, 256]
blk.12.ffn_down.weight
Q4_K
Q4_K
[6912, 1152]
blk.12.ffn_gate.weight
Q5_0
Q5_0
[1152, 6912]
blk.12.ffn_norm.weight
F32
F32
[1152]
blk.12.ffn_up.weight
Q5_0
Q5_0
[1152, 6912]
blk.12.post_attention_norm.weight
F32
F32
[1152]
blk.12.post_ffw_norm.weight
F32
F32
[1152]
blk.13
blk.13.attn_k.weight
Q5_0
Q5_0
[1152, 256]
blk.13.attn_k_norm.weight
F32
F32
[256]
blk.13.attn_norm.weight
F32
F32
[1152]
blk.13.attn_output.weight
Q4_K
Q4_K
[1024, 1152]
blk.13.attn_q.weight
Q5_0
Q5_0
[1152, 1024]
blk.13.attn_q_norm.weight
F32
F32
[256]
blk.13.attn_v.weight
Q5_0
Q5_0
[1152, 256]
blk.13.ffn_down.weight
Q4_K
Q4_K
[6912, 1152]
blk.13.ffn_gate.weight
Q5_0
Q5_0
[1152, 6912]
blk.13.ffn_norm.weight
F32
F32
[1152]
blk.13.ffn_up.weight
Q5_0
Q5_0
[1152, 6912]
blk.13.post_attention_norm.weight
F32
F32
[1152]
blk.13.post_ffw_norm.weight
F32
F32
[1152]
blk.14
blk.14.attn_k.weight
Q5_0
Q5_0
[1152, 256]
blk.14.attn_k_norm.weight
F32
F32
[256]
blk.14.attn_norm.weight
F32
F32
[1152]
blk.14.attn_output.weight
Q4_K
Q4_K
[1024, 1152]
blk.14.attn_q.weight
Q5_0
Q5_0
[1152, 1024]
blk.14.attn_q_norm.weight
F32
F32
[256]
blk.14.attn_v.weight
Q8_0
Q8_0
[1152, 256]
blk.14.ffn_down.weight
Q6_K
Q6_K
[6912, 1152]
blk.14.ffn_gate.weight
Q5_0
Q5_0
[1152, 6912]
blk.14.ffn_norm.weight
F32
F32
[1152]
blk.14.ffn_up.weight
Q5_0
Q5_0
[1152, 6912]
blk.14.post_attention_norm.weight
F32
F32
[1152]
blk.14.post_ffw_norm.weight
F32
F32
[1152]
blk.15
blk.15.attn_k.weight
Q5_0
Q5_0
[1152, 256]
blk.15.attn_k_norm.weight
F32
F32
[256]
blk.15.attn_norm.weight
F32
F32
[1152]
blk.15.attn_output.weight
Q4_K
Q4_K
[1024, 1152]
blk.15.attn_q.weight
Q5_0
Q5_0
[1152, 1024]
blk.15.attn_q_norm.weight
F32
F32
[256]
blk.15.attn_v.weight
Q5_0
Q5_0
[1152, 256]
blk.15.ffn_down.weight
Q4_K
Q4_K
[6912, 1152]
blk.15.ffn_gate.weight
Q5_0
Q5_0
[1152, 6912]
blk.15.ffn_norm.weight
F32
F32
[1152]
blk.15.ffn_up.weight
Q5_0
Q5_0
[1152, 6912]
blk.15.post_attention_norm.weight
F32
F32
[1152]
blk.15.post_ffw_norm.weight
F32
F32
[1152]
blk.16
blk.16.attn_k.weight
Q5_0
Q5_0
[1152, 256]
blk.16.attn_k_norm.weight
F32
F32
[256]
blk.16.attn_norm.weight
F32
F32
[1152]
blk.16.attn_output.weight
Q4_K
Q4_K
[1024, 1152]
blk.16.attn_q.weight
Q5_0
Q5_0
[1152, 1024]
blk.16.attn_q_norm.weight
F32
F32
[256]
blk.16.attn_v.weight
Q5_0
Q5_0
[1152, 256]
blk.16.ffn_down.weight
Q4_K
Q4_K
[6912, 1152]
blk.16.ffn_gate.weight
Q5_0
Q5_0
[1152, 6912]
blk.16.ffn_norm.weight
F32
F32
[1152]
blk.16.ffn_up.weight
Q5_0
Q5_0
[1152, 6912]
blk.16.post_attention_norm.weight
F32
F32
[1152]
blk.16.post_ffw_norm.weight
F32
F32
[1152]
blk.17
blk.17.attn_k.weight
Q5_0
Q5_0
[1152, 256]
blk.17.attn_k_norm.weight
F32
F32
[256]
blk.17.attn_norm.weight
F32
F32
[1152]
blk.17.attn_output.weight
Q4_K
Q4_K
[1024, 1152]
blk.17.attn_q.weight
Q5_0
Q5_0
[1152, 1024]
blk.17.attn_q_norm.weight
F32
F32
[256]
blk.17.attn_v.weight
Q8_0
Q8_0
[1152, 256]
blk.17.ffn_down.weight
Q6_K
Q6_K
[6912, 1152]
blk.17.ffn_gate.weight
Q5_0
Q5_0
[1152, 6912]
blk.17.ffn_norm.weight
F32
F32
[1152]
blk.17.ffn_up.weight
Q5_0
Q5_0
[1152, 6912]
blk.17.post_attention_norm.weight
F32
F32
[1152]
blk.17.post_ffw_norm.weight
F32
F32
[1152]
blk.18
blk.18.attn_k.weight
Q5_0
Q5_0
[1152, 256]
blk.18.attn_k_norm.weight
F32
F32
[256]
blk.18.attn_norm.weight
F32
F32
[1152]
blk.18.attn_output.weight
Q4_K
Q4_K
[1024, 1152]
blk.18.attn_q.weight
Q5_0
Q5_0
[1152, 1024]
blk.18.attn_q_norm.weight
F32
F32
[256]
blk.18.attn_v.weight
Q5_0
Q5_0
[1152, 256]
blk.18.ffn_down.weight
Q4_K
Q4_K
[6912, 1152]
blk.18.ffn_gate.weight
Q5_0
Q5_0
[1152, 6912]
blk.18.ffn_norm.weight
F32
F32
[1152]
blk.18.ffn_up.weight
Q5_0
Q5_0
[1152, 6912]
blk.18.post_attention_norm.weight
F32
F32
[1152]
blk.18.post_ffw_norm.weight
F32
F32
[1152]
blk.19
blk.19.attn_k.weight
Q5_0
Q5_0
[1152, 256]
blk.19.attn_k_norm.weight
F32
F32
[256]
blk.19.attn_norm.weight
F32
F32
[1152]
blk.19.attn_output.weight
Q4_K
Q4_K
[1024, 1152]
blk.19.attn_q.weight
Q5_0
Q5_0
[1152, 1024]
blk.19.attn_q_norm.weight
F32
F32
[256]
blk.19.attn_v.weight
Q5_0
Q5_0
[1152, 256]
blk.19.ffn_down.weight
Q4_K
Q4_K
[6912, 1152]
blk.19.ffn_gate.weight
Q5_0
Q5_0
[1152, 6912]
blk.19.ffn_norm.weight
F32
F32
[1152]
blk.19.ffn_up.weight
Q5_0
Q5_0
[1152, 6912]
blk.19.post_attention_norm.weight
F32
F32
[1152]
blk.19.post_ffw_norm.weight
F32
F32
[1152]
blk.20
blk.20.attn_k.weight
Q5_0
Q5_0
[1152, 256]
blk.20.attn_k_norm.weight
F32
F32
[256]
blk.20.attn_norm.weight
F32
F32
[1152]
blk.20.attn_output.weight
Q4_K
Q4_K
[1024, 1152]
blk.20.attn_q.weight
Q5_0
Q5_0
[1152, 1024]
blk.20.attn_q_norm.weight
F32
F32
[256]
blk.20.attn_v.weight
Q8_0
Q8_0
[1152, 256]
blk.20.ffn_down.weight
Q6_K
Q6_K
[6912, 1152]
blk.20.ffn_gate.weight
Q5_0
Q5_0
[1152, 6912]
blk.20.ffn_norm.weight
F32
F32
[1152]
blk.20.ffn_up.weight
Q5_0
Q5_0
[1152, 6912]
blk.20.post_attention_norm.weight
F32
F32
[1152]
blk.20.post_ffw_norm.weight
F32
F32
[1152]
blk.21
blk.21.attn_k.weight
Q5_0
Q5_0
[1152, 256]
blk.21.attn_k_norm.weight
F32
F32
[256]
blk.21.attn_norm.weight
F32
F32
[1152]
blk.21.attn_output.weight
Q4_K
Q4_K
[1024, 1152]
blk.21.attn_q.weight
Q5_0
Q5_0
[1152, 1024]
blk.21.attn_q_norm.weight
F32
F32
[256]
blk.21.attn_v.weight
Q5_0
Q5_0
[1152, 256]
blk.21.ffn_down.weight
Q4_K
Q4_K
[6912, 1152]
blk.21.ffn_gate.weight
Q5_0
Q5_0
[1152, 6912]
blk.21.ffn_norm.weight
F32
F32
[1152]
blk.21.ffn_up.weight
Q5_0
Q5_0
[1152, 6912]
blk.21.post_attention_norm.weight
F32
F32
[1152]
blk.21.post_ffw_norm.weight
F32
F32
[1152]
blk.22
blk.22.attn_k.weight
Q5_0
Q5_0
[1152, 256]
blk.22.attn_k_norm.weight
F32
F32
[256]
blk.22.attn_norm.weight
F32
F32
[1152]
blk.22.attn_output.weight
Q4_K
Q4_K
[1024, 1152]
blk.22.attn_q.weight
Q5_0
Q5_0
[1152, 1024]
blk.22.attn_q_norm.weight
F32
F32
[256]
blk.22.attn_v.weight
Q8_0
Q8_0
[1152, 256]
blk.22.ffn_down.weight
Q6_K
Q6_K
[6912, 1152]
blk.22.ffn_gate.weight
Q5_0
Q5_0
[1152, 6912]
blk.22.ffn_norm.weight
F32
F32
[1152]
blk.22.ffn_up.weight
Q5_0
Q5_0
[1152, 6912]
blk.22.post_attention_norm.weight
F32
F32
[1152]
blk.22.post_ffw_norm.weight
F32
F32
[1152]
blk.23
blk.23.attn_k.weight
Q5_0
Q5_0
[1152, 256]
blk.23.attn_k_norm.weight
F32
F32
[256]
blk.23.attn_norm.weight
F32
F32
[1152]
blk.23.attn_output.weight
Q4_K
Q4_K
[1024, 1152]
blk.23.attn_q.weight
Q5_0
Q5_0
[1152, 1024]
blk.23.attn_q_norm.weight
F32
F32
[256]
blk.23.attn_v.weight
Q8_0
Q8_0
[1152, 256]
blk.23.ffn_down.weight
Q6_K
Q6_K
[6912, 1152]
blk.23.ffn_gate.weight
Q5_0
Q5_0
[1152, 6912]
blk.23.ffn_norm.weight
F32
F32
[1152]
blk.23.ffn_up.weight
Q5_0
Q5_0
[1152, 6912]
blk.23.post_attention_norm.weight
F32
F32
[1152]
blk.23.post_ffw_norm.weight
F32
F32
[1152]
blk.24
blk.24.attn_k.weight
Q5_0
Q5_0
[1152, 256]
blk.24.attn_k_norm.weight
F32
F32
[256]
blk.24.attn_norm.weight
F32
F32
[1152]
blk.24.attn_output.weight
Q4_K
Q4_K
[1024, 1152]
blk.24.attn_q.weight
Q5_0
Q5_0
[1152, 1024]
blk.24.attn_q_norm.weight
F32
F32
[256]
blk.24.attn_v.weight
Q8_0
Q8_0
[1152, 256]
blk.24.ffn_down.weight
Q6_K
Q6_K
[6912, 1152]
blk.24.ffn_gate.weight
Q5_0
Q5_0
[1152, 6912]
blk.24.ffn_norm.weight
F32
F32
[1152]
blk.24.ffn_up.weight
Q5_0
Q5_0
[1152, 6912]
blk.24.post_attention_norm.weight
F32
F32
[1152]
blk.24.post_ffw_norm.weight
F32
F32
[1152]
blk.25
blk.25.attn_k.weight
Q5_0
Q5_0
[1152, 256]
blk.25.attn_k_norm.weight
F32
F32
[256]
blk.25.attn_norm.weight
F32
F32
[1152]
blk.25.attn_output.weight
Q4_K
Q4_K
[1024, 1152]
blk.25.attn_q.weight
Q5_0
Q5_0
[1152, 1024]
blk.25.attn_q_norm.weight
F32
F32
[256]
blk.25.attn_v.weight
Q8_0
Q8_0
[1152, 256]
blk.25.ffn_down.weight
Q6_K
Q6_K
[6912, 1152]
blk.25.ffn_gate.weight
Q5_0
Q5_0
[1152, 6912]
blk.25.ffn_norm.weight
F32
F32
[1152]
blk.25.ffn_up.weight
Q5_0
Q5_0
[1152, 6912]
blk.25.post_attention_norm.weight
F32
F32
[1152]
blk.25.post_ffw_norm.weight
F32
F32
[1152]
output_norm.weight
F32
F32
[1152]