sam860/exaone-4.0:1.2b

195 Downloads · Updated 2 months ago
LGAI has developed the latest version of the EXAONE model series, now with reasoning!
exaone-4.0:1.2b / ... / model

cc0b2a3f447e · 1.4GB
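To try the model locally, here is a minimal sketch using the Ollama Python client (an assumption that a local `ollama serve` is running and the `ollama` pip package is installed; the prompt is only an illustration):

```python
import ollama

# Pull the tag shown on this page, then send one chat turn.
ollama.pull("sam860/exaone-4.0:1.2b")
response = ollama.chat(
    model="sam860/exaone-4.0:1.2b",
    messages=[{"role": "user", "content": "In one sentence: what is EXAONE?"}],
)
print(response["message"]["content"])
```

The CLI equivalent is `ollama run sam860/exaone-4.0:1.2b`.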
Metadata

general.architecture                       exaone4
general.file_type                          Q8_0
exaone4.attention.head_count               32
exaone4.attention.head_count_kv            8
exaone4.attention.key_length               64
exaone4.attention.layer_norm_rms_epsilon   1e-05
exaone4.attention.value_length             64
exaone4.block_count                        30
exaone4.context_length                     65536
exaone4.embedding_length                   2048
exaone4.feed_forward_length                4096
exaone4.rope.freq_base                     1e+06
exaone4.vocab_size                         102400
tokenizer.ggml.add_bos_token               false
tokenizer.ggml.bos_token_id                1
tokenizer.ggml.eos_token_id                361
tokenizer.ggml.merges                      [t h, Ġ a, Ġ í, i n, Ġ th, ...]
tokenizer.ggml.model                       gpt2
tokenizer.ggml.padding_token_id            0
tokenizer.ggml.pre                         exaone4
tokenizer.ggml.token_type                  [3, 3, 3, 3, 4, ...]
tokenizer.ggml.tokens                      [[PAD], [BOS], [EOS], [UNK], , ...]
tokenizer.ggml.unknown_token_id            3
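These attention settings explain the projection shapes in the tensor table below: 32 query heads against 8 key/value heads is grouped-query attention (GQA), so the K and V projections are a quarter the width of the Q projection. A minimal sketch of the arithmetic, using only values from this page:

```python
# GQA arithmetic from the metadata above (all values copied from this page).
embedding_length = 2048  # exaone4.embedding_length
head_count = 32          # exaone4.attention.head_count (query heads)
head_count_kv = 8        # exaone4.attention.head_count_kv (key/value heads)
key_length = 64          # exaone4.attention.key_length (per-head dim)

q_dim = head_count * key_length      # 32 * 64 = 2048, matches attn_q [2048, 2048]
kv_dim = head_count_kv * key_length  # 8 * 64 = 512, matches attn_k/v [2048, 512]

assert q_dim == embedding_length
print(q_dim, kv_dim)  # 2048 512
```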
Tensor

Name                                Type   Shape
token_embd.weight                   Q8_0   [2048, 102400]

blk.0
blk.0.attn_k.weight                 Q8_0   [2048, 512]
blk.0.attn_k_norm.weight            F32    [64]
blk.0.attn_output.weight            Q8_0   [2048, 2048]
blk.0.attn_q.weight                 Q8_0   [2048, 2048]
blk.0.attn_q_norm.weight            F32    [64]
blk.0.attn_v.weight                 Q8_0   [2048, 512]
blk.0.ffn_down.weight               Q8_0   [4096, 2048]
blk.0.ffn_gate.weight               Q8_0   [2048, 4096]
blk.0.ffn_up.weight                 Q8_0   [2048, 4096]
blk.0.post_attention_norm.weight    F32    [2048]
blk.0.post_ffw_norm.weight          F32    [2048]

blk.1
blk.1.attn_k.weight                 Q8_0   [2048, 512]
blk.1.attn_k_norm.weight            F32    [64]
blk.1.attn_output.weight            Q8_0   [2048, 2048]
blk.1.attn_q.weight                 Q8_0   [2048, 2048]
blk.1.attn_q_norm.weight            F32    [64]
blk.1.attn_v.weight                 Q8_0   [2048, 512]
blk.1.ffn_down.weight               Q8_0   [4096, 2048]
blk.1.ffn_gate.weight               Q8_0   [2048, 4096]
blk.1.ffn_up.weight                 Q8_0   [2048, 4096]
blk.1.post_attention_norm.weight    F32    [2048]
blk.1.post_ffw_norm.weight          F32    [2048]

blk.2
blk.2.attn_k.weight                 Q8_0   [2048, 512]
blk.2.attn_k_norm.weight            F32    [64]
blk.2.attn_output.weight            Q8_0   [2048, 2048]
blk.2.attn_q.weight                 Q8_0   [2048, 2048]
blk.2.attn_q_norm.weight            F32    [64]
blk.2.attn_v.weight                 Q8_0   [2048, 512]
blk.2.ffn_down.weight               Q8_0   [4096, 2048]
blk.2.ffn_gate.weight               Q8_0   [2048, 4096]
blk.2.ffn_up.weight                 Q8_0   [2048, 4096]
blk.2.post_attention_norm.weight    F32    [2048]
blk.2.post_ffw_norm.weight          F32    [2048]

blk.3
blk.3.attn_k.weight                 Q8_0   [2048, 512]
blk.3.attn_k_norm.weight            F32    [64]
blk.3.attn_output.weight            Q8_0   [2048, 2048]
blk.3.attn_q.weight                 Q8_0   [2048, 2048]
blk.3.attn_q_norm.weight            F32    [64]
blk.3.attn_v.weight                 Q8_0   [2048, 512]
blk.3.ffn_down.weight               Q8_0   [4096, 2048]
blk.3.ffn_gate.weight               Q8_0   [2048, 4096]
blk.3.ffn_up.weight                 Q8_0   [2048, 4096]
blk.3.post_attention_norm.weight    F32    [2048]
blk.3.post_ffw_norm.weight          F32    [2048]

blk.4
blk.4.attn_k.weight                 Q8_0   [2048, 512]
blk.4.attn_k_norm.weight            F32    [64]
blk.4.attn_output.weight            Q8_0   [2048, 2048]
blk.4.attn_q.weight                 Q8_0   [2048, 2048]
blk.4.attn_q_norm.weight            F32    [64]
blk.4.attn_v.weight                 Q8_0   [2048, 512]
blk.4.ffn_down.weight               Q8_0   [4096, 2048]
blk.4.ffn_gate.weight               Q8_0   [2048, 4096]
blk.4.ffn_up.weight                 Q8_0   [2048, 4096]
blk.4.post_attention_norm.weight    F32    [2048]
blk.4.post_ffw_norm.weight          F32    [2048]

blk.5
blk.5.attn_k.weight                 Q8_0   [2048, 512]
blk.5.attn_k_norm.weight            F32    [64]
blk.5.attn_output.weight            Q8_0   [2048, 2048]
blk.5.attn_q.weight                 Q8_0   [2048, 2048]
blk.5.attn_q_norm.weight            F32    [64]
blk.5.attn_v.weight                 Q8_0   [2048, 512]
blk.5.ffn_down.weight               Q8_0   [4096, 2048]
blk.5.ffn_gate.weight               Q8_0   [2048, 4096]
blk.5.ffn_up.weight                 Q8_0   [2048, 4096]
blk.5.post_attention_norm.weight    F32    [2048]
blk.5.post_ffw_norm.weight          F32    [2048]

blk.6
blk.6.attn_k.weight                 Q8_0   [2048, 512]
blk.6.attn_k_norm.weight            F32    [64]
blk.6.attn_output.weight            Q8_0   [2048, 2048]
blk.6.attn_q.weight                 Q8_0   [2048, 2048]
blk.6.attn_q_norm.weight            F32    [64]
blk.6.attn_v.weight                 Q8_0   [2048, 512]
blk.6.ffn_down.weight               Q8_0   [4096, 2048]
blk.6.ffn_gate.weight               Q8_0   [2048, 4096]
blk.6.ffn_up.weight                 Q8_0   [2048, 4096]
blk.6.post_attention_norm.weight    F32    [2048]
blk.6.post_ffw_norm.weight          F32    [2048]

blk.7
blk.7.attn_k.weight                 Q8_0   [2048, 512]
blk.7.attn_k_norm.weight            F32    [64]
blk.7.attn_output.weight            Q8_0   [2048, 2048]
blk.7.attn_q.weight                 Q8_0   [2048, 2048]
blk.7.attn_q_norm.weight            F32    [64]
blk.7.attn_v.weight                 Q8_0   [2048, 512]
blk.7.ffn_down.weight               Q8_0   [4096, 2048]
blk.7.ffn_gate.weight               Q8_0   [2048, 4096]
blk.7.ffn_up.weight                 Q8_0   [2048, 4096]
blk.7.post_attention_norm.weight    F32    [2048]
blk.7.post_ffw_norm.weight          F32    [2048]

blk.8
blk.8.attn_k.weight                 Q8_0   [2048, 512]
blk.8.attn_k_norm.weight            F32    [64]
blk.8.attn_output.weight            Q8_0   [2048, 2048]
blk.8.attn_q.weight                 Q8_0   [2048, 2048]
blk.8.attn_q_norm.weight            F32    [64]
blk.8.attn_v.weight                 Q8_0   [2048, 512]
blk.8.ffn_down.weight               Q8_0   [4096, 2048]
blk.8.ffn_gate.weight               Q8_0   [2048, 4096]
blk.8.ffn_up.weight                 Q8_0   [2048, 4096]
blk.8.post_attention_norm.weight    F32    [2048]
blk.8.post_ffw_norm.weight          F32    [2048]

blk.9
blk.9.attn_k.weight                 Q8_0   [2048, 512]
blk.9.attn_k_norm.weight            F32    [64]
blk.9.attn_output.weight            Q8_0   [2048, 2048]
blk.9.attn_q.weight                 Q8_0   [2048, 2048]
blk.9.attn_q_norm.weight            F32    [64]
blk.9.attn_v.weight                 Q8_0   [2048, 512]
blk.9.ffn_down.weight               Q8_0   [4096, 2048]
blk.9.ffn_gate.weight               Q8_0   [2048, 4096]
blk.9.ffn_up.weight                 Q8_0   [2048, 4096]
blk.9.post_attention_norm.weight    F32    [2048]
blk.9.post_ffw_norm.weight          F32    [2048]

blk.10
blk.10.attn_k.weight                Q8_0   [2048, 512]
blk.10.attn_k_norm.weight           F32    [64]
blk.10.attn_output.weight           Q8_0   [2048, 2048]
blk.10.attn_q.weight                Q8_0   [2048, 2048]
blk.10.attn_q_norm.weight           F32    [64]
blk.10.attn_v.weight                Q8_0   [2048, 512]
blk.10.ffn_down.weight              Q8_0   [4096, 2048]
blk.10.ffn_gate.weight              Q8_0   [2048, 4096]
blk.10.ffn_up.weight                Q8_0   [2048, 4096]
blk.10.post_attention_norm.weight   F32    [2048]
blk.10.post_ffw_norm.weight         F32    [2048]

blk.11
blk.11.attn_k.weight                Q8_0   [2048, 512]
blk.11.attn_k_norm.weight           F32    [64]
blk.11.attn_output.weight           Q8_0   [2048, 2048]
blk.11.attn_q.weight                Q8_0   [2048, 2048]
blk.11.attn_q_norm.weight           F32    [64]
blk.11.attn_v.weight                Q8_0   [2048, 512]
blk.11.ffn_down.weight              Q8_0   [4096, 2048]
blk.11.ffn_gate.weight              Q8_0   [2048, 4096]
blk.11.ffn_up.weight                Q8_0   [2048, 4096]
blk.11.post_attention_norm.weight   F32    [2048]
blk.11.post_ffw_norm.weight         F32    [2048]

blk.12
blk.12.attn_k.weight                Q8_0   [2048, 512]
blk.12.attn_k_norm.weight           F32    [64]
blk.12.attn_output.weight           Q8_0   [2048, 2048]
blk.12.attn_q.weight                Q8_0   [2048, 2048]
blk.12.attn_q_norm.weight           F32    [64]
blk.12.attn_v.weight                Q8_0   [2048, 512]
blk.12.ffn_down.weight              Q8_0   [4096, 2048]
blk.12.ffn_gate.weight              Q8_0   [2048, 4096]
blk.12.ffn_up.weight                Q8_0   [2048, 4096]
blk.12.post_attention_norm.weight   F32    [2048]
blk.12.post_ffw_norm.weight         F32    [2048]

blk.13
blk.13.attn_k.weight                Q8_0   [2048, 512]
blk.13.attn_k_norm.weight           F32    [64]
blk.13.attn_output.weight           Q8_0   [2048, 2048]
blk.13.attn_q.weight                Q8_0   [2048, 2048]
blk.13.attn_q_norm.weight           F32    [64]
blk.13.attn_v.weight                Q8_0   [2048, 512]
blk.13.ffn_down.weight              Q8_0   [4096, 2048]
blk.13.ffn_gate.weight              Q8_0   [2048, 4096]
blk.13.ffn_up.weight                Q8_0   [2048, 4096]
blk.13.post_attention_norm.weight   F32    [2048]
blk.13.post_ffw_norm.weight         F32    [2048]

blk.14
blk.14.attn_k.weight                Q8_0   [2048, 512]
blk.14.attn_k_norm.weight           F32    [64]
blk.14.attn_output.weight           Q8_0   [2048, 2048]
blk.14.attn_q.weight                Q8_0   [2048, 2048]
blk.14.attn_q_norm.weight           F32    [64]
blk.14.attn_v.weight                Q8_0   [2048, 512]
blk.14.ffn_down.weight              Q8_0   [4096, 2048]
blk.14.ffn_gate.weight              Q8_0   [2048, 4096]
blk.14.ffn_up.weight                Q8_0   [2048, 4096]
blk.14.post_attention_norm.weight   F32    [2048]
blk.14.post_ffw_norm.weight         F32    [2048]

blk.15
blk.15.attn_k.weight                Q8_0   [2048, 512]
blk.15.attn_k_norm.weight           F32    [64]
blk.15.attn_output.weight           Q8_0   [2048, 2048]
blk.15.attn_q.weight                Q8_0   [2048, 2048]
blk.15.attn_q_norm.weight           F32    [64]
blk.15.attn_v.weight                Q8_0   [2048, 512]
blk.15.ffn_down.weight              Q8_0   [4096, 2048]
blk.15.ffn_gate.weight              Q8_0   [2048, 4096]
blk.15.ffn_up.weight                Q8_0   [2048, 4096]
blk.15.post_attention_norm.weight   F32    [2048]
blk.15.post_ffw_norm.weight         F32    [2048]

blk.16
blk.16.attn_k.weight                Q8_0   [2048, 512]
blk.16.attn_k_norm.weight           F32    [64]
blk.16.attn_output.weight           Q8_0   [2048, 2048]
blk.16.attn_q.weight                Q8_0   [2048, 2048]
blk.16.attn_q_norm.weight           F32    [64]
blk.16.attn_v.weight                Q8_0   [2048, 512]
blk.16.ffn_down.weight              Q8_0   [4096, 2048]
blk.16.ffn_gate.weight              Q8_0   [2048, 4096]
blk.16.ffn_up.weight                Q8_0   [2048, 4096]
blk.16.post_attention_norm.weight   F32    [2048]
blk.16.post_ffw_norm.weight         F32    [2048]

blk.17
blk.17.attn_k.weight                Q8_0   [2048, 512]
blk.17.attn_k_norm.weight           F32    [64]
blk.17.attn_output.weight           Q8_0   [2048, 2048]
blk.17.attn_q.weight                Q8_0   [2048, 2048]
blk.17.attn_q_norm.weight           F32    [64]
blk.17.attn_v.weight                Q8_0   [2048, 512]
blk.17.ffn_down.weight              Q8_0   [4096, 2048]
blk.17.ffn_gate.weight              Q8_0   [2048, 4096]
blk.17.ffn_up.weight                Q8_0   [2048, 4096]
blk.17.post_attention_norm.weight   F32    [2048]
blk.17.post_ffw_norm.weight         F32    [2048]

blk.18
blk.18.attn_k.weight                Q8_0   [2048, 512]
blk.18.attn_k_norm.weight           F32    [64]
blk.18.attn_output.weight           Q8_0   [2048, 2048]
blk.18.attn_q.weight                Q8_0   [2048, 2048]
blk.18.attn_q_norm.weight           F32    [64]
blk.18.attn_v.weight                Q8_0   [2048, 512]
blk.18.ffn_down.weight              Q8_0   [4096, 2048]
blk.18.ffn_gate.weight              Q8_0   [2048, 4096]
blk.18.ffn_up.weight                Q8_0   [2048, 4096]
blk.18.post_attention_norm.weight   F32    [2048]
blk.18.post_ffw_norm.weight         F32    [2048]

blk.19
blk.19.attn_k.weight                Q8_0   [2048, 512]
blk.19.attn_k_norm.weight           F32    [64]
blk.19.attn_output.weight           Q8_0   [2048, 2048]
blk.19.attn_q.weight                Q8_0   [2048, 2048]
blk.19.attn_q_norm.weight           F32    [64]
blk.19.attn_v.weight                Q8_0   [2048, 512]
blk.19.ffn_down.weight              Q8_0   [4096, 2048]
blk.19.ffn_gate.weight              Q8_0   [2048, 4096]
blk.19.ffn_up.weight                Q8_0   [2048, 4096]
blk.19.post_attention_norm.weight   F32    [2048]
blk.19.post_ffw_norm.weight         F32    [2048]

blk.20
blk.20.attn_k.weight                Q8_0   [2048, 512]
blk.20.attn_k_norm.weight           F32    [64]
blk.20.attn_output.weight           Q8_0   [2048, 2048]
blk.20.attn_q.weight                Q8_0   [2048, 2048]
blk.20.attn_q_norm.weight           F32    [64]
blk.20.attn_v.weight                Q8_0   [2048, 512]
blk.20.ffn_down.weight              Q8_0   [4096, 2048]
blk.20.ffn_gate.weight              Q8_0   [2048, 4096]
blk.20.ffn_up.weight                Q8_0   [2048, 4096]
blk.20.post_attention_norm.weight   F32    [2048]
blk.20.post_ffw_norm.weight         F32    [2048]

blk.21
blk.21.attn_k.weight                Q8_0   [2048, 512]
blk.21.attn_k_norm.weight           F32    [64]
blk.21.attn_output.weight           Q8_0   [2048, 2048]
blk.21.attn_q.weight                Q8_0   [2048, 2048]
blk.21.attn_q_norm.weight           F32    [64]
blk.21.attn_v.weight                Q8_0   [2048, 512]
blk.21.ffn_down.weight              Q8_0   [4096, 2048]
blk.21.ffn_gate.weight              Q8_0   [2048, 4096]
blk.21.ffn_up.weight                Q8_0   [2048, 4096]
blk.21.post_attention_norm.weight   F32    [2048]
blk.21.post_ffw_norm.weight         F32    [2048]

blk.22
blk.22.attn_k.weight                Q8_0   [2048, 512]
blk.22.attn_k_norm.weight           F32    [64]
blk.22.attn_output.weight           Q8_0   [2048, 2048]
blk.22.attn_q.weight                Q8_0   [2048, 2048]
blk.22.attn_q_norm.weight           F32    [64]
blk.22.attn_v.weight                Q8_0   [2048, 512]
blk.22.ffn_down.weight              Q8_0   [4096, 2048]
blk.22.ffn_gate.weight              Q8_0   [2048, 4096]
blk.22.ffn_up.weight                Q8_0   [2048, 4096]
blk.22.post_attention_norm.weight   F32    [2048]
blk.22.post_ffw_norm.weight         F32    [2048]

blk.23
blk.23.attn_k.weight                Q8_0   [2048, 512]
blk.23.attn_k_norm.weight           F32    [64]
blk.23.attn_output.weight           Q8_0   [2048, 2048]
blk.23.attn_q.weight                Q8_0   [2048, 2048]
blk.23.attn_q_norm.weight           F32    [64]
blk.23.attn_v.weight                Q8_0   [2048, 512]
blk.23.ffn_down.weight              Q8_0   [4096, 2048]
blk.23.ffn_gate.weight              Q8_0   [2048, 4096]
blk.23.ffn_up.weight                Q8_0   [2048, 4096]
blk.23.post_attention_norm.weight   F32    [2048]
blk.23.post_ffw_norm.weight         F32    [2048]

blk.24
blk.24.attn_k.weight                Q8_0   [2048, 512]
blk.24.attn_k_norm.weight           F32    [64]
blk.24.attn_output.weight           Q8_0   [2048, 2048]
blk.24.attn_q.weight                Q8_0   [2048, 2048]
blk.24.attn_q_norm.weight           F32    [64]
blk.24.attn_v.weight                Q8_0   [2048, 512]
blk.24.ffn_down.weight              Q8_0   [4096, 2048]
blk.24.ffn_gate.weight              Q8_0   [2048, 4096]
blk.24.ffn_up.weight                Q8_0   [2048, 4096]
blk.24.post_attention_norm.weight   F32    [2048]
blk.24.post_ffw_norm.weight         F32    [2048]

blk.25
blk.25.attn_k.weight                Q8_0   [2048, 512]
blk.25.attn_k_norm.weight           F32    [64]
blk.25.attn_output.weight           Q8_0   [2048, 2048]
blk.25.attn_q.weight                Q8_0   [2048, 2048]
blk.25.attn_q_norm.weight           F32    [64]
blk.25.attn_v.weight                Q8_0   [2048, 512]
blk.25.ffn_down.weight              Q8_0   [4096, 2048]
blk.25.ffn_gate.weight              Q8_0   [2048, 4096]
blk.25.ffn_up.weight                Q8_0   [2048, 4096]
blk.25.post_attention_norm.weight   F32    [2048]
blk.25.post_ffw_norm.weight         F32    [2048]

blk.26
blk.26.attn_k.weight                Q8_0   [2048, 512]
blk.26.attn_k_norm.weight           F32    [64]
blk.26.attn_output.weight           Q8_0   [2048, 2048]
blk.26.attn_q.weight                Q8_0   [2048, 2048]
blk.26.attn_q_norm.weight           F32    [64]
blk.26.attn_v.weight                Q8_0   [2048, 512]
blk.26.ffn_down.weight              Q8_0   [4096, 2048]
blk.26.ffn_gate.weight              Q8_0   [2048, 4096]
blk.26.ffn_up.weight                Q8_0   [2048, 4096]
blk.26.post_attention_norm.weight   F32    [2048]
blk.26.post_ffw_norm.weight         F32    [2048]

blk.27
blk.27.attn_k.weight                Q8_0   [2048, 512]
blk.27.attn_k_norm.weight           F32    [64]
blk.27.attn_output.weight           Q8_0   [2048, 2048]
blk.27.attn_q.weight                Q8_0   [2048, 2048]
blk.27.attn_q_norm.weight           F32    [64]
blk.27.attn_v.weight                Q8_0   [2048, 512]
blk.27.ffn_down.weight              Q8_0   [4096, 2048]
blk.27.ffn_gate.weight              Q8_0   [2048, 4096]
blk.27.ffn_up.weight                Q8_0   [2048, 4096]
blk.27.post_attention_norm.weight   F32    [2048]
blk.27.post_ffw_norm.weight         F32    [2048]

blk.28
blk.28.attn_k.weight                Q8_0   [2048, 512]
blk.28.attn_k_norm.weight           F32    [64]
blk.28.attn_output.weight           Q8_0   [2048, 2048]
blk.28.attn_q.weight                Q8_0   [2048, 2048]
blk.28.attn_q_norm.weight           F32    [64]
blk.28.attn_v.weight                Q8_0   [2048, 512]
blk.28.ffn_down.weight              Q8_0   [4096, 2048]
blk.28.ffn_gate.weight              Q8_0   [2048, 4096]
blk.28.ffn_up.weight                Q8_0   [2048, 4096]
blk.28.post_attention_norm.weight   F32    [2048]
blk.28.post_ffw_norm.weight         F32    [2048]

blk.29
blk.29.attn_k.weight                Q8_0   [2048, 512]
blk.29.attn_k_norm.weight           F32    [64]
blk.29.attn_output.weight           Q8_0   [2048, 2048]
blk.29.attn_q.weight                Q8_0   [2048, 2048]
blk.29.attn_q_norm.weight           F32    [64]
blk.29.attn_v.weight                Q8_0   [2048, 512]
blk.29.ffn_down.weight              Q8_0   [4096, 2048]
blk.29.ffn_gate.weight              Q8_0   [2048, 4096]
blk.29.ffn_up.weight                Q8_0   [2048, 4096]
blk.29.post_attention_norm.weight   F32    [2048]
blk.29.post_ffw_norm.weight         F32    [2048]

rope_freqs.weight                   F32    [32]
output_norm.weight                  F32    [2048]
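As a sanity check on the "1.2b" tag and the 1.4GB file size, the shapes above are enough for a back-of-the-envelope parameter count. There is no separate output.weight tensor, which suggests the output head is tied to token_embd.weight; the sketch below also assumes Q8_0's ~8.5 bits per weight dominates the file size (the F32 norm tensors are negligible):

```python
# Back-of-the-envelope totals from the tensor shapes listed above.
vocab, d, ffn, blocks = 102400, 2048, 4096, 30

embed = d * vocab  # token_embd.weight, apparently tied as the output head
per_block = (
    d * d             # attn_q.weight       [2048, 2048]
    + d * 512         # attn_k.weight       [2048, 512]
    + d * 512         # attn_v.weight       [2048, 512]
    + d * d           # attn_output.weight  [2048, 2048]
    + 2 * d * ffn     # ffn_gate + ffn_up   [2048, 4096] each
    + ffn * d         # ffn_down            [4096, 2048]
    + 2 * 64 + 2 * d  # q/k norms and post-attention/ffw norms
)
total = embed + blocks * per_block + 32 + d  # + rope_freqs + output_norm

print(f"{total / 1e9:.2f}B parameters")   # ~1.28B -> the "1.2b" tag
print(f"{total * 8.5 / 8 / 1e9:.2f} GB")  # ~1.36 GB, close to the 1.4GB listed
```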