DiamondGotCat/Zeta-1:f16

26 Downloads · Updated 3 months ago
Zeta-1 is a lightweight 405M-parameter LLM for conversational tasks.
Zeta-1:f16

model · 4492fe16b4e6 · 813MB
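To try the tag locally, a minimal sketch using the official `ollama` Python client is shown below. It assumes a running Ollama server on the default port and that the `DiamondGotCat/Zeta-1:f16` tag resolves as listed above; the prompt text is illustrative only.

```python
# Minimal sketch: pull and chat with Zeta-1:f16 via the ollama Python client.
# Assumes `pip install ollama` and a local Ollama server (default port 11434).
import ollama

ollama.pull("DiamondGotCat/Zeta-1:f16")  # fetches the 813MB f16 blob if not cached

response = ollama.chat(
    model="DiamondGotCat/Zeta-1:f16",
    messages=[{"role": "user", "content": "Introduce yourself in one sentence."}],
)
print(response["message"]["content"])
```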
Metadata
general.architecture                    gptneox
general.file_type                       F16
gptneox.attention.head_count            16
gptneox.attention.layer_norm_epsilon    1e-05
gptneox.block_count                     24
gptneox.context_length                  2048
gptneox.embedding_length                1024
gptneox.feed_forward_length             4096
gptneox.rope.dimension_count            16
gptneox.use_parallel_residual           true
tokenizer.ggml.add_bos_token            false
tokenizer.ggml.add_eos_token            false
tokenizer.ggml.bos_token_id             0
tokenizer.ggml.eos_token_id             0
tokenizer.ggml.merges                   [Ġ Ġ, Ġ t, Ġ a, h e, i n, ...]
tokenizer.ggml.model                    gpt2
tokenizer.ggml.padding_token_id         0
tokenizer.ggml.pre                      olmo
tokenizer.ggml.token_type               [3, 3, 1, 1, 1, ...]
tokenizer.ggml.tokens                   [<|endoftext|>, <|padding|>, !, ", #, ...]
tokenizer.ggml.unknown_token_id         0
mradermacher.convert_type               hf
mradermacher.quantize_version           2
mradermacher.quantized_at               2025-04-01T16:05:14+02:00
mradermacher.quantized_by               mradermacher
mradermacher.quantized_on               leia
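The architecture keys above are enough to estimate serving memory. As a sketch (my arithmetic, not a figure from the card), the full-context KV cache for standard multi-head attention with an f16 cache is 2 (K and V) × block_count × context_length × embedding_length × 2 bytes:

```python
# Sketch: estimate full-context KV-cache size from the metadata above.
# Assumes standard MHA (no grouped-query attention is indicated) and an f16 cache.
block_count = 24         # gptneox.block_count
context_length = 2048    # gptneox.context_length
embedding_length = 1024  # gptneox.embedding_length
bytes_per_value = 2      # f16

kv_bytes = 2 * block_count * context_length * embedding_length * bytes_per_value
print(f"KV cache at full context: {kv_bytes / 2**20:.0f} MiB")  # -> 192 MiB
```

Together with the 813MB of f16 weights, that puts a full-context session at roughly 1GB before runtime overhead.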
Tensor

Name                      Type  Shape
token_embd.weight         F16   [1024, 50304]

blk.0
blk.0.attn_norm.bias      F32   [1024]
blk.0.attn_norm.weight    F32   [1024]
blk.0.attn_output.bias    F32   [1024]
blk.0.attn_output.weight  F16   [1024, 1024]
blk.0.attn_qkv.bias       F32   [3072]
blk.0.attn_qkv.weight     F16   [1024, 3072]
blk.0.ffn_down.bias       F32   [1024]
blk.0.ffn_down.weight     F16   [4096, 1024]
blk.0.ffn_norm.bias       F32   [1024]
blk.0.ffn_norm.weight     F32   [1024]
blk.0.ffn_up.bias         F32   [4096]
blk.0.ffn_up.weight       F16   [1024, 4096]
blk.1 – blk.23
Identical layout to blk.0: each of the remaining 23 blocks carries the same twelve tensors (attn_norm, attn_output, attn_qkv, ffn_down, ffn_norm, ffn_up, weights and biases) with the same types and shapes as listed above.
output.weight             F16   [1024, 50304]
output_norm.bias          F32   [1024]
output_norm.weight        F32   [1024]
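The shapes above are enough to check the advertised 405M parameter count and the 813MB file size. A sketch of that arithmetic (mine, derived only from the table):

```python
# Sketch: reconstruct the parameter count from the tensor shapes listed above.
embd, vocab, ffn, blocks = 1024, 50304, 4096, 24

per_block = (
    2 * embd                        # attn_norm weight + bias
    + embd * embd + embd            # attn_output weight + bias
    + embd * 3 * embd + 3 * embd    # attn_qkv weight + bias
    + ffn * embd + embd             # ffn_down weight + bias
    + 2 * embd                      # ffn_norm weight + bias
    + embd * ffn + ffn              # ffn_up weight + bias
)
total = embd * vocab + blocks * per_block + embd * vocab + 2 * embd
print(f"{total:,} parameters")              # 405,334,016 -> the "405M" in the card
print(f"~{total * 2 / 1e6:.0f} MB at f16")  # ~811 MB, consistent with the 813MB blob
```

Note that the embedding and output projections are untied (both F16 at [1024, 50304]), and together they account for roughly a quarter of the total parameters.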