lennyerik/zeta:latest

469 Downloads · Updated 7 months ago
An (unofficial) Ollama redistribution of Zeta, the edit-prediction model from the Zed editor team.
zeta:latest

model    3633ab75970d · 15GB
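
Once pulled (ollama pull lennyerik/zeta), the model can be queried through Ollama's local HTTP API like any other model. The snippet below is a minimal sketch, assuming a default Ollama install listening on localhost:11434; the prompt is only a placeholder, since the prompt format Zeta actually expects is defined by the Zed editor integration and is not documented on this page.

```python
import requests

# Minimal sketch: query a locally pulled copy of lennyerik/zeta through
# Ollama's /api/generate endpoint (default install on port 11434 assumed).
OLLAMA_URL = "http://localhost:11434/api/generate"

payload = {
    "model": "lennyerik/zeta:latest",
    "prompt": "def fibonacci(n):",  # placeholder prompt, not Zeta's real edit-prediction format
    "stream": False,                # return one JSON object instead of a token stream
}

resp = requests.post(OLLAMA_URL, json=payload, timeout=120)
resp.raise_for_status()
print(resp.json()["response"])
```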
Metadata

general.architecture                       qwen2
general.file_type                          F16
qwen2.attention.head_count                 28
qwen2.attention.head_count_kv              4
qwen2.attention.layer_norm_rms_epsilon     1e-06
qwen2.block_count                          28
qwen2.context_length                       32768
qwen2.embedding_length                     3584
qwen2.feed_forward_length                  18944
qwen2.rope.freq_base                       1e+06
tokenizer.ggml.add_eos_token               false
tokenizer.ggml.add_padding_token           false
tokenizer.ggml.eos_token_id                151643
tokenizer.ggml.merges                      [Ġ Ġ, ĠĠ ĠĠ, i n, Ġ t, ĠĠĠĠ ĠĠĠĠ, ...]
tokenizer.ggml.model                       gpt2
tokenizer.ggml.padding_token_id            151665
tokenizer.ggml.pre                         qwen2
tokenizer.ggml.scores                      [0, 1, 2, 3, 4, ...]
tokenizer.ggml.token_type                  [1, 1, 1, 1, 1, ...]
tokenizer.ggml.tokens                      [!, ", #, $, %, ...]
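
The attention dimensions above also fix the size of the KV cache at inference time: with head_count_kv = 4 heads of size embedding_length / head_count = 3584 / 28 = 128, each of the 28 layers stores 2 × 4 × 128 F16 values per token. A rough back-of-the-envelope sketch, derived from the metadata rather than reported anywhere on this page:

```python
# KV-cache size estimated from the metadata above, assuming an F16 cache.
n_layers      = 28          # qwen2.block_count
n_kv_heads    = 4           # qwen2.attention.head_count_kv
head_dim      = 3584 // 28  # embedding_length / head_count = 128
ctx_len       = 32768       # qwen2.context_length
bytes_per_val = 2           # F16

per_token = 2 * n_kv_heads * head_dim * n_layers * bytes_per_val  # K and V for every layer
total     = per_token * ctx_len

print(f"{per_token / 1024:.0f} KiB per token")            # ~56 KiB
print(f"{total / 2**30:.2f} GiB at full context length")  # ~1.75 GiB
```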
Tensor

Name                        Type  Shape
token_embd.weight           F16   [3584, 152064]

blk.0
blk.0.attn_k.bias           F32   [512]
blk.0.attn_k.weight         F16   [3584, 512]
blk.0.attn_norm.weight      F32   [3584]
blk.0.attn_output.weight    F16   [3584, 3584]
blk.0.attn_q.bias           F32   [3584]
blk.0.attn_q.weight         F16   [3584, 3584]
blk.0.attn_v.bias           F32   [512]
blk.0.attn_v.weight         F16   [3584, 512]
blk.0.ffn_down.weight       F16   [18944, 3584]
blk.0.ffn_gate.weight       F16   [3584, 18944]
blk.0.ffn_norm.weight       F32   [3584]
blk.0.ffn_up.weight         F16   [3584, 18944]

blk.1
blk.1.attn_k.bias           F32   [512]
blk.1.attn_k.weight         F16   [3584, 512]
blk.1.attn_norm.weight      F32   [3584]
blk.1.attn_output.weight    F16   [3584, 3584]
blk.1.attn_q.bias           F32   [3584]
blk.1.attn_q.weight         F16   [3584, 3584]
blk.1.attn_v.bias           F32   [512]
blk.1.attn_v.weight         F16   [3584, 512]
blk.1.ffn_down.weight       F16   [18944, 3584]
blk.1.ffn_gate.weight       F16   [3584, 18944]
blk.1.ffn_norm.weight       F32   [3584]
blk.1.ffn_up.weight         F16   [3584, 18944]

blk.2
blk.2.attn_k.bias           F32   [512]
blk.2.attn_k.weight         F16   [3584, 512]
blk.2.attn_norm.weight      F32   [3584]
blk.2.attn_output.weight    F16   [3584, 3584]
blk.2.attn_q.bias           F32   [3584]
blk.2.attn_q.weight         F16   [3584, 3584]
blk.2.attn_v.bias           F32   [512]
blk.2.attn_v.weight         F16   [3584, 512]
blk.2.ffn_down.weight       F16   [18944, 3584]
blk.2.ffn_gate.weight       F16   [3584, 18944]
blk.2.ffn_norm.weight       F32   [3584]
blk.2.ffn_up.weight         F16   [3584, 18944]

blk.3
blk.3.attn_k.bias           F32   [512]
blk.3.attn_k.weight         F16   [3584, 512]
blk.3.attn_norm.weight      F32   [3584]
blk.3.attn_output.weight    F16   [3584, 3584]
blk.3.attn_q.bias           F32   [3584]
blk.3.attn_q.weight         F16   [3584, 3584]
blk.3.attn_v.bias           F32   [512]
blk.3.attn_v.weight         F16   [3584, 512]
blk.3.ffn_down.weight       F16   [18944, 3584]
blk.3.ffn_gate.weight       F16   [3584, 18944]
blk.3.ffn_norm.weight       F32   [3584]
blk.3.ffn_up.weight         F16   [3584, 18944]

blk.4
blk.4.attn_k.bias           F32   [512]
blk.4.attn_k.weight         F16   [3584, 512]
blk.4.attn_norm.weight      F32   [3584]
blk.4.attn_output.weight    F16   [3584, 3584]
blk.4.attn_q.bias           F32   [3584]
blk.4.attn_q.weight         F16   [3584, 3584]
blk.4.attn_v.bias           F32   [512]
blk.4.attn_v.weight         F16   [3584, 512]
blk.4.ffn_down.weight       F16   [18944, 3584]
blk.4.ffn_gate.weight       F16   [3584, 18944]
blk.4.ffn_norm.weight       F32   [3584]
blk.4.ffn_up.weight         F16   [3584, 18944]

blk.5
blk.5.attn_k.bias           F32   [512]
blk.5.attn_k.weight         F16   [3584, 512]
blk.5.attn_norm.weight      F32   [3584]
blk.5.attn_output.weight    F16   [3584, 3584]
blk.5.attn_q.bias           F32   [3584]
blk.5.attn_q.weight         F16   [3584, 3584]
blk.5.attn_v.bias           F32   [512]
blk.5.attn_v.weight         F16   [3584, 512]
blk.5.ffn_down.weight       F16   [18944, 3584]
blk.5.ffn_gate.weight       F16   [3584, 18944]
blk.5.ffn_norm.weight       F32   [3584]
blk.5.ffn_up.weight         F16   [3584, 18944]

blk.6
blk.6.attn_k.bias           F32   [512]
blk.6.attn_k.weight         F16   [3584, 512]
blk.6.attn_norm.weight      F32   [3584]
blk.6.attn_output.weight    F16   [3584, 3584]
blk.6.attn_q.bias           F32   [3584]
blk.6.attn_q.weight         F16   [3584, 3584]
blk.6.attn_v.bias           F32   [512]
blk.6.attn_v.weight         F16   [3584, 512]
blk.6.ffn_down.weight       F16   [18944, 3584]
blk.6.ffn_gate.weight       F16   [3584, 18944]
blk.6.ffn_norm.weight       F32   [3584]
blk.6.ffn_up.weight         F16   [3584, 18944]

blk.7
blk.7.attn_k.bias           F32   [512]
blk.7.attn_k.weight         F16   [3584, 512]
blk.7.attn_norm.weight      F32   [3584]
blk.7.attn_output.weight    F16   [3584, 3584]
blk.7.attn_q.bias           F32   [3584]
blk.7.attn_q.weight         F16   [3584, 3584]
blk.7.attn_v.bias           F32   [512]
blk.7.attn_v.weight         F16   [3584, 512]
blk.7.ffn_down.weight       F16   [18944, 3584]
blk.7.ffn_gate.weight       F16   [3584, 18944]
blk.7.ffn_norm.weight       F32   [3584]
blk.7.ffn_up.weight         F16   [3584, 18944]

blk.8
blk.8.attn_k.bias           F32   [512]
blk.8.attn_k.weight         F16   [3584, 512]
blk.8.attn_norm.weight      F32   [3584]
blk.8.attn_output.weight    F16   [3584, 3584]
blk.8.attn_q.bias           F32   [3584]
blk.8.attn_q.weight         F16   [3584, 3584]
blk.8.attn_v.bias           F32   [512]
blk.8.attn_v.weight         F16   [3584, 512]
blk.8.ffn_down.weight       F16   [18944, 3584]
blk.8.ffn_gate.weight       F16   [3584, 18944]
blk.8.ffn_norm.weight       F32   [3584]
blk.8.ffn_up.weight         F16   [3584, 18944]

blk.9
blk.9.attn_k.bias           F32   [512]
blk.9.attn_k.weight         F16   [3584, 512]
blk.9.attn_norm.weight      F32   [3584]
blk.9.attn_output.weight    F16   [3584, 3584]
blk.9.attn_q.bias           F32   [3584]
blk.9.attn_q.weight         F16   [3584, 3584]
blk.9.attn_v.bias           F32   [512]
blk.9.attn_v.weight         F16   [3584, 512]
blk.9.ffn_down.weight       F16   [18944, 3584]
blk.9.ffn_gate.weight       F16   [3584, 18944]
blk.9.ffn_norm.weight       F32   [3584]
blk.9.ffn_up.weight         F16   [3584, 18944]

blk.10
blk.10.attn_k.bias          F32   [512]
blk.10.attn_k.weight        F16   [3584, 512]
blk.10.attn_norm.weight     F32   [3584]
blk.10.attn_output.weight   F16   [3584, 3584]
blk.10.attn_q.bias          F32   [3584]
blk.10.attn_q.weight        F16   [3584, 3584]
blk.10.attn_v.bias          F32   [512]
blk.10.attn_v.weight        F16   [3584, 512]
blk.10.ffn_down.weight      F16   [18944, 3584]
blk.10.ffn_gate.weight      F16   [3584, 18944]
blk.10.ffn_norm.weight      F32   [3584]
blk.10.ffn_up.weight        F16   [3584, 18944]

blk.11
blk.11.attn_k.bias          F32   [512]
blk.11.attn_k.weight        F16   [3584, 512]
blk.11.attn_norm.weight     F32   [3584]
blk.11.attn_output.weight   F16   [3584, 3584]
blk.11.attn_q.bias          F32   [3584]
blk.11.attn_q.weight        F16   [3584, 3584]
blk.11.attn_v.bias          F32   [512]
blk.11.attn_v.weight        F16   [3584, 512]
blk.11.ffn_down.weight      F16   [18944, 3584]
blk.11.ffn_gate.weight      F16   [3584, 18944]
blk.11.ffn_norm.weight      F32   [3584]
blk.11.ffn_up.weight        F16   [3584, 18944]

blk.12
blk.12.attn_k.bias          F32   [512]
blk.12.attn_k.weight        F16   [3584, 512]
blk.12.attn_norm.weight     F32   [3584]
blk.12.attn_output.weight   F16   [3584, 3584]
blk.12.attn_q.bias          F32   [3584]
blk.12.attn_q.weight        F16   [3584, 3584]
blk.12.attn_v.bias          F32   [512]
blk.12.attn_v.weight        F16   [3584, 512]
blk.12.ffn_down.weight      F16   [18944, 3584]
blk.12.ffn_gate.weight      F16   [3584, 18944]
blk.12.ffn_norm.weight      F32   [3584]
blk.12.ffn_up.weight        F16   [3584, 18944]

blk.13
blk.13.attn_k.bias          F32   [512]
blk.13.attn_k.weight        F16   [3584, 512]
blk.13.attn_norm.weight     F32   [3584]
blk.13.attn_output.weight   F16   [3584, 3584]
blk.13.attn_q.bias          F32   [3584]
blk.13.attn_q.weight        F16   [3584, 3584]
blk.13.attn_v.bias          F32   [512]
blk.13.attn_v.weight        F16   [3584, 512]
blk.13.ffn_down.weight      F16   [18944, 3584]
blk.13.ffn_gate.weight      F16   [3584, 18944]
blk.13.ffn_norm.weight      F32   [3584]
blk.13.ffn_up.weight        F16   [3584, 18944]

blk.14
blk.14.attn_k.bias          F32   [512]
blk.14.attn_k.weight        F16   [3584, 512]
blk.14.attn_norm.weight     F32   [3584]
blk.14.attn_output.weight   F16   [3584, 3584]
blk.14.attn_q.bias          F32   [3584]
blk.14.attn_q.weight        F16   [3584, 3584]
blk.14.attn_v.bias          F32   [512]
blk.14.attn_v.weight        F16   [3584, 512]
blk.14.ffn_down.weight      F16   [18944, 3584]
blk.14.ffn_gate.weight      F16   [3584, 18944]
blk.14.ffn_norm.weight      F32   [3584]
blk.14.ffn_up.weight        F16   [3584, 18944]

blk.15
blk.15.attn_k.bias          F32   [512]
blk.15.attn_k.weight        F16   [3584, 512]
blk.15.attn_norm.weight     F32   [3584]
blk.15.attn_output.weight   F16   [3584, 3584]
blk.15.attn_q.bias          F32   [3584]
blk.15.attn_q.weight        F16   [3584, 3584]
blk.15.attn_v.bias          F32   [512]
blk.15.attn_v.weight        F16   [3584, 512]
blk.15.ffn_down.weight      F16   [18944, 3584]
blk.15.ffn_gate.weight      F16   [3584, 18944]
blk.15.ffn_norm.weight      F32   [3584]
blk.15.ffn_up.weight        F16   [3584, 18944]

blk.16
blk.16.attn_k.bias          F32   [512]
blk.16.attn_k.weight        F16   [3584, 512]
blk.16.attn_norm.weight     F32   [3584]
blk.16.attn_output.weight   F16   [3584, 3584]
blk.16.attn_q.bias          F32   [3584]
blk.16.attn_q.weight        F16   [3584, 3584]
blk.16.attn_v.bias          F32   [512]
blk.16.attn_v.weight        F16   [3584, 512]
blk.16.ffn_down.weight      F16   [18944, 3584]
blk.16.ffn_gate.weight      F16   [3584, 18944]
blk.16.ffn_norm.weight      F32   [3584]
blk.16.ffn_up.weight        F16   [3584, 18944]

blk.17
blk.17.attn_k.bias          F32   [512]
blk.17.attn_k.weight        F16   [3584, 512]
blk.17.attn_norm.weight     F32   [3584]
blk.17.attn_output.weight   F16   [3584, 3584]
blk.17.attn_q.bias          F32   [3584]
blk.17.attn_q.weight        F16   [3584, 3584]
blk.17.attn_v.bias          F32   [512]
blk.17.attn_v.weight        F16   [3584, 512]
blk.17.ffn_down.weight      F16   [18944, 3584]
blk.17.ffn_gate.weight      F16   [3584, 18944]
blk.17.ffn_norm.weight      F32   [3584]
blk.17.ffn_up.weight        F16   [3584, 18944]

blk.18
blk.18.attn_k.bias          F32   [512]
blk.18.attn_k.weight        F16   [3584, 512]
blk.18.attn_norm.weight     F32   [3584]
blk.18.attn_output.weight   F16   [3584, 3584]
blk.18.attn_q.bias          F32   [3584]
blk.18.attn_q.weight        F16   [3584, 3584]
blk.18.attn_v.bias          F32   [512]
blk.18.attn_v.weight        F16   [3584, 512]
blk.18.ffn_down.weight      F16   [18944, 3584]
blk.18.ffn_gate.weight      F16   [3584, 18944]
blk.18.ffn_norm.weight      F32   [3584]
blk.18.ffn_up.weight        F16   [3584, 18944]

blk.19
blk.19.attn_k.bias          F32   [512]
blk.19.attn_k.weight        F16   [3584, 512]
blk.19.attn_norm.weight     F32   [3584]
blk.19.attn_output.weight   F16   [3584, 3584]
blk.19.attn_q.bias          F32   [3584]
blk.19.attn_q.weight        F16   [3584, 3584]
blk.19.attn_v.bias          F32   [512]
blk.19.attn_v.weight        F16   [3584, 512]
blk.19.ffn_down.weight      F16   [18944, 3584]
blk.19.ffn_gate.weight      F16   [3584, 18944]
blk.19.ffn_norm.weight      F32   [3584]
blk.19.ffn_up.weight        F16   [3584, 18944]

blk.20
blk.20.attn_k.bias          F32   [512]
blk.20.attn_k.weight        F16   [3584, 512]
blk.20.attn_norm.weight     F32   [3584]
blk.20.attn_output.weight   F16   [3584, 3584]
blk.20.attn_q.bias          F32   [3584]
blk.20.attn_q.weight        F16   [3584, 3584]
blk.20.attn_v.bias          F32   [512]
blk.20.attn_v.weight        F16   [3584, 512]
blk.20.ffn_down.weight      F16   [18944, 3584]
blk.20.ffn_gate.weight      F16   [3584, 18944]
blk.20.ffn_norm.weight      F32   [3584]
blk.20.ffn_up.weight        F16   [3584, 18944]

blk.21
blk.21.attn_k.bias          F32   [512]
blk.21.attn_k.weight        F16   [3584, 512]
blk.21.attn_norm.weight     F32   [3584]
blk.21.attn_output.weight   F16   [3584, 3584]
blk.21.attn_q.bias          F32   [3584]
blk.21.attn_q.weight        F16   [3584, 3584]
blk.21.attn_v.bias          F32   [512]
blk.21.attn_v.weight        F16   [3584, 512]
blk.21.ffn_down.weight      F16   [18944, 3584]
blk.21.ffn_gate.weight      F16   [3584, 18944]
blk.21.ffn_norm.weight      F32   [3584]
blk.21.ffn_up.weight        F16   [3584, 18944]

blk.22
blk.22.attn_k.bias          F32   [512]
blk.22.attn_k.weight        F16   [3584, 512]
blk.22.attn_norm.weight     F32   [3584]
blk.22.attn_output.weight   F16   [3584, 3584]
blk.22.attn_q.bias          F32   [3584]
blk.22.attn_q.weight        F16   [3584, 3584]
blk.22.attn_v.bias          F32   [512]
blk.22.attn_v.weight        F16   [3584, 512]
blk.22.ffn_down.weight      F16   [18944, 3584]
blk.22.ffn_gate.weight      F16   [3584, 18944]
blk.22.ffn_norm.weight      F32   [3584]
blk.22.ffn_up.weight        F16   [3584, 18944]

blk.23
blk.23.attn_k.bias          F32   [512]
blk.23.attn_k.weight        F16   [3584, 512]
blk.23.attn_norm.weight     F32   [3584]
blk.23.attn_output.weight   F16   [3584, 3584]
blk.23.attn_q.bias          F32   [3584]
blk.23.attn_q.weight        F16   [3584, 3584]
blk.23.attn_v.bias          F32   [512]
blk.23.attn_v.weight        F16   [3584, 512]
blk.23.ffn_down.weight      F16   [18944, 3584]
blk.23.ffn_gate.weight      F16   [3584, 18944]
blk.23.ffn_norm.weight      F32   [3584]
blk.23.ffn_up.weight        F16   [3584, 18944]

blk.24
blk.24.attn_k.bias          F32   [512]
blk.24.attn_k.weight        F16   [3584, 512]
blk.24.attn_norm.weight     F32   [3584]
blk.24.attn_output.weight   F16   [3584, 3584]
blk.24.attn_q.bias          F32   [3584]
blk.24.attn_q.weight        F16   [3584, 3584]
blk.24.attn_v.bias          F32   [512]
blk.24.attn_v.weight        F16   [3584, 512]
blk.24.ffn_down.weight      F16   [18944, 3584]
blk.24.ffn_gate.weight      F16   [3584, 18944]
blk.24.ffn_norm.weight      F32   [3584]
blk.24.ffn_up.weight        F16   [3584, 18944]

blk.25
blk.25.attn_k.bias          F32   [512]
blk.25.attn_k.weight        F16   [3584, 512]
blk.25.attn_norm.weight     F32   [3584]
blk.25.attn_output.weight   F16   [3584, 3584]
blk.25.attn_q.bias          F32   [3584]
blk.25.attn_q.weight        F16   [3584, 3584]
blk.25.attn_v.bias          F32   [512]
blk.25.attn_v.weight        F16   [3584, 512]
blk.25.ffn_down.weight      F16   [18944, 3584]
blk.25.ffn_gate.weight      F16   [3584, 18944]
blk.25.ffn_norm.weight      F32   [3584]
blk.25.ffn_up.weight        F16   [3584, 18944]

blk.26
blk.26.attn_k.bias          F32   [512]
blk.26.attn_k.weight        F16   [3584, 512]
blk.26.attn_norm.weight     F32   [3584]
blk.26.attn_output.weight   F16   [3584, 3584]
blk.26.attn_q.bias          F32   [3584]
blk.26.attn_q.weight        F16   [3584, 3584]
blk.26.attn_v.bias          F32   [512]
blk.26.attn_v.weight        F16   [3584, 512]
blk.26.ffn_down.weight      F16   [18944, 3584]
blk.26.ffn_gate.weight      F16   [3584, 18944]
blk.26.ffn_norm.weight      F32   [3584]
blk.26.ffn_up.weight        F16   [3584, 18944]

blk.27
blk.27.attn_k.bias          F32   [512]
blk.27.attn_k.weight        F16   [3584, 512]
blk.27.attn_norm.weight     F32   [3584]
blk.27.attn_output.weight   F16   [3584, 3584]
blk.27.attn_q.bias          F32   [3584]
blk.27.attn_q.weight        F16   [3584, 3584]
blk.27.attn_v.bias          F32   [512]
blk.27.attn_v.weight        F16   [3584, 512]
blk.27.ffn_down.weight      F16   [18944, 3584]
blk.27.ffn_gate.weight      F16   [3584, 18944]
blk.27.ffn_norm.weight      F32   [3584]
blk.27.ffn_up.weight        F16   [3584, 18944]

output.weight               F16   [3584, 152064]
output_norm.weight          F32   [3584]
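
Summing the shapes above also accounts for the 15GB blob size. A small sketch of that arithmetic (shapes taken from the table; blk.0 through blk.27 share the same layout):

```python
# Parameter count reconstructed from the tensor shapes listed above.
d_model, d_ff, vocab, kv_dim, n_blocks = 3584, 18944, 152064, 512, 28

per_block = (
    2 * d_model * d_model    # attn_q.weight, attn_output.weight
    + 2 * d_model * kv_dim   # attn_k.weight, attn_v.weight
    + d_model + 2 * kv_dim   # attn_q/k/v biases
    + 2 * d_model            # attn_norm.weight, ffn_norm.weight
    + 3 * d_model * d_ff     # ffn_gate, ffn_up, ffn_down weights
)

# Embedding, output projection and final norm sit outside the repeated blocks.
total = n_blocks * per_block + 2 * d_model * vocab + d_model

print(f"{total / 1e9:.2f} B parameters")                  # ~7.62 B
print(f"{total * 2 / 1e9:.1f} GB at F16 (2 bytes each)")  # ~15.2 GB, matching the 15GB blob
```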