supergoatscriptguy/Pion-Small:latest
3 Downloads · Updated 2 months ago
Pion-Small:latest
model · 65fe4537e1a4 · 272MB
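This page follows the layout of an Ollama-style registry, so the 272MB F16 GGUF blob above can presumably be pulled and queried through the `ollama` Python client. A minimal sketch, assuming a local Ollama server is running and the tag `supergoatscriptguy/Pion-Small:latest` is reachable from it:

```python
# Minimal sketch: pull and query the model via the ollama Python client.
# Assumes the tag shown on this page resolves from your Ollama host.
import ollama

MODEL = "supergoatscriptguy/Pion-Small:latest"

ollama.pull(MODEL)  # downloads the ~272MB F16 GGUF blob listed above

reply = ollama.generate(model=MODEL, prompt="Hello!")
print(reply["response"])
```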
Metadata

general.architecture                      llama
general.file_type                         F16
llama.attention.head_count                10
llama.attention.head_count_kv             2
llama.attention.layer_norm_rms_epsilon    1e-06
llama.block_count                         16
llama.context_length                      2048
llama.embedding_length                    640
llama.feed_forward_length                 1792
llama.rope.dimension_count                64
llama.rope.freq_base                      10000
llama.vocab_size                          50258
tokenizer.ggml.add_bos_token              false
tokenizer.ggml.add_eos_token              false
tokenizer.ggml.add_unknown_token          false
tokenizer.ggml.bos_token_id               0
tokenizer.ggml.eos_token_id               0
tokenizer.ggml.merges                     [Ġ Ġ, Ġ t, Ġ a, i n, h e, ...]
tokenizer.ggml.model                      gpt2
tokenizer.ggml.pre                        default
tokenizer.ggml.scores                     [0, 1, 2, 3, 4, ...]
tokenizer.ggml.token_type                 [3, 3, 3, 1, 1, ...]
tokenizer.ggml.tokens                     [<|endoftext|>, <|pad|>, <|unk|>, !, ", ...]
tokenizer.ggml.unknown_token_id           0
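The metadata implies a model of roughly 135M parameters: 16 llama blocks with a 640-wide residual stream, a 1792-wide gated FFN, grouped-query attention (2 KV heads of dimension 64, i.e. 128-wide K/V projections), and untied 50258-token embedding and output matrices. A rough check in Python, using only the numbers above (the shapes are confirmed by the tensor table below):

```python
# Back-of-the-envelope parameter count from the metadata above.
d_model  = 640               # llama.embedding_length
n_layers = 16                # llama.block_count
d_ffn    = 1792              # llama.feed_forward_length
vocab    = 50258             # llama.vocab_size
head_dim = 64                # llama.rope.dimension_count
n_kv     = 2                 # llama.attention.head_count_kv
d_kv     = n_kv * head_dim   # 128, matches the [640, 128] k/v shapes below

per_block = (
    2 * d_model * d_model    # attn_q, attn_output
    + 2 * d_model * d_kv     # attn_k, attn_v (grouped-query attention)
    + 3 * d_model * d_ffn    # ffn_gate, ffn_up, ffn_down
    + 2 * d_model            # attn_norm, ffn_norm
)
total = n_layers * per_block + 2 * d_model * vocab + d_model  # + embeddings, lm head, final norm

print(f"{total / 1e6:.1f}M parameters")       # ~135.1M
print(f"~{total * 2 / 1e6:.0f} MB at F16")    # ~270 MB, consistent with the 272MB blob above
```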
Tensor

Name                        Type  Shape
token_embd.weight           F16   [640, 50258]

blk.0
blk.0.attn_k.weight         F16   [640, 128]
blk.0.attn_norm.weight      F32   [640]
blk.0.attn_output.weight    F16   [640, 640]
blk.0.attn_q.weight         F16   [640, 640]
blk.0.attn_v.weight         F16   [640, 128]
blk.0.ffn_down.weight       F16   [1792, 640]
blk.0.ffn_gate.weight       F16   [640, 1792]
blk.0.ffn_norm.weight       F32   [640]
blk.0.ffn_up.weight         F16   [640, 1792]

blk.1
blk.1.attn_k.weight         F16   [640, 128]
blk.1.attn_norm.weight      F32   [640]
blk.1.attn_output.weight    F16   [640, 640]
blk.1.attn_q.weight         F16   [640, 640]
blk.1.attn_v.weight         F16   [640, 128]
blk.1.ffn_down.weight       F16   [1792, 640]
blk.1.ffn_gate.weight       F16   [640, 1792]
blk.1.ffn_norm.weight       F32   [640]
blk.1.ffn_up.weight         F16   [640, 1792]

blk.2
blk.2.attn_k.weight         F16   [640, 128]
blk.2.attn_norm.weight      F32   [640]
blk.2.attn_output.weight    F16   [640, 640]
blk.2.attn_q.weight         F16   [640, 640]
blk.2.attn_v.weight         F16   [640, 128]
blk.2.ffn_down.weight       F16   [1792, 640]
blk.2.ffn_gate.weight       F16   [640, 1792]
blk.2.ffn_norm.weight       F32   [640]
blk.2.ffn_up.weight         F16   [640, 1792]

blk.3
blk.3.attn_k.weight         F16   [640, 128]
blk.3.attn_norm.weight      F32   [640]
blk.3.attn_output.weight    F16   [640, 640]
blk.3.attn_q.weight         F16   [640, 640]
blk.3.attn_v.weight         F16   [640, 128]
blk.3.ffn_down.weight       F16   [1792, 640]
blk.3.ffn_gate.weight       F16   [640, 1792]
blk.3.ffn_norm.weight       F32   [640]
blk.3.ffn_up.weight         F16   [640, 1792]

blk.4
blk.4.attn_k.weight         F16   [640, 128]
blk.4.attn_norm.weight      F32   [640]
blk.4.attn_output.weight    F16   [640, 640]
blk.4.attn_q.weight         F16   [640, 640]
blk.4.attn_v.weight         F16   [640, 128]
blk.4.ffn_down.weight       F16   [1792, 640]
blk.4.ffn_gate.weight       F16   [640, 1792]
blk.4.ffn_norm.weight       F32   [640]
blk.4.ffn_up.weight         F16   [640, 1792]

blk.5
blk.5.attn_k.weight         F16   [640, 128]
blk.5.attn_norm.weight      F32   [640]
blk.5.attn_output.weight    F16   [640, 640]
blk.5.attn_q.weight         F16   [640, 640]
blk.5.attn_v.weight         F16   [640, 128]
blk.5.ffn_down.weight       F16   [1792, 640]
blk.5.ffn_gate.weight       F16   [640, 1792]
blk.5.ffn_norm.weight       F32   [640]
blk.5.ffn_up.weight         F16   [640, 1792]

blk.6
blk.6.attn_k.weight         F16   [640, 128]
blk.6.attn_norm.weight      F32   [640]
blk.6.attn_output.weight    F16   [640, 640]
blk.6.attn_q.weight         F16   [640, 640]
blk.6.attn_v.weight         F16   [640, 128]
blk.6.ffn_down.weight       F16   [1792, 640]
blk.6.ffn_gate.weight       F16   [640, 1792]
blk.6.ffn_norm.weight       F32   [640]
blk.6.ffn_up.weight         F16   [640, 1792]

blk.7
blk.7.attn_k.weight         F16   [640, 128]
blk.7.attn_norm.weight      F32   [640]
blk.7.attn_output.weight    F16   [640, 640]
blk.7.attn_q.weight         F16   [640, 640]
blk.7.attn_v.weight         F16   [640, 128]
blk.7.ffn_down.weight       F16   [1792, 640]
blk.7.ffn_gate.weight       F16   [640, 1792]
blk.7.ffn_norm.weight       F32   [640]
blk.7.ffn_up.weight         F16   [640, 1792]

blk.8
blk.8.attn_k.weight         F16   [640, 128]
blk.8.attn_norm.weight      F32   [640]
blk.8.attn_output.weight    F16   [640, 640]
blk.8.attn_q.weight         F16   [640, 640]
blk.8.attn_v.weight         F16   [640, 128]
blk.8.ffn_down.weight       F16   [1792, 640]
blk.8.ffn_gate.weight       F16   [640, 1792]
blk.8.ffn_norm.weight       F32   [640]
blk.8.ffn_up.weight         F16   [640, 1792]

blk.9
blk.9.attn_k.weight         F16   [640, 128]
blk.9.attn_norm.weight      F32   [640]
blk.9.attn_output.weight    F16   [640, 640]
blk.9.attn_q.weight         F16   [640, 640]
blk.9.attn_v.weight         F16   [640, 128]
blk.9.ffn_down.weight       F16   [1792, 640]
blk.9.ffn_gate.weight       F16   [640, 1792]
blk.9.ffn_norm.weight       F32   [640]
blk.9.ffn_up.weight         F16   [640, 1792]

blk.10
blk.10.attn_k.weight        F16   [640, 128]
blk.10.attn_norm.weight     F32   [640]
blk.10.attn_output.weight   F16   [640, 640]
blk.10.attn_q.weight        F16   [640, 640]
blk.10.attn_v.weight        F16   [640, 128]
blk.10.ffn_down.weight      F16   [1792, 640]
blk.10.ffn_gate.weight      F16   [640, 1792]
blk.10.ffn_norm.weight      F32   [640]
blk.10.ffn_up.weight        F16   [640, 1792]

blk.11
blk.11.attn_k.weight        F16   [640, 128]
blk.11.attn_norm.weight     F32   [640]
blk.11.attn_output.weight   F16   [640, 640]
blk.11.attn_q.weight        F16   [640, 640]
blk.11.attn_v.weight        F16   [640, 128]
blk.11.ffn_down.weight      F16   [1792, 640]
blk.11.ffn_gate.weight      F16   [640, 1792]
blk.11.ffn_norm.weight      F32   [640]
blk.11.ffn_up.weight        F16   [640, 1792]

blk.12
blk.12.attn_k.weight        F16   [640, 128]
blk.12.attn_norm.weight     F32   [640]
blk.12.attn_output.weight   F16   [640, 640]
blk.12.attn_q.weight        F16   [640, 640]
blk.12.attn_v.weight        F16   [640, 128]
blk.12.ffn_down.weight      F16   [1792, 640]
blk.12.ffn_gate.weight      F16   [640, 1792]
blk.12.ffn_norm.weight      F32   [640]
blk.12.ffn_up.weight        F16   [640, 1792]

blk.13
blk.13.attn_k.weight        F16   [640, 128]
blk.13.attn_norm.weight     F32   [640]
blk.13.attn_output.weight   F16   [640, 640]
blk.13.attn_q.weight        F16   [640, 640]
blk.13.attn_v.weight        F16   [640, 128]
blk.13.ffn_down.weight      F16   [1792, 640]
blk.13.ffn_gate.weight      F16   [640, 1792]
blk.13.ffn_norm.weight      F32   [640]
blk.13.ffn_up.weight        F16   [640, 1792]

blk.14
blk.14.attn_k.weight        F16   [640, 128]
blk.14.attn_norm.weight     F32   [640]
blk.14.attn_output.weight   F16   [640, 640]
blk.14.attn_q.weight        F16   [640, 640]
blk.14.attn_v.weight        F16   [640, 128]
blk.14.ffn_down.weight      F16   [1792, 640]
blk.14.ffn_gate.weight      F16   [640, 1792]
blk.14.ffn_norm.weight      F32   [640]
blk.14.ffn_up.weight        F16   [640, 1792]

blk.15
blk.15.attn_k.weight        F16   [640, 128]
blk.15.attn_norm.weight     F32   [640]
blk.15.attn_output.weight   F16   [640, 640]
blk.15.attn_q.weight        F16   [640, 640]
blk.15.attn_v.weight        F16   [640, 128]
blk.15.ffn_down.weight      F16   [1792, 640]
blk.15.ffn_gate.weight      F16   [640, 1792]
blk.15.ffn_norm.weight      F32   [640]
blk.15.ffn_up.weight        F16   [640, 1792]

output.weight               F16   [640, 50258]
output_norm.weight          F32   [640]
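The metadata and tensor listing above can be reproduced locally from the GGUF file itself. A sketch using the `gguf` Python package from the llama.cpp project, assuming the 65fe4537e1a4 blob has been saved as `model.gguf`:

```python
# Sketch: dump GGUF metadata keys and tensor shapes locally.
# Assumes the blob above has been saved as "model.gguf" and the `gguf`
# package (pip install gguf) is available.
from gguf import GGUFReader

reader = GGUFReader("model.gguf")

# Metadata keys such as general.architecture, llama.block_count, ...
for key in reader.fields:
    print(key)

# Tensor name, dtype and shape, mirroring the table above
for t in reader.tensors:
    print(t.name, t.tensor_type.name, list(t.shape))
```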