liutechs/yuantwo:latest

15 Downloads · Updated 1 year ago
yuantwo:latest

model  e222e5e2df2e · 4.7GB
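To try the model locally, it can be pulled with the Ollama CLI (ollama pull liutechs/yuantwo:latest) and then queried over Ollama's local HTTP API. Below is a minimal sketch, assuming an Ollama server listening on the default port 11434 and that the model has already been pulled; the prompt is only a placeholder.

```python
# Minimal sketch: query the model through a locally running Ollama server.
# Assumes `ollama pull liutechs/yuantwo:latest` has already been run and the
# server is on the default port 11434.
import json
import urllib.request

payload = {
    "model": "liutechs/yuantwo:latest",
    "prompt": "Hello",      # placeholder prompt
    "stream": False,        # return one JSON object instead of a stream
}
req = urllib.request.Request(
    "http://localhost:11434/api/generate",
    data=json.dumps(payload).encode("utf-8"),
    headers={"Content-Type": "application/json"},
)
with urllib.request.urlopen(req) as resp:
    body = json.load(resp)

print(body["response"])
```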
Metadata

general.architecture  llama
general.file_type  F16
llama.attention.head_count  32
llama.attention.head_count_kv  32
llama.attention.layer_norm_rms_epsilon  1e-06
llama.block_count  24
llama.context_length  8192
llama.embedding_length  2048
llama.feed_forward_length  8192
llama.rope.dimension_count  64
tokenizer.ggml.add_bos_token  true
tokenizer.ggml.add_eos_token  true
tokenizer.ggml.bos_token_id  77185
tokenizer.ggml.eos_token_id  77185
tokenizer.ggml.model  llama
tokenizer.ggml.padding_token_id  77185
tokenizer.ggml.scores  [0, 0, 0, 0, 0, ...]
tokenizer.ggml.seperator_token_id  77185
tokenizer.ggml.token_type  [2, 3, 3, 6, 6, ...]
tokenizer.ggml.tokens  [<unk>, <s>, </s>, <0x00>, <0x01>, ...]
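A couple of figures follow directly from these values: the per-head dimension is llama.embedding_length / llama.attention.head_count = 2048 / 32 = 64, which matches llama.rope.dimension_count, and a full 8192-token KV cache across the 24 blocks comes to roughly 1.5 GiB at F16. The sketch below just redoes that arithmetic; it assumes a conventional llama-style KV cache held in F16, so actual runtime memory will vary with the backend and its cache settings.

```python
# Back-of-the-envelope figures derived from the metadata above.
# Assumes a standard llama-style attention layout and an F16 (2-byte) KV cache.

embedding_length = 2048
head_count = 32
head_count_kv = 32
block_count = 24
context_length = 8192
bytes_per_elem = 2  # F16

head_dim = embedding_length // head_count
print(head_dim)  # 64, matching llama.rope.dimension_count

# K and V caches: one entry per layer, per KV head, per position.
kv_cache_bytes = 2 * block_count * context_length * head_count_kv * head_dim * bytes_per_elem
print(f"{kv_cache_bytes / 2**30:.2f} GiB at full 8192-token context")  # ~1.50 GiB
```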
Tensor

Name  Type  Shape
token_embd.weight  F16  [2048, 135040]

blk.0
blk.0.attn_k.weight  F16  [2048, 2048]
blk.0.attn_norm.weight  F32  [2048]
blk.0.attn_output.weight  F16  [2048, 2048]
blk.0.attn_q.weight  F16  [2048, 2048]
blk.0.attn_v.weight  F16  [2048, 2048]
blk.0.conv1.bias  F32  [1024]
blk.0.conv1.weight  F16  [1, 2, 2048, 1024]
blk.0.conv2.bias  F32  [2048]
blk.0.conv2.weight  F16  [1, 2, 1024, 2048]
blk.0.ffn_down.weight  F16  [8192, 2048]
blk.0.ffn_gate.weight  F16  [2048, 8192]
blk.0.ffn_norm.weight  F32  [2048]
blk.0.ffn_up.weight  F16  [2048, 8192]
blk.0.lf_output_norm.weight  F32  [2048]

blk.1
blk.1.attn_k.weight  F16  [2048, 2048]
blk.1.attn_norm.weight  F32  [2048]
blk.1.attn_output.weight  F16  [2048, 2048]
blk.1.attn_q.weight  F16  [2048, 2048]
blk.1.attn_v.weight  F16  [2048, 2048]
blk.1.conv1.bias  F32  [1024]
blk.1.conv1.weight  F16  [1, 2, 2048, 1024]
blk.1.conv2.bias  F32  [2048]
blk.1.conv2.weight  F16  [1, 2, 1024, 2048]
blk.1.ffn_down.weight  F16  [8192, 2048]
blk.1.ffn_gate.weight  F16  [2048, 8192]
blk.1.ffn_norm.weight  F32  [2048]
blk.1.ffn_up.weight  F16  [2048, 8192]
blk.1.lf_output_norm.weight  F32  [2048]

blk.2
blk.2.attn_k.weight  F16  [2048, 2048]
blk.2.attn_norm.weight  F32  [2048]
blk.2.attn_output.weight  F16  [2048, 2048]
blk.2.attn_q.weight  F16  [2048, 2048]
blk.2.attn_v.weight  F16  [2048, 2048]
blk.2.conv1.bias  F32  [1024]
blk.2.conv1.weight  F16  [1, 2, 2048, 1024]
blk.2.conv2.bias  F32  [2048]
blk.2.conv2.weight  F16  [1, 2, 1024, 2048]
blk.2.ffn_down.weight  F16  [8192, 2048]
blk.2.ffn_gate.weight  F16  [2048, 8192]
blk.2.ffn_norm.weight  F32  [2048]
blk.2.ffn_up.weight  F16  [2048, 8192]
blk.2.lf_output_norm.weight  F32  [2048]

blk.3
blk.3.attn_k.weight  F16  [2048, 2048]
blk.3.attn_norm.weight  F32  [2048]
blk.3.attn_output.weight  F16  [2048, 2048]
blk.3.attn_q.weight  F16  [2048, 2048]
blk.3.attn_v.weight  F16  [2048, 2048]
blk.3.conv1.bias  F32  [1024]
blk.3.conv1.weight  F16  [1, 2, 2048, 1024]
blk.3.conv2.bias  F32  [2048]
blk.3.conv2.weight  F16  [1, 2, 1024, 2048]
blk.3.ffn_down.weight  F16  [8192, 2048]
blk.3.ffn_gate.weight  F16  [2048, 8192]
blk.3.ffn_norm.weight  F32  [2048]
blk.3.ffn_up.weight  F16  [2048, 8192]
blk.3.lf_output_norm.weight  F32  [2048]

blk.4
blk.4.attn_k.weight  F16  [2048, 2048]
blk.4.attn_norm.weight  F32  [2048]
blk.4.attn_output.weight  F16  [2048, 2048]
blk.4.attn_q.weight  F16  [2048, 2048]
blk.4.attn_v.weight  F16  [2048, 2048]
blk.4.conv1.bias  F32  [1024]
blk.4.conv1.weight  F16  [1, 2, 2048, 1024]
blk.4.conv2.bias  F32  [2048]
blk.4.conv2.weight  F16  [1, 2, 1024, 2048]
blk.4.ffn_down.weight  F16  [8192, 2048]
blk.4.ffn_gate.weight  F16  [2048, 8192]
blk.4.ffn_norm.weight  F32  [2048]
blk.4.ffn_up.weight  F16  [2048, 8192]
blk.4.lf_output_norm.weight  F32  [2048]

blk.5
blk.5.attn_k.weight  F16  [2048, 2048]
blk.5.attn_norm.weight  F32  [2048]
blk.5.attn_output.weight  F16  [2048, 2048]
blk.5.attn_q.weight  F16  [2048, 2048]
blk.5.attn_v.weight  F16  [2048, 2048]
blk.5.conv1.bias  F32  [1024]
blk.5.conv1.weight  F16  [1, 2, 2048, 1024]
blk.5.conv2.bias  F32  [2048]
blk.5.conv2.weight  F16  [1, 2, 1024, 2048]
blk.5.ffn_down.weight  F16  [8192, 2048]
blk.5.ffn_gate.weight  F16  [2048, 8192]
blk.5.ffn_norm.weight  F32  [2048]
blk.5.ffn_up.weight  F16  [2048, 8192]
blk.5.lf_output_norm.weight  F32  [2048]

blk.6
blk.6.attn_k.weight  F16  [2048, 2048]
blk.6.attn_norm.weight  F32  [2048]
blk.6.attn_output.weight  F16  [2048, 2048]
blk.6.attn_q.weight  F16  [2048, 2048]
blk.6.attn_v.weight  F16  [2048, 2048]
blk.6.conv1.bias  F32  [1024]
blk.6.conv1.weight  F16  [1, 2, 2048, 1024]
blk.6.conv2.bias  F32  [2048]
blk.6.conv2.weight  F16  [1, 2, 1024, 2048]
blk.6.ffn_down.weight  F16  [8192, 2048]
blk.6.ffn_gate.weight  F16  [2048, 8192]
blk.6.ffn_norm.weight  F32  [2048]
blk.6.ffn_up.weight  F16  [2048, 8192]
blk.6.lf_output_norm.weight  F32  [2048]

blk.7
blk.7.attn_k.weight  F16  [2048, 2048]
blk.7.attn_norm.weight  F32  [2048]
blk.7.attn_output.weight  F16  [2048, 2048]
blk.7.attn_q.weight  F16  [2048, 2048]
blk.7.attn_v.weight  F16  [2048, 2048]
blk.7.conv1.bias  F32  [1024]
blk.7.conv1.weight  F16  [1, 2, 2048, 1024]
blk.7.conv2.bias  F32  [2048]
blk.7.conv2.weight  F16  [1, 2, 1024, 2048]
blk.7.ffn_down.weight  F16  [8192, 2048]
blk.7.ffn_gate.weight  F16  [2048, 8192]
blk.7.ffn_norm.weight  F32  [2048]
blk.7.ffn_up.weight  F16  [2048, 8192]
blk.7.lf_output_norm.weight  F32  [2048]

blk.8
blk.8.attn_k.weight  F16  [2048, 2048]
blk.8.attn_norm.weight  F32  [2048]
blk.8.attn_output.weight  F16  [2048, 2048]
blk.8.attn_q.weight  F16  [2048, 2048]
blk.8.attn_v.weight  F16  [2048, 2048]
blk.8.conv1.bias  F32  [1024]
blk.8.conv1.weight  F16  [1, 2, 2048, 1024]
blk.8.conv2.bias  F32  [2048]
blk.8.conv2.weight  F16  [1, 2, 1024, 2048]
blk.8.ffn_down.weight  F16  [8192, 2048]
blk.8.ffn_gate.weight  F16  [2048, 8192]
blk.8.ffn_norm.weight  F32  [2048]
blk.8.ffn_up.weight  F16  [2048, 8192]
blk.8.lf_output_norm.weight  F32  [2048]

blk.9
blk.9.attn_k.weight  F16  [2048, 2048]
blk.9.attn_norm.weight  F32  [2048]
blk.9.attn_output.weight  F16  [2048, 2048]
blk.9.attn_q.weight  F16  [2048, 2048]
blk.9.attn_v.weight  F16  [2048, 2048]
blk.9.conv1.bias  F32  [1024]
blk.9.conv1.weight  F16  [1, 2, 2048, 1024]
blk.9.conv2.bias  F32  [2048]
blk.9.conv2.weight  F16  [1, 2, 1024, 2048]
blk.9.ffn_down.weight  F16  [8192, 2048]
blk.9.ffn_gate.weight  F16  [2048, 8192]
blk.9.ffn_norm.weight  F32  [2048]
blk.9.ffn_up.weight  F16  [2048, 8192]
blk.9.lf_output_norm.weight  F32  [2048]

blk.10
blk.10.attn_k.weight  F16  [2048, 2048]
blk.10.attn_norm.weight  F32  [2048]
blk.10.attn_output.weight  F16  [2048, 2048]
blk.10.attn_q.weight  F16  [2048, 2048]
blk.10.attn_v.weight  F16  [2048, 2048]
blk.10.conv1.bias  F32  [1024]
blk.10.conv1.weight  F16  [1, 2, 2048, 1024]
blk.10.conv2.bias  F32  [2048]
blk.10.conv2.weight  F16  [1, 2, 1024, 2048]
blk.10.ffn_down.weight  F16  [8192, 2048]
blk.10.ffn_gate.weight  F16  [2048, 8192]
blk.10.ffn_norm.weight  F32  [2048]
blk.10.ffn_up.weight  F16  [2048, 8192]
blk.10.lf_output_norm.weight  F32  [2048]

blk.11
blk.11.attn_k.weight  F16  [2048, 2048]
blk.11.attn_norm.weight  F32  [2048]
blk.11.attn_output.weight  F16  [2048, 2048]
blk.11.attn_q.weight  F16  [2048, 2048]
blk.11.attn_v.weight  F16  [2048, 2048]
blk.11.conv1.bias  F32  [1024]
blk.11.conv1.weight  F16  [1, 2, 2048, 1024]
blk.11.conv2.bias  F32  [2048]
blk.11.conv2.weight  F16  [1, 2, 1024, 2048]
blk.11.ffn_down.weight  F16  [8192, 2048]
blk.11.ffn_gate.weight  F16  [2048, 8192]
blk.11.ffn_norm.weight  F32  [2048]
blk.11.ffn_up.weight  F16  [2048, 8192]
blk.11.lf_output_norm.weight  F32  [2048]

blk.12
blk.12.attn_k.weight  F16  [2048, 2048]
blk.12.attn_norm.weight  F32  [2048]
blk.12.attn_output.weight  F16  [2048, 2048]
blk.12.attn_q.weight  F16  [2048, 2048]
blk.12.attn_v.weight  F16  [2048, 2048]
blk.12.conv1.bias  F32  [1024]
blk.12.conv1.weight  F16  [1, 2, 2048, 1024]
blk.12.conv2.bias  F32  [2048]
blk.12.conv2.weight  F16  [1, 2, 1024, 2048]
blk.12.ffn_down.weight  F16  [8192, 2048]
blk.12.ffn_gate.weight  F16  [2048, 8192]
blk.12.ffn_norm.weight  F32  [2048]
blk.12.ffn_up.weight  F16  [2048, 8192]
blk.12.lf_output_norm.weight  F32  [2048]

blk.13
blk.13.attn_k.weight  F16  [2048, 2048]
blk.13.attn_norm.weight  F32  [2048]
blk.13.attn_output.weight  F16  [2048, 2048]
blk.13.attn_q.weight  F16  [2048, 2048]
blk.13.attn_v.weight  F16  [2048, 2048]
blk.13.conv1.bias  F32  [1024]
blk.13.conv1.weight  F16  [1, 2, 2048, 1024]
blk.13.conv2.bias  F32  [2048]
blk.13.conv2.weight  F16  [1, 2, 1024, 2048]
blk.13.ffn_down.weight  F16  [8192, 2048]
blk.13.ffn_gate.weight  F16  [2048, 8192]
blk.13.ffn_norm.weight  F32  [2048]
blk.13.ffn_up.weight  F16  [2048, 8192]
blk.13.lf_output_norm.weight  F32  [2048]

blk.14
blk.14.attn_k.weight  F16  [2048, 2048]
blk.14.attn_norm.weight  F32  [2048]
blk.14.attn_output.weight  F16  [2048, 2048]
blk.14.attn_q.weight  F16  [2048, 2048]
blk.14.attn_v.weight  F16  [2048, 2048]
blk.14.conv1.bias  F32  [1024]
blk.14.conv1.weight  F16  [1, 2, 2048, 1024]
blk.14.conv2.bias  F32  [2048]
blk.14.conv2.weight  F16  [1, 2, 1024, 2048]
blk.14.ffn_down.weight  F16  [8192, 2048]
blk.14.ffn_gate.weight  F16  [2048, 8192]
blk.14.ffn_norm.weight  F32  [2048]
blk.14.ffn_up.weight  F16  [2048, 8192]
blk.14.lf_output_norm.weight  F32  [2048]

blk.15
blk.15.attn_k.weight  F16  [2048, 2048]
blk.15.attn_norm.weight  F32  [2048]
blk.15.attn_output.weight  F16  [2048, 2048]
blk.15.attn_q.weight  F16  [2048, 2048]
blk.15.attn_v.weight  F16  [2048, 2048]
blk.15.conv1.bias  F32  [1024]
blk.15.conv1.weight  F16  [1, 2, 2048, 1024]
blk.15.conv2.bias  F32  [2048]
blk.15.conv2.weight  F16  [1, 2, 1024, 2048]
blk.15.ffn_down.weight  F16  [8192, 2048]
blk.15.ffn_gate.weight  F16  [2048, 8192]
blk.15.ffn_norm.weight  F32  [2048]
blk.15.ffn_up.weight  F16  [2048, 8192]
blk.15.lf_output_norm.weight  F32  [2048]

blk.16
blk.16.attn_k.weight  F16  [2048, 2048]
blk.16.attn_norm.weight  F32  [2048]
blk.16.attn_output.weight  F16  [2048, 2048]
blk.16.attn_q.weight  F16  [2048, 2048]
blk.16.attn_v.weight  F16  [2048, 2048]
blk.16.conv1.bias  F32  [1024]
blk.16.conv1.weight  F16  [1, 2, 2048, 1024]
blk.16.conv2.bias  F32  [2048]
blk.16.conv2.weight  F16  [1, 2, 1024, 2048]
blk.16.ffn_down.weight  F16  [8192, 2048]
blk.16.ffn_gate.weight  F16  [2048, 8192]
blk.16.ffn_norm.weight  F32  [2048]
blk.16.ffn_up.weight  F16  [2048, 8192]
blk.16.lf_output_norm.weight  F32  [2048]

blk.17
blk.17.attn_k.weight  F16  [2048, 2048]
blk.17.attn_norm.weight  F32  [2048]
blk.17.attn_output.weight  F16  [2048, 2048]
blk.17.attn_q.weight  F16  [2048, 2048]
blk.17.attn_v.weight  F16  [2048, 2048]
blk.17.conv1.bias  F32  [1024]
blk.17.conv1.weight  F16  [1, 2, 2048, 1024]
blk.17.conv2.bias  F32  [2048]
blk.17.conv2.weight  F16  [1, 2, 1024, 2048]
blk.17.ffn_down.weight  F16  [8192, 2048]
blk.17.ffn_gate.weight  F16  [2048, 8192]
blk.17.ffn_norm.weight  F32  [2048]
blk.17.ffn_up.weight  F16  [2048, 8192]
blk.17.lf_output_norm.weight  F32  [2048]

blk.18
blk.18.attn_k.weight  F16  [2048, 2048]
blk.18.attn_norm.weight  F32  [2048]
blk.18.attn_output.weight  F16  [2048, 2048]
blk.18.attn_q.weight  F16  [2048, 2048]
blk.18.attn_v.weight  F16  [2048, 2048]
blk.18.conv1.bias  F32  [1024]
blk.18.conv1.weight  F16  [1, 2, 2048, 1024]
blk.18.conv2.bias  F32  [2048]
blk.18.conv2.weight  F16  [1, 2, 1024, 2048]
blk.18.ffn_down.weight  F16  [8192, 2048]
blk.18.ffn_gate.weight  F16  [2048, 8192]
blk.18.ffn_norm.weight  F32  [2048]
blk.18.ffn_up.weight  F16  [2048, 8192]
blk.18.lf_output_norm.weight  F32  [2048]

blk.19
blk.19.attn_k.weight  F16  [2048, 2048]
blk.19.attn_norm.weight  F32  [2048]
blk.19.attn_output.weight  F16  [2048, 2048]
blk.19.attn_q.weight  F16  [2048, 2048]
blk.19.attn_v.weight  F16  [2048, 2048]
blk.19.conv1.bias  F32  [1024]
blk.19.conv1.weight  F16  [1, 2, 2048, 1024]
blk.19.conv2.bias  F32  [2048]
blk.19.conv2.weight  F16  [1, 2, 1024, 2048]
blk.19.ffn_down.weight  F16  [8192, 2048]
blk.19.ffn_gate.weight  F16  [2048, 8192]
blk.19.ffn_norm.weight  F32  [2048]
blk.19.ffn_up.weight  F16  [2048, 8192]
blk.19.lf_output_norm.weight  F32  [2048]

blk.20
blk.20.attn_k.weight  F16  [2048, 2048]
blk.20.attn_norm.weight  F32  [2048]
blk.20.attn_output.weight  F16  [2048, 2048]
blk.20.attn_q.weight  F16  [2048, 2048]
blk.20.attn_v.weight  F16  [2048, 2048]
blk.20.conv1.bias  F32  [1024]
blk.20.conv1.weight  F16  [1, 2, 2048, 1024]
blk.20.conv2.bias  F32  [2048]
blk.20.conv2.weight  F16  [1, 2, 1024, 2048]
blk.20.ffn_down.weight  F16  [8192, 2048]
blk.20.ffn_gate.weight  F16  [2048, 8192]
blk.20.ffn_norm.weight  F32  [2048]
blk.20.ffn_up.weight  F16  [2048, 8192]
blk.20.lf_output_norm.weight  F32  [2048]

blk.21
blk.21.attn_k.weight  F16  [2048, 2048]
blk.21.attn_norm.weight  F32  [2048]
blk.21.attn_output.weight  F16  [2048, 2048]
blk.21.attn_q.weight  F16  [2048, 2048]
blk.21.attn_v.weight  F16  [2048, 2048]
blk.21.conv1.bias  F32  [1024]
blk.21.conv1.weight  F16  [1, 2, 2048, 1024]
blk.21.conv2.bias  F32  [2048]
blk.21.conv2.weight  F16  [1, 2, 1024, 2048]
blk.21.ffn_down.weight  F16  [8192, 2048]
blk.21.ffn_gate.weight  F16  [2048, 8192]
blk.21.ffn_norm.weight  F32  [2048]
blk.21.ffn_up.weight  F16  [2048, 8192]
blk.21.lf_output_norm.weight  F32  [2048]

blk.22
blk.22.attn_k.weight  F16  [2048, 2048]
blk.22.attn_norm.weight  F32  [2048]
blk.22.attn_output.weight  F16  [2048, 2048]
blk.22.attn_q.weight  F16  [2048, 2048]
blk.22.attn_v.weight  F16  [2048, 2048]
blk.22.conv1.bias  F32  [1024]
blk.22.conv1.weight  F16  [1, 2, 2048, 1024]
blk.22.conv2.bias  F32  [2048]
blk.22.conv2.weight  F16  [1, 2, 1024, 2048]
blk.22.ffn_down.weight  F16  [8192, 2048]
blk.22.ffn_gate.weight  F16  [2048, 8192]
blk.22.ffn_norm.weight  F32  [2048]
blk.22.ffn_up.weight  F16  [2048, 8192]
blk.22.lf_output_norm.weight  F32  [2048]

blk.23
blk.23.attn_k.weight  F16  [2048, 2048]
blk.23.attn_norm.weight  F32  [2048]
blk.23.attn_output.weight  F16  [2048, 2048]
blk.23.attn_q.weight  F16  [2048, 2048]
blk.23.attn_v.weight  F16  [2048, 2048]
blk.23.conv1.bias  F32  [1024]
blk.23.conv1.weight  F16  [1, 2, 2048, 1024]
blk.23.conv2.bias  F32  [2048]
blk.23.conv2.weight  F16  [1, 2, 1024, 2048]
blk.23.ffn_down.weight  F16  [8192, 2048]
blk.23.ffn_gate.weight  F16  [2048, 8192]
blk.23.ffn_norm.weight  F32  [2048]
blk.23.ffn_up.weight  F16  [2048, 8192]
blk.23.lf_output_norm.weight  F32  [2048]

output.weight  F16  [2048, 135040]
output_norm.weight  F32  [2048]
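As a rough consistency check, the shapes and types listed above account for about 2.37B parameters and roughly 4.73 GB of tensor data, in line with the 4.7GB model blob. The sketch below rebuilds the tensor list from this page and sums it; it ignores GGUF header and alignment overhead, so the exact on-disk figure will differ slightly.

```python
# Sanity check: total parameters and bytes implied by the tensor table above.
from math import prod

BYTES = {"F16": 2, "F32": 4}

# Per-block tensors; every block 0..23 repeats this layout.
block_tensors = [
    ("attn_k.weight",         "F16", (2048, 2048)),
    ("attn_norm.weight",      "F32", (2048,)),
    ("attn_output.weight",    "F16", (2048, 2048)),
    ("attn_q.weight",         "F16", (2048, 2048)),
    ("attn_v.weight",         "F16", (2048, 2048)),
    ("conv1.bias",            "F32", (1024,)),
    ("conv1.weight",          "F16", (1, 2, 2048, 1024)),
    ("conv2.bias",            "F32", (2048,)),
    ("conv2.weight",          "F16", (1, 2, 1024, 2048)),
    ("ffn_down.weight",       "F16", (8192, 2048)),
    ("ffn_gate.weight",       "F16", (2048, 8192)),
    ("ffn_norm.weight",       "F32", (2048,)),
    ("ffn_up.weight",         "F16", (2048, 8192)),
    ("lf_output_norm.weight", "F32", (2048,)),
]

tensors = [
    ("token_embd.weight",  "F16", (2048, 135040)),
    ("output.weight",      "F16", (2048, 135040)),
    ("output_norm.weight", "F32", (2048,)),
]
for blk in range(24):  # llama.block_count = 24
    tensors += [(f"blk.{blk}.{name}", dtype, shape) for name, dtype, shape in block_tensors]

params = sum(prod(shape) for _, _, shape in tensors)
size = sum(prod(shape) * BYTES[dtype] for _, dtype, shape in tensors)
print(f"{params / 1e9:.2f}B parameters")      # ~2.37B
print(f"{size / 1e9:.2f} GB of tensor data")  # ~4.73 GB, consistent with the 4.7GB blob
```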