nekincn/mistra-nemo-full:latest
3 Downloads · Updated 1 week ago

model
fb8d2cf3a120 · 3.0GB
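The tag above can be pulled and run with the standard Ollama CLI; a minimal usage example (the model tag is taken from this page):

    ollama run nekincn/mistra-nemo-full:latest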
Metadata

general.architecture                      llama
general.file_type                         BF16
llama.attention.head_count                32
llama.attention.head_count_kv             8
llama.attention.key_length                128
llama.attention.layer_norm_rms_epsilon    1e-05
llama.attention.value_length              128
llama.block_count                         40
llama.context_length                      131072
llama.embedding_length                    5120
llama.feed_forward_length                 14336
llama.rope.dimension_count                128
llama.rope.freq_base                      1e+06
llama.vocab_size                          131072
tokenizer.ggml.add_bos_token              true
tokenizer.ggml.add_eos_token              false
tokenizer.ggml.add_sep_token              false
tokenizer.ggml.add_space_prefix           false
tokenizer.ggml.bos_token_id               1
tokenizer.ggml.eos_token_id               2
tokenizer.ggml.merges                     [Ġ Ġ, Ġ t, e r, i n, Ġ ĠĠĠ, ...]
tokenizer.ggml.model                      gpt2
tokenizer.ggml.pre                        tekken
tokenizer.ggml.token_type                 [3, 3, 3, 3, 3, ...]
tokenizer.ggml.tokens                     [<unk>, <s>, </s>, [INST], [/INST], ...]
tokenizer.ggml.unknown_token_id           0
split.count                               9
split.no                                  0
split.tensors.count                       363
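The attention shapes in the tensor table below follow from these values: the embedding width is 5120, and with head_count = 32, head_count_kv = 8, and key_length = value_length = 128, the query projection spans 32 × 128 = 4096 columns while the key/value projections span 8 × 128 = 1024, i.e. grouped-query attention with a 4:1 head ratio. A minimal Python sketch of that arithmetic (variable names are mine; the numbers are copied from the metadata above):

    # GGUF metadata values copied from the table above.
    embedding_length = 5120  # llama.embedding_length
    head_count       = 32    # llama.attention.head_count
    head_count_kv    = 8     # llama.attention.head_count_kv
    key_length       = 128   # llama.attention.key_length
    value_length     = 128   # llama.attention.value_length

    q_width  = head_count * key_length      # 4096 -> blk.N.attn_q.weight [5120, 4096]
    kv_width = head_count_kv * key_length   # 1024 -> blk.N.attn_k/v.weight [5120, 1024]

    assert (q_width, kv_width) == (4096, 1024)
    assert head_count // head_count_kv == 4  # 32 query heads share 8 KV heads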
Tensor

Name                          Type    Shape

blk.0
blk.0.attn_k.weight           BF16    [5120, 1024]
blk.0.attn_norm.weight        F32     [5120]
blk.0.attn_output.weight      BF16    [4096, 5120]
blk.0.attn_q.weight           BF16    [5120, 4096]
blk.0.attn_v.weight           BF16    [5120, 1024]
blk.0.ffn_down.weight         BF16    [14336, 5120]
blk.0.ffn_gate.weight         BF16    [5120, 14336]
blk.0.ffn_norm.weight         F32     [5120]
blk.0.ffn_up.weight           BF16    [5120, 14336]

blk.1
blk.1.attn_k.weight           BF16    [5120, 1024]
blk.1.attn_norm.weight        F32     [5120]
blk.1.attn_output.weight      BF16    [4096, 5120]
blk.1.attn_q.weight           BF16    [5120, 4096]
blk.1.attn_v.weight           BF16    [5120, 1024]
blk.1.ffn_down.weight         BF16    [14336, 5120]
blk.1.ffn_gate.weight         BF16    [5120, 14336]
blk.1.ffn_norm.weight         F32     [5120]
blk.1.ffn_up.weight           BF16    [5120, 14336]

blk.10
blk.10.attn_k.weight          BF16    [5120, 1024]
blk.10.attn_norm.weight       F32     [5120]
blk.10.attn_output.weight     BF16    [4096, 5120]
blk.10.attn_q.weight          BF16    [5120, 4096]
blk.10.attn_v.weight          BF16    [5120, 1024]
blk.10.ffn_down.weight        BF16    [14336, 5120]
blk.10.ffn_gate.weight        BF16    [5120, 14336]
blk.10.ffn_norm.weight        F32     [5120]
blk.10.ffn_up.weight          BF16    [5120, 14336]

blk.11
blk.11.attn_k.weight          BF16    [5120, 1024]
blk.11.attn_norm.weight       F32     [5120]
blk.11.attn_output.weight     BF16    [4096, 5120]
blk.11.attn_q.weight          BF16    [5120, 4096]
blk.11.attn_v.weight          BF16    [5120, 1024]
blk.11.ffn_down.weight        BF16    [14336, 5120]
blk.11.ffn_gate.weight        BF16    [5120, 14336]
blk.11.ffn_norm.weight        F32     [5120]
blk.11.ffn_up.weight          BF16    [5120, 14336]

blk.12
blk.12.attn_k.weight          BF16    [5120, 1024]
blk.12.attn_norm.weight       F32     [5120]
blk.12.attn_output.weight     BF16    [4096, 5120]
blk.12.attn_q.weight          BF16    [5120, 4096]
blk.12.attn_v.weight          BF16    [5120, 1024]
blk.12.ffn_down.weight        BF16    [14336, 5120]
blk.12.ffn_gate.weight        BF16    [5120, 14336]
blk.12.ffn_norm.weight        F32     [5120]
blk.12.ffn_up.weight          BF16    [5120, 14336]

blk.13
blk.13.attn_k.weight          BF16    [5120, 1024]
blk.13.attn_norm.weight       F32     [5120]
blk.13.attn_output.weight     BF16    [4096, 5120]
blk.13.attn_q.weight          BF16    [5120, 4096]
blk.13.attn_v.weight          BF16    [5120, 1024]
blk.13.ffn_gate.weight        BF16    [5120, 14336]
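This metadata and tensor listing can be reproduced locally with the gguf Python package from the llama.cpp project. A minimal sketch, assuming the package is installed; since split.count = 9 above, the weights are sharded across nine GGUF files, and the path below is a hypothetical first shard:

    # pip install gguf
    from gguf import GGUFReader

    reader = GGUFReader("mistra-nemo-00001-of-00009.gguf")  # hypothetical shard path

    # Key/value metadata fields, e.g. llama.block_count, tokenizer.ggml.pre.
    for name in reader.fields:
        print(name)

    # Tensor name, dtype, and shape, matching the table above.
    for t in reader.tensors:
        print(t.name, t.tensor_type.name, list(t.shape))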