csalab/sealion3.5:70b-F16

77 Downloads · Updated 8 months ago
SEA-LION is a collection of Large Language Models (LLMs) which have been pretrained and instruct-tuned for the Southeast Asia (SEA) region.
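To try it locally, the model should be runnable through Ollama under the tag shown on this page (a minimal sketch: the tag comes straight from the page header, and pulling it fetches the full ~141GB F16 blob listed under the model layer below):

    ollama run csalab/sealion3.5:70b-F16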
sealion3.5:70b-F16

model    e5ed83576409 · 141GB
Metadata

general.architecture                       llama
general.file_type                          F16
llama.attention.head_count                 64
llama.attention.head_count_kv              8
llama.attention.key_length                 128
llama.attention.layer_norm_rms_epsilon     1e-05
llama.attention.value_length               128
llama.block_count                          80
llama.context_length                       131072
llama.embedding_length                     8192
llama.feed_forward_length                  28672
llama.rope.dimension_count                 128
llama.rope.freq_base                       500000
llama.vocab_size                           128256
tokenizer.ggml.add_bos_token               true
tokenizer.ggml.add_eos_token               false
tokenizer.ggml.add_space_prefix            false
tokenizer.ggml.bos_token_id                128000
tokenizer.ggml.eos_token_id                128001
tokenizer.ggml.merges                      [Ġ Ġ, Ġ ĠĠĠ, ĠĠ ĠĠ, ĠĠĠ Ġ, i n, ...]
tokenizer.ggml.model                       gpt2
tokenizer.ggml.padding_token_id            128004
tokenizer.ggml.pre                         llama-bpe
tokenizer.ggml.token_type                  [1, 1, 1, 1, 1, ...]
tokenizer.ggml.tokens                      [!, ", #, $, %, ...]
split.count                                8
split.no                                   0
split.tensors.count                        724
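Several of these values can be cross-checked against each other and against the tensor shapes below. The sketch that follows is not part of the page; it only re-derives numbers already shown above, assuming the standard Llama-style grouped-query attention layout (the 141GB figure is the displayed blob size, so the parameter count is back-of-envelope):

    # Cross-checks derived from the metadata above (Llama-style GQA assumed).
    embedding_length = 8192
    head_count       = 64    # query heads
    head_count_kv    = 8     # shared key/value heads (grouped-query attention)
    head_dim         = 128   # llama.attention.key_length / value_length

    # The query projection spans all 64 heads; K/V span only the 8 KV heads,
    # which explains the [8192, 8192] vs. [8192, 1024] shapes listed below.
    assert head_count * head_dim == embedding_length      # 64 * 128 = 8192
    assert head_count_kv * head_dim == 1024               # 8 * 128 = 1024

    # Rough size check: F16 stores 2 bytes per weight, so a 141GB blob
    # implies about 141e9 / 2 ≈ 70B parameters, matching the 70b tag.
    print(f"~{141e9 / 2 / 1e9:.1f}B parameters")          # ~70.5B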
Tensor

Name                              Type    Shape

blk.0
  blk.0.attn_k.weight             F16     [8192, 1024]
  blk.0.attn_norm.weight          F32     [8192]
  blk.0.attn_output.weight        F16     [8192, 8192]
  blk.0.attn_q.weight             F16     [8192, 8192]
  blk.0.attn_v.weight             F16     [8192, 1024]
  blk.0.ffn_down.weight           F16     [28672, 8192]
  blk.0.ffn_gate.weight           F16     [8192, 28672]
  blk.0.ffn_norm.weight           F32     [8192]
  blk.0.ffn_up.weight             F16     [8192, 28672]

blk.1
  blk.1.attn_k.weight             F16     [8192, 1024]
  blk.1.attn_norm.weight          F32     [8192]
  blk.1.attn_output.weight        F16     [8192, 8192]
  blk.1.attn_q.weight             F16     [8192, 8192]
  blk.1.attn_v.weight             F16     [8192, 1024]
  blk.1.ffn_down.weight           F16     [28672, 8192]
  blk.1.ffn_gate.weight           F16     [8192, 28672]
  blk.1.ffn_norm.weight           F32     [8192]
  blk.1.ffn_up.weight             F16     [8192, 28672]

blk.2
  blk.2.attn_norm.weight          F32     [8192]
  blk.2.attn_output.weight        F16     [8192, 8192]
  blk.2.attn_v.weight             F16     [8192, 1024]

blk.10
  blk.10.attn_k.weight            F16     [8192, 1024]
  blk.10.attn_norm.weight         F32     [8192]
  blk.10.attn_output.weight       F16     [8192, 8192]
  blk.10.attn_q.weight            F16     [8192, 8192]
  blk.10.attn_v.weight            F16     [8192, 1024]
  blk.10.ffn_down.weight          F16     [28672, 8192]
  blk.10.ffn_gate.weight          F16     [8192, 28672]
  blk.10.ffn_norm.weight          F32     [8192]

blk.11
  blk.11.attn_k.weight            F16     [8192, 1024]
  blk.11.attn_norm.weight         F32     [8192]
  blk.11.attn_output.weight       F16     [8192, 8192]
  blk.11.attn_q.weight            F16     [8192, 8192]
  blk.11.attn_v.weight            F16     [8192, 1024]
  blk.11.ffn_down.weight          F16     [28672, 8192]
  blk.11.ffn_norm.weight          F32     [8192]
  blk.11.ffn_up.weight            F16     [8192, 28672]

blk.12
  blk.12.attn_k.weight            F16     [8192, 1024]
  blk.12.attn_norm.weight         F32     [8192]
  blk.12.attn_output.weight       F16     [8192, 8192]
  blk.12.attn_q.weight            F16     [8192, 8192]
  blk.12.attn_v.weight            F16     [8192, 1024]
  blk.12.ffn_gate.weight          F16     [8192, 28672]
  blk.12.ffn_norm.weight          F32     [8192]
  blk.12.ffn_up.weight            F16     [8192, 28672]

blk.13
  blk.13.attn_k.weight            F16     [8192, 1024]
  blk.13.attn_output.weight       F16     [8192, 8192]
  blk.13.attn_q.weight            F16     [8192, 8192]
  blk.13.ffn_down.weight          F16     [28672, 8192]
  blk.13.ffn_gate.weight          F16     [8192, 28672]
  blk.13.ffn_norm.weight          F32     [8192]
  blk.13.ffn_up.weight            F16     [8192, 28672]

blk.14
  blk.14.attn_k.weight            F16     [8192, 1024]
  blk.14.attn_norm.weight         F32     [8192]
  blk.14.attn_output.weight       F16     [8192, 8192]
  blk.14.attn_v.weight            F16     [8192, 1024]
  blk.14.ffn_down.weight          F16     [28672, 8192]
  blk.14.ffn_gate.weight          F16     [8192, 28672]
  blk.14.ffn_norm.weight          F32     [8192]
  blk.14.ffn_up.weight            F16     [8192, 28672]

blk.15
  blk.15.attn_k.weight            F16     [8192, 1024]
  blk.15.attn_norm.weight         F32     [8192]
  blk.15.attn_q.weight            F16     [8192, 8192]
  blk.15.attn_v.weight            F16     [8192, 1024]
  blk.15.ffn_down.weight          F16     [28672, 8192]
  blk.15.ffn_gate.weight          F16     [8192, 28672]
  blk.15.ffn_norm.weight          F32     [8192]
  blk.15.ffn_up.weight            F16     [8192, 28672]

blk.16
  blk.16.attn_norm.weight         F32     [8192]
  blk.16.attn_output.weight       F16     [8192, 8192]
  blk.16.attn_q.weight            F16     [8192, 8192]
  blk.16.attn_v.weight            F16     [8192, 1024]
  blk.16.ffn_down.weight          F16     [28672, 8192]
  blk.16.ffn_gate.weight          F16     [8192, 28672]
  blk.16.ffn_norm.weight          F32     [8192]
  blk.16.ffn_up.weight            F16     [8192, 28672]

blk.17
  blk.17.attn_k.weight            F16     [8192, 1024]
  blk.17.attn_norm.weight         F32     [8192]
  blk.17.attn_output.weight       F16     [8192, 8192]
  blk.17.attn_q.weight            F16     [8192, 8192]
  blk.17.attn_v.weight            F16     [8192, 1024]
  blk.17.ffn_down.weight          F16     [28672, 8192]
  blk.17.ffn_gate.weight          F16     [8192, 28672]
  blk.17.ffn_up.weight            F16     [8192, 28672]

blk.18
  blk.18.attn_k.weight            F16     [8192, 1024]
  blk.18.attn_norm.weight         F32     [8192]
  blk.18.attn_output.weight       F16     [8192, 8192]
  blk.18.attn_q.weight            F16     [8192, 8192]
  blk.18.attn_v.weight            F16     [8192, 1024]
  blk.18.ffn_down.weight          F16     [28672, 8192]
  blk.18.ffn_gate.weight          F16     [8192, 28672]
  blk.18.ffn_norm.weight          F32     [8192]

blk.19
  blk.19.attn_norm.weight         F32     [8192]
  blk.19.attn_q.weight            F16     [8192, 8192]
  blk.19.attn_v.weight            F16     [8192, 1024]
  blk.19.ffn_down.weight          F16     [28672, 8192]
  blk.19.ffn_norm.weight          F32     [8192]

blk.20
  blk.20.attn_q.weight            F16     [8192, 8192]

output.weight                     F16     [8192, 128256]
rope_freqs.weight                 F32     [64]
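The same metadata and tensor records can be read programmatically. Below is a minimal sketch using the gguf Python package that ships with llama.cpp (pip install gguf); the shard filename is hypothetical, and since split.count is 8, each of the 8 GGUF shards lists only its own tensors:

    # Minimal sketch, assuming the `gguf` package from llama.cpp.
    # The filename is hypothetical; shard 0 (split.no = 0) of the 8-way
    # split carries the shared metadata shown in the table above.
    from gguf import GGUFReader

    reader = GGUFReader("sealion3.5-70b-F16-00001-of-00008.gguf")

    # Metadata keys mirror the table above (general.*, llama.*, tokenizer.ggml.*).
    for name in reader.fields:
        print(name)

    # Each tensor record carries the Name / Type / Shape triple listed above.
    for t in reader.tensors:
        print(t.name, t.tensor_type.name, list(t.shape))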