csalab/sealion3.5:latest

77 pulls · Updated 8 months ago

SEA-LION is a collection of Large Language Models (LLMs) that have been pretrained and instruct-tuned for the Southeast Asia (SEA) region.

e5ed83576409 · 141GB
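
To try the model locally, standard Ollama CLI usage applies (note the first run pulls the full 141GB of F16 weights):

    ollama run csalab/sealion3.5:latest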
Metadata
• general.architecture: llama
• general.file_type: F16
• llama.attention.head_count: 64
• llama.attention.head_count_kv: 8
• llama.attention.key_length: 128
• llama.attention.layer_norm_rms_epsilon: 1e-05
• llama.attention.value_length: 128
• llama.block_count: 80
• llama.context_length: 131072
• llama.embedding_length: 8192
• llama.feed_forward_length: 28672
• llama.rope.dimension_count: 128
• llama.rope.freq_base: 500000
• llama.vocab_size: 128256
• tokenizer.ggml.add_bos_token: true
• tokenizer.ggml.add_eos_token: false
• tokenizer.ggml.add_space_prefix: false
• tokenizer.ggml.bos_token_id: 128000
• tokenizer.ggml.eos_token_id: 128001
• tokenizer.ggml.merges: [Ġ Ġ, Ġ ĠĠĠ, ĠĠ ĠĠ, ĠĠĠ Ġ, i n, ...]
• tokenizer.ggml.model: gpt2
• tokenizer.ggml.padding_token_id: 128004
• tokenizer.ggml.pre: llama-bpe
• tokenizer.ggml.token_type: [1, 1, 1, 1, 1, ...]
• tokenizer.ggml.tokens: [!, ", #, $, %, ...]
• split.count: 8
• split.no: 0
• split.tensors.count: 724
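
The geometry above pins down the parameter count, and with it the 141GB F16 footprint. A back-of-the-envelope sketch in Python, using only the listed values and assuming the standard llama GGUF layout with an untied input embedding (a token_embd.weight as the counterpart of the output.weight in the tensor list below):

    # Back-of-the-envelope parameter count from the metadata above.
    d_model    = 8192       # llama.embedding_length
    n_layers   = 80         # llama.block_count
    d_ffn      = 28672      # llama.feed_forward_length
    vocab      = 128256     # llama.vocab_size
    n_kv_heads = 8          # llama.attention.head_count_kv
    head_dim   = 128        # llama.attention.key_length / value_length

    d_kv  = n_kv_heads * head_dim                      # 1024: the attn_k / attn_v width (GQA)
    attn  = 2 * d_model * d_model + 2 * d_model * d_kv # q, output projections + k, v projections
    ffn   = 3 * d_model * d_ffn                        # gate, up, down
    block = attn + ffn + 2 * d_model                   # plus attn_norm and ffn_norm
    total = n_layers * block + 2 * vocab * d_model     # plus input embedding and output head

    print(f"{total / 1e9:.1f}B parameters")            # ~70.6B
    print(f"~{total * 2 / 1e9:.0f} GB at F16")         # ~141 GB, matching the size above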
  Tensor
Every one of the 80 transformer blocks (blk.0 through blk.79, per llama.block_count above) carries the same nine tensors:
• blk.N.attn_k.weight: F16 [8192, 1024]
• blk.N.attn_norm.weight: F32 [8192]
• blk.N.attn_output.weight: F16 [8192, 8192]
• blk.N.attn_q.weight: F16 [8192, 8192]
• blk.N.attn_v.weight: F16 [8192, 1024]
• blk.N.ffn_down.weight: F16 [28672, 8192]
• blk.N.ffn_gate.weight: F16 [8192, 28672]
• blk.N.ffn_norm.weight: F32 [8192]
• blk.N.ffn_up.weight: F16 [8192, 28672]
Model-level tensors:
• output.weight: F16 [8192, 128256]
• rope_freqs.weight: F32 [64]
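
The same metadata keys and tensor listing can be inspected offline with the gguf Python package published from the llama.cpp project. A minimal sketch; the filename is a hypothetical local path to the first of the eight splits (split.no 0 of split.count 8):

    # pip install gguf
    from gguf import GGUFReader

    # Hypothetical path; point this at the first split of the model.
    reader = GGUFReader("sea-lion-3.5-00001-of-00008.gguf")

    # Metadata keys, e.g. general.architecture, llama.block_count, ...
    for key in list(reader.fields)[:5]:
        print(key)

    # Name, dtype, and shape for each tensor in this split.
    for t in reader.tensors[:5]:
        print(t.name, t.tensor_type.name, list(t.shape))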