A terrible LLM.

49261edd8f8a · 34MB
Metadata
  • general.architecture: gpt2
  • general.file_type: Q8_0
  • gpt2.attention.head_count: 6
  • gpt2.attention.layer_norm_epsilon: 1e-05
  • gpt2.block_count: 6
  • gpt2.context_length: 256
  • gpt2.embedding_length: 384
  • gpt2.feed_forward_length: 1536
  • tokenizer.ggml.bos_token_id: 50256
  • tokenizer.ggml.eos_token_id: 50256
  • tokenizer.ggml.merges: [Ġ t, Ġ a, h e, i n, r e, ...]
  • tokenizer.ggml.model: gpt2
  • tokenizer.ggml.padding_token_id: 50256
  • tokenizer.ggml.pre: gpt-2
  • tokenizer.ggml.token_type: [1, 1, 1, 1, 1, ...]
  • tokenizer.ggml.tokens: [!, ", #, $, %, ...]
  • tokenizer.ggml.unknown_token_id: 50256
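
These metadata values pin down the full GPT-2 geometry before looking at the tensor list below. A minimal sketch (Python, hypothetical class and variable names; only the values listed above are assumed) that derives the per-head and fused-projection widths from them:

```python
# Minimal sketch: the GGUF metadata above collected into a config
# (hypothetical class name; values copied verbatim from the listing).
from dataclasses import dataclass

@dataclass
class Gpt2Config:
    n_layer: int = 6          # gpt2.block_count
    n_head: int = 6           # gpt2.attention.head_count
    n_embd: int = 384         # gpt2.embedding_length
    n_ctx: int = 256          # gpt2.context_length
    n_ffn: int = 1536         # gpt2.feed_forward_length
    n_vocab: int = 50257      # length of tokenizer.ggml.tokens (GPT-2 BPE)
    ln_eps: float = 1e-5      # gpt2.attention.layer_norm_epsilon

cfg = Gpt2Config()
head_dim = cfg.n_embd // cfg.n_head    # 384 // 6 = 64 dims per attention head
qkv_width = 3 * cfg.n_embd             # 1152: width of the fused Q, K, V projection
assert cfg.n_ffn == 4 * cfg.n_embd     # the usual GPT-2 4x feed-forward expansion
print(head_dim, qkv_width)             # -> 64 1152
```

The derived 1152-wide fused QKV projection and the 4x (1536-wide) feed-forward expansion appear directly as tensor shapes in the listing below.
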
Tensor
  • token_embd.weight  Q8_0  [384, 50257]
  • blk.0
    • blk.0.attn_norm.bias  F32  [384]
    • blk.0.attn_norm.weight  F32  [384]
    • blk.0.attn_output.bias  F32  [384]
    • blk.0.attn_output.weight  Q8_0  [384, 384]
    • blk.0.attn_qkv.bias  F32  [1152]
    • blk.0.attn_qkv.weight  Q8_0  [384, 1152]
    • blk.0.ffn_down.bias  F32  [384]
    • blk.0.ffn_down.weight  Q8_0  [1536, 384]
    • blk.0.ffn_norm.bias  F32  [384]
    • blk.0.ffn_norm.weight  F32  [384]
    • blk.0.ffn_up.bias  F32  [1536]
    • blk.0.ffn_up.weight  Q8_0  [384, 1536]
  • blk.1
    • blk.1.attn_norm.bias  F32  [384]
    • blk.1.attn_norm.weight  F32  [384]
    • blk.1.attn_output.bias  F32  [384]
    • blk.1.attn_output.weight  Q8_0  [384, 384]
    • blk.1.attn_qkv.bias  F32  [1152]
    • blk.1.attn_qkv.weight  Q8_0  [384, 1152]
    • blk.1.ffn_down.bias  F32  [384]
    • blk.1.ffn_down.weight  Q8_0  [1536, 384]
    • blk.1.ffn_norm.bias  F32  [384]
    • blk.1.ffn_norm.weight  F32  [384]
    • blk.1.ffn_up.bias  F32  [1536]
    • blk.1.ffn_up.weight  Q8_0  [384, 1536]
  • blk.2
    • blk.2.attn_norm.bias  F32  [384]
    • blk.2.attn_norm.weight  F32  [384]
    • blk.2.attn_output.bias  F32  [384]
    • blk.2.attn_output.weight  Q8_0  [384, 384]
    • blk.2.attn_qkv.bias  F32  [1152]
    • blk.2.attn_qkv.weight  Q8_0  [384, 1152]
    • blk.2.ffn_down.bias  F32  [384]
    • blk.2.ffn_down.weight  Q8_0  [1536, 384]
    • blk.2.ffn_norm.bias  F32  [384]
    • blk.2.ffn_norm.weight  F32  [384]
    • blk.2.ffn_up.bias  F32  [1536]
    • blk.2.ffn_up.weight  Q8_0  [384, 1536]
  • blk.3
    • blk.3.attn_norm.bias  F32  [384]
    • blk.3.attn_norm.weight  F32  [384]
    • blk.3.attn_output.bias  F32  [384]
    • blk.3.attn_output.weight  Q8_0  [384, 384]
    • blk.3.attn_qkv.bias  F32  [1152]
    • blk.3.attn_qkv.weight  Q8_0  [384, 1152]
    • blk.3.ffn_down.bias  F32  [384]
    • blk.3.ffn_down.weight  Q8_0  [1536, 384]
    • blk.3.ffn_norm.bias  F32  [384]
    • blk.3.ffn_norm.weight  F32  [384]
    • blk.3.ffn_up.bias  F32  [1536]
    • blk.3.ffn_up.weight  Q8_0  [384, 1536]
  • blk.4
    • blk.4.attn_norm.bias  F32  [384]
    • blk.4.attn_norm.weight  F32  [384]
    • blk.4.attn_output.bias  F32  [384]
    • blk.4.attn_output.weight  Q8_0  [384, 384]
    • blk.4.attn_qkv.bias  F32  [1152]
    • blk.4.attn_qkv.weight  Q8_0  [384, 1152]
    • blk.4.ffn_down.bias  F32  [384]
    • blk.4.ffn_down.weight  Q8_0  [1536, 384]
    • blk.4.ffn_norm.bias  F32  [384]
    • blk.4.ffn_norm.weight  F32  [384]
    • blk.4.ffn_up.bias  F32  [1536]
    • blk.4.ffn_up.weight  Q8_0  [384, 1536]
  • blk.5
    • blk.5.attn_norm.bias  F32  [384]
    • blk.5.attn_norm.weight  F32  [384]
    • blk.5.attn_output.bias  F32  [384]
    • blk.5.attn_output.weight  Q8_0  [384, 384]
    • blk.5.attn_qkv.bias  F32  [1152]
    • blk.5.attn_qkv.weight  Q8_0  [384, 1152]
    • blk.5.ffn_down.bias  F32  [384]
    • blk.5.ffn_down.weight  Q8_0  [1536, 384]
    • blk.5.ffn_norm.bias  F32  [384]
    • blk.5.ffn_norm.weight  F32  [384]
    • blk.5.ffn_up.bias  F32  [1536]
    • blk.5.ffn_up.weight  Q8_0  [384, 1536]
  • output_norm.bias  F32  [384]
  • position_embd.weight  F32  [384, 256]
  • output_norm.weight  F32  [384]
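
As a rough cross-check of the 34MB figure in the header, a back-of-the-envelope sketch (Python) that totals the shapes listed above, assuming llama.cpp's Q8_0 layout (blocks of 32 int8 weights plus one fp16 scale, i.e. 34 bytes per 32 elements) and 4 bytes per F32 element:

```python
# Back-of-the-envelope size check from the tensor shapes listed above.
Q8_0_BLOCK, Q8_0_BYTES_PER_BLOCK = 32, 34   # 32 int8 weights + 1 fp16 scale
F32_BYTES = 4

n_embd, n_ffn, n_vocab, n_ctx, n_layer = 384, 1536, 50257, 256, 6

q8_elems = n_vocab * n_embd                       # token_embd.weight
q8_elems += n_layer * (
    n_embd * 3 * n_embd                           # attn_qkv.weight
    + n_embd * n_embd                             # attn_output.weight
    + n_embd * n_ffn                              # ffn_up.weight
    + n_ffn * n_embd                              # ffn_down.weight
)
f32_elems = n_ctx * n_embd + 2 * n_embd           # position_embd + output_norm w/b
f32_elems += n_layer * (
    4 * n_embd                                    # attn_norm, ffn_norm weight + bias
    + 3 * n_embd + n_embd                         # attn_qkv, attn_output biases
    + n_ffn + n_embd                              # ffn_up, ffn_down biases
)

params = q8_elems + f32_elems
size = q8_elems // Q8_0_BLOCK * Q8_0_BYTES_PER_BLOCK + f32_elems * F32_BYTES
print(f"{params / 1e6:.1f}M params, ~{size / 1e6:.1f} MB of tensor data")
# -> roughly 30.0M params, ~32.3 MB of tensor data
```

The remaining megabyte or two of the 34MB blob is plausibly the GGUF header plus the tokenizer arrays (tokens, merges, token_type) stored in the metadata, so the tensor shapes and the reported file size are consistent.
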