| Key | Value |
| --- | --- |
| general.architecture | bert |
| general.file_type | F16 |
| bert.attention.causal | false |
| bert.attention.head_count | 8 |
| bert.attention.layer_norm_epsilon | 1e-12 |
| bert.block_count | 4 |
| bert.context_length | 512 |
| bert.embedding_length | 512 |
| bert.feed_forward_length | 2048 |
| bert.pooling_type | Mean pooling |
| tokenizer.ggml.cls_token_id | 101 |
| tokenizer.ggml.mask_token_id | 103 |
| tokenizer.ggml.model | bert |
| tokenizer.ggml.padding_token_id | 0 |
| tokenizer.ggml.pre | bert-bge-small |
| tokenizer.ggml.seperator_token_id | 102 |
| tokenizer.ggml.token_type | [3 1 1 1 1 ...] |
| tokenizer.ggml.token_type_count | 2 |
| tokenizer.ggml.tokens | [[PAD] [unused1] [unused2] [unused3] [unused4] ...] |
| tokenizer.ggml.unknown_token_id | 100 |
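Everything above lives in the GGUF file's key/value metadata section, which begins with a fixed-size header (a listing like this one is typically produced by the `gguf-dump` tool that ships with llama.cpp's `gguf` Python package). As a dependency-free sanity check, the header alone can be read with the standard library. A minimal sketch, assuming a GGUF v2/v3 file and using `model.gguf` as a placeholder path:

```python
import struct

# Minimal GGUF header check, per the GGUF spec (v2/v3): 4-byte magic
# "GGUF", uint32 version, uint64 tensor count, uint64 metadata KV count.
# "model.gguf" is a placeholder path for the file described here.
with open("model.gguf", "rb") as f:
    magic, version, n_tensors, n_kv = struct.unpack("<4sIQQ", f.read(24))

assert magic == b"GGUF", "not a GGUF file"
print(f"GGUF v{version}: {n_tensors} tensors, {n_kv} metadata keys")
# For this file we would expect n_tensors == 69 (the table below) and
# the KV section to contain at least the 20 keys listed above, starting
# with general.architecture == "bert".
```

After the header, each of the `n_kv` pairs follows as a length-prefixed key string plus a typed value, which is how keys such as `general.architecture` and `bert.block_count` are encoded.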
| Name | Type | Shape |
| --- | --- | --- |
| token_embd_norm.bias | F32 | [512] |
| token_embd_norm.weight | F32 | [512] |
| position_embd.weight | F32 | [512 512] |
| token_embd.weight | F16 | [512 21128] |
| blk.0.attn_output_norm.bias | F32 | [512] |
| blk.0.attn_output_norm.weight | F32 | [512] |
| blk.0.attn_output.bias | F32 | [512] |
| blk.0.attn_output.weight | F16 | [512 512] |
| blk.0.attn_k.bias | F32 | [512] |
| blk.0.attn_k.weight | F16 | [512 512] |
| blk.0.attn_q.bias | F32 | [512] |
| blk.0.attn_q.weight | F16 | [512 512] |
| blk.0.attn_v.bias | F32 | [512] |
| blk.0.attn_v.weight | F16 | [512 512] |
| blk.0.ffn_up.bias | F32 | [2048] |
| blk.0.ffn_up.weight | F16 | [512 2048] |
| blk.0.layer_output_norm.bias | F32 | [512] |
| blk.0.layer_output_norm.weight | F32 | [512] |
| blk.0.ffn_down.bias | F32 | [512] |
| blk.0.ffn_down.weight | F16 | [2048 512] |
| blk.1.attn_output_norm.bias | F32 | [512] |
| blk.1.attn_output_norm.weight | F32 | [512] |
| blk.1.attn_output.bias | F32 | [512] |
| blk.1.attn_output.weight | F16 | [512 512] |
| blk.1.attn_k.bias | F32 | [512] |
| blk.1.attn_k.weight | F16 | [512 512] |
| blk.1.attn_q.bias | F32 | [512] |
| blk.1.attn_q.weight | F16 | [512 512] |
| blk.1.attn_v.bias | F32 | [512] |
| blk.1.attn_v.weight | F16 | [512 512] |
| blk.1.ffn_up.bias | F32 | [2048] |
| blk.1.ffn_up.weight | F16 | [512 2048] |
| blk.1.layer_output_norm.bias | F32 | [512] |
| blk.1.layer_output_norm.weight | F32 | [512] |
| blk.1.ffn_down.bias | F32 | [512] |
| blk.1.ffn_down.weight | F16 | [2048 512] |
| blk.2.attn_output_norm.bias | F32 | [512] |
| blk.2.attn_output_norm.weight | F32 | [512] |
| blk.2.attn_output.bias | F32 | [512] |
| blk.2.attn_output.weight | F16 | [512 512] |
| blk.2.attn_k.bias | F32 | [512] |
| blk.2.attn_k.weight | F16 | [512 512] |
| blk.2.attn_q.bias | F32 | [512] |
| blk.2.attn_q.weight | F16 | [512 512] |
| blk.2.attn_v.bias | F32 | [512] |
| blk.2.attn_v.weight | F16 | [512 512] |
| blk.2.ffn_up.bias | F32 | [2048] |
| blk.2.ffn_up.weight | F16 | [512 2048] |
| blk.2.layer_output_norm.bias | F32 | [512] |
| blk.2.layer_output_norm.weight | F32 | [512] |
| blk.2.ffn_down.bias | F32 | [512] |
| blk.2.ffn_down.weight | F16 | [2048 512] |
| blk.3.attn_output_norm.bias | F32 | [512] |
| blk.3.attn_output_norm.weight | F32 | [512] |
| blk.3.attn_output.bias | F32 | [512] |
| blk.3.attn_output.weight | F16 | [512 512] |
| blk.3.attn_k.bias | F32 | [512] |
| blk.3.attn_k.weight | F16 | [512 512] |
| blk.3.attn_q.bias | F32 | [512] |
| blk.3.attn_q.weight | F16 | [512 512] |
| blk.3.attn_v.bias | F32 | [512] |
| blk.3.attn_v.weight | F16 | [512 512] |
| blk.3.ffn_up.bias | F32 | [2048] |
| blk.3.ffn_up.weight | F16 | [512 2048] |
| blk.3.layer_output_norm.bias | F32 | [512] |
| blk.3.layer_output_norm.weight | F32 | [512] |
| blk.3.ffn_down.bias | F32 | [512] |
| blk.3.ffn_down.weight | F16 | [2048 512] |
| token_types.weight | F32 | [512 2] |
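Two reading notes on this table: ggml lists the innermost dimension first, so `token_embd.weight` [512 21128] is a 21128-entry vocabulary table of width 512; and since `bert.pooling_type` is Mean pooling, the sentence embedding is the padding-masked average of the last layer's token states. Below is a minimal NumPy sketch of that pooling step, assuming a (seq_len, 512) hidden-state matrix; the function name and the two middle token ids are illustrative, while CLS = 101, SEP = 102, and PAD = 0 come from the metadata above.

```python
import numpy as np

EMBD = 512                   # bert.embedding_length
CLS, SEP, PAD = 101, 102, 0  # tokenizer.ggml.*_token_id values above

def mean_pool(hidden: np.ndarray, token_ids: np.ndarray) -> np.ndarray:
    """Average the final-layer token states, skipping padding positions.

    hidden:    (seq_len, EMBD) last-layer output for one sequence
    token_ids: (seq_len,) input ids, used only to locate PAD positions
    """
    mask = (token_ids != PAD).astype(hidden.dtype)  # (seq_len,)
    summed = (hidden * mask[:, None]).sum(axis=0)   # (EMBD,)
    return summed / max(mask.sum(), 1.0)            # avoid divide-by-zero

# Toy usage: a [CLS] ... [SEP] sequence padded to length 8; the two
# middle ids are made up, only CLS/SEP/PAD come from the metadata.
hidden = np.random.randn(8, EMBD).astype(np.float32)
ids = np.array([CLS, 2769, 4263, SEP, PAD, PAD, PAD, PAD])
print(mean_pool(hidden, ids).shape)  # (512,)
```

Downstream consumers often L2-normalize the pooled vector before cosine similarity, but that choice is not recorded anywhere in the GGUF.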