sam860/LFM2:700m-Q8_0
933 Downloads · Updated 1 month ago
Small models made to run on mobile devices, developed by Liquid AI
Available sizes: 350m · 700m · 1.2b · 2.6b · 8b
LFM2:700m-Q8_0 / ... / model
0967d902ed27 · 792MB
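
To fetch and run this exact quantization locally, the standard Ollama CLI invocation uses the model tag shown above:

    ollama run sam860/LFM2:700m-Q8_0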
Metadata
general.architecture                     lfm2
general.file_type                        Q8_0
lfm2.attention.head_count                24
lfm2.attention.head_count_kv             [0, 0, 8, 0, 0, ...]
lfm2.attention.layer_norm_rms_epsilon    1e-05
lfm2.block_count                         16
lfm2.context_length                      128000
lfm2.embedding_length                    1536
lfm2.feed_forward_length                 6912
lfm2.rope.freq_base                      1e+06
lfm2.shortconv.l_cache                   3
lfm2.vocab_size                          65536
tokenizer.ggml.add_bos_token             true
tokenizer.ggml.add_eos_token             false
tokenizer.ggml.add_sep_token             false
tokenizer.ggml.bos_token_id              1
tokenizer.ggml.eos_token_id              7
tokenizer.ggml.merges                    [Ċ Ċ, Ċ ĊĊ, ĊĊ Ċ, Ċ ĊĊĊ, ĊĊ ĊĊ, ...]
tokenizer.ggml.model                     gpt2
tokenizer.ggml.padding_token_id          0
tokenizer.ggml.pre                       lfm2
tokenizer.ggml.token_type                [3, 3, 3, 3, 3, ...]
tokenizer.ggml.tokens                    [<|pad|>, <|startoftext|>, <|endoftext|>, <|fim_pre|>, <|fim_mid|>, ...]
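
These fields can also be read straight out of the GGUF file. A minimal sketch, assuming the gguf-py package from the llama.cpp project (pip install gguf) and a placeholder path to the downloaded blob:

    # Sketch only: "model.gguf" is a placeholder path, not the blob name above.
    from gguf import GGUFReader

    reader = GGUFReader("model.gguf")
    for name, field in reader.fields.items():
        # ReaderField stores raw byte parts; decoding to a Python value is
        # type-specific, so this just lists keys and their GGUF value types.
        print(name, field.types)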
Tensor
Name                                Type   Shape
token_embd.weight                   Q8_0   [1536, 65536]
blk.0
blk.0.attn_norm.weight              F32    [1536]
blk.0.ffn_down.weight               Q8_0   [6912, 1536]
blk.0.ffn_gate.weight               Q8_0   [1536, 6912]
blk.0.ffn_norm.weight               F32    [1536]
blk.0.ffn_up.weight                 Q8_0   [1536, 6912]
blk.0.shortconv.conv.weight         F32    [3, 1536]
blk.0.shortconv.in_proj.weight      Q8_0   [1536, 4608]
blk.0.shortconv.out_proj.weight     Q8_0   [1536, 1536]
blk.1
blk.1.attn_norm.weight              F32    [1536]
blk.1.ffn_down.weight               Q8_0   [6912, 1536]
blk.1.ffn_gate.weight               Q8_0   [1536, 6912]
blk.1.ffn_norm.weight               F32    [1536]
blk.1.ffn_up.weight                 Q8_0   [1536, 6912]
blk.1.shortconv.conv.weight         F32    [3, 1536]
blk.1.shortconv.in_proj.weight      Q8_0   [1536, 4608]
blk.1.shortconv.out_proj.weight     Q8_0   [1536, 1536]
blk.2
blk.2.attn_k.weight                 Q8_0   [1536, 512]
blk.2.attn_k_norm.weight            F32    [64]
blk.2.attn_norm.weight              F32    [1536]
blk.2.attn_output.weight            Q8_0   [1536, 1536]
blk.2.attn_q.weight                 Q8_0   [1536, 1536]
blk.2.attn_q_norm.weight            F32    [64]
blk.2.attn_v.weight                 Q8_0   [1536, 512]
blk.2.ffn_down.weight               Q8_0   [6912, 1536]
blk.2.ffn_gate.weight               Q8_0   [1536, 6912]
blk.2.ffn_norm.weight               F32    [1536]
blk.2.ffn_up.weight                 Q8_0   [1536, 6912]
blk.3
blk.3.attn_norm.weight              F32    [1536]
blk.3.ffn_down.weight               Q8_0   [6912, 1536]
blk.3.ffn_gate.weight               Q8_0   [1536, 6912]
blk.3.ffn_norm.weight               F32    [1536]
blk.3.ffn_up.weight                 Q8_0   [1536, 6912]
blk.3.shortconv.conv.weight         F32    [3, 1536]
blk.3.shortconv.in_proj.weight      Q8_0   [1536, 4608]
blk.3.shortconv.out_proj.weight     Q8_0   [1536, 1536]
blk.4
blk.4.attn_norm.weight              F32    [1536]
blk.4.ffn_down.weight               Q8_0   [6912, 1536]
blk.4.ffn_gate.weight               Q8_0   [1536, 6912]
blk.4.ffn_norm.weight               F32    [1536]
blk.4.ffn_up.weight                 Q8_0   [1536, 6912]
blk.4.shortconv.conv.weight         F32    [3, 1536]
blk.4.shortconv.in_proj.weight      Q8_0   [1536, 4608]
blk.4.shortconv.out_proj.weight     Q8_0   [1536, 1536]
blk.5
blk.5.attn_k.weight                 Q8_0   [1536, 512]
blk.5.attn_k_norm.weight            F32    [64]
blk.5.attn_norm.weight              F32    [1536]
blk.5.attn_output.weight            Q8_0   [1536, 1536]
blk.5.attn_q.weight                 Q8_0   [1536, 1536]
blk.5.attn_q_norm.weight            F32    [64]
blk.5.attn_v.weight                 Q8_0   [1536, 512]
blk.5.ffn_down.weight               Q8_0   [6912, 1536]
blk.5.ffn_gate.weight               Q8_0   [1536, 6912]
blk.5.ffn_norm.weight               F32    [1536]
blk.5.ffn_up.weight                 Q8_0   [1536, 6912]
blk.6
blk.6.attn_norm.weight              F32    [1536]
blk.6.ffn_down.weight               Q8_0   [6912, 1536]
blk.6.ffn_gate.weight               Q8_0   [1536, 6912]
blk.6.ffn_norm.weight               F32    [1536]
blk.6.ffn_up.weight                 Q8_0   [1536, 6912]
blk.6.shortconv.conv.weight         F32    [3, 1536]
blk.6.shortconv.in_proj.weight      Q8_0   [1536, 4608]
blk.6.shortconv.out_proj.weight     Q8_0   [1536, 1536]
blk.7
blk.7.attn_norm.weight              F32    [1536]
blk.7.ffn_down.weight               Q8_0   [6912, 1536]
blk.7.ffn_gate.weight               Q8_0   [1536, 6912]
blk.7.ffn_norm.weight               F32    [1536]
blk.7.ffn_up.weight                 Q8_0   [1536, 6912]
blk.7.shortconv.conv.weight         F32    [3, 1536]
blk.7.shortconv.in_proj.weight      Q8_0   [1536, 4608]
blk.7.shortconv.out_proj.weight     Q8_0   [1536, 1536]
blk.8
blk.8.attn_k.weight                 Q8_0   [1536, 512]
blk.8.attn_k_norm.weight            F32    [64]
blk.8.attn_norm.weight              F32    [1536]
blk.8.attn_output.weight            Q8_0   [1536, 1536]
blk.8.attn_q.weight                 Q8_0   [1536, 1536]
blk.8.attn_q_norm.weight            F32    [64]
blk.8.attn_v.weight                 Q8_0   [1536, 512]
blk.8.ffn_down.weight               Q8_0   [6912, 1536]
blk.8.ffn_gate.weight               Q8_0   [1536, 6912]
blk.8.ffn_norm.weight               F32    [1536]
blk.8.ffn_up.weight                 Q8_0   [1536, 6912]
blk.9
blk.9.attn_norm.weight              F32    [1536]
blk.9.ffn_down.weight               Q8_0   [6912, 1536]
blk.9.ffn_gate.weight               Q8_0   [1536, 6912]
blk.9.ffn_norm.weight               F32    [1536]
blk.9.ffn_up.weight                 Q8_0   [1536, 6912]
blk.9.shortconv.conv.weight         F32    [3, 1536]
blk.9.shortconv.in_proj.weight      Q8_0   [1536, 4608]
blk.9.shortconv.out_proj.weight     Q8_0   [1536, 1536]
blk.10
blk.10.attn_k.weight                Q8_0   [1536, 512]
blk.10.attn_k_norm.weight           F32    [64]
blk.10.attn_norm.weight             F32    [1536]
blk.10.attn_output.weight           Q8_0   [1536, 1536]
blk.10.attn_q.weight                Q8_0   [1536, 1536]
blk.10.attn_q_norm.weight           F32    [64]
blk.10.attn_v.weight                Q8_0   [1536, 512]
blk.10.ffn_down.weight              Q8_0   [6912, 1536]
blk.10.ffn_gate.weight              Q8_0   [1536, 6912]
blk.10.ffn_norm.weight              F32    [1536]
blk.10.ffn_up.weight                Q8_0   [1536, 6912]
blk.11
blk.11.attn_norm.weight             F32    [1536]
blk.11.ffn_down.weight              Q8_0   [6912, 1536]
blk.11.ffn_gate.weight              Q8_0   [1536, 6912]
blk.11.ffn_norm.weight              F32    [1536]
blk.11.ffn_up.weight                Q8_0   [1536, 6912]
blk.11.shortconv.conv.weight        F32    [3, 1536]
blk.11.shortconv.in_proj.weight     Q8_0   [1536, 4608]
blk.11.shortconv.out_proj.weight    Q8_0   [1536, 1536]
blk.12
blk.12.attn_k.weight                Q8_0   [1536, 512]
blk.12.attn_k_norm.weight           F32    [64]
blk.12.attn_norm.weight             F32    [1536]
blk.12.attn_output.weight           Q8_0   [1536, 1536]
blk.12.attn_q.weight                Q8_0   [1536, 1536]
blk.12.attn_q_norm.weight           F32    [64]
blk.12.attn_v.weight                Q8_0   [1536, 512]
blk.12.ffn_down.weight              Q8_0   [6912, 1536]
blk.12.ffn_gate.weight              Q8_0   [1536, 6912]
blk.12.ffn_norm.weight              F32    [1536]
blk.12.ffn_up.weight                Q8_0   [1536, 6912]
blk.13
blk.13.attn_norm.weight             F32    [1536]
blk.13.ffn_down.weight              Q8_0   [6912, 1536]
blk.13.ffn_gate.weight              Q8_0   [1536, 6912]
blk.13.ffn_norm.weight              F32    [1536]
blk.13.ffn_up.weight                Q8_0   [1536, 6912]
blk.13.shortconv.conv.weight        F32    [3, 1536]
blk.13.shortconv.in_proj.weight     Q8_0   [1536, 4608]
blk.13.shortconv.out_proj.weight    Q8_0   [1536, 1536]
blk.14
blk.14.attn_k.weight                Q8_0   [1536, 512]
blk.14.attn_k_norm.weight           F32    [64]
blk.14.attn_norm.weight             F32    [1536]
blk.14.attn_output.weight           Q8_0   [1536, 1536]
blk.14.attn_q.weight                Q8_0   [1536, 1536]
blk.14.attn_q_norm.weight           F32    [64]
blk.14.attn_v.weight                Q8_0   [1536, 512]
blk.14.ffn_down.weight              Q8_0   [6912, 1536]
blk.14.ffn_gate.weight              Q8_0   [1536, 6912]
blk.14.ffn_norm.weight              F32    [1536]
blk.14.ffn_up.weight                Q8_0   [1536, 6912]
blk.15
blk.15.attn_norm.weight             F32    [1536]
blk.15.ffn_down.weight              Q8_0   [6912, 1536]
blk.15.ffn_gate.weight              Q8_0   [1536, 6912]
blk.15.ffn_norm.weight              F32    [1536]
blk.15.ffn_up.weight                Q8_0   [1536, 6912]
blk.15.shortconv.conv.weight        F32    [3, 1536]
blk.15.shortconv.in_proj.weight     Q8_0   [1536, 4608]
blk.15.shortconv.out_proj.weight    Q8_0   [1536, 1536]
token_embd_norm.weight              F32    [1536]
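
The tensor listing exposes LFM2's hybrid layout directly: blocks 2, 5, 8, 10, 12, and 14 carry attention tensors (attn_q, attn_k, attn_v, attn_output), while the other ten blocks use short convolutions (shortconv.conv, shortconv.in_proj, shortconv.out_proj), consistent with the sparse lfm2.attention.head_count_kv array above (8 KV heads at attention blocks, 0 elsewhere). A minimal sketch that recovers this grouping from the file, under the same gguf-py and placeholder-path assumptions as before:

    # Sketch only: classify each block as attention or shortconv by tensor name.
    from collections import defaultdict
    from gguf import GGUFReader

    reader = GGUFReader("model.gguf")   # placeholder path
    kinds = defaultdict(set)
    for t in reader.tensors:            # ReaderTensor: .name, .shape, .tensor_type
        parts = t.name.split(".")
        if parts[0] == "blk":
            kinds[int(parts[1])].add(parts[2])

    for blk in sorted(kinds):
        print(f"blk.{blk}:", "attention" if "attn_q" in kinds[blk] else "shortconv")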