fauxpaslife/squishy:150m
36 Downloads · Updated 1 week ago
A tiny 150M completion model trained from scratch for short-story generation and for validating small-model pipelines. It is best at producing short story snippets (think of a young child telling little stories, and cute at times).
squishy:150m
model · 2524261bad58 · 269MB
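Because this is a raw completion model rather than a chat model, the natural way to use it is to give it a story opening and let it continue. A minimal sketch against a local Ollama server (this assumes the default endpoint at http://localhost:11434 and that the model has already been pulled; the prompt text and num_predict value are just illustrative):

    import requests

    # "raw": True skips any chat template, so the prompt reaches the
    # completion model verbatim; "stream": False returns one JSON object.
    resp = requests.post(
        "http://localhost:11434/api/generate",
        json={
            "model": "fauxpaslife/squishy:150m",
            "prompt": "Once upon a time, a little frog",
            "raw": True,
            "stream": False,
            "options": {"num_predict": 128},  # cap the continuation length
        },
        timeout=120,
    )
    print(resp.json()["response"])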
Metadata

general.architecture                      llama
general.file_type                         F16
llama.attention.head_count                12
llama.attention.head_count_kv             12
llama.attention.key_length                64
llama.attention.layer_norm_rms_epsilon    1e-06
llama.attention.value_length              64
llama.block_count                         12
llama.context_length                      1024
llama.embedding_length                    768
llama.feed_forward_length                 2048
llama.rope.dimension_count                64
llama.rope.freq_base                      10000
llama.vocab_size                          32000
tokenizer.ggml.add_bos_token              true
tokenizer.ggml.add_eos_token              false
tokenizer.ggml.add_space_prefix           true
tokenizer.ggml.bos_token_id               1
tokenizer.ggml.eos_token_id               2
tokenizer.ggml.model                      llama
tokenizer.ggml.padding_token_id           0
tokenizer.ggml.pre                        default
tokenizer.ggml.scores                     [-1000, -1000, -1000, -1000, -0, ...]
tokenizer.ggml.token_type                 [3, 3, 3, 3, 1, ...]
tokenizer.ggml.tokens                     [<pad>, <s>, </s>, <unk>, he, ...]
tokenizer.ggml.unknown_token_id           3
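The hyperparameters above fully determine the parameter count, so the "150M" label is easy to sanity-check. A quick back-of-the-envelope sketch, using only values from the metadata table plus the untied output head visible in the tensor list below (the arithmetic is mine, not from the model card):

    # Values taken from the metadata table above.
    d_model = 768     # llama.embedding_length
    d_ffn   = 2048    # llama.feed_forward_length
    n_layer = 12      # llama.block_count
    vocab   = 32000   # llama.vocab_size

    embed = vocab * d_model          # token_embd.weight
    head  = vocab * d_model          # output.weight (untied from embeddings)
    attn  = 4 * d_model * d_model    # q, k, v, output projections per block
    ffn   = 3 * d_model * d_ffn      # gate, up, down projections per block
    norms = 2 * d_model              # attn_norm + ffn_norm per block

    total = embed + head + n_layer * (attn + ffn + norms) + d_model  # + output_norm
    print(f"{total:,}")  # 134,105,856 -> roughly 134M, rounded up to "150M"

At F16 (2 bytes per weight) that works out to about 268 MB, consistent with the 269MB blob shown above.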
Tensors

Name                        Type  Shape
token_embd.weight           F16   [768, 32000]
blk.0.attn_k.weight         F16   [768, 768]
blk.0.attn_norm.weight      F32   [768]
blk.0.attn_output.weight    F16   [768, 768]
blk.0.attn_q.weight         F16   [768, 768]
blk.0.attn_v.weight         F16   [768, 768]
blk.0.ffn_down.weight       F16   [2048, 768]
blk.0.ffn_gate.weight       F16   [768, 2048]
blk.0.ffn_norm.weight       F32   [768]
blk.0.ffn_up.weight         F16   [768, 2048]
blk.1.attn_k.weight         F16   [768, 768]
blk.1.attn_norm.weight      F32   [768]
blk.1.attn_output.weight    F16   [768, 768]
blk.1.attn_q.weight         F16   [768, 768]
blk.1.attn_v.weight         F16   [768, 768]
blk.1.ffn_down.weight       F16   [2048, 768]
blk.1.ffn_gate.weight       F16   [768, 2048]
blk.1.ffn_norm.weight       F32   [768]
blk.1.ffn_up.weight         F16   [768, 2048]
blk.2.attn_k.weight         F16   [768, 768]
blk.2.attn_norm.weight      F32   [768]
blk.2.attn_output.weight    F16   [768, 768]
blk.2.attn_q.weight         F16   [768, 768]
blk.2.attn_v.weight         F16   [768, 768]
blk.2.ffn_down.weight       F16   [2048, 768]
blk.2.ffn_gate.weight       F16   [768, 2048]
blk.2.ffn_norm.weight       F32   [768]
blk.2.ffn_up.weight         F16   [768, 2048]
blk.3.attn_k.weight         F16   [768, 768]
blk.3.attn_norm.weight      F32   [768]
blk.3.attn_output.weight    F16   [768, 768]
blk.3.attn_q.weight         F16   [768, 768]
blk.3.attn_v.weight         F16   [768, 768]
blk.3.ffn_down.weight       F16   [2048, 768]
blk.3.ffn_gate.weight       F16   [768, 2048]
blk.3.ffn_norm.weight       F32   [768]
blk.3.ffn_up.weight         F16   [768, 2048]
blk.4.attn_k.weight         F16   [768, 768]
blk.4.attn_norm.weight      F32   [768]
blk.4.attn_output.weight    F16   [768, 768]
blk.4.attn_q.weight         F16   [768, 768]
blk.4.attn_v.weight         F16   [768, 768]
blk.4.ffn_down.weight       F16   [2048, 768]
blk.4.ffn_gate.weight       F16   [768, 2048]
blk.4.ffn_norm.weight       F32   [768]
blk.4.ffn_up.weight         F16   [768, 2048]
blk.5.attn_k.weight         F16   [768, 768]
blk.5.attn_norm.weight      F32   [768]
blk.5.attn_output.weight    F16   [768, 768]
blk.5.attn_q.weight         F16   [768, 768]
blk.5.attn_v.weight         F16   [768, 768]
blk.5.ffn_down.weight       F16   [2048, 768]
blk.5.ffn_gate.weight       F16   [768, 2048]
blk.5.ffn_norm.weight       F32   [768]
blk.5.ffn_up.weight         F16   [768, 2048]
blk.6.attn_k.weight         F16   [768, 768]
blk.6.attn_norm.weight      F32   [768]
blk.6.attn_output.weight    F16   [768, 768]
blk.6.attn_q.weight         F16   [768, 768]
blk.6.attn_v.weight         F16   [768, 768]
blk.6.ffn_down.weight       F16   [2048, 768]
blk.6.ffn_gate.weight       F16   [768, 2048]
blk.6.ffn_norm.weight       F32   [768]
blk.6.ffn_up.weight         F16   [768, 2048]
blk.7.attn_k.weight         F16   [768, 768]
blk.7.attn_norm.weight      F32   [768]
blk.7.attn_output.weight    F16   [768, 768]
blk.7.attn_q.weight         F16   [768, 768]
blk.7.attn_v.weight         F16   [768, 768]
blk.7.ffn_down.weight       F16   [2048, 768]
blk.7.ffn_gate.weight       F16   [768, 2048]
blk.7.ffn_norm.weight       F32   [768]
blk.7.ffn_up.weight         F16   [768, 2048]
blk.8.attn_k.weight         F16   [768, 768]
blk.8.attn_norm.weight      F32   [768]
blk.8.attn_output.weight    F16   [768, 768]
blk.8.attn_q.weight         F16   [768, 768]
blk.8.attn_v.weight         F16   [768, 768]
blk.8.ffn_down.weight       F16   [2048, 768]
blk.8.ffn_gate.weight       F16   [768, 2048]
blk.8.ffn_norm.weight       F32   [768]
blk.8.ffn_up.weight         F16   [768, 2048]
blk.9.attn_k.weight         F16   [768, 768]
blk.9.attn_norm.weight      F32   [768]
blk.9.attn_output.weight    F16   [768, 768]
blk.9.attn_q.weight         F16   [768, 768]
blk.9.attn_v.weight         F16   [768, 768]
blk.9.ffn_down.weight       F16   [2048, 768]
blk.9.ffn_gate.weight       F16   [768, 2048]
blk.9.ffn_norm.weight       F32   [768]
blk.9.ffn_up.weight         F16   [768, 2048]
blk.10.attn_k.weight        F16   [768, 768]
blk.10.attn_norm.weight     F32   [768]
blk.10.attn_output.weight   F16   [768, 768]
blk.10.attn_q.weight        F16   [768, 768]
blk.10.attn_v.weight        F16   [768, 768]
blk.10.ffn_down.weight      F16   [2048, 768]
blk.10.ffn_gate.weight      F16   [768, 2048]
blk.10.ffn_norm.weight      F32   [768]
blk.10.ffn_up.weight        F16   [768, 2048]
blk.11.attn_k.weight        F16   [768, 768]
blk.11.attn_norm.weight     F32   [768]
blk.11.attn_output.weight   F16   [768, 768]
blk.11.attn_q.weight        F16   [768, 768]
blk.11.attn_v.weight        F16   [768, 768]
blk.11.ffn_down.weight      F16   [2048, 768]
blk.11.ffn_gate.weight      F16   [768, 2048]
blk.11.ffn_norm.weight      F32   [768]
blk.11.ffn_up.weight        F16   [768, 2048]
output.weight               F16   [768, 32000]
output_norm.weight          F32   [768]
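Since every tensor's dtype and shape is listed, the 269MB blob size can also be reconstructed almost exactly (F16 is 2 bytes per element, F32 is 4; the small remainder is the GGUF header and tokenizer data). A sketch of that check:

    import math

    # One block's tensors, copied from the table; all 12 blocks are identical.
    per_block = [
        ("F16", (768, 768)),    # attn_k
        ("F32", (768,)),        # attn_norm
        ("F16", (768, 768)),    # attn_output
        ("F16", (768, 768)),    # attn_q
        ("F16", (768, 768)),    # attn_v
        ("F16", (2048, 768)),   # ffn_down
        ("F16", (768, 2048)),   # ffn_gate
        ("F32", (768,)),        # ffn_norm
        ("F16", (768, 2048)),   # ffn_up
    ]
    top_level = [
        ("F16", (768, 32000)),  # token_embd
        ("F16", (768, 32000)),  # output
        ("F32", (768,)),        # output_norm
    ]
    BYTES = {"F16": 2, "F32": 4}

    def nbytes(rows):
        return sum(math.prod(shape) * BYTES[dtype] for dtype, shape in rows)

    total = nbytes(top_level) + 12 * nbytes(per_block)
    print(f"{total / 1e6:.1f} MB")  # 268.3 MB, vs. the 269MB listed for the blob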