anindya/prem1b-sql-ollama-fp116:latest
1,233 Downloads · Updated 10 months ago
A fully local, 1B-parameter text-to-SQL model that runs on your laptop.
model    e7548786ee77 · 2.7GB
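The model can be pulled and run like any other Ollama model (ollama run anindya/prem1b-sql-ollama-fp116:latest). Below is a minimal sketch using the official ollama Python client; the table schema and question are invented for illustration, and the plain schema-plus-question prompt layout is an assumption, since this page does not document the model's prompt template.

    # Minimal usage sketch, assuming the model has been pulled with
    # `ollama pull anindya/prem1b-sql-ollama-fp116:latest` and the Ollama
    # server is running. The prompt layout below is an assumption; this
    # page does not document the model's expected template.
    import ollama

    schema = "CREATE TABLE orders (id INT, customer TEXT, total REAL, placed_at DATE);"
    question = "What is the total order value per customer?"

    response = ollama.generate(
        model="anindya/prem1b-sql-ollama-fp116:latest",
        prompt=f"### Schema:\n{schema}\n\n### Question:\n{question}\n\n### SQL:",
    )
    print(response["response"])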
Metadata

general.architecture                      llama
general.file_type                         F16
llama.attention.head_count                16
llama.attention.head_count_kv             16
llama.attention.key_length                128
llama.attention.layer_norm_rms_epsilon    1e-06
llama.attention.value_length              128
llama.block_count                         24
llama.context_length                      16384
llama.embedding_length                    2048
llama.feed_forward_length                 5504
llama.rope.dimension_count                128
llama.rope.freq_base                      100000
llama.rope.scaling.factor                 4
llama.rope.scaling.type                   linear
llama.vocab_size                          32256
tokenizer.ggml.add_bos_token              true
tokenizer.ggml.add_eos_token              false
tokenizer.ggml.add_space_prefix           false
tokenizer.ggml.bos_token_id               32013
tokenizer.ggml.eos_token_id               32021
tokenizer.ggml.merges                     [Ġ Ġ, Ġ t, Ġ a, i n, h e, ...]
tokenizer.ggml.model                      gpt2
tokenizer.ggml.padding_token_id           32014
tokenizer.ggml.pre                        deepseek-coder
tokenizer.ggml.token_type                 [1, 1, 1, 1, 1, ...]
tokenizer.ggml.tokens                     [!, ", #, $, %, ...]
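One detail worth noting: llama.context_length is 16384 (suggesting a 4096-token base window extended 4x via linear RoPE scaling, per llama.rope.scaling.factor and llama.rope.scaling.type), but Ollama's runtime typically defaults to a shorter context window than a model's maximum. A hedged sketch of requesting the full window through the documented num_ctx option:

    # Sketch: asking for the model's full 16K window via the documented
    # num_ctx option; Ollama otherwise applies its own, smaller default.
    import ollama

    response = ollama.generate(
        model="anindya/prem1b-sql-ollama-fp116:latest",
        prompt="-- a long schema and question would go here",
        options={"num_ctx": 16384},  # matches llama.context_length above
    )
    print(response["response"])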
Tensor

Name                          Type   Shape
token_embd.weight             F16    [2048, 32256]
blk.0.attn_k.weight           F16    [2048, 2048]
blk.0.attn_norm.weight        F32    [2048]
blk.0.attn_output.weight      F16    [2048, 2048]
blk.0.attn_q.weight           F16    [2048, 2048]
blk.0.attn_v.weight           F16    [2048, 2048]
blk.0.ffn_down.weight         F16    [5504, 2048]
blk.0.ffn_gate.weight         F16    [2048, 5504]
blk.0.ffn_norm.weight         F32    [2048]
blk.0.ffn_up.weight           F16    [2048, 5504]
blk.1.attn_k.weight           F16    [2048, 2048]
blk.1.attn_norm.weight        F32    [2048]
blk.1.attn_output.weight      F16    [2048, 2048]
blk.1.attn_q.weight           F16    [2048, 2048]
blk.1.attn_v.weight           F16    [2048, 2048]
blk.1.ffn_down.weight         F16    [5504, 2048]
blk.1.ffn_gate.weight         F16    [2048, 5504]
blk.1.ffn_norm.weight         F32    [2048]
blk.1.ffn_up.weight           F16    [2048, 5504]
blk.2.attn_k.weight           F16    [2048, 2048]
blk.2.attn_norm.weight        F32    [2048]
blk.2.attn_output.weight      F16    [2048, 2048]
blk.2.attn_q.weight           F16    [2048, 2048]
blk.2.attn_v.weight           F16    [2048, 2048]
blk.2.ffn_down.weight         F16    [5504, 2048]
blk.2.ffn_gate.weight         F16    [2048, 5504]
blk.2.ffn_norm.weight         F32    [2048]
blk.2.ffn_up.weight           F16    [2048, 5504]
blk.3.attn_k.weight           F16    [2048, 2048]
blk.3.attn_norm.weight        F32    [2048]
blk.3.attn_output.weight      F16    [2048, 2048]
blk.3.attn_q.weight           F16    [2048, 2048]
blk.3.attn_v.weight           F16    [2048, 2048]
blk.3.ffn_down.weight         F16    [5504, 2048]
blk.3.ffn_gate.weight         F16    [2048, 5504]
blk.3.ffn_norm.weight         F32    [2048]
blk.3.ffn_up.weight           F16    [2048, 5504]
blk.4.attn_k.weight           F16    [2048, 2048]
blk.4.attn_norm.weight        F32    [2048]
blk.4.attn_output.weight      F16    [2048, 2048]
blk.4.attn_q.weight           F16    [2048, 2048]
blk.4.attn_v.weight           F16    [2048, 2048]
blk.4.ffn_down.weight         F16    [5504, 2048]
blk.4.ffn_gate.weight         F16    [2048, 5504]
blk.4.ffn_norm.weight         F32    [2048]
blk.4.ffn_up.weight           F16    [2048, 5504]
blk.5.attn_k.weight           F16    [2048, 2048]
blk.5.attn_norm.weight        F32    [2048]
blk.5.attn_output.weight      F16    [2048, 2048]
blk.5.attn_q.weight           F16    [2048, 2048]
blk.5.attn_v.weight           F16    [2048, 2048]
blk.5.ffn_down.weight         F16    [5504, 2048]
blk.5.ffn_gate.weight         F16    [2048, 5504]
blk.5.ffn_norm.weight         F32    [2048]
blk.5.ffn_up.weight           F16    [2048, 5504]
blk.6.attn_k.weight           F16    [2048, 2048]
blk.6.attn_norm.weight        F32    [2048]
blk.6.attn_output.weight      F16    [2048, 2048]
blk.6.attn_q.weight           F16    [2048, 2048]
blk.6.attn_v.weight           F16    [2048, 2048]
blk.6.ffn_down.weight         F16    [5504, 2048]
blk.6.ffn_gate.weight         F16    [2048, 5504]
blk.6.ffn_norm.weight         F32    [2048]
blk.6.ffn_up.weight           F16    [2048, 5504]
blk.7.attn_k.weight           F16    [2048, 2048]
blk.7.attn_norm.weight        F32    [2048]
blk.7.attn_output.weight      F16    [2048, 2048]
blk.7.attn_q.weight           F16    [2048, 2048]
blk.7.attn_v.weight           F16    [2048, 2048]
blk.7.ffn_down.weight         F16    [5504, 2048]
blk.7.ffn_gate.weight         F16    [2048, 5504]
blk.7.ffn_norm.weight         F32    [2048]
blk.7.ffn_up.weight           F16    [2048, 5504]
blk.8.attn_k.weight           F16    [2048, 2048]
blk.8.attn_norm.weight        F32    [2048]
blk.8.attn_output.weight      F16    [2048, 2048]
blk.8.attn_q.weight           F16    [2048, 2048]
blk.8.attn_v.weight           F16    [2048, 2048]
blk.8.ffn_down.weight         F16    [5504, 2048]
blk.8.ffn_gate.weight         F16    [2048, 5504]
blk.8.ffn_norm.weight         F32    [2048]
blk.8.ffn_up.weight           F16    [2048, 5504]
blk.9.attn_k.weight           F16    [2048, 2048]
blk.9.attn_norm.weight        F32    [2048]
blk.9.attn_output.weight      F16    [2048, 2048]
blk.9.attn_q.weight           F16    [2048, 2048]
blk.9.attn_v.weight           F16    [2048, 2048]
blk.9.ffn_down.weight         F16    [5504, 2048]
blk.9.ffn_gate.weight         F16    [2048, 5504]
blk.9.ffn_norm.weight         F32    [2048]
blk.9.ffn_up.weight           F16    [2048, 5504]
blk.10.attn_k.weight          F16    [2048, 2048]
blk.10.attn_norm.weight       F32    [2048]
blk.10.attn_output.weight     F16    [2048, 2048]
blk.10.attn_q.weight          F16    [2048, 2048]
blk.10.attn_v.weight          F16    [2048, 2048]
blk.10.ffn_down.weight        F16    [5504, 2048]
blk.10.ffn_gate.weight        F16    [2048, 5504]
blk.10.ffn_norm.weight        F32    [2048]
blk.10.ffn_up.weight          F16    [2048, 5504]
blk.11.attn_k.weight          F16    [2048, 2048]
blk.11.attn_norm.weight       F32    [2048]
blk.11.attn_output.weight     F16    [2048, 2048]
blk.11.attn_q.weight          F16    [2048, 2048]
blk.11.attn_v.weight          F16    [2048, 2048]
blk.11.ffn_down.weight        F16    [5504, 2048]
blk.11.ffn_gate.weight        F16    [2048, 5504]
blk.11.ffn_norm.weight        F32    [2048]
blk.11.ffn_up.weight          F16    [2048, 5504]
blk.12.attn_k.weight          F16    [2048, 2048]
blk.12.attn_norm.weight       F32    [2048]
blk.12.attn_output.weight     F16    [2048, 2048]
blk.12.attn_q.weight          F16    [2048, 2048]
blk.12.attn_v.weight          F16    [2048, 2048]
blk.12.ffn_down.weight        F16    [5504, 2048]
blk.12.ffn_gate.weight        F16    [2048, 5504]
blk.12.ffn_norm.weight        F32    [2048]
blk.12.ffn_up.weight          F16    [2048, 5504]
blk.13.attn_k.weight          F16    [2048, 2048]
blk.13.attn_norm.weight       F32    [2048]
blk.13.attn_output.weight     F16    [2048, 2048]
blk.13.attn_q.weight          F16    [2048, 2048]
blk.13.attn_v.weight          F16    [2048, 2048]
blk.13.ffn_down.weight        F16    [5504, 2048]
blk.13.ffn_gate.weight        F16    [2048, 5504]
blk.13.ffn_norm.weight        F32    [2048]
blk.13.ffn_up.weight          F16    [2048, 5504]
blk.14.attn_k.weight          F16    [2048, 2048]
blk.14.attn_norm.weight       F32    [2048]
blk.14.attn_output.weight     F16    [2048, 2048]
blk.14.attn_q.weight          F16    [2048, 2048]
blk.14.attn_v.weight          F16    [2048, 2048]
blk.14.ffn_down.weight        F16    [5504, 2048]
blk.14.ffn_gate.weight        F16    [2048, 5504]
blk.14.ffn_norm.weight        F32    [2048]
blk.14.ffn_up.weight          F16    [2048, 5504]
blk.15.attn_k.weight          F16    [2048, 2048]
blk.15.attn_norm.weight       F32    [2048]
blk.15.attn_output.weight     F16    [2048, 2048]
blk.15.attn_q.weight          F16    [2048, 2048]
blk.15.attn_v.weight          F16    [2048, 2048]
blk.15.ffn_down.weight        F16    [5504, 2048]
blk.15.ffn_gate.weight        F16    [2048, 5504]
blk.15.ffn_norm.weight        F32    [2048]
blk.15.ffn_up.weight          F16    [2048, 5504]
blk.16.attn_k.weight          F16    [2048, 2048]
blk.16.attn_norm.weight       F32    [2048]
blk.16.attn_output.weight     F16    [2048, 2048]
blk.16.attn_q.weight          F16    [2048, 2048]
blk.16.attn_v.weight          F16    [2048, 2048]
blk.16.ffn_down.weight        F16    [5504, 2048]
blk.16.ffn_gate.weight        F16    [2048, 5504]
blk.16.ffn_norm.weight        F32    [2048]
blk.16.ffn_up.weight          F16    [2048, 5504]
blk.17.attn_k.weight          F16    [2048, 2048]
blk.17.attn_norm.weight       F32    [2048]
blk.17.attn_output.weight     F16    [2048, 2048]
blk.17.attn_q.weight          F16    [2048, 2048]
blk.17.attn_v.weight          F16    [2048, 2048]
blk.17.ffn_down.weight        F16    [5504, 2048]
blk.17.ffn_gate.weight        F16    [2048, 5504]
blk.17.ffn_norm.weight        F32    [2048]
blk.17.ffn_up.weight          F16    [2048, 5504]
blk.18.attn_k.weight          F16    [2048, 2048]
blk.18.attn_norm.weight       F32    [2048]
blk.18.attn_output.weight     F16    [2048, 2048]
blk.18.attn_q.weight          F16    [2048, 2048]
blk.18.attn_v.weight          F16    [2048, 2048]
blk.18.ffn_down.weight        F16    [5504, 2048]
blk.18.ffn_gate.weight        F16    [2048, 5504]
blk.18.ffn_norm.weight        F32    [2048]
blk.18.ffn_up.weight          F16    [2048, 5504]
blk.19.attn_k.weight          F16    [2048, 2048]
blk.19.attn_norm.weight       F32    [2048]
blk.19.attn_output.weight     F16    [2048, 2048]
blk.19.attn_q.weight          F16    [2048, 2048]
blk.19.attn_v.weight          F16    [2048, 2048]
blk.19.ffn_down.weight        F16    [5504, 2048]
blk.19.ffn_gate.weight        F16    [2048, 5504]
blk.19.ffn_norm.weight        F32    [2048]
blk.19.ffn_up.weight          F16    [2048, 5504]
blk.20.attn_k.weight          F16    [2048, 2048]
blk.20.attn_norm.weight       F32    [2048]
blk.20.attn_output.weight     F16    [2048, 2048]
blk.20.attn_q.weight          F16    [2048, 2048]
blk.20.attn_v.weight          F16    [2048, 2048]
blk.20.ffn_down.weight        F16    [5504, 2048]
blk.20.ffn_gate.weight        F16    [2048, 5504]
blk.20.ffn_norm.weight        F32    [2048]
blk.20.ffn_up.weight          F16    [2048, 5504]
blk.21.attn_k.weight          F16    [2048, 2048]
blk.21.attn_norm.weight       F32    [2048]
blk.21.attn_output.weight     F16    [2048, 2048]
blk.21.attn_q.weight          F16    [2048, 2048]
blk.21.attn_v.weight          F16    [2048, 2048]
blk.21.ffn_down.weight        F16    [5504, 2048]
blk.21.ffn_gate.weight        F16    [2048, 5504]
blk.21.ffn_norm.weight        F32    [2048]
blk.21.ffn_up.weight          F16    [2048, 5504]
blk.22.attn_k.weight          F16    [2048, 2048]
blk.22.attn_norm.weight       F32    [2048]
blk.22.attn_output.weight     F16    [2048, 2048]
blk.22.attn_q.weight          F16    [2048, 2048]
blk.22.attn_v.weight          F16    [2048, 2048]
blk.22.ffn_down.weight        F16    [5504, 2048]
blk.22.ffn_gate.weight        F16    [2048, 5504]
blk.22.ffn_norm.weight        F32    [2048]
blk.22.ffn_up.weight          F16    [2048, 5504]
blk.23.attn_k.weight          F16    [2048, 2048]
blk.23.attn_norm.weight       F32    [2048]
blk.23.attn_output.weight     F16    [2048, 2048]
blk.23.attn_q.weight          F16    [2048, 2048]
blk.23.attn_v.weight          F16    [2048, 2048]
blk.23.ffn_down.weight        F16    [5504, 2048]
blk.23.ffn_gate.weight        F16    [2048, 5504]
blk.23.ffn_norm.weight        F32    [2048]
blk.23.ffn_up.weight          F16    [2048, 5504]
output.weight                 F16    [2048, 32256]
output_norm.weight            F32    [2048]
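The 2.7GB blob size is consistent with these shapes: summing every tensor listed above gives roughly 1.35B parameters, almost all stored as F16 at 2 bytes each (only the small norm vectors are F32). A short sketch reproducing the count from the dimensions in the tables:

    # Recomputing the parameter count from the dimensions above:
    # embedding 2048, feed-forward 5504, vocab 32256, 24 blocks.
    embd, ffn, vocab, blocks = 2048, 5504, 32256, 24

    per_block = (
        4 * embd * embd   # attn_q, attn_k, attn_v, attn_output: [2048, 2048]
        + 3 * embd * ffn  # ffn_gate, ffn_up, ffn_down: 2048 x 5504
        + 2 * embd        # attn_norm, ffn_norm: [2048], stored as F32
    )
    total = vocab * embd + blocks * per_block + vocab * embd + embd
    #       token_embd     transformer blocks   output.weight  output_norm

    print(f"{total:,} parameters")              # 1,346,471,936, ~1.35B
    print(f"~{total * 2 / 1e9:.2f} GB at F16")  # ~2.69 GB, matching the 2.7GB blob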