pdevine/discollama:latest

14 Downloads · Updated 1 year ago
discollama:latest · adapter · 4ad866d91636 · 14MB
Metadata

| Key                           | Value |
| ----------------------------- | ----- |
| general.architecture          | llama |
| general.file_type             | F16   |
| llama.attention.head_count    | 32    |
| llama.attention.head_count_kv | 8     |
| adapter.lora.alpha            | 32    |
| adapter.type                  | lora  |
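The metadata identifies this layer as a LoRA adapter (adapter.type = lora, alpha = 32) for a llama-architecture base model. Once the model has been pulled, the same information can be queried from a local Ollama server via its documented /api/show endpoint. Below is a minimal sketch, assuming Ollama is listening on its default port; the exact request and response fields vary across Ollama versions, so treat the keys as illustrative.

```python
# Minimal sketch: ask a local Ollama server about this adapter's model.
# Assumes the model was pulled first (ollama pull pdevine/discollama)
# and that Ollama is listening on its default port 11434.
import json
import urllib.request

req = urllib.request.Request(
    "http://localhost:11434/api/show",
    data=json.dumps({"model": "pdevine/discollama"}).encode("utf-8"),
    headers={"Content-Type": "application/json"},
)
with urllib.request.urlopen(req) as resp:
    info = json.load(resp)

# "details" typically carries family/format/quantization fields; the exact
# keys differ between Ollama versions, so treat this output as illustrative.
print(json.dumps(info.get("details", {}), indent=2))
```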
Tensor

| Name                        | Type | Shape      |
| --------------------------- | ---- | ---------- |
| blk.16.attn_q.weight.lora_a | F16  | [4096, 32] |
| blk.16.attn_q.weight.lora_b | F16  | [32, 4096] |
| blk.16.attn_v.weight.lora_a | F16  | [4096, 32] |
| blk.16.attn_v.weight.lora_b | F16  | [32, 1024] |
| blk.17.attn_q.weight.lora_a | F16  | [4096, 32] |
| blk.17.attn_q.weight.lora_b | F16  | [32, 4096] |
| blk.17.attn_v.weight.lora_a | F16  | [4096, 32] |
| blk.17.attn_v.weight.lora_b | F16  | [32, 1024] |
| blk.18.attn_q.weight.lora_a | F16  | [4096, 32] |
| blk.18.attn_q.weight.lora_b | F16  | [32, 4096] |
| blk.18.attn_v.weight.lora_a | F16  | [4096, 32] |
| blk.18.attn_v.weight.lora_b | F16  | [32, 1024] |
| blk.19.attn_q.weight.lora_a | F16  | [4096, 32] |
| blk.19.attn_q.weight.lora_b | F16  | [32, 4096] |
| blk.19.attn_v.weight.lora_a | F16  | [4096, 32] |
| blk.19.attn_v.weight.lora_b | F16  | [32, 1024] |
| blk.20.attn_q.weight.lora_a | F16  | [4096, 32] |
| blk.20.attn_q.weight.lora_b | F16  | [32, 4096] |
| blk.20.attn_v.weight.lora_a | F16  | [4096, 32] |
| blk.20.attn_v.weight.lora_b | F16  | [32, 1024] |
| blk.21.attn_q.weight.lora_a | F16  | [4096, 32] |
| blk.21.attn_q.weight.lora_b | F16  | [32, 4096] |
| blk.21.attn_v.weight.lora_a | F16  | [4096, 32] |
| blk.21.attn_v.weight.lora_b | F16  | [32, 1024] |
| blk.22.attn_q.weight.lora_a | F16  | [4096, 32] |
| blk.22.attn_q.weight.lora_b | F16  | [32, 4096] |
| blk.22.attn_v.weight.lora_a | F16  | [4096, 32] |
| blk.22.attn_v.weight.lora_b | F16  | [32, 1024] |
| blk.23.attn_q.weight.lora_a | F16  | [4096, 32] |
| blk.23.attn_q.weight.lora_b | F16  | [32, 4096] |
| blk.23.attn_v.weight.lora_a | F16  | [4096, 32] |
| blk.23.attn_v.weight.lora_b | F16  | [32, 1024] |
| blk.24.attn_q.weight.lora_a | F16  | [4096, 32] |
| blk.24.attn_q.weight.lora_b | F16  | [32, 4096] |
| blk.24.attn_v.weight.lora_a | F16  | [4096, 32] |
| blk.24.attn_v.weight.lora_b | F16  | [32, 1024] |
| blk.25.attn_q.weight.lora_a | F16  | [4096, 32] |
| blk.25.attn_q.weight.lora_b | F16  | [32, 4096] |
| blk.25.attn_v.weight.lora_a | F16  | [4096, 32] |
| blk.25.attn_v.weight.lora_b | F16  | [32, 1024] |
| blk.26.attn_q.weight.lora_a | F16  | [4096, 32] |
| blk.26.attn_q.weight.lora_b | F16  | [32, 4096] |
| blk.26.attn_v.weight.lora_a | F16  | [4096, 32] |
| blk.26.attn_v.weight.lora_b | F16  | [32, 1024] |
| blk.27.attn_q.weight.lora_a | F16  | [4096, 32] |
| blk.27.attn_q.weight.lora_b | F16  | [32, 4096] |
| blk.27.attn_v.weight.lora_a | F16  | [4096, 32] |
| blk.27.attn_v.weight.lora_b | F16  | [32, 1024] |
| blk.28.attn_q.weight.lora_a | F16  | [4096, 32] |
| blk.28.attn_q.weight.lora_b | F16  | [32, 4096] |
| blk.28.attn_v.weight.lora_a | F16  | [4096, 32] |
| blk.28.attn_v.weight.lora_b | F16  | [32, 1024] |
| blk.29.attn_q.weight.lora_a | F16  | [4096, 32] |
| blk.29.attn_q.weight.lora_b | F16  | [32, 4096] |
| blk.29.attn_v.weight.lora_a | F16  | [4096, 32] |
| blk.29.attn_v.weight.lora_b | F16  | [32, 1024] |
| blk.30.attn_q.weight.lora_a | F16  | [4096, 32] |
| blk.30.attn_q.weight.lora_b | F16  | [32, 4096] |
| blk.30.attn_v.weight.lora_a | F16  | [4096, 32] |
| blk.30.attn_v.weight.lora_b | F16  | [32, 1024] |
| blk.31.attn_q.weight.lora_a | F16  | [4096, 32] |
| blk.31.attn_q.weight.lora_b | F16  | [32, 4096] |
| blk.31.attn_v.weight.lora_a | F16  | [4096, 32] |
| blk.31.attn_v.weight.lora_b | F16  | [32, 1024] |
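Every tensor here is one half of a rank-32 LoRA factorization: each lora_a maps the 4096-dim model width down to rank 32, and each lora_b maps back up to the projection's output width. The attn_v lora_b shape of [32, 1024] (versus [32, 4096] for attn_q) is consistent with the grouped-query attention settings in the metadata: 8 KV heads × 128-dim heads = 1024 value-projection outputs. Below is a minimal numpy sketch of the standard LoRA merge, W' = W + (alpha/r)·B·A, with r = 32 and alpha = 32 from the metadata (so the scale is 1); the axis order GGUF uses for these shapes is an assumption, so a real loader may need the transposes adjusted.

```python
# Minimal sketch of merging one rank-32 LoRA factor pair into a base weight,
# using the attn_q shapes above (d_model = 4096, rank = 32, alpha = 32).
# Random matrices stand in for real weights; this illustrates the math only.
import numpy as np

d_model, rank, alpha = 4096, 32, 32
rng = np.random.default_rng(0)

W = rng.standard_normal((d_model, d_model)).astype(np.float32)  # base attn_q weight
A = rng.standard_normal((rank, d_model)).astype(np.float32)     # lora_a: down-projection
B = rng.standard_normal((d_model, rank)).astype(np.float32)     # lora_b: up-projection

# Standard LoRA merge: W' = W + (alpha / rank) * B @ A.
# With alpha == rank, the scaling factor is exactly 1.
W_merged = W + (alpha / rank) * (B @ A)
assert W_merged.shape == (d_model, d_model)
```

The same merge applies to the value projection with B of shape (1024, 32), matching the narrower GQA output width.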