AaryanDahal/aaryans_personal_llm:latest
4 Downloads · Updated 5 months ago
aaryans_personal_llm:latest / adapter
a2ab0ae9c9a5 · 361MB
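The listing below describes this adapter layer. To try the model itself, one option is the official `ollama` Python client; a minimal sketch, assuming a local Ollama server is running (the prompt is only an example):

```python
# Minimal sketch: pull and query this model through the official Ollama
# Python client (pip install ollama). Assumes `ollama serve` is running
# locally; the model name comes from the page header above.
import ollama

ollama.pull("AaryanDahal/aaryans_personal_llm:latest")

response = ollama.chat(
    model="AaryanDahal/aaryans_personal_llm:latest",
    messages=[{"role": "user", "content": "Introduce yourself."}],
)
print(response["message"]["content"])
```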
Metadata

| Key | Value |
| --- | --- |
| general.architecture | llama |
| general.file_type | F16 |
| llama.attention.head_count | 32 |
| llama.attention.head_count_kv | 8 |
| adapter.lora.alpha | 512 |
| adapter.type | lora |
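The `adapter.lora.alpha` of 512, combined with the LoRA rank of 256 that appears as the shared inner dimension of every `lora_a`/`lora_b` pair below, fixes the scale at which the adapter is applied: alpha / rank = 2. A minimal sketch of the standard LoRA update under these values, treating the listed shapes as plain row-major matrices (the GGUF on-disk orientation may differ) and using illustrative names:

```python
# Minimal sketch of the standard LoRA update, W' = W + (alpha / r) * (A @ B),
# using values from the metadata above. Function and variable names are
# illustrative; this is not Ollama's internal merge code.
import numpy as np

def apply_lora(w_base, lora_a, lora_b, alpha=512.0):
    rank = lora_a.shape[1]   # shared inner dimension: 256 for this adapter
    scale = alpha / rank     # 512 / 256 = 2.0
    return w_base + scale * (lora_a @ lora_b)

# Shapes taken from blk.0's attn_k pair in the table below:
w = np.zeros((2048, 512), dtype=np.float32)        # stand-in base K projection
a = np.random.randn(2048, 256).astype(np.float32)  # lora_a: [2048, 256]
b = np.random.randn(256, 512).astype(np.float32)   # lora_b: [256, 512]
print(apply_lora(w, a, b).shape)                   # (2048, 512)
```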
Tensors
blk.0

| Name | Type | Shape |
| --- | --- | --- |
| blk.0.attn_k.weight.lora_a | F16 | [2048, 256] |
| blk.0.attn_k.weight.lora_b | F16 | [256, 512] |
| blk.0.attn_output.weight.lora_a | F16 | [2048, 256] |
| blk.0.attn_output.weight.lora_b | F16 | [256, 2048] |
| blk.0.attn_q.weight.lora_a | F16 | [2048, 256] |
| blk.0.attn_q.weight.lora_b | F16 | [256, 2048] |
| blk.0.attn_v.weight.lora_a | F16 | [2048, 256] |
| blk.0.attn_v.weight.lora_b | F16 | [256, 512] |
| blk.0.ffn_down.weight.lora_a | F16 | [8192, 256] |
| blk.0.ffn_down.weight.lora_b | F16 | [256, 2048] |
| blk.0.ffn_gate.weight.lora_a | F16 | [2048, 256] |
| blk.0.ffn_gate.weight.lora_b | F16 | [256, 8192] |
| blk.0.ffn_up.weight.lora_a | F16 | [2048, 256] |
| blk.0.ffn_up.weight.lora_b | F16 | [256, 8192] |
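Blocks blk.1 through blk.15 below repeat exactly the same fourteen tensors, so the adapter's size can be checked directly from this table: summing the shape products over one block, multiplying by 16 blocks, and counting two bytes per F16 value reproduces the 361MB reported in the header. A quick arithmetic sketch:

```python
# Illustrative size check: the shapes in the tables reproduce the 361MB
# adapter size reported in the header (F16 = 2 bytes per parameter).
shapes = [
    (2048, 256), (256, 512),    # attn_k.lora_a / lora_b
    (2048, 256), (256, 2048),   # attn_output
    (2048, 256), (256, 2048),   # attn_q
    (2048, 256), (256, 512),    # attn_v
    (8192, 256), (256, 2048),   # ffn_down
    (2048, 256), (256, 8192),   # ffn_gate
    (2048, 256), (256, 8192),   # ffn_up
]
per_block = sum(rows * cols for rows, cols in shapes)  # 11,272,192 params
total = 16 * per_block                                 # blk.0 .. blk.15
print(f"{total:,} params -> {total * 2 / 1e6:.0f} MB at F16")
# 180,355,072 params -> 361 MB
```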
blk.1

| Name | Type | Shape |
| --- | --- | --- |
| blk.1.attn_k.weight.lora_a | F16 | [2048, 256] |
| blk.1.attn_k.weight.lora_b | F16 | [256, 512] |
| blk.1.attn_output.weight.lora_a | F16 | [2048, 256] |
| blk.1.attn_output.weight.lora_b | F16 | [256, 2048] |
| blk.1.attn_q.weight.lora_a | F16 | [2048, 256] |
| blk.1.attn_q.weight.lora_b | F16 | [256, 2048] |
| blk.1.attn_v.weight.lora_a | F16 | [2048, 256] |
| blk.1.attn_v.weight.lora_b | F16 | [256, 512] |
| blk.1.ffn_down.weight.lora_a | F16 | [8192, 256] |
| blk.1.ffn_down.weight.lora_b | F16 | [256, 2048] |
| blk.1.ffn_gate.weight.lora_a | F16 | [2048, 256] |
| blk.1.ffn_gate.weight.lora_b | F16 | [256, 8192] |
| blk.1.ffn_up.weight.lora_a | F16 | [2048, 256] |
| blk.1.ffn_up.weight.lora_b | F16 | [256, 8192] |
blk.2

| Name | Type | Shape |
| --- | --- | --- |
| blk.2.attn_k.weight.lora_a | F16 | [2048, 256] |
| blk.2.attn_k.weight.lora_b | F16 | [256, 512] |
| blk.2.attn_output.weight.lora_a | F16 | [2048, 256] |
| blk.2.attn_output.weight.lora_b | F16 | [256, 2048] |
| blk.2.attn_q.weight.lora_a | F16 | [2048, 256] |
| blk.2.attn_q.weight.lora_b | F16 | [256, 2048] |
| blk.2.attn_v.weight.lora_a | F16 | [2048, 256] |
| blk.2.attn_v.weight.lora_b | F16 | [256, 512] |
| blk.2.ffn_down.weight.lora_a | F16 | [8192, 256] |
| blk.2.ffn_down.weight.lora_b | F16 | [256, 2048] |
| blk.2.ffn_gate.weight.lora_a | F16 | [2048, 256] |
| blk.2.ffn_gate.weight.lora_b | F16 | [256, 8192] |
| blk.2.ffn_up.weight.lora_a | F16 | [2048, 256] |
| blk.2.ffn_up.weight.lora_b | F16 | [256, 8192] |
blk.3

| Name | Type | Shape |
| --- | --- | --- |
| blk.3.attn_k.weight.lora_a | F16 | [2048, 256] |
| blk.3.attn_k.weight.lora_b | F16 | [256, 512] |
| blk.3.attn_output.weight.lora_a | F16 | [2048, 256] |
| blk.3.attn_output.weight.lora_b | F16 | [256, 2048] |
| blk.3.attn_q.weight.lora_a | F16 | [2048, 256] |
| blk.3.attn_q.weight.lora_b | F16 | [256, 2048] |
| blk.3.attn_v.weight.lora_a | F16 | [2048, 256] |
| blk.3.attn_v.weight.lora_b | F16 | [256, 512] |
| blk.3.ffn_down.weight.lora_a | F16 | [8192, 256] |
| blk.3.ffn_down.weight.lora_b | F16 | [256, 2048] |
| blk.3.ffn_gate.weight.lora_a | F16 | [2048, 256] |
| blk.3.ffn_gate.weight.lora_b | F16 | [256, 8192] |
| blk.3.ffn_up.weight.lora_a | F16 | [2048, 256] |
| blk.3.ffn_up.weight.lora_b | F16 | [256, 8192] |
blk.4

| Name | Type | Shape |
| --- | --- | --- |
| blk.4.attn_k.weight.lora_a | F16 | [2048, 256] |
| blk.4.attn_k.weight.lora_b | F16 | [256, 512] |
| blk.4.attn_output.weight.lora_a | F16 | [2048, 256] |
| blk.4.attn_output.weight.lora_b | F16 | [256, 2048] |
| blk.4.attn_q.weight.lora_a | F16 | [2048, 256] |
| blk.4.attn_q.weight.lora_b | F16 | [256, 2048] |
| blk.4.attn_v.weight.lora_a | F16 | [2048, 256] |
| blk.4.attn_v.weight.lora_b | F16 | [256, 512] |
| blk.4.ffn_down.weight.lora_a | F16 | [8192, 256] |
| blk.4.ffn_down.weight.lora_b | F16 | [256, 2048] |
| blk.4.ffn_gate.weight.lora_a | F16 | [2048, 256] |
| blk.4.ffn_gate.weight.lora_b | F16 | [256, 8192] |
| blk.4.ffn_up.weight.lora_a | F16 | [2048, 256] |
| blk.4.ffn_up.weight.lora_b | F16 | [256, 8192] |
blk.5

| Name | Type | Shape |
| --- | --- | --- |
| blk.5.attn_k.weight.lora_a | F16 | [2048, 256] |
| blk.5.attn_k.weight.lora_b | F16 | [256, 512] |
| blk.5.attn_output.weight.lora_a | F16 | [2048, 256] |
| blk.5.attn_output.weight.lora_b | F16 | [256, 2048] |
| blk.5.attn_q.weight.lora_a | F16 | [2048, 256] |
| blk.5.attn_q.weight.lora_b | F16 | [256, 2048] |
| blk.5.attn_v.weight.lora_a | F16 | [2048, 256] |
| blk.5.attn_v.weight.lora_b | F16 | [256, 512] |
| blk.5.ffn_down.weight.lora_a | F16 | [8192, 256] |
| blk.5.ffn_down.weight.lora_b | F16 | [256, 2048] |
| blk.5.ffn_gate.weight.lora_a | F16 | [2048, 256] |
| blk.5.ffn_gate.weight.lora_b | F16 | [256, 8192] |
| blk.5.ffn_up.weight.lora_a | F16 | [2048, 256] |
| blk.5.ffn_up.weight.lora_b | F16 | [256, 8192] |
blk.6

| Name | Type | Shape |
| --- | --- | --- |
| blk.6.attn_k.weight.lora_a | F16 | [2048, 256] |
| blk.6.attn_k.weight.lora_b | F16 | [256, 512] |
| blk.6.attn_output.weight.lora_a | F16 | [2048, 256] |
| blk.6.attn_output.weight.lora_b | F16 | [256, 2048] |
| blk.6.attn_q.weight.lora_a | F16 | [2048, 256] |
| blk.6.attn_q.weight.lora_b | F16 | [256, 2048] |
| blk.6.attn_v.weight.lora_a | F16 | [2048, 256] |
| blk.6.attn_v.weight.lora_b | F16 | [256, 512] |
| blk.6.ffn_down.weight.lora_a | F16 | [8192, 256] |
| blk.6.ffn_down.weight.lora_b | F16 | [256, 2048] |
| blk.6.ffn_gate.weight.lora_a | F16 | [2048, 256] |
| blk.6.ffn_gate.weight.lora_b | F16 | [256, 8192] |
| blk.6.ffn_up.weight.lora_a | F16 | [2048, 256] |
| blk.6.ffn_up.weight.lora_b | F16 | [256, 8192] |
blk.7

| Name | Type | Shape |
| --- | --- | --- |
| blk.7.attn_k.weight.lora_a | F16 | [2048, 256] |
| blk.7.attn_k.weight.lora_b | F16 | [256, 512] |
| blk.7.attn_output.weight.lora_a | F16 | [2048, 256] |
| blk.7.attn_output.weight.lora_b | F16 | [256, 2048] |
| blk.7.attn_q.weight.lora_a | F16 | [2048, 256] |
| blk.7.attn_q.weight.lora_b | F16 | [256, 2048] |
| blk.7.attn_v.weight.lora_a | F16 | [2048, 256] |
| blk.7.attn_v.weight.lora_b | F16 | [256, 512] |
| blk.7.ffn_down.weight.lora_a | F16 | [8192, 256] |
| blk.7.ffn_down.weight.lora_b | F16 | [256, 2048] |
| blk.7.ffn_gate.weight.lora_a | F16 | [2048, 256] |
| blk.7.ffn_gate.weight.lora_b | F16 | [256, 8192] |
| blk.7.ffn_up.weight.lora_a | F16 | [2048, 256] |
| blk.7.ffn_up.weight.lora_b | F16 | [256, 8192] |
blk.8

| Name | Type | Shape |
| --- | --- | --- |
| blk.8.attn_k.weight.lora_a | F16 | [2048, 256] |
| blk.8.attn_k.weight.lora_b | F16 | [256, 512] |
| blk.8.attn_output.weight.lora_a | F16 | [2048, 256] |
| blk.8.attn_output.weight.lora_b | F16 | [256, 2048] |
| blk.8.attn_q.weight.lora_a | F16 | [2048, 256] |
| blk.8.attn_q.weight.lora_b | F16 | [256, 2048] |
| blk.8.attn_v.weight.lora_a | F16 | [2048, 256] |
| blk.8.attn_v.weight.lora_b | F16 | [256, 512] |
| blk.8.ffn_down.weight.lora_a | F16 | [8192, 256] |
| blk.8.ffn_down.weight.lora_b | F16 | [256, 2048] |
| blk.8.ffn_gate.weight.lora_a | F16 | [2048, 256] |
| blk.8.ffn_gate.weight.lora_b | F16 | [256, 8192] |
| blk.8.ffn_up.weight.lora_a | F16 | [2048, 256] |
| blk.8.ffn_up.weight.lora_b | F16 | [256, 8192] |
blk.9

| Name | Type | Shape |
| --- | --- | --- |
| blk.9.attn_k.weight.lora_a | F16 | [2048, 256] |
| blk.9.attn_k.weight.lora_b | F16 | [256, 512] |
| blk.9.attn_output.weight.lora_a | F16 | [2048, 256] |
| blk.9.attn_output.weight.lora_b | F16 | [256, 2048] |
| blk.9.attn_q.weight.lora_a | F16 | [2048, 256] |
| blk.9.attn_q.weight.lora_b | F16 | [256, 2048] |
| blk.9.attn_v.weight.lora_a | F16 | [2048, 256] |
| blk.9.attn_v.weight.lora_b | F16 | [256, 512] |
| blk.9.ffn_down.weight.lora_a | F16 | [8192, 256] |
| blk.9.ffn_down.weight.lora_b | F16 | [256, 2048] |
| blk.9.ffn_gate.weight.lora_a | F16 | [2048, 256] |
| blk.9.ffn_gate.weight.lora_b | F16 | [256, 8192] |
| blk.9.ffn_up.weight.lora_a | F16 | [2048, 256] |
| blk.9.ffn_up.weight.lora_b | F16 | [256, 8192] |
blk.10

| Name | Type | Shape |
| --- | --- | --- |
| blk.10.attn_k.weight.lora_a | F16 | [2048, 256] |
| blk.10.attn_k.weight.lora_b | F16 | [256, 512] |
| blk.10.attn_output.weight.lora_a | F16 | [2048, 256] |
| blk.10.attn_output.weight.lora_b | F16 | [256, 2048] |
| blk.10.attn_q.weight.lora_a | F16 | [2048, 256] |
| blk.10.attn_q.weight.lora_b | F16 | [256, 2048] |
| blk.10.attn_v.weight.lora_a | F16 | [2048, 256] |
| blk.10.attn_v.weight.lora_b | F16 | [256, 512] |
| blk.10.ffn_down.weight.lora_a | F16 | [8192, 256] |
| blk.10.ffn_down.weight.lora_b | F16 | [256, 2048] |
| blk.10.ffn_gate.weight.lora_a | F16 | [2048, 256] |
| blk.10.ffn_gate.weight.lora_b | F16 | [256, 8192] |
| blk.10.ffn_up.weight.lora_a | F16 | [2048, 256] |
| blk.10.ffn_up.weight.lora_b | F16 | [256, 8192] |
blk.11

| Name | Type | Shape |
| --- | --- | --- |
| blk.11.attn_k.weight.lora_a | F16 | [2048, 256] |
| blk.11.attn_k.weight.lora_b | F16 | [256, 512] |
| blk.11.attn_output.weight.lora_a | F16 | [2048, 256] |
| blk.11.attn_output.weight.lora_b | F16 | [256, 2048] |
| blk.11.attn_q.weight.lora_a | F16 | [2048, 256] |
| blk.11.attn_q.weight.lora_b | F16 | [256, 2048] |
| blk.11.attn_v.weight.lora_a | F16 | [2048, 256] |
| blk.11.attn_v.weight.lora_b | F16 | [256, 512] |
| blk.11.ffn_down.weight.lora_a | F16 | [8192, 256] |
| blk.11.ffn_down.weight.lora_b | F16 | [256, 2048] |
| blk.11.ffn_gate.weight.lora_a | F16 | [2048, 256] |
| blk.11.ffn_gate.weight.lora_b | F16 | [256, 8192] |
| blk.11.ffn_up.weight.lora_a | F16 | [2048, 256] |
| blk.11.ffn_up.weight.lora_b | F16 | [256, 8192] |
blk.12

| Name | Type | Shape |
| --- | --- | --- |
| blk.12.attn_k.weight.lora_a | F16 | [2048, 256] |
| blk.12.attn_k.weight.lora_b | F16 | [256, 512] |
| blk.12.attn_output.weight.lora_a | F16 | [2048, 256] |
| blk.12.attn_output.weight.lora_b | F16 | [256, 2048] |
| blk.12.attn_q.weight.lora_a | F16 | [2048, 256] |
| blk.12.attn_q.weight.lora_b | F16 | [256, 2048] |
| blk.12.attn_v.weight.lora_a | F16 | [2048, 256] |
| blk.12.attn_v.weight.lora_b | F16 | [256, 512] |
| blk.12.ffn_down.weight.lora_a | F16 | [8192, 256] |
| blk.12.ffn_down.weight.lora_b | F16 | [256, 2048] |
| blk.12.ffn_gate.weight.lora_a | F16 | [2048, 256] |
| blk.12.ffn_gate.weight.lora_b | F16 | [256, 8192] |
| blk.12.ffn_up.weight.lora_a | F16 | [2048, 256] |
| blk.12.ffn_up.weight.lora_b | F16 | [256, 8192] |
blk.13

| Name | Type | Shape |
| --- | --- | --- |
| blk.13.attn_k.weight.lora_a | F16 | [2048, 256] |
| blk.13.attn_k.weight.lora_b | F16 | [256, 512] |
| blk.13.attn_output.weight.lora_a | F16 | [2048, 256] |
| blk.13.attn_output.weight.lora_b | F16 | [256, 2048] |
| blk.13.attn_q.weight.lora_a | F16 | [2048, 256] |
| blk.13.attn_q.weight.lora_b | F16 | [256, 2048] |
| blk.13.attn_v.weight.lora_a | F16 | [2048, 256] |
| blk.13.attn_v.weight.lora_b | F16 | [256, 512] |
| blk.13.ffn_down.weight.lora_a | F16 | [8192, 256] |
| blk.13.ffn_down.weight.lora_b | F16 | [256, 2048] |
| blk.13.ffn_gate.weight.lora_a | F16 | [2048, 256] |
| blk.13.ffn_gate.weight.lora_b | F16 | [256, 8192] |
| blk.13.ffn_up.weight.lora_a | F16 | [2048, 256] |
| blk.13.ffn_up.weight.lora_b | F16 | [256, 8192] |
blk.14

| Name | Type | Shape |
| --- | --- | --- |
| blk.14.attn_k.weight.lora_a | F16 | [2048, 256] |
| blk.14.attn_k.weight.lora_b | F16 | [256, 512] |
| blk.14.attn_output.weight.lora_a | F16 | [2048, 256] |
| blk.14.attn_output.weight.lora_b | F16 | [256, 2048] |
| blk.14.attn_q.weight.lora_a | F16 | [2048, 256] |
| blk.14.attn_q.weight.lora_b | F16 | [256, 2048] |
| blk.14.attn_v.weight.lora_a | F16 | [2048, 256] |
| blk.14.attn_v.weight.lora_b | F16 | [256, 512] |
| blk.14.ffn_down.weight.lora_a | F16 | [8192, 256] |
| blk.14.ffn_down.weight.lora_b | F16 | [256, 2048] |
| blk.14.ffn_gate.weight.lora_a | F16 | [2048, 256] |
| blk.14.ffn_gate.weight.lora_b | F16 | [256, 8192] |
| blk.14.ffn_up.weight.lora_a | F16 | [2048, 256] |
| blk.14.ffn_up.weight.lora_b | F16 | [256, 8192] |
blk.15

| Name | Type | Shape |
| --- | --- | --- |
| blk.15.attn_k.weight.lora_a | F16 | [2048, 256] |
| blk.15.attn_k.weight.lora_b | F16 | [256, 512] |
| blk.15.attn_output.weight.lora_a | F16 | [2048, 256] |
| blk.15.attn_output.weight.lora_b | F16 | [256, 2048] |
| blk.15.attn_q.weight.lora_a | F16 | [2048, 256] |
| blk.15.attn_q.weight.lora_b | F16 | [256, 2048] |
| blk.15.attn_v.weight.lora_a | F16 | [2048, 256] |
| blk.15.attn_v.weight.lora_b | F16 | [256, 512] |
| blk.15.ffn_down.weight.lora_a | F16 | [8192, 256] |
| blk.15.ffn_down.weight.lora_b | F16 | [256, 2048] |
| blk.15.ffn_gate.weight.lora_a | F16 | [2048, 256] |
| blk.15.ffn_gate.weight.lora_b | F16 | [256, 8192] |
| blk.15.ffn_up.weight.lora_a | F16 | [2048, 256] |
| blk.15.ffn_up.weight.lora_b | F16 | [256, 8192] |
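Both the metadata and the tensor listing above can be reproduced programmatically with the `gguf` package published from llama.cpp's gguf-py; a sketch, with a hypothetical local path to the exported adapter blob:

```python
# Sketch: reading the adapter's metadata and tensor listing with the `gguf`
# package (pip install gguf). The file path is hypothetical; after a pull,
# the blob lives in Ollama's local model store.
from gguf import GGUFReader

reader = GGUFReader("adapter-a2ab0ae9c9a5.gguf")

# Metadata keys, e.g. general.architecture, adapter.lora.alpha:
for key in reader.fields:
    print(key)

# Name, type, and shape of every tensor, as tabulated above:
for tensor in reader.tensors:
    print(tensor.name, tensor.tensor_type.name, list(tensor.shape))
```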