theqtcompany/codellama-7b-qml:latest
277 Downloads · Updated 1 month ago
CodeLlama-7B-QML is a large language model customized by the Qt Company for Fill-In-The-Middle code completion tasks in the QML programming language, especially for Qt Quick Controls compliant with Qt 6 releases.
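Because the model is tuned for infilling rather than chat, it is typically prompted with the code before and after the cursor rather than with instructions. Below is a minimal sketch of driving it through Ollama's REST generate endpoint. The `<PRE>`/`<SUF>`/`<MID>` layout follows CodeLlama's standard infilling format and the QML snippet is hypothetical; check this fine-tune's model card for the exact template it was trained on.

```python
import requests

# Hypothetical QML snippet with a hole to fill: everything before the
# cursor goes in `prefix`, everything after it in `suffix`.
prefix = """import QtQuick
import QtQuick.Controls

Button {
    text: qsTr("Quit")
"""
suffix = "}\n"

# CodeLlama-style Fill-In-The-Middle prompt (an assumption: this is the
# base model's standard infilling layout, not something this page states).
prompt = f"<PRE> {prefix} <SUF>{suffix} <MID>"

# Default local Ollama endpoint; `raw` keeps Ollama from wrapping the
# prompt in a chat template, so the FIM control tokens reach the model verbatim.
resp = requests.post(
    "http://localhost:11434/api/generate",
    json={
        "model": "theqtcompany/codellama-7b-qml:latest",
        "prompt": prompt,
        "raw": True,
        "stream": False,
        "options": {"temperature": 0, "num_predict": 128, "stop": ["<EOT>"]},
    },
    timeout=120,
)
print(resp.json()["response"])  # the completion that belongs between prefix and suffix
```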
codellama-7b-qml:latest
adapter    9298cedefd9a · 649MB
Metadata
general.architecture    llama
adapter.lora.alpha      64
adapter.type            lora
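These entries identify the artifact as a LoRA adapter for a llama-architecture base model. As a point of reference (this is the standard LoRA convention, not something the metadata itself spells out), each adapted weight W receives a low-rank update scaled by alpha over the rank r:

W' = W + (α / r) · B·A,   where A ∈ R^(r × d_in) and B ∈ R^(d_out × r)

With adapter.lora.alpha = 64 and a shared inner dimension of 64 in every tensor shape below, the rank appears to be r = 64, making the effective scale α/r = 1.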
Tensor

All 32 transformer blocks (blk.0 through blk.31) carry an identical set of LoRA tensors, so the table lists the pattern once, with N standing for the block index 0–31, followed by the output-layer pair.

Name                              Type    Shape
blk.N.attn_k.weight.lora_a        F32     [4096, 64]
blk.N.attn_k.weight.lora_b        F32     [64, 4096]
blk.N.attn_output.weight.lora_a   F32     [4096, 64]
blk.N.attn_output.weight.lora_b   F32     [64, 4096]
blk.N.attn_q.weight.lora_a        F32     [4096, 64]
blk.N.attn_q.weight.lora_b        F32     [64, 4096]
blk.N.attn_v.weight.lora_a        F32     [4096, 64]
blk.N.attn_v.weight.lora_b        F32     [64, 4096]
blk.N.ffn_down.weight.lora_a      F32     [11008, 64]
blk.N.ffn_down.weight.lora_b      F32     [64, 4096]
blk.N.ffn_gate.weight.lora_a      F32     [4096, 64]
blk.N.ffn_gate.weight.lora_b      F32     [64, 11008]
blk.N.ffn_up.weight.lora_a        F32     [4096, 64]
blk.N.ffn_up.weight.lora_b        F32     [64, 11008]
output.weight.lora_a              F32     [4096, 64]
output.weight.lora_b              F32     [64, 32016]
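Each lora_a/lora_b pair holds the two low-rank factors of one weight update, and the factors compose back to the full weight shape, as the numpy sketch below illustrates for one attention projection. The sketch assumes GGUF's innermost-first dimension order, so a listed [4096, 64] tensor is read as a 64×4096 matrix; note also that the 32016 in output.weight.lora_b matches CodeLlama's extended vocabulary size.

```python
import numpy as np

# adapter.lora.alpha from the metadata; the rank is inferred from the
# shared inner dimension of 64 in the shapes above.
alpha, rank = 64.0, 64

# blk.N.attn_q.weight: lora_a listed as [4096, 64], lora_b as [64, 4096].
# Assumption: GGUF lists dimensions innermost-first, so those are
# (64, 4096) and (4096, 64) matrices in numpy's (rows, cols) terms.
lora_a = np.zeros((64, 4096), dtype=np.float32)   # r x d_in
lora_b = np.zeros((4096, 64), dtype=np.float32)   # d_out x r

# Full-rank update the adapter adds to the frozen base weight.
delta_w = (alpha / rank) * (lora_b @ lora_a)
print(delta_w.shape)  # (4096, 4096), matching the base attn_q projection
```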