CYLI310/CodeGPT:latest

113 pulls · Updated 1 month ago

A code assistant LoRA adapter fine-tuned from GPT-OSS 20B.

tools
28e2c98c5d20 · 16MB
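
To try the adapter locally, pull and run it through Ollama (e.g. `ollama run CYLI310/CodeGPT:latest` on the CLI). Below is a minimal sketch using the official `ollama` Python client; it assumes a running local Ollama server and ollama-python 0.4 or newer, and the prompt text is only an illustration:

```python
import ollama

# Pull the adapter-backed model (a no-op if it is already present locally).
ollama.pull("CYLI310/CodeGPT:latest")

# Ask it a coding question through the chat endpoint.
response = ollama.chat(
    model="CYLI310/CodeGPT:latest",
    messages=[
        {"role": "user", "content": "Write a Python function that reverses a linked list."}
    ],
)

# In ollama-python 0.4+ the response is an object with .message.content.
print(response.message.content)
```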
    Metadata
  • general.architecture · llama
  • general.file_type · F16
  • llama.attention.head_count · 32
  • llama.attention.head_count_kv · 8
  • adapter.lora.alpha · 16
  • adapter.type · lora
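
The `adapter.type` and `adapter.lora.alpha` entries, together with the rank-16 tensor shapes listed below, pin down how the adapter modifies each attention projection: every target weight W receives a low-rank update A·B scaled by alpha/rank (here 16/16 = 1.0). The NumPy sketch below shows that standard LoRA merge for blk.0's K projection, reading the listed shapes as [in_features, rank] for `lora_a` and [rank, out_features] for `lora_b`; the exact dimension-order and transpose conventions in GGUF may differ, and the base weight here is random stand-in data:

```python
import numpy as np

# Shapes taken from the tensor listing for blk.0's K projection:
# lora_a is [2880, 16] (hidden -> rank), lora_b is [16, 512] (rank -> kv dim).
hidden, rank, kv_dim = 2880, 16, 512
alpha = 16  # adapter.lora.alpha from the metadata above

rng = np.random.default_rng(0)
W = rng.standard_normal((hidden, kv_dim), dtype=np.float32)  # stand-in base weight
A = rng.standard_normal((hidden, rank), dtype=np.float32)    # blk.0.attn_k.weight.lora_a
B = rng.standard_normal((rank, kv_dim), dtype=np.float32)    # blk.0.attn_k.weight.lora_b

# Standard LoRA merge: W' = W + (alpha / rank) * A @ B.
# With alpha == rank == 16 the scale is exactly 1.0.
scale = alpha / rank
W_merged = W + scale * (A @ B)
assert W_merged.shape == (hidden, kv_dim)
```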
    Tensor
  • blk.0
  • blk.0.attn_k.weight.lora_a · F16 · [2880, 16]
  • blk.0.attn_k.weight.lora_b · F16 · [16, 512]
  • blk.0.attn_output.weight.lora_a · F16 · [4096, 16]
  • blk.0.attn_output.weight.lora_b · F16 · [16, 2880]
  • blk.0.attn_q.weight.lora_a · F16 · [2880, 16]
  • blk.0.attn_q.weight.lora_b · F16 · [16, 4096]
  • blk.0.attn_v.weight.lora_a · F16 · [2880, 16]
  • blk.0.attn_v.weight.lora_b · F16 · [16, 512]
  • blk.1
  • blk.1.attn_k.weight.lora_a · F16 · [2880, 16]
  • blk.1.attn_k.weight.lora_b · F16 · [16, 512]
  • blk.1.attn_output.weight.lora_a · F16 · [4096, 16]
  • blk.1.attn_output.weight.lora_b · F16 · [16, 2880]
  • blk.1.attn_q.weight.lora_a · F16 · [2880, 16]
  • blk.1.attn_q.weight.lora_b · F16 · [16, 4096]
  • blk.1.attn_v.weight.lora_a · F16 · [2880, 16]
  • blk.1.attn_v.weight.lora_b · F16 · [16, 512]
  • blk.2
  • blk.2.attn_k.weight.lora_a · F16 · [2880, 16]
  • blk.2.attn_k.weight.lora_b · F16 · [16, 512]
  • blk.2.attn_output.weight.lora_a · F16 · [4096, 16]
  • blk.2.attn_output.weight.lora_b · F16 · [16, 2880]
  • blk.2.attn_q.weight.lora_a · F16 · [2880, 16]
  • blk.2.attn_q.weight.lora_b · F16 · [16, 4096]
  • blk.2.attn_v.weight.lora_a · F16 · [2880, 16]
  • blk.2.attn_v.weight.lora_b · F16 · [16, 512]
  • blk.3
  • blk.3.attn_k.weight.lora_a · F16 · [2880, 16]
  • blk.3.attn_k.weight.lora_b · F16 · [16, 512]
  • blk.3.attn_output.weight.lora_a · F16 · [4096, 16]
  • blk.3.attn_output.weight.lora_b · F16 · [16, 2880]
  • blk.3.attn_q.weight.lora_a · F16 · [2880, 16]
  • blk.3.attn_q.weight.lora_b · F16 · [16, 4096]
  • blk.3.attn_v.weight.lora_a · F16 · [2880, 16]
  • blk.3.attn_v.weight.lora_b · F16 · [16, 512]
  • blk.4
  • blk.4.attn_k.weight.lora_a · F16 · [2880, 16]
  • blk.4.attn_k.weight.lora_b · F16 · [16, 512]
  • blk.4.attn_output.weight.lora_a · F16 · [4096, 16]
  • blk.4.attn_output.weight.lora_b · F16 · [16, 2880]
  • blk.4.attn_q.weight.lora_a · F16 · [2880, 16]
  • blk.4.attn_q.weight.lora_b · F16 · [16, 4096]
  • blk.4.attn_v.weight.lora_a · F16 · [2880, 16]
  • blk.4.attn_v.weight.lora_b · F16 · [16, 512]
  • blk.5
  • blk.5.attn_k.weight.lora_a · F16 · [2880, 16]
  • blk.5.attn_k.weight.lora_b · F16 · [16, 512]
  • blk.5.attn_output.weight.lora_a · F16 · [4096, 16]
  • blk.5.attn_output.weight.lora_b · F16 · [16, 2880]
  • blk.5.attn_q.weight.lora_a · F16 · [2880, 16]
  • blk.5.attn_q.weight.lora_b · F16 · [16, 4096]
  • blk.5.attn_v.weight.lora_a · F16 · [2880, 16]
  • blk.5.attn_v.weight.lora_b · F16 · [16, 512]
  • blk.6
  • blk.6.attn_k.weight.lora_a · F16 · [2880, 16]
  • blk.6.attn_k.weight.lora_b · F16 · [16, 512]
  • blk.6.attn_output.weight.lora_a · F16 · [4096, 16]
  • blk.6.attn_output.weight.lora_b · F16 · [16, 2880]
  • blk.6.attn_q.weight.lora_a · F16 · [2880, 16]
  • blk.6.attn_q.weight.lora_b · F16 · [16, 4096]
  • blk.6.attn_v.weight.lora_a · F16 · [2880, 16]
  • blk.6.attn_v.weight.lora_b · F16 · [16, 512]
  • blk.7
  • blk.7.attn_k.weight.lora_a · F16 · [2880, 16]
  • blk.7.attn_k.weight.lora_b · F16 · [16, 512]
  • blk.7.attn_output.weight.lora_a · F16 · [4096, 16]
  • blk.7.attn_output.weight.lora_b · F16 · [16, 2880]
  • blk.7.attn_q.weight.lora_a · F16 · [2880, 16]
  • blk.7.attn_q.weight.lora_b · F16 · [16, 4096]
  • blk.7.attn_v.weight.lora_a · F16 · [2880, 16]
  • blk.7.attn_v.weight.lora_b · F16 · [16, 512]
  • blk.8
  • blk.8.attn_k.weight.lora_a · F16 · [2880, 16]
  • blk.8.attn_k.weight.lora_b · F16 · [16, 512]
  • blk.8.attn_output.weight.lora_a · F16 · [4096, 16]
  • blk.8.attn_output.weight.lora_b · F16 · [16, 2880]
  • blk.8.attn_q.weight.lora_a · F16 · [2880, 16]
  • blk.8.attn_q.weight.lora_b · F16 · [16, 4096]
  • blk.8.attn_v.weight.lora_a · F16 · [2880, 16]
  • blk.8.attn_v.weight.lora_b · F16 · [16, 512]
  • blk.9
  • blk.9.attn_k.weight.lora_a · F16 · [2880, 16]
  • blk.9.attn_k.weight.lora_b · F16 · [16, 512]
  • blk.9.attn_output.weight.lora_a · F16 · [4096, 16]
  • blk.9.attn_output.weight.lora_b · F16 · [16, 2880]
  • blk.9.attn_q.weight.lora_a · F16 · [2880, 16]
  • blk.9.attn_q.weight.lora_b · F16 · [16, 4096]
  • blk.9.attn_v.weight.lora_a · F16 · [2880, 16]
  • blk.9.attn_v.weight.lora_b · F16 · [16, 512]
  • blk.10
  • blk.10.attn_k.weight.lora_a · F16 · [2880, 16]
  • blk.10.attn_k.weight.lora_b · F16 · [16, 512]
  • blk.10.attn_output.weight.lora_a · F16 · [4096, 16]
  • blk.10.attn_output.weight.lora_b · F16 · [16, 2880]
  • blk.10.attn_q.weight.lora_a · F16 · [2880, 16]
  • blk.10.attn_q.weight.lora_b · F16 · [16, 4096]
  • blk.10.attn_v.weight.lora_a · F16 · [2880, 16]
  • blk.10.attn_v.weight.lora_b · F16 · [16, 512]
  • blk.11
  • blk.11.attn_k.weight.lora_a · F16 · [2880, 16]
  • blk.11.attn_k.weight.lora_b · F16 · [16, 512]
  • blk.11.attn_output.weight.lora_a · F16 · [4096, 16]
  • blk.11.attn_output.weight.lora_b · F16 · [16, 2880]
  • blk.11.attn_q.weight.lora_a · F16 · [2880, 16]
  • blk.11.attn_q.weight.lora_b · F16 · [16, 4096]
  • blk.11.attn_v.weight.lora_a · F16 · [2880, 16]
  • blk.11.attn_v.weight.lora_b · F16 · [16, 512]
  • blk.12
  • blk.12.attn_k.weight.lora_a · F16 · [2880, 16]
  • blk.12.attn_k.weight.lora_b · F16 · [16, 512]
  • blk.12.attn_output.weight.lora_a · F16 · [4096, 16]
  • blk.12.attn_output.weight.lora_b · F16 · [16, 2880]
  • blk.12.attn_q.weight.lora_a · F16 · [2880, 16]
  • blk.12.attn_q.weight.lora_b · F16 · [16, 4096]
  • blk.12.attn_v.weight.lora_a · F16 · [2880, 16]
  • blk.12.attn_v.weight.lora_b · F16 · [16, 512]
  • blk.13
  • blk.13.attn_k.weight.lora_a · F16 · [2880, 16]
  • blk.13.attn_k.weight.lora_b · F16 · [16, 512]
  • blk.13.attn_output.weight.lora_a · F16 · [4096, 16]
  • blk.13.attn_output.weight.lora_b · F16 · [16, 2880]
  • blk.13.attn_q.weight.lora_a · F16 · [2880, 16]
  • blk.13.attn_q.weight.lora_b · F16 · [16, 4096]
  • blk.13.attn_v.weight.lora_a · F16 · [2880, 16]
  • blk.13.attn_v.weight.lora_b · F16 · [16, 512]
  • blk.14
  • blk.14.attn_k.weight.lora_a · F16 · [2880, 16]
  • blk.14.attn_k.weight.lora_b · F16 · [16, 512]
  • blk.14.attn_output.weight.lora_a · F16 · [4096, 16]
  • blk.14.attn_output.weight.lora_b · F16 · [16, 2880]
  • blk.14.attn_q.weight.lora_a · F16 · [2880, 16]
  • blk.14.attn_q.weight.lora_b · F16 · [16, 4096]
  • blk.14.attn_v.weight.lora_a · F16 · [2880, 16]
  • blk.14.attn_v.weight.lora_b · F16 · [16, 512]
  • blk.15
  • blk.15.attn_k.weight.lora_a · F16 · [2880, 16]
  • blk.15.attn_k.weight.lora_b · F16 · [16, 512]
  • blk.15.attn_output.weight.lora_a · F16 · [4096, 16]
  • blk.15.attn_output.weight.lora_b · F16 · [16, 2880]
  • blk.15.attn_q.weight.lora_a · F16 · [2880, 16]
  • blk.15.attn_q.weight.lora_b · F16 · [16, 4096]
  • blk.15.attn_v.weight.lora_a · F16 · [2880, 16]
  • blk.15.attn_v.weight.lora_b · F16 · [16, 512]
  • blk.16
  • blk.16.attn_k.weight.lora_a · F16 · [2880, 16]
  • blk.16.attn_k.weight.lora_b · F16 · [16, 512]
  • blk.16.attn_output.weight.lora_a · F16 · [4096, 16]
  • blk.16.attn_output.weight.lora_b · F16 · [16, 2880]
  • blk.16.attn_q.weight.lora_a · F16 · [2880, 16]
  • blk.16.attn_q.weight.lora_b · F16 · [16, 4096]
  • blk.16.attn_v.weight.lora_a · F16 · [2880, 16]
  • blk.16.attn_v.weight.lora_b · F16 · [16, 512]
  • blk.17
  • blk.17.attn_k.weight.lora_a · F16 · [2880, 16]
  • blk.17.attn_k.weight.lora_b · F16 · [16, 512]
  • blk.17.attn_output.weight.lora_a · F16 · [4096, 16]
  • blk.17.attn_output.weight.lora_b · F16 · [16, 2880]
  • blk.17.attn_q.weight.lora_a · F16 · [2880, 16]
  • blk.17.attn_q.weight.lora_b · F16 · [16, 4096]
  • blk.17.attn_v.weight.lora_a · F16 · [2880, 16]
  • blk.17.attn_v.weight.lora_b · F16 · [16, 512]
  • blk.18
  • blk.18.attn_k.weight.lora_a · F16 · [2880, 16]
  • blk.18.attn_k.weight.lora_b · F16 · [16, 512]
  • blk.18.attn_output.weight.lora_a · F16 · [4096, 16]
  • blk.18.attn_output.weight.lora_b · F16 · [16, 2880]
  • blk.18.attn_q.weight.lora_a · F16 · [2880, 16]
  • blk.18.attn_q.weight.lora_b · F16 · [16, 4096]
  • blk.18.attn_v.weight.lora_a · F16 · [2880, 16]
  • blk.18.attn_v.weight.lora_b · F16 · [16, 512]
  • blk.19
  • blk.19.attn_k.weight.lora_a · F16 · [2880, 16]
  • blk.19.attn_k.weight.lora_b · F16 · [16, 512]
  • blk.19.attn_output.weight.lora_a · F16 · [4096, 16]
  • blk.19.attn_output.weight.lora_b · F16 · [16, 2880]
  • blk.19.attn_q.weight.lora_a · F16 · [2880, 16]
  • blk.19.attn_q.weight.lora_b · F16 · [16, 4096]
  • blk.19.attn_v.weight.lora_a · F16 · [2880, 16]
  • blk.19.attn_v.weight.lora_b · F16 · [16, 512]
  • blk.20
  • blk.20.attn_k.weight.lora_a · F16 · [2880, 16]
  • blk.20.attn_k.weight.lora_b · F16 · [16, 512]
  • blk.20.attn_output.weight.lora_a · F16 · [4096, 16]
  • blk.20.attn_output.weight.lora_b · F16 · [16, 2880]
  • blk.20.attn_q.weight.lora_a · F16 · [2880, 16]
  • blk.20.attn_q.weight.lora_b · F16 · [16, 4096]
  • blk.20.attn_v.weight.lora_a · F16 · [2880, 16]
  • blk.20.attn_v.weight.lora_b · F16 · [16, 512]
  • blk.21
  • blk.21.attn_k.weight.lora_a · F16 · [2880, 16]
  • blk.21.attn_k.weight.lora_b · F16 · [16, 512]
  • blk.21.attn_output.weight.lora_a · F16 · [4096, 16]
  • blk.21.attn_output.weight.lora_b · F16 · [16, 2880]
  • blk.21.attn_q.weight.lora_a · F16 · [2880, 16]
  • blk.21.attn_q.weight.lora_b · F16 · [16, 4096]
  • blk.21.attn_v.weight.lora_a · F16 · [2880, 16]
  • blk.21.attn_v.weight.lora_b · F16 · [16, 512]
  • blk.22
  • blk.22.attn_k.weight.lora_a · F16 · [2880, 16]
  • blk.22.attn_k.weight.lora_b · F16 · [16, 512]
  • blk.22.attn_output.weight.lora_a · F16 · [4096, 16]
  • blk.22.attn_output.weight.lora_b · F16 · [16, 2880]
  • blk.22.attn_q.weight.lora_a · F16 · [2880, 16]
  • blk.22.attn_q.weight.lora_b · F16 · [16, 4096]
  • blk.22.attn_v.weight.lora_a · F16 · [2880, 16]
  • blk.22.attn_v.weight.lora_b · F16 · [16, 512]
  • blk.23
  • blk.23.attn_k.weight.lora_a · F16 · [2880, 16]
  • blk.23.attn_k.weight.lora_b · F16 · [16, 512]
  • blk.23.attn_output.weight.lora_a · F16 · [4096, 16]
  • blk.23.attn_output.weight.lora_b · F16 · [16, 2880]
  • blk.23.attn_q.weight.lora_a · F16 · [2880, 16]
  • blk.23.attn_q.weight.lora_b · F16 · [16, 4096]
  • blk.23.attn_v.weight.lora_a · F16 · [2880, 16]
  • blk.23.attn_v.weight.lora_b · F16 · [16, 512]
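
A quick consistency check on the listing above: the adapter carries rank-16 A/B pairs on the q, k, v, and output projections of all 24 blocks (blk.0 through blk.23), with identical shapes in every block. Summing the listed shapes and multiplying by 2 bytes per F16 parameter recovers the ~16MB file size shown in the header (ignoring GGUF metadata overhead):

```python
# Per-block LoRA tensor shapes exactly as listed above (rank r = 16).
shapes = {
    "attn_q":      [(2880, 16), (16, 4096)],
    "attn_k":      [(2880, 16), (16, 512)],
    "attn_v":      [(2880, 16), (16, 512)],
    "attn_output": [(4096, 16), (16, 2880)],
}

n_blocks = 24  # blk.0 .. blk.23

params_per_block = sum(a * b for pair in shapes.values() for (a, b) in pair)
total_params = n_blocks * params_per_block

print(params_per_block)        # 331776 parameters per block
print(total_params)            # 7962624 parameters in total
print(total_params * 2 / 1e6)  # ~15.9 MB at 2 bytes/param (F16), matching the listed 16MB
```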