huihui_ai/aya-expanse-abliterated:32b-fp16
1,766 Downloads · Updated 9 months ago
Cohere For AI's language models trained to perform well across 23 different languages.
Tags: tools · 8b · 32b
aya-expanse-abliterated:32b-fp16
model · 4fbf20941e68 · 65GB
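To pull and chat with this exact tag locally, use the standard Ollama CLI commands (the prompt here is an arbitrary example; the F16 weights need roughly 65GB of disk and comparable memory to run):

    ollama pull huihui_ai/aya-expanse-abliterated:32b-fp16
    ollama run huihui_ai/aya-expanse-abliterated:32b-fp16 "Translate 'good morning' into Turkish."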
Metadata
general.architecture                     command-r
general.file_type                        F16
command-r.attention.head_count           64
command-r.attention.head_count_kv        8
command-r.attention.layer_norm_epsilon   1e-05
command-r.block_count                    40
command-r.context_length                 8192
command-r.embedding_length               8192
command-r.feed_forward_length            24576
command-r.logit_scale                    0.0625
command-r.rope.freq_base                 4e+06
command-r.rope.scaling.type              none
tokenizer.ggml.add_bos_token             true
tokenizer.ggml.add_eos_token             false
tokenizer.ggml.bos_token_id              5
tokenizer.ggml.eos_token_id              255001
tokenizer.ggml.merges                    [Ġ Ġ, Ġ t, e r, i n, Ġ a, ...]
tokenizer.ggml.model                     gpt2
tokenizer.ggml.padding_token_id          0
tokenizer.ggml.pre                       command-r
tokenizer.ggml.token_type                [3, 3, 3, 3, 3, ...]
tokenizer.ggml.tokens                    [<PAD>, <UNK>, <CLS>, <SEP>, <MASK_TOKEN>, ...]
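The attention values above fully determine the projection widths in the tensor table that follows. A minimal sketch in plain Python (the variable names are illustrative, not part of the GGUF spec):

    # Derive the attention tensor widths from the metadata above.
    embedding_length = 8192   # command-r.embedding_length
    head_count = 64           # command-r.attention.head_count
    head_count_kv = 8         # command-r.attention.head_count_kv

    head_dim = embedding_length // head_count   # 8192 // 64 = 128
    q_width = head_count * head_dim             # 64 * 128 = 8192
    kv_width = head_count_kv * head_dim         # 8 * 128 = 1024

    # Grouped-query attention: 64 / 8 = 8 query heads share each K/V head,
    # which is why attn_q.weight is [8192, 8192] while attn_k.weight and
    # attn_v.weight are only [8192, 1024].
    print(head_dim, q_width, kv_width, head_count // head_count_kv)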
Tensor
Name                       Type   Shape
token_embd.weight          F16    [8192, 256000]
blk.0
blk.0.attn_k.weight        F16    [8192, 1024]
blk.0.attn_norm.weight     F32    [8192]
blk.0.attn_output.weight   F16    [8192, 8192]
blk.0.attn_q.weight        F16    [8192, 8192]
blk.0.attn_v.weight        F16    [8192, 1024]
blk.0.ffn_down.weight      F16    [24576, 8192]
blk.0.ffn_gate.weight      F16    [8192, 24576]
blk.0.ffn_up.weight        F16    [8192, 24576]
blk.1 – blk.39
(each of the remaining 39 blocks carries the same eight tensors as blk.0, with identical types and shapes)
output_norm.weight         F32    [8192]
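As a sanity check, the listed shapes account for the 65GB blob. A minimal sketch in plain Python; it assumes the output head is tied to token_embd.weight (consistent with no output.weight tensor appearing above) and ignores GGUF header/metadata overhead:

    # Parameter count and file size from the tensor shapes listed above.
    embed = 8192 * 256000                      # token_embd.weight (F16)
    attn = 2 * 8192 * 8192 + 2 * 8192 * 1024   # q/output + k/v projections (F16)
    ffn = 3 * 8192 * 24576                     # gate, up, down (F16)

    f16_params = embed + 40 * (attn + ffn)     # 40 blocks (command-r.block_count)
    f32_params = 40 * 8192 + 8192              # per-block attn_norm + output_norm

    total = f16_params + f32_params            # ~32.3e9, matching the "32b" tag
    size_gb = (2 * f16_params + 4 * f32_params) / 1e9   # ~64.6 GB, shown as 65GB

    print(f"{total / 1e9:.1f}B params, {size_gb:.1f} GB")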