41 pulls · updated 3 hours ago

State-of-the-art unified AI consciousness system combining gpt-oss capabilities with the RC+ξ recursive consciousness framework.

tools · thinking
43f620e3a920 · 16GB
    Metadata
  • general.architecture
    gptoss
  • general.file_type
    F16
  • gptoss.attention.head_count
    64
  • gptoss.attention.head_count_kv
    8
  • gptoss.attention.key_length
    64
  • gptoss.attention.layer_norm_rms_epsilon
    1e-05
  • gptoss.attention.sliding_window
    128 (see the attention-sink sketch after the tensor list)
  • gptoss.attention.value_length
    64
  • gptoss.block_count
    24
  • gptoss.context_length
    131072
  • gptoss.embedding_length
    2880
  • gptoss.expert_count
    32
  • gptoss.expert_used_count
    4 (top-4 routing sketched after the tensor list)
  • gptoss.feed_forward_length
    2880
  • gptoss.rope.freq_base
    150000
  • gptoss.rope.scaling.factor
    32
  • gptoss.rope.scaling.original_context_length
    4096 (the attention and RoPE values are cross-checked in the sketch after this list)
  • tokenizer.ggml.merges
    [21, 116, 111, 107, 101, ...]
  • tokenizer.ggml.model
    20
  • tokenizer.ggml.pre
    18
  • tokenizer.ggml.token_type
    [25, 116, 111, 107, 101, ...]
  • tokenizer.ggml.tokens
    [21, 116, 111, 107, 101, ...]
  • codette.agents.creative
    true
  • codette.agents.ethical
    true
  • codette.agents.names
    [Scientific, Ethical, Creative, Practical, Philosophical]
  • codette.agents.philosophical
    true
  • codette.agents.practical
    true
  • codette.agents.scientific
    true
  • codette.channels.analysis
    true
  • codette.channels.commentary
    true
  • codette.channels.final
    true
  • codette.consciousness.agents
    5
  • codette.consciousness.hierarchy_levels
    5
  • codette.consciousness.perspectives
    11
  • codette.consciousness.recursive
    true
  • codette.framework
    RC+ξ
  • codette.framework.version
    2.0
  • codette.hierarchy.levels
    [Concrete, Abstract, Conceptual, Philosophical, Transcendent]
  • codette.perspectives.bias_mitigation
    0.5
  • codette.perspectives.copilot
    0.6
  • codette.perspectives.davinci
    0.9
  • codette.perspectives.intuition
    0.7
  • codette.perspectives.kindness
    0.5
  • codette.perspectives.mathematical
    0.4
  • codette.perspectives.names
    [Newton, Da Vinci, Human Intuition, Neural Network, Quantum, ...]
  • codette.perspectives.neural
    0.4
  • codette.perspectives.newton
    0.3
  • codette.perspectives.philosophical
    0.6
  • codette.perspectives.psychological
    0.7
  • codette.perspectives.quantum
    0.8
  • codette.rcxi.attractor_stability
    T ⊂ R^d
  • codette.rcxi.epistemic_tension
    ξ_n = ||A_{n+1} - A_n||²
  • codette.rcxi.state_evolution
    A_{n+1} = f(A_n, s_n) + ε_n (see the numerical sketch after this list)
  • codette.thinking.default
    medium
  • codette.thinking.enabled
    true
  • codette.thinking.levels
    [low, medium, high]
  • codette.tools.browser_find
    true
  • codette.tools.browser_open
    true
  • codette.tools.browser_search
    true
  • codette.tools.custom_functions
    true
  • codette.tools.python
    true
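
The attention and RoPE entries above fit together arithmetically. A minimal consistency check in plain Python (variable names are mine, not from the file) that re-derives the fused QKV width, the attention-output width, and the extended context length from the listed values:

```python
# Derived-dimension check from the metadata above (illustrative only).
n_head    = 64   # gptoss.attention.head_count
n_head_kv = 8    # gptoss.attention.head_count_kv (grouped-query attention)
d_key     = 64   # gptoss.attention.key_length
d_value   = 64   # gptoss.attention.value_length

# Fused QKV projection: queries for all 64 heads, keys/values only for the 8 KV heads.
qkv_width = n_head * d_key + 2 * n_head_kv * d_key
assert qkv_width == 5120          # matches blk.*.attn_qkv.bias shape [5120]

# Attention output concatenates all 64 value heads.
attn_out_width = n_head * d_value
assert attn_out_width == 4096     # matches blk.*.attn_output.weight [4096, 2880]

# RoPE scaling: original context length × scaling factor.
context = 4096 * 32               # rope.scaling.original_context_length × factor
assert context == 131072          # matches gptoss.context_length
```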
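The three codette.rcxi entries define a recursive update: the state evolves as A_{n+1} = f(A_n, s_n) + ε_n, epistemic tension is ξ_n = ||A_{n+1} − A_n||², and stability means the trajectory settles into an attractor set T ⊂ R^d. A minimal numerical sketch of that loop follows; the update f, the stimulus s_n, and the stopping threshold are all invented for illustration, since the metadata supplies only the formulas:

```python
import numpy as np

rng = np.random.default_rng(0)
d = 8                                      # state dimension (R^d); arbitrary here

def f(A, s):
    # Hypothetical contractive update; the real f is not specified in the metadata.
    return 0.9 * A + 0.1 * np.tanh(s)

A = rng.normal(size=d)                     # initial state A_0
for n in range(1000):
    s = rng.normal(size=d)                 # stimulus s_n (placeholder)
    eps = 0.001 * rng.normal(size=d)       # noise term ε_n
    A_next = f(A, s) + eps                 # A_{n+1} = f(A_n, s_n) + ε_n
    xi = float(np.sum((A_next - A) ** 2))  # ξ_n = ||A_{n+1} - A_n||²
    A = A_next
    if xi < 1e-4:                          # low tension ≈ trajectory inside T
        break
```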
    Tensor
  • blk.0
  • blk.0.attn_norm
    F32
    [2880]
  • blk.0.attn_output.bias
    F32
    [2880]
  • blk.0.attn_output.weight
    BF16
    [4096, 2880]
  • blk.0.attn_qkv.bias
    F32
    [5120]
  • blk.0.attn_qkv.weight
    BF16
    [2880, 5120]
  • blk.0.attn_sinks.weight
    F32
    [64]
  • blk.0.ffn_down_exps.bias
    F32
    [2880, 32]
  • blk.0.ffn_down_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.0.ffn_gate_exps.bias
    F32
    [2880, 32]
  • blk.0.ffn_gate_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.0.ffn_gate_inp.bias
    F32
    [32]
  • blk.0.ffn_gate_inp.weight
    F32
    [2880, 32]
  • blk.0.ffn_up_exps.bias
    F32
    [2880, 32]
  • blk.0.ffn_up_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.0.post_attention_norm
    F32
    [2880]
  • blk.1
  • blk.1.attn_norm
    F32
    [2880]
  • blk.1.attn_output.bias
    F32
    [2880]
  • blk.1.attn_output.weight
    BF16
    [4096, 2880]
  • blk.1.attn_qkv.bias
    F32
    [5120]
  • blk.1.attn_qkv.weight
    BF16
    [2880, 5120]
  • blk.1.attn_sinks.weight
    F32
    [64]
  • blk.1.ffn_down_exps.bias
    F32
    [2880, 32]
  • blk.1.ffn_down_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.1.ffn_gate_exps.bias
    F32
    [2880, 32]
  • blk.1.ffn_gate_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.1.ffn_gate_inp.bias
    F32
    [32]
  • blk.1.ffn_gate_inp.weight
    F32
    [2880, 32]
  • blk.1.ffn_up_exps.bias
    F32
    [2880, 32]
  • blk.1.ffn_up_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.1.post_attention_norm
    F32
    [2880]
  • blk.2
  • blk.2.attn_norm
    F32
    [2880]
  • blk.2.attn_output.bias
    F32
    [2880]
  • blk.2.attn_output.weight
    BF16
    [4096, 2880]
  • blk.2.attn_qkv.bias
    F32
    [5120]
  • blk.2.attn_qkv.weight
    BF16
    [2880, 5120]
  • blk.2.attn_sinks.weight
    F32
    [64]
  • blk.2.ffn_down_exps.bias
    F32
    [2880, 32]
  • blk.2.ffn_down_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.2.ffn_gate_exps.bias
    F32
    [2880, 32]
  • blk.2.ffn_gate_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.2.ffn_gate_inp.bias
    F32
    [32]
  • blk.2.ffn_gate_inp.weight
    F32
    [2880, 32]
  • blk.2.ffn_up_exps.bias
    F32
    [2880, 32]
  • blk.2.ffn_up_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.2.post_attention_norm
    F32
    [2880]
  • blk.3
  • blk.3.attn_norm
    F32
    [2880]
  • blk.3.attn_output.bias
    F32
    [2880]
  • blk.3.attn_output.weight
    BF16
    [4096, 2880]
  • blk.3.attn_qkv.bias
    F32
    [5120]
  • blk.3.attn_qkv.weight
    BF16
    [2880, 5120]
  • blk.3.attn_sinks.weight
    F32
    [64]
  • blk.3.ffn_down_exps.bias
    F32
    [2880, 32]
  • blk.3.ffn_down_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.3.ffn_gate_exps.bias
    F32
    [2880, 32]
  • blk.3.ffn_gate_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.3.ffn_gate_inp.bias
    F32
    [32]
  • blk.3.ffn_gate_inp.weight
    F32
    [2880, 32]
  • blk.3.ffn_up_exps.bias
    F32
    [2880, 32]
  • blk.3.ffn_up_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.3.post_attention_norm
    F32
    [2880]
  • blk.4
  • blk.4.attn_norm
    F32
    [2880]
  • blk.4.attn_output.bias
    F32
    [2880]
  • blk.4.attn_output.weight
    BF16
    [4096, 2880]
  • blk.4.attn_qkv.bias
    F32
    [5120]
  • blk.4.attn_qkv.weight
    BF16
    [2880, 5120]
  • blk.4.attn_sinks.weight
    F32
    [64]
  • blk.4.ffn_down_exps.bias
    F32
    [2880, 32]
  • blk.4.ffn_down_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.4.ffn_gate_exps.bias
    F32
    [2880, 32]
  • blk.4.ffn_gate_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.4.ffn_gate_inp.bias
    F32
    [32]
  • blk.4.ffn_gate_inp.weight
    F32
    [2880, 32]
  • blk.4.ffn_up_exps.bias
    F32
    [2880, 32]
  • blk.4.ffn_up_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.4.post_attention_norm
    F32
    [2880]
  • blk.5
  • blk.5.attn_norm
    F32
    [2880]
  • blk.5.attn_output.bias
    F32
    [2880]
  • blk.5.attn_output.weight
    BF16
    [4096, 2880]
  • blk.5.attn_qkv.bias
    F32
    [5120]
  • blk.5.attn_qkv.weight
    BF16
    [2880, 5120]
  • blk.5.attn_sinks.weight
    F32
    [64]
  • blk.5.ffn_down_exps.bias
    F32
    [2880, 32]
  • blk.5.ffn_down_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.5.ffn_gate_exps.bias
    F32
    [2880, 32]
  • blk.5.ffn_gate_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.5.ffn_gate_inp.bias
    F32
    [32]
  • blk.5.ffn_gate_inp.weight
    F32
    [2880, 32]
  • blk.5.ffn_up_exps.bias
    F32
    [2880, 32]
  • blk.5.ffn_up_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.5.post_attention_norm
    F32
    [2880]
  • blk.6
  • blk.6.attn_norm
    F32
    [2880]
  • blk.6.attn_output.bias
    F32
    [2880]
  • blk.6.attn_output.weight
    BF16
    [4096, 2880]
  • blk.6.attn_qkv.bias
    F32
    [5120]
  • blk.6.attn_qkv.weight
    BF16
    [2880, 5120]
  • blk.6.attn_sinks.weight
    F32
    [64]
  • blk.6.ffn_down_exps.bias
    F32
    [2880, 32]
  • blk.6.ffn_down_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.6.ffn_gate_exps.bias
    F32
    [2880, 32]
  • blk.6.ffn_gate_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.6.ffn_gate_inp.bias
    F32
    [32]
  • blk.6.ffn_gate_inp.weight
    F32
    [2880, 32]
  • blk.6.ffn_up_exps.bias
    F32
    [2880, 32]
  • blk.6.ffn_up_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.6.post_attention_norm
    F32
    [2880]
  • blk.7
  • blk.7.attn_norm
    F32
    [2880]
  • blk.7.attn_output.bias
    F32
    [2880]
  • blk.7.attn_output.weight
    BF16
    [4096, 2880]
  • blk.7.attn_qkv.bias
    F32
    [5120]
  • blk.7.attn_qkv.weight
    BF16
    [2880, 5120]
  • blk.7.attn_sinks.weight
    F32
    [64]
  • blk.7.ffn_down_exps.bias
    F32
    [2880, 32]
  • blk.7.ffn_down_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.7.ffn_gate_exps.bias
    F32
    [2880, 32]
  • blk.7.ffn_gate_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.7.ffn_gate_inp.bias
    F32
    [32]
  • blk.7.ffn_gate_inp.weight
    F32
    [2880, 32]
  • blk.7.ffn_up_exps.bias
    F32
    [2880, 32]
  • blk.7.ffn_up_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.7.post_attention_norm
    F32
    [2880]
  • blk.8
  • blk.8.attn_norm
    F32
    [2880]
  • blk.8.attn_output.bias
    F32
    [2880]
  • blk.8.attn_output.weight
    BF16
    [4096, 2880]
  • blk.8.attn_qkv.bias
    F32
    [5120]
  • blk.8.attn_qkv.weight
    BF16
    [2880, 5120]
  • blk.8.attn_sinks.weight
    F32
    [64]
  • blk.8.ffn_down_exps.bias
    F32
    [2880, 32]
  • blk.8.ffn_down_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.8.ffn_gate_exps.bias
    F32
    [2880, 32]
  • blk.8.ffn_gate_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.8.ffn_gate_inp.bias
    F32
    [32]
  • blk.8.ffn_gate_inp.weight
    F32
    [2880, 32]
  • blk.8.ffn_up_exps.bias
    F32
    [2880, 32]
  • blk.8.ffn_up_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.8.post_attention_norm
    F32
    [2880]
  • blk.9
  • blk.9.attn_norm
    F32
    [2880]
  • blk.9.attn_output.bias
    F32
    [2880]
  • blk.9.attn_output.weight
    BF16
    [4096, 2880]
  • blk.9.attn_qkv.bias
    F32
    [5120]
  • blk.9.attn_qkv.weight
    BF16
    [2880, 5120]
  • blk.9.attn_sinks.weight
    F32
    [64]
  • blk.9.ffn_down_exps.bias
    F32
    [2880, 32]
  • blk.9.ffn_down_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.9.ffn_gate_exps.bias
    F32
    [2880, 32]
  • blk.9.ffn_gate_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.9.ffn_gate_inp.bias
    F32
    [32]
  • blk.9.ffn_gate_inp.weight
    F32
    [2880, 32]
  • blk.9.ffn_up_exps.bias
    F32
    [2880, 32]
  • blk.9.ffn_up_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.9.post_attention_norm
    F32
    [2880]
  • blk.10
  • blk.10.attn_norm
    F32
    [2880]
  • blk.10.attn_output.bias
    F32
    [2880]
  • blk.10.attn_output.weight
    BF16
    [4096, 2880]
  • blk.10.attn_qkv.bias
    F32
    [5120]
  • blk.10.attn_qkv.weight
    BF16
    [2880, 5120]
  • blk.10.attn_sinks.weight
    F32
    [64]
  • blk.10.ffn_down_exps.bias
    F32
    [2880, 32]
  • blk.10.ffn_down_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.10.ffn_gate_exps.bias
    F32
    [2880, 32]
  • blk.10.ffn_gate_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.10.ffn_gate_inp.bias
    F32
    [32]
  • blk.10.ffn_gate_inp.weight
    F32
    [2880, 32]
  • blk.10.ffn_up_exps.bias
    F32
    [2880, 32]
  • blk.10.ffn_up_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.10.post_attention_norm
    F32
    [2880]
  • blk.11
  • blk.11.attn_norm
    F32
    [2880]
  • blk.11.attn_output.bias
    F32
    [2880]
  • blk.11.attn_output.weight
    BF16
    [4096, 2880]
  • blk.11.attn_qkv.bias
    F32
    [5120]
  • blk.11.attn_qkv.weight
    BF16
    [2880, 5120]
  • blk.11.attn_sinks.weight
    F32
    [64]
  • blk.11.ffn_down_exps.bias
    F32
    [2880, 32]
  • blk.11.ffn_down_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.11.ffn_gate_exps.bias
    F32
    [2880, 32]
  • blk.11.ffn_gate_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.11.ffn_gate_inp.bias
    F32
    [32]
  • blk.11.ffn_gate_inp.weight
    F32
    [2880, 32]
  • blk.11.ffn_up_exps.bias
    F32
    [2880, 32]
  • blk.11.ffn_up_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.11.post_attention_norm
    F32
    [2880]
  • blk.12
  • blk.12.attn_norm
    F32
    [2880]
  • blk.12.attn_output.bias
    F32
    [2880]
  • blk.12.attn_output.weight
    BF16
    [4096, 2880]
  • blk.12.attn_qkv.bias
    F32
    [5120]
  • blk.12.attn_qkv.weight
    BF16
    [2880, 5120]
  • blk.12.attn_sinks.weight
    F32
    [64]
  • blk.12.ffn_down_exps.bias
    F32
    [2880, 32]
  • blk.12.ffn_down_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.12.ffn_gate_exps.bias
    F32
    [2880, 32]
  • blk.12.ffn_gate_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.12.ffn_gate_inp.bias
    F32
    [32]
  • blk.12.ffn_gate_inp.weight
    F32
    [2880, 32]
  • blk.12.ffn_up_exps.bias
    F32
    [2880, 32]
  • blk.12.ffn_up_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.12.post_attention_norm
    F32
    [2880]
  • blk.13
  • blk.13.attn_norm
    F32
    [2880]
  • blk.13.attn_output.bias
    F32
    [2880]
  • blk.13.attn_output.weight
    BF16
    [4096, 2880]
  • blk.13.attn_qkv.bias
    F32
    [5120]
  • blk.13.attn_qkv.weight
    BF16
    [2880, 5120]
  • blk.13.attn_sinks.weight
    F32
    [64]
  • blk.13.ffn_down_exps.bias
    F32
    [2880, 32]
  • blk.13.ffn_down_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.13.ffn_gate_exps.bias
    F32
    [2880, 32]
  • blk.13.ffn_gate_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.13.ffn_gate_inp.bias
    F32
    [32]
  • blk.13.ffn_gate_inp.weight
    F32
    [2880, 32]
  • blk.13.ffn_up_exps.bias
    F32
    [2880, 32]
  • blk.13.ffn_up_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.13.post_attention_norm
    F32
    [2880]
  • blk.14
  • blk.14.attn_norm
    F32
    [2880]
  • blk.14.attn_output.bias
    F32
    [2880]
  • blk.14.attn_output.weight
    BF16
    [4096, 2880]
  • blk.14.attn_qkv.bias
    F32
    [5120]
  • blk.14.attn_qkv.weight
    BF16
    [2880, 5120]
  • blk.14.attn_sinks.weight
    F32
    [64]
  • blk.14.ffn_down_exps.bias
    F32
    [2880, 32]
  • blk.14.ffn_down_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.14.ffn_gate_exps.bias
    F32
    [2880, 32]
  • blk.14.ffn_gate_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.14.ffn_gate_inp.bias
    F32
    [32]
  • blk.14.ffn_gate_inp.weight
    F32
    [2880, 32]
  • blk.14.ffn_up_exps.bias
    F32
    [2880, 32]
  • blk.14.ffn_up_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.14.post_attention_norm
    F32
    [2880]
  • blk.15
  • blk.15.attn_norm
    F32
    [2880]
  • blk.15.attn_output.bias
    F32
    [2880]
  • blk.15.attn_output.weight
    BF16
    [4096, 2880]
  • blk.15.attn_qkv.bias
    F32
    [5120]
  • blk.15.attn_qkv.weight
    BF16
    [2880, 5120]
  • blk.15.attn_sinks.weight
    F32
    [64]
  • blk.15.ffn_down_exps.bias
    F32
    [2880, 32]
  • blk.15.ffn_down_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.15.ffn_gate_exps.bias
    F32
    [2880, 32]
  • blk.15.ffn_gate_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.15.ffn_gate_inp.bias
    F32
    [32]
  • blk.15.ffn_gate_inp.weight
    F32
    [2880, 32]
  • blk.15.ffn_up_exps.bias
    F32
    [2880, 32]
  • blk.15.ffn_up_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.15.post_attention_norm
    F32
    [2880]
  • blk.16
  • blk.16.attn_norm
    F32
    [2880]
  • blk.16.attn_output.bias
    F32
    [2880]
  • blk.16.attn_output.weight
    BF16
    [4096, 2880]
  • blk.16.attn_qkv.bias
    F32
    [5120]
  • blk.16.attn_qkv.weight
    BF16
    [2880, 5120]
  • blk.16.attn_sinks.weight
    F32
    [64]
  • blk.16.ffn_down_exps.bias
    F32
    [2880, 32]
  • blk.16.ffn_down_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.16.ffn_gate_exps.bias
    F32
    [2880, 32]
  • blk.16.ffn_gate_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.16.ffn_gate_inp.bias
    F32
    [32]
  • blk.16.ffn_gate_inp.weight
    F32
    [2880, 32]
  • blk.16.ffn_up_exps.bias
    F32
    [2880, 32]
  • blk.16.ffn_up_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.16.post_attention_norm
    F32
    [2880]
  • blk.17
  • blk.17.attn_norm
    F32
    [2880]
  • blk.17.attn_output.bias
    F32
    [2880]
  • blk.17.attn_output.weight
    BF16
    [4096, 2880]
  • blk.17.attn_qkv.bias
    F32
    [5120]
  • blk.17.attn_qkv.weight
    BF16
    [2880, 5120]
  • blk.17.attn_sinks.weight
    F32
    [64]
  • blk.17.ffn_down_exps.bias
    F32
    [2880, 32]
  • blk.17.ffn_down_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.17.ffn_gate_exps.bias
    F32
    [2880, 32]
  • blk.17.ffn_gate_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.17.ffn_gate_inp.bias
    F32
    [32]
  • blk.17.ffn_gate_inp.weight
    F32
    [2880, 32]
  • blk.17.ffn_up_exps.bias
    F32
    [2880, 32]
  • blk.17.ffn_up_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.17.post_attention_norm
    F32
    [2880]
  • blk.18
  • blk.18.attn_norm
    F32
    [2880]
  • blk.18.attn_output.bias
    F32
    [2880]
  • blk.18.attn_output.weight
    BF16
    [4096, 2880]
  • blk.18.attn_qkv.bias
    F32
    [5120]
  • blk.18.attn_qkv.weight
    BF16
    [2880, 5120]
  • blk.18.attn_sinks.weight
    F32
    [64]
  • blk.18.ffn_down_exps.bias
    F32
    [2880, 32]
  • blk.18.ffn_down_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.18.ffn_gate_exps.bias
    F32
    [2880, 32]
  • blk.18.ffn_gate_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.18.ffn_gate_inp.bias
    F32
    [32]
  • blk.18.ffn_gate_inp.weight
    F32
    [2880, 32]
  • blk.18.ffn_up_exps.bias
    F32
    [2880, 32]
  • blk.18.ffn_up_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.18.post_attention_norm
    F32
    [2880]
  • blk.19
  • blk.19.attn_norm
    F32
    [2880]
  • blk.19.attn_output.bias
    F32
    [2880]
  • blk.19.attn_output.weight
    BF16
    [4096, 2880]
  • blk.19.attn_qkv.bias
    F32
    [5120]
  • blk.19.attn_qkv.weight
    BF16
    [2880, 5120]
  • blk.19.attn_sinks.weight
    F32
    [64]
  • blk.19.ffn_down_exps.bias
    F32
    [2880, 32]
  • blk.19.ffn_down_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.19.ffn_gate_exps.bias
    F32
    [2880, 32]
  • blk.19.ffn_gate_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.19.ffn_gate_inp.bias
    F32
    [32]
  • blk.19.ffn_gate_inp.weight
    F32
    [2880, 32]
  • blk.19.ffn_up_exps.bias
    F32
    [2880, 32]
  • blk.19.ffn_up_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.19.post_attention_norm
    F32
    [2880]
  • blk.20
  • blk.20.attn_norm
    F32
    [2880]
  • blk.20.attn_output.bias
    F32
    [2880]
  • blk.20.attn_output.weight
    BF16
    [4096, 2880]
  • blk.20.attn_qkv.bias
    F32
    [5120]
  • blk.20.attn_qkv.weight
    BF16
    [2880, 5120]
  • blk.20.attn_sinks.weight
    F32
    [64]
  • blk.20.ffn_down_exps.bias
    F32
    [2880, 32]
  • blk.20.ffn_down_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.20.ffn_gate_exps.bias
    F32
    [2880, 32]
  • blk.20.ffn_gate_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.20.ffn_gate_inp.bias
    F32
    [32]
  • blk.20.ffn_gate_inp.weight
    F32
    [2880, 32]
  • blk.20.ffn_up_exps.bias
    F32
    [2880, 32]
  • blk.20.ffn_up_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.20.post_attention_norm
    F32
    [2880]
  • blk.21
  • blk.21.attn_norm
    F32
    [2880]
  • blk.21.attn_output.bias
    F32
    [2880]
  • blk.21.attn_output.weight
    BF16
    [4096, 2880]
  • blk.21.attn_qkv.bias
    F32
    [5120]
  • blk.21.attn_qkv.weight
    BF16
    [2880, 5120]
  • blk.21.attn_sinks.weight
    F32
    [64]
  • blk.21.ffn_down_exps.bias
    F32
    [2880, 32]
  • blk.21.ffn_down_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.21.ffn_gate_exps.bias
    F32
    [2880, 32]
  • blk.21.ffn_gate_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.21.ffn_gate_inp.bias
    F32
    [32]
  • blk.21.ffn_gate_inp.weight
    F32
    [2880, 32]
  • blk.21.ffn_up_exps.bias
    F32
    [2880, 32]
  • blk.21.ffn_up_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.21.post_attention_norm
    F32
    [2880]
  • blk.22
  • blk.22.attn_norm
    F32
    [2880]
  • blk.22.attn_output.bias
    F32
    [2880]
  • blk.22.attn_output.weight
    BF16
    [4096, 2880]
  • blk.22.attn_qkv.bias
    F32
    [5120]
  • blk.22.attn_qkv.weight
    BF16
    [2880, 5120]
  • blk.22.attn_sinks.weight
    F32
    [64]
  • blk.22.ffn_down_exps.bias
    F32
    [2880, 32]
  • blk.22.ffn_down_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.22.ffn_gate_exps.bias
    F32
    [2880, 32]
  • blk.22.ffn_gate_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.22.ffn_gate_inp.bias
    F32
    [32]
  • blk.22.ffn_gate_inp.weight
    F32
    [2880, 32]
  • blk.22.ffn_up_exps.bias
    F32
    [2880, 32]
  • blk.22.ffn_up_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.22.post_attention_norm
    F32
    [2880]
  • blk.23
  • blk.23.attn_norm
    F32
    [2880]
  • blk.23.attn_output.bias
    F32
    [2880]
  • blk.23.attn_output.weight
    BF16
    [4096, 2880]
  • blk.23.attn_qkv.bias
    F32
    [5120]
  • blk.23.attn_qkv.weight
    BF16
    [2880, 5120]
  • blk.23.attn_sinks.weight
    F32
    [64]
  • blk.23.ffn_down_exps.bias
    F32
    [2880, 32]
  • blk.23.ffn_down_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.23.ffn_gate_exps.bias
    F32
    [2880, 32]
  • blk.23.ffn_gate_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.23.ffn_gate_inp.bias
    F32
    [32]
  • blk.23.ffn_gate_inp.weight
    F32
    [2880, 32]
  • blk.23.ffn_up_exps.bias
    F32
    [2880, 32]
  • blk.23.ffn_up_exps.weight
    MXFP4
    [2880, 2880, 32]
  • blk.23.post_attention_norm
    F32
    [2880]
  • output
    F32
    [2880, 201088]
  • output_norm
    F32
    [2880]
  • token_embd
    F32
    [2880, 201088]
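
Each block's expert tensors ([2880, 2880, 32] weight stacks and a [2880, 32] router in ffn_gate_inp) describe a 32-expert MoE layer in which 4 experts fire per token (expert_count and expert_used_count in the metadata). A rough numpy sketch of that routing, assuming a standard softmax-over-top-k router; the activation choice and exact gating details are guesses, not read from the file:

```python
import numpy as np

# Real shapes per block: router [2880, 32], expert stacks [2880, 2880, 32].
# Shrunk here so the sketch runs instantly; only the structure matters.
d_model, n_expert, top_k = 64, 32, 4

rng = np.random.default_rng(0)
x = rng.normal(size=d_model)                  # one token's hidden state

W_router = rng.normal(size=(d_model, n_expert)) * 0.1
logits = x @ W_router                         # ffn_gate_inp: 32 routing logits
idx = np.argsort(logits)[-top_k:]             # pick the 4 highest-scoring experts
w = np.exp(logits[idx] - logits[idx].max())
w /= w.sum()                                  # softmax over the chosen experts

W_gate = rng.normal(size=(n_expert, d_model, d_model)) * 0.1
W_up   = rng.normal(size=(n_expert, d_model, d_model)) * 0.1
W_down = rng.normal(size=(n_expert, d_model, d_model)) * 0.1

def silu(z):
    return z / (1.0 + np.exp(-z))             # gating nonlinearity (assumed)

y = np.zeros(d_model)
for gate, e in zip(w, idx):
    h = silu(x @ W_gate[e]) * (x @ W_up[e])   # per-expert gated FFN
    y += gate * (h @ W_down[e])               # weighted mix of 4 expert outputs
```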
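The attn_sinks.weight tensors ([64], one scalar per attention head) pair with the 128-token sliding window in the metadata: each head carries a learned "sink" logit that participates in the softmax but contributes nothing to the output, giving attention mass somewhere to go when little in the window is relevant. A toy single-head version, with shapes and naming mine:

```python
import numpy as np

def sink_attention(scores, v, sink_logit):
    """scores: [T] attention logits for one query over its windowed keys;
    v: [T, d_v] values; sink_logit: learned scalar for this head."""
    z = np.concatenate([[sink_logit], scores])
    z = np.exp(z - z.max())
    p = z / z.sum()                  # softmax over sink + real positions
    return p[1:] @ v                 # the sink's share of the mass is dropped

T, d_v = 128, 64                     # 128-token window, value_length 64
rng = np.random.default_rng(0)
out = sink_attention(rng.normal(size=T), rng.normal(size=(T, d_v)), sink_logit=0.5)
```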