69 pulls · Updated 1 week ago

A small multi-agent orchestrator built on Llama 3.2 that coordinates LLM agents and tools by emitting "next actions." Use it as the central routing brain in your agentic workflows.

Tags: tools · 1b · 3b
Digest: 2adde22f73ed · 90MB
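
For context, here is a minimal sketch of driving the orchestrator through the Ollama Python client (`pip install ollama`). The model tag `orchestrator:1b`, the example tool, and the prompt are all placeholders, and the exact shape of the model's "next action" output depends on how this adapter was trained:

```python
# Minimal sketch: ask the orchestrator for the next action and dispatch it.
# Assumes ollama-python >= 0.4 and a locally pulled tag (placeholder name).
import ollama

tools = [{
    "type": "function",
    "function": {
        "name": "web_search",  # hypothetical downstream tool
        "description": "Search the web and return the top results.",
        "parameters": {
            "type": "object",
            "properties": {"query": {"type": "string"}},
            "required": ["query"],
        },
    },
}]

response = ollama.chat(
    model="orchestrator:1b",  # placeholder -- use the tag you pulled
    messages=[{"role": "user", "content": "Summarize today's top AI news."}],
    tools=tools,
)

# The orchestrator routes rather than answers: each tool call it emits is a
# "next action" to hand off to the matching agent or tool.
for call in response.message.tool_calls or []:
    print(call.function.name, call.function.arguments)
```
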
Metadata

  general.architecture    llama
  adapter.lora.alpha      128
  adapter.type            lora
Tensors

All 16 transformer blocks (blk.0 through blk.15) carry the same set of
fourteen rank-64 LoRA tensors, so the per-block layout is listed once below,
with N standing for the block index 0–15. The dimensions (hidden size 2048,
FFN size 8192, K/V projection width 512) match the Llama 3.2 1B
configuration.

  Tensor                            Type   Shape
  blk.N.attn_k.weight.lora_a        F16    [2048, 64]
  blk.N.attn_k.weight.lora_b        F16    [64, 512]
  blk.N.attn_output.weight.lora_a   F16    [2048, 64]
  blk.N.attn_output.weight.lora_b   F16    [64, 2048]
  blk.N.attn_q.weight.lora_a        F16    [2048, 64]
  blk.N.attn_q.weight.lora_b        F16    [64, 2048]
  blk.N.attn_v.weight.lora_a        F16    [2048, 64]
  blk.N.attn_v.weight.lora_b        F16    [64, 512]
  blk.N.ffn_down.weight.lora_a      F16    [8192, 64]
  blk.N.ffn_down.weight.lora_b      F16    [64, 2048]
  blk.N.ffn_gate.weight.lora_a      F16    [2048, 64]
  blk.N.ffn_gate.weight.lora_b      F16    [64, 8192]
  blk.N.ffn_up.weight.lora_a        F16    [2048, 64]
  blk.N.ffn_up.weight.lora_b        F16    [64, 8192]
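
Each lora_a/lora_b pair is a low-rank factorization of an additive update to the corresponding base weight. Below is a minimal sketch of the standard LoRA reconstruction using the attn_k shapes from the table; the scale alpha/r = 128/64 = 2 comes from the metadata above, and the zero-filled arrays merely stand in for the trained adapter values (this illustrates the math, not how Ollama applies the adapter internally):

```python
# Sketch of the standard LoRA update for one tensor pair (blk.N.attn_k).
import numpy as np

alpha, r = 128, 64          # from the metadata; scale = alpha / r = 2.0
hidden, kv_dim = 2048, 512  # Llama 3.2 1B hidden size and K/V projection width

lora_a = np.zeros((hidden, r), dtype=np.float16)  # [2048, 64]: hidden -> rank
lora_b = np.zeros((r, kv_dim), dtype=np.float16)  # [64, 512]:  rank -> K/V dim

# Effective weight update: a rank-64 matrix with the base weight's shape.
delta_w = (alpha / r) * (lora_a @ lora_b)         # [2048, 512]

# At inference the adapted projection behaves as: y = x @ (w_base + delta_w)
x = np.zeros((1, hidden), dtype=np.float16)
y = x @ delta_w                                   # adapter's contribution
print(delta_w.shape, y.shape)                     # (2048, 512) (1, 512)
```

Since alpha is twice the rank here, the trained update is applied at an effective scale of 2 when merged into the base projections.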