Record
Agentic AI Atlas · ml-inference-optimizer
lib-agent:gpu-programming--ml-inference-optimizer
a5c.ai
II.
LibraryAgent JSON

lib-agent:gpu-programming--ml-inference-optimizer

Structured · live

ml-inference-optimizer json

Inspect the normalized record payload exactly as the atlas UI reads it.

File · generated-library/agents.yaml
Cluster · generated-library
Record JSON
{
  "id": "lib-agent:gpu-programming--ml-inference-optimizer",
  "_kind": "LibraryAgent",
  "_file": "generated-library/agents.yaml",
  "_cluster": "generated-library",
  "attributes": {
    "displayName": "ml-inference-optimizer",
    "description": "Agent specializing in GPU-accelerated ML model optimization for production inference. Expert in TensorRT engine building, quantization strategies (PTQ, QAT), kernel fusion patterns, dynamic batching design, ONNX model optimization, inference serving patterns, and latency/throughput tradeoffs.",
    "libraryPath": "library/specializations/gpu-programming/agents/ml-inference-optimizer/AGENT.md",
    "specialization": "gpu-programming"
  },
  "outgoingEdges": [
    {
      "from": "lib-agent:gpu-programming--ml-inference-optimizer",
      "to": "skill-area:cuda-kernels",
      "kind": "lib_requires_skill_area",
      "attributes": {
        "weight": 1
      }
    },
    {
      "from": "lib-agent:gpu-programming--ml-inference-optimizer",
      "to": "skill-area:compute-shaders",
      "kind": "lib_requires_skill_area",
      "attributes": {
        "weight": 0.7
      }
    },
    {
      "from": "lib-agent:gpu-programming--ml-inference-optimizer",
      "to": "domain:scientific-computing",
      "kind": "lib_applies_to_domain",
      "attributes": {
        "weight": 1
      }
    },
    {
      "from": "lib-agent:gpu-programming--ml-inference-optimizer",
      "to": "role:computational-scientist",
      "kind": "lib_involves_role",
      "attributes": {
        "weight": 1
      }
    },
    {
      "from": "lib-agent:gpu-programming--ml-inference-optimizer",
      "to": "role:ml-engineer",
      "kind": "lib_involves_role",
      "attributes": {
        "weight": 0.7
      }
    },
    {
      "from": "lib-agent:gpu-programming--ml-inference-optimizer",
      "to": "specialization:gpu-programming",
      "kind": "lib_belongs_to_specialization",
      "attributes": {
        "weight": 1
      }
    }
  ],
  "incomingEdges": [
    {
      "from": "lib-process:gpu-programming--custom-cuda-operator-development",
      "to": "lib-agent:gpu-programming--ml-inference-optimizer",
      "kind": "uses_agent",
      "attributes": {
        "weight": 0.8
      }
    },
    {
      "from": "lib-process:gpu-programming--ml-inference-optimization",
      "to": "lib-agent:gpu-programming--ml-inference-optimizer",
      "kind": "uses_agent",
      "attributes": {
        "weight": 0.8
      }
    }
  ]
}