Record
Agentic AI Atlas · PikiClaw Core (current)
agent-core-impl:pikiclaw.core@current
II.
AgentCoreImpl JSON

agent-core-impl:pikiclaw.core@current

Structured · live

PikiClaw Core (current) · json

Inspect the normalized record payload exactly as the atlas UI reads it.

File · agent-stack/core-impls/pikiclaw-core-current.yaml · Cluster · agent-stack
Record JSON
{
  "id": "agent-core-impl:pikiclaw.core@current",
  "_kind": "AgentCoreImpl",
  "_file": "agent-stack/core-impls/pikiclaw-core-current.yaml",
  "_cluster": "agent-stack",
  "attributes": {
    "displayName": "PikiClaw Core (current)",
    "agentVersionId": "agent-version:pikiclaw@current",
    "packageRef": "source-ref:pikiclaw-app",
    "loopIteratorPolicy": "tool-use-loop",
    "loopIteratorNotes": "Mobile-first tool-use loop optimized for touch interactions.\nSupports vision input from device camera and voice transcription\nas input modalities alongside text.\n",
    "contextManagementStrategy": "platform-managed",
    "compactionTriggerNotes": "Context management handled by the PikiClaw backend; mobile clients\nsend messages and receive streamed responses.\n",
    "subagentInvokerPolicy": "none",
    "subagentInvokerNotes": "PikiClaw does not currently support subagent dispatch; plugins\nextend capabilities via tool registration.\n",
    "resultSynthesisPolicy": "model-direct",
    "resultSynthesisNotes": "Final output rendered directly from model response in the mobile UI.\n",
    "stopDetectionStrategy": "structured-end-event",
    "transportClientLibrary": "pikiclaw-native",
    "supportedTransportProtocols": [
      "model-transport:anthropic-messages",
      "model-transport:openai-responses"
    ],
    "parallelToolCallHandling": "native",
    "streamingFidelity": "full",
    "thinkingChannelHandling": "not-supported"
  },
  "outgoingEdges": [
    {
      "from": "agent-core-impl:pikiclaw.core@current",
      "to": "layer:4-agent-core",
      "kind": "realizes",
      "attributes": {}
    },
    {
      "from": "agent-core-impl:pikiclaw.core@current",
      "to": "agent:pikiclaw",
      "kind": "composes",
      "attributes": {
        "role": "core"
      }
    },
    {
      "from": "agent-core-impl:pikiclaw.core@current",
      "to": "model-transport:anthropic-messages",
      "kind": "speaks",
      "attributes": {}
    },
    {
      "from": "agent-core-impl:pikiclaw.core@current",
      "to": "model-transport:openai-responses",
      "kind": "speaks",
      "attributes": {}
    },
    {
      "from": "agent-core-impl:pikiclaw.core@current",
      "to": "capability:streaming",
      "kind": "supports",
      "attributes": {
        "versionRange": ">=1.0.0",
        "level": "full"
      }
    },
    {
      "from": "agent-core-impl:pikiclaw.core@current",
      "to": "capability:supports-tool-use",
      "kind": "supports",
      "attributes": {
        "versionRange": ">=1.0.0",
        "level": "full"
      }
    },
    {
      "from": "agent-core-impl:pikiclaw.core@current",
      "to": "capability:vision-input",
      "kind": "supports",
      "attributes": {
        "versionRange": ">=1.0.0",
        "level": "full",
        "notes": "Camera and photo library input on mobile devices."
      }
    }
  ],
  "incomingEdges": [
    {
      "from": "agent-version:pikiclaw@current",
      "to": "agent-core-impl:pikiclaw.core@current",
      "kind": "composed_of",
      "attributes": {
        "role": "core"
      }
    }
  ]
}