{
"id": "agent-core-impl:opencode.core@1.x",
"_kind": "AgentCoreImpl",
"_file": "agent-stack/core-impls/opencode-core-1-x.yaml",
"_cluster": "agent-stack",
"attributes": {
"displayName": "OpenCode Core (1.x)",
"agentVersionId": "agent-version:opencode@1.x",
"packageRef": "source-ref:opencode-github",
"loopIteratorPolicy": "tool-use-loop",
"loopIteratorNotes": "Tool-use loop driven by whichever provider OpenCode is configured against\n(Anthropic Messages, OpenAI Responses, or Gemini generateContent). OpenCode\nnormalizes provider stop signals into a single internal end-of-turn event and\nadvances the loop until that event fires or the user interrupts.\n",
"contextManagementStrategy": "platform-managed",
"compactionTriggerNotes": "OpenCode delegates compaction to whichever provider is in use; no in-CLI\nthreshold is exposed.\n",
"subagentInvokerPolicy": "tool-call-handoff",
"subagentInvokerNotes": "OpenCode supports the ACP-style sub-agent handoff: a structured tool call with\na target agent identifier triggers a child loop in the same process.\n",
"resultSynthesisPolicy": "model-direct",
"resultSynthesisNotes": "Final assistant text is streamed via SSE (HTTP variant) or stdout (subprocess\nvariant); a `session-idle` event is emitted when the last chunk arrives and\nno tool calls remain.\n",
"stopDetectionStrategy": "structured-end-event",
"transportClientLibrary": "opencode-internal",
"supportedTransportProtocols": [
"model-transport:anthropic-messages",
"model-transport:openai-responses",
"model-transport:gemini-generate-content"
],
"parallelToolCallHandling": "native",
"streamingFidelity": "full",
"thinkingChannelHandling": "not-supported"
},
"outgoingEdges": [
{
"from": "agent-core-impl:opencode.core@1.x",
"to": "layer:4-agent-core",
"kind": "realizes",
"attributes": {}
},
{
"from": "agent-core-impl:opencode.core@1.x",
"to": "agent-version:opencode@1.x",
"kind": "composes",
"attributes": {
"role": "core"
}
},
{
"from": "agent-core-impl:opencode.core@1.x",
"to": "model-transport:gemini-generate-content",
"kind": "speaks",
"attributes": {}
},
{
"from": "agent-core-impl:opencode.core@1.x",
"to": "model-transport:anthropic-messages",
"kind": "speaks",
"attributes": {}
},
{
"from": "agent-core-impl:opencode.core@1.x",
"to": "model-transport:openai-responses",
"kind": "speaks",
"attributes": {}
},
{
"from": "agent-core-impl:opencode.core@1.x",
"to": "capability:can-fork",
"kind": "supports",
"attributes": {
"versionRange": ">=1.0.0 <2.0.0",
"level": "full",
"notes": "OpenCode supports session forking — a session can be branched\nfrom a prior turn and explored independently.\n"
}
},
{
"from": "agent-core-impl:opencode.core@1.x",
"to": "capability:parallel-tool-calls",
"kind": "supports",
"attributes": {
"versionRange": ">=1.0.0 <2.0.0",
"level": "full",
"notes": "OpenCode loop dispatches tool calls in parallel.\n"
}
},
{
"from": "agent-core-impl:opencode.core@1.x",
"to": "capability:streaming",
"kind": "supports",
"attributes": {
"versionRange": ">=1.0.0 <2.0.0",
"level": "full",
"notes": "OpenCode streams model output through the TUI presentation layer.\n"
}
},
{
"from": "agent-core-impl:opencode.core@1.x",
"to": "capability:supports-tool-use",
"kind": "supports",
"attributes": {
"versionRange": ">=1.0.0 <2.0.0",
"level": "full"
}
}
],
"incomingEdges": [
{
"from": "agent-version:opencode@1.x",
"to": "agent-core-impl:opencode.core@1.x",
"kind": "composed_of",
"attributes": {
"role": "core"
}
},
{
"from": "model-transport:gemini-generate-content",
"to": "agent-core-impl:opencode.core@1.x",
"kind": "spoken_by",
"attributes": {}
},
{
"from": "model-transport:anthropic-messages",
"to": "agent-core-impl:opencode.core@1.x",
"kind": "spoken_by",
"attributes": {}
},
{
"from": "model-transport:openai-responses",
"to": "agent-core-impl:opencode.core@1.x",
"kind": "spoken_by",
"attributes": {}
}
]
}