II.
Provider JSON
Structured · live · provider:ollama
Ollama JSON
Inspect the normalized record payload exactly as the Atlas UI reads it.
{
"id": "provider:ollama",
"_kind": "Provider",
"_file": "compute/providers/ollama.yaml",
"_cluster": "compute",
"attributes": {
"displayName": "Ollama",
"vendor": "Ollama (community)",
"versionRange": ">=0.1.0",
"authMethods": [
"api-key"
],
"authMethodNotes": "Local-first server (default `http://localhost:11434`). No auth on the\nbare local server; `api-key` is used when fronting Ollama via a proxy\n(e.g. cloud Ollama deployments). The `api-key` enum value is selected\nhere as the closest auth-method match.\n",
"endpoints": {
"base": "http://localhost:11434",
"chat": "http://localhost:11434/api/chat",
"generate": "http://localhost:11434/api/generate",
"embed": "http://localhost:11434/api/embed",
"tags": "http://localhost:11434/api/tags",
"show": "http://localhost:11434/api/show"
},
"pricing": "Free for local execution. Cloud-hosted Ollama deployments price\nindependently.\n",
"rateLimitSignalingProtocol": "None on the local server. HTTP 5xx surfaces upstream model-runtime\nerrors. JSON error envelope: `{ \"error\": \"...\" }`.\n",
"dataResidencyOptions": [
"on-prem"
],
"vendorFeatures": [],
"slaTier": "ollama-no-sla",
"regions": [
"on-prem"
]
},
"outgoingEdges": [
{
"from": "provider:ollama",
"to": "layer:2-provider",
"kind": "realizes",
"attributes": {}
},
{
"from": "provider:ollama",
"to": "model:llama-3-3-70b-instruct@current",
"kind": "serves",
"attributes": {}
},
{
"from": "provider:ollama",
"to": "model:gemma-2-27b@current",
"kind": "serves",
"attributes": {}
},
{
"from": "provider:ollama",
"to": "protocol-message:ollama-chat-message",
"kind": "emits_message_type",
"attributes": {}
},
{
"from": "provider:ollama",
"to": "protocol-message:ollama-chat-stream-chunk",
"kind": "emits_message_type",
"attributes": {}
},
{
"from": "provider:ollama",
"to": "protocol-message:ollama-chat-stream-done",
"kind": "emits_message_type",
"attributes": {}
},
{
"from": "provider:ollama",
"to": "protocol-message:ollama-generate-response",
"kind": "emits_message_type",
"attributes": {}
},
{
"from": "provider:ollama",
"to": "protocol-message:ollama-generate-stream-chunk",
"kind": "emits_message_type",
"attributes": {}
},
{
"from": "provider:ollama",
"to": "protocol-message:ollama-embed-response",
"kind": "emits_message_type",
"attributes": {}
}
],
"incomingEdges": [
{
"from": "claim:ollama-provider-native-api-endpoints",
"to": "provider:ollama",
"kind": "about_subject",
"attributes": {}
},
{
"from": "claim:ollama-native-protocol-messages",
"to": "provider:ollama",
"kind": "about_subject",
"attributes": {}
},
{
"from": "tool-server:mcp-ollama",
"to": "provider:ollama",
"kind": "integrates_with",
"attributes": {}
}
]
}