II.
Tool JSON
Structured · live · tool:litellm
LiteLLM JSON
Inspect the normalized record payload exactly as the atlas UI reads it.
{
"id": "tool:litellm",
"_kind": "Tool",
"_file": "domain/tools/llm-proxies.yaml",
"_cluster": "domain",
"attributes": {
"displayName": "LiteLLM",
"homepageUrl": "https://github.com/BerriAI/litellm",
"kind": "other",
"description": "Python proxy and SDK that provides a unified OpenAI-compatible\ninterface to 100+ LLM providers including Anthropic, Azure, Bedrock,\nVertex AI, Ollama, and Hugging Face. Features include load balancing,\nfallbacks, spend tracking, rate limiting, and a proxy server mode\nthat lets any OpenAI-compatible client talk to any backend.\n"
},
"outgoingEdges": [
{
"from": "tool:litellm",
"to": "language:python",
"kind": "belongs_to_language"
},
{
"from": "tool:litellm",
"to": "skill-area:ai-agent-development",
"kind": "tool_used_by",
"attributes": {}
},
{
"from": "tool:litellm",
"to": "skill-area:model-serving-operations",
"kind": "tool_used_by",
"attributes": {}
},
{
"from": "tool:litellm",
"to": "tool:openrouter",
"kind": "alternative_to",
"attributes": {
"comparison": "LLM proxy / gateway services"
}
},
{
"from": "tool:litellm",
"to": "tool:portkey-ai",
"kind": "alternative_to",
"attributes": {
"comparison": "LLM proxy / gateway services"
}
},
{
"from": "tool:litellm",
"to": "tool:helicone",
"kind": "alternative_to",
"attributes": {
"comparison": "LLM proxy / gateway services"
}
}
],
"incomingEdges": [
{
"from": "stack-profile:ai-agent-stack",
"to": "tool:litellm",
"kind": "composed_of"
},
{
"from": "tool:openrouter",
"to": "tool:litellm",
"kind": "alternative_to",
"attributes": {
"comparison": "LLM proxy / gateway services"
}
},
{
"from": "tool:portkey-ai",
"to": "tool:litellm",
"kind": "alternative_to",
"attributes": {
"comparison": "LLM proxy / gateway services"
}
},
{
"from": "tool:helicone",
"to": "tool:litellm",
"kind": "alternative_to",
"attributes": {
"comparison": "LLM proxy / gateway services"
}
}
]
}