II.
KnowledgeFabricImpl JSON
Structured · live · knowledge-fabric-impl:private-gpt-fabric
PrivateGPT as Local-Only Knowledge Fabric · json
Inspect the normalized record payload exactly as the atlas UI reads it.
{
"id": "knowledge-fabric-impl:private-gpt-fabric",
"_kind": "KnowledgeFabricImpl",
"_file": "agent-stack/knowledge-fabric-impls/oss-rag-fabrics.yaml",
"_cluster": "agent-stack",
"attributes": {
"displayName": "PrivateGPT as Local-Only Knowledge Fabric",
"description": "PrivateGPT as a fully offline, privacy-first knowledge fabric. Runs\nentirely on local hardware with no data leaving the machine. Ingests\ndocuments (PDF, DOCX, TXT, Markdown), embeds them locally using\nopen-source models, and answers questions using a local LLM. Supports\nOllama, llama.cpp, and HuggingFace backends. As a knowledge fabric,\nPrivateGPT is the choice for air-gapped environments, regulated\nindustries, and sensitive document analysis where cloud APIs are\nprohibited by policy or regulation.\n",
"knowledgeFileFormats": [
"pdf",
"docx",
"txt",
"markdown"
],
"retrievalStrategy": "semantic-search",
"knowledgePersistence": "local-index",
"knowledgeScopes": [
"project",
"user"
],
"autoExtractionSupport": false,
"notes": "PrivateGPT's value proposition is absolute data privacy — no API calls,\nno cloud processing, no data exfiltration risk. This makes it viable for\nknowledge fabrics containing classified, HIPAA, or legally privileged\ninformation. The trade-off is quality — local embedding and generation\nmodels lag cloud models in accuracy and capability. For many regulated\nuse cases, this trade-off is acceptable.\n"
},
"outgoingEdges": [
{
"from": "knowledge-fabric-impl:private-gpt-fabric",
"to": "layer:12-knowledge-fabric",
"kind": "realizes",
"attributes": {}
},
{
"from": "knowledge-fabric-impl:private-gpt-fabric",
"to": "tool:private-gpt",
"kind": "integrates_with",
"attributes": {}
}
],
"incomingEdges": []
}