Record
Agentic AI Atlas · Apache Spark Jobs
skill-area:spark-jobs · a5c.ai
SkillArea JSON

skill-area:spark-jobs

Structured · live

Apache Spark Jobs · JSON

Inspect the normalized record payload exactly as the atlas UI reads it.

File · domain/skill-areas/skill-areas-data-eng.yaml
Cluster · domain
Record JSON
{
  "id": "skill-area:spark-jobs",
  "_kind": "SkillArea",
  "_file": "domain/skill-areas/skill-areas-data-eng.yaml",
  "_cluster": "domain",
  "attributes": {
    "displayName": "Apache Spark Jobs",
    "description": "Authoring Spark jobs (Scala/Python/SQL) — partitioning, broadcast\njoins, AQE, and tuning shuffle and memory.\n",
    "domains": [
      "specialization:data-engineering-analytics"
    ],
    "requiresLanguages": [
      "language:scala",
      "language:python"
    ],
    "expertiseLevels": [
      "intermediate",
      "expert"
    ]
  },
  "outgoingEdges": [
    {
      "from": "skill-area:spark-jobs",
      "to": "specialization:data-engineering-analytics",
      "kind": "applies_to",
      "attributes": {
        "confidence": "primary"
      }
    },
    {
      "from": "skill-area:spark-jobs",
      "to": "language:scala",
      "kind": "uses_language"
    },
    {
      "from": "skill-area:spark-jobs",
      "to": "language:python",
      "kind": "uses_language"
    }
  ],
  "incomingEdges": [
    {
      "from": "language:python",
      "to": "skill-area:spark-jobs",
      "kind": "used_by_skill_area",
      "attributes": {}
    },
    {
      "from": "language:scala",
      "to": "skill-area:spark-jobs",
      "kind": "used_by_skill_area",
      "attributes": {}
    },
    {
      "from": "skill-area:data-analysis",
      "to": "skill-area:spark-jobs",
      "kind": "prerequisite_for_learning",
      "attributes": {
        "strength": "recommended"
      }
    },
    {
      "from": "stack-profile:data-lakehouse",
      "to": "skill-area:spark-jobs",
      "kind": "requires_skill_area"
    },
    {
      "from": "stack-profile:data-lake-stack",
      "to": "skill-area:spark-jobs",
      "kind": "requires_skill_area"
    },
    {
      "from": "lib-skill:data-engineering-analytics--spark-jobs",
      "to": "skill-area:spark-jobs",
      "kind": "lib_requires_skill_area",
      "attributes": {
        "weight": 1
      }
    },
    {
      "from": "role:data-engineer",
      "to": "skill-area:spark-jobs",
      "kind": "requires_expertise",
      "attributes": {}
    },
    {
      "from": "workflow:etl-pipeline-cost-optimization",
      "to": "skill-area:spark-jobs",
      "kind": "requires_skill_area",
      "attributes": {}
    }
  ]
}