II.
Workflow JSON
Structured · live · workflow:data-backfill-procedure
Data Backfill Procedure (JSON)
Inspect the normalized record payload exactly as the atlas UI reads it.
{
"id": "workflow:data-backfill-procedure",
"_kind": "Workflow",
"_file": "domain/workflows/workflows-technical-depth.yaml",
"_cluster": "domain",
"attributes": {
"displayName": "Data Backfill Procedure",
"description": "Operational workflow for populating historical data gaps or re-processing existing\nrecords after a pipeline logic change or schema migration. The data engineer scopes\nthe backfill range, estimates compute and storage costs, and designs an idempotent\nbackfill job that can be paused and resumed safely. The job runs in a low-priority\nbatch window to avoid competing with live workloads. Progress is monitored via row\ncounts and data quality checks, and the backfill is marked complete only after a\nreconciliation query confirms consistency between source and destination.\n",
"workflowKind": "operational",
"triggerType": "on-demand",
"typicalCadence": "on-demand",
"complexity": "moderate"
},
"outgoingEdges": [
{
"from": "workflow:data-backfill-procedure",
"to": "role:data-engineer",
"kind": "involves_role"
},
{
"from": "workflow:data-backfill-procedure",
"to": "role:sre",
"kind": "involves_role"
},
{
"from": "workflow:data-backfill-procedure",
"to": "role:backend-engineer",
"kind": "involves_role"
},
{
"from": "workflow:data-backfill-procedure",
"to": "domain:data-engineering",
"kind": "applies_to_domain"
}
],
"incomingEdges": [
{
"from": "lib-process:data-engineering-analytics--etl-elt-pipeline",
"to": "workflow:data-backfill-procedure",
"kind": "lib_implements_workflow",
"attributes": {
"weight": 0.7
}
},
{
"from": "lib-process:data-engineering-analytics--pipeline-migration",
"to": "workflow:data-backfill-procedure",
"kind": "lib_implements_workflow",
"attributes": {
"weight": 0.7
}
}
]
}