II.
LibraryProcess JSON
Structured · lib-process:data-engineering-analytics--pipeline-migration
pipeline-migration json
Inspect the normalized record payload exactly as the atlas UI reads it.
{
"id": "lib-process:data-engineering-analytics--pipeline-migration",
"_kind": "LibraryProcess",
"_file": "generated-library/processes.yaml",
"_cluster": "generated-library",
"attributes": {
"displayName": "pipeline-migration",
"description": "Data Pipeline Migration - Comprehensive workflow for migrating data pipelines with\nassessment, migration strategy, dual-run validation, cutover planning, and rollback procedures\nto ensure zero-downtime migrations.",
"libraryPath": "library/specializations/data-engineering-analytics/pipeline-migration.js",
"specialization": "data-engineering-analytics",
"references": [
"- Data Pipeline Migration Best Practices: https://www.dataengineeringweekly.com/\n- Zero-Downtime Migration Patterns: https://martinfowler.com/bliki/StranglerFigApplication.html\n- Data Quality Validation: https://www.great-expectations.io/\n- Dual-Run Testing: https://cloud.google.com/architecture/migration-to-gcp-getting-started\n- Pipeline Testing: https://docs.getdbt.com/docs/building-a-dbt-project/tests"
],
"example": "const result = await orchestrate('specializations/data-engineering-analytics/pipeline-migration', {\n  projectName: 'Legacy ETL to Modern ELT Migration',\n  sourceSystem: {\n    type: 'legacy-etl',\n    platform: 'informatica',\n    dataVolume: '500GB daily'\n  },\n  targetSystem: {\n    type: 'modern-elt',\n    platform: 'dbt',\n    cloudProvider: 'aws'\n  },\n  requirements: {\n    zeroDowntime: true,\n    dualRunDuration: '30 days',\n    dataQualityValidation: true,\n    performanceComparison: true,\n    rollbackStrategy: true\n  }\n});",
"usesAgents": [
"migration-assessor",
"dependency-analyzer",
"migration-strategist",
"pipeline-designer",
"data-quality-engineer",
"dual-run-engineer",
"validation-engineer",
"performance-analyzer",
"cutover-planner",
"rollback-strategist",
"training-coordinator",
"technical-writer",
"monitoring-engineer",
"cost-optimizer"
]
},
"outgoingEdges": [
{
"from": "lib-process:data-engineering-analytics--pipeline-migration",
"to": "domain:data-engineering",
"kind": "lib_applies_to_domain",
"attributes": {
"weight": 1
}
},
{
"from": "lib-process:data-engineering-analytics--pipeline-migration",
"to": "workflow:technical-debt-reduction",
"kind": "lib_implements_workflow",
"attributes": {
"weight": 1
}
},
{
"from": "lib-process:data-engineering-analytics--pipeline-migration",
"to": "workflow:data-backfill-procedure",
"kind": "lib_implements_workflow",
"attributes": {
"weight": 0.7
}
},
{
"from": "lib-process:data-engineering-analytics--pipeline-migration",
"to": "specialization:data-engineering-analytics",
"kind": "lib_belongs_to_specialization",
"attributes": {
"weight": 0.9
}
},
{
"from": "lib-process:data-engineering-analytics--pipeline-migration",
"to": "lib-agent:software-architecture--migration-strategist",
"kind": "uses_agent",
"attributes": {
"weight": 0.8
}
},
{
"from": "lib-process:data-engineering-analytics--pipeline-migration",
"to": "lib-agent:data-engineering-analytics--data-quality-engineer",
"kind": "uses_agent",
"attributes": {
"weight": 0.8
}
},
{
"from": "lib-process:data-engineering-analytics--pipeline-migration",
"to": "lib-agent:meta--technical-writer",
"kind": "uses_agent",
"attributes": {
"weight": 0.8
}
},
{
"from": "lib-process:data-engineering-analytics--pipeline-migration",
"to": "lib-agent:ai-agents-conversational--cost-optimizer",
"kind": "uses_agent",
"attributes": {
"weight": 0.8
}
}
],
"incomingEdges": []
}