from airflow import DAG
from template.utils import hours_ago
from template.dag_template import build_dag
# Unique Airflow DAG identifier; the "_v002" suffix versions the job definition.
dag_id = 'livepop-shuffle-job_v002'
# EMR step specification, kept as a raw JSON string and handed verbatim to
# template.dag_template.build_dag (presumably parsed there — confirm against
# build_dag's contract). One step: the LivePOPShuffle Spark job.
# NOTE(review): "enable-custom-metrics" is the string "True", not a JSON
# boolean — verify that build_dag/the step runner expects the string form.
emr_steps = """[
{
"step-name": "LivePOPShuffle",
"config-json": [
{"spark.driver.memory":"9g"},
{"spark.serializer":"org.apache.spark.serializer.KryoSerializer"}
],
"main-class": "com.viooh.pop.data.live.shuffle.RawLivePOPShuffleMain",
"group-id":"com/viooh/pop",
"artifact": "pop-shuffle-live",
"jars": "/usr/lib/spark/external/lib/spark-avro.jar",
"enable-custom-metrics" : "True"
}
]"""
# Optional EMR cluster sizing, passed through to build_dag.
# Instance-type values are comma-separated candidate lists; capacities are
# instance counts (task capacity 0 = no task fleet).
cluster_args = dict([
    ("master-instance-types", "m5.xlarge,m5.2xlarge"),
    ("core-instance-types", "m5.xlarge,m5.2xlarge"),
    ("task-instance-types", "m5.xlarge,m5.2xlarge"),
    ("core-instance-capacity", 3),
    ("task-instance-capacity", 0),
])
# Default arguments applied to every task in the DAG.
# NOTE(review): hours_ago(1) is evaluated at parse time, so start_date is
# dynamic — confirm that template.utils.hours_ago handles this as intended.
dag_args = dict(
    owner='data.engineers@viooh.com',
    start_date=hours_ago(1),
)
# Instantiate the DAG. schedule_interval=None means the DAG has no cron
# schedule and only runs when triggered manually or externally.
dag = DAG(
    dag_id=dag_id,
    schedule_interval=None,  # no cron expression — trigger on demand
    default_args=dag_args,
)

# Wire the EMR step spec and cluster sizing into the DAG's tasks.
build_dag(dag=dag, emr_steps=emr_steps, cluster_args=cluster_args)