from airflow import DAG
from airflow.utils.dates import days_ago
from template.dag_template import build_dag
# Unique DAG identifier; the "_v002" suffix versions this flow definition.
dag_id = 'livepop-invalid-flow_v002'
# JSON array (kept as a raw string) describing the two EMR steps this DAG
# submits; it is handed to build_dag below, which presumably parses it.
# NOTE(review): the schema ("step-name", "config-json", "main-class",
# "group-id", "artifact", "files") must match what template.dag_template
# expects — confirm against that module before editing keys.
emr_steps = """[
{
"step-name": "InvalidPopAggregatorJob",
"config-json": [
{"spark.driver.memory":"9g"},
{"spark.serializer":"org.apache.spark.serializer.KryoSerializer"}
],
"main-class": "com.viooh.pop.aggregator.livepop.InvalidLivePOPAggregatorJobMain",
"group-id":"com/viooh/pop",
"artifact": "pop-data-aggregator",
"files": "s3://viooh-spark-artifacts-lab-cn/metrics/batch-job-metrics/0.0.0/job.conf,s3://viooh-spark-artifacts-lab-cn/metrics/batch-job-metrics/0.0.0/job.yaml"
},
{
"step-name": "CCModelTransformJob",
"config-json": [
{"spark.driver.memory":"9g"}
],
"main-class": "uk.co.viooh.job.modeltransform.ModelTransform",
"group-id": "uk/co/viooh",
"artifact": "pandora-model-transform",
"files": "s3://viooh-spark-artifacts-lab-cn/metrics/batch-job-metrics/0.0.0/job.conf,s3://viooh-spark-artifacts-lab-cn/metrics/batch-job-metrics/0.0.0/job.yaml"
}
]"""
# Optional cluster-level overrides handed to build_dag for EMR provisioning.
cluster_args = {
    # Comma-separated candidate instance types, tried in order.
    "master-instance-types": "m5.xlarge,m5.2xlarge",
    "core-instance-types": "m5.xlarge,m5.2xlarge",
    "task-instance-types": "m5.xlarge,m5.2xlarge",
    # Node counts: three core nodes, no dedicated task nodes.
    "core-instance-capacity": 3,
    "task-instance-capacity": 0,
    # EBS size (GB) kept as a string; presumably the template expects text —
    # confirm against template.dag_template before changing the type.
    "ebs-volume-size": "50",
    "emr-version": "emr-6.14.0",
}
# Default arguments applied to the DAG constructed below via default_args.
dag_args = dict(
    owner='data.engineers@viooh.com',
    # A run's tasks wait on the success of the same task in the prior run.
    depends_on_past=True,
    # NOTE(review): days_ago(4) is dynamic — it is re-evaluated every time
    # the file is parsed, which Airflow docs advise against; kept as-is to
    # preserve behavior.
    start_date=days_ago(4),
)
# Instantiate the DAG: runs daily at 02:00 (cron "0 2 * * *").
dag = DAG(
    dag_id=dag_id,
    schedule_interval="0 2 * * *",
    default_args=dag_args,
)
# Attach the EMR steps and cluster overrides to the DAG via the shared template.
build_dag(dag=dag, emr_steps=emr_steps, cluster_args=cluster_args)