Dependency | Reason |
---|---|
Dagrun Running | Task instance's dagrun was not in the 'running' state but in the state 'success'. |
Execution Date | The execution date is 2023-12-12T13:06:50.253877+00:00 but this is before the task's start date 2024-07-14T23:00:00+00:00. |
Task Instance State | Task is in the 'success' state which is not a valid state for execution. The task must be cleared in order to be run. |
def get_config(**kwargs):
    """Build the EMR job-flow configuration from the generated cluster config.

    Args:
        **kwargs: Parameters passed (as a single dict) to
            ``generate_cluster_config``.

    Returns:
        dict: A job-flow configuration dict suitable for the EMR
        ``run_job_flow`` API call.

    Raises:
        KeyError: If the generated cluster config is missing any of the
            required keys listed below.
    """
    config = generate_cluster_config(kwargs)

    # These keys are copied verbatim from the generated cluster config.
    passthrough_keys = (
        "Name",
        "LogUri",
        "ReleaseLabel",
        "Instances",
        "BootstrapActions",
        "Applications",
        "Steps",
        "JobFlowRole",
        "ServiceRole",
        "SecurityConfiguration",
        "Tags",
        "Configurations",
    )
    job_flow_config = {key: config[key] for key in passthrough_keys}
    # Fixed value, not sourced from the cluster config: make the cluster
    # visible to all IAM users on the account.
    job_flow_config["VisibleToAllUsers"] = True

    LOG.info("job_flow_config: %s", job_flow_config)
    return job_flow_config
Attribute | Value |
---|---|
dag_id | exchange-trade-summary-backfill_all_v002 |
duration | 1.564808 |
end_date | 2023-12-12 13:06:58.438038+00:00 |
execution_date | 2023-12-12T13:06:50.253877+00:00 |
executor_config | {} |
generate_command | <function TaskInstance.generate_command at 0x7ffa4fe777b8> |
hostname | airflow-worker-2.airflow-worker.data-eng.svc.cluster.local |
is_premature | False |
job_id | 456060 |
key | ('exchange-trade-summary-backfill_all_v002', 'create_emr_steps', <Pendulum [2023-12-12T13:06:50.253877+00:00]>, 2) |
log | <Logger airflow.task (INFO)> |
log_filepath | /opt/airflow/logs/exchange-trade-summary-backfill_all_v002/create_emr_steps/2023-12-12T13:06:50.253877+00:00.log |
log_url | https://airflow.devel.viooh.net.cn/admin/airflow/log?execution_date=2023-12-12T13%3A06%3A50.253877%2B00%3A00&task_id=create_emr_steps&dag_id=exchange-trade-summary-backfill_all_v002 |
logger | <Logger airflow.task (INFO)> |
mark_success_url | https://airflow.devel.viooh.net.cn/success?task_id=create_emr_steps&dag_id=exchange-trade-summary-backfill_all_v002&execution_date=2023-12-12T13%3A06%3A50.253877%2B00%3A00&upstream=false&downstream=false |
max_tries | 4 |
metadata | MetaData(bind=None) |
next_try_number | 2 |
operator | PythonOperator |
pid | 113 |
pool | default_pool |
pool_slots | 1 |
prev_attempted_tries | 1 |
previous_execution_date_success | 2023-12-07 09:47:55.075579+00:00 |
previous_start_date_success | 2023-12-07 12:08:28.508361+00:00 |
previous_ti | <TaskInstance: exchange-trade-summary-backfill_all_v002.create_emr_steps 2023-12-07 09:47:55.075579+00:00 [success]> |
previous_ti_success | <TaskInstance: exchange-trade-summary-backfill_all_v002.create_emr_steps 2023-12-07 09:47:55.075579+00:00 [success]> |
priority_weight | 3 |
queue | default |
queued_dttm | 2023-12-12 13:06:55.189949+00:00 |
raw | False |
run_as_user | None |
start_date | 2023-12-12 13:06:56.873230+00:00 |
state | success |
task | <Task(PythonOperator): create_emr_steps> |
task_id | create_emr_steps |
test_mode | False |
try_number | 2 |
unixname | airflow |
Attribute | Value |
---|---|
dag | <DAG: exchange-trade-summary-backfill_all_v002> |
dag_id | exchange-trade-summary-backfill_all_v002 |
depends_on_past | False |
deps | {<TIDep(Previous Dagrun State)>, <TIDep(Not Previously Skipped)>, <TIDep(Trigger Rule)>, <TIDep(Not In Retry Period)>} |
do_xcom_push | True |
downstream_list | [<Task(EmrCreateJobFlowOperator): create_cluster_and_add_emr_steps>] |
downstream_task_ids | {'create_cluster_and_add_emr_steps'} |
email | None |
email_on_failure | True |
email_on_retry | True |
end_date | None |
execution_timeout | None |
executor_config | {} |
extra_links | [] |
global_operator_extra_link_dict | {} |
inlets | [] |
lineage_data | None |
log | <Logger airflow.task.operators (INFO)> |
logger | <Logger airflow.task.operators (INFO)> |
max_retry_delay | None |
on_failure_callback | <function task_fail_slack_alert at 0x7ffa3fcd4a60> |
on_retry_callback | None |
on_success_callback | None |
op_args | [] |
op_kwargs | {'master-instance-types': 'm5.2xlarge,m5.4xlarge', 'core-instance-types': 'm5.2xlarge,m5.4xlarge', 'task-instance-types': 'm5.2xlarge,m5.4xlarge', 'core-instance-capacity': 20, 'task-instance-capacity': 0, 'ebs-volume-size': '200', 'cluster-configurations': [{'Classification': 'yarn-site', 'Properties': {'yarn.resourcemanager.am.max-attempts': '1'}}], 'emr-steps': '[\n {\n "step-name": "SupplyDenormalizationJobBackfill",\n "config-json": [\n {"spark.driver.memory":"20g"},\n {"spark.driver.cores":"5"},\n {"spark.serializer":"org.apache.spark.serializer.KryoSerializer"},\n {"spark.task.maxFailures":"20"},\n {"spark.yarn.maxAppAttempts":"10"},\n {"spark.stage.maxConsecutiveAttempts":"20"}\n ],\n "main-class": "com.viooh.smex.supply.denormalization.SupplyDataDenormalizationBackfill",\n "group-id":"com/viooh/smex",\n "artifact": "supply-denormalization",\n "jars": "/usr/lib/spark/external/lib/spark-avro.jar"\n },\n {\n "step-name": "DemandDenormalizationJobBackfill",\n "config-json": [\n {"spark.driver.memory":"20g"},\n {"spark.driver.cores":"5"},\n {"spark.serializer":"org.apache.spark.serializer.KryoSerializer"},\n {"spark.task.maxFailures":"20"},\n {"spark.yarn.maxAppAttempts":"10"},\n {"spark.stage.maxConsecutiveAttempts":"20"}\n ],\n "main-class": "com.viooh.smex.demand.denormalization.DemandDataDenormalizationBackfill",\n "group-id":"com/viooh/smex",\n "artifact": "demand-denormalization",\n "jars": "/usr/lib/spark/external/lib/spark-avro.jar"\n },\n {\n "step-name": "DealsyncDenormalizationJobBackfill",\n "config-json": [\n {"spark.driver.memory":"20g"},\n {"spark.driver.cores":"5"},\n {"spark.serializer":"org.apache.spark.serializer.KryoSerializer"},\n {"spark.task.maxFailures":"20"},\n {"spark.yarn.maxAppAttempts":"10"},\n {"spark.stage.maxConsecutiveAttempts":"20"}\n ],\n "main-class": "com.viooh.smex.dealsync.denormalization.DealSyncDenormalizationBackfill",\n "group-id":"com/viooh/smex",\n "artifact": "dealsync-denormalization"\n },\n {\n 
"step-name": "ModerationDenormalizationJobBackfill",\n "config-json": [\n {"spark.driver.memory":"20g"},\n {"spark.driver.cores":"5"},\n {"spark.serializer":"org.apache.spark.serializer.KryoSerializer"},\n {"spark.task.maxFailures":"20"},\n {"spark.yarn.maxAppAttempts":"10"},\n {"spark.stage.maxConsecutiveAttempts":"20"}\n ],\n "main-class": "com.viooh.smex.moderation.denormalization.ModerationDenormalizationBackfill",\n "group-id":"com/viooh/smex",\n "artifact": "moderation-denormalization",\n "jars": "/usr/lib/spark/external/lib/spark-avro.jar"\n },\n {\n "step-name": "DealDenormalizationJobBackfill",\n "config-json": [\n {"spark.driver.memory":"20g"},\n {"spark.driver.cores":"5"},\n {"spark.serializer":"org.apache.spark.serializer.KryoSerializer"},\n {"spark.task.maxFailures":"20"},\n {"spark.yarn.maxAppAttempts":"10"},\n {"spark.stage.maxConsecutiveAttempts":"20"}\n ],\n "main-class": "com.viooh.smex.deal.denormalization.DealDenormalizationBackfill",\n "group-id":"com/viooh/smex",\n "artifact": "deal-denormalization",\n "jars": "/usr/lib/spark/external/lib/spark-avro.jar"\n },\n {\n "step-name": "ExchangeTradeSummaryJobBackfill",\n "config-json": [\n {"spark.driver.memory":"16g"},\n {"spark.serializer":"org.apache.spark.serializer.KryoSerializer"},\n {"spark.task.maxFailures":"20"},\n {"spark.yarn.maxAppAttempts":"10"},\n {"spark.stage.maxConsecutiveAttempts":"20"}\n ],\n "main-class": "com.viooh.smex.trade.summary.ExchangeTradeSummaryBackfill",\n "group-id":"com/viooh/smex",\n "artifact": "exchange-trade-summary"\n },\n {\n "step-name": "ExportVioohHourLevelReportToDbV2Backfill",\n "jar-location":"s3://elasticmapreduce/libs/script-runner/script-runner.jar",\n "script-file":"scripts/exchange-trade-summary/sspui_reporting/viooh_sspui_hourly_aggr_report_v2_installer.sh",\n "script-args": 
"$ENV#0.0.2#db-import-viooh-hourly-ui-report-v2#backfill-frame#$EXECUTION_DATETIME#$ARTIFACT_BUCKET#scripts/exchange-trade-summary/sspui_reporting#backfill-frame#cn-northwest-1"\n },\n {\n "step-name": "ExportVioohHourLevelReportToDbV2Backfill",\n "jar-location":"s3://elasticmapreduce/libs/script-runner/script-runner.jar",\n "script-file":"scripts/exchange-trade-summary/sspui_reporting/viooh_sspui_hourly_aggr_report_v2_installer.sh",\n "script-args": "$ENV#0.0.2#db-import-viooh-hourly-creative-report#backfill-creative#$EXECUTION_DATETIME#$ARTIFACT_BUCKET#scripts/exchange-trade-summary/sspui_reporting#backfill-creative#cn-northwest-1"\n }\n]', 'cluster-name': 'exchange-trade-summary-backfill_all', 'dag-id': 'exchange-trade-summary-backfill_all_v002', 'schedule_interval': None, 'trigger_dags': []} |
operator_extra_link_dict | {} |
operator_extra_links | () |
outlets | [] |
owner | data.engineers@viooh.com |
params | {} |
pool | default_pool |
pool_slots | 1 |
priority_weight | 1 |
priority_weight_total | 3 |
provide_context | True |
queue | default |
resources | None |
retries | 4 |
retry_delay | 0:05:00 |
retry_exponential_backoff | False |
run_as_user | None |
schedule_interval | None |
shallow_copy_attrs | ('python_callable', 'op_kwargs') |
sla | None |
start_date | 2024-07-14 23:00:00+00:00 |
subdag | None |
task_concurrency | None |
task_id | create_emr_steps |
task_type | PythonOperator |
template_ext | [] |
template_fields | ('templates_dict', 'op_args', 'op_kwargs') |
templates_dict | None |
trigger_rule | all_success |
ui_color | #ffefeb |
ui_fgcolor | #000 |
upstream_list | [] |
upstream_task_ids | set() |
wait_for_downstream | False |
weight_rule | downstream |