
Commit 40e1634

replaced deprecated execution_date with logical_date

1 parent a1c9a71

File tree

3 files changed: +22 -25 lines changed

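Context for the change: Airflow 2.2 renamed `execution_date` to `logical_date`; the old context key and the `TaskInstance.execution_date` attribute still work but emit deprecation warnings. A minimal sketch of the pattern these DAGs move to, with an illustrative DAG id and schedule (not from this repo):

```python
from datetime import datetime

from airflow.decorators import dag, task
from airflow.operators.python import get_current_context


@dag(schedule="@hourly", start_date=datetime(2024, 1, 1), catchup=False)
def logical_date_example():  # illustrative DAG, not part of this commit
    @task()
    def show_run_date():
        context = get_current_context()
        # Current API: one dict lookup, no task-instance indirection
        logical_date = context["logical_date"]
        # Deprecated: ti = context["ti"]; ti.execution_date
        print(logical_date.isoformat())

    show_run_date()


logical_date_example()
```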

dags/cumulus/abrfc_qpf_06h.py (+12 -12)
```diff
@@ -55,6 +55,7 @@ def qpf_filenames(edate):
     default_args=default_args,
     schedule="8 */6 * * *",
     tags=["cumulus", "precip", "QPF", "ABRFC"],
+    doc_md=__doc__,
     max_active_runs=2,
     max_active_tasks=4,
 )
@@ -67,25 +68,25 @@ def cumulus_abrfc_qpf_06h():
 
     """
     Because this is a forecast product, we don't want to wait to get the product based
-    on the last time period, but rather based on the current. This is why the execution
+    on the last time period, but rather based on the current. This is why the logical
     date is being shifted forward by 6 hours.
     """
 
     @task()
     def generate_filenames():
-        context = get_current_context()
-        ti = context["ti"]
-        execution_date = ti.execution_date + timedelta(hours=6)
+        # Overwrite the logical date to be 6 hours in the future
+        logical_date = get_current_context()["logical_date"] + timedelta(hours=6)
+
         # This task generates the list of filenames
-        return list(qpf_filenames(execution_date))
+        return list(qpf_filenames(logical_date))
 
     ###########################################################################
     @task()
     def check_first_file():
         context = get_current_context()
-        ti = context["ti"]
-        execution_date = ti.execution_date + timedelta(hours=6)
-        filename = next(qpf_filenames(execution_date))
+        logical_date = context["logical_date"] + timedelta(hours=6)
+        ti = context["ti"]  # task instance
+        filename = next(qpf_filenames(logical_date))
         url = f"{URL_ROOT}/{filename}"
 
         try:
@@ -97,7 +98,7 @@ def check_first_file():
         except Exception as e:
             # If we don't always get a product for this time period
             # AND we've reached the try limit, skip the task instead of failing for better metrics analysis
-            if execution_date.hour not in [0, 12, 18] and ti.try_number >= ti.max_tries:
+            if logical_date.hour not in [0, 12, 18] and ti.try_number >= ti.max_tries:
                 raise AirflowSkipException(
                     f"Skipping task due to no files available and max_tries ({ti.max_tries}) reached: {e}"
                 )
@@ -109,8 +110,7 @@
     def download_file(filename):
         print(f"Downloading {filename}")
         context = get_current_context()
-        ti = context["ti"]
-        execution_date = ti.execution_date + timedelta(hours=6)
+        logical_date = context["logical_date"] + timedelta(hours=6)
 
         # Name the dynamic task instead of leaving the index number
         context["task_id"] = filename
@@ -119,7 +119,7 @@ def download_file(filename):
         s3_key = f"{key_prefix}/{PRODUCT_SLUG}/{filename}"
         result = trigger_download(url=url, s3_bucket=cumulus.S3_BUCKET, s3_key=s3_key)
         return {
-            "execution": execution_date.isoformat(),
+            "execution": logical_date.isoformat(),
             "url": url,
             "s3_key": s3_key,
             "s3_bucket": cumulus.S3_BUCKET,
```

dags/cumulus/serfc_qpe.py (+3 -4)
```diff
@@ -65,15 +65,14 @@ def cumulus_acq_serfc():
     @task()
     def download_serfc():
         context = get_current_context()
-        ti = context["ti"]
-        execution_date = ti.execution_date
-        filename = f'xmrg{execution_date.strftime("%m%d%Y%H")}z.grb.gz'
+        logical_date = context["logical_date"]
+        filename = f'xmrg{logical_date.strftime("%m%d%Y%H")}z.grb.gz'
         url = f"{base_url}/{filename}"
         s3_key = f"{key_prefix}/{slug}/{filename}"
         result = trigger_download(url=url, s3_bucket=s3_bucket, s3_key=s3_key)
         return [
             {
-                "execution": execution_date.isoformat(),
+                "execution": logical_date.isoformat(),
                 "url": url,
                 "s3_key": s3_key,
                 "s3_bucket": s3_bucket,
```

dags/cumulus/serfc_qpf.py (+7 -9)
```diff
@@ -89,17 +89,16 @@ def cumulus_acq_serfc():
     @task()
     def generate_filenames():
         context = get_current_context()
-        ti = context["ti"]
-        execution_date = ti.execution_date + timedelta(hours=6)
+        logical_date = context["logical_date"] + timedelta(hours=6)
         # This task generates the list of filenames
-        return list(alr_qpf_filenames(execution_date))
+        return list(alr_qpf_filenames(logical_date))
 
     @task()
     def check_first_file():
         context = get_current_context()
         ti = context["ti"]
-        execution_date = ti.execution_date + timedelta(hours=6)
-        filename = next(alr_qpf_filenames(execution_date))
+        logical_date = context["logical_date"] + timedelta(hours=6)
+        filename = next(alr_qpf_filenames(logical_date))
         url = f"{base_url}/{filename}"
 
         try:
@@ -111,7 +110,7 @@ def check_first_file():
         except Exception as e:
             # If we don't always get a product for this time period
             # AND we've reached the try limit, skip the task instead of failing for better metrics analysis
-            if execution_date.hour not in [0, 12] and ti.try_number >= ti.max_tries:
+            if logical_date.hour not in [0, 12] and ti.try_number >= ti.max_tries:
                 raise AirflowSkipException(
                     f"Skipping task due to no files available and max_tries ({ti.max_tries}) reached: {e}"
                 )
@@ -121,8 +120,7 @@
     @task(map_index_template="{{ task_id }}")
     def download_file(filename):
         context = get_current_context()
-        ti = context["ti"]
-        execution_date = ti.execution_date + timedelta(hours=6)
+        logical_date = context["logical_date"] + timedelta(hours=6)
 
         # Name the dynamic task instead of leaving the index number
         context["task_id"] = filename
@@ -131,7 +129,7 @@
         s3_key = f"{key_prefix}/{slug}/{filename}"
         result = trigger_download(url=url, s3_bucket=s3_bucket, s3_key=s3_key)
         return {
-            "execution": execution_date.isoformat(),
+            "execution": logical_date.isoformat(),
             "url": url,
             "s3_key": s3_key,
             "s3_bucket": s3_bucket,
```
