[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
pre-commit-ci[bot] committed Nov 14, 2024
1 parent 195367a commit 61a006c
Showing 3 changed files with 9 additions and 27 deletions.
12 changes: 3 additions & 9 deletions batcher/harmony/service_adapter.py
@@ -70,9 +70,7 @@ def invoke(self):
             # Message-only support is being depreciated in Harmony, so we should expect to
             # only see requests with catalogs when invoked with a newer Harmony instance
             # https://github.com/nasa/harmony-service-lib-py/blob/21bcfbda17caf626fb14d2ac4f8673be9726b549/harmony/adapter.py#L71
-            raise RuntimeError(
-                "Invoking Batchee without a STAC catalog is not supported"
-            )
+            raise RuntimeError("Invoking Batchee without a STAC catalog is not supported")
 
         return self.message, self.process_catalog(self.catalog)

@@ -110,9 +108,7 @@ def process_catalog(self, catalog: pystac.Catalog) -> list[pystac.Catalog]:
         # and each Catalog holds multiple Items (which represent each granule).
         catalogs = []
         for batch_id, batch_items in grouped.items():
-            self.logger.info(
-                f"constructing new pystac.Catalog for batch_id==={batch_id}."
-            )
+            self.logger.info(f"constructing new pystac.Catalog for batch_id==={batch_id}.")
             # Initialize a new, empty Catalog
             batch_catalog = catalog.clone()
             batch_catalog.id = str(uuid4())
@@ -139,9 +135,7 @@ def process_catalog(self, catalog: pystac.Catalog) -> list[pystac.Catalog]:
             )
             batch_catalog.add_item(output_item)
 
-            self.logger.info(
-                "STAC catalog creation for batch_id==={batch_id} complete."
-            )
+            self.logger.info("STAC catalog creation for batch_id==={batch_id} complete.")
             catalogs.append(batch_catalog)
 
         self.logger.info("All STAC catalogs are complete.")
16 changes: 4 additions & 12 deletions batcher/tempo_filename_parser.py
@@ -45,9 +45,7 @@
 )
 
 
-def get_batch_indices(
-    filenames: list, logger: logging.Logger = default_logger
-) -> list[int]:
+def get_batch_indices(filenames: list, logger: logging.Logger = default_logger) -> list[int]:
     """
     Returns
     -------
@@ -62,14 +60,10 @@ def get_batch_indices(
         matches = tempo_granule_filename_pattern.match(name)
         if matches:
             match_dict = matches.groupdict()
-            day_and_scans.append(
-                (match_dict["day_in_granule"], match_dict["daily_scan_id"])
-            )
+            day_and_scans.append((match_dict["day_in_granule"], match_dict["daily_scan_id"]))
 
     # Unique day-scans are determined (while keeping the same order). Each will be its own batch.
-    unique_day_scans: list[tuple[str, str]] = sorted(
-        set(day_and_scans), key=day_and_scans.index
-    )
+    unique_day_scans: list[tuple[str, str]] = sorted(set(day_and_scans), key=day_and_scans.index)
 
     logger.info(f"unique_day_scans==={unique_day_scans}.")
 
@@ -109,9 +103,7 @@ def main() -> list[list[str]]:
     input_filenames = args.file_names
 
     batch_indices = get_batch_indices(input_filenames)
-    unique_category_indices: list[int] = sorted(
-        set(batch_indices), key=batch_indices.index
-    )
+    unique_category_indices: list[int] = sorted(set(batch_indices), key=batch_indices.index)
     logging.info(f"batch_indices = {batch_indices}")
 
     # --- Construct a STAC object based on the batch indices ---
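
The joined lines in this file preserve the batching logic end to end: each filename reduces to a (day_in_granule, daily_scan_id) pair, duplicate pairs are dropped while keeping first-seen order, and each filename is then assigned the index of its pair. Here is a self-contained sketch of that idea, using made-up day/scan pairs instead of real TEMPO filenames; the final index-assignment step is inferred from the list[int] return type rather than copied from the source.

# Made-up (day_in_granule, daily_scan_id) pairs standing in for parsed TEMPO filenames.
day_and_scans = [
    ("20240601", "S012"),
    ("20240601", "S012"),
    ("20240601", "S013"),
    ("20240602", "S001"),
]

# Unique day-scans, keeping first-seen order -- the same sorted/set/index idiom as above.
unique_day_scans = sorted(set(day_and_scans), key=day_and_scans.index)

# One batch index per input filename, pointing at its day-scan's position in the unique list.
batch_indices = [unique_day_scans.index(day_scan) for day_scan in day_and_scans]

print(unique_day_scans)  # [('20240601', 'S012'), ('20240601', 'S013'), ('20240602', 'S001')]
print(batch_indices)     # [0, 0, 1, 2]
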
8 changes: 2 additions & 6 deletions tests/test_harmony_adapter.py
@@ -63,9 +63,7 @@ def test_service_invoke(self, temp_output_dir):
 
         for item_meta in out_catalog["links"]:
             if item_meta["rel"] == "item":
-                item_path = temp_output_dir.joinpath(
-                    item_meta["href"]
-                ).resolve()
+                item_path = temp_output_dir.joinpath(item_meta["href"]).resolve()
 
                 # -- Item Verification --
                 item = json.loads(item_path.read_text())
@@ -81,9 +79,7 @@ def test_service_invoke(self, temp_output_dir):
                 assert data["type"] == "application/x-netcdf4"
                 assert data["roles"] == ["data"]
 
-                batched_files[batch_index].append(
-                    Path(urlsplit(data["href"]).path).stem
-                )
+                batched_files[batch_index].append(Path(urlsplit(data["href"]).path).stem)
 
         # -- batch file list verification --
         files_dict = {
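
The single joined line here relies on a small but handy idiom: urlsplit drops any query string from the asset href, and Path.stem reduces the remaining path to a bare filename for comparison against the expected batches. A standalone example with a placeholder URL:

from pathlib import Path
from urllib.parse import urlsplit

# Placeholder href, not one of the test fixtures.
href = "https://example.com/staging/TEMPO_granule_001.nc?X-Amz-Signature=abc"
stem = Path(urlsplit(href).path).stem
print(stem)  # TEMPO_granule_001
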