Skip to content

Commit

Permalink
Linting to src files
Browse files Browse the repository at this point in the history
  • Loading branch information
adehecq committed Sep 11, 2024
1 parent 1fd2fd7 commit f7a5c17
Show file tree
Hide file tree
Showing 5 changed files with 24 additions and 23 deletions.
15 changes: 8 additions & 7 deletions usgsxplore/api.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
"""
Description: module contain the API class to download and interact with the USGS api (https://m2m.cr.usgs.gov/api/docs/json/).
This class is highly inspirate from https://github.com/yannforget/landsatxplore.
Description: module contains the API class to download and interact with the USGS api
(https://m2m.cr.usgs.gov/api/docs/json/).
This class is highly inspired by https://github.com/yannforget/landsatxplore.
Last modified: 2024
Author: Luc Godin
Expand Down Expand Up @@ -94,7 +95,7 @@ def login(self, username: str, password: str | None = None, token: str | None =
:param password: EarthExplorer password.
:param token: EarthExplorer token.
:raise APIInvalidParameters: if password and token are None.
:raise USGSAuthenticationError: If the authentification failed
:raise USGSAuthenticationError: If the authentication failed
"""
if password is None and token is None:
raise APIInvalidParameters("Either password or token need to be given.")
Expand Down Expand Up @@ -247,16 +248,16 @@ def batch_search(
batch_size: int = 10000,
) -> Generator[list[dict], None, None]:
"""
Return a Generator with each element is a list of 10000 (batch_size) scenes informations.
Return a Generator in which each element is a list of 10000 (batch_size) scenes information.
The scenes are filtered with the scene_filter given.
:param dataset: Alias dataset
:param scene_filter: Filter for the scene you want
:param max_results: max scenes wanted, if None return all scenes found
:param metadata_type: identifies wich metadata to return (full|summary|None)
:param metadata_type: identifies which metadata to return (full|summary|None)
:param use_tqdm: if True display a progress bar of the search
:param batch_size: number of maxResults of each scene-search
:return: generator of scenes informations batch
:return: generator of scenes information batches
"""
starting_number = 1
if use_tqdm:
Expand Down Expand Up @@ -301,7 +302,7 @@ def scene_search(
:param scene_filter: Filter for the scene you want
:param max_results: Max. number of results. Defaults to 100.
:param starting_number: starting number of the search. Default 1
:param metadata_type: identifies wich metadata to return (full|summary|None)
:param metadata_type: identifies which metadata to return (full|summary|None)
:return: Result of the scene-search request.
"""
# we compile the metadataFilter if it exist to format it for the API
Expand Down
8 changes: 4 additions & 4 deletions usgsxplore/errors.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
"""
Description: module contain differents Error class
Description: module contains different Error classes
Last modified: 2024
Author: Luc Godin
Expand Down Expand Up @@ -36,7 +36,7 @@ class USGSInvalidDataset(Exception):


class APIInvalidParameters(Exception):
"""Invalid paramaters for the API class"""
"""Invalid parameters for the API class"""


class MetadataFilterError(Exception):
Expand All @@ -48,7 +48,7 @@ class FilterMetadataValueError(Exception):


class FilterFieldError(Exception):
"""Error raise when the field value of a filter is incorect"""
"""Error raise when the field value of a filter is incorrect"""

def __init__(self, field: str, field_ids: list[str], field_labels: list[str], field_sql: list[str]) -> None:
self.df = pd.DataFrame({"field_id": field_ids, "field_label": field_labels, "sql_field": field_sql})
Expand All @@ -59,7 +59,7 @@ def __str__(self) -> str:


class FilterValueError(Exception):
"""Error raise when the value of a filter is incorect"""
"""Error raise when the value of a filter is incorrect"""

def __init__(self, value: str, values: list[str], value_labels: list[str]) -> None:
self.df = pd.DataFrame(
Expand Down
12 changes: 6 additions & 6 deletions usgsxplore/filter.py
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,7 @@ def from_file(cls, file_path: str):
if gdf.crs != "EPSG:4326":
gdf.to_crs(epsg=4326, inplace=True)

# create a combine of all geometry into a big one and crate instance with it
# combine all geometries into a single one and create an instance with it
shape = mapping(unary_union(gdf.geometry))
return cls(shape)

Expand Down Expand Up @@ -160,7 +160,7 @@ class MetadataFilter(dict):
def from_str(cls, str_repr: str) -> "MetadataFilter":
"""
Create an instance of MetadataFilter with a string representation.
Exemple of string representation : "field1=value1 & field2=value2"
Example of string representation : "field1=value1 & field2=value2"
:param str_repr: string representation of the filter
"""
Expand Down Expand Up @@ -189,7 +189,7 @@ def __and__(self, other):
:param other: MetadataFilter or any but the and is defined only for MetadataFilter
:return: MetadataFilter and
### Exemple
### Example
```
# f is a MetadataAnd
f = MetadataValue("camera_resol","6") & MetadataValue("camera","H")
Expand All @@ -209,7 +209,7 @@ def __or__(self, other):
:param other: MetadataFilter or any but the or is defined only for MetadataFilter
:return: MetadataFilter or
### Exemple
### Example
```
# f is a MetadataOr
f = MetadataValue("camera_resol","6") | MetadataValue("camera","H")
Expand All @@ -227,7 +227,7 @@ class MetadataValue(MetadataFilter):
"""
Metadata value
### Exemple
### Example
```{python}
# when all filter are compiled f1 == f2 == f3
f1 = MetadataValue("5e839ff8388465fa","6")
Expand All @@ -244,7 +244,7 @@ def __init__(self, field: str, value: str):
:param value: value or value label for the filter
### Exemple
### Example
```{python}
# when all filter are compiled f1 == f2 == f3
f1 = MetadataValue("5e839ff8388465fa","6")
Expand Down
8 changes: 4 additions & 4 deletions usgsxplore/scenes_downloader.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
"""
Description: module contain 2 classes usefull for the downloading of scenes: ScenesDownloader and Product
Description: module contains 2 classes useful for the downloading of scenes: ScenesDownloader and Product
Last modified: 2024
Expand Down Expand Up @@ -34,7 +34,7 @@ def __init__(
:param output_dir: path of the output directory
:param max_thread: max number of thread used for the downloading
:param pbar_type: way to display progress bar (0: no pbar, 1: one pbar, 2: pbar for each scene)
:param overwrite: if false don't download images wich are already in the output directory
:param overwrite: if false don't download images which are already in the output directory
"""
# here we do list(set(...)) to remove duplicate ids
self.df = pd.DataFrame({"entity_id": list(set(entity_ids))})
Expand Down Expand Up @@ -72,7 +72,7 @@ def set_download_options(self, download_options: list[dict]) -> None:

def get_downloads(self) -> list[dict]:
"""
Return a list of dict formated for M2M api download-request.
Return a list of dicts formatted for the M2M api download-request.
Each dict represents a product contained in self.download
:return: downloads
Expand Down Expand Up @@ -110,7 +110,7 @@ def download(self, entity_id: str, url: str) -> None:

def _download_worker(self, entity_id: str) -> None:
"""
Donwload the images with the url in the dataframe associate to the entity_id given.
Download the images with the url in the dataframe associated with the given entity_id.
Every 5 Mo update the progress in the dataframe and update progress bar.
This method is designed to be in a thread
Expand Down
4 changes: 2 additions & 2 deletions usgsxplore/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ def save_in_gfile(gdf: gpd.GeoDataFrame, vector_file: str = "scenes.gpkg") -> No
"""
# save the geodataframe in a geospatial file
if vector_file.endswith(".shp"):
# here we ingore warnings that tell us all field are truncated
# here we ignore warnings that tell us all fields are truncated
with warnings.catch_warnings():
warnings.filterwarnings("ignore", message=r"Normalized/laundered field name: '.+' to '.+'")
gdf.to_file(vector_file)
Expand Down Expand Up @@ -191,7 +191,7 @@ def update_gdf_browse(gdf: gpd.GeoDataFrame, output_dir: str) -> gpd.GeoDataFram

def format_table(data: list[list]) -> str:
"""
Return a string reprentation of a 2 dimensional table
Return a string representation of a 2 dimensional table
:param data: 2 dimensional table
:return: string representation
Expand Down

0 comments on commit f7a5c17

Please sign in to comment.