
Commit

added linting for docstrings, added D1,D205,D417,D401 to the ignore list and added placeholders.
Sherwin-14 committed May 31, 2024
1 parent d3ad4c5 commit 05754cd
Showing 12 changed files with 77 additions and 99 deletions.
3 changes: 1 addition & 2 deletions earthaccess/__init__.py
@@ -61,8 +61,7 @@


def __getattr__(name): # type: ignore
"""
Module-level getattr to handle automatic authentication when accessing
"""Module-level getattr to handle automatic authentication when accessing
`earthaccess.__auth__` and `earthaccess.__store__`.
Other unhandled attributes raise as `AttributeError` as expected.
2 changes: 1 addition & 1 deletion earthaccess/api.py
@@ -268,7 +268,7 @@ def collection_query() -> CollectionQuery:


def granule_query() -> GranuleQuery:
"""Returns a query builder instance for data granules
"""Returns a query builder instance for data granules.
Returns:
a query builder instance for data granules.
3 changes: 1 addition & 2 deletions earthaccess/auth.py
@@ -25,8 +25,7 @@


class SessionWithHeaderRedirection(requests.Session):
"""
Requests removes auth headers if the redirect happens outside the
"""Requests removes auth headers if the redirect happens outside the
original req domain.
"""

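For context, the behavior described in this docstring is typically implemented in requests-based clients by overriding `requests.Session.rebuild_auth`, which requests calls while resolving redirects. A minimal sketch under that assumption follows; the `AUTH_HOST` value and the exact hostname check are illustrative and not taken from this diff.

```python
# Illustrative sketch only, not the earthaccess implementation from this commit.
from urllib.parse import urlparse

import requests


class SessionWithHeaderRedirection(requests.Session):
    """Drop the Authorization header when a redirect leaves the original host."""

    AUTH_HOST = "urs.earthdata.nasa.gov"  # assumed trusted host, for illustration

    def rebuild_auth(self, prepared_request, response):
        # requests calls this hook for every redirect it follows.
        headers = prepared_request.headers
        if "Authorization" in headers:
            original_host = urlparse(response.request.url).hostname
            redirect_host = urlparse(prepared_request.url).hostname
            # Keep credentials only for same-host redirects or the trusted auth host.
            if redirect_host != original_host and redirect_host != self.AUTH_HOST:
                del headers["Authorization"]
```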
2 changes: 1 addition & 1 deletion earthaccess/formatters.py
@@ -7,7 +7,7 @@


def _load_static_files() -> List[str]:
"""Load styles"""
"""Load styles."""
return [
importlib_resources.files("earthaccess.css").joinpath(fname).read_text("utf8")
for fname in STATIC_FILES
77 changes: 39 additions & 38 deletions earthaccess/results.py
@@ -94,9 +94,10 @@ def summary(self) -> Dict[str, Any]:
return summary_dict

def get_umm(self, umm_field: str) -> Union[str, Dict[str, Any]]:
"""
"""Placeholder.
Parameters:
umm_field: Valid UMM item, i.e. `TemporalExtent`
umm_field: Valid UMM item, i.e. `TemporalExtent`.
Returns:
The value of a given field inside the UMM (Unified Metadata Model).
@@ -106,17 +107,16 @@ def get_umm(self, umm_field: str) -> Union[str, Dict[str, Any]]:
return ""

def concept_id(self) -> str:
"""
Returns:
A collection's `concept_id`.
This id is the most relevant search field on granule queries.
"""Placeholder.
Returns: A collection's `concept_id`.This id is the most relevant search field on granule queries.
"""
return self["meta"]["concept-id"]

def data_type(self) -> str:
"""
Returns:
The collection data type, i.e. HDF5, CSV etc., if available.
"""Placeholder.
Returns: The collection data type, i.e. HDF5, CSV etc., if available.
"""
if "ArchiveAndDistributionInformation" in self["umm"]:
return str(
@@ -127,46 +127,45 @@ def data_type(self) -> str:
return ""

def version(self) -> str:
"""
Returns:
The collection's version.
"""Placeholder.
Returns: The collection's version.
"""
if "Version" in self["umm"]:
return self["umm"]["Version"]
return ""

def abstract(self) -> str:
"""
Returns:
The abstract of a collection
"""Placeholder.
Returns:The abstract of a collection.
"""
if "Abstract" in self["umm"]:
return self["umm"]["Abstract"]
return ""

def landing_page(self) -> str:
"""
Returns:
The first landing page for the collection (can be many), if available.
"""Placeholder.
Returns: The first landing page for the collection (can be many), if available.
"""
links = self._filter_related_links("LANDING PAGE")
if len(links) > 0:
return links[0]
return ""

def get_data(self) -> List[str]:
"""
Returns:
The GET DATA links (usually a landing page link, a DAAC portal, or an FTP location).
"""Placeholder.
Returns: The GET DATA links (usually a landing page link, a DAAC portal, or an FTP location).
"""
links = self._filter_related_links("GET DATA")
return links

def s3_bucket(self) -> Dict[str, Any]:
"""
Returns:
The S3 bucket information if the collection has it.
(**cloud hosted collections only**)
"""Placeholder.
Returns: The S3 bucket information if the collection has it.(**cloud hosted collections only**).
"""
if "DirectDistributionInformation" in self["umm"]:
return self["umm"]["DirectDistributionInformation"]
@@ -214,9 +213,9 @@ def __init__(
self.render_dict = self._filter_fields_(fields)

def __repr__(self) -> str:
"""
Returns:
A basic representation of a data granule.
"""Placeholder.
Returns: A basic representation of a data granule.
"""
data_links = [link for link in self.data_links()]
rep_str = f"""
@@ -229,9 +228,9 @@ def __repr__(self) -> str:
return rep_str

def _repr_html_(self) -> str:
"""
Returns:
A rich representation for a data granule if we are in a Jupyter notebook.
"""Placeholder.
Returns: A rich representation for a data granule if we are in a Jupyter notebook.
"""
granule_html_repr = _repr_granule_html(self)
return granule_html_repr
@@ -243,9 +242,9 @@ def get_s3_credentials_endpoint(self) -> Union[str, None]:
return None

def size(self) -> float:
"""
Returns:
The total size for the granule in MB.
"""Placeholder.
Returns: The total size for the granule in MB.
"""
try:
data_granule = self["umm"]["DataGranule"]
@@ -284,7 +283,9 @@ def _derive_s3_link(self, links: List[str]) -> List[str]:
def data_links(
self, access: Optional[str] = None, in_region: bool = False
) -> List[str]:
"""Returns the data links from a granule.
"""Placeholder.
Returns the data links from a granule.
Parameters:
access: direct or external.
@@ -325,9 +326,9 @@ def data_links(
return https_links

def dataviz_links(self) -> List[str]:
"""
Returns:
The data visualization links, usually the browse images.
"""Placeholder.
Returns: The data visualization links, usually the browse images.
"""
links = self._filter_related_links("GET RELATED VISUALIZATION")
return links
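For context, the `Placeholder.` summaries added throughout results.py appear to exist so that each multi-line docstring opens with a one-line summary on the same line as the opening quotes, as the Google pydocstyle convention expects when the sections that follow carry the real content. A minimal before/after sketch of that docstring shape, using a hypothetical class and method names:

```python
# Hypothetical sketch of the docstring shape before and after this commit.
class ExampleCollection:
    def concept_id_before(self) -> str:
        """
        Returns:
            A collection's `concept_id`.
        """
        return ""

    def concept_id_after(self) -> str:
        """Placeholder.

        Returns: A collection's `concept_id`.
        """
        return ""
```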
13 changes: 3 additions & 10 deletions earthaccess/search.py
@@ -33,8 +33,7 @@ def get_results(
query: Union[CollectionQuery, GranuleQuery],
limit: int = 2000,
) -> List[Any]:
"""
Get all results up to some limit, even if spanning multiple pages.
"""Get all results up to some limit, even if spanning multiple pages.
???+ Tip
The default page size is 2000, if the supplied value is greater then the
@@ -50,7 +49,6 @@
Raises:
RuntimeError: The CMR query failed.
"""

page_size = min(limit, 2000)
url = query._build_url()

@@ -79,8 +77,8 @@


class DataCollections(CollectionQuery):
"""
???+ Info
"""
???+ Info "Placeholder"
The DataCollection class queries against
https://cmr.earthdata.nasa.gov/search/collections.umm_json,
the response has to be in umm_json to use the result classes.
@@ -156,7 +154,6 @@ def get(self, limit: int = 2000) -> List[DataCollection]:
Raises:
RuntimeError: The CMR query failed.
"""

return [
DataCollection(collection, self._fields)
for collection in get_results(self.session, self, limit)
@@ -442,7 +439,6 @@ def temporal(
object; or `date_from` and `date_to` are both datetime objects (or
parsable as such) and `date_from` is after `date_to`.
"""

return super().temporal(date_from, date_to, exclude_boundary)


@@ -481,7 +477,6 @@ def hits(self) -> int:
Raises:
RuntimeError: The CMR query failed.
"""

url = self._build_url()

response = self.session.get(url, headers=self.headers, params={"page_size": 0})
@@ -842,7 +837,6 @@ def temporal(
object; or `date_from` and `date_to` are both datetime objects (or
parsable as such) and `date_from` is after `date_to`.
"""

return super().temporal(date_from, date_to, exclude_boundary)

@override
@@ -969,7 +963,6 @@ def doi(self, doi: str) -> Self:
Raises:
RuntimeError: The CMR query to get the collection for the DOI fails.
"""

# TODO consider deferring this query until the search is executed
collection = DataCollections().doi(doi).get()

7 changes: 6 additions & 1 deletion pyproject.toml
@@ -125,7 +125,12 @@ line-length = 88
src = ["earthaccess", "stubs", "tests"]

[tool.ruff.lint]
extend-select = ["I", "T20"]
extend-select = ["I", "T20", "D"]
ignore = ["D1", "D205", "D401", "D417"]

[tool.ruff.lint.pydocstyle]
convention = "google"


[tool.ruff.lint.isort]
combine-as-imports = true
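The codes added to `ignore` correspond, in Ruff's pydocstyle rule set, to D1 (the missing-docstring group), D205 (one blank line required between summary line and description), D401 (first line should be in imperative mood), and D417 (missing argument descriptions). With those ignored and `convention = "google"`, a docstring along the following lines should pass the remaining checks; this is a hedged sketch, not code from the repository.

```python
# Hedged example of a Google-style docstring compatible with the enabled D rules.
def search_granules(short_name: str, limit: int = 2000) -> list:
    """Search CMR for granules belonging to a collection.

    Parameters:
        short_name: Collection short name, e.g. `ATL06`.
        limit: Maximum number of granules to return.

    Returns:
        A list of matching granule records.
    """
    return []
```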
9 changes: 3 additions & 6 deletions tests/integration/test_cloud_download.py
@@ -72,9 +72,8 @@


def get_sample_granules(granules, sample_size, max_granule_size):
"""
returns a list with sample granules and their size in MB if
the total size is less than the max_granule_size
"""Returns a list with sample granules and their size in MB if
the total size is less than the max_granule_size.
"""
files_to_download = []
total_size = 0
@@ -98,9 +97,7 @@ def get_sample_granules(granules, sample_size, max_granule_size):

@pytest.mark.parametrize("daac", daac_list)
def test_earthaccess_can_download_cloud_collection_granules(daac):
"""
Tests that we can download cloud collections using HTTPS links
"""
"""Tests that we can download cloud collections using HTTPS links."""
daac_shortname = daac["short_name"]
collections_count = daac["collections_count"]
collections_sample_size = daac["collections_sample_size"]
9 changes: 3 additions & 6 deletions tests/integration/test_cloud_open.py
@@ -71,9 +71,8 @@


def get_sample_granules(granules, sample_size, max_granule_size):
"""
returns a list with sample granules and their size in MB if
the total size is less than the max_granule_size
"""Returns a list with sample granules and their size in MB if
the total size is less than the max_granule_size.
"""
files_to_download = []
total_size = 0
@@ -104,9 +103,7 @@ def supported_collection(data_links):

@pytest.mark.parametrize("daac", daacs_list)
def test_earthaccess_can_open_onprem_collection_granules(daac):
"""
Tests that we can download cloud collections using HTTPS links
"""
"""Tests that we can download cloud collections using HTTPS links."""
daac_shortname = daac["short_name"]
collections_count = daac["collections_count"]
collections_sample_size = daac["collections_sample_size"]
9 changes: 3 additions & 6 deletions tests/integration/test_onprem_download.py
@@ -64,9 +64,8 @@


def get_sample_granules(granules, sample_size, max_granule_size):
"""
returns a list with sample granules and their size in MB if
the total size is less than the max_granule_size
"""Returns a list with sample granules and their size in MB if
the total size is less than the max_granule_size.
"""
files_to_download = []
total_size = 0
@@ -97,9 +96,7 @@ def supported_collection(data_links):

@pytest.mark.parametrize("daac", daacs_list)
def test_earthaccess_can_download_onprem_collection_granules(daac):
"""
Tests that we can download cloud collections using HTTPS links
"""
"""Tests that we can download cloud collections using HTTPS links."""
daac_shortname = daac["short_name"]
collections_count = daac["collections_count"]
collections_sample_size = daac["collections_sample_size"]
9 changes: 3 additions & 6 deletions tests/integration/test_onprem_open.py
@@ -63,9 +63,8 @@


def get_sample_granules(granules, sample_size, max_granule_size):
"""
returns a list with sample granules and their size in MB if
the total size is less than the max_granule_size
"""Returns a list with sample granules and their size in MB if
the total size is less than the max_granule_size.
"""
files_to_download = []
total_size = 0
@@ -96,9 +95,7 @@ def supported_collection(data_links):

@pytest.mark.parametrize("daac", daacs_list)
def test_earthaccess_can_open_onprem_collection_granules(daac):
"""
Tests that we can download cloud collections using HTTPS links
"""
"""Tests that we can download cloud collections using HTTPS links."""
daac_shortname = daac["short_name"]
collections_count = daac["collections_count"]
collections_sample_size = daac["collections_sample_size"]
