Skip to content

Commit

Permalink
A little path vs str cleanup
Browse files · Browse the repository at this point in the history
  • Loading branch information
jhkennedy committed Feb 17, 2024
1 parent c68e634 commit b036811
Showing 1 changed file with 10 additions and 11 deletions.
21 changes: 10 additions & 11 deletions earthaccess/store.py
Original file line number Diff line number Diff line change
Expand Up @@ -442,7 +442,7 @@ def _open_urls(
def get(
self,
granules: Union[List[DataGranule], List[str]],
local_path: Optional[str] = None,
local_path: Optional[Path] = None,
provider: Optional[str] = None,
threads: int = 8,
) -> List[str]:
Expand All @@ -465,11 +465,10 @@ def get(
List of downloaded files
"""
if local_path is None:
local_path = Path(
".",
"data",
f"{datetime.datetime.today().strftime('%Y-%m-%d')}-{uuid4().hex[:6]}",
)
today = datetime.datetime.today().strftime("%Y-%m-%d")
uuid = uuid4().hex[:6]
local_path = Path.cwd() / "data" / f"{today}-{uuid}"

if len(granules):
files = self._get(granules, local_path, provider, threads)
return files
Expand Down Expand Up @@ -578,9 +577,9 @@ def _get_granules(
else:
# if the data are cloud-based, but we are not in AWS,
# it will be downloaded as if it was on prem
return self._download_onprem_granules(data_links, str(local_path), threads)
return self._download_onprem_granules(data_links, local_path, threads)

def _download_file(self, url: str, directory: str) -> str:
def _download_file(self, url: str, directory: Path) -> str:
"""Download a single file from an on-prem location, a DAAC data center.
Parameters:
Expand All @@ -594,7 +593,7 @@ def _download_file(self, url: str, directory: str) -> str:
if "opendap" in url and url.endswith(".html"):
url = url.replace(".html", "")
local_filename = url.split("/")[-1]
path = Path(directory) / Path(local_filename)
path = directory / Path(local_filename)
if not path.exists():
try:
session = self.auth.get_session()
Expand All @@ -617,7 +616,7 @@ def _download_file(self, url: str, directory: str) -> str:
return str(path)

def _download_onprem_granules(
self, urls: List[str], directory: str, threads: int = 8
self, urls: List[str], directory: Path, threads: int = 8
) -> List[Any]:
"""Downloads a list of URLS into the data directory.
Expand All @@ -636,7 +635,7 @@ def _download_onprem_granules(
raise ValueError(
"We need to be logged into NASA EDL in order to download data granules"
)
Path(directory).mkdir(parents=True, exist_ok=True)
directory.mkdir(parents=True, exist_ok=True)

arguments = [(url, directory) for url in urls]
results = pqdm(
Expand Down

0 comments on commit b036811

Please sign in to comment.