Replaced get_s3fs_session with get_s3_filesystem
Sherwin-14 committed Jul 19, 2024
1 parent 7a79ea5 commit 1686e9c
Showing 5 changed files with 17 additions and 15 deletions.
2 changes: 1 addition & 1 deletion earthaccess/api.py
@@ -325,7 +325,7 @@ def get_requests_https_session() -> requests.Session:
return session


-def get_s3fs_session(
+def get_s3fs_filesystem(
daac: Optional[str] = None,
provider: Optional[str] = None,
results: Optional[DataGranule] = None,
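For context, a minimal sketch of how the renamed top-level helper would be called after this change. The `daac` value and the object path are illustrative, and this assumes the renamed function stays re-exported at the package level, as the `kerchunk.py` call site below suggests:

```python
import earthaccess

earthaccess.login()  # Earthdata Login credentials are required first

# formerly earthaccess.get_s3fs_session(...); same parameters, new name
fs = earthaccess.get_s3fs_filesystem(daac="PODAAC")

# illustrative object path; any in-region S3 asset covered by the DAAC credentials
with fs.open("s3://podaac-ops-cumulus-protected/example/granule.nc", "rb") as f:
    header = f.read(1024)
```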
4 changes: 3 additions & 1 deletion earthaccess/kerchunk.py
@@ -39,7 +39,9 @@ def consolidate_metadata(
) from e

if access == "direct":
-fs = earthaccess.get_s3fs_session(provider=granules[0]["meta"]["provider-id"])
+fs = earthaccess.get_s3fs_filesystem(
+    provider=granules[0]["meta"]["provider-id"]
+)
else:
fs = earthaccess.get_fsspec_https_session()

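The only behavioural path touched here is the `access == "direct"` branch of `consolidate_metadata`, which now builds its S3 filesystem via the renamed helper. A hedged usage sketch; the search query, the `outfile` keyword, and the output filename are assumptions for illustration, not part of this commit:

```python
import earthaccess

earthaccess.login()

# any cloud-hosted collection works; this query is only illustrative
granules = earthaccess.search_data(short_name="MUR-JPL-L4-GLOB-v4.1", count=5)

# access="direct" -> get_s3fs_filesystem(provider=...); otherwise an HTTPS fsspec session
refs = earthaccess.consolidate_metadata(
    granules,
    outfile="combined_refs.json",
    access="direct",
)
```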
14 changes: 7 additions & 7 deletions earthaccess/store.py
@@ -197,7 +197,7 @@ def set_requests_session(
else:
resp.raise_for_status()

-def get_s3fs_session(
+def get_s3fs_filesystem(
self,
daac: Optional[str] = None,
concept_id: Optional[str] = None,
@@ -360,10 +360,10 @@ def _open_granules(
endpoint = self._own_s3_credentials(granules[0]["umm"]["RelatedUrls"])
if endpoint is not None:
logger.info(f"using endpoint: {endpoint}")
-s3_fs = self.get_s3fs_session(endpoint=endpoint)
+s3_fs = self.get_s3fs_filesystem(endpoint=endpoint)
else:
logger.info(f"using provider: {provider}")
-s3_fs = self.get_s3fs_session(provider=provider)
+s3_fs = self.get_s3fs_filesystem(provider=provider)
else:
access = "on_prem"
s3_fs = None
@@ -416,7 +416,7 @@ def _open_urls(
url_mapping: Mapping[str, None] = {url: None for url in granules}
if self.in_region and granules[0].startswith("s3"):
if provider is not None:
-s3_fs = self.get_s3fs_session(provider=provider)
+s3_fs = self.get_s3fs_filesystem(provider=provider)
if s3_fs is not None:
try:
fileset = _open_files(
@@ -530,7 +530,7 @@ def _get_urls(
)
if self.in_region and data_links[0].startswith("s3"):
logger.info(f"Accessing cloud dataset using provider: {provider}")
-s3_fs = self.get_s3fs_session(provider=provider)
+s3_fs = self.get_s3fs_filesystem(provider=provider)
# TODO: make this parallel or concurrent
for file in data_links:
s3_fs.get(file, str(local_path))
@@ -573,10 +573,10 @@ def _get_granules(
logger.info(
f"Accessing cloud dataset using dataset endpoint credentials: {endpoint}"
)
-s3_fs = self.get_s3fs_session(endpoint=endpoint)
+s3_fs = self.get_s3fs_filesystem(endpoint=endpoint)
else:
logger.info(f"Accessing cloud dataset using provider: {provider}")
-s3_fs = self.get_s3fs_session(provider=provider)
+s3_fs = self.get_s3fs_filesystem(provider=provider)

local_path.mkdir(parents=True, exist_ok=True)

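All five store.py hunks are mechanical call-site renames inside `Store`; the credential lookup itself is unchanged. A minimal sketch of exercising the renamed method directly. Constructing `Store` from the `Auth` object returned by `login()` reflects earthaccess's usual setup but is stated here as an assumption, and the `daac` value is illustrative:

```python
import earthaccess
from earthaccess.store import Store

auth = earthaccess.login()
store = Store(auth)

# behaves exactly like the old get_s3fs_session: temporary S3 credentials
# for the DAAC/provider are fetched and wrapped in an s3fs.S3FileSystem
s3_fs = store.get_s3fs_filesystem(daac="PODAAC")
print(type(s3_fs).__name__)  # S3FileSystem
```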
4 changes: 2 additions & 2 deletions tests/integration/test_auth.py
@@ -109,9 +109,9 @@ def test_get_s3_credentials_lowercase_location(location):


@pytest.mark.parametrize("location", ({"daac": "podaac"}, {"provider": "pocloud"}))
-def test_get_s3fs_session_lowercase_location(location):
+def test_get_s3fs_filesystem_lowercase_location(location):
activate_environment()
earthaccess.login(strategy="environment")
-fs = earthaccess.get_s3fs_session(**location)
+fs = earthaccess.get_s3fs_filesystem(**location)
assert isinstance(fs, s3fs.S3FileSystem)
assert all(fs.storage_options[key] for key in ["key", "secret", "token"])
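The renamed integration test keeps checking that lowercase daac/provider values are accepted. An equivalent manual check looks roughly like this, assuming Earthdata credentials are available in the environment for the `environment` login strategy:

```python
import earthaccess
import s3fs

earthaccess.login(strategy="environment")

# lowercase "podaac" should resolve the same as "PODAAC"
fs = earthaccess.get_s3fs_filesystem(daac="podaac")
assert isinstance(fs, s3fs.S3FileSystem)
assert all(fs.storage_options[key] for key in ["key", "secret", "token"])
```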
8 changes: 4 additions & 4 deletions tests/unit/test_store.py
@@ -101,12 +101,12 @@ def test_store_can_create_s3_fsspec_session(self):
"OBDAAC",
"ASDC",
]:
-s3_fs = store.get_s3fs_session(daac=daac)
+s3_fs = store.get_s3fs_filesystem(daac=daac)
assert isinstance(s3_fs, s3fs.S3FileSystem)
assert s3_fs.storage_options == expected_storage_options

for endpoint in custom_endpoints:
-s3_fs = store.get_s3fs_session(endpoint=endpoint)
+s3_fs = store.get_s3fs_filesystem(endpoint=endpoint)
assert isinstance(s3_fs, s3fs.S3FileSystem)
assert s3_fs.storage_options == expected_storage_options

@@ -120,12 +120,12 @@ def test_store_can_create_s3_fsspec_session(self):
"OB_CLOUD",
"LARC_CLOUD",
]:
-s3_fs = store.get_s3fs_session(provider=provider)
+s3_fs = store.get_s3fs_filesystem(provider=provider)
assert isinstance(s3_fs, s3fs.S3FileSystem)
assert s3_fs.storage_options == expected_storage_options

# Ensure informative error is raised
with pytest.raises(ValueError, match="parameters must be specified"):
-store.get_s3fs_session()
+store.get_s3fs_filesystem()

return None
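The final hunk keeps the error-path assertion: with none of the keywords used at the call sites above (daac, provider, endpoint), there is nothing to resolve S3 credentials from, so the method should raise. A hedged sketch of that behaviour outside pytest; the exact message wording beyond "parameters must be specified" is an assumption:

```python
import earthaccess
from earthaccess.store import Store

store = Store(earthaccess.login())

try:
    store.get_s3fs_filesystem()  # no daac/provider/endpoint to derive credentials from
except ValueError as err:
    print(err)  # message notes that the relevant parameters must be specified
```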
