From f7927230f66ee74a47eadeaa6b44297992b13f72 Mon Sep 17 00:00:00 2001
From: Sherwin-14
Date: Mon, 8 Jul 2024 12:19:33 +0530
Subject: [PATCH] Added Fixtures to conftest.py

---
 tests/integration/conftest.py             | 62 +++++++++++++++++++++++
 tests/integration/test_cloud_download.py  | 52 +++----------------
 tests/integration/test_cloud_open.py      | 54 +++-----------------
 tests/integration/test_kerchunk.py        | 22 --------
 tests/integration/test_onprem_download.py | 52 +++----------------
 tests/integration/test_onprem_open.py     | 52 +++----------------
 6 files changed, 87 insertions(+), 207 deletions(-)

diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py
index 8be3de59..edd93a7b 100644
--- a/tests/integration/conftest.py
+++ b/tests/integration/conftest.py
@@ -1,4 +1,11 @@
+import logging
+import os
+import random
+import unittest
+
+import earthaccess
 import pytest
+from earthaccess import Auth, Store
 
 ACCEPTABLE_FAILURE_RATE = 10
 
@@ -10,3 +17,58 @@ def pytest_sessionfinish(session, exitstatus):
     failure_rate = (100.0 * session.testsfailed) / session.testscollected
     if failure_rate <= ACCEPTABLE_FAILURE_RATE:
         session.exitstatus = 0
+
+
+@pytest.fixture(scope="module")
+def authenticated_store():
+    logger = logging.getLogger(__name__)
+    assertions = unittest.TestCase("__init__")
+
+    # we need to use a valid EDL credential
+
+    assert "EARTHDATA_USERNAME" in os.environ
+    assert "EARTHDATA_PASSWORD" in os.environ
+
+    auth = Auth().login(strategy="environment")
+    assert auth.authenticated
+    logger.info(f"Current username: {os.environ['EARTHDATA_USERNAME']}")
+    logger.info(f"earthaccess version: {earthaccess.__version__}")
+
+    store = Store(auth)
+
+    return store, logger, assertions
+
+
+@pytest.fixture()
+def get_sample_granules(granules, sample_size, max_granule_size):
+    """Return a random sample of granules, each no larger than max_granule_size
+    in MB, along with the total size in MB of the sample.
+ """ + files_to_download = [] + total_size = 0 + max_tries = sample_size * 2 + tries = 0 + + while tries <= max_tries: + g = random.sample(granules, 1)[0] + if g.size() > max_granule_size: + # print(f"G: {g['meta']['concept-id']} exceded max size: {g.size()}") + tries += 1 + continue + else: + # print(f"Adding : {g['meta']['concept-id']} size: {g.size()}") + files_to_download.append(g) + total_size += g.size() + if len(files_to_download) >= sample_size: + break + return files_to_download, round(total_size) + + +@pytest.fixture() +def granules(): + granules = earthaccess.search_data( + count=2, + short_name="SEA_SURFACE_HEIGHT_ALT_GRIDS_L4_2SATS_5DAY_6THDEG_V_JPL2205", + cloud_hosted=True, + ) + return granules diff --git a/tests/integration/test_cloud_download.py b/tests/integration/test_cloud_download.py index 4e8f9519..4b61fcff 100644 --- a/tests/integration/test_cloud_download.py +++ b/tests/integration/test_cloud_download.py @@ -1,17 +1,11 @@ # package imports -import logging -import os import random import shutil -import unittest from pathlib import Path import earthaccess import pytest -from earthaccess import Auth, DataCollections, DataGranules, Store - -logger = logging.getLogger(__name__) - +from earthaccess import DataCollections, DataGranules daac_list = [ { @@ -56,47 +50,11 @@ }, ] -assertions = unittest.TestCase("__init__") - -# we need to use a valid EDL credential - -assertions.assertTrue("EARTHDATA_USERNAME" in os.environ) -assertions.assertTrue("EARTHDATA_PASSWORD" in os.environ) - -auth = Auth().login(strategy="environment") -assertions.assertTrue(auth.authenticated) -logger.info(f"Current username: {os.environ['EARTHDATA_USERNAME']}") -logger.info(f"earthaccess version: {earthaccess.__version__}") - -store = Store(auth) - - -def get_sample_granules(granules, sample_size, max_granule_size): - """Returns a list with sample granules and their size in MB if - the total size is less than the max_granule_size. 
- """ - files_to_download = [] - total_size = 0 - max_tries = sample_size * 2 - tries = 0 - - while tries <= max_tries: - g = random.sample(granules, 1)[0] - if g.size() > max_granule_size: - # print(f"G: {g['meta']['concept-id']} exceded max size: {g.size()}") - tries += 1 - continue - else: - # print(f"Adding : {g['meta']['concept-id']} size: {g.size()}") - files_to_download.append(g) - total_size += g.size() - if len(files_to_download) >= sample_size: - break - return files_to_download, round(total_size) - @pytest.mark.parametrize("daac", daac_list) -def test_earthaccess_can_download_cloud_collection_granules(daac): +def test_earthaccess_can_download_cloud_collection_granules( + authenticated_store, get_sample_granules, daac +): """Tests that we can download cloud collections using HTTPS links.""" daac_shortname = daac["short_name"] collections_count = daac["collections_count"] @@ -105,6 +63,8 @@ def test_earthaccess_can_download_cloud_collection_granules(daac): granules_sample_size = daac["granules_sample_size"] granules_max_size = daac["granules_max_size_mb"] + store, logger, assertions = authenticated_store + collection_query = DataCollections().data_center(daac_shortname).cloud_hosted(True) hits = collection_query.hits() logger.info(f"Cloud hosted collections for {daac_shortname}: {hits}") diff --git a/tests/integration/test_cloud_open.py b/tests/integration/test_cloud_open.py index b69eba15..542b915b 100644 --- a/tests/integration/test_cloud_open.py +++ b/tests/integration/test_cloud_open.py @@ -1,16 +1,10 @@ # package imports -import logging -import os import random -import unittest import earthaccess import magic import pytest -from earthaccess import Auth, DataCollections, DataGranules, Store - -logger = logging.getLogger(__name__) - +from earthaccess import DataCollections, DataGranules daacs_list = [ { @@ -55,44 +49,6 @@ }, ] -assertions = unittest.TestCase("__init__") - -# we need to use a valid EDL credential - -assertions.assertTrue("EARTHDATA_USERNAME" in os.environ) -assertions.assertTrue("EARTHDATA_PASSWORD" in os.environ) - -auth = Auth().login(strategy="environment") -assertions.assertTrue(auth.authenticated) -logger.info(f"Current username: {os.environ['EARTHDATA_USERNAME']}") -logger.info(f"earthaccess version: {earthaccess.__version__}") - -store = Store(auth) - - -def get_sample_granules(granules, sample_size, max_granule_size): - """Returns a list with sample granules and their size in MB if - the total size is less than the max_granule_size. 
- """ - files_to_download = [] - total_size = 0 - max_tries = sample_size * 2 - tries = 0 - - while tries <= max_tries: - g = random.sample(granules, 1)[0] - if g.size() > max_granule_size: - # print(f"G: {g['meta']['concept-id']} exceded max size: {g.size()}") - tries += 1 - continue - else: - # print(f"Adding : {g['meta']['concept-id']} size: {g.size()}") - files_to_download.append(g) - total_size += g.size() - if len(files_to_download) >= sample_size: - break - return files_to_download, round(total_size, 2) - def supported_collection(data_links): for url in data_links: @@ -102,7 +58,9 @@ def supported_collection(data_links): @pytest.mark.parametrize("daac", daacs_list) -def test_earthaccess_can_open_onprem_collection_granules(daac): +def test_earthaccess_can_open_onprem_collection_granules( + authenticated_store, get_sample_granules, daac +): """Tests that we can download cloud collections using HTTPS links.""" daac_shortname = daac["short_name"] collections_count = daac["collections_count"] @@ -111,6 +69,8 @@ def test_earthaccess_can_open_onprem_collection_granules(daac): granules_sample_size = daac["granules_sample_size"] granules_max_size = daac["granules_max_size_mb"] + store, logger, assertions = authenticated_store + collection_query = DataCollections().data_center(daac_shortname).cloud_hosted(True) hits = collection_query.hits() logger.info(f"Cloud hosted collections for {daac_shortname}: {hits}") @@ -156,7 +116,7 @@ def test_earthaccess_can_open_onprem_collection_granules(daac): logger.warning(f"File could not be open: {file}") -def test_multi_file_granule(): +def test_multi_file_granule(authenticated_store): # Ensure granules that contain multiple files are handled correctly granules = earthaccess.search_data(short_name="HLSL30", count=1) assert len(granules) == 1 diff --git a/tests/integration/test_kerchunk.py b/tests/integration/test_kerchunk.py index 2e981cce..8c4abada 100644 --- a/tests/integration/test_kerchunk.py +++ b/tests/integration/test_kerchunk.py @@ -1,6 +1,3 @@ -import logging -import os -import unittest from pathlib import Path import earthaccess @@ -10,25 +7,6 @@ kerchunk = pytest.importorskip("kerchunk") pytest.importorskip("dask") -logger = logging.getLogger(__name__) -assertions = unittest.TestCase("__init__") - -assertions.assertTrue("EARTHDATA_USERNAME" in os.environ) -assertions.assertTrue("EARTHDATA_PASSWORD" in os.environ) - -logger.info(f"Current username: {os.environ['EARTHDATA_USERNAME']}") -logger.info(f"earthaccess version: {earthaccess.__version__}") - - -@pytest.fixture(scope="module") -def granules(): - granules = earthaccess.search_data( - count=2, - short_name="SEA_SURFACE_HEIGHT_ALT_GRIDS_L4_2SATS_5DAY_6THDEG_V_JPL2205", - cloud_hosted=True, - ) - return granules - @pytest.mark.parametrize("protocol", ["", "file://"]) def test_consolidate_metadata_outfile(tmp_path, granules, protocol): diff --git a/tests/integration/test_onprem_download.py b/tests/integration/test_onprem_download.py index 242a3c26..7c4c6157 100644 --- a/tests/integration/test_onprem_download.py +++ b/tests/integration/test_onprem_download.py @@ -1,17 +1,11 @@ # package imports -import logging -import os import random import shutil -import unittest from pathlib import Path import earthaccess import pytest -from earthaccess import Auth, DataCollections, DataGranules, Store - -logger = logging.getLogger(__name__) - +from earthaccess import DataCollections, DataGranules daacs_list = [ { @@ -48,44 +42,6 @@ }, ] -assertions = unittest.TestCase("__init__") - -# we need to use a valid 
EDL credential - -assertions.assertTrue("EARTHDATA_USERNAME" in os.environ) -assertions.assertTrue("EARTHDATA_PASSWORD" in os.environ) - -auth = Auth().login(strategy="environment") -assertions.assertTrue(auth.authenticated) -logger.info(f"Current username: {os.environ['EARTHDATA_USERNAME']}") -logger.info(f"earthaccess version: {earthaccess.__version__}") - -store = Store(auth) - - -def get_sample_granules(granules, sample_size, max_granule_size): - """Returns a list with sample granules and their size in MB if - the total size is less than the max_granule_size. - """ - files_to_download = [] - total_size = 0 - max_tries = sample_size * 2 - tries = 0 - - while tries <= max_tries: - g = random.sample(granules, 1)[0] - if g.size() > max_granule_size: - # print(f"G: {g['meta']['concept-id']} exceded max size: {g.size()}") - tries += 1 - continue - else: - # print(f"Adding : {g['meta']['concept-id']} size: {g.size()}") - files_to_download.append(g) - total_size += g.size() - if len(files_to_download) >= sample_size: - break - return files_to_download, round(total_size, 2) - def supported_collection(data_links): for url in data_links: @@ -95,7 +51,9 @@ def supported_collection(data_links): @pytest.mark.parametrize("daac", daacs_list) -def test_earthaccess_can_download_onprem_collection_granules(daac): +def test_earthaccess_can_download_onprem_collection_granules( + authenticated_store, get_sample_granules, daac +): """Tests that we can download cloud collections using HTTPS links.""" daac_shortname = daac["short_name"] collections_count = daac["collections_count"] @@ -104,6 +62,8 @@ def test_earthaccess_can_download_onprem_collection_granules(daac): granules_sample_size = daac["granules_sample_size"] granules_max_size = daac["granules_max_size_mb"] + store, logger, assertions = authenticated_store + collection_query = DataCollections().data_center(daac_shortname).cloud_hosted(False) hits = collection_query.hits() logger.info(f"Cloud hosted collections for {daac_shortname}: {hits}") diff --git a/tests/integration/test_onprem_open.py b/tests/integration/test_onprem_open.py index 2a455c44..9e84870e 100644 --- a/tests/integration/test_onprem_open.py +++ b/tests/integration/test_onprem_open.py @@ -1,16 +1,10 @@ # package imports -import logging -import os import random -import unittest import earthaccess import magic import pytest -from earthaccess import Auth, DataCollections, DataGranules, Store - -logger = logging.getLogger(__name__) - +from earthaccess import DataCollections, DataGranules daacs_list = [ { @@ -47,44 +41,6 @@ }, ] -assertions = unittest.TestCase("__init__") - -# we need to use a valid EDL credential - -assertions.assertTrue("EARTHDATA_USERNAME" in os.environ) -assertions.assertTrue("EARTHDATA_PASSWORD" in os.environ) - -auth = Auth().login(strategy="environment") -assertions.assertTrue(auth.authenticated) -logger.info(f"Current username: {os.environ['EARTHDATA_USERNAME']}") -logger.info(f"earthaccess version: {earthaccess.__version__}") - -store = Store(auth) - - -def get_sample_granules(granules, sample_size, max_granule_size): - """Returns a list with sample granules and their size in MB if - the total size is less than the max_granule_size. 
- """ - files_to_download = [] - total_size = 0 - max_tries = sample_size * 2 - tries = 0 - - while tries <= max_tries: - g = random.sample(granules, 1)[0] - if g.size() > max_granule_size: - # print(f"G: {g['meta']['concept-id']} exceded max size: {g.size()}") - tries += 1 - continue - else: - # print(f"Adding : {g['meta']['concept-id']} size: {g.size()}") - files_to_download.append(g) - total_size += g.size() - if len(files_to_download) >= sample_size: - break - return files_to_download, round(total_size, 2) - def supported_collection(data_links): for url in data_links: @@ -94,7 +50,9 @@ def supported_collection(data_links): @pytest.mark.parametrize("daac", daacs_list) -def test_earthaccess_can_open_onprem_collection_granules(daac): +def test_earthaccess_can_open_onprem_collection_granules( + authenticated_store, get_sample_granules, daac +): """Tests that we can download cloud collections using HTTPS links.""" daac_shortname = daac["short_name"] collections_count = daac["collections_count"] @@ -103,6 +61,8 @@ def test_earthaccess_can_open_onprem_collection_granules(daac): granules_sample_size = daac["granules_sample_size"] granules_max_size = daac["granules_max_size_mb"] + store, logger, assertions = authenticated_store + collection_query = DataCollections().data_center(daac_shortname).cloud_hosted(False) hits = collection_query.hits() logger.info(f"Cloud hosted collections for {daac_shortname}: {hits}")
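For reference, a minimal sketch (not part of this patch) of how a test module could consume the new conftest.py fixtures; the test name and the assertion below are illustrative assumptions, not code taken from the earthaccess test suite.

# Hypothetical usage example: unpack the (store, logger, assertions) tuple
# returned by the module-scoped authenticated_store fixture and open the two
# cloud-hosted granules returned by the granules fixture.
def test_can_open_sample_granules(authenticated_store, granules):
    store, logger, assertions = authenticated_store
    logger.info(f"Opening {len(granules)} sample granules")
    fileset = store.open(granules)
    assertions.assertTrue(len(fileset) > 0)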