From 7b282dd1f5de26111c69c37cef8b1d48aa71e186 Mon Sep 17 00:00:00 2001
From: Tyler Sutterley
Date: Tue, 3 Dec 2024 12:40:41 -0800
Subject: [PATCH] fix: add `missing_ok` to deletions

---
 test/test_download_and_read.py | 12 ++++++------
 test/test_interpolate.py       |  2 +-
 test/test_parquet.py           |  4 ++--
 test/test_solid_earth.py       |  2 +-
 test/test_spatial.py           | 12 ++++++------
 5 files changed, 16 insertions(+), 16 deletions(-)

diff --git a/test/test_download_and_read.py b/test/test_download_and_read.py
index 50ec37fa..08e0716e 100644
--- a/test/test_download_and_read.py
+++ b/test/test_download_and_read.py
@@ -126,7 +126,7 @@ def download_CATS2008(self):
         # clean up model
         shutil.rmtree(modelpath)
         # clean up
-        CFname.unlink()
+        CFname.unlink(missing_ok=True)

     # PURPOSE: Download CATS2008 from AWS S3 bucket
     @pytest.fixture(scope="class", autouse=True)
@@ -174,7 +174,7 @@ def AWS_CATS2008(self, aws_access_key_id, aws_secret_access_key, aws_region_name
         # clean up model
         shutil.rmtree(modelpath)
         # clean up
-        CFname.unlink()
+        CFname.unlink(missing_ok=True)

     # PURPOSE: Download Antarctic Tide Gauge Database from US Antarctic Program
     @pytest.fixture(scope="class", autouse=False)
@@ -188,7 +188,7 @@ def download_AntTG(self):
         # run tests
         yield
         # clean up
-        local.unlink()
+        local.unlink(missing_ok=True)

     # PURPOSE: Download Antarctic Tide Gauge Database from AWS
     @pytest.fixture(scope="class", autouse=True)
@@ -211,7 +211,7 @@ def AWS_AntTG(self, aws_access_key_id, aws_secret_access_key, aws_region_name):
         # run tests
         yield
         # clean up
-        local.unlink()
+        local.unlink(missing_ok=True)

     # PURPOSE: create verification from Matlab program
     @pytest.fixture(scope="class", autouse=False)
@@ -938,7 +938,7 @@ def download_AOTIM5_2018(self):
         # clean up model
         shutil.rmtree(modelpath)
         # clean up
-        CFname.unlink()
+        CFname.unlink(missing_ok=True)

     # PURPOSE: Download Arctic Tidal Current Atlas list of records
     @pytest.fixture(scope="class", autouse=True)
@@ -951,7 +951,7 @@ def download_Arctic_Tide_Atlas(self):
         # run tests
         yield
         # clean up
-        local.unlink()
+        local.unlink(missing_ok=True)

     # PURPOSE: create verification from Matlab program
     @pytest.fixture(scope="class", autouse=False)
diff --git a/test/test_interpolate.py b/test/test_interpolate.py
index b41ae3f9..d4291cf9 100644
--- a/test/test_interpolate.py
+++ b/test/test_interpolate.py
@@ -35,7 +35,7 @@ def download_nodes(N=324):
         verbose=True)
     yield
     # remove the node file
-    filepath.joinpath(matfile).unlink()
+    filepath.joinpath(matfile).unlink(missing_ok=True)

 # Franke's 3D evaluation function
 def franke_3d(x,y,z):
diff --git a/test/test_parquet.py b/test/test_parquet.py
index 8c9aadcb..f681b0b3 100644
--- a/test/test_parquet.py
+++ b/test/test_parquet.py
@@ -72,7 +72,7 @@ def test_parquet():
     # check that data is valid
     assert np.all((np.abs(v-df[k].values) < eps) for k,v in output.items())
     # remove the test file
-    output_file.unlink()
+    output_file.unlink(missing_ok=True)

 # PURPOSE: test the read and write of geoparquet files
 def test_geoparquet():
@@ -133,4 +133,4 @@ def test_geoparquet():
     # check that data is valid
     assert np.all((np.abs(v-gdf[k].values) < eps) for k,v in output.items())
     # remove the test file
-    output_file.unlink()
+    output_file.unlink(missing_ok=True)
diff --git a/test/test_solid_earth.py b/test/test_solid_earth.py
index 10fa8f0a..a509b30c 100644
--- a/test/test_solid_earth.py
+++ b/test/test_solid_earth.py
@@ -303,7 +303,7 @@ def download_jpl_ephemerides():
         # run tests
         yield
         # clean up
-        de440s.unlink()
+        de440s.unlink(missing_ok=True)
     else:
         # run tests
         yield
diff --git a/test/test_spatial.py b/test/test_spatial.py
index 093a6488..56dfce91 100644
--- a/test/test_spatial.py
+++ b/test/test_spatial.py
@@ -112,7 +112,7 @@ def test_ascii():
     eps = np.finfo(np.float32).eps
     assert np.all((np.abs(v-test[k]) < eps) for k,v in output.items())
     # remove the test file
-    output_file.unlink()
+    output_file.unlink(missing_ok=True)

 # PURPOSE: test the read and write of netCDF4 files
 @pytest.mark.parametrize("TYPE", ['drift','grid','time series'])
@@ -180,7 +180,7 @@ def test_netCDF4(TYPE):
     eps = np.finfo(np.float32).eps
     assert np.all((np.abs(v-test[k]) < eps) for k,v in output.items())
     # remove the test file
-    output_file.unlink()
+    output_file.unlink(missing_ok=True)

 # PURPOSE: test the read and write of HDF5 files
 @pytest.mark.parametrize("TYPE", ['drift','grid','time series'])
@@ -247,7 +247,7 @@ def test_HDF5(TYPE):
     eps = np.finfo(np.float32).eps
     assert np.all((np.abs(v-test[k]) < eps) for k,v in output.items())
     # remove the test file
-    output_file.unlink()
+    output_file.unlink(missing_ok=True)

 # PURPOSE: Download IODEM3 from NSIDC
 @pytest.fixture(scope="module", autouse=False)
@@ -266,7 +266,7 @@ def nsidc_IODEM3(username, password):
     # run tests
     yield
     # clean up
-    granule.unlink()
+    granule.unlink(missing_ok=True)

 # PURPOSE: Download IODEM3 from AWS S3 bucket
 @pytest.fixture(scope="module", autouse=True)
@@ -291,7 +291,7 @@ def AWS_IODEM3(aws_access_key_id, aws_secret_access_key, aws_region_name):
     # run tests
     yield
     # clean up
-    granule.unlink()
+    granule.unlink(missing_ok=True)

 # PURPOSE: test the read and write of geotiff files
 def test_geotiff():
@@ -322,7 +322,7 @@ def test_geotiff():
     eps = np.finfo(np.float32).eps
     assert np.all((np.abs(v-test[k]) < eps) for k,v in dinput.items())
     # remove the test files
-    output_file.unlink()
+    output_file.unlink(missing_ok=True)

 # PURPOSE: test the default field mapping function
 def test_field_mapping():
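
Reviewer note (not part of the patch): pathlib.Path.unlink(missing_ok=True), available since Python 3.8, suppresses the FileNotFoundError that a plain unlink() raises when the target no longer exists, so fixture teardown does not fail if a download was skipped or a file was already removed. A minimal sketch of the behavior, using a hypothetical scratch directory and file name for illustration only:

import pathlib
import tempfile

# hypothetical scratch directory and file, for illustration only
tmpdir = pathlib.Path(tempfile.mkdtemp())
target = tmpdir.joinpath("scratch.nc")
target.write_text("placeholder")

# first call: the file exists and is removed as usual
target.unlink(missing_ok=True)
# second call: the file is already gone; no FileNotFoundError is raised
target.unlink(missing_ok=True)
tmpdir.rmdir()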