From 63545fbcbe3d52b28104bbfdbe1503ca9c619994 Mon Sep 17 00:00:00 2001
From: PennyHow
Date: Mon, 4 Nov 2024 22:37:54 -0200
Subject: [PATCH] test data moved

---
 docs/tutorial-data.md                          |  61 ++++++
 src/griml/metadata/assign_id.py                |   2 +-
 src/griml/metadata/assign_names.py             |  33 ++--
 src/griml/metadata/assign_sources.py           |  58 +++---
 .../metadata/iml_abundancy_error_estimate.py   |  41 ++++
 src/griml/metadata/iml_cleanup_workflow.py     | 148 ++++++++++++++
 src/griml/metadata/iml_metadata_workflow.py    | 184 ++++++++++++++++++
 src/griml/test/test.py                         | 130 +++++++++++--
 test/__init__.py                               |   0
 .../greenland_basins_polarstereo.cpg           |   0
 .../greenland_basins_polarstereo.dbf           | Bin
 .../greenland_basins_polarstereo.prj           |   0
 .../greenland_basins_polarstereo.shp           | Bin
 .../greenland_basins_polarstereo.shx           | Bin
 test/process_cli.py                            | 134 +++++++++++
 test/process_example.py                        |  86 ++++++++
 test/test.py                                   |  43 ++++
 {src/griml/test => test}/test_filter.cpg       |   0
 {src/griml/test => test}/test_filter.dbf       | Bin
 {src/griml/test => test}/test_filter.prj       |   0
 {src/griml/test => test}/test_filter.qix       | Bin
 {src/griml/test => test}/test_filter.shp       | Bin
 {src/griml/test => test}/test_filter.shx       | Bin
 {src/griml/test => test}/test_icemask.cpg      |   0
 {src/griml/test => test}/test_icemask.dbf      | Bin
 {src/griml/test => test}/test_icemask.prj      |   0
 {src/griml/test => test}/test_icemask.shp      | Bin
 {src/griml/test => test}/test_icemask.shx      | Bin
 {src/griml/test => test}/test_merge_1.cpg      |   0
 {src/griml/test => test}/test_merge_1.dbf      | Bin
 {src/griml/test => test}/test_merge_1.prj      |   0
 {src/griml/test => test}/test_merge_1.qix      | Bin
 {src/griml/test => test}/test_merge_1.shp      | Bin
 {src/griml/test => test}/test_merge_1.shx      | Bin
 {src/griml/test => test}/test_merge_2.cpg      |   0
 {src/griml/test => test}/test_merge_2.dbf      | Bin
 {src/griml/test => test}/test_merge_2.prj      |   0
 {src/griml/test => test}/test_merge_2.qix      | Bin
 {src/griml/test => test}/test_merge_2.shp      | Bin
 {src/griml/test => test}/test_merge_2.shx      | Bin
 .../test => test}/test_north_greenland.tif     | Bin
 .../test_north_greenland.tif.aux.xml           |   0
 {src/griml/test => test}/test_placenames.cpg   |   0
 {src/griml/test => test}/test_placenames.dbf   | Bin
 {src/griml/test => test}/test_placenames.prj   |   0
 {src/griml/test => test}/test_placenames.qmd   |   0
 {src/griml/test => test}/test_placenames.shp   | Bin
 {src/griml/test => test}/test_placenames.shx   | Bin
 48 files changed, 856 insertions(+), 64 deletions(-)
 create mode 100644 docs/tutorial-data.md
 create mode 100644 src/griml/metadata/iml_abundancy_error_estimate.py
 create mode 100644 src/griml/metadata/iml_cleanup_workflow.py
 create mode 100644 src/griml/metadata/iml_metadata_workflow.py
 create mode 100644 test/__init__.py
 rename {src/griml/test => test}/greenland_basins_polarstereo.cpg (100%)
 rename {src/griml/test => test}/greenland_basins_polarstereo.dbf (100%)
 rename {src/griml/test => test}/greenland_basins_polarstereo.prj (100%)
 rename {src/griml/test => test}/greenland_basins_polarstereo.shp (100%)
 rename {src/griml/test => test}/greenland_basins_polarstereo.shx (100%)
 create mode 100644 test/process_cli.py
 create mode 100644 test/process_example.py
 create mode 100644 test/test.py
 rename {src/griml/test => test}/test_filter.cpg (100%)
 rename {src/griml/test => test}/test_filter.dbf (100%)
 rename {src/griml/test => test}/test_filter.prj (100%)
 rename {src/griml/test => test}/test_filter.qix (100%)
 rename {src/griml/test => test}/test_filter.shp (100%)
 rename {src/griml/test => test}/test_filter.shx (100%)
 rename {src/griml/test => test}/test_icemask.cpg (100%)
 rename {src/griml/test => test}/test_icemask.dbf (100%)
 rename {src/griml/test => test}/test_icemask.prj (100%)
 rename {src/griml/test => test}/test_icemask.shp (100%)
 rename {src/griml/test => test}/test_icemask.shx (100%)
 rename {src/griml/test => test}/test_merge_1.cpg (100%)
 rename {src/griml/test => test}/test_merge_1.dbf (100%)
 rename {src/griml/test => test}/test_merge_1.prj (100%)
 rename {src/griml/test => test}/test_merge_1.qix (100%)
 rename {src/griml/test => test}/test_merge_1.shp (100%)
 rename {src/griml/test => test}/test_merge_1.shx (100%)
 rename {src/griml/test => test}/test_merge_2.cpg (100%)
 rename {src/griml/test => test}/test_merge_2.dbf (100%)
 rename {src/griml/test => test}/test_merge_2.prj (100%)
 rename {src/griml/test => test}/test_merge_2.qix (100%)
 rename {src/griml/test => test}/test_merge_2.shp (100%)
 rename {src/griml/test => test}/test_merge_2.shx (100%)
 rename {src/griml/test => test}/test_north_greenland.tif (100%)
 rename {src/griml/test => test}/test_north_greenland.tif.aux.xml (100%)
 rename {src/griml/test => test}/test_placenames.cpg (100%)
 rename {src/griml/test => test}/test_placenames.dbf (100%)
 rename {src/griml/test => test}/test_placenames.prj (100%)
 rename {src/griml/test => test}/test_placenames.qmd (100%)
 rename {src/griml/test => test}/test_placenames.shp (100%)
 rename {src/griml/test => test}/test_placenames.shx (100%)
diff --git a/docs/tutorial-data.md b/docs/tutorial-data.md
new file mode 100644
index 0000000..890c6cc
--- /dev/null
+++ b/docs/tutorial-data.md
@@ -0,0 +1,61 @@
+# Dataset tutorials
+
+The GrIML package is used for the production of the Greenland ice marginal lake inventory series, which is freely available through the [GEUS Dataverse](https://doi.org/10.22008/FK2/MBKW9N). This dataset is a series of annual inventories, mapping the extent and presence of lakes across Greenland that share a margin with the Greenland Ice Sheet and/or the surrounding ice caps and peripheral glaciers.
+
+Here, we will look at how to load and handle the dataset, and provide details on its contents.
+
+## Dataset contents
+
+The annual inventories provide a comprehensive record of all identified ice marginal lakes, which have been detected using three independent remote sensing techniques:
+
+- DEM sink detection using the ArcticDEM (mosaic version 3)
+- SAR backscatter classification from Sentinel-1 imagery
+- Multi-spectral indices classification from Sentinel-2 imagery
+
+All data were compiled and filtered in a semi-automated approach, using a modified version of the [MEaSUREs GIMP ice mask](https://nsidc.org/data/NSIDC-0714/versions/1) to clip the dataset to within 1 km of the ice margin (a conceptual sketch of this clipping step is given below). Each detected lake was then verified manually. The methodology is open-source and provided in the associated [GitHub repository](https://github.com/GEUS-Glaciology-and-Climate/GrIML) for full reproducibility.
+
+The inventory series was created to better understand the impact of ice marginal lake change on the future sea level budget and on the terrestrial and marine landscapes of Greenland, including their ecosystems and associated human activities. The dataset is a complete inventory series for the whole of Greenland, with no missing data.
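+
+As a conceptual illustration of the margin filtering, the clipping step can be sketched with `geopandas` alone. This is a minimal sketch only: the file names are illustrative, and the production workflow is provided by the GrIML package itself (a buffered ice mask, as used in the package's own processing scripts, is assumed here).
+
+```python
+import geopandas as gpd
+
+# Illustrative inputs: detected lake polygons and an ice mask polygon file
+lakes = gpd.read_file("detected_lakes.shp")
+ice_mask = gpd.read_file("ice_mask.shp")
+
+# Buffer the ice mask by 1 km and retain only lakes intersecting the buffer
+margin_buffer = ice_mask.buffer(1000)
+buffer_gdf = gpd.GeoDataFrame(geometry=margin_buffer, crs=ice_mask.crs)
+near_margin = gpd.sjoin(lakes, buffer_gdf, predicate="intersects")
+```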
+
+### Data format
+
+The detected lakes are presented as polygon vector features in shapefile format (.shp), with coordinates provided in the WGS 84 / NSIDC Sea Ice Polar Stereographic North (EPSG:3413) projected coordinate system.
+
+### Metadata
+
+Each inventory in the inventory series contains the following metadata information:
+
+| Variable name | Description | Format |
+|---------------------|---------------------|---------|
+| `row_id` | Index identifying number for each polygon | Integer |
+| `lake_id` | Identifying number for each unique lake | Integer |
+| `lake_name` | Lake placename, as defined by the [Oqaasileriffik (Language Secretariat of Greenland)](https://oqaasileriffik.gl) placename database which is distributed with [QGreenland](https://qgreenland.org/) | String |
+| `margin` | Type of margin that the lake is adjacent to (`ICE_SHEET`, `ICE_CAP`) | String |
+| `region` | Region in which the lake is located, as defined by Mouginot and Rignot (2019) (`NW`, `NO`, `NE`, `CE`, `SE`, `SW`, `CW`) | String |
+| `area_sqkm` | Areal extent of the lake polygon(s) in square kilometres | Float |
+| `length_km` | Perimeter of the lake polygon(s) in kilometres | Float |
+| `temp_aver` | Average lake surface temperature estimate (in degrees Celsius), derived from the Landsat 8/9 OLI/TIRS Collection 2 Level 2 surface temperature data product | Float |
+| `temp_min` | Minimum pixel lake surface temperature estimate (in degrees Celsius), derived from the Landsat 8/9 OLI/TIRS Collection 2 Level 2 surface temperature data product | Float |
+| `temp_max` | Maximum pixel lake surface temperature estimate (in degrees Celsius), derived from the Landsat 8/9 OLI/TIRS Collection 2 Level 2 surface temperature data product | Float |
+| `temp_stdev` | Standard deviation of the lake surface temperature estimate, derived from the Landsat 8/9 OLI/TIRS Collection 2 Level 2 surface temperature data product | Float |
+| `method` | Method of classification (`DEM`, `SAR`, `VIS`) | String |
+| `source` | Image source of classification (`ARCTICDEM`, `S1`, `S2`) | String |
+| `all_src` | List of all sources that successfully classified the lake (i.e. all classifications with the same `lake_id` value) | String |
+| `num_src` | Number of sources that successfully classified the lake (`1`, `2`, `3`) | String |
+| `certainty` | Certainty of classification, calculated from `all_src` as a score between `0` and `1` | Float |
+| `start_date` | Start date for classification image filtering | String |
+| `end_date` | End date for classification image filtering | String |
+| `verified` | Flag to denote if the lake has been manually verified (`Yes`, `No`) | String |
+| `verif_by` | Author of verification | String |
+| `edited` | Flag to denote if polygon has been manually edited (`Yes`, `No`) | String |
+| `edited_by` | Author of manual editing | String |
+
+Each source contributes a fixed score to `certainty`; for example, a lake classified from both Sentinel-1 and Sentinel-2 imagery has an `all_src` of `S1, S2` and a `certainty` of roughly `0.7`, while a lake detected by all three methods scores `1.0`.
+
+## Getting started
+
+The dataset is openly available through the [GEUS Dataverse](https://doi.org/10.22008/FK2/MBKW9N), and each annual inventory can be loaded as a `geopandas.GeoDataFrame`.
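+
+This is a minimal sketch of loading one annual inventory with `geopandas`; the file name below is illustrative and should be replaced with the shapefile downloaded from the Dataverse.
+
+```python
+import geopandas as gpd
+
+# Load one annual inventory (illustrative file name)
+iml = gpd.read_file("20170101-ESA-GRIML-IML-fv1.shp")
+
+print(iml.crs)        # EPSG:3413
+print(len(iml))       # number of lake polygons in the inventory
+print(iml[["lake_id", "lake_name", "region", "area_sqkm"]].head())
+```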
+
+A quicklook plot of the dataset can then be generated directly from the loaded `GeoDataFrame`, for example with `iml.plot()`.
+
+## Generating statistics
+
+Summary statistics can be extracted with standard `pandas` aggregations on the loaded `GeoDataFrame`; for example, `iml.groupby("region")["area_sqkm"].sum()` gives the total lake area per region.
diff --git a/src/griml/metadata/assign_id.py b/src/griml/metadata/assign_id.py
index 7e25ddb..4db9909 100644
--- a/src/griml/metadata/assign_id.py
+++ b/src/griml/metadata/assign_id.py
@@ -28,7 +28,7 @@ def assign_id(gdf, col_name='unique_id'):
     n, ids = connected_components(overlap_matrix)
     ids=ids+1
 
-    # Assign ids and realign geoedataframe index
+    # Assign ids and realign geodataframe index
     gdf[col_name]=ids
     gdf = gdf.sort_values(col_name)
     gdf.reset_index(inplace=True, drop=True)
diff --git a/src/griml/metadata/assign_names.py b/src/griml/metadata/assign_names.py
index 08a1a17..4560685 100644
--- a/src/griml/metadata/assign_names.py
+++ b/src/griml/metadata/assign_names.py
@@ -12,7 +12,7 @@
 from shapely.geometry import Point, LineString, Polygon
 from griml.load import load
 
-def assign_names(gdf, gdf_names):
+def assign_names(gdf, gdf_names, distance=1000.0):
     '''Assign placenames to geodataframe geometries based on names in another
     geodataframe point geometries
 
@@ -39,13 +39,17 @@
     names = _compile_names(gdf2)
     placenames = gpd.GeoDataFrame({"geometry": list(gdf2['geometry']),
                                    "placename": names})
+
+    # Remove invalid geometries
+    gdf1 = _check_geometries(gdf1)
 
     # Assign names based on proximity
-    a = _get_nearest_point(gdf1, placenames)
+    a = _get_nearest_point(gdf1, placenames, distance)
+
     return a
 
-def _get_nearest_point(gdA, gdB, distance=500.0):
+def _get_nearest_point(gdA, gdB, distance=1000.0):
     '''Return properties of nearest point in Y to geometry in X'''
     nA = np.array(list(gdA.geometry.centroid.apply(lambda x: (x.x, x.y))))
     nB = np.array(list(gdB.geometry.apply(lambda x: (x.x, x.y))))
@@ -70,18 +74,25 @@
     return[i for i, x in enumerate(mylist) if x==value]
 
+def _check_geometries(gdf):
+    '''Remove rows with missing geometries from a geodataframe'''
+    return gdf.drop(gdf[gdf.geometry==None].index)
+
 def _compile_names(gdf):
     '''Get preferred placenames from placename geodatabase'''
     placenames=[]
     for i,v in gdf.iterrows():
-        if v['Ny_grønla'] != None:
-            placenames.append(v['Ny_grønla'])
+        if v['New Greenl'] != None:
+            placenames.append(v['New Greenl'])
         else:
-            if v['Dansk'] != None:
-                placenames.append(v['Dansk'])
+            if v['Old Greenl'] != None:
+                placenames.append(v['Old Greenl'])
             else:
-                if v['Alternativ'] != None:
-                    placenames.append(v['Alternativ'])
-                else:
-                    placenames.append(None)
+                if v['Danish'] != None:
+                    placenames.append(v['Danish'])
+                else:
+                    if v['Alternativ'] != None:
+                        placenames.append(v['Alternativ'])
+                    else:
+                        placenames.append(None)
     return placenames
diff --git a/src/griml/metadata/assign_sources.py b/src/griml/metadata/assign_sources.py
index 0212ac2..b9feaef 100644
--- a/src/griml/metadata/assign_sources.py
+++ b/src/griml/metadata/assign_sources.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 
-def assign_sources(gdf, col_names=['unique_id', 'source']):
+def assign_sources(gdf, col_names=['lake_id', 'source']):
     '''Assign source metadata to geodataframe, based on unique lake id and
     individual source information
 
@@ -17,38 +17,34 @@
     gdf : geopandas.GeoDataFrame
         Vectors with assigned sources
     '''
-    ids = gdf[col_names[0]].tolist()
-    source = gdf[col_names[1]].tolist()
-    satellites=[]
-
-    # Construct source list
-    for x in range(len(ids)):
-        indx = _get_indices(ids, x)
-        if len(indx) != 0:
-            res = []
-            if len(indx) == 1:
-                res.append(source[indx[0]].split('/')[-1])
-            else:
-                unid=[]
-                for dx in indx:
-                    unid.append(source[dx].split('/')[-1])
-                res.append(list(set(unid)))
-            for z in range(len(indx)):
-                if len(indx) == 1:
-                    satellites.append(res)
-                else:
-                    satellites.append(res[0])
-
-    # Compile lists for appending
-    satellites_names = [', '.join(i) for i in satellites]
-    number = [len(i) for i in satellites]
-
-    # Return updated geodataframe
-    gdf['all_src']=satellites_names
-    gdf['num_src']=number
+    all_src=[]
+    num_src=[]
+    for idx, i in gdf.iterrows():
+        idl = i[col_names[0]]
+
+        # Gather the set of sources recorded against this lake id
+        g = gdf[gdf[col_names[0]] == idl]
+        source = list(set(list(g[col_names[1]])))
+
+        if 1 <= len(source) <= 3:
+            satellites = ', '.join(source)
+            num = len(source)
+        else:
+            print('Unknown number of sources detected')
+            print(source)
+            satellites=None
+            num=None
+        all_src.append(satellites)
+        num_src.append(num)
+    gdf['all_src']=all_src
+    gdf['num_src']=num_src
     return gdf
-
 def _get_indices(mylist, value):
     '''Get indices for value in list'''
     return[i for i, x in enumerate(mylist) if x==value]
diff --git a/src/griml/metadata/iml_abundancy_error_estimate.py b/src/griml/metadata/iml_abundancy_error_estimate.py
new file mode 100644
index 0000000..b66f332
--- /dev/null
+++ b/src/griml/metadata/iml_abundancy_error_estimate.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Created on Thu Sep 26 16:09:20 2024
+
+@author: pho
+"""
+import geopandas as gpd
+import glob
+import pandas as pd
+
+# Map inventory file locations
+gdf_files = '/home/pho/Desktop/python_workspace/GrIML/other/iml_2016-2023/final/checked/*IML-fv1.shp'
+
+# Iterate across inventory series files, dissolving each annual inventory
+# to one geometry per lake id
+gdfs=[]
+for g in list(sorted(glob.glob(gdf_files))):
+    print(g)
+    gdf = gpd.read_file(g)
+    gdf = gdf.dissolve(by='lake_id')
+    print(len(gdf['geometry']))
+    gdfs.append(gdf)
+
+# Merge all years into one record per lake and recompute geometry attributes
+dfs = pd.concat(gdfs)
+dfs = dfs.dissolve(by='lake_id')
+dfs['area_sqkm']=[g.area/10**6 for g in list(dfs['geometry'])]
+dfs['length_km']=[g.length/1000 for g in list(dfs['geometry'])]
+
+print('Average lake size: ' + str(dfs.area_sqkm.mean()))
+
+dfs.to_file('/home/pho/Desktop/python_workspace/GrIML/other/iml_2016-2023/final/checked/'+'ALL-ESA-GRIML-IML-MERGED-fv1.shp')
\ No newline at end of file
diff --git a/src/griml/metadata/iml_cleanup_workflow.py b/src/griml/metadata/iml_cleanup_workflow.py
new file mode 100644
index 0000000..1627e3c
--- /dev/null
+++ b/src/griml/metadata/iml_cleanup_workflow.py
@@ -0,0 +1,148 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Created on Thu Sep 26 16:09:20 2024
+
+@author: pho
+"""
+import geopandas as gpd
+import glob
+import pandas as pd
+from pathlib import Path
+
+# Map inventory file locations
+gdf_files = '/home/pho/Desktop/python_workspace/GrIML/other/iml_2016-2023/final/unchecked/*.shp'
+
+# Load inventory point file with lake_id, region, basin-type and placename info
+gdf2 = gpd.read_file('/home/pho/Desktop/python_workspace/GrIML/other/iml_2016-2023/manual_validation/iml_manual_validation_with_names.shp')
+gdf2_corr = gdf2.drop(gdf2[gdf2.geometry==None].index)
+
+
+# Iterate across inventory series files
+for f in list(glob.glob(gdf_files)):
+    gdf1 = gpd.read_file(f)
+    gdf1 = gdf1.drop(gdf1[gdf1.geometry==None].index)
+
+    year = str(Path(f).stem)[0:4]
+    print(year)
+
+    # Join by attribute
+    gdf1['new_lakeid']=list(gdf1['lake_id'])
+    gdf = gdf1.merge(gdf2_corr, on='new_lakeid')
+
+    # Rename columns
+    gdf['lake_id']=gdf['new_lakeid']
+    gdf['margin']=gdf['BasinType']
+    gdf['region']=gdf['Region']
+    gdf['lake_name']=gdf['placename']
+    # gdf['start_date']=gdf['startdate']
+    # gdf['end_date']=gdf['enddate']
+
+    # Reformat geometry
+    gdf['geometry'] = gdf['geometry_x']
+    gdf = gdf.drop(gdf[gdf.geometry==None].index)
+    gdf['area_sqkm']=[g.area/10**6 for g in list(gdf['geometry'])]
+    gdf['length_km']=[g.length/1000 for g in list(gdf['geometry'])]
+    gdf = gpd.GeoDataFrame(gdf, geometry='geometry')
+
+    # Compile the set of sources recorded against each lake id
+    all_src=[]
+    num_src=[]
+    for idx, i in gdf.iterrows():
+        idl = i['lake_id']
+        gdf_id = gdf[gdf['lake_id'] == idl]
+        source = list(set(list(gdf_id['source'])))
+        if 1 <= len(source) <= 3:
+            satellites = ', '.join(source)
+            num = len(source)
+        else:
+            print('Unknown number of sources detected')
+            print(source)
+            satellites=None
+            num=None
+        all_src.append(satellites)
+        num_src.append(num)
+    gdf['all_src']=all_src
+    gdf['num_src']=num_src
+
+    # Add certainty score
+    def _get_score(value, search_names, scores):
+        '''Determine score from search string'''
+        if search_names[0] in value:
+            return scores[0]
+        elif search_names[1] in value:
+            return scores[1]
+        elif search_names[2] == value:
+            return scores[2]
+        else:
+            return None
+
+    source='all_src'
+    search_names = ['S1','S2','ARCTICDEM']
+    scores = [0.298, 0.398, 0.304]
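+    # Worked example of the scoring (illustrative): a lake with an all_src of
+    # 'S1, S2' accumulates 0.298 + 0.398 = 0.696, while a lake detected by
+    # all three sources scores 0.298 + 0.398 + 0.304 = 1.0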
+    cert=[]
+    srcs = list(gdf[source])
+
+    # Sum the per-source scores for each lake; single-source lakes are
+    # handled by the same split-and-sum loop
+    for a in range(len(srcs)):
+        out=[]
+        for b in srcs[a].split(', '):
+            out.append(_get_score(b, search_names, scores))
+        cert.append(sum(out))
+
+    gdf['certainty'] = cert
+
+    # # Add average summer temperature fields
+    # gdf['temp_aver']=''
+    # gdf['temp_max']=''
+    # gdf['temp_min']=''
+    # gdf['temp_stdev']=''
+    # gdf['temp_src']=''
+    # gdf['temp_num']=''
+
+    # Reorder columns and index
+    gdf_final = gdf[['geometry', 'lake_id','margin','region','lake_name',
+                     'start_date','end_date','area_sqkm','length_km','method',
+                     'source','all_src','num_src','certainty', 'verified',
+                     'verif_by','edited', 'edited_by']]
+
+    # Re-format index
+    gdf_final = gdf_final.sort_values(by='lake_id')
+    gdf_final["row_id"] = range(1, len(gdf_final) + 1)
+    gdf_final.reset_index(drop=True, inplace=True)
+    gdf_final.set_index("row_id", inplace=True)
+
+    gdf_final.to_file('/home/pho/Desktop/python_workspace/GrIML/other/iml_2016-2023/final/checked/'+str(year)+'0101-ESA-GRIML-IML-fv1.shp')
+
+    # Dissolve to one geometry per lake and recompute geometry attributes
+    gdf_final['idx'] = gdf_final['lake_id']
+    gdf_dissolve = gdf_final.dissolve(by='idx')
+    gdf_dissolve['area_sqkm']=[g.area/10**6 for g in list(gdf_dissolve['geometry'])]
+    gdf_dissolve['length_km']=[g.length/1000 for g in list(gdf_dissolve['geometry'])]
+
+    # # Add centroid position
+    # gdf_dissolve['centroid'] = gdf_dissolve['geometry'].centroid
+
+    # Reorder columns and index
+    gdf_dissolve = gdf_dissolve[['geometry', 'lake_id','margin','region','lake_name',
+                                 'start_date','end_date', 'area_sqkm','length_km','all_src',
+                                 'num_src','certainty', 'verified','verif_by','edited', 'edited_by']]
+
+    # gdf_dissolve = gdf_dissolve.reset_index(drop=True)
+    gdf_dissolve.to_file('/home/pho/Desktop/python_workspace/GrIML/other/iml_2016-2023/final/checked/'+str(year)+'0101-ESA-GRIML-IML-MERGED-fv1.shp')
\ No newline at end of file
diff --git a/src/griml/metadata/iml_metadata_workflow.py b/src/griml/metadata/iml_metadata_workflow.py
new file mode 100644
index 0000000..6e6ac4d
--- /dev/null
+++ b/src/griml/metadata/iml_metadata_workflow.py
@@ -0,0 +1,184 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Created on Wed Sep 18 15:41:24 2024
+
+@author: pho
+"""
+import geopandas as gpd
+import glob
+import numpy as np
+import pandas as pd
+from pathlib import Path
+from scipy.spatial import cKDTree
+
+# Map inventory file locations
+gdfs = '/home/pho/Desktop/python_workspace/GrIML/other/iml_2016-2023/merged/*.shp'
+
+# Load inventory point file with lake_id, region, basin-type and placename info
+gdf2 = gpd.read_file('/home/pho/Desktop/python_workspace/GrIML/other/iml_2016-2023/manual_validation/iml_manual_validation_with_names.shp')
+
+# Iterate across inventory series files
+for f in list(glob.glob(gdfs)):
+    gdf1 = gpd.read_file(f)
+
+    year = str(Path(f).stem).split('_')[0]
+    print(year)
+
+    # Assign ID, region, basin-type and placename attributes
+    gdf1_corr = gdf1.drop(gdf1[gdf1.geometry==None].index)
+    gdf2_corr = gdf2.drop(gdf2[gdf2.geometry==None].index)
+
+    nA = np.array(list(gdf1_corr.geometry.centroid.apply(lambda x: (x.x, x.y))))
+    nB = np.array(list(gdf2_corr.geometry.apply(lambda x: (x.x, x.y))))
+
+    btree = cKDTree(nB)
+    dist, idx = btree.query(nA, k=1)
+    gdf2_nearest = gdf2_corr.iloc[idx].drop(columns="geometry").reset_index(drop=True)
+    gdf = pd.concat(
+        [
+            gdf1_corr.reset_index(drop=True),
+            gdf2_nearest,
+            pd.Series(dist, name='dist')
+        ],
+        axis=1)
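+    # Note: cKDTree.query returns, for each lake centroid in nA, the distance
+    # to and index of its nearest validation point in nB; the matched
+    # attributes are then joined back positionally by pd.concat above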
+    # Rename columns
+    gdf['lake_id']=gdf['new_lakeid']
+    gdf['margin']=gdf['BasinType']
+    gdf['region']=gdf['Region']
+    gdf['lake_name']=gdf['placename']
+    gdf['start_date']=gdf['startdate']
+    gdf['end_date']=gdf['enddate']
+
+    # Reorder columns and index
+    gdf = gdf[['geometry', 'lake_id','margin','region','lake_name',
+               'start_date','end_date','area_sqkm','length_km','method','source']]
+    gdf = gdf.sort_values(by='lake_id')
+    gdf = gdf.reset_index(drop=True)
+
+    # Compile the set of sources recorded against each lake id
+    all_src=[]
+    num_src=[]
+    for idx, i in gdf.iterrows():
+        idl = i['lake_id']
+        gdf_id = gdf[gdf['lake_id'] == idl]
+        source = list(set(list(gdf_id['source'])))
+        if 1 <= len(source) <= 3:
+            satellites = ', '.join(source)
+            num = len(source)
+        else:
+            print('Unknown number of sources detected')
+            print(source)
+            satellites=None
+            num=None
+        all_src.append(satellites)
+        num_src.append(num)
+    gdf['all_src']=all_src
+    gdf['num_src']=num_src
+
+    # Add certainty score
+    def _get_score(value, search_names, scores):
+        '''Determine score from search string'''
+        if search_names[0] in value:
+            return scores[0]
+        elif search_names[1] in value:
+            return scores[1]
+        elif search_names[2] == value:
+            return scores[2]
+        else:
+            return None
+
+    source='all_src'
+    search_names = ['S1','S2','ARCTICDEM']
+    scores = [0.298, 0.398, 0.304]
+    cert=[]
+    srcs = list(gdf[source])
+
+    # Sum the per-source scores for each lake; single-source lakes are
+    # handled by the same split-and-sum loop
+    for a in range(len(srcs)):
+        out=[]
+        for b in srcs[a].split(', '):
+            out.append(_get_score(b, search_names, scores))
+        cert.append(sum(out))
+
+    gdf['certainty'] = cert
+
+    # Add average summer temperature fields
+    gdf['temp_aver']=''
+    gdf['temp_max']=''
+    gdf['temp_min']=''
+    gdf['temp_stdev']=''
+    gdf['temp_src']=''
+    gdf['temp_num']=''
+
+    # Add verification and manual intervention fields
+    gdf['verified']='Yes'
+    gdf['verif_by']='How'
+    gdf['edited']=''
+    gdf['edited_by']=''
+
+    # Re-format index
+    gdf["row_id"] = gdf.index + 1
+    gdf.reset_index(drop=True, inplace=True)
+    gdf.set_index("row_id", inplace=True)
+
+    gdf.to_file('/home/pho/Desktop/python_workspace/GrIML/other/iml_2016-2023/metadata/'+str(year)+'0101-ESA-GRIML-IML-MERGED-fv1.shp')
diff --git a/src/griml/test/test.py b/src/griml/test/test.py
index 24346c3..db5c0a5 100644
--- a/src/griml/test/test.py
+++ b/src/griml/test/test.py
@@ -1,43 +1,131 @@
+import unittest, tempfile, rasterio, os
+from rasterio.transform import from_origin
+import geopandas as gpd
+from shapely.geometry import Point, Polygon
+import numpy as np
 from griml.convert.convert import convert
 from griml.filter.filter_vectors import filter_vectors
 from griml.merge.merge_vectors import merge_vectors
 from griml.metadata.add_metadata import add_metadata
-import unittest, pkg_resources, os
-import geopandas as gpd
-import griml
 
 class TestGrIML(unittest.TestCase):
     '''Unittest for the GrIML post-processing workflow'''
 
+    def setUp(self):
+        # Set up temporary directories
+        self.temp_dir = tempfile.TemporaryDirectory()
+
+    def tearDown(self):
+        # Clean up temporary files
+        self.temp_dir.cleanup()
+
+    def create_sample_raster(self, filepath):
+        # Generate a small synthetic raster file with three bands
+        transform = from_origin(0, 10, 0.1, 0.1)  # Top-left origin, 0.1 cell size
+        with rasterio.open(
+            filepath, 'w',
+            driver='GTiff', height=10, width=10,
+            count=3, dtype='uint8', crs='EPSG:3413',
+            transform=transform
+        ) as dst:
+            # Create a 3D NumPy array of shape (bands, height, width) filled with 255
+            data = np.full((3, 10, 10), 255, dtype='uint8')
+            dst.write(data)
+
+    def create_sample_pointfile(self, filepath, num_features=5):
+        # Generate a synthetic GeoDataFrame with simple point geometries
+        data = {
+            'geometry': [Point(x, y) for x, y in zip(range(num_features), range(num_features))],
+            'id': [int(i) for i in range(num_features)],
+            'New Greenl': 'test'
+        }
+        gdf = gpd.GeoDataFrame(data, crs="EPSG:3413")
+        gdf["row_id"] = gdf.index + 1
+        gdf.reset_index(drop=True, inplace=True)
+        gdf.set_index("row_id", inplace=True)
+        gdf.to_file(filepath)
+
+    def create_sample_polyfile(self, filepath, num_features=5, side_length=1.0):
+        # Generate a synthetic GeoDataFrame with square polygon geometries
+        # (the attribute lists below assume the default num_features=5)
+        half_side = side_length / 2
+        data = {
+            'geometry': [
+                Polygon([
+                    (x - half_side, y - half_side),  # Bottom-left
+                    (x + half_side, y - half_side),  # Bottom-right
+                    (x + half_side, y + half_side),  # Top-right
+                    (x - half_side, y + half_side),  # Top-left
+                    (x - half_side, y - half_side)   # Close the polygon
+                ])
+                for x, y in zip(range(num_features), range(num_features))
+            ],
+            'id': range(num_features),
+            'lake_id': [1,1,2,3,2],
+            'method': ['VIS','SAR','DEM','VIS','SAR'],
+            'source': ['S2','S1','ARCTICDEM','S2','S1'],
+            'startdate': '20170701',
+            'enddate': '20170831',
+            'subregion': ['SW','SW','NO','NW','NO'],
+        }
+        gdf = gpd.GeoDataFrame(data, crs="EPSG:3413")
+        gdf["row_id"] = gdf.index + 1
+        gdf.reset_index(drop=True, inplace=True)
+        gdf.set_index("row_id", inplace=True)
+        gdf.to_file(filepath)
+
     def test_convert(self):
-        '''Test vector to raster conversion'''
+        '''Test raster to vector conversion'''
         proj = 'EPSG:3413'
-        band_info = [{'b_number':1, 'method':'VIS', 'source':'S2'},
-                     {'b_number':2, 'method':'SAR', 'source':'S1'},
-                     {'b_number':3, 'method':'DEM', 'source':'ARCTICDEM'}]
-        start='20170701'
-        end='20170831'
-        infile = os.path.join(os.path.dirname(griml.__file__),'test/test_north_greenland.tif')
-        convert([infile], proj, band_info, start, end)
+        band_info = [
+            {'b_number': 1, 'method': 'VIS', 'source': 'S2'},
+            {'b_number': 2, 'method': 'SAR', 'source': 'S1'},
+            {'b_number': 3, 'method': 'DEM', 'source': 'ARCTICDEM'}
+        ]
+        start = '20170701'
+        end = '20170831'
+
+        # Create synthetic raster file
+        temp_raster_path = os.path.join(self.temp_dir.name, 'sample_raster.tif')
+        self.create_sample_raster(temp_raster_path)
+
+        # Run the conversion function with generated data
+        convert([temp_raster_path], proj, band_info, start, end)
 
     def test_filter(self):
-        '''Test vector filtering'''
-        infile1 = os.path.join(os.path.dirname(griml.__file__),'test/test_filter.shp')
-        infile2 = os.path.join(os.path.dirname(griml.__file__),'test/test_icemask.shp')
-        filter_vectors([infile1], infile2)
+        '''Test vector filtering'''
+        # Create synthetic shapefiles
+        temp_filter_path = os.path.join(self.temp_dir.name, 'sample_filter.shp')
+        temp_icemask_path = os.path.join(self.temp_dir.name, 'sample_icemask.shp')
+        self.create_sample_polyfile(temp_filter_path)
+        self.create_sample_polyfile(temp_icemask_path)
+
+        # Run the filter function with generated data
+        filter_vectors([temp_filter_path], temp_icemask_path)
 
     def test_merge(self):
         '''Test vector merging'''
-        infile1 = os.path.join(os.path.dirname(griml.__file__),'test/test_merge_1.shp')
-        infile2 = os.path.join(os.path.dirname(griml.__file__),'test/test_merge_2.shp')
-        merge_vectors([infile1,infile2])
+        # Create two synthetic shapefiles for merging
+        temp_merge_path1 = os.path.join(self.temp_dir.name, 'sample_merge_1.shp')
+        temp_merge_path2 = os.path.join(self.temp_dir.name, 'sample_merge_2.shp')
+        self.create_sample_polyfile(temp_merge_path1)
+        self.create_sample_polyfile(temp_merge_path2)
+
+        # Run the merge function with generated data
+        merge_vectors([temp_merge_path1, temp_merge_path2])
 
     def test_metadata(self):
         '''Test metadata population'''
-        infile1 = os.path.join(os.path.dirname(griml.__file__),'test/test_merge_2.shp')
-        infile2 = os.path.join(os.path.dirname(griml.__file__),'test/test_placenames.shp')
-        infile3 = os.path.join(os.path.dirname(griml.__file__),'test/greenland_basins_polarstereo.shp')
-        add_metadata(infile1, infile2, infile3)
+        # Create synthetic shapefiles for metadata function
+        temp_metadata_path1 = os.path.join(self.temp_dir.name, 'sample_metadata_1.shp')
+        temp_metadata_path2 = os.path.join(self.temp_dir.name, 'sample_metadata_2.shp')
+        temp_metadata_path3 = os.path.join(self.temp_dir.name, 'sample_metadata_3.shp')
+        self.create_sample_polyfile(temp_metadata_path1)
+        self.create_sample_pointfile(temp_metadata_path2)
+        self.create_sample_polyfile(temp_metadata_path3)
+
+        # Run the metadata function with generated data
+        add_metadata(temp_metadata_path1, temp_metadata_path2, temp_metadata_path3)
 
 if __name__ == "__main__":
     unittest.main()
+
diff --git a/test/__init__.py b/test/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/griml/test/greenland_basins_polarstereo.cpg b/test/greenland_basins_polarstereo.cpg
similarity index 100%
rename from src/griml/test/greenland_basins_polarstereo.cpg
rename to test/greenland_basins_polarstereo.cpg
diff --git a/src/griml/test/greenland_basins_polarstereo.dbf b/test/greenland_basins_polarstereo.dbf
similarity index 100%
rename from src/griml/test/greenland_basins_polarstereo.dbf
rename to test/greenland_basins_polarstereo.dbf
diff --git a/src/griml/test/greenland_basins_polarstereo.prj b/test/greenland_basins_polarstereo.prj
similarity index 100%
rename from src/griml/test/greenland_basins_polarstereo.prj
rename to test/greenland_basins_polarstereo.prj
diff --git a/src/griml/test/greenland_basins_polarstereo.shp b/test/greenland_basins_polarstereo.shp
similarity index 100%
rename from src/griml/test/greenland_basins_polarstereo.shp
rename to test/greenland_basins_polarstereo.shp
diff --git a/src/griml/test/greenland_basins_polarstereo.shx b/test/greenland_basins_polarstereo.shx
similarity index 100%
rename from src/griml/test/greenland_basins_polarstereo.shx
rename to test/greenland_basins_polarstereo.shx
diff --git a/test/process_cli.py b/test/process_cli.py
new file mode 100644
index 0000000..0616591
--- /dev/null
+++ b/test/process_cli.py
@@ -0,0 +1,134 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+@author: pho
+"""
+from griml.convert import convert
+from griml.filter import filter_vectors
+from griml.merge import merge_vectors
+from griml.metadata import add_metadata
+from pathlib import Path
+import glob, os
+from argparse import ArgumentParser
+
+def parse_griml_arguments():
+    parser = ArgumentParser(description="Full post-processing workflow for "+
+                            "creating ice marginal lake inventory")
+    parser.add_argument('-r', '--root_dir', type=str, required=True,
+                        help='Root directory to write files to')
+    parser.add_argument('-i', '--in_dir', type=str, required=True,
+                        help='Directory path to input raster files')
+    parser.add_argument('-y', '--year', type=str, required=True,
+                        help='Year of inventory')
+    parser.add_argument('-m', '--margin_file', type=str, required=True,
+                        help='File path to ice margin for spatial filtering')
+    parser.add_argument('-n', '--names_file', type=str, required=True,
+                        help='File path to placenames file for metadata population')
+    # Note: '-b' is used here as '-r' is already taken by --root_dir
+    parser.add_argument('-b', '--regions_file', type=str, required=True,
+                        help='File path to regions/drainage basin file for metadata population')
+    parser.add_argument('-p', '--proj', type=str, default='EPSG:3413',
+                        required=False, help='Projection (of input and output)')
+    parser.add_argument('-s', '--steps', type=str, default='1111',
+                        required=False, help='Define which steps to include in'+
+                        ' processing, where each value indicates: convert, '+
+                        'filter, merge, and metadata. If set to zero, the '+
+                        'step associated with that position is skipped')
+
+    args = parser.parse_args()
+    return args
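+
+# Example invocation (illustrative paths):
+#   python process_cli.py -r /data/out -i /data/rasters -y 2017 \
+#       -m ice_margin.shp -n placenames.shp -b basins.shp -s 1111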
+
+def get_step_flags(a):
+    '''Return step flags as booleans, treating "0" as skip'''
+    return a[0]!='0', a[1]!='0', a[2]!='0', a[3]!='0'
+
+
+def check_dir(d):
+    '''Check if directory exists and create it if it does not'''
+    if not os.path.exists(d):
+        os.mkdir(d)
+
+
+def griml():
+    '''Perform processing workflow'''
+    args = parse_griml_arguments()
+
+    s1, s2, s3, s4 = get_step_flags(args.steps)
+
+    print('Commencing post processing for inventory year ' + args.year)
+    print('Adopted projection: ' + args.proj)
+    print('Writing outputs to ' + args.root_dir)
+
+    root_dir = Path(args.root_dir)
+
+    # Convert to vectors
+    if s1:
+        print('Converting rasters to vectors...')
+
+        src=args.in_dir
+        dest = str(root_dir.joinpath('vectors'))
+        check_dir(dest)
+
+        print('Reading from ' + src)
+        print('Writing to ' + dest)
+
+        band_info = [{'b_number':1, 'method':'VIS', 'source':'S2'},
+                     {'b_number':2, 'method':'SAR', 'source':'S1'},
+                     {'b_number':3, 'method':'DEM', 'source':'ARCTICDEM'}]
+        start=args.year+'0701'
+        end=args.year+'0831'
+
+        infiles = list(glob.glob(src+'/*.tif'))
+
+        convert(infiles, args.proj, band_info, start, end, str(dest))
+
+    # Filter vectors by area and proximity to margin
+    if s2:
+        print('Filtering vectors...')
+
+        src = str(root_dir.joinpath('vectors'))
+        dest = str(root_dir.joinpath('filtered'))
+        check_dir(dest)
+
+        print('Reading from ' + src)
+        print('Writing to ' + dest)
+
+        # margin_buff = gpd.read_file(infile_margin)
+        # margin_buff = margin.buffer(500)
+        # margin_buff = gpd.GeoDataFrame(geometry=margin_buff, crs=margin.crs)
+
+        infiles = list(glob.glob(src+'/*.shp'))
+
+        filter_vectors(infiles, args.margin_file, dest)
+
+    # Merge vectors
+    if s3:
+        print('Merging vectors...')
+
+        src = str(root_dir.joinpath('filtered'))
+        dest = root_dir.joinpath('merged/'+args.year+'_merged.shp')
+        check_dir(dest.parent)
+
+        print('Reading from ' + src)
+        print('Writing to ' + str(dest))
+
+        infiles = list(glob.glob(src+'/*.shp'))
+
+        merge_vectors(infiles, str(dest))
+
+    # Add metadata
+    if s4:
+        print('Adding metadata...')
+
+        src = str(root_dir.joinpath('merged/'+args.year+'_merged.shp'))
+        dest = root_dir.joinpath('metadata/'+args.year+'_metadata.shp')
+        check_dir(dest.parent)
+
+        print('Reading from ' + src)
+        print('Writing to ' + str(dest))
+
+        add_metadata(src, args.names_file, args.regions_file, str(dest))
+
+    print('Finished')
+
+if __name__ == '__main__':
+    griml()
diff --git a/test/process_example.py b/test/process_example.py
new file mode 100644
index 0000000..a558f25
--- /dev/null
+++ b/test/process_example.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+@author: pho
+"""
+from griml.convert import convert
+from griml.filter import filter_vectors
+from griml.merge import merge_vectors
+from griml.metadata import add_metadata
+from pathlib import Path
+import glob
+
+root_dir = Path('/home/pho/python_workspace/GrIML/other/')
+year='2016'
+
+print('Commencing post processing for inventory year ' + year)
+
+# Convert to vectors
+print('Converting rasters to vectors...')
+
+src = str(root_dir.joinpath('iml_2016-2023/rasters/'+year+'_iml'))
+dest = str(root_dir.joinpath('iml_2016-2023/vectors/'+year))
+
+print('Reading from ' + src)
+print('Writing to ' + dest)
+
+proj = 'EPSG:3413'
+band_info = [{'b_number':1, 'method':'VIS', 'source':'S2'},
+             {'b_number':2, 'method':'SAR', 'source':'S1'},
+             {'b_number':3, 'method':'DEM', 'source':'ARCTICDEM'}]
+start=year+'0701'
+end=year+'0831'
+
+infiles = list(glob.glob(src+'/*.tif'))
+
+convert(infiles, proj, band_info, start, end, str(dest))
+
+
+# Filter vectors by area and proximity to margin
+print('Filtering vectors...')
+
+src = dest
+dest = str(root_dir.joinpath('iml_2016-2023/filtered/'+year))
+infile_margin = str(root_dir.joinpath('datasets/ice_margin/gimp_icemask_line_polstereo_simple_buffer.shp'))
+
+print('Reading from ' + src)
+print('Writing to ' + dest)
+
+# margin_buff = gpd.read_file(infile_margin)
+# margin_buff = margin.buffer(500)
+# margin_buff = gpd.GeoDataFrame(geometry=margin_buff, crs=margin.crs)
+
+infiles = list(glob.glob(src+'/*.shp'))
+
+filter_vectors(infiles, infile_margin, dest)
+
+
+# Merge vectors
+print('Merging vectors...')
+
+src = dest
+dest = str(root_dir.joinpath('iml_2016-2023/merged/'+year+'_merged.shp'))
+
+print('Reading from ' + src)
+print('Writing to ' + dest)
+
+infiles = list(glob.glob(src+'/*.shp'))
+
+merge_vectors(infiles, dest)
+
+
+# Add metadata
+print('Adding metadata...')
+
+src = dest
+dest = str(root_dir.joinpath('iml_2016-2023/metadata/'+year+'_metadata.shp'))
+
+print('Reading from ' + src)
+print('Writing to ' + dest)
+
+infile_names = str(root_dir.joinpath('datasets/placenames/oqaasileriffik_placenames.shp'))
+infile_basins = str(root_dir.joinpath('datasets/drainage_basins/greenland_basins_polarstereo.shp'))
+
+add_metadata(src, infile_names, infile_basins, dest)
+
+print('Finished')
diff --git a/test/test.py b/test/test.py
new file mode 100644
index 0000000..24346c3
--- /dev/null
+++ b/test/test.py
@@ -0,0 +1,43 @@
+from griml.convert.convert import convert
+from griml.filter.filter_vectors import filter_vectors
+from griml.merge.merge_vectors import merge_vectors
+from griml.metadata.add_metadata import add_metadata
+import unittest, os
+
+# The test data now lives alongside this file in the top-level test/ directory
+TEST_DIR = os.path.dirname(os.path.abspath(__file__))
+
+class TestGrIML(unittest.TestCase):
+    '''Unittest for the GrIML post-processing workflow'''
+
+    def test_convert(self):
+        '''Test raster to vector conversion'''
+        proj = 'EPSG:3413'
+        band_info = [{'b_number':1, 'method':'VIS', 'source':'S2'},
+                     {'b_number':2, 'method':'SAR', 'source':'S1'},
+                     {'b_number':3, 'method':'DEM', 'source':'ARCTICDEM'}]
+        start='20170701'
+        end='20170831'
+        infile = os.path.join(TEST_DIR, 'test_north_greenland.tif')
+        convert([infile], proj, band_info, start, end)
+
+    def test_filter(self):
+        '''Test vector filtering'''
+        infile1 = os.path.join(TEST_DIR, 'test_filter.shp')
+        infile2 = os.path.join(TEST_DIR, 'test_icemask.shp')
+        filter_vectors([infile1], infile2)
+
+    def test_merge(self):
+        '''Test vector merging'''
+        infile1 = os.path.join(TEST_DIR, 'test_merge_1.shp')
+        infile2 = os.path.join(TEST_DIR, 'test_merge_2.shp')
+        merge_vectors([infile1,infile2])
+
+    def test_metadata(self):
+        '''Test metadata population'''
+        infile1 = os.path.join(TEST_DIR, 'test_merge_2.shp')
+        infile2 = os.path.join(TEST_DIR, 'test_placenames.shp')
+        infile3 = os.path.join(TEST_DIR, 'greenland_basins_polarstereo.shp')
+        add_metadata(infile1, infile2, infile3)
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/src/griml/test/test_filter.cpg b/test/test_filter.cpg
similarity index 100%
rename from src/griml/test/test_filter.cpg
rename to test/test_filter.cpg
diff --git a/src/griml/test/test_filter.dbf b/test/test_filter.dbf
similarity index 100%
rename from src/griml/test/test_filter.dbf
rename to test/test_filter.dbf
diff --git a/src/griml/test/test_filter.prj b/test/test_filter.prj
similarity index 100%
rename from src/griml/test/test_filter.prj
rename to test/test_filter.prj
diff --git a/src/griml/test/test_filter.qix b/test/test_filter.qix
similarity index 100%
rename from src/griml/test/test_filter.qix
rename to test/test_filter.qix
diff --git a/src/griml/test/test_filter.shp b/test/test_filter.shp
similarity index 100%
rename from src/griml/test/test_filter.shp
rename to test/test_filter.shp
diff --git a/src/griml/test/test_filter.shx b/test/test_filter.shx
similarity index 100%
rename from src/griml/test/test_filter.shx
rename to test/test_filter.shx
diff --git a/src/griml/test/test_icemask.cpg b/test/test_icemask.cpg
similarity index 100%
rename from src/griml/test/test_icemask.cpg
rename to test/test_icemask.cpg
diff --git a/src/griml/test/test_icemask.dbf b/test/test_icemask.dbf
similarity index 100%
rename from src/griml/test/test_icemask.dbf
rename to test/test_icemask.dbf
diff --git a/src/griml/test/test_icemask.prj b/test/test_icemask.prj
similarity index 100%
rename from 
src/griml/test/test_icemask.prj rename to test/test_icemask.prj diff --git a/src/griml/test/test_icemask.shp b/test/test_icemask.shp similarity index 100% rename from src/griml/test/test_icemask.shp rename to test/test_icemask.shp diff --git a/src/griml/test/test_icemask.shx b/test/test_icemask.shx similarity index 100% rename from src/griml/test/test_icemask.shx rename to test/test_icemask.shx diff --git a/src/griml/test/test_merge_1.cpg b/test/test_merge_1.cpg similarity index 100% rename from src/griml/test/test_merge_1.cpg rename to test/test_merge_1.cpg diff --git a/src/griml/test/test_merge_1.dbf b/test/test_merge_1.dbf similarity index 100% rename from src/griml/test/test_merge_1.dbf rename to test/test_merge_1.dbf diff --git a/src/griml/test/test_merge_1.prj b/test/test_merge_1.prj similarity index 100% rename from src/griml/test/test_merge_1.prj rename to test/test_merge_1.prj diff --git a/src/griml/test/test_merge_1.qix b/test/test_merge_1.qix similarity index 100% rename from src/griml/test/test_merge_1.qix rename to test/test_merge_1.qix diff --git a/src/griml/test/test_merge_1.shp b/test/test_merge_1.shp similarity index 100% rename from src/griml/test/test_merge_1.shp rename to test/test_merge_1.shp diff --git a/src/griml/test/test_merge_1.shx b/test/test_merge_1.shx similarity index 100% rename from src/griml/test/test_merge_1.shx rename to test/test_merge_1.shx diff --git a/src/griml/test/test_merge_2.cpg b/test/test_merge_2.cpg similarity index 100% rename from src/griml/test/test_merge_2.cpg rename to test/test_merge_2.cpg diff --git a/src/griml/test/test_merge_2.dbf b/test/test_merge_2.dbf similarity index 100% rename from src/griml/test/test_merge_2.dbf rename to test/test_merge_2.dbf diff --git a/src/griml/test/test_merge_2.prj b/test/test_merge_2.prj similarity index 100% rename from src/griml/test/test_merge_2.prj rename to test/test_merge_2.prj diff --git a/src/griml/test/test_merge_2.qix b/test/test_merge_2.qix similarity index 100% rename from src/griml/test/test_merge_2.qix rename to test/test_merge_2.qix diff --git a/src/griml/test/test_merge_2.shp b/test/test_merge_2.shp similarity index 100% rename from src/griml/test/test_merge_2.shp rename to test/test_merge_2.shp diff --git a/src/griml/test/test_merge_2.shx b/test/test_merge_2.shx similarity index 100% rename from src/griml/test/test_merge_2.shx rename to test/test_merge_2.shx diff --git a/src/griml/test/test_north_greenland.tif b/test/test_north_greenland.tif similarity index 100% rename from src/griml/test/test_north_greenland.tif rename to test/test_north_greenland.tif diff --git a/src/griml/test/test_north_greenland.tif.aux.xml b/test/test_north_greenland.tif.aux.xml similarity index 100% rename from src/griml/test/test_north_greenland.tif.aux.xml rename to test/test_north_greenland.tif.aux.xml diff --git a/src/griml/test/test_placenames.cpg b/test/test_placenames.cpg similarity index 100% rename from src/griml/test/test_placenames.cpg rename to test/test_placenames.cpg diff --git a/src/griml/test/test_placenames.dbf b/test/test_placenames.dbf similarity index 100% rename from src/griml/test/test_placenames.dbf rename to test/test_placenames.dbf diff --git a/src/griml/test/test_placenames.prj b/test/test_placenames.prj similarity index 100% rename from src/griml/test/test_placenames.prj rename to test/test_placenames.prj diff --git a/src/griml/test/test_placenames.qmd b/test/test_placenames.qmd similarity index 100% rename from src/griml/test/test_placenames.qmd rename to test/test_placenames.qmd diff 
--git a/src/griml/test/test_placenames.shp b/test/test_placenames.shp similarity index 100% rename from src/griml/test/test_placenames.shp rename to test/test_placenames.shp diff --git a/src/griml/test/test_placenames.shx b/test/test_placenames.shx similarity index 100% rename from src/griml/test/test_placenames.shx rename to test/test_placenames.shx