"""Generate an MBTiles of contours from Mapzen / Amazon elevation tiles
(https://registry.opendata.aws/terrain-tiles/)"""
import argparse
import asyncio
import json
import os
from multiprocessing import Pool
from subprocess import CalledProcessError, run

import aiofiles
import httpx
import mercantile
from fiona.transform import transform
from supermercado import burntiles, super_utils
from tqdm import tqdm

from database import make_database, DBNAME, SRID
from sources import util

TABLE_NAME = "contours"
CACHE_DIR = "./elevation-tiles"


# pylint: disable=invalid-name
def tile_file_path(x, y, z, ext="tif"):
"""Return tile file path for coordinates"""
return f"{CACHE_DIR}/{z}/{x}/{y}.{ext}"
# pylint: enable=invalid-name


def tiles_from_bbox(swlon, swlat, nelon, nelat, zooms):
"""Tiles from a bounding box specified by southwest and northeast coordinates
and a list of zooms
"""
return list(
mercantile.tiles(swlon, swlat, nelon, nelat, zooms, truncate=False)
)


def tiles_from_geojson(geojson, zooms):
"""Tiles from a GeoJSON feature passed in as a dict"""
tiles = []
if geojson["type"] == "FeatureCollection":
features = geojson["features"]
    # supermercado expects features, so if this is just a geometry, wrap it
    # in a Feature
elif geojson["type"] != "Feature":
features = [{"type": "Feature", "geometry": geojson}]
else:
features = [geojson]
for zoom in zooms:
# So much more complicated than it needs to be. burntiles.burn only
# accepts a list of polygons, not features, not multipolygons, just
# polygons, so super_utils.filter_features gets the polygons out of
# diverse GeoJSON-y dicts. It also returns a list of numpy.ndarray
# objects, which are not lists and need to be turned into lists with
# tolist()
tiles += [
ftiles.tolist()
for ftiles in burntiles.burn(
list(super_utils.filter_features(features)), zoom
)
]
return [mercantile.Tile(*tile) for tile in tiles]
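# Illustrative usage (hypothetical geometry dict): a bare geometry is wrapped
# in a Feature before burning, e.g.
#   tiles_from_geojson({"type": "Polygon", "coordinates": [...]}, [8])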


async def cache_tile(tile, client, clean=False, max_retries=3):
"""Caches a tile"""
tile_path = f"{tile.z}/{tile.x}/{tile.y}.tif"
url = f"https://s3.amazonaws.com/elevation-tiles-prod/geotiff/{tile_path}"
    dir_path = f"{CACHE_DIR}/{tile.z}/{tile.x}"
file_path = tile_file_path(tile.x, tile.y, tile.z)
os.makedirs(dir_path, exist_ok=True)
if os.path.exists(file_path):
if clean:
os.remove(file_path)
else:
return file_path
# TODO handle errors, client abort, server abort
    for try_num in range(1, max_retries + 1):
        try:
            download = await client.get(url)
            if download.status_code != 200:
                util.log(
                    f"Request for {url} failed with {download.status_code}, "
                    "skipping..."
                )
                return None
            async with aiofiles.open(file_path, 'wb') as outfile:
                async for chunk in download.aiter_bytes():
                    await outfile.write(chunk)
            return file_path
        except (asyncio.exceptions.TimeoutError, httpx.ConnectTimeout):
            if try_num >= max_retries:
                util.log(
                    f"Request for {url} timed out {max_retries} times, "
                    "skipping..."
                )
                return None
            # Cubic backoff: sleep 1s after the first timeout, 8s after the
            # second, and so on
            await asyncio.sleep(try_num ** 3)


# Cache DEM tiles using asyncio for this presumably IO-bound process
async def cache_tiles(tiles, clean=False):
"""Cache multiple tiles"""
async with httpx.AsyncClient() as client:
# using as_completed with tqdm (https://stackoverflow.com/a/37901797)
tasks = [cache_tile(tile, client, clean=clean) for tile in tiles]
pbar = tqdm(
asyncio.as_completed(tasks),
total=len(tasks),
desc="Downloading DEM TIFs",
unit=" tiles"
)
for task in pbar:
await task
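# Illustrative standalone usage (hypothetical tile coordinates):
#   asyncio.run(cache_tiles([mercantile.Tile(x=654, y=1582, z=12)]))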


def make_contours_for_tile(tile, clean=False):
    """Make contours for a single tile and load them into PostGIS"""
merge_contours_path = tile_file_path(
tile.x,
tile.y,
tile.z,
ext="merge-contours.shp"
)
if os.path.exists(merge_contours_path):
if clean:
os.remove(merge_contours_path)
else:
return
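    # Contour interval scales with zoom: 25 m at z10+, 100 m at z8-9,
    # 1000 m below that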
interval = 1000
if tile.z >= 10:
interval = 25
elif tile.z >= 8:
interval = 100
    # Merge this tile with its 8 surrounding neighbors so we don't get weird
    # edge effects
merge_coords = [
[tile.x - 1, tile.y - 1], [tile.x + 0, tile.y - 1], [tile.x + 1, tile.y - 1],
[tile.x - 1, tile.y + 0], [tile.x + 0, tile.y + 0], [tile.x + 1, tile.y + 0],
[tile.x - 1, tile.y + 1], [tile.x + 0, tile.y + 1], [tile.x + 1, tile.y + 1]
]
merge_file_paths = [tile_file_path(xy[0], xy[1], tile.z) for xy in merge_coords]
merge_file_paths = [
path for path in merge_file_paths if os.path.exists(path)
]
merge_path = tile_file_path(tile.x, tile.y, tile.z, "merge.tif")
run(["gdal_merge.py", "-q", "-o", merge_path, *merge_file_paths], check=True)
run([
"gdal_contour", "-q", "-i", str(
interval), "-a", "elevation", merge_path,
merge_contours_path
], check=True)
# Get the bounding box of this tile in lat/lon, project into the source
# coordinate system (Pseudo Mercator) so ogr2ogr can clip it before
# importing into PostGIS
bounds = mercantile.bounds(tile.x, tile.y, tile.z)
bounds_x, bounds_y = transform(
'EPSG:4326',
'EPSG:3857',
[bounds.west, bounds.east],
[bounds.south, bounds.north]
)
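    # bounds_x and bounds_y now hold [west, east] and [south, north] in
    # EPSG:3857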
# Do a bunch of stuff, including clipping the lines back to the original
# tile boundaries, projecting them into 4326, and loading them into a
# PostGIS table
try:
run([
"ogr2ogr",
"-append",
"-skipfailures",
"-nln", TABLE_NAME,
"-nlt", "MULTILINESTRING",
"-clipsrc", *[str(c) for c in [bounds_x[0], bounds_y[0], bounds_x[1], bounds_y[1]]],
"-f", "PostgreSQL", f"PG:dbname={DBNAME}",
"-t_srs", f"EPSG:{SRID}",
"--config", "PG_USE_COPY", "YES",
merge_contours_path
], check=True)
except CalledProcessError as ogrerror:
        # Sometimes the ogr2ogr command produces a GeometryCollection
        # instead of a MultiLineString, which causes an error when it tries
        # to insert into the MULTILINESTRING contours table. Might have
        # something to do with -clipsrc leaving stray points. Unfortunately
        # the error doesn't seem to carry any output we can use to confirm
        # it's that error.
util.log(f"Could not extract contours for {merge_contours_path}: {ogrerror}")


def make_contours_table(tiles, procs=2):
"""
Make contours from DEM files using a multiprocessing pool for this
presumably CPU-bound process
"""
make_database()
util.run_sql(f"DROP TABLE IF EXISTS {TABLE_NAME}")
with Pool(processes=procs) as pool:
pbar = tqdm(
pool.imap_unordered(make_contours_for_tile, tiles),
desc="Converting to contours & importing",
unit=" tiles",
total=len(tiles)
)
for _ in pbar:
pass


async def make_contours_mbtiles(
zoom,
swlon=None,
swlat=None,
nelon=None,
nelat=None,
geojson=None,
mbtiles_zoom=None,
clean=False, procs=2, path="./contours.mbtiles"
):
"""Make the mbtiles for contours"""
make_database()
zooms = [zoom]
if not mbtiles_zoom:
mbtiles_zoom = zoom
print("Clearing out existing data...")
if os.path.exists(path):
os.remove(path)
if os.path.isdir(CACHE_DIR):
util.call_cmd(
["find", CACHE_DIR, "-type", "f", "-name", "*.merge*", "-delete"]
)
else:
os.mkdir(CACHE_DIR)
tiles = None
if geojson:
tiles = tiles_from_geojson(geojson, zooms)
    # Check against None explicitly so coordinates of 0.0 still count
    elif None not in (swlon, swlat, nelon, nelat):
tiles = tiles_from_bbox(swlon, swlat, nelon, nelat, zooms)
if tiles is None:
raise ValueError("You must specify a bounding box or a GeoJSON feature")
await cache_tiles(tiles, clean=clean)
make_contours_table(tiles, procs=procs)
# TODO make mbtiles_zoom into mbtiles_zooms which is a mapping between the
# desired zooms in the mbtiles and what zoom-level table in the database to
# fill it with (i.e. what contour resolution)
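    # Export the PostGIS contours table to vector MBTiles; the MINZOOM and
    # MAXZOOM dataset-creation options pin the tileset to a single zoom level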
cmd = [
"ogr2ogr",
"-f", "MBTILES",
path,
f"PG:dbname={DBNAME}",
TABLE_NAME,
"-dsco", f"MINZOOM={mbtiles_zoom}",
"-dsco", f"MAXZOOM={mbtiles_zoom}",
"-dsco", "DESCRIPTION=\"Elevation contours, 25m interval\""
]
util.call_cmd(cmd)
return path


def make_contours(
zoom,
swlon=None,
swlat=None,
nelon=None,
nelat=None,
geojson=None,
mbtiles_zoom=None,
clean=False,
procs=2,
path="./contours.mbtiles"
):
"""Seemingly useless method so we can export a synchronous method that calls async code"""
mbtiles_path = asyncio.run(
make_contours_mbtiles(
zoom,
swlon=swlon,
swlat=swlat,
nelon=nelon,
nelat=nelat,
geojson=geojson,
mbtiles_zoom=mbtiles_zoom,
clean=clean,
procs=procs,
path=path
)
)
return mbtiles_path
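# Illustrative programmatic usage (hypothetical bounding box):
#   make_contours(12, swlon=-122.6, swlat=37.6, nelon=-122.3, nelat=37.9)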


if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Make an MBTiles of contours given a bounding box and "
"zoom range"
)
parser.add_argument("zoom", type=int, help="Single zoom or minimum zoom")
parser.add_argument("--swlon", type=float, help="Bounding box swlon")
parser.add_argument("--swlat", type=float, help="Bounding box swlat")
parser.add_argument("--nelon", type=float, help="Bounding box nelon")
parser.add_argument("--nelat", type=float, help="Bounding box nelat")
parser.add_argument(
"-f", "--geojson", type=str,
help="Path to a file with a GeoJSON feature defining the target area"
)
    parser.add_argument(
        "--procs", type=int, default=2,
        help="Number of processes to use for multiprocessing"
    )
parser.add_argument(
"--clean", action="store_true",
help="Clean cached data before running"
)
args = parser.parse_args()
min_zoom = args.zoom
path = None
if args.geojson:
        with open(args.geojson, encoding="utf-8") as f:
            geojson = json.load(f)
path = make_contours(
min_zoom, geojson=geojson, clean=args.clean, procs=args.procs)
else:
        path = make_contours(
            min_zoom,
            swlon=args.swlon, swlat=args.swlat,
            nelon=args.nelon, nelat=args.nelat,
            clean=args.clean, procs=args.procs
        )
if path:
print(f"MBTiles created at {path}")
else:
print("Failed to generate MBTiles")