diff --git a/batch-setup/make_meta_tiles.py b/batch-setup/make_meta_tiles.py
index 323156a..bbaf19e 100644
--- a/batch-setup/make_meta_tiles.py
+++ b/batch-setup/make_meta_tiles.py
@@ -65,7 +65,7 @@ def clear_old_missing_logs(self):
             Prefix=self.dst_date_prefix,
         )
 
-        print("Listing logs to delete.")
+        print("[make_meta_tiles] Listing logs to delete.")
         keys = []
         for page in page_iter:
             if page['KeyCount'] == 0:
@@ -77,7 +77,7 @@ def clear_old_missing_logs(self):
         # from AWS documentation, we can delete up to 1000 at a time.
         max_keys_per_chunk = 1000
 
-        print("Deleting old logs.")
+        print("[make_meta_tiles] Deleting old logs.")
        for idx in range(0, len(keys), max_keys_per_chunk):
             chunk = keys[idx:idx+max_keys_per_chunk]
             response = self.s3.delete_objects(
@@ -89,15 +89,17 @@ def clear_old_missing_logs(self):
 
             errors = response.get('Errors')
             if errors:
-                raise RuntimeError("Unable to delete some files: %r" % errors)
+                raise RuntimeError("[make_meta_tiles] Unable to delete some "
+                                   "files: %r" % errors)
 
     def run_batch_job(self):
         # before we start, delete any data that exists under this date prefix
-        print("Clearing out old missing tile logs")
+        print("[make_meta_tiles] Clearing out old missing tile logs")
         self.clear_old_missing_logs()
 
         # enqueue the jobs to find all the existing meta tiles.
-        print("Running Batch job to enumerate existing meta tiles.")
+        print("[make_meta_tiles] Running Batch job to enumerate existing "
+              "meta tiles.")
         run_go(
             'tz-batch-submit-missing-meta-tiles',
             '-job-queue', self.job_queue_name,
@@ -110,14 +112,14 @@ def run_batch_job(self):
             '-key-format-type', self.key_format_type,
         )
 
-        print("Waiting for jobs to finish...")
+        print("[make_meta_tiles] Waiting for jobs to finish...")
         wait_for_jobs_to_finish(self.job_queue_name)
 
     def read_metas_to_file(self, filename, present=False, compress=False):
         if present:
-            print("Reading existing meta tiles")
+            print("[make_meta_tiles] Reading existing meta tiles")
         else:
-            print("Reading missing meta tiles")
+            print("[make_meta_tiles] Reading missing meta tiles")
 
         run_go('tz-missing-meta-tiles-read',
                '-bucket', self.missing_bucket,
@@ -149,7 +151,7 @@ def missing_tiles_split(self, split_zoom, zoom_max, big_jobs):
 
         self.read_metas_to_file(missing_meta_file, compress=True)
 
-        print("Splitting into high and low zoom lists")
+        print("[make_meta_tiles] Splitting into high and low zoom lists")
 
         # contains zooms 0 until group zoom. the jobs between the group
         # zoom and RAWR zoom are merged into the parent at group zoom.
@@ -291,7 +293,8 @@ def _big_jobs(rawr_bucket, prefix, key_format_type, rawr_zoom, group_zoom,
     return big_jobs
 
 
-def enqueue_tiles(config_file, tile_list_file, check_metatile_exists, mem_multiplier=1.0, mem_max=32 * 1024):
+def enqueue_tiles(config_file, tile_list_file, check_metatile_exists,
+                  mem_multiplier=1.0, mem_max=32 * 1024):
     from tilequeue.command import make_config_from_argparse
     from tilequeue.command import tilequeue_batch_enqueue
     from make_rawr_tiles import BatchEnqueueArgs
@@ -307,7 +310,8 @@ def enqueue_tiles(config_file, tile_list_file, check_metatile_exists, mem_multip
 
 
 def update_memory_request(cfg, mem_multiplier, mem_max):
-    cfg.yml["batch"]["memory"] = int(min(cfg.yml["batch"]["memory"] * mem_multiplier, mem_max))
+    cfg.yml["batch"]["memory"] = int(min(cfg.yml["batch"]["memory"] *
+                                         mem_multiplier, mem_max))
 
 
 # adaptor class for MissingTiles to see just the high zoom parts, this is used
@@ -336,7 +340,8 @@ def missing_file(self, missing):
 
 # certain number of retries.
 class TileRenderer(object):
-    def __init__(self, tile_finder, big_jobs, split_zoom, zoom_max, allowed_missing_tiles=0):
+    def __init__(self, tile_finder, big_jobs, split_zoom, zoom_max,
+                 allowed_missing_tiles=0):
         self.tile_finder = tile_finder
         self.big_jobs = big_jobs
         self.split_zoom = split_zoom
@@ -358,7 +363,8 @@ def render(self, num_retries, lense):
 
             if count <= self.allowed_missing_tiles:
                 sample = head_lines(missing_tile_file, 10)
-                print("All %s done with %d missing tiles, %d allowed. e.g. %s" %
+                print("[make_meta_tiles] All %s done with %d missing "
+                      "tiles, %d allowed. e.g. %s" %
                       (lense.description, count,
                        self.allowed_missing_tiles, ', '.join(sample)))
                 break
@@ -368,7 +374,7 @@ def render(self, num_retries, lense):
 
             # enqueue jobs for missing tiles
             if count > 0:
                 sample = head_lines(missing_tile_file, 10)
-                print("Enqueueing %d %s tiles (e.g. %s)" %
+                print("[make_meta_tiles] Enqueueing %d %s tiles (e.g. %s)" %
                       (count, lense.description, ', '.join(sample)))
                 enqueue_tiles(lense.config, missing_tile_file,
@@ -380,8 +386,10 @@ def render(self, num_retries, lense):
         count = wc_line(missing_tile_file)
         sample = head_lines(missing_tile_file, 10)
         raise RuntimeError(
-            "FAILED! %d %s still missing after %d tries (e.g. %s)"
-            % (count, lense.description, num_retries, ', '.join(sample)))
+            "[make_meta_tiles] FAILED! %d %s still missing after %d "
+            "tries (e.g. %s)"
+            % (count, lense.description, num_retries,
+               ', '.join(sample)))
 
 
 if __name__ == '__main__':
@@ -441,7 +449,8 @@ def render(self, num_retries, lense):
     region = args.region or os.environ.get('AWS_DEFAULT_REGION')
     if region is None:
         import sys
-        print("ERROR: Need environment variable AWS_DEFAULT_REGION to be set.")
+        print("[make_meta_tiles] ERROR: Need environment variable "
+              "AWS_DEFAULT_REGION to be set.")
         sys.exit(1)
 
     # check that metatile_size is within a sensible range
diff --git a/batch-setup/make_rawr_tiles.py b/batch-setup/make_rawr_tiles.py
index 29fca91..410e284 100644
--- a/batch-setup/make_rawr_tiles.py
+++ b/batch-setup/make_rawr_tiles.py
@@ -66,7 +66,8 @@ def missing_jobs(missing_bucket, rawr_bucket, date_prefix, region, config,
                           config, tile_zoom)
     jobs = set(coord.zoomTo(job_zoom).container() for coord in tiles)
 
-    print("Missing %d tiles (%d jobs)" % (len(tiles), len(jobs)))
+    print("[make_rawr_tiles] Missing %d tiles (%d jobs)" %
+          (len(tiles), len(jobs)))
 
     tmpdir = tempfile.mkdtemp()
     try:
@@ -145,7 +146,7 @@ def wait_for_jobs_to_finish(job_queue, wait_time=300):
             print("[%s] Still have jobs left in queue." % (time.ctime()))
             time.sleep(wait_time)
             break
-    print("All jobs finished (either SUCCEEDED or FAILED)")
+    print("[make_rawr_tiles] All jobs finished (either SUCCEEDED or FAILED)")
 
 
 def make_rawr_tiles(rawr_config_file, missing_config_file, missing_bucket,
@@ -173,7 +174,8 @@ def make_rawr_tiles(rawr_config_file, missing_config_file, missing_bucket,
         ) as missing_file:
             num_missing = wc_line(missing_file)
             if num_missing == 0:
-                print("Successfully generated all the RAWR tiles after "
+                print("[make_rawr_tiles] Successfully generated all the RAWR "
+                      "tiles after "
                       "%d re-enqueues!" % (attempt))
                 return
 
@@ -186,7 +188,8 @@ def make_rawr_tiles(rawr_config_file, missing_config_file, missing_bucket,
 
     tiles = missing_tiles(missing_bucket, rawr_bucket, date_prefix, region,
                           key_format_type, config, tile_zoom)
-    print("Ran %d times, but still have %d missing tiles. Good luck!"
+    print("[make_rawr_tiles] Ran %d times, but still have %d missing tiles. "
+          "Good luck!"
           % (retry_attempts, len(tiles)))
@@ -226,7 +229,8 @@ def make_rawr_tiles(rawr_config_file, missing_config_file, missing_bucket,
     region = args.region or os.environ.get('AWS_DEFAULT_REGION')
     if region is None:
         import sys
-        print("ERROR: Need environment variable AWS_DEFAULT_REGION to be set.")
+        print("[make_rawr_tiles] ERROR: Need environment variable "
+              "AWS_DEFAULT_REGION to be set.")
         sys.exit(1)
 
     make_rawr_tiles(args.config, args.missing_config, args.missing_bucket,
diff --git a/batch-setup/make_tiles.py b/batch-setup/make_tiles.py
index 4edbc6a..7bb219b 100644
--- a/batch-setup/make_tiles.py
+++ b/batch-setup/make_tiles.py
@@ -84,25 +84,29 @@ def _chr_range(a, b):
 else:
     meta_buckets = [args.meta_bucket]
 
-assert meta_buckets, "Must configure at least one meta tile storage bucket."
+assert meta_buckets, "[make_tiles] Must configure at least one meta tile " \
+    "storage bucket."
 
 # check that bucket names look like valid bucket names
 assert _looks_like_an_s3_bucket_name(args.rawr_bucket), \
-    "RAWR bucket name %r doesn't look like an S3 bucket name." \
+    "[make_tiles] RAWR bucket name %r doesn't look like an S3 bucket name." \
     % (args.rawr_bucket,)
 
 if args.missing_bucket is not None:
     assert _looks_like_an_s3_bucket_name(args.missing_bucket), \
-        "missing bucket name %r doesn't look like an S3 bucket name." \
+        "[make_tiles] missing bucket name %r doesn't look like an S3 " \
+        "bucket name." \
         % (args.missing_bucket,)
 for bucket in meta_buckets:
     assert _looks_like_an_s3_bucket_name(bucket), \
-        "meta bucket name %r doesn't look like an S3 bucket name." % (bucket,)
+        "[make_tiles] meta bucket name %r doesn't look like an S3 " \
+        "bucket name." % (bucket,)
 
 region = args.region or os.environ.get('AWS_DEFAULT_REGION')
 if region is None:
     import sys
-    print("ERROR: Need environment variable AWS_DEFAULT_REGION to be set.")
+    print("[make_tiles] ERROR: Need environment variable AWS_DEFAULT_REGION "
+          "to be set.")
     sys.exit(1)
 
 # unpack overrides into a dict, so it's easier to work with
diff --git a/import/import.py b/import/import.py
index 04dd852..c6e033c 100644
--- a/import/import.py
+++ b/import/import.py
@@ -75,14 +75,15 @@ def assert_run_id_format(run_id):
 
     assert args.run_id, '--planet-url requires --run-id'
     run_id = args.run_id
 
-    print("Downloading planet from %s" % planet_url)
+    print("[import] Downloading planet from %s" % planet_url)
 
     # set to empty string so it doesn't get serialized as 'None'
     planet_md5_url = args.planet_md5_url or ""
 else:
     if args.date is None:
         planet_date = osm.latest_planet_date()
-        print("Latest planet date is: %s" % planet_date.strftime('%Y-%m-%d'))
+        print("[import] Latest planet date is: %s" %
+              planet_date.strftime('%Y-%m-%d'))
     else:
         planet_date = datetime.datetime.strptime(args.date, '%Y-%m-%d').date()
@@ -116,7 +117,7 @@ def assert_run_id_format(run_id):
     ip_addr = requests.get(
         'http://169.254.169.254/latest/meta-data/public-ipv4').text
 else:
-    assert 0, '--find-ip-address '
+    assert 0, '[import] --find-ip-address '
 
 
 osm2pgsql.ensure_import(