diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d87a7d9..acdbc9d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -42,4 +42,4 @@ repos: args: [--fix] # Run the formatter. - id: ruff-format - args: ["--config", "format.quote-style = 'single'"] \ No newline at end of file + args: ['--config', "format.quote-style = 'single'"] diff --git a/pygeoapi_plugins/__init__.py b/pygeoapi_plugins/__init__.py index fa6c782..3df9165 100755 --- a/pygeoapi_plugins/__init__.py +++ b/pygeoapi_plugins/__init__.py @@ -28,4 +28,4 @@ # # ================================================================= -__version__ = "0.1.0" +__version__ = '0.1.0' diff --git a/pygeoapi_plugins/formatter/xml.py b/pygeoapi_plugins/formatter/xml.py index 51adb10..20a07d4 100644 --- a/pygeoapi_plugins/formatter/xml.py +++ b/pygeoapi_plugins/formatter/xml.py @@ -61,9 +61,9 @@ def __init__(self, formatter_def: dict): """ geom = False - self.uri_field = formatter_def.get("uri_field") - super().__init__({"name": "xml", "geom": geom}) - self.mimetype = "application/xml; charset=utf-8" + self.uri_field = formatter_def.get('uri_field') + super().__init__({'name': 'xml', 'geom': geom}) + self.mimetype = 'application/xml; charset=utf-8' def write(self, options: dict = {}, data: dict = None) -> str: """ @@ -76,40 +76,40 @@ def write(self, options: dict = {}, data: dict = None) -> str: """ try: - feature = list(data["features"][0]) + feature = list(data['features'][0]) except IndexError: - LOGGER.error("no features") + LOGGER.error('no features') return str() - lastmod = datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ") + lastmod = datetime.now().strftime('%Y-%m-%dT%H:%M:%SZ') root = ET.fromstring(URLSET) tree = ET.ElementTree(root) try: ET.indent(tree) except AttributeError: - LOGGER.warning("Unable to indent") + LOGGER.warning('Unable to indent') try: - for i, feature in enumerate(data["features"]): + for i, feature in enumerate(data['features']): if i >= 50000: - LOGGER.warning("Maximum size of sitemap reached") + LOGGER.warning('Maximum size of sitemap reached') break try: - loc = feature["properties"][self.uri_field] + loc = feature['properties'][self.uri_field] except KeyError: - loc = feature["@id"] + loc = feature['@id'] _ = URLSET_FOREACH.format(loc, lastmod) root.append(ET.fromstring(_)) except ValueError as err: LOGGER.error(err) - raise FormatterSerializationError("Error writing XML output") + raise FormatterSerializationError('Error writing XML output') output = io.BytesIO() - tree.write(output, encoding="utf-8", xml_declaration=True) + tree.write(output, encoding='utf-8', xml_declaration=True) return output.getvalue() def __repr__(self): - return f" {self.name}" + return f' {self.name}' diff --git a/pygeoapi_plugins/process/intersect.py b/pygeoapi_plugins/process/intersect.py index 2808651..316fb59 100644 --- a/pygeoapi_plugins/process/intersect.py +++ b/pygeoapi_plugins/process/intersect.py @@ -40,77 +40,77 @@ LOGGER = logging.getLogger(__name__) -with open(os.getenv("PYGEOAPI_CONFIG"), encoding="utf8") as fh: +with open(os.getenv('PYGEOAPI_CONFIG'), encoding='utf8') as fh: CONFIG = yaml_load(fh) - COLLECTIONS = filter_dict_by_key_value(CONFIG["resources"], "type", "collection") + COLLECTIONS = filter_dict_by_key_value(CONFIG['resources'], 'type', 'collection') # TODO: Filter collections for those that support CQL -PROCESS_DEF = CONFIG["resources"]["intersector"] +PROCESS_DEF = CONFIG['resources']['intersector'] PROCESS_DEF.update( { - "version": "0.1.0", - "id": "intersector", - "title": 
"Intersector", - "description": ( - "A process that takes a URL of a geoJSON, " "and returns all intersections." + 'version': '0.1.0', + 'id': 'intersector', + 'title': 'Intersector', + 'description': ( + 'A process that takes a URL of a geoJSON, ' 'and returns all intersections.' ), - "links": [ + 'links': [ { - "type": "text/html", - "rel": "about", - "title": "information", - "href": "https://example.org/process", - "hreflang": "en-US", + 'type': 'text/html', + 'rel': 'about', + 'title': 'information', + 'href': 'https://example.org/process', + 'hreflang': 'en-US', } ], - "inputs": { - "url": { - "title": {"en": "Feature URL"}, - "description": {"en": "URL of valid feature geoJSON"}, - "keywords": {"en": ["geojson", "feature", "url"]}, - "schema": {"type": "string", "default": None}, - "minOccurs": 1, - "maxOccurs": 1, - "metadata": None, # TODO how to use? + 'inputs': { + 'url': { + 'title': {'en': 'Feature URL'}, + 'description': {'en': 'URL of valid feature geoJSON'}, + 'keywords': {'en': ['geojson', 'feature', 'url']}, + 'schema': {'type': 'string', 'default': None}, + 'minOccurs': 1, + 'maxOccurs': 1, + 'metadata': None, # TODO how to use? }, - "collection": { - "title": {"en": "Feature Collection"}, - "description": {"en": "Feature Collection"}, - "keywords": {"en": ["OGC API", "collection"]}, - "schema": { - "type": "string", - "example": next(iter(COLLECTIONS)), - "enum": list(COLLECTIONS), + 'collection': { + 'title': {'en': 'Feature Collection'}, + 'description': {'en': 'Feature Collection'}, + 'keywords': {'en': ['OGC API', 'collection']}, + 'schema': { + 'type': 'string', + 'example': next(iter(COLLECTIONS)), + 'enum': list(COLLECTIONS), }, - "minOccurs": 1, - "maxOccurs": 1, - "metadata": None, # TODO how to use? + 'minOccurs': 1, + 'maxOccurs': 1, + 'metadata': None, # TODO how to use? }, - "geom_field": { - "title": {"en": "Geometry Field"}, - "description": {"en": "Geometry field of valid feature intersect"}, - "keywords": {"en": ["geometry", "intersect", "field"]}, - "schema": {"type": "string", "default": "geom"}, - "minOccurs": 0, - "maxOccurs": 1, - "metadata": None, # TODO how to use? + 'geom_field': { + 'title': {'en': 'Geometry Field'}, + 'description': {'en': 'Geometry field of valid feature intersect'}, + 'keywords': {'en': ['geometry', 'intersect', 'field']}, + 'schema': {'type': 'string', 'default': 'geom'}, + 'minOccurs': 0, + 'maxOccurs': 1, + 'metadata': None, # TODO how to use? 
        },
    },
-    "outputs": {
-        "path": {
-            "title": {"en": "FeatureCollection"},
-            "description": {
-                "en": "A geoJSON FeatureCollection of the "
-                "path generated by the intersection process"
+    'outputs': {
+        'path': {
+            'title': {'en': 'FeatureCollection'},
+            'description': {
+                'en': 'A geoJSON FeatureCollection of the '
+                'path generated by the intersection process'
             },
-            "schema": {"type": "object", "contentMediaType": "application/json"},
+            'schema': {'type': 'object', 'contentMediaType': 'application/json'},
         }
     },
-    "example": {
-        "inputs": {
-            "url": "https://demo.pygeoapi.io/master/collections/obs/items/238",
-            "collection": next(iter(COLLECTIONS)),
+    'example': {
+        'inputs': {
+            'url': 'https://demo.pygeoapi.io/master/collections/obs/items/238',
+            'collection': next(iter(COLLECTIONS)),
         }
     },
 }
@@ -128,7 +128,7 @@ def __init__(self, processor_def):

         :returns: pygeoapi.process.intersect.IntersectionProcessor
         """
-        LOGGER.debug("IntersectionProcesser init")
+        LOGGER.debug('IntersectionProcessor init')
         super().__init__(processor_def, PROCESS_DEF)

     def execute(self, data):
@@ -139,39 +139,39 @@ def execute(self, data):

         :returns: 'application/json'
         """
-        mimetype = "application/json"
+        mimetype = 'application/json'

-        if not data.get("url") or not data.get("collection"):
-            raise ProcessorExecuteError(f"Invalid input: {data.items()}")
-        feature_url = data["url"]
-        geom_field = data.get("geom_field")
-        collection = data["collection"]
+        if not data.get('url') or not data.get('collection'):
+            raise ProcessorExecuteError(f'Invalid input: {data.items()}')
+        feature_url = data['url']
+        geom_field = data.get('geom_field')
+        collection = data['collection']

-        LOGGER.debug(f"Fetching {feature_url}")
-        params = {"f": "json"}
+        LOGGER.debug(f'Fetching {feature_url}')
+        params = {'f': 'json'}
         feature = get(feature_url, params=params).json()
-        if not feature.get("geometry"):
-            raise ProcessorExecuteError(f"Invalid geoJSON: {feature.items()}")
+        if not feature.get('geometry'):
+            raise ProcessorExecuteError(f'Invalid geoJSON: {feature.items()}')

         for cname, c in COLLECTIONS.items():
             if str(collection) != cname:
                 continue

-            p = get_provider_default(c["providers"])
-            provider = load_plugin("provider", p)
-            if "geom_field" in p:
-                geom_field = p["geom_field"]
-                LOGGER.debug(f"Using geom from configuration: {geom_field}")
+            p = get_provider_default(c['providers'])
+            provider = load_plugin('provider', p)
+            if 'geom_field' in p:
+                geom_field = p['geom_field']
+                LOGGER.debug(f'Using geom from configuration: {geom_field}')
             else:
-                LOGGER.debug(f"Using provided geom field: {geom_field}")
+                LOGGER.debug(f'Using provided geom field: {geom_field}')

             if not geom_field:
-                msg = f"Invalid geom_field: {data.items()}"
+                msg = f'Invalid geom_field: {data.items()}'
                 raise ProcessorExecuteError(msg)

-            LOGGER.debug(f"Intesecting {cname} with backend {provider}")
+            LOGGER.debug(f'Intersecting {cname} with backend {provider}')
             outputs = self._intersect(feature, geom_field, provider)

-        LOGGER.debug("Returning response")
+        LOGGER.debug('Returning response')
         return mimetype, outputs

     def _intersect(self, feature, geom_field, provider):
@@ -185,14 +185,14 @@ def _intersect(self, feature, geom_field, provider):

         :returns: List of GeoJSON Features
         """
         filter_ = parse_cql_json(
-            {"intersects": [{"property": geom_field}, feature["geometry"]]}
+            {'intersects': [{'property': geom_field}, feature['geometry']]}
         )
-        LOGGER.debug(f"Making CQL query: {filter_}")
-        _ = provider.query(resulttype="hits", filterq=filter_)
-        fc = provider.query(limit=_["numberMatched"], filterq=filter_)
+        LOGGER.debug(f'Making CQL query: {filter_}')
+        _ = provider.query(resulttype='hits', filterq=filter_)
+        fc = provider.query(limit=_['numberMatched'], filterq=filter_)
         LOGGER.info(f'Returning {fc["numberReturned"]} intersections')

         return fc

     def __repr__(self):
-        return f" {self.name}"
+        return f' {self.name}'
diff --git a/pygeoapi_plugins/process/sitemap.py b/pygeoapi_plugins/process/sitemap.py
index ad79fff..a42ddbb 100644
--- a/pygeoapi_plugins/process/sitemap.py
+++ b/pygeoapi_plugins/process/sitemap.py
@@ -49,83 +49,83 @@

 LOGGER = logging.getLogger(__name__)

-with open(os.getenv("PYGEOAPI_CONFIG"), encoding="utf8") as fh:
+with open(os.getenv('PYGEOAPI_CONFIG'), encoding='utf8') as fh:
     CONFIG = yaml_load(fh)
-    COLLECTIONS = filter_dict_by_key_value(CONFIG["resources"], "type", "collection")
+    COLLECTIONS = filter_dict_by_key_value(CONFIG['resources'], 'type', 'collection')
     # TODO: Filter collections for those that support CQL

-PROCESS_DEF = CONFIG["resources"]["sitemap-generator"]
+PROCESS_DEF = CONFIG['resources']['sitemap-generator']
 PROCESS_DEF.update(
     {
-        "version": "0.1.0",
-        "id": "sitemap-generator",
-        "title": "Sitemap Generator",
-        "description": (
-            "A process that returns a sitemap of" "all pygeoapi endpoints."
+        'version': '0.1.0',
+        'id': 'sitemap-generator',
+        'title': 'Sitemap Generator',
+        'description': (
+            'A process that returns a sitemap of ' 'all pygeoapi endpoints.'
         ),
-        "links": [
+        'links': [
             {
-                "type": "text/html",
-                "rel": "about",
-                "title": "information",
-                "href": "https://developers.google.com/search/docs/crawling-indexing/sitemaps/overview",  # noqa
-                "hreflang": "en-US",
+                'type': 'text/html',
+                'rel': 'about',
+                'title': 'information',
+                'href': 'https://developers.google.com/search/docs/crawling-indexing/sitemaps/overview',  # noqa
+                'hreflang': 'en-US',
             }
         ],
-        "inputs": {
-            "include-common": {
-                "title": {"en": "Include OGC API - Common"},
-                "description": {
-                    "en": "Boolean value controlling the generation of a sitemap "
-                    "for OGC API - Common endpoints"
+        'inputs': {
+            'include-common': {
+                'title': {'en': 'Include OGC API - Common'},
+                'description': {
+                    'en': 'Boolean value controlling the generation of a sitemap '
+                    'for OGC API - Common endpoints'
                 },
-                "keywords": {"en": ["sitemap", "ogc", "OGC API - Common", "pygeoapi"]},
-                "schema": {"type": "boolean", "default": True},
-                "minOccurs": 0,
-                "maxOccurs": 1,
-                "metadata": None,  # TODO how to use?
+                'keywords': {'en': ['sitemap', 'ogc', 'OGC API - Common', 'pygeoapi']},
+                'schema': {'type': 'boolean', 'default': True},
+                'minOccurs': 0,
+                'maxOccurs': 1,
+                'metadata': None,  # TODO how to use?
             },
-            "include-features": {
-                "title": {"en": "Include OGC API - Features"},
-                "description": {
-                    "en": "Boolean value controlling the generation of a sitemap "
-                    "for individual OGC API - Features endpoints"
+            'include-features': {
+                'title': {'en': 'Include OGC API - Features'},
+                'description': {
+                    'en': 'Boolean value controlling the generation of a sitemap '
+                    'for individual OGC API - Features endpoints'
                 },
-                "keywords": {
-                    "en": ["sitemap", "ogc", "OGC API - Features", "pygeoapi"]
+                'keywords': {
+                    'en': ['sitemap', 'ogc', 'OGC API - Features', 'pygeoapi']
                 },
-                "schema": {"type": "boolean", "default": True},
-                "minOccurs": 0,
-                "maxOccurs": 1,
-                "metadata": None,  # TODO how to use?
+                'schema': {'type': 'boolean', 'default': True},
+                'minOccurs': 0,
+                'maxOccurs': 1,
+                'metadata': None,  # TODO how to use?
             },
-            "zip": {
-                "title": {"en": "ZIP response"},
-                "description": {"en": "Boolean whether to ZIP the response"},
-                "keywords": {"en": ["sitemap", "zip", "pygeoapi"]},
-                "schema": {"type": "boolean", "default": False},
-                "minOccurs": 0,
-                "maxOccurs": 1,
-                "metadata": None,  # TODO how to use?
+            'zip': {
+                'title': {'en': 'ZIP response'},
+                'description': {'en': 'Boolean whether to ZIP the response'},
+                'keywords': {'en': ['sitemap', 'zip', 'pygeoapi']},
+                'schema': {'type': 'boolean', 'default': False},
+                'minOccurs': 0,
+                'maxOccurs': 1,
+                'metadata': None,  # TODO how to use?
             },
         },
-    "outputs": {
-        "common.xml": {
-            "title": {"en": "OGC API - Common Sitemap"},
-            "description": {
-                "en": "A sitemap of the OGC API - Common end points for the "
-                "pygeoapi instance."
+    'outputs': {
+        'common.xml': {
+            'title': {'en': 'OGC API - Common Sitemap'},
+            'description': {
+                'en': 'A sitemap of the OGC API - Common endpoints for the '
+                'pygeoapi instance.'
             },
-            "schema": {"type": "object", "contentMediaType": "application/json"},
+            'schema': {'type': 'object', 'contentMediaType': 'application/json'},
         },
-        "sitemap.zip": {
-            "title": {"en": "Sitemap"},
-            "description": {"en": "A sitemap of the pygeoapi instance"},
-            "schema": {"type": "object", "contentMediaType": "application/zip"},
+        'sitemap.zip': {
+            'title': {'en': 'Sitemap'},
+            'description': {'en': 'A sitemap of the pygeoapi instance'},
+            'schema': {'type': 'object', 'contentMediaType': 'application/zip'},
         },
     },
-    "example": {"inputs": {"include-features": False}},
+    'example': {'inputs': {'include-features': False}},
 }
)
@@ -141,10 +141,10 @@ def __init__(self, processor_def):

         :returns: pygeoapi.process.sitemap.SitemapProcessor
         """
-        LOGGER.debug("SitemapProcesser init")
+        LOGGER.debug('SitemapProcessor init')
         super().__init__(processor_def, PROCESS_DEF)
         self.config = CONFIG
-        self.base_url = self.config["server"]["url"]
+        self.base_url = self.config['server']['url']
         self.xml = XMLFormatter({})

     def execute(self, data):
@@ -155,19 +155,19 @@ def execute(self, data):

         :returns: 'application/json'
         """
-        mimetype = "application/json"
-        common = data.get("include-common", True)
-        features = data.get("include-features", True)
-        if data.get("zip"):
-            LOGGER.debug("Returning zipped response")
+        mimetype = 'application/json'
+        common = data.get('include-common', True)
+        features = data.get('include-features', True)
+        if data.get('zip'):
+            LOGGER.debug('Returning zipped response')
             zip_output = io.BytesIO()
-            with zipfile.ZipFile(zip_output, "w") as zipf:
+            with zipfile.ZipFile(zip_output, 'w') as zipf:
                 for filename, content in self.generate(common, features):
                     zipf.writestr(filename, content)
-            return "application/zip", zip_output.getvalue()
+            return 'application/zip', zip_output.getvalue()

         else:
-            LOGGER.debug("Returning response")
+            LOGGER.debug('Returning response')
             return mimetype, dict(self.generate(common, features))

     def generate(self, include_common, include_features):
@@ -180,24 +180,24 @@ def generate(self, include_common, include_features):

         :returns: 'application/json'
         """
         if include_common:
-            LOGGER.debug("Generating common.xml")
-            oas = {"features": []}
-            for path in get_oas(self.config).get("paths"):
-                if r"{jobId}" not in path and r"{featureId}" not in path:
+            LOGGER.debug('Generating common.xml')
+            oas = {'features': []}
+            for path in get_oas(self.config).get('paths'):
+                if r'{jobId}' not in path and r'{featureId}' not in path:
                     path_uri = url_join(self.base_url, path)
-                    oas["features"].append({"@id": path_uri})
-            yield ("common.xml", 
self.xml.write(data=oas))
+                    oas['features'].append({'@id': path_uri})
+            yield ('common.xml', self.xml.write(data=oas))

         if include_features:
-            LOGGER.debug("Generating collections sitemap")
+            LOGGER.debug('Generating collections sitemap')
             for name, c in COLLECTIONS.items():
-                LOGGER.debug(f"Generating sitemap(s) for {name}")
-                p = get_provider_default(c["providers"])
-                provider = load_plugin("provider", p)
-                hits = provider.query(resulttype="hits").get("numberMatched")
+                LOGGER.debug(f'Generating sitemap(s) for {name}')
+                p = get_provider_default(c['providers'])
+                provider = load_plugin('provider', p)
+                hits = provider.query(resulttype='hits').get('numberMatched')

                 iterations = range(math.ceil(hits / 50000))
                 for i in iterations:
-                    yield (f"{name}__{i}.xml", self._generate(i, name, provider))
+                    yield (f'{name}__{i}.xml', self._generate(i, name, provider))

     def _generate(self, index, dataset, provider, n=50000):
         """
@@ -212,14 +212,14 @@
         """
         content = provider.query(offset=(n * index), limit=n)
-        content["links"] = []
+        content['links'] = []
         content = geojson2jsonld(
-            self, content, dataset, id_field=(provider.uri_field or "id")
+            self, content, dataset, id_field=(provider.uri_field or 'id')
         )
         return self.xml.write(data=content)

     def get_collections_url(self):
-        return url_join(self.base_url, "collections")
+        return url_join(self.base_url, 'collections')

     def __repr__(self):
-        return f" {self.name}"
+        return f' {self.name}'
diff --git a/pygeoapi_plugins/provider/ckan.py b/pygeoapi_plugins/provider/ckan.py
index a1ce2f5..ae4f038 100644
--- a/pygeoapi_plugins/provider/ckan.py
+++ b/pygeoapi_plugins/provider/ckan.py
@@ -55,10 +55,10 @@ def __init__(self, provider_def):

         :returns: pygeoapi_plugins.provider.ckan.CKANProvider
         """
-        LOGGER.debug("Logger CKAN init")
+        LOGGER.debug('CKAN provider init')
         super().__init__(provider_def)

-        self.resource_id = provider_def["resource_id"]
+        self.resource_id = provider_def['resource_id']
         self.http = Session()

         self.get_fields()

@@ -77,10 +77,10 @@ def get_fields(self):
         self.properties = set(self.properties) | set(
             [self.id_field, self.x_field, self.y_field]
         )
-        params["fields"] = ",".join(self.properties)
+        params['fields'] = ','.join(self.properties)

         r = self._get_response(self.data)
-        self.fields = {field.pop("id"): field for field in r["fields"]}
+        self.fields = {field.pop('id'): field for field in r['fields']}

         return self.fields

@@ -89,7 +89,7 @@ def query(
         self,
         offset=0,
         limit=10,
-        resulttype="results",
+        resulttype='results',
         bbox=[],
         datetime_=None,
         properties=[],
@@ -141,16 +141,16 @@ def get(self, identifier, **kwargs):
         properties = [
             (self.id_field, identifier),
         ]
-        params = {"filters": self._make_where(properties)}
+        params = {'filters': self._make_where(properties)}

         response = self._get_response(self.data, params)
-        [feature] = [self._make_feature(f, False) for f in response["records"]]
+        [feature] = [self._make_feature(f, False) for f in response['records']]
         return feature

     def _load(
         self,
         offset=0,
         limit=10,
-        resulttype="results",
+        resulttype='results',
         bbox=[],
         datetime_=None,
         properties=[],
@@ -177,60 +177,60 @@ def _load(
         """
         # Default feature collection and request parameters
-        fc = {"type": "FeatureCollection", "features": []}
+        fc = {'type': 'FeatureCollection', 'features': []}

-        params = {"offset": offset, "limit": limit}
+        params = {'offset': offset, 'limit': limit}

         if self.properties or select_properties:
             required = [self.id_field, self.x_field, self.y_field]
select_properties.extend(required) - params["fields"] = ",".join(set(self.properties) | set(select_properties)) + params['fields'] = ','.join(set(self.properties) | set(select_properties)) # Add queryables to request params if properties: - params["filters"] = self._make_where(properties) + params['filters'] = self._make_where(properties) - if resulttype == "hits": - params["include_total"] = "true" + if resulttype == 'hits': + params['include_total'] = 'true' if sortby: - params["sort"] = self._make_orderby(sortby) + params['sort'] = self._make_orderby(sortby) if q: - params["q"] = q + params['q'] = q # Form URL for GET request - LOGGER.debug("Sending query") + LOGGER.debug('Sending query') response = self._get_response(self.data, params) - if response.get("total"): - fc["numberMatched"] = response["total"] + if response.get('total'): + fc['numberMatched'] = response['total'] - if resulttype == "hits": + if resulttype == 'hits': # Return hits - LOGGER.debug("Returning hits") + LOGGER.debug('Returning hits') return fc # Return feature collection - v = [self._make_feature(f, skip_geometry) for f in response["records"]] + v = [self._make_feature(f, skip_geometry) for f in response['records']] step = len(v) # Query if values are less than expected while len(v) < limit: - LOGGER.debug("Fetching next set of values") - params["offset"] += step + LOGGER.debug('Fetching next set of values') + params['offset'] += step response = self._get_response(self.data, params) - if len(response["records"]) == 0: + if len(response['records']) == 0: break else: - _ = [self._make_feature(f, skip_geometry) for f in response["records"]] + _ = [self._make_feature(f, skip_geometry) for f in response['records']] v.extend(_) - fc["features"] = v - fc["numberReturned"] = len(v) + fc['features'] = v + fc['numberReturned'] = len(v) return fc @@ -243,26 +243,26 @@ def _get_response(self, url, params={}): :returns: STA response """ - params.update({"resource_id": self.resource_id}) + params.update({'resource_id': self.resource_id}) r = self.http.get(url, params=params) if not r.ok: - LOGGER.error("Bad http response code") - raise ProviderConnectionError("Bad http response code") + LOGGER.error('Bad http response code') + raise ProviderConnectionError('Bad http response code') print(r.url) try: response = r.json() except JSONDecodeError as err: - LOGGER.error("JSON decode error") + LOGGER.error('JSON decode error') raise ProviderQueryError(err) - if not response["success"]: - LOGGER.error("Bad CKAN response") - raise ProviderConnectionError("Bad CKAN response") + if not response['success']: + LOGGER.error('Bad CKAN response') + raise ProviderConnectionError('Bad CKAN response') - return response["result"] + return response['result'] def _make_feature(self, feature, skip_geometry): """ @@ -273,18 +273,18 @@ def _make_feature(self, feature, skip_geometry): :returns: STA response """ - f = {"type": "Feature", "id": feature.pop(self.id_field), "geometry": None} + f = {'type': 'Feature', 'id': feature.pop(self.id_field), 'geometry': None} if not skip_geometry: - f["geometry"] = { - "type": "Point", - "coordinates": [ + f['geometry'] = { + 'type': 'Point', + 'coordinates': [ float(feature.pop(self.x_field)), float(feature.pop(self.y_field)), ], } - f["properties"] = feature + f['properties'] = feature return f @@ -297,10 +297,10 @@ def _make_orderby(sortby): :returns: CKAN query `order` clause """ - __ = {"+": "asc", "-": "desc"} + __ = {'+': 'asc', '-': 'desc'} ret = [f"{_['property']} {__[_['order']]}" for _ in sortby] - return 
",".join(ret) + return ','.join(ret) def _make_where(self, properties, bbox=[]): """ @@ -320,4 +320,4 @@ def _make_where(self, properties, bbox=[]): return json.dumps(p) def __repr__(self): - return f" {self.data}" + return f' {self.data}' diff --git a/pygeoapi_plugins/provider/postgresql.py b/pygeoapi_plugins/provider/postgresql.py index e3edbb6..596b247 100644 --- a/pygeoapi_plugins/provider/postgresql.py +++ b/pygeoapi_plugins/provider/postgresql.py @@ -44,7 +44,7 @@ from pygeoapi.provider.postgresql import PostgreSQLProvider from pygeoapi.provider.base import ProviderQueryError -PSUEDO_COUNT_LIMIT = os.getenv("PSUEDO_COUNT_LIMIT", 5000000) +PSUEDO_COUNT_LIMIT = os.getenv('PSUEDO_COUNT_LIMIT', 5000000) COUNT_FUNCTION = """ DROP FUNCTION IF EXISTS count_estimate; CREATE FUNCTION count_estimate(query text) @@ -85,14 +85,14 @@ def __init__(self, provider_def): :returns: pygeoapi.provider.base.PostgreSQLProvider """ - LOGGER.debug("Initialising Pseudo-count PostgreSQL provider.") + LOGGER.debug('Initialising Pseudo-count PostgreSQL provider.') super().__init__(provider_def) def query( self, offset=0, limit=10, - resulttype="results", + resulttype='results', bbox=[], datetime_=None, properties=[], @@ -125,7 +125,7 @@ def query( :returns: GeoJSON FeatureCollection """ - LOGGER.debug("Preparing filters") + LOGGER.debug('Preparing filters') property_filters = self._get_property_filters(properties) cql_filters = self._get_cql_filters(filterq) bbox_filter = self._get_bbox_filter(bbox) @@ -135,7 +135,7 @@ def query( select_properties, skip_geometry ) - LOGGER.debug("Querying PostGIS") + LOGGER.debug('Querying PostGIS') # Execute query within self-closing database Session context with Session(self._engine) as session: results = ( @@ -149,38 +149,38 @@ def query( try: if filterq: - raise ProviderQueryError("No Pseudo-count during CQL") - elif resulttype == "hits": - raise ProviderQueryError("No Pseudo-count during hits") + raise ProviderQueryError('No Pseudo-count during CQL') + elif resulttype == 'hits': + raise ProviderQueryError('No Pseudo-count during hits') matched = self._get_pseudo_count(results) except ProviderQueryError as err: - LOGGER.warning(f"Warning during psuedo-count {err}") + LOGGER.warning(f'Warning during psuedo-count {err}') matched = results.count() except Exception as err: - LOGGER.warning(f"Error during psuedo-count {err}") + LOGGER.warning(f'Error during psuedo-count {err}') matched = results.count() - LOGGER.debug(f"Found {matched} result(s)") + LOGGER.debug(f'Found {matched} result(s)') - LOGGER.debug("Preparing response") + LOGGER.debug('Preparing response') response = { - "type": "FeatureCollection", - "features": [], - "numberMatched": matched, - "numberReturned": 0, + 'type': 'FeatureCollection', + 'features': [], + 'numberMatched': matched, + 'numberReturned': 0, } - if resulttype == "hits" or not results: + if resulttype == 'hits' or not results: return response crs_transform_out = self._get_crs_transform(crs_transform_spec) for item in results.order_by(*order_by_clauses).offset(offset).limit(limit): # noqa - response["numberReturned"] += 1 - response["features"].append( + response['numberReturned'] += 1 + response['features'].append( self._sqlalchemy_to_feature(item, crs_transform_out) ) @@ -196,9 +196,9 @@ def _get_pseudo_count(self, results): :returns matched: `int` of the pseudo-count for the given results """ - LOGGER.debug("Getting pseudo-count") + LOGGER.debug('Getting pseudo-count') compiled = results.statement.compile( - self._engine, 
compile_kwargs={"literal_binds": True} + self._engine, compile_kwargs={'literal_binds': True} ) with Session(self._engine) as s: @@ -207,10 +207,10 @@ def _get_pseudo_count(self, results): matched = s.execute(compiled_query).scalar() if matched < PSUEDO_COUNT_LIMIT: - LOGGER.debug("Using precise count") + LOGGER.debug('Using precise count') matched = results.count() return matched def __repr__(self): - return f" {self.table}" + return f' {self.table}' diff --git a/pygeoapi_plugins/provider/sparql.py b/pygeoapi_plugins/provider/sparql.py index 4fd32e7..ab6569f 100644 --- a/pygeoapi_plugins/provider/sparql.py +++ b/pygeoapi_plugins/provider/sparql.py @@ -52,7 +52,7 @@ PREFIX skos: """ -_SELECT = "SELECT DISTINCT *" +_SELECT = 'SELECT DISTINCT *' _WHERE = """ WHERE {{ @@ -76,18 +76,18 @@ def __init__(self, provider_def): """ super().__init__(provider_def) _provider_def = provider_def.copy() - _provider_def["name"] = _provider_def.pop("sparql_provider") + _provider_def['name'] = _provider_def.pop('sparql_provider') - self.p = load_plugin("provider", _provider_def) - self.sparql_endpoint = provider_def.get("sparql_endpoint") - self.subj = provider_def.get("sparql_subject") - self.predicates = provider_def.get("sparql_predicates") + self.p = load_plugin('provider', _provider_def) + self.sparql_endpoint = provider_def.get('sparql_endpoint') + self.subj = provider_def.get('sparql_subject') + self.predicates = provider_def.get('sparql_predicates') def query( self, offset=0, limit=10, - resulttype="results", + resulttype='results', bbox=[], datetime_=None, properties=[], @@ -128,17 +128,17 @@ def query( ) v = [] - for c in content["features"]: - subj, _ = self._clean_subj(c["properties"], self.subj) + for c in content['features']: + subj, _ = self._clean_subj(c['properties'], self.subj) v.append(subj) - search = " ".join(v) + search = ' '.join(v) values = self._sparql(search) - for item in content["features"]: - _, _subj = self._clean_subj(item["properties"], self.subj) + for item in content['features']: + _, _subj = self._clean_subj(item['properties'], self.subj) - item["properties"] = self._combine(item["properties"], values.get(_subj)) + item['properties'] = self._combine(item['properties'], values.get(_subj)) return content @@ -150,13 +150,13 @@ def get(self, identifier, **kwargs): :returns: dict of single GeoJSON fea """ - LOGGER.debug(f"SPARQL for: {identifier}") + LOGGER.debug(f'SPARQL for: {identifier}') feature = self.p.get(identifier) - subj, _subj = self._clean_subj(feature["properties"], self.subj) + subj, _subj = self._clean_subj(feature['properties'], self.subj) values = self._sparql(subj) - feature["properties"] = self._combine(feature["properties"], values.get(_subj)) + feature['properties'] = self._combine(feature['properties'], values.get(_subj)) return feature @@ -168,13 +168,13 @@ def _sparql(self, value): :returns: dict of SPARQL feature data """ - LOGGER.debug("Requesting SPARQL data") + LOGGER.debug('Requesting SPARQL data') w = [ - "OPTIONAL {{?v {p} ?{o} .}}".format(p=v, o=k) + 'OPTIONAL {{?v {p} ?{o} .}}'.format(p=v, o=k) for k, v in self.predicates.items() ] - where = " ".join(w) + where = ' '.join(w) qs = self._makeQuery(value, where) result = self._sendQuery(qs) @@ -191,22 +191,22 @@ def _clean_subj(self, properties, _subject): :returns: subject value for properties block & SPARQL """ - if ":" in _subject: - (_pref, _subject) = _subject.split(":") + if ':' in _subject: + (_pref, _subject) = _subject.split(':') else: - _pref = "" + _pref = '' _subj = properties[_subject] 
if is_url(_subj): - subj = f"<{_subj}>" + subj = f'<{_subj}>' elif is_url(_subj[1:-1]): subj = _subj _subj = subj[1:-1] elif _pref: - __subj = _subj.replace(" ", "_") - subj = f"{_pref}:{__subj}" - if _pref == " ": - _subj = f"http://dbpedia.org/resource/{__subj}" + __subj = _subj.replace(' ', '_') + subj = f'{_pref}:{__subj}' + if _pref == ' ': + _subj = f'http://dbpedia.org/resource/{__subj}' return subj, _subj @@ -219,10 +219,10 @@ def _clean_result(self, result, ret={}): :returns: dict of SPARQL feature results """ - for v in result["results"]["bindings"]: - _id = v.pop("v").get("value") + for v in result['results']['bindings']: + _id = v.pop('v').get('value') - if not ret.get(_id, ""): + if not ret.get(_id, ''): ret[_id] = v for _k, _v in v.items(): @@ -231,7 +231,7 @@ def _clean_result(self, result, ret={}): ret[_id][_k], ] - _ = [_["value"] == _v["value"] for _ in ret[_id][_k]] + _ = [_['value'] == _v['value'] for _ in ret[_id][_k]] if True not in _: ret[_id][_k].append(_v) @@ -248,10 +248,10 @@ def _combine(self, properties, results): """ try: for r in results: - all_r = [_.get("value") for _ in results[r]] + all_r = [_.get('value') for _ in results[r]] properties[r] = all_r[-1] if len(all_r) == 1 else all_r except TypeError as err: - LOGGER.error("Error SPARQL data: {}".format(err)) + LOGGER.error('Error SPARQL data: {}'.format(err)) raise ProviderNoDataError(err) return properties @@ -266,8 +266,8 @@ def _makeQuery(self, value, where, prefix=_PREFIX, select=_SELECT): :returns: str, SPARQL query """ - querystring = "".join([prefix, select, _WHERE.format(value=value, where=where)]) - LOGGER.debug("SPARQL query: {}".format(querystring)) + querystring = ''.join([prefix, select, _WHERE.format(value=value, where=where)]) + LOGGER.debug('SPARQL query: {}'.format(querystring)) return querystring @@ -279,16 +279,16 @@ def _sendQuery(self, query): :returns: SPARQL query results """ - LOGGER.debug("Sending SPARQL query") + LOGGER.debug('Sending SPARQL query') sparql = SPARQLWrapper(self.sparql_endpoint) sparql.setQuery(query) sparql.setReturnFormat(JSON) try: results = sparql.query().convert() - LOGGER.debug("Received SPARQL results") + LOGGER.debug('Received SPARQL results') except Exception as err: - LOGGER.error("Error in SPARQL query: {}".format(err)) + LOGGER.error('Error in SPARQL query: {}'.format(err)) raise ProviderQueryError(err) return results @@ -296,7 +296,7 @@ def _sendQuery(self, query): def get_fields(self): self.fields = self.p.get_fields() for prop in self.predicates: - self.fields.update({prop: {"type": "string"}}) + self.fields.update({prop: {'type': 'string'}}) return self.fields @@ -322,4 +322,4 @@ def delete(self, identifier): return self.p.delete(identifier) def __repr__(self): - return " {}".format(self.data) + return ' {}'.format(self.data) diff --git a/setup.py b/setup.py index 8fd7884..05a7aac 100644 --- a/setup.py +++ b/setup.py @@ -45,11 +45,11 @@ def finalize_options(self): def run(self): import subprocess - errno = subprocess.call(["pytest"]) + errno = subprocess.call(['pytest']) raise SystemExit(errno) -def read(filename, encoding="utf-8"): +def read(filename, encoding='utf-8'): """read file contents""" full_path = os.path.join(os.path.dirname(__file__), filename) with io.open(full_path, encoding=encoding) as fh: @@ -59,45 +59,45 @@ def read(filename, encoding="utf-8"): def get_package_version(): """get version from top-level package init""" - version_file = read("pygeoapi_plugins/__init__.py") + version_file = read('pygeoapi_plugins/__init__.py') 
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M) if version_match: return version_match.group(1) - raise RuntimeError("Unable to find version string.") + raise RuntimeError('Unable to find version string.') -DESCRIPTION = "pygeoapi plugins" +DESCRIPTION = 'pygeoapi plugins' # ensure a fresh MANIFEST file is generated -if os.path.exists("MANIFEST"): - os.unlink("MANIFEST") +if os.path.exists('MANIFEST'): + os.unlink('MANIFEST') setup( - name="pygeoapi_plugins", + name='pygeoapi_plugins', version=get_package_version(), description=DESCRIPTION.strip(), - long_description=read("README.md"), - long_description_content_type="text/markdown", - license="MIT", - platforms="all", - keywords=" ".join(["cgs", "pygeoapi", "geopython"]), - author="Benjamin Webb", - author_email="bwebb@lincolninst.edu", - maintainer="Benjamin Webb", - maintainer_email="bwebb@lincolninst.edu", - url="https://github.com/cgs-earth/pygeoapi-plugins", - install_requires=read("requirements.txt").splitlines(), - packages=find_packages(exclude=["pygeoapi_plugins.tests"]), + long_description=read('README.md'), + long_description_content_type='text/markdown', + license='MIT', + platforms='all', + keywords=' '.join(['cgs', 'pygeoapi', 'geopython']), + author='Benjamin Webb', + author_email='bwebb@lincolninst.edu', + maintainer='Benjamin Webb', + maintainer_email='bwebb@lincolninst.edu', + url='https://github.com/cgs-earth/pygeoapi-plugins', + install_requires=read('requirements.txt').splitlines(), + packages=find_packages(exclude=['pygeoapi_plugins.tests']), include_package_data=True, classifiers=[ - "Development Status :: 4 - Beta", - "Environment :: Console", - "Intended Audience :: Developers", - "Intended Audience :: Science/Research", - "License :: OSI Approved :: MIT License", - "Operating System :: OS Independent", - "Programming Language :: Python", - "Topic :: Scientific/Engineering :: GIS", + 'Development Status :: 4 - Beta', + 'Environment :: Console', + 'Intended Audience :: Developers', + 'Intended Audience :: Science/Research', + 'License :: OSI Approved :: MIT License', + 'Operating System :: OS Independent', + 'Programming Language :: Python', + 'Topic :: Scientific/Engineering :: GIS', ], - cmdclass={"test": PyTest}, + cmdclass={'test': PyTest}, ) diff --git a/tests/test_ckan_provider.py b/tests/test_ckan_provider.py index 283b7ed..54096b2 100644 --- a/tests/test_ckan_provider.py +++ b/tests/test_ckan_provider.py @@ -37,13 +37,13 @@ def config(): # New Mexico Reservoirs # source: https://catalog.newmexicowaterdata.org return { - "name": "pygeoapi_plugins.provider.ckan.CKAN", - "type": "feature", - "data": "https://catalog.newmexicowaterdata.org/api/3/action/datastore_search", # noqa - "resource_id": "08369d21-520b-439e-97e3-5ecb50737887", - "id_field": "_id", - "x_field": "LONDD", - "y_field": "LATDD", + 'name': 'pygeoapi_plugins.provider.ckan.CKAN', + 'type': 'feature', + 'data': 'https://catalog.newmexicowaterdata.org/api/3/action/datastore_search', # noqa + 'resource_id': '08369d21-520b-439e-97e3-5ecb50737887', + 'id_field': '_id', + 'x_field': 'LONDD', + 'y_field': 'LATDD', } @@ -52,100 +52,100 @@ def test_query(config): fields = p.get_fields() assert len(fields) == 46 - assert fields["DAMNAME"]["type"] == "text" - assert fields["RES_ID"]["type"] == "numeric" + assert fields['DAMNAME']['type'] == 'text' + assert fields['RES_ID']['type'] == 'numeric' results = p.query() - assert len(results["features"]) == 10 - assert results["numberMatched"] == 25 - assert 
results["numberReturned"] == 10 + assert len(results['features']) == 10 + assert results['numberMatched'] == 25 + assert results['numberReturned'] == 10 - assert results["features"][0]["id"] == 1 - assert results["features"][0]["properties"]["DAMNAME"] == "NAVAJO" - assert results["features"][0]["geometry"]["coordinates"][0] == -107.609 - assert results["features"][0]["geometry"]["coordinates"][1] == 36.8078 + assert results['features'][0]['id'] == 1 + assert results['features'][0]['properties']['DAMNAME'] == 'NAVAJO' + assert results['features'][0]['geometry']['coordinates'][0] == -107.609 + assert results['features'][0]['geometry']['coordinates'][1] == 36.8078 - assert results["features"][2]["id"] == 3 - assert results["features"][2]["properties"]["DAMNAME"] == "LA JARA LAKE" - assert results["features"][2]["geometry"]["coordinates"][0] == -107 - assert results["features"][2]["geometry"]["coordinates"][1] == 36.74 + assert results['features'][2]['id'] == 3 + assert results['features'][2]['properties']['DAMNAME'] == 'LA JARA LAKE' + assert results['features'][2]['geometry']['coordinates'][0] == -107 + assert results['features'][2]['geometry']['coordinates'][1] == 36.74 results = p.query(limit=1) - assert len(results["features"]) == 1 - assert results["features"][0]["id"] == 1 + assert len(results['features']) == 1 + assert results['features'][0]['id'] == 1 results = p.query(offset=2, limit=1) - assert len(results["features"]) == 1 - assert results["features"][0]["id"] == 3 + assert len(results['features']) == 1 + assert results['features'][0]['id'] == 3 results = p.query(limit=25) - assert len(results["features"]) == 25 - assert results["numberMatched"] == 25 - assert results["numberReturned"] == 25 + assert len(results['features']) == 25 + assert results['numberMatched'] == 25 + assert results['numberReturned'] == 25 results = p.query(skip_geometry=True) - assert results["features"][0]["geometry"] is None + assert results['features'][0]['geometry'] is None def test_query_by_properties(config): p = CKANProvider(config) - results = p.query(properties=[("SOURCE", "E")]) - assert results["numberMatched"] == 11 - assert results["numberReturned"] == 10 + results = p.query(properties=[('SOURCE', 'E')]) + assert results['numberMatched'] == 11 + assert results['numberReturned'] == 10 - results = p.query(properties=[("RIVER", "RIO CHAMA")]) - assert results["numberMatched"] == 2 - assert results["numberReturned"] == 2 + results = p.query(properties=[('RIVER', 'RIO CHAMA')]) + assert results['numberMatched'] == 2 + assert results['numberReturned'] == 2 - results = p.query(properties=[("SOURCE", "E"), ("RIVER", "RIO CHAMA")]) - assert results["numberMatched"] == 1 - assert results["numberReturned"] == 1 + results = p.query(properties=[('SOURCE', 'E'), ('RIVER', 'RIO CHAMA')]) + assert results['numberMatched'] == 1 + assert results['numberReturned'] == 1 def test_query_sortby(config): p = CKANProvider(config) results = p.query() - assert results["features"][0]["properties"]["YEAR"] == 1963 + assert results['features'][0]['properties']['YEAR'] == 1963 - results = p.query(sortby=[{"property": "YEAR", "order": "+"}]) - assert results["features"][0]["properties"]["YEAR"] == 1893 + results = p.query(sortby=[{'property': 'YEAR', 'order': '+'}]) + assert results['features'][0]['properties']['YEAR'] == 1893 - results = p.query(sortby=[{"property": "YEAR", "order": "-"}]) - assert results["features"][0]["properties"]["YEAR"] == 1980 + results = p.query(sortby=[{'property': 'YEAR', 'order': '-'}]) + assert 
results['features'][0]['properties']['YEAR'] == 1980 def test_query_q(config): p = CKANProvider(config) - results = p.query(q="RESERVOIR") - assert results["numberMatched"] == 14 - assert results["numberReturned"] == 10 + results = p.query(q='RESERVOIR') + assert results['numberMatched'] == 14 + assert results['numberReturned'] == 10 - results = p.query(q="CREEK") - assert results["numberMatched"] == 7 - assert results["numberReturned"] == 7 + results = p.query(q='CREEK') + assert results['numberMatched'] == 7 + assert results['numberReturned'] == 7 def test_query_select_properties(config): p = CKANProvider(config) results = p.query() - assert len(results["features"][0]["properties"]) == 43 + assert len(results['features'][0]['properties']) == 43 - results = p.query(select_properties=["DAMNAME"]) - assert len(results["features"][0]["properties"]) == 1 + results = p.query(select_properties=['DAMNAME']) + assert len(results['features'][0]['properties']) == 1 - results = p.query(select_properties=["DAMNAME", "STATE"]) - assert len(results["features"][0]["properties"]) == 2 + results = p.query(select_properties=['DAMNAME', 'STATE']) + assert len(results['features'][0]['properties']) == 2 - config["properties"] = ["DAMNAME", "RIVER"] + config['properties'] = ['DAMNAME', 'RIVER'] p = CKANProvider(config) results = p.query() - assert len(results["features"][0]["properties"]) == 2 - assert results["features"][0]["properties"]["DAMNAME"] == "NAVAJO" + assert len(results['features'][0]['properties']) == 2 + assert results['features'][0]['properties']['DAMNAME'] == 'NAVAJO' def test_get(config): p = CKANProvider(config) result = p.get(1) - assert result["id"] == 1 - assert result["properties"]["DAMNAME"] == "NAVAJO" + assert result['id'] == 1 + assert result['properties']['DAMNAME'] == 'NAVAJO' diff --git a/tests/test_postgresql_provider.py b/tests/test_postgresql_provider.py index 8a35353..978a419 100644 --- a/tests/test_postgresql_provider.py +++ b/tests/test_postgresql_provider.py @@ -51,40 +51,40 @@ from pygeoapi.provider.postgresql import PostgreSQLProvider import pygeoapi.provider.postgresql as postgresql_provider_module -PASSWORD = os.environ.get("POSTGRESQL_PASSWORD", "postgres") -DEFAULT_CRS = "http://www.opengis.net/def/crs/OGC/1.3/CRS84" +PASSWORD = os.environ.get('POSTGRESQL_PASSWORD', 'postgres') +DEFAULT_CRS = 'http://www.opengis.net/def/crs/OGC/1.3/CRS84' @pytest.fixture() def config(): return { - "name": "PostgreSQL", - "type": "feature", - "data": { - "host": "127.0.0.1", - "dbname": "test", - "user": "postgres", - "password": PASSWORD, - "search_path": ["osm", "public"], + 'name': 'PostgreSQL', + 'type': 'feature', + 'data': { + 'host': '127.0.0.1', + 'dbname': 'test', + 'user': 'postgres', + 'password': PASSWORD, + 'search_path': ['osm', 'public'], }, - "options": {"connect_timeout": 10}, - "id_field": "osm_id", - "table": "hotosm_bdi_waterways", - "geom_field": "foo_geom", + 'options': {'connect_timeout': 10}, + 'id_field': 'osm_id', + 'table': 'hotosm_bdi_waterways', + 'geom_field': 'foo_geom', } def test_valid_connection_options(config): - if config.get("options"): - keys = list(config["options"].keys()) + if config.get('options'): + keys = list(config['options'].keys()) for key in keys: assert key in [ - "connect_timeout", - "tcp_user_timeout", - "keepalives", - "keepalives_idle", - "keepalives_count", - "keepalives_interval", + 'connect_timeout', + 'tcp_user_timeout', + 'keepalives', + 'keepalives_idle', + 'keepalives_count', + 'keepalives_interval', ] @@ -92,48 +92,48 @@ def 
test_query(config): """Testing query for a valid JSON object with geometry""" p = PostgreSQLProvider(config) feature_collection = p.query() - assert feature_collection.get("type") == "FeatureCollection" - features = feature_collection.get("features") + assert feature_collection.get('type') == 'FeatureCollection' + features = feature_collection.get('features') assert features is not None feature = features[0] - properties = feature.get("properties") + properties = feature.get('properties') assert properties is not None - geometry = feature.get("geometry") + geometry = feature.get('geometry') assert geometry is not None def test_query_materialised_view(config): """Testing query using a materialised view""" config_materialised_view = config.copy() - config_materialised_view["table"] = "hotosm_bdi_drains" + config_materialised_view['table'] = 'hotosm_bdi_drains' provider = PostgreSQLProvider(config_materialised_view) # Only ID, width and depth properties should be available - assert set(provider.get_fields().keys()) == {"osm_id", "width", "depth"} + assert set(provider.get_fields().keys()) == {'osm_id', 'width', 'depth'} def test_query_with_property_filter(config): """Test query valid features when filtering by property""" p = PostgreSQLProvider(config) - feature_collection = p.query(properties=[("waterway", "stream")]) - features = feature_collection.get("features") + feature_collection = p.query(properties=[('waterway', 'stream')]) + features = feature_collection.get('features') stream_features = list( - filter(lambda feature: feature["properties"]["waterway"] == "stream", features) + filter(lambda feature: feature['properties']['waterway'] == 'stream', features) ) assert len(features) == len(stream_features) feature_collection = p.query(limit=50) - features = feature_collection.get("features") + features = feature_collection.get('features') stream_features = list( - filter(lambda feature: feature["properties"]["waterway"] == "stream", features) + filter(lambda feature: feature['properties']['waterway'] == 'stream', features) ) other_features = list( - filter(lambda feature: feature["properties"]["waterway"] != "stream", features) + filter(lambda feature: feature['properties']['waterway'] != 'stream', features) ) assert len(features) != len(stream_features) assert len(other_features) != 0 - assert feature_collection["numberMatched"] == 14776 - assert feature_collection["numberReturned"] == 50 + assert feature_collection['numberMatched'] == 14776 + assert feature_collection['numberReturned'] == 50 def test_query_with_paging(config): @@ -141,14 +141,14 @@ def test_query_with_paging(config): p = PostgreSQLProvider(config) feature_collection = p.query(limit=50) - assert feature_collection["numberMatched"] == 14776 - assert feature_collection["numberReturned"] == 50 + assert feature_collection['numberMatched'] == 14776 + assert feature_collection['numberReturned'] == 50 - offset = feature_collection["numberMatched"] - 10 + offset = feature_collection['numberMatched'] - 10 feature_collection = p.query(offset=offset) - assert feature_collection["numberMatched"] == 14776 - assert feature_collection["numberReturned"] == 10 + assert feature_collection['numberMatched'] == 14776 + assert feature_collection['numberReturned'] == 10 def test_query_with_config_properties(config): @@ -157,75 +157,75 @@ def test_query_with_config_properties(config): No properties should be returned that are not requested. Note that not all requested properties have to exist in the query result. 
""" - properties_subset = ["name", "waterway", "width", "does_not_exist"] - config.update({"properties": properties_subset}) + properties_subset = ['name', 'waterway', 'width', 'does_not_exist'] + config.update({'properties': properties_subset}) provider = PostgreSQLProvider(config) assert provider.properties == properties_subset result = provider.query() - feature = result.get("features")[0] - properties = feature.get("properties") + feature = result.get('features')[0] + properties = feature.get('properties') for property_name in properties.keys(): - assert property_name in config["properties"] + assert property_name in config['properties'] @pytest.mark.parametrize( - "property_filter, expected", + 'property_filter, expected', [ ([], 14776), - ([("waterway", "stream")], 13930), - ([("waterway", "this does not exist")], 0), + ([('waterway', 'stream')], 13930), + ([('waterway', 'this does not exist')], 0), ], ) def test_query_hits_with_property_filter(config, property_filter, expected): """Test query resulttype=hits""" provider = PostgreSQLProvider(config) - results = provider.query(properties=property_filter, resulttype="hits") - assert results["numberMatched"] == expected + results = provider.query(properties=property_filter, resulttype='hits') + assert results['numberMatched'] == expected def test_query_bbox(config): """Test query with a specified bounding box""" psp = PostgreSQLProvider(config) boxed_feature_collection = psp.query(bbox=[29.3373, -3.4099, 29.3761, -3.3924]) - assert len(boxed_feature_collection["features"]) == 5 + assert len(boxed_feature_collection['features']) == 5 def test_query_sortby(config): """Test query with sorting""" psp = PostgreSQLProvider(config) - up = psp.query(sortby=[{"property": "osm_id", "order": "+"}]) - assert up["features"][0]["id"] == 13990765 - down = psp.query(sortby=[{"property": "osm_id", "order": "-"}]) - assert down["features"][0]["id"] == 620735702 + up = psp.query(sortby=[{'property': 'osm_id', 'order': '+'}]) + assert up['features'][0]['id'] == 13990765 + down = psp.query(sortby=[{'property': 'osm_id', 'order': '-'}]) + assert down['features'][0]['id'] == 620735702 - name = psp.query(sortby=[{"property": "name", "order": "+"}]) - assert name["features"][0]["properties"]["name"] == "Agasasa" + name = psp.query(sortby=[{'property': 'name', 'order': '+'}]) + assert name['features'][0]['properties']['name'] == 'Agasasa' def test_query_skip_geometry(config): """Test query without geometry""" provider = PostgreSQLProvider(config) result = provider.query(skip_geometry=True) - feature = result["features"][0] - assert feature["geometry"] is None + feature = result['features'][0] + assert feature['geometry'] is None @pytest.mark.parametrize( - "properties", - [["name"], ["name", "waterway"], ["name", "waterway", "this does not exist"]], + 'properties', + [['name'], ['name', 'waterway'], ['name', 'waterway', 'this does not exist']], ) def test_query_select_properties(config, properties): """Test query with selected properties""" provider = PostgreSQLProvider(config) result = provider.query(select_properties=properties) - feature = result["features"][0] + feature = result['features'][0] expected = set(provider.get_fields().keys()).intersection(properties) - assert set(feature["properties"].keys()) == expected + assert set(feature['properties'].keys()) == expected @pytest.mark.parametrize( - "id_, prev, next_", + 'id_, prev, next_', [ (29701937, 29698243, 29704504), (13990765, 13990765, 25469515), # First item, prev should be id_ @@ -236,14 +236,14 @@ 
def test_get_simple(config, id_, prev, next_): """Testing query for a specific object and identifying prev/next""" p = PostgreSQLProvider(config) result = p.get(id_) - assert result["id"] == id_ - assert "geometry" in result - assert "properties" in result - assert result["type"] == "Feature" - assert "foo_geom" not in result["properties"] # geometry is separate + assert result['id'] == id_ + assert 'geometry' in result + assert 'properties' in result + assert result['type'] == 'Feature' + assert 'foo_geom' not in result['properties'] # geometry is separate - assert result["prev"] == prev - assert result["next"] == next_ + assert result['prev'] == prev + assert result['next'] == next_ def test_get_with_config_properties(config): @@ -252,14 +252,14 @@ def test_get_with_config_properties(config): No properties should be returned that are not requested. Note that not all requested properties have to exist in the query result. """ - properties_subset = ["name", "waterway", "width", "does_not_exist"] - config.update({"properties": properties_subset}) + properties_subset = ['name', 'waterway', 'width', 'does_not_exist'] + config.update({'properties': properties_subset}) provider = PostgreSQLProvider(config) assert provider.properties == properties_subset result = provider.get(80835483) - properties = result.get("properties") + properties = result.get('properties') for property_name in properties.keys(): - assert property_name in config["properties"] + assert property_name in config['properties'] def test_get_not_existing_item_raise_exception(config): @@ -270,10 +270,10 @@ def test_get_not_existing_item_raise_exception(config): @pytest.mark.parametrize( - "cql, expected_ids", + 'cql, expected_ids', [ ( - "osm_id BETWEEN 80800000 AND 80900000", + 'osm_id BETWEEN 80800000 AND 80900000', [ 80827787, 80827793, @@ -297,14 +297,14 @@ def test_get_not_existing_item_raise_exception(config): "osm_id BETWEEN 80800000 AND 80900000 AND name IN ('Muhira', 'Mpanda')", [80835468, 80835472, 80835475, 80835478], ), - ("osm_id BETWEEN 80800000 AND 80900000 AND name IS NULL", [80835474, 80835483]), + ('osm_id BETWEEN 80800000 AND 80900000 AND name IS NULL', [80835474, 80835483]), ( - "osm_id BETWEEN 80800000 AND 80900000 AND BBOX(foo_geom, 29, -2.8, 29.2, -2.9)", # noqa + 'osm_id BETWEEN 80800000 AND 80900000 AND BBOX(foo_geom, 29, -2.8, 29.2, -2.9)', # noqa [80827793, 80835470, 80835472, 80835483, 80835489], ), ( - "osm_id BETWEEN 80800000 AND 80900000 AND " - "CROSSES(foo_geom, LINESTRING(29.091 -2.731, 29.253 -2.845))", + 'osm_id BETWEEN 80800000 AND 80900000 AND ' + 'CROSSES(foo_geom, LINESTRING(29.091 -2.731, 29.253 -2.845))', [80835470, 80835472, 80835489], ), ], @@ -315,19 +315,19 @@ def test_query_cql(config, cql, expected_ids): provider = PostgreSQLProvider(config) feature_collection = provider.query(filterq=ast) - assert feature_collection.get("type") == "FeatureCollection" + assert feature_collection.get('type') == 'FeatureCollection' - features = feature_collection.get("features") - ids = [feature["id"] for feature in features] + features = feature_collection.get('features') + ids = [feature['id'] for feature in features] assert ids == expected_ids def test_query_cql_properties_bbox_filters(config): """Test query with CQL, properties and bbox filters""" # Arrange - properties = [("waterway", "stream")] + properties = [('waterway', 'stream')] bbox = [29, -2.8, 29.2, -2.9] - filterq = parse("osm_id BETWEEN 80800000 AND 80900000") + filterq = parse('osm_id BETWEEN 80800000 AND 80900000') expected_ids = 
[80835470] # Act @@ -337,25 +337,25 @@ def test_query_cql_properties_bbox_filters(config): ) # Assert - ids = [feature["id"] for feature in feature_collection.get("features")] + ids = [feature['id'] for feature in feature_collection.get('features')] assert ids == expected_ids def test_get_fields(config): # Arrange expected_fields = { - "blockage": {"type": "string", "format": None}, - "covered": {"type": "string", "format": None}, - "depth": {"type": "string", "format": None}, - "layer": {"type": "string", "format": None}, - "name": {"type": "string", "format": None}, - "natural": {"type": "string", "format": None}, - "osm_id": {"type": "integer", "format": None}, - "tunnel": {"type": "string", "format": None}, - "water": {"type": "string", "format": None}, - "waterway": {"type": "string", "format": None}, - "width": {"type": "string", "format": None}, - "z_index": {"type": "string", "format": None}, + 'blockage': {'type': 'string', 'format': None}, + 'covered': {'type': 'string', 'format': None}, + 'depth': {'type': 'string', 'format': None}, + 'layer': {'type': 'string', 'format': None}, + 'name': {'type': 'string', 'format': None}, + 'natural': {'type': 'string', 'format': None}, + 'osm_id': {'type': 'integer', 'format': None}, + 'tunnel': {'type': 'string', 'format': None}, + 'water': {'type': 'string', 'format': None}, + 'waterway': {'type': 'string', 'format': None}, + 'width': {'type': 'string', 'format': None}, + 'z_index': {'type': 'string', 'format': None}, } # Act @@ -372,24 +372,24 @@ def test_instantiation(config): provider = PostgreSQLProvider(config) # Assert - assert provider.name == "PostgreSQL" - assert provider.table == "hotosm_bdi_waterways" - assert provider.id_field == "osm_id" + assert provider.name == 'PostgreSQL' + assert provider.table == 'hotosm_bdi_waterways' + assert provider.id_field == 'osm_id' @pytest.mark.parametrize( - "bad_data, exception, match", + 'bad_data, exception, match', [ - ({"table": "bad_table"}, ProviderQueryError, "Table.*not found in schema.*"), + ({'table': 'bad_table'}, ProviderQueryError, 'Table.*not found in schema.*'), ( - {"data": {"bad": "data"}}, + {'data': {'bad': 'data'}}, ProviderConnectionError, - r"Could not connect to postgresql\+psycopg2:\/\/:5432 \(password hidden\).", + r'Could not connect to postgresql\+psycopg2:\/\/:5432 \(password hidden\).', ), # noqa ( - {"id_field": "bad_id"}, + {'id_field': 'bad_id'}, ProviderQueryError, - r"No such id_field column \(bad_id\) on osm.hotosm_bdi_waterways.", + r'No such id_field column \(bad_id\) on osm.hotosm_bdi_waterways.', ), ], ) @@ -407,8 +407,8 @@ def test_instantiation_with_bad_config(config, bad_data, exception, match): def test_instantiation_with_bad_credentials(config): # Arrange - config["data"].update({"user": "bad_user"}) - match = r"Could not connect to .*bad_user:\*\*\*@" + config['data'].update({'user': 'bad_user'}) + match = r'Could not connect to .*bad_user:\*\*\*@' # Make sure we don't use a cached connection in the tests postgresql_provider_module._ENGINE_STORE = {} @@ -428,7 +428,7 @@ def test_engine_and_table_model_stores(config): # Same database connection details, but different table different_table = config.copy() - different_table.update(table="hotosm_bdi_drains") + different_table.update(table='hotosm_bdi_drains') provider2 = PostgreSQLProvider(different_table) assert repr(provider2._engine) == repr(provider0._engine) assert provider2._engine is provider0._engine @@ -438,7 +438,7 @@ def test_engine_and_table_model_stores(config): # and also a different 
diff --git a/tests/test_sitemap_process.py b/tests/test_sitemap_process.py
index f83b50a..30b1714 100644
--- a/tests/test_sitemap_process.py
+++ b/tests/test_sitemap_process.py
@@ -35,35 +35,35 @@
 from pygeoapi.util import url_join

-PYGEOAPI_URL = "http://localhost:5000"
-PROCESS_URL = url_join(PYGEOAPI_URL, "processes/sitemap-generator/execution")
+PYGEOAPI_URL = 'http://localhost:5000'
+PROCESS_URL = url_join(PYGEOAPI_URL, 'processes/sitemap-generator/execution')

 HTTP = Session()


 @pytest.fixture
 def body():
-    return {"inputs": {"include-common": True, "include-features": False, "zip": False}}
+    return {'inputs': {'include-common': True, 'include-features': False, 'zip': False}}


 def test_sitemap_generator(body):
-    body["inputs"]["include-features"] = True
+    body['inputs']['include-features'] = True
     r = HTTP.post(PROCESS_URL, json=body)
     assert r.status_code == 200

     sitemap = r.json()
     assert len(sitemap) == 5

-    common = sitemap.pop("common.xml")
+    common = sitemap.pop('common.xml')
     assert len(common) == 2402

     root = xml.etree.ElementTree.fromstring(common)
-    assert all(i.tag == j.tag for (i, j) in zip(root, root.findall("url")))
+    assert all(i.tag == j.tag for (i, j) in zip(root, root.findall('url')))

-    assert all(f.endswith("__0.xml") for f in sitemap)
+    assert all(f.endswith('__0.xml') for f in sitemap)


 def test_sitemap_no_common(body):
-    body["inputs"]["include-common"] = False
+    body['inputs']['include-common'] = False
     r = HTTP.post(PROCESS_URL, json=body)
     assert r.status_code == 200

@@ -78,12 +78,12 @@ def test_sitemap_no_features(body):
     sitemap = r.json()
     assert len(sitemap) == 1

-    common = sitemap.pop("common.xml")
+    common = sitemap.pop('common.xml')
     assert len(common) == 2402


 def test_sitemap_zip(body):
-    body["inputs"]["zip"] = True
+    body['inputs']['zip'] = True
     r = HTTP.post(PROCESS_URL, json=body)
     assert r.status_code == 200

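Reviewer note (tests/test_sitemap_process.py): these tests assume a pygeoapi instance already serving at PYGEOAPI_URL, so they are integration tests rather than unit tests. A minimal sketch of the request/response shape they exercise, using only names visible in the module above; the exact sitemap filenames depend on the configured collections, so treat the example keys as illustrative:

    from requests import Session

    # Hedged sketch: endpoint and input names are taken from the test module.
    body = {'inputs': {'include-common': True, 'include-features': True, 'zip': False}}
    r = Session().post(
        'http://localhost:5000/processes/sitemap-generator/execution', json=body
    )
    # Response maps filename -> XML string: 'common.xml' plus per-collection
    # files whose names end in '__0.xml' when features are included.
    sitemap = r.json()
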
diff --git a/tests/test_sparql_provider.py b/tests/test_sparql_provider.py
index 284f77b..c879a48 100644
--- a/tests/test_sparql_provider.py
+++ b/tests/test_sparql_provider.py
@@ -35,18 +35,18 @@
 @pytest.fixture()
 def config():
     return {
-        "name": "pygeoapi_plugins.provider.sparql.SPARQLProvider",
-        "type": "feature",
-        "data": "tests/data/places.csv",
-        "id_field": "index",
-        "geometry": {"x_field": "lon", "y_field": "lat"},
-        "sparql_provider": "CSV",
-        "sparql_endpoint": "https://dbpedia.org/sparql",
-        "sparql_subject": "uri",
-        "sparql_predicates": {
-            "population": "dbo:populationTotal",
-            "country": "",
-            "leader": "dbpedia2:leaderName",
+        'name': 'pygeoapi_plugins.provider.sparql.SPARQLProvider',
+        'type': 'feature',
+        'data': 'tests/data/places.csv',
+        'id_field': 'index',
+        'geometry': {'x_field': 'lon', 'y_field': 'lat'},
+        'sparql_provider': 'CSV',
+        'sparql_endpoint': 'https://dbpedia.org/sparql',
+        'sparql_subject': 'uri',
+        'sparql_predicates': {
+            'population': 'dbo:populationTotal',
+            'country': '',
+            'leader': 'dbpedia2:leaderName',
         },
     }

@@ -56,32 +56,32 @@ def test_query(config):
     base_fields = p.p.get_fields()
     assert len(base_fields) == 3
-    assert base_fields["city"]["type"] == "string"
-    assert base_fields["uri"]["type"] == "string"
+    assert base_fields['city']['type'] == 'string'
+    assert base_fields['uri']['type'] == 'string'

     fields = p.get_fields()
     assert len(fields) == 6
     for field in base_fields:
         assert field in fields
-    assert fields["country"]["type"] == "string"
-    assert fields["leader"]["type"] == "string"
+    assert fields['country']['type'] == 'string'
+    assert fields['leader']['type'] == 'string'

     results = p.query()
-    assert len(results["features"]) == 8
+    assert len(results['features']) == 8

-    assert results["features"][0]["id"] == "0"
-    assert results["features"][0]["properties"]["city"] == "Berlin"
-    assert results["features"][0]["properties"]["population"] == "3677472"
+    assert results['features'][0]['id'] == '0'
+    assert results['features'][0]['properties']['city'] == 'Berlin'
+    assert results['features'][0]['properties']['population'] == '3677472'
     assert (
-        results["features"][0]["properties"]["country"]
-        == "http://dbpedia.org/resource/Germany"
+        results['features'][0]['properties']['country']
+        == 'http://dbpedia.org/resource/Germany'
     )  # noqa
-    assert results["features"][0]["geometry"]["coordinates"][0] == 13.405
-    assert results["features"][0]["geometry"]["coordinates"][1] == 52.52
+    assert results['features'][0]['geometry']['coordinates'][0] == 13.405
+    assert results['features'][0]['geometry']['coordinates'][1] == 52.52

-    assert results["features"][2]["properties"]["city"] == "New York"
+    assert results['features'][2]['properties']['city'] == 'New York'
     assert (
-        results["features"][2]["properties"]["country"]
-        == "http://dbpedia.org/resource/United_States"
+        results['features'][2]['properties']['country']
+        == 'http://dbpedia.org/resource/United_States'
     )  # noqa
-    assert results["features"][2]["properties"]["leader"] == "Eric Adams"
+    assert results['features'][2]['properties']['leader'] == 'Eric Adams'
diff --git a/tests/test_xml_formatter.py b/tests/test_xml_formatter.py
index 2d9eafc..23a4f1d 100644
--- a/tests/test_xml_formatter.py
+++ b/tests/test_xml_formatter.py
@@ -39,12 +39,12 @@
 @pytest.fixture()
 def config():
     return {
-        "name": "CSV",
-        "type": "feature",
-        "data": "tests/data/places.csv",
-        "id_field": "index",
-        "uri_field": "uri",
-        "geometry": {"x_field": "lon", "y_field": "lat"},
+        'name': 'CSV',
+        'type': 'feature',
+        'data': 'tests/data/places.csv',
+        'id_field': 'index',
+        'uri_field': 'uri',
+        'geometry': {'x_field': 'lon', 'y_field': 'lat'},
     }


@@ -54,17 +54,17 @@ def test_xml_formatter(config):
     fc = p.query()
     f_xml = f.write(data=fc)
-    assert f.mimetype == "application/xml; charset=utf-8"
+    assert f.mimetype == 'application/xml; charset=utf-8'

     root = ET.fromstring(f_xml)
-    assert all(i.tag == j.tag for (i, j) in zip(root, root.findall("url")))
+    assert all(i.tag == j.tag for (i, j) in zip(root, root.findall('url')))

-    node = root.find("url")
-    assert node.find("loc").text == "http://dbpedia.org/resource/Berlin"
+    node = root.find('url')
+    assert node.find('loc').text == 'http://dbpedia.org/resource/Berlin'

-    lastmod = node.find("lastmod").text
-    strptime = datetime.strptime(lastmod, "%Y-%m-%dT%H:%M:%SZ")
+    lastmod = node.find('lastmod').text
+    strptime = datetime.strptime(lastmod, '%Y-%m-%dT%H:%M:%SZ')
     assert isinstance(strptime, datetime)

-    now = datetime.now().strftime("%Y-%m-%dT%H:%M")
+    now = datetime.now().strftime('%Y-%m-%dT%H:%M')
     assert now in lastmod
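
Reviewer note (tests/test_xml_formatter.py): the final assertion compares a minute-resolution timestamp taken after the write (`now`) against the formatter's `lastmod` by substring, so it can flake when the sitemap is written just before a minute rollover. A hedged alternative that bounds the timestamp's age instead of matching a string prefix, reusing `node` and the format string from the test; only `timedelta` is new:

    from datetime import datetime, timedelta

    # Parse lastmod and assert it was written within the last minute,
    # rather than requiring the same minute string on both sides.
    lastmod = datetime.strptime(node.find('lastmod').text, '%Y-%m-%dT%H:%M:%SZ')
    assert timedelta(0) <= datetime.now() - lastmod < timedelta(minutes=1)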