diff --git a/pygeoapi/api.py b/pygeoapi/api.py
index 93fbee7c04..dfe8c65039 100644
--- a/pygeoapi/api.py
+++ b/pygeoapi/api.py
@@ -74,7 +74,8 @@
from pygeoapi.provider.base import (
ProviderGenericError, ProviderConnectionError, ProviderNotFoundError,
ProviderTypeError)
-from pygeoapi.models.provider.base import TilesMetadataFormat
+from pygeoapi.models.provider.base import (TilesMetadataFormat,
+ TileMatrixSetEnum)
from pygeoapi.models.cql import CQLModel
from pygeoapi.util import (dategetter, RequestedProcessExecutionMode,
@@ -751,6 +752,16 @@ def landing_page(self,
'type': FORMAT_TYPES[F_JSON],
'title': 'Jobs',
'href': f"{self.base_url}/jobs"
+ }, {
+ 'rel': 'http://www.opengis.net/def/rel/ogc/1.0/tiling-schemes',
+ 'type': FORMAT_TYPES[F_JSON],
+ 'title': 'The list of supported tiling schemes (as JSON)',
+ 'href': f"{self.base_url}/TileMatrixSets?f=json"
+ }, {
+ 'rel': 'http://www.opengis.net/def/rel/ogc/1.0/tiling-schemes',
+ 'type': FORMAT_TYPES[F_HTML],
+ 'title': 'The list of supported tiling schemes (as HTML)',
+ 'href': f"{self.base_url}/TileMatrixSets?f=html"
}]
headers = request.get_response_headers(**self.api_headers)
@@ -858,6 +869,122 @@ def conformance(self,
return headers, HTTPStatus.OK, to_json(conformance, self.pretty_print)
+ @gzip
+ @pre_process
+ def tilematrixsets(self,
+ request: Union[APIRequest, Any]) -> Tuple[dict, int,
+ str]:
+ """
+ Provide tileMatrixSets definition
+
+ :param request: A request object
+
+ :returns: tuple of headers, status code, content
+ """
+
+ if not request.is_valid():
+ return self.get_format_exception(request)
+
+ headers = request.get_response_headers(**self.api_headers)
+
+ # Retrieve available TileMatrixSets
+ enums = [e.value for e in TileMatrixSetEnum]
+
+ tms = {"tileMatrixSets": []}
+
+ for e in enums:
+ tms['tileMatrixSets'].append({
+ "title": e.title,
+ "id": e.tileMatrixSet,
+ "uri": e.tileMatrixSetURI,
+ "links": [
+ {
+ "rel": "self",
+ "type": "text/html",
+ "title": f"The HTML representation of the {e.tileMatrixSet} tile matrix set", # noqa
+ "href": f"{self.base_url}/TileMatrixSets/{e.tileMatrixSet}?f=html" # noqa
+ },
+ {
+ "rel": "self",
+ "type": "application/json",
+ "title": f"The JSON representation of the {e.tileMatrixSet} tile matrix set", # noqa
+ "href": f"{self.base_url}/TileMatrixSets/{e.tileMatrixSet}?f=json" # noqa
+ }
+ ]
+ })
+
+ tms['links'] = [{
+ "rel": "alternate",
+ "type": "text/html",
+ "title": "This document as HTML",
+ "href": f"{self.base_url}/tileMatrixSets?f=html"
+ }, {
+ "rel": "self",
+ "type": "application/json",
+ "title": "This document",
+ "href": f"{self.base_url}/tileMatrixSets?f=json"
+ }]
+
+ if request.format == F_HTML: # render
+ content = render_j2_template(self.tpl_config,
+ 'tilematrixsets/index.html',
+ tms, request.locale)
+ return headers, HTTPStatus.OK, content
+
+ return headers, HTTPStatus.OK, to_json(tms, self.pretty_print)
+
+ @gzip
+ @pre_process
+ def tilematrixset(self,
+ request: Union[APIRequest, Any],
+ tileMatrixSetId) -> Tuple[dict,
+ int, str]:
+ """
+ Provide tile matrix definition
+
+ :param request: A request object
+ :param tileMatrixSetId: identifier of tile matrix set
+ :returns: tuple of headers, status code, content
+ """
+
+ if not request.is_valid():
+ return self.get_format_exception(request)
+
+ headers = request.get_response_headers(**self.api_headers)
+
+ # Retrieve relevant TileMatrixSet
+ enums = [e.value for e in TileMatrixSetEnum]
+ enum = None
+
+ try:
+ for e in enums:
+ if tileMatrixSetId == e.tileMatrixSet:
+ enum = e
+ if not enum:
+ raise ValueError('could not find this tilematrixset')
+ except ValueError as err:
+ return self.get_exception(
+ HTTPStatus.BAD_REQUEST, headers, request.format,
+ 'InvalidParameterValue', str(err))
+
+ tms = {
+ "title": enum.tileMatrixSet,
+ "crs": enum.crs,
+ "id": enum.tileMatrixSet,
+ "uri": enum.tileMatrixSetURI,
+ "orderedAxes": enum.orderedAxes,
+ "wellKnownScaleSet": enum.wellKnownScaleSet,
+ "tileMatrices": enum.tileMatrices
+ }
+
+ if request.format == F_HTML: # render
+ content = render_j2_template(self.tpl_config,
+ 'tilematrixsets/tilematrixset.html',
+ tms, request.locale)
+ return headers, HTTPStatus.OK, content
+
+ return headers, HTTPStatus.OK, to_json(tms, self.pretty_print)
+
@gzip
@pre_process
@jsonldify
@@ -2665,7 +2792,12 @@ def get_collection_tiles(self, request: Union[APIRequest, Any],
'dataType': 'vector',
'links': []
}
- tile_matrix['links'].append(matrix.tileMatrixSetDefinition)
+ tile_matrix['links'].append({
+ 'type': FORMAT_TYPES[F_JSON],
+ 'rel': 'http://www.opengis.net/def/rel/ogc/1.0/tiling-scheme',
+ 'title': f'{matrix.tileMatrixSet} TileMatrixSet definition (as {F_JSON})', # noqa
+ 'href': f'{self.base_url}/TileMatrixSets/{matrix.tileMatrixSet}?f={F_JSON}' # noqa
+ })
tile_matrix['links'].append({
'type': FORMAT_TYPES[F_JSON],
'rel': request.get_linkrel(F_JSON),
diff --git a/pygeoapi/django_/urls.py b/pygeoapi/django_/urls.py
index 4748602081..7d254600fd 100644
--- a/pygeoapi/django_/urls.py
+++ b/pygeoapi/django_/urls.py
@@ -76,6 +76,16 @@ def apply_slash_rule(url: str):
views.conformance,
name='conformance'
),
+ path(
+ apply_slash_rule('TileMatrixSets/'),
+ views.tilematrixsets,
+ name='tilematrixsets'
+ ),
+ path(
+ 'TileMatrixSets/<str:tilematrixset_id>/',
+ views.tilematrixsets,
+ name='tilematrixset'
+ ),
path(
apply_slash_rule('collections/'),
views.collections,
diff --git a/pygeoapi/django_/views.py b/pygeoapi/django_/views.py
index bcc21302c4..851e35239e 100644
--- a/pygeoapi/django_/views.py
+++ b/pygeoapi/django_/views.py
@@ -94,6 +94,28 @@ def conformance(request: HttpRequest) -> HttpResponse:
return response
+def tilematrixsets(request: HttpRequest,
+ tilematrixset_id: Optional[str] = None) -> HttpResponse:
+ """
+ OGC API tilematrixsets endpoint
+
+ :request Django HTTP Request
+ :param tilematrixset_id: tile matrix set identifier
+
+ :returns: Django HTTP Response
+ """
+
+ response = None
+
+ if tilematrixset_id is None:
+ response_ = _feed_response(request, 'tilematrixsets')
+ else:
+ response_ = _feed_response(request, 'tilematrixset', tilematrixset_id)
+ response = _to_django_response(*response_)
+
+ return response
+
+
def collections(request: HttpRequest,
collection_id: Optional[str] = None) -> HttpResponse:
"""
diff --git a/pygeoapi/flask_app.py b/pygeoapi/flask_app.py
index f8502533f5..0ede3b357d 100644
--- a/pygeoapi/flask_app.py
+++ b/pygeoapi/flask_app.py
@@ -196,12 +196,9 @@ def get_tilematrix_set(tileMatrixSetId=None):
OGC API TileMatrixSet endpoint
:param tileMatrixSetId: identifier of tile matrix set
-
:returns: HTTP response
"""
-
- return execute_from_flask(tiles_api.tilematrixset, request,
- tileMatrixSetId)
+ return get_response(api_.tilematrixset(request, tileMatrixSetId))
@BLUEPRINT.route('/TileMatrixSets')
@@ -211,8 +208,7 @@ def get_tilematrix_sets():
:returns: HTTP response
"""
-
- return execute_from_flask(tiles_api.tilematrixsets, request)
+ return get_response(api_.tilematrixsets(request))
@BLUEPRINT.route('/collections')
diff --git a/pygeoapi/models/provider/base.py b/pygeoapi/models/provider/base.py
index 727e46743f..844590ee43 100644
--- a/pygeoapi/models/provider/base.py
+++ b/pygeoapi/models/provider/base.py
@@ -74,34 +74,692 @@ class TileMatrixSetEnumType(BaseModel):
tileMatrixSet: str
tileMatrixSetURI: str
crs: str
- tileMatrixSetDefinition: dict
-
+ title: str
+ orderedAxes: List[str]
+ wellKnownScaleSet: str
+ tileMatrices: List[dict]
class TileMatrixSetEnum(Enum):
WORLDCRS84QUAD = TileMatrixSetEnumType(
tileMatrixSet="WorldCRS84Quad",
tileMatrixSetURI="http://www.opengis.net/def/tilematrixset/OGC/1.0/WorldCRS84Quad", # noqa
crs="http://www.opengis.net/def/crs/OGC/1.3/CRS84",
- tileMatrixSetDefinition=
- {
- 'type': 'application/json',
- 'rel': 'http://www.opengis.net/def/rel/ogc/1.0/tiling-scheme',
- 'title': 'WorldCRS84QuadTileMatrixSet definition (as JSON)',
- 'href': 'https://raw.githubusercontent.com/opengeospatial/2D-Tile-Matrix-Set/master/registry/json/WorldCRS84Quad.json' # authoritative TMS definition
- }
- )
+ title="World Mercator WGS84 (ellipsoid)",
+ orderedAxes=["Lon", "Lat"],
+ wellKnownScaleSet="http://www.opengis.net/def/wkss/OGC/1.0/GoogleCRS84Quad", # noqa
+ tileMatrices=[
+ {
+ "id": "0",
+ "scaleDenominator": 279541132.0143588781357,
+ "cellSize": 0.703125,
+ "cornerOfOrigin": "topLeft",
+ "pointOfOrigin": [-180, 90],
+ "matrixWidth": 2,
+ "matrixHeight": 1,
+ "tileWidth": 256,
+ "tileHeight": 256
+ },
+ {
+ "id": "1",
+ "scaleDenominator": 139770566.0071794390678,
+ "cellSize": 0.3515625,
+ "cornerOfOrigin": "topLeft",
+ "pointOfOrigin": [-180, 90],
+ "matrixWidth": 4,
+ "matrixHeight": 2,
+ "tileWidth": 256,
+ "tileHeight": 256
+ },
+ {
+ "id": "2",
+ "scaleDenominator": 69885283.0035897195339,
+ "cellSize": 0.17578125,
+ "cornerOfOrigin": "topLeft",
+ "pointOfOrigin": [-180, 90],
+ "matrixWidth": 8,
+ "matrixHeight": 4,
+ "tileWidth": 256,
+ "tileHeight": 256
+ },
+ {
+ "id": "3",
+ "scaleDenominator": 34942641.501794859767,
+ "cellSize": 0.087890625,
+ "cornerOfOrigin": "topLeft",
+ "pointOfOrigin": [-180, 90],
+ "matrixWidth": 16,
+ "matrixHeight": 8,
+ "tileWidth": 256,
+ "tileHeight": 256
+ },
+ {
+ "id": "4",
+ "scaleDenominator": 17471320.7508974298835,
+ "cellSize": 0.0439453125,
+ "cornerOfOrigin": "topLeft",
+ "pointOfOrigin": [-180, 90],
+ "matrixWidth": 32,
+ "matrixHeight": 16,
+ "tileWidth": 256,
+ "tileHeight": 256
+ },
+ {
+ "id": "5",
+ "scaleDenominator": 8735660.3754487149417,
+ "cellSize": 0.02197265625,
+ "cornerOfOrigin": "topLeft",
+ "pointOfOrigin": [-180, 90],
+ "matrixWidth": 64,
+ "matrixHeight": 32,
+ "tileWidth": 256,
+ "tileHeight": 256
+ },
+ {
+ "id": "6",
+ "scaleDenominator": 4367830.1877243574709,
+ "cellSize": 0.010986328125,
+ "cornerOfOrigin": "topLeft",
+ "pointOfOrigin": [-180, 90],
+ "matrixWidth": 128,
+ "matrixHeight": 64,
+ "tileWidth": 256,
+ "tileHeight": 256
+ },
+ {
+ "id": "7",
+ "scaleDenominator": 2183915.0938621787354,
+ "cellSize": 0.0054931640625,
+ "cornerOfOrigin": "topLeft",
+ "pointOfOrigin": [-180, 90],
+ "matrixWidth": 256,
+ "matrixHeight": 128,
+ "tileWidth": 256,
+ "tileHeight": 256
+ },
+ {
+ "id": "8",
+ "scaleDenominator": 1091957.5469310893677,
+ "cellSize": 0.0027465820312,
+ "cornerOfOrigin": "topLeft",
+ "pointOfOrigin": [-180, 90],
+ "matrixWidth": 512,
+ "matrixHeight": 256,
+ "tileWidth": 256,
+ "tileHeight": 256
+ },
+ {
+ "id": "9",
+ "scaleDenominator": 545978.7734655446839,
+ "cellSize": 0.0013732910156,
+ "cornerOfOrigin": "topLeft",
+ "pointOfOrigin": [-180, 90],
+ "matrixWidth": 1024,
+ "matrixHeight": 512,
+ "tileWidth": 256,
+ "tileHeight": 256
+ },
+ {
+ "id": "10",
+ "scaleDenominator": 272989.3867327723419,
+ "cellSize": 0.0006866455078,
+ "cornerOfOrigin": "topLeft",
+ "pointOfOrigin": [-180, 90],
+ "matrixWidth": 2048,
+ "matrixHeight": 1024,
+ "tileWidth": 256,
+ "tileHeight": 256
+ },
+ {
+ "id": "11",
+ "scaleDenominator": 136494.693366386171,
+ "cellSize": 0.0003433227539,
+ "cornerOfOrigin": "topLeft",
+ "pointOfOrigin": [-180, 90],
+ "matrixWidth": 4096,
+ "matrixHeight": 2048,
+ "tileWidth": 256,
+ "tileHeight": 256
+ },
+ {
+ "id": "12",
+ "scaleDenominator": 68247.3466831930855,
+ "cellSize": 0.000171661377,
+ "cornerOfOrigin": "topLeft",
+ "pointOfOrigin": [-180, 90],
+ "matrixWidth": 8192,
+ "matrixHeight": 4096,
+ "tileWidth": 256,
+ "tileHeight": 256
+ },
+ {
+ "id": "13",
+ "scaleDenominator": 34123.6733415965427,
+ "cellSize": 0.0000858306885,
+ "cornerOfOrigin": "topLeft",
+ "pointOfOrigin": [-180, 90],
+ "matrixWidth": 16384,
+ "matrixHeight": 8192,
+ "tileWidth": 256,
+ "tileHeight": 256
+ },
+ {
+ "id": "14",
+ "scaleDenominator": 17061.8366707982714,
+ "cellSize": 0.0000429153442,
+ "cornerOfOrigin": "topLeft",
+ "pointOfOrigin": [-180, 90],
+ "matrixWidth": 32768,
+ "matrixHeight": 16384,
+ "tileWidth": 256,
+ "tileHeight": 256
+ },
+ {
+ "id": "15",
+ "scaleDenominator": 8530.9183353991357,
+ "cellSize": 0.0000214576721,
+ "cornerOfOrigin": "topLeft",
+ "pointOfOrigin": [-180, 90],
+ "matrixWidth": 65536,
+ "matrixHeight": 32768,
+ "tileWidth": 256,
+ "tileHeight": 256
+ },
+ {
+ "id": "16",
+ "scaleDenominator": 4265.4591676995678,
+ "cellSize": 0.0000107288361,
+ "cornerOfOrigin": "topLeft",
+ "pointOfOrigin": [-180, 90],
+ "matrixWidth": 131072,
+ "matrixHeight": 65536,
+ "tileWidth": 256,
+ "tileHeight": 256
+ },
+ {
+ "id": "17",
+ "scaleDenominator": 2132.7295838497839,
+ "cellSize": 0.000005364418,
+ "cornerOfOrigin": "topLeft",
+ "pointOfOrigin": [-180, 90],
+ "matrixWidth": 262144,
+ "matrixHeight": 131072,
+ "tileWidth": 256,
+ "tileHeight": 256
+ },
+ {
+ "id": "18",
+ "scaleDenominator": 1066.364791924892,
+ "cellSize": 0.000002682209,
+ "cornerOfOrigin": "topLeft",
+ "pointOfOrigin": [-180, 90],
+ "matrixWidth": 524288,
+ "matrixHeight": 262144,
+ "tileWidth": 256,
+ "tileHeight": 256
+ },
+ {
+ "id": "19",
+ "scaleDenominator": 533.182395962446,
+ "cellSize": 0.0000013411045,
+ "cornerOfOrigin": "topLeft",
+ "pointOfOrigin": [-180, 90],
+ "matrixWidth": 1048576,
+ "matrixHeight": 524288,
+ "tileWidth": 256,
+ "tileHeight": 256
+ },
+ {
+ "id": "20",
+ "scaleDenominator": 266.591197981223,
+ "cellSize": 0.0000006705523,
+ "cornerOfOrigin": "topLeft",
+ "pointOfOrigin": [-180, 90],
+ "matrixWidth": 2097152,
+ "matrixHeight": 1048576,
+ "tileWidth": 256,
+ "tileHeight": 256
+ },
+ {
+ "id": "21",
+ "scaleDenominator": 133.2955989906115,
+ "cellSize": 0.0000003352761,
+ "cornerOfOrigin": "topLeft",
+ "pointOfOrigin": [-180, 90],
+ "matrixWidth": 4194304,
+ "matrixHeight": 2097152,
+ "tileWidth": 256,
+ "tileHeight": 256
+ },
+ {
+ "id": "22",
+ "scaleDenominator": 66.6477994953057,
+ "cellSize": 0.0000001676381,
+ "cornerOfOrigin": "topLeft",
+ "pointOfOrigin": [-180, 90],
+ "matrixWidth": 8388608,
+ "matrixHeight": 4194304,
+ "tileWidth": 256,
+ "tileHeight": 256
+ },
+ {
+ "id": "23",
+ "scaleDenominator": 33.3238997476529,
+ "cellSize": 0.000000083819,
+ "cornerOfOrigin": "topLeft",
+ "pointOfOrigin": [-180, 90],
+ "matrixWidth": 16777216,
+ "matrixHeight": 8388608,
+ "tileWidth": 256,
+ "tileHeight": 256
+ },
+ {
+ "id": "24",
+ "scaleDenominator": 16.6619498738264,
+ "cellSize": 0.0000000419095,
+ "cornerOfOrigin": "topLeft",
+ "pointOfOrigin": [-180, 90],
+ "matrixWidth": 33554432,
+ "matrixHeight": 16777216,
+ "tileWidth": 256,
+ "tileHeight": 256
+ },
+ {
+ "id": "25",
+ "scaleDenominator": 8.3309749369132,
+ "cellSize": 0.0000000209548,
+ "cornerOfOrigin": "topLeft",
+ "pointOfOrigin": [-180, 90],
+ "matrixWidth": 67108864,
+ "matrixHeight": 33554432,
+ "tileWidth": 256,
+ "tileHeight": 256
+ },
+ {
+ "id": "26",
+ "scaleDenominator": 4.1654874684566,
+ "cellSize": 0.0000000104774,
+ "cornerOfOrigin": "topLeft",
+ "pointOfOrigin": [-180, 90],
+ "matrixWidth": 134217728,
+ "matrixHeight": 67108864,
+ "tileWidth": 256,
+ "tileHeight": 256
+ },
+ {
+ "id": "27",
+ "scaleDenominator": 2.0827437342283,
+ "cellSize": 0.0000000052387,
+ "cornerOfOrigin": "topLeft",
+ "pointOfOrigin": [-180, 90],
+ "matrixWidth": 268435456,
+ "matrixHeight": 134217728,
+ "tileWidth": 256,
+ "tileHeight": 256
+ },
+ {
+ "id": "28",
+ "scaleDenominator": 1.0413718671142,
+ "cellSize": 0.0000000026193,
+ "cornerOfOrigin": "topLeft",
+ "pointOfOrigin": [-180, 90],
+ "matrixWidth": 536870912,
+ "matrixHeight": 268435456,
+ "tileWidth": 256,
+ "tileHeight": 256
+ },
+ {
+ "id": "29",
+ "scaleDenominator": 0.5206859335571,
+ "cellSize": 0.0000000013097,
+ "cornerOfOrigin": "topLeft",
+ "pointOfOrigin": [-180, 90],
+ "matrixWidth": 1073741824,
+ "matrixHeight": 536870912,
+ "tileWidth": 256,
+ "tileHeight": 256
+ }
+ ]
+ )
WEBMERCATORQUAD = TileMatrixSetEnumType(
tileMatrixSet="WebMercatorQuad",
tileMatrixSetURI="http://www.opengis.net/def/tilematrixset/OGC/1.0/WebMercatorQuad", # noqa
crs="http://www.opengis.net/def/crs/EPSG/0/3857",
- tileMatrixSetDefinition=
- {
- 'type': 'application/json',
- 'rel': 'http://www.opengis.net/def/rel/ogc/1.0/tiling-scheme',
- 'title': 'WebMercatorQuadTileMatrixSet definition (as JSON)',
- 'href': 'https://raw.githubusercontent.com/opengeospatial/2D-Tile-Matrix-Set/master/registry/json/WebMercatorQuad.json' # authoritative TMS definition
- }
- )
+ title="Google Maps Compatible for the World",
+ orderedAxes=["E", "N"],
+ wellKnownScaleSet="http://www.opengis.net/def/wkss/OGC/1.0/GoogleMapsCompatible",
+ tileMatrices=[
+ {
+ "id" : "0",
+ "scaleDenominator" : 559082264.0287177562714,
+ "cellSize" : 156543.033928040968,
+ "cornerOfOrigin" : "topLeft",
+ "pointOfOrigin" : [ -20037508.3427892439067, 20037508.3427892439067 ],
+ "matrixWidth" : 1,
+ "matrixHeight" : 1,
+ "tileWidth" : 256,
+ "tileHeight" : 256
+ },
+ {
+ "id" : "1",
+ "scaleDenominator" : 279541132.0143588781357,
+ "cellSize" : 78271.516964020484,
+ "cornerOfOrigin" : "topLeft",
+ "pointOfOrigin" : [ -20037508.3427892439067, 20037508.3427892439067 ],
+ "matrixWidth" : 2,
+ "matrixHeight" : 2,
+ "tileWidth" : 256,
+ "tileHeight" : 256
+ },
+ {
+ "id" : "2",
+ "scaleDenominator" : 139770566.0071794390678,
+ "cellSize" : 39135.758482010242,
+ "cornerOfOrigin" : "topLeft",
+ "pointOfOrigin" : [ -20037508.3427892439067, 20037508.3427892439067 ],
+ "matrixWidth" : 4,
+ "matrixHeight" : 4,
+ "tileWidth" : 256,
+ "tileHeight" : 256
+ },
+ {
+ "id" : "3",
+ "scaleDenominator" : 69885283.0035897195339,
+ "cellSize" : 19567.879241005121,
+ "cornerOfOrigin" : "topLeft",
+ "pointOfOrigin" : [ -20037508.3427892439067, 20037508.3427892439067 ],
+ "matrixWidth" : 8,
+ "matrixHeight" : 8,
+ "tileWidth" : 256,
+ "tileHeight" : 256
+ },
+ {
+ "id" : "4",
+ "scaleDenominator" : 34942641.501794859767,
+ "cellSize" : 9783.9396205025605,
+ "cornerOfOrigin" : "topLeft",
+ "pointOfOrigin" : [ -20037508.3427892439067, 20037508.3427892439067 ],
+ "matrixWidth" : 16,
+ "matrixHeight" : 16,
+ "tileWidth" : 256,
+ "tileHeight" : 256
+ },
+ {
+ "id" : "5",
+ "scaleDenominator" : 17471320.7508974298835,
+ "cellSize" : 4891.9698102512803,
+ "cornerOfOrigin" : "topLeft",
+ "pointOfOrigin" : [ -20037508.3427892439067, 20037508.3427892439067 ],
+ "matrixWidth" : 32,
+ "matrixHeight" : 32,
+ "tileWidth" : 256,
+ "tileHeight" : 256
+ },
+ {
+ "id" : "6",
+ "scaleDenominator" : 8735660.3754487149417,
+ "cellSize" : 2445.9849051256401,
+ "cornerOfOrigin" : "topLeft",
+ "pointOfOrigin" : [ -20037508.3427892439067, 20037508.3427892439067 ],
+ "matrixWidth" : 64,
+ "matrixHeight" : 64,
+ "tileWidth" : 256,
+ "tileHeight" : 256
+ },
+ {
+ "id" : "7",
+ "scaleDenominator" : 4367830.1877243574709,
+ "cellSize" : 1222.9924525628201,
+ "cornerOfOrigin" : "topLeft",
+ "pointOfOrigin" : [ -20037508.3427892439067, 20037508.3427892439067 ],
+ "matrixWidth" : 128,
+ "matrixHeight" : 128,
+ "tileWidth" : 256,
+ "tileHeight" : 256
+ },
+ {
+ "id" : "8",
+ "scaleDenominator" : 2183915.0938621787354,
+ "cellSize" : 611.49622628141,
+ "cornerOfOrigin" : "topLeft",
+ "pointOfOrigin" : [ -20037508.3427892439067, 20037508.3427892439067 ],
+ "matrixWidth" : 256,
+ "matrixHeight" : 256,
+ "tileWidth" : 256,
+ "tileHeight" : 256
+ },
+ {
+ "id" : "9",
+ "scaleDenominator" : 1091957.5469310893677,
+ "cellSize" : 305.748113140705,
+ "cornerOfOrigin" : "topLeft",
+ "pointOfOrigin" : [ -20037508.3427892439067, 20037508.3427892439067 ],
+ "matrixWidth" : 512,
+ "matrixHeight" : 512,
+ "tileWidth" : 256,
+ "tileHeight" : 256
+ },
+ {
+ "id" : "10",
+ "scaleDenominator" : 545978.7734655446839,
+ "cellSize" : 152.8740565703525,
+ "cornerOfOrigin" : "topLeft",
+ "pointOfOrigin" : [ -20037508.3427892439067, 20037508.3427892439067 ],
+ "matrixWidth" : 1024,
+ "matrixHeight" : 1024,
+ "tileWidth" : 256,
+ "tileHeight" : 256
+ },
+ {
+ "id" : "11",
+ "scaleDenominator" : 272989.3867327723419,
+ "cellSize" : 76.4370282851763,
+ "cornerOfOrigin" : "topLeft",
+ "pointOfOrigin" : [ -20037508.3427892439067, 20037508.3427892439067 ],
+ "matrixWidth" : 2048,
+ "matrixHeight" : 2048,
+ "tileWidth" : 256,
+ "tileHeight" : 256
+ },
+ {
+ "id" : "12",
+ "scaleDenominator" : 136494.693366386171,
+ "cellSize" : 38.2185141425881,
+ "cornerOfOrigin" : "topLeft",
+ "pointOfOrigin" : [ -20037508.3427892439067, 20037508.3427892439067 ],
+ "matrixWidth" : 4096,
+ "matrixHeight" : 4096,
+ "tileWidth" : 256,
+ "tileHeight" : 256
+ },
+ {
+ "id" : "13",
+ "scaleDenominator" : 68247.3466831930855,
+ "cellSize" : 19.1092570712941,
+ "cornerOfOrigin" : "topLeft",
+ "pointOfOrigin" : [ -20037508.3427892439067, 20037508.3427892439067 ],
+ "matrixWidth" : 8192,
+ "matrixHeight" : 8192,
+ "tileWidth" : 256,
+ "tileHeight" : 256
+ },
+ {
+ "id" : "14",
+ "scaleDenominator" : 34123.6733415965427,
+ "cellSize" : 9.554628535647,
+ "cornerOfOrigin" : "topLeft",
+ "pointOfOrigin" : [ -20037508.3427892439067, 20037508.3427892439067 ],
+ "matrixWidth" : 16384,
+ "matrixHeight" : 16384,
+ "tileWidth" : 256,
+ "tileHeight" : 256
+ },
+ {
+ "id" : "15",
+ "scaleDenominator" : 17061.8366707982714,
+ "cellSize" : 4.7773142678235,
+ "cornerOfOrigin" : "topLeft",
+ "pointOfOrigin" : [ -20037508.3427892439067, 20037508.3427892439067 ],
+ "matrixWidth" : 32768,
+ "matrixHeight" : 32768,
+ "tileWidth" : 256,
+ "tileHeight" : 256
+ },
+ {
+ "id" : "16",
+ "scaleDenominator" : 8530.9183353991357,
+ "cellSize" : 2.3886571339118,
+ "cornerOfOrigin" : "topLeft",
+ "pointOfOrigin" : [ -20037508.3427892439067, 20037508.3427892439067 ],
+ "matrixWidth" : 65536,
+ "matrixHeight" : 65536,
+ "tileWidth" : 256,
+ "tileHeight" : 256
+ },
+ {
+ "id" : "17",
+ "scaleDenominator" : 4265.4591676995678,
+ "cellSize" : 1.1943285669559,
+ "cornerOfOrigin" : "topLeft",
+ "pointOfOrigin" : [ -20037508.3427892439067, 20037508.3427892439067 ],
+ "matrixWidth" : 131072,
+ "matrixHeight" : 131072,
+ "tileWidth" : 256,
+ "tileHeight" : 256
+ },
+ {
+ "id" : "18",
+ "scaleDenominator" : 2132.7295838497839,
+ "cellSize" : 0.5971642834779,
+ "cornerOfOrigin" : "topLeft",
+ "pointOfOrigin" : [ -20037508.3427892439067, 20037508.3427892439067 ],
+ "matrixWidth" : 262144,
+ "matrixHeight" : 262144,
+ "tileWidth" : 256,
+ "tileHeight" : 256
+ },
+ {
+ "id" : "19",
+ "scaleDenominator" : 1066.364791924892,
+ "cellSize" : 0.298582141739,
+ "cornerOfOrigin" : "topLeft",
+ "pointOfOrigin" : [ -20037508.3427892439067, 20037508.3427892439067 ],
+ "matrixWidth" : 524288,
+ "matrixHeight" : 524288,
+ "tileWidth" : 256,
+ "tileHeight" : 256
+ },
+ {
+ "id" : "20",
+ "scaleDenominator" : 533.182395962446,
+ "cellSize" : 0.1492910708695,
+ "cornerOfOrigin" : "topLeft",
+ "pointOfOrigin" : [ -20037508.3427892439067, 20037508.3427892439067 ],
+ "matrixWidth" : 1048576,
+ "matrixHeight" : 1048576,
+ "tileWidth" : 256,
+ "tileHeight" : 256
+ },
+ {
+ "id" : "21",
+ "scaleDenominator" : 266.591197981223,
+ "cellSize" : 0.0746455354347,
+ "cornerOfOrigin" : "topLeft",
+ "pointOfOrigin" : [ -20037508.3427892439067, 20037508.3427892439067 ],
+ "matrixWidth" : 2097152,
+ "matrixHeight" : 2097152,
+ "tileWidth" : 256,
+ "tileHeight" : 256
+ },
+ {
+ "id" : "22",
+ "scaleDenominator" : 133.2955989906115,
+ "cellSize" : 0.0373227677174,
+ "cornerOfOrigin" : "topLeft",
+ "pointOfOrigin" : [ -20037508.3427892439067, 20037508.3427892439067 ],
+ "matrixWidth" : 4194304,
+ "matrixHeight" : 4194304,
+ "tileWidth" : 256,
+ "tileHeight" : 256
+ },
+ {
+ "id" : "23",
+ "scaleDenominator" : 66.6477994953057,
+ "cellSize" : 0.0186613838587,
+ "cornerOfOrigin" : "topLeft",
+ "pointOfOrigin" : [ -20037508.3427892439067, 20037508.3427892439067 ],
+ "matrixWidth" : 8388608,
+ "matrixHeight" : 8388608,
+ "tileWidth" : 256,
+ "tileHeight" : 256
+ },
+ {
+ "id" : "24",
+ "scaleDenominator" : 33.3238997476529,
+ "cellSize" : 0.0093306919293,
+ "cornerOfOrigin" : "topLeft",
+ "pointOfOrigin" : [ -20037508.3427892439067, 20037508.3427892439067 ],
+ "matrixWidth" : 16777216,
+ "matrixHeight" : 16777216,
+ "tileWidth" : 256,
+ "tileHeight" : 256
+ },
+ {
+ "id" : "25",
+ "scaleDenominator" : 16.6619498738264,
+ "cellSize" : 0.0046653459647,
+ "cornerOfOrigin" : "topLeft",
+ "pointOfOrigin" : [ -20037508.3427892439067, 20037508.3427892439067 ],
+ "matrixWidth" : 33554432,
+ "matrixHeight" : 33554432,
+ "tileWidth" : 256,
+ "tileHeight" : 256
+ },
+ {
+ "id" : "26",
+ "scaleDenominator" : 8.3309749369132,
+ "cellSize" : 0.0023326729823,
+ "cornerOfOrigin" : "topLeft",
+ "pointOfOrigin" : [ -20037508.3427892439067, 20037508.3427892439067 ],
+ "matrixWidth" : 67108864,
+ "matrixHeight" : 67108864,
+ "tileWidth" : 256,
+ "tileHeight" : 256
+ },
+ {
+ "id" : "27",
+ "scaleDenominator" : 4.1654874684566,
+ "cellSize" : 0.0011663364912,
+ "cornerOfOrigin" : "topLeft",
+ "pointOfOrigin" : [ -20037508.3427892439067, 20037508.3427892439067 ],
+ "matrixWidth" : 134217728,
+ "matrixHeight" : 134217728,
+ "tileWidth" : 256,
+ "tileHeight" : 256
+ },
+ {
+ "id" : "28",
+ "scaleDenominator" : 2.0827437342283,
+ "cellSize" : 0.0005831682456,
+ "cornerOfOrigin" : "topLeft",
+ "pointOfOrigin" : [ -20037508.3427892439067, 20037508.3427892439067 ],
+ "matrixWidth" : 268435456,
+ "matrixHeight" : 268435456,
+ "tileWidth" : 256,
+ "tileHeight" : 256
+ },
+ {
+ "id" : "29",
+ "scaleDenominator" : 1.0413718671142,
+ "cellSize" : 0.0002915841228,
+ "cornerOfOrigin" : "topLeft",
+ "pointOfOrigin" : [ -20037508.3427892439067, 20037508.3427892439067 ],
+ "matrixWidth" : 536870912,
+ "matrixHeight" : 536870912,
+ "tileWidth" : 256,
+ "tileHeight" : 256
+ }
+ ]
+ )
# Tile Set Metadata Sub Types
diff --git a/pygeoapi/provider/mvt_elastic.py b/pygeoapi/provider/mvt_elastic.py
index 78914e9624..8741a445ab 100644
--- a/pygeoapi/provider/mvt_elastic.py
+++ b/pygeoapi/provider/mvt_elastic.py
@@ -209,6 +209,21 @@ def get_default_metadata(self, dataset, server_url, layer, tileset,
crs = schema.crs
tileMatrixSetURI = schema.tileMatrixSetURI
+ tiling_scheme_url = url_join(
+ server_url, f'/TileMatrixSets/{schema.tileMatrixSet}')
+ tiling_scheme_url_type = "application/json"
+ tiling_scheme_url_title = f'{schema.tileMatrixSet} tile matrix set definition' # noqa
+
+ tiling_scheme = LinkType(href=tiling_scheme_url,
+ el="http://www.opengis.net/def/rel/ogc/1.0/tiling-scheme", # noqa
+ type=tiling_scheme_url_type,
+ title=tiling_scheme_url_title)
+
+ if tiling_scheme is None:
+ msg = 'Could not identify a valid tiling schema'
+ LOGGER.error(msg)
+ raise ProviderConnectionError(msg)
+
content = TileSetMetadata(title=title, description=description,
keywords=keywords, crs=crs,
tileMatrixSetURI=tileMatrixSetURI)
@@ -219,6 +234,8 @@ def get_default_metadata(self, dataset, server_url, layer, tileset,
service_url_link = LinkType(href=service_url, rel="item",
type=service_url_link_type,
title=service_url_link_title)
+
+ links.append(tiling_scheme)
links.append(service_url_link)
content.links = links
diff --git a/pygeoapi/provider/mvt_tippecanoe.py b/pygeoapi/provider/mvt_tippecanoe.py
index fd8a83ebb1..3e538e5667 100644
--- a/pygeoapi/provider/mvt_tippecanoe.py
+++ b/pygeoapi/provider/mvt_tippecanoe.py
@@ -220,22 +220,43 @@ def get_default_metadata(self, dataset, server_url, layer, tileset,
# Default values
tileMatrixSetURI = tiling_schemes[0].tileMatrixSetURI
crs = tiling_schemes[0].crs
+
+ tiling_scheme = None
+
# Checking the selected matrix in configured tiling_schemes
for schema in tiling_schemes:
if (schema.tileMatrixSet == tileset):
crs = schema.crs
tileMatrixSetURI = schema.tileMatrixSetURI
+ tiling_scheme_url = url_join(
+ server_url, f'/TileMatrixSets/{schema.tileMatrixSet}')
+ tiling_scheme_url_type = "application/json"
+ tiling_scheme_url_title = f'{schema.tileMatrixSet} tile matrix set definition' # noqa
+
+ tiling_scheme = LinkType(href=tiling_scheme_url,
+ rel="http://www.opengis.net/def/rel/ogc/1.0/tiling-scheme", # noqa
+ type=tiling_scheme_url_type,
+ title=tiling_scheme_url_title)
+
+ if tiling_scheme is None:
+ msg = 'Could not identify a valid tiling schema'
+ LOGGER.error(msg)
+ raise ProviderConnectionError(msg)
+
content = TileSetMetadata(title=title, description=description,
keywords=keywords, crs=crs,
tileMatrixSetURI=tileMatrixSetURI)
links = []
+
service_url_link_type = "application/vnd.mapbox-vector-tile"
service_url_link_title = f'{tileset} vector tiles for {layer}'
service_url_link = LinkType(href=service_url, rel="item",
type=service_url_link_type,
title=service_url_link_title)
+
+ links.append(tiling_scheme)
links.append(service_url_link)
content.links = links
diff --git a/pygeoapi/starlette_app.py b/pygeoapi/starlette_app.py
index 96ba1d65d2..117aaa3582 100644
--- a/pygeoapi/starlette_app.py
+++ b/pygeoapi/starlette_app.py
@@ -164,7 +164,7 @@ async def conformance(request: Request):
:returns: Starlette HTTP Response
"""
- return await get_response(api_.conformance, request)
+ return get_response(api_.conformance(request))
async def get_tilematrix_set(request: Request, tileMatrixSetId=None):
@@ -177,9 +177,7 @@ async def get_tilematrix_set(request: Request, tileMatrixSetId=None):
if 'tileMatrixSetId' in request.path_params:
tileMatrixSetId = request.path_params['tileMatrixSetId']
- return await execute_from_starlette(
- tiles_api.tilematrixset, request, tileMatrixSetId,
- )
+ return get_response(api_.tilematrixset(request, tileMatrixSetId))
async def get_tilematrix_sets(request: Request):
@@ -188,23 +186,7 @@ async def get_tilematrix_sets(request: Request):
:returns: HTTP response
"""
- return await execute_from_starlette(tiles_api.tilematrixsets, request)
-
-
-async def collection_schema(request: Request, collection_id=None):
- """
- OGC API collections schema endpoint
-
- :param request: Starlette Request instance
- :param collection_id: collection identifier
-
- :returns: Starlette HTTP Response
- """
- if 'collection_id' in request.path_params:
- collection_id = request.path_params['collection_id']
-
- return await get_response(api_.get_collection_schema, request,
- collection_id)
+ return get_response(api_.tilematrixsets(request))
async def collection_queryables(request: Request, collection_id=None):
@@ -635,7 +617,6 @@ async def __call__(self, scope: Scope,
Route('/conformance', conformance),
Route('/TileMatrixSets/{tileMatrixSetId}', get_tilematrix_set),
Route('/TileMatrixSets', get_tilematrix_sets),
- Route('/collections/{collection_id:path}/schema', collection_schema),
Route('/collections/{collection_id:path}/queryables', collection_queryables), # noqa
Route('/collections/{collection_id:path}/tiles', get_collection_tiles),
Route('/collections/{collection_id:path}/tiles/{tileMatrixSetId}', get_collection_tiles_metadata), # noqa
diff --git a/pygeoapi/templates/landing_page.html b/pygeoapi/templates/landing_page.html
index ab67398b7f..e104d3dc32 100644
--- a/pygeoapi/templates/landing_page.html
+++ b/pygeoapi/templates/landing_page.html
@@ -88,6 +88,12 @@ {% trans %}Conformance{% endtrans %}
{% trans %}View the conformance classes of this service{% endtrans %}
+
diff --git a/pygeoapi/templates/tilematrixsets/index.html b/pygeoapi/templates/tilematrixsets/index.html
new file mode 100644
index 0000000000..f3b47fc686
--- /dev/null
+++ b/pygeoapi/templates/tilematrixsets/index.html
@@ -0,0 +1,37 @@
+{% extends "_base.html" %}
+{% block title %}{{ super() }} {% trans %}TileMatrixSets{% endtrans %} {% endblock %}
+{% block crumbs %}{{ super() }}
+/
{% trans %}TileMatrixSets{% endtrans %}
+{% endblock %}
+{% block body %}
+
+
+ {% trans %}Tile matrix sets available in this service{% endtrans %}
+
+
+
+
+
+ {% trans %}Title{% endtrans %} |
+ {% trans %}Description{% endtrans %} |
+
+
+
+ {% for p in data['tileMatrixSets'] %}
+
+
+ {{p.id}}
+ |
+
+ {{ p.title }}
+ |
+
+ {% endfor %}
+
+
+
+
+
+
+
+{% endblock %}
diff --git a/pygeoapi/templates/tilematrixsets/tilematrixset.html b/pygeoapi/templates/tilematrixsets/tilematrixset.html
new file mode 100644
index 0000000000..5aae2742a4
--- /dev/null
+++ b/pygeoapi/templates/tilematrixsets/tilematrixset.html
@@ -0,0 +1,18 @@
+{% extends "_base.html" %}
+{% block title %}{{ super() }} {% trans %}TileMatrixSet{% endtrans %} {% endblock %}
+{% block crumbs %}{{ super() }}
+/
{% trans %}TileMatrixSet{% endtrans %}
+{% endblock %}
+{% block body %}
+
+
+ {{ data['id'] }} tile matrix set
+
+ - CRS: {{ data['crs'] }}
+ - Uri: {{ data['uri'] }}
+ - Well Known Scale Set: {{ data['wellKnownScaleSet'] }}
+
+
+
+
+{% endblock %}
diff --git a/tests/test_api.py b/tests/test_api.py
new file mode 100644
index 0000000000..6732d27470
--- /dev/null
+++ b/tests/test_api.py
@@ -0,0 +1,2238 @@
+# =================================================================
+#
+# Authors: Tom Kralidis
+# John A Stevenson
+# Colin Blackburn
+#
+# Copyright (c) 2023 Tom Kralidis
+# Copyright (c) 2022 John A Stevenson and Colin Blackburn
+#
+# Permission is hereby granted, free of charge, to any person
+# obtaining a copy of this software and associated documentation
+# files (the "Software"), to deal in the Software without
+# restriction, including without limitation the rights to use,
+# copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following
+# conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+#
+# =================================================================
+
+import copy
+import json
+import logging
+import time
+import gzip
+from http import HTTPStatus
+
+from pyld import jsonld
+import pytest
+import pyproj
+from shapely.geometry import Point
+
+from pygeoapi.api import (
+ API, APIRequest, FORMAT_TYPES, validate_bbox, validate_datetime,
+ validate_subset, F_HTML, F_JSON, F_JSONLD, F_GZIP, __version__
+)
+from pygeoapi.util import (yaml_load, get_crs_from_uri,
+ get_api_rules, get_base_url)
+
+from .util import (get_test_file_path, mock_request,
+ mock_flask, mock_starlette)
+
+from pygeoapi.models.provider.base import TileMatrixSetEnum
+
+LOGGER = logging.getLogger(__name__)
+
+
+@pytest.fixture()
+def config():
+ with open(get_test_file_path('pygeoapi-test-config.yml')) as fh:
+ return yaml_load(fh)
+
+
+@pytest.fixture()
+def config_with_rules() -> dict:
+ """ Returns a pygeoapi configuration with default API rules. """
+ with open(get_test_file_path('pygeoapi-test-config-apirules.yml')) as fh:
+ return yaml_load(fh)
+
+
+@pytest.fixture()
+def config_enclosure() -> dict:
+ """ Returns a pygeoapi configuration with enclosure links. """
+ with open(get_test_file_path('pygeoapi-test-config-enclosure.yml')) as fh:
+ return yaml_load(fh)
+
+
+@pytest.fixture()
+def config_hidden_resources():
+ filename = 'pygeoapi-test-config-hidden-resources.yml'
+ with open(get_test_file_path(filename)) as fh:
+ return yaml_load(fh)
+
+
+@pytest.fixture()
+def openapi():
+ with open(get_test_file_path('pygeoapi-test-openapi.yml')) as fh:
+ return yaml_load(fh)
+
+
+@pytest.fixture()
+def api_(config, openapi):
+ return API(config, openapi)
+
+
+@pytest.fixture()
+def enclosure_api(config_enclosure, openapi):
+ """ Returns an API instance with a collection with enclosure links. """
+ return API(config_enclosure, openapi)
+
+
+@pytest.fixture()
+def rules_api(config_with_rules, openapi):
+ """ Returns an API instance with URL prefix and strict slashes policy.
+ The API version is extracted from the current version here.
+ """
+ return API(config_with_rules, openapi)
+
+
+@pytest.fixture()
+def api_hidden_resources(config_hidden_resources, openapi):
+ return API(config_hidden_resources, openapi)
+
+
+def test_apirequest(api_):
+ # Test without (valid) locales
+ with pytest.raises(ValueError):
+ req = mock_request()
+ APIRequest(req, [])
+ APIRequest(req, None)
+ APIRequest(req, ['zz'])
+
+ # Test all supported formats from query args
+ for f, mt in FORMAT_TYPES.items():
+ req = mock_request({'f': f})
+ apireq = APIRequest(req, api_.locales)
+ assert apireq.is_valid()
+ assert apireq.format == f
+ assert apireq.get_response_headers()['Content-Type'] == mt
+
+ # Test all supported formats from Accept header
+ for f, mt in FORMAT_TYPES.items():
+ req = mock_request(HTTP_ACCEPT=mt)
+ apireq = APIRequest(req, api_.locales)
+ assert apireq.is_valid()
+ assert apireq.format == f
+ assert apireq.get_response_headers()['Content-Type'] == mt
+
+ # Test nonsense format
+ req = mock_request({'f': 'foo'})
+ apireq = APIRequest(req, api_.locales)
+ assert not apireq.is_valid()
+ assert apireq.format == 'foo'
+ assert apireq.is_valid(('foo',))
+ assert apireq.get_response_headers()['Content-Type'] == \
+ FORMAT_TYPES[F_JSON]
+
+ # Test without format
+ req = mock_request()
+ apireq = APIRequest(req, api_.locales)
+ assert apireq.is_valid()
+ assert apireq.format is None
+ assert apireq.get_response_headers()['Content-Type'] == \
+ FORMAT_TYPES[F_JSON]
+ assert apireq.get_linkrel(F_JSON) == 'self'
+ assert apireq.get_linkrel(F_HTML) == 'alternate'
+
+ # Test complex format string
+ hh = 'text/html,application/xhtml+xml,application/xml;q=0.9,'
+ req = mock_request(HTTP_ACCEPT=hh)
+ apireq = APIRequest(req, api_.locales)
+ assert apireq.is_valid()
+ assert apireq.format == F_HTML
+ assert apireq.get_response_headers()['Content-Type'] == \
+ FORMAT_TYPES[F_HTML]
+ assert apireq.get_linkrel(F_HTML) == 'self'
+ assert apireq.get_linkrel(F_JSON) == 'alternate'
+
+ # Test accept header with multiple valid formats
+ hh = 'plain/text,application/ld+json,application/json;q=0.9,'
+ req = mock_request(HTTP_ACCEPT=hh)
+ apireq = APIRequest(req, api_.locales)
+ assert apireq.is_valid()
+ assert apireq.format == F_JSONLD
+ assert apireq.get_response_headers()['Content-Type'] == \
+ FORMAT_TYPES[F_JSONLD]
+ assert apireq.get_linkrel(F_JSONLD) == 'self'
+ assert apireq.get_linkrel(F_HTML) == 'alternate'
+
+ # Overrule HTTP content negotiation
+ req = mock_request({'f': 'html'}, HTTP_ACCEPT='application/json') # noqa
+ apireq = APIRequest(req, api_.locales)
+ assert apireq.is_valid()
+ assert apireq.format == F_HTML
+ assert apireq.get_response_headers()['Content-Type'] == \
+ FORMAT_TYPES[F_HTML]
+
+ # Test data
+ for d in (None, '', 'test', {'key': 'value'}):
+ req = mock_request(data=d)
+ apireq = APIRequest.with_data(req, api_.locales)
+ if not d:
+ assert apireq.data == b''
+ elif isinstance(d, dict):
+ assert d == json.loads(apireq.data)
+ else:
+ assert apireq.data == d.encode()
+
+ # Test multilingual
+ test_lang = {
+ 'nl': ('en', 'en-US'), # unsupported lang should return default
+ 'en-US': ('en', 'en-US'),
+ 'de_CH': ('en', 'en-US'),
+ 'fr-CH, fr;q=0.9, en;q=0.8': ('fr', 'fr-CA'),
+ 'fr-CH, fr-BE;q=0.9': ('fr', 'fr-CA'),
+ }
+ sup_lang = ('en-US', 'fr_CA')
+ for lang_in, (lang_out, cl_out) in test_lang.items():
+ # Using l query parameter
+ req = mock_request({'lang': lang_in})
+ apireq = APIRequest(req, sup_lang)
+ assert apireq.raw_locale == lang_in
+ assert apireq.locale.language == lang_out
+ assert apireq.get_response_headers()['Content-Language'] == cl_out
+
+ # Using Accept-Language header
+ req = mock_request(HTTP_ACCEPT_LANGUAGE=lang_in)
+ apireq = APIRequest(req, sup_lang)
+ assert apireq.raw_locale == lang_in
+ assert apireq.locale.language == lang_out
+ assert apireq.get_response_headers()['Content-Language'] == cl_out
+
+ # Test language override
+ req = mock_request({'lang': 'fr'}, HTTP_ACCEPT_LANGUAGE='en_US')
+ apireq = APIRequest(req, sup_lang)
+ assert apireq.raw_locale == 'fr'
+ assert apireq.locale.language == 'fr'
+ assert apireq.get_response_headers()['Content-Language'] == 'fr-CA'
+
+ # Test locale territory
+ req = mock_request({'lang': 'en-GB'})
+ apireq = APIRequest(req, sup_lang)
+ assert apireq.raw_locale == 'en-GB'
+ assert apireq.locale.language == 'en'
+ assert apireq.locale.territory == 'US'
+ assert apireq.get_response_headers()['Content-Language'] == 'en-US'
+
+ # Test without Accept-Language header or 'lang' query parameter
+ # (should return default language from YAML config)
+ req = mock_request()
+ apireq = APIRequest(req, api_.locales)
+ assert apireq.raw_locale is None
+ assert apireq.locale.language == api_.default_locale.language
+ assert apireq.get_response_headers()['Content-Language'] == 'en-US'
+
+ # Test without Accept-Language header or 'lang' query param
+ # (should return first in custom list of languages)
+ sup_lang = ('de', 'fr', 'en')
+ apireq = APIRequest(req, sup_lang)
+ assert apireq.raw_locale is None
+ assert apireq.locale.language == 'de'
+ assert apireq.get_response_headers()['Content-Language'] == 'de'
+
+
+def test_apirules_active(config_with_rules, rules_api):
+ assert rules_api.config == config_with_rules
+ rules = get_api_rules(config_with_rules)
+ base_url = get_base_url(config_with_rules)
+
+ # Test Flask
+ flask_prefix = rules.get_url_prefix('flask')
+ with mock_flask('pygeoapi-test-config-apirules.yml') as flask_client:
+ # Test happy path
+ response = flask_client.get(f'{flask_prefix}/conformance')
+ assert response.status_code == 200
+ assert response.headers['X-API-Version'] == __version__
+ assert response.request.url == \
+ flask_client.application.url_for('pygeoapi.conformance')
+ response = flask_client.get(f'{flask_prefix}/static/img/pygeoapi.png')
+ assert response.status_code == 200
+ # Test that static resources also work without URL prefix
+ response = flask_client.get('/static/img/pygeoapi.png')
+ assert response.status_code == 200
+
+ # Test strict slashes
+ response = flask_client.get(f'{flask_prefix}/conformance/')
+ assert response.status_code == 404
+ # For the landing page ONLY, trailing slashes are actually preferred.
+ # See https://docs.opengeospatial.org/is/17-069r4/17-069r4.html#_api_landing_page # noqa
+ # Omitting the trailing slash should lead to a redirect.
+ response = flask_client.get(f'{flask_prefix}/')
+ assert response.status_code == 200
+ response = flask_client.get(flask_prefix)
+ assert response.status_code in (307, 308)
+
+ # Test links on landing page for correct URLs
+ response = flask_client.get(flask_prefix, follow_redirects=True)
+ assert response.status_code == 200
+ assert response.is_json
+ links = response.json['links']
+ assert all(
+ href.startswith(base_url) for href in (rel['href'] for rel in links) # noqa
+ )
+
+ # Test Starlette
+ starlette_prefix = rules.get_url_prefix('starlette')
+ with mock_starlette('pygeoapi-test-config-apirules.yml') as starlette_client: # noqa
+ # Test happy path
+ response = starlette_client.get(f'{starlette_prefix}/conformance')
+ assert response.status_code == 200
+ assert response.headers['X-API-Version'] == __version__
+ response = starlette_client.get(f'{starlette_prefix}/static/img/pygeoapi.png') # noqa
+ assert response.status_code == 200
+ # Test that static resources also work without URL prefix
+ response = starlette_client.get('/static/img/pygeoapi.png')
+ assert response.status_code == 200
+
+ # Test strict slashes
+ response = starlette_client.get(f'{starlette_prefix}/conformance/')
+ assert response.status_code == 404
+ # For the landing page ONLY, trailing slashes are actually preferred.
+ # See https://docs.opengeospatial.org/is/17-069r4/17-069r4.html#_api_landing_page # noqa
+ # Omitting the trailing slash should lead to a redirect.
+ response = starlette_client.get(f'{starlette_prefix}/')
+ assert response.status_code == 200
+ response = starlette_client.get(starlette_prefix)
+ assert response.status_code in (307, 308)
+
+ # Test links on landing page for correct URLs
+ response = starlette_client.get(starlette_prefix, follow_redirects=True) # noqa
+ assert response.status_code == 200
+ links = response.json()['links']
+ assert all(
+ href.startswith(base_url) for href in (rel['href'] for rel in links) # noqa
+ )
+
+
+def test_apirules_inactive(config, api_):
+ assert api_.config == config
+ rules = get_api_rules(config)
+
+ # Test Flask
+ flask_prefix = rules.get_url_prefix('flask')
+ assert flask_prefix == ''
+ with mock_flask('pygeoapi-test-config.yml') as flask_client:
+ response = flask_client.get('')
+ assert response.status_code == 200
+ response = flask_client.get('/conformance')
+ assert response.status_code == 200
+ assert 'X-API-Version' not in response.headers
+ assert response.request.url == \
+ flask_client.application.url_for('pygeoapi.conformance')
+ response = flask_client.get('/static/img/pygeoapi.png')
+ assert response.status_code == 200
+
+ # Test trailing slashes
+ response = flask_client.get('/')
+ assert response.status_code == 200
+ response = flask_client.get('/conformance/')
+ assert response.status_code == 200
+ assert 'X-API-Version' not in response.headers
+
+ # Test Starlette
+ starlette_prefix = rules.get_url_prefix('starlette')
+ assert starlette_prefix == ''
+ with mock_starlette('pygeoapi-test-config.yml') as starlette_client:
+ response = starlette_client.get('')
+ assert response.status_code == 200
+ response = starlette_client.get('/conformance')
+ assert response.status_code == 200
+ assert 'X-API-Version' not in response.headers
+ assert str(response.url) == f"{starlette_client.base_url}/conformance"
+ response = starlette_client.get('/static/img/pygeoapi.png')
+ assert response.status_code == 200
+
+ # Test trailing slashes
+ response = starlette_client.get('/')
+ assert response.status_code == 200
+ response = starlette_client.get('/conformance/', follow_redirects=True)
+ assert response.status_code == 200
+ assert 'X-API-Version' not in response.headers
+
+
+def test_api(config, api_, openapi):
+ assert api_.config == config
+ assert isinstance(api_.config, dict)
+
+ req = mock_request(HTTP_ACCEPT='application/json')
+ rsp_headers, code, response = api_.openapi_(req)
+ assert rsp_headers['Content-Type'] == 'application/vnd.oai.openapi+json;version=3.0' # noqa
+ # No language requested: should be set to default from YAML
+ assert rsp_headers['Content-Language'] == 'en-US'
+ root = json.loads(response)
+ assert isinstance(root, dict)
+
+ a = 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'
+ req = mock_request(HTTP_ACCEPT=a)
+ rsp_headers, code, response = api_.openapi_(req)
+    assert rsp_headers['Content-Type'] == FORMAT_TYPES[F_HTML]
+
+
+ assert 'Swagger UI' in response
+
+ a = 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'
+ req = mock_request({'ui': 'redoc'}, HTTP_ACCEPT=a)
+ rsp_headers, code, response = api_.openapi_(req)
+    assert rsp_headers['Content-Type'] == FORMAT_TYPES[F_HTML]
+
+
+ assert 'ReDoc' in response
+
+ req = mock_request({'f': 'foo'})
+ rsp_headers, code, response = api_.openapi_(req)
+ assert rsp_headers['Content-Language'] == 'en-US'
+ assert code == HTTPStatus.BAD_REQUEST
+
+ assert api_.get_collections_url() == 'http://localhost:5000/collections'
+
+
+def test_api_exception(config, api_):
+ req = mock_request({'f': 'foo'})
+ rsp_headers, code, response = api_.landing_page(req)
+ assert rsp_headers['Content-Language'] == 'en-US'
+ assert code == HTTPStatus.BAD_REQUEST
+
+ # When a language is set, the exception should still be English
+ req = mock_request({'f': 'foo', 'lang': 'fr'})
+ rsp_headers, code, response = api_.landing_page(req)
+ assert rsp_headers['Content-Language'] == 'en-US'
+ assert code == HTTPStatus.BAD_REQUEST
+
+
+def test_gzip(config, api_, openapi):
+ # Requests for each response type and gzip encoding
+ req_gzip_json = mock_request(HTTP_ACCEPT=FORMAT_TYPES[F_JSON],
+ HTTP_ACCEPT_ENCODING=F_GZIP)
+ req_gzip_jsonld = mock_request(HTTP_ACCEPT=FORMAT_TYPES[F_JSONLD],
+ HTTP_ACCEPT_ENCODING=F_GZIP)
+ req_gzip_html = mock_request(HTTP_ACCEPT=FORMAT_TYPES[F_HTML],
+ HTTP_ACCEPT_ENCODING=F_GZIP)
+ req_gzip_gzip = mock_request(HTTP_ACCEPT='application/gzip',
+ HTTP_ACCEPT_ENCODING=F_GZIP)
+
+ # Responses from server config without gzip compression
+ rsp_headers, _, rsp_json = api_.landing_page(req_gzip_json)
+ assert rsp_headers['Content-Type'] == FORMAT_TYPES[F_JSON]
+ rsp_headers, _, rsp_jsonld = api_.landing_page(req_gzip_jsonld)
+ assert rsp_headers['Content-Type'] == FORMAT_TYPES[F_JSONLD]
+ rsp_headers, _, rsp_html = api_.landing_page(req_gzip_html)
+ assert rsp_headers['Content-Type'] == FORMAT_TYPES[F_HTML]
+ rsp_headers, _, _ = api_.landing_page(req_gzip_gzip)
+ assert rsp_headers['Content-Type'] == FORMAT_TYPES[F_JSON]
+
+ # Add gzip to server and use utf-16 encoding
+ config['server']['gzip'] = True
+ enc_16 = 'utf-16'
+ config['server']['encoding'] = enc_16
+ api_ = API(config, openapi)
+
+ # Responses from server with gzip compression
+ rsp_json_headers, _, rsp_gzip_json = api_.landing_page(req_gzip_json)
+ rsp_jsonld_headers, _, rsp_gzip_jsonld = api_.landing_page(req_gzip_jsonld)
+ rsp_html_headers, _, rsp_gzip_html = api_.landing_page(req_gzip_html)
+ rsp_gzip_headers, _, rsp_gzip_gzip = api_.landing_page(req_gzip_gzip)
+
+ # Validate compressed json response
+ assert rsp_json_headers['Content-Type'] == \
+ f'{FORMAT_TYPES[F_JSON]}; charset={enc_16}'
+ assert rsp_json_headers['Content-Encoding'] == F_GZIP
+
+ parsed_gzip_json = gzip.decompress(rsp_gzip_json).decode(enc_16)
+ assert isinstance(parsed_gzip_json, str)
+ parsed_gzip_json = json.loads(parsed_gzip_json)
+ assert isinstance(parsed_gzip_json, dict)
+ assert parsed_gzip_json == json.loads(rsp_json)
+
+ # Validate compressed jsonld response
+ assert rsp_jsonld_headers['Content-Type'] == \
+ f'{FORMAT_TYPES[F_JSONLD]}; charset={enc_16}'
+ assert rsp_jsonld_headers['Content-Encoding'] == F_GZIP
+
+ parsed_gzip_jsonld = gzip.decompress(rsp_gzip_jsonld).decode(enc_16)
+ assert isinstance(parsed_gzip_jsonld, str)
+ parsed_gzip_jsonld = json.loads(parsed_gzip_jsonld)
+ assert isinstance(parsed_gzip_jsonld, dict)
+ assert parsed_gzip_jsonld == json.loads(rsp_jsonld)
+
+ # Validate compressed html response
+ assert rsp_html_headers['Content-Type'] == \
+ f'{FORMAT_TYPES[F_HTML]}; charset={enc_16}'
+ assert rsp_html_headers['Content-Encoding'] == F_GZIP
+
+ parsed_gzip_html = gzip.decompress(rsp_gzip_html).decode(enc_16)
+ assert isinstance(parsed_gzip_html, str)
+ assert parsed_gzip_html == rsp_html
+
+ # Validate compressed gzip response
+ assert rsp_gzip_headers['Content-Type'] == \
+ f'{FORMAT_TYPES[F_GZIP]}; charset={enc_16}'
+ assert rsp_gzip_headers['Content-Encoding'] == F_GZIP
+
+ parsed_gzip_gzip = gzip.decompress(rsp_gzip_gzip).decode(enc_16)
+ assert isinstance(parsed_gzip_gzip, str)
+ parsed_gzip_gzip = json.loads(parsed_gzip_gzip)
+ assert isinstance(parsed_gzip_gzip, dict)
+
+ # Requests without content encoding header
+ req_json = mock_request(HTTP_ACCEPT=FORMAT_TYPES[F_JSON])
+ req_jsonld = mock_request(HTTP_ACCEPT=FORMAT_TYPES[F_JSONLD])
+ req_html = mock_request(HTTP_ACCEPT=FORMAT_TYPES[F_HTML])
+
+ # Responses without content encoding
+ _, _, rsp_json_ = api_.landing_page(req_json)
+ _, _, rsp_jsonld_ = api_.landing_page(req_jsonld)
+ _, _, rsp_html_ = api_.landing_page(req_html)
+
+ # Confirm each request is the same when decompressed
+ assert rsp_json_ == rsp_json == \
+ gzip.decompress(rsp_gzip_json).decode(enc_16)
+
+ assert rsp_jsonld_ == rsp_jsonld == \
+ gzip.decompress(rsp_gzip_jsonld).decode(enc_16)
+
+ assert rsp_html_ == rsp_html == \
+ gzip.decompress(rsp_gzip_html).decode(enc_16)
+
+
+def test_gzip_csv(config, api_, openapi):
+ req_csv = mock_request({'f': 'csv'})
+ rsp_csv_headers, _, rsp_csv = api_.get_collection_items(req_csv, 'obs')
+ assert rsp_csv_headers['Content-Type'] == 'text/csv; charset=utf-8'
+ rsp_csv = rsp_csv.decode('utf-8')
+
+ req_csv = mock_request({'f': 'csv'}, HTTP_ACCEPT_ENCODING=F_GZIP)
+ rsp_csv_headers, _, rsp_csv_gzip = api_.get_collection_items(req_csv, 'obs') # noqa
+ assert rsp_csv_headers['Content-Type'] == 'text/csv; charset=utf-8'
+ rsp_csv_ = gzip.decompress(rsp_csv_gzip).decode('utf-8')
+ assert rsp_csv == rsp_csv_
+
+ # Use utf-16 encoding
+ config['server']['encoding'] = 'utf-16'
+ api_ = API(config, openapi)
+
+ req_csv = mock_request({'f': 'csv'}, HTTP_ACCEPT_ENCODING=F_GZIP)
+ rsp_csv_headers, _, rsp_csv_gzip = api_.get_collection_items(req_csv, 'obs') # noqa
+ assert rsp_csv_headers['Content-Type'] == 'text/csv; charset=utf-8'
+ rsp_csv_ = gzip.decompress(rsp_csv_gzip).decode('utf-8')
+ assert rsp_csv == rsp_csv_
+
+
+def test_root(config, api_):
+ req = mock_request()
+ rsp_headers, code, response = api_.landing_page(req)
+ root = json.loads(response)
+
+ assert rsp_headers['Content-Type'] == 'application/json' == \
+ FORMAT_TYPES[F_JSON]
+ assert rsp_headers['X-Powered-By'].startswith('pygeoapi')
+ assert rsp_headers['Content-Language'] == 'en-US'
+
+ assert isinstance(root, dict)
+ assert 'links' in root
+ assert root['links'][0]['rel'] == 'self'
+ assert root['links'][0]['type'] == FORMAT_TYPES[F_JSON]
+ assert root['links'][0]['href'].endswith('?f=json')
+ assert any(link['href'].endswith('f=jsonld') and link['rel'] == 'alternate'
+ for link in root['links'])
+ assert any(link['href'].endswith('f=html') and link['rel'] == 'alternate'
+ for link in root['links'])
+ assert len(root['links']) == 11
+ assert 'title' in root
+ assert root['title'] == 'pygeoapi default instance'
+ assert 'description' in root
+ assert root['description'] == 'pygeoapi provides an API to geospatial data'
+
+ req = mock_request({'f': 'html'})
+ rsp_headers, code, response = api_.landing_page(req)
+ assert rsp_headers['Content-Type'] == FORMAT_TYPES[F_HTML]
+ assert rsp_headers['Content-Language'] == 'en-US'
+
+
+def test_root_structured_data(config, api_):
+ req = mock_request({"f": "jsonld"})
+ rsp_headers, code, response = api_.landing_page(req)
+ root = json.loads(response)
+
+ assert rsp_headers['Content-Type'] == 'application/ld+json' == \
+ FORMAT_TYPES[F_JSONLD]
+ assert rsp_headers['Content-Language'] == 'en-US'
+ assert rsp_headers['X-Powered-By'].startswith('pygeoapi')
+
+ assert isinstance(root, dict)
+ assert 'description' in root
+ assert root['description'] == 'pygeoapi provides an API to geospatial data'
+
+ assert '@context' in root
+ assert root['@context'] == 'https://schema.org/docs/jsonldcontext.jsonld'
+ expanded = jsonld.expand(root)[0]
+ assert '@type' in expanded
+ assert 'http://schema.org/DataCatalog' in expanded['@type']
+ assert 'http://schema.org/description' in expanded
+ assert root['description'] == expanded['http://schema.org/description'][0][
+ '@value']
+ assert 'http://schema.org/keywords' in expanded
+ assert len(expanded['http://schema.org/keywords']) == 3
+ assert '@value' in expanded['http://schema.org/keywords'][0].keys()
+ assert 'http://schema.org/provider' in expanded
+ assert expanded['http://schema.org/provider'][0]['@type'][
+ 0] == 'http://schema.org/Organization'
+ assert expanded['http://schema.org/name'][0]['@value'] == root['name']
+
+
+def test_conformance(config, api_):
+ req = mock_request()
+ rsp_headers, code, response = api_.conformance(req)
+ root = json.loads(response)
+
+ assert isinstance(root, dict)
+ assert 'conformsTo' in root
+ assert len(root['conformsTo']) == 34
+ assert 'http://www.opengis.net/spec/ogcapi-features-2/1.0/conf/crs' \
+ in root['conformsTo']
+
+ req = mock_request({'f': 'foo'})
+ rsp_headers, code, response = api_.conformance(req)
+ assert code == HTTPStatus.BAD_REQUEST
+
+ req = mock_request({'f': 'html'})
+ rsp_headers, code, response = api_.conformance(req)
+ assert rsp_headers['Content-Type'] == FORMAT_TYPES[F_HTML]
+ # No language requested: should be set to default from YAML
+ assert rsp_headers['Content-Language'] == 'en-US'
+
+
+def test_tilematrixsets(config, api_):
+ req = mock_request()
+ rsp_headers, code, response = api_.tilematrixsets(req)
+ root = json.loads(response)
+
+ assert isinstance(root, dict)
+ assert 'tileMatrixSets' in root
+ assert len(root['tileMatrixSets']) == 2
+ assert 'http://www.opengis.net/def/tilematrixset/OGC/1.0/WorldCRS84Quad' \
+ in root['tileMatrixSets'][0]['uri']
+ assert 'http://www.opengis.net/def/tilematrixset/OGC/1.0/WebMercatorQuad' \
+ in root['tileMatrixSets'][1]['uri']
+
+ req = mock_request({'f': 'foo'})
+ rsp_headers, code, response = api_.tilematrixsets(req)
+ assert code == HTTPStatus.BAD_REQUEST
+
+ req = mock_request({'f': 'html'})
+ rsp_headers, code, response = api_.tilematrixsets(req)
+ assert rsp_headers['Content-Type'] == FORMAT_TYPES[F_HTML]
+ # No language requested: should be set to default from YAML
+ assert rsp_headers['Content-Language'] == 'en-US'
+
+
+def test_tilematrixset(config, api_):
+ req = mock_request()
+
+ enums = [e.value for e in TileMatrixSetEnum]
+ enum = None
+
+ for e in enums:
+ enum = e.tileMatrixSet
+ rsp_headers, code, response = api_.tilematrixset(req, enum)
+ root = json.loads(response)
+
+ assert isinstance(root, dict)
+ assert 'id' in root
+ assert root['id'] == enum
+ assert 'tileMatrices' in root
+ assert len(root['tileMatrices']) == 30
+
+ rsp_headers, code, response = api_.tilematrixset(req, 'foo')
+ assert code == HTTPStatus.BAD_REQUEST
+
+ req = mock_request({'f': 'html'})
+ rsp_headers, code, response = api_.tilematrixset(req, enum)
+ assert rsp_headers['Content-Type'] == FORMAT_TYPES[F_HTML]
+ # No language requested: should be set to default from YAML
+ assert rsp_headers['Content-Language'] == 'en-US'
+
+
+def test_describe_collections(config, api_):
+ req = mock_request({"f": "foo"})
+ rsp_headers, code, response = api_.describe_collections(req)
+ assert code == HTTPStatus.BAD_REQUEST
+
+ req = mock_request({"f": "html"})
+ rsp_headers, code, response = api_.describe_collections(req)
+ assert rsp_headers['Content-Type'] == FORMAT_TYPES[F_HTML]
+
+ req = mock_request()
+ rsp_headers, code, response = api_.describe_collections(req)
+ collections = json.loads(response)
+
+ assert len(collections) == 2
+ assert len(collections['collections']) == 9
+ assert len(collections['links']) == 3
+
+ rsp_headers, code, response = api_.describe_collections(req, 'foo')
+ collection = json.loads(response)
+ assert code == HTTPStatus.NOT_FOUND
+
+ rsp_headers, code, response = api_.describe_collections(req, 'obs')
+ collection = json.loads(response)
+
+ assert rsp_headers['Content-Language'] == 'en-US'
+ assert collection['id'] == 'obs'
+ assert collection['title'] == 'Observations'
+ assert collection['description'] == 'My cool observations'
+ assert len(collection['links']) == 12
+ assert collection['extent'] == {
+ 'spatial': {
+ 'bbox': [[-180, -90, 180, 90]],
+ 'crs': 'http://www.opengis.net/def/crs/OGC/1.3/CRS84'
+ },
+ 'temporal': {
+ 'interval': [
+ ['2000-10-30T18:24:39+00:00', '2007-10-30T08:57:29+00:00']
+ ],
+ 'trs': 'http://www.opengis.net/def/uom/ISO-8601/0/Gregorian'
+ }
+ }
+
+ # OAPIF Part 2 CRS 6.2.1 A, B, configured CRS + defaults
+ assert collection['crs'] is not None
+ crs_set = [
+ 'http://www.opengis.net/def/crs/EPSG/0/28992',
+ 'http://www.opengis.net/def/crs/OGC/1.3/CRS84',
+ 'http://www.opengis.net/def/crs/EPSG/0/4326',
+ ]
+ for crs in crs_set:
+ assert crs in collection['crs']
+ assert collection['storageCRS'] is not None
+ assert collection['storageCRS'] == 'http://www.opengis.net/def/crs/OGC/1.3/CRS84' # noqa
+ assert 'storageCrsCoordinateEpoch' not in collection
+
+ # French language request
+ req = mock_request({'lang': 'fr'})
+ rsp_headers, code, response = api_.describe_collections(req, 'obs')
+ collection = json.loads(response)
+
+ assert rsp_headers['Content-Language'] == 'fr-CA'
+ assert collection['title'] == 'Observations'
+ assert collection['description'] == 'Mes belles observations'
+
+ # Check HTML request in an unsupported language
+ req = mock_request({'f': 'html', 'lang': 'de'})
+ rsp_headers, code, response = api_.describe_collections(req, 'obs')
+ assert rsp_headers['Content-Type'] == FORMAT_TYPES[F_HTML]
+ assert rsp_headers['Content-Language'] == 'en-US'
+
+ req = mock_request()
+ rsp_headers, code, response = api_.describe_collections(req,
+ 'gdps-temperature')
+ collection = json.loads(response)
+
+ assert collection['id'] == 'gdps-temperature'
+ assert len(collection['links']) == 14
+
+    # hierarchical collections
+ rsp_headers, code, response = api_.describe_collections(
+ req, 'naturalearth/lakes')
+ collection = json.loads(response)
+ assert collection['id'] == 'naturalearth/lakes'
+
+ # OAPIF Part 2 CRS 6.2.1 B, defaults when not configured
+ assert collection['crs'] is not None
+ default_crs_list = [
+ 'http://www.opengis.net/def/crs/OGC/1.3/CRS84',
+ 'http://www.opengis.net/def/crs/OGC/1.3/CRS84h',
+ ]
+ contains_default = False
+ for crs in default_crs_list:
+        if crs in collection['crs']:
+ contains_default = True
+ assert contains_default
+ assert collection['storageCRS'] is not None
+ assert collection['storageCRS'] == 'http://www.opengis.net/def/crs/OGC/1.3/CRS84' # noqa
+ assert collection['storageCrsCoordinateEpoch'] == 2017.23
+
+
+def test_describe_collections_hidden_resources(
+ config_hidden_resources, api_hidden_resources):
+ req = mock_request({})
+ rsp_headers, code, response = api_hidden_resources.describe_collections(req) # noqa
+ assert code == HTTPStatus.OK
+
+ assert len(config_hidden_resources['resources']) == 3
+
+ collections = json.loads(response)
+ assert len(collections['collections']) == 1
+
+
+def test_get_collection_queryables(config, api_):
+ req = mock_request()
+ rsp_headers, code, response = api_.get_collection_queryables(req,
+ 'notfound')
+ assert code == HTTPStatus.NOT_FOUND
+
+ req = mock_request({'f': 'html'})
+ rsp_headers, code, response = api_.get_collection_queryables(req, 'obs')
+ assert rsp_headers['Content-Type'] == FORMAT_TYPES[F_HTML]
+
+ req = mock_request({'f': 'json'})
+ rsp_headers, code, response = api_.get_collection_queryables(req, 'obs')
+ assert rsp_headers['Content-Type'] == 'application/schema+json'
+ queryables = json.loads(response)
+
+ assert 'properties' in queryables
+ assert len(queryables['properties']) == 5
+
+ # test with provider filtered properties
+ api_.config['resources']['obs']['providers'][0]['properties'] = ['stn_id']
+
+ rsp_headers, code, response = api_.get_collection_queryables(req, 'obs')
+ queryables = json.loads(response)
+
+ assert 'properties' in queryables
+ assert len(queryables['properties']) == 2
+ assert 'geometry' in queryables['properties']
+ assert queryables['properties']['geometry']['$ref'] == 'https://geojson.org/schema/Geometry.json' # noqa
+
+ # No language requested: should be set to default from YAML
+ assert rsp_headers['Content-Language'] == 'en-US'
+
+
+def test_describe_collections_json_ld(config, api_):
+ req = mock_request({'f': 'jsonld'})
+ rsp_headers, code, response = api_.describe_collections(req, 'obs')
+ collection = json.loads(response)
+
+ assert '@context' in collection
+ expanded = jsonld.expand(collection)[0]
+ # Metadata is about a schema:DataCollection that contains a schema:Dataset
+ assert not expanded['@id'].endswith('obs')
+ assert 'http://schema.org/dataset' in expanded
+ assert len(expanded['http://schema.org/dataset']) == 1
+ dataset = expanded['http://schema.org/dataset'][0]
+ assert dataset['@type'][0] == 'http://schema.org/Dataset'
+ assert len(dataset['http://schema.org/distribution']) == 12
+ assert all(dist['@type'][0] == 'http://schema.org/DataDownload'
+ for dist in dataset['http://schema.org/distribution'])
+
+ assert 'http://schema.org/Organization' in expanded[
+ 'http://schema.org/provider'][0]['@type']
+
+ assert 'http://schema.org/Place' in dataset[
+ 'http://schema.org/spatial'][0]['@type']
+ assert 'http://schema.org/GeoShape' in dataset[
+ 'http://schema.org/spatial'][0]['http://schema.org/geo'][0]['@type']
+ assert dataset['http://schema.org/spatial'][0]['http://schema.org/geo'][
+ 0]['http://schema.org/box'][0]['@value'] == '-180,-90 180,90'
+
+ assert 'http://schema.org/temporalCoverage' in dataset
+ assert dataset['http://schema.org/temporalCoverage'][0][
+ '@value'] == '2000-10-30T18:24:39+00:00/2007-10-30T08:57:29+00:00'
+
+ # No language requested: should be set to default from YAML
+ assert rsp_headers['Content-Language'] == 'en-US'
+
+
def test_get_collection_items(config, api_):
    """Exercise the items endpoint on 'obs' and 'naturalearth/lakes'.

    Covers: unknown collection, unsupported format, bbox and bbox-crs
    validation, paging (limit/offset) and the resulting links,
    property filtering, sorting, CSV output, datetime filtering
    against the configured temporal extent, skipGeometry and
    properties selection.
    """
    # Unknown collection -> 404
    req = mock_request()
    rsp_headers, code, response = api_.get_collection_items(req, 'foo')
    features = json.loads(response)
    assert code == HTTPStatus.NOT_FOUND

    # Unsupported output format -> 400
    req = mock_request({'f': 'foo'})
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')
    features = json.loads(response)

    assert code == HTTPStatus.BAD_REQUEST

    # bbox must contain four values
    req = mock_request({'bbox': '1,2,3'})
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')
    features = json.loads(response)

    assert code == HTTPStatus.BAD_REQUEST

    # bbox values must be numeric
    req = mock_request({'bbox': '1,2,3,4c'})
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')

    assert code == HTTPStatus.BAD_REQUEST

    # bbox-crs must be a well-formed CRS URI
    req = mock_request({'bbox': '1,2,3,4', 'bbox-crs': 'bad_value'})
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')

    assert code == HTTPStatus.BAD_REQUEST

    # bbox-crs is validated even when no bbox is given
    req = mock_request({'bbox-crs': 'bad_value'})
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')

    assert code == HTTPStatus.BAD_REQUEST

    # bbox-crs must be in configured values for Collection
    req = mock_request({'bbox': '1,2,3,4', 'bbox-crs': 'http://www.opengis.net/def/crs/EPSG/0/4258'})  # noqa
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')

    assert code == HTTPStatus.BAD_REQUEST

    # bbox-crs must be in configured values for Collection (CSV will ignore)
    req = mock_request({'bbox': '52,4,53,5', 'bbox-crs': 'http://www.opengis.net/def/crs/EPSG/0/4326'})  # noqa
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')

    assert code == HTTPStatus.OK

    # bbox-crs can be a default even if not configured
    req = mock_request({'bbox': '4,52,5,53', 'bbox-crs': 'http://www.opengis.net/def/crs/OGC/1.3/CRS84'})  # noqa
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')

    assert code == HTTPStatus.OK

    # Without bbox-crs, the default CRS84 is assumed for the bbox
    req = mock_request({'bbox': '4,52,5,53'})  # noqa
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')

    assert code == HTTPStatus.OK

    # Requested language is honoured for HTML output
    req = mock_request({'f': 'html', 'lang': 'fr'})
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')
    assert rsp_headers['Content-Type'] == FORMAT_TYPES[F_HTML]
    assert rsp_headers['Content-Language'] == 'fr-CA'

    req = mock_request()
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')
    features = json.loads(response)
    # No language requested: should be set to default from YAML
    assert rsp_headers['Content-Language'] == 'en-US'

    assert len(features['features']) == 5

    # resulttype=hits returns counts only, no features
    req = mock_request({'resulttype': 'hits'})
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')
    features = json.loads(response)

    assert len(features['features']) == 0

    # Invalid limit
    req = mock_request({'limit': 0})
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')
    features = json.loads(response)

    assert code == HTTPStatus.BAD_REQUEST

    # Filter on a configured queryable property
    req = mock_request({'stn_id': '35'})
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')
    features = json.loads(response)

    assert len(features['features']) == 2
    assert features['numberMatched'] == 2

    req = mock_request({'stn_id': '35', 'value': '93.9'})
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')
    features = json.loads(response)

    assert len(features['features']) == 1
    assert features['numberMatched'] == 1

    req = mock_request({'limit': 2})
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')
    features = json.loads(response)

    assert len(features['features']) == 2
    assert features['features'][1]['properties']['stn_id'] == 35

    # First page: self, two alternates and the collection link
    links = features['links']
    assert len(links) == 4
    assert '/collections/obs/items?f=json' in links[0]['href']
    assert links[0]['rel'] == 'self'
    assert '/collections/obs/items?f=jsonld' in links[1]['href']
    assert links[1]['rel'] == 'alternate'
    assert '/collections/obs/items?f=html' in links[2]['href']
    assert links[2]['rel'] == 'alternate'
    assert '/collections/obs' in links[3]['href']
    assert links[3]['rel'] == 'collection'

    # Invalid offset
    req = mock_request({'offset': -1})
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')
    features = json.loads(response)

    assert code == HTTPStatus.BAD_REQUEST

    req = mock_request({'offset': 2})
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')
    features = json.loads(response)

    assert len(features['features']) == 3
    assert features['features'][1]['properties']['stn_id'] == 2147

    # A positive offset adds a 'prev' link
    links = features['links']
    assert len(links) == 5
    assert '/collections/obs/items?f=json' in links[0]['href']
    assert links[0]['rel'] == 'self'
    assert '/collections/obs/items?f=jsonld' in links[1]['href']
    assert links[1]['rel'] == 'alternate'
    assert '/collections/obs/items?f=html' in links[2]['href']
    assert links[2]['rel'] == 'alternate'
    assert '/collections/obs/items?offset=0' in links[3]['href']
    assert links[3]['rel'] == 'prev'
    assert '/collections/obs' in links[4]['href']
    assert links[4]['rel'] == 'collection'

    # Query parameters are propagated into the paging links
    req = mock_request({
        'offset': 1,
        'limit': 1,
        'bbox': '-180,90,180,90'
    })
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')
    features = json.loads(response)

    assert len(features['features']) == 1

    links = features['links']
    assert len(links) == 5
    assert '/collections/obs/items?f=json&limit=1&bbox=-180,90,180,90' in \
        links[0]['href']
    assert links[0]['rel'] == 'self'
    assert '/collections/obs/items?f=jsonld&limit=1&bbox=-180,90,180,90' in \
        links[1]['href']
    assert links[1]['rel'] == 'alternate'
    assert '/collections/obs/items?f=html&limit=1&bbox=-180,90,180,90' in \
        links[2]['href']
    assert links[2]['rel'] == 'alternate'
    assert '/collections/obs/items?offset=0&limit=1&bbox=-180,90,180,90' \
        in links[3]['href']
    assert links[3]['rel'] == 'prev'
    assert '/collections/obs' in links[4]['href']
    assert links[4]['rel'] == 'collection'

    # sortby on an unknown property -> 400
    req = mock_request({
        'sortby': 'bad-property',
        'stn_id': '35'
    })
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')

    assert code == HTTPStatus.BAD_REQUEST

    # sortby accepts bare, '+'-prefixed and '-'-prefixed property names
    req = mock_request({'sortby': 'stn_id'})
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')
    features = json.loads(response)
    assert code == HTTPStatus.OK

    req = mock_request({'sortby': '+stn_id'})
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')
    features = json.loads(response)
    assert code == HTTPStatus.OK

    req = mock_request({'sortby': '-stn_id'})
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')
    features = json.loads(response)
    assert code == HTTPStatus.OK

    req = mock_request({'f': 'csv'})
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')

    assert rsp_headers['Content-Type'] == 'text/csv; charset=utf-8'

    # datetime filtering against the configured temporal extent
    req = mock_request({'datetime': '2003'})
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')

    assert code == HTTPStatus.OK

    req = mock_request({'datetime': '1999'})
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')

    assert code == HTTPStatus.BAD_REQUEST

    req = mock_request({'datetime': '2010-04-22'})
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')

    assert code == HTTPStatus.BAD_REQUEST

    # Closed and half-open intervals
    req = mock_request({'datetime': '2001-11-11/2003-12-18'})
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')

    assert code == HTTPStatus.OK

    req = mock_request({'datetime': '../2003-12-18'})
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')

    assert code == HTTPStatus.OK

    req = mock_request({'datetime': '2001-11-11/..'})
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')

    assert code == HTTPStatus.OK

    req = mock_request({'datetime': '1999/2005-04-22'})
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')

    assert code == HTTPStatus.OK

    req = mock_request({'datetime': '1999/2000-04-22'})
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')

    assert code == HTTPStatus.BAD_REQUEST

    # NOTE: mutates the (function-scoped) config; with no temporal
    # extent configured, any datetime range is accepted
    api_.config['resources']['obs']['extents'].pop('temporal')

    req = mock_request({'datetime': '2002/2014-04-22'})
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')

    assert code == HTTPStatus.OK

    # Property filter on a different collection, capped by default limit
    req = mock_request({'scalerank': 1})
    rsp_headers, code, response = api_.get_collection_items(
        req, 'naturalearth/lakes')
    features = json.loads(response)

    assert len(features['features']) == 10
    assert features['numberMatched'] == 11
    assert features['numberReturned'] == 10

    # Collection without a time field rejects datetime filtering
    req = mock_request({'datetime': '2005-04-22'})
    rsp_headers, code, response = api_.get_collection_items(
        req, 'naturalearth/lakes')

    assert code == HTTPStatus.BAD_REQUEST

    req = mock_request({'skipGeometry': 'true'})
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')

    assert json.loads(response)['features'][0]['geometry'] is None

    # Unknown properties in the selection -> 400
    req = mock_request({'properties': 'foo,bar'})
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')

    assert code == HTTPStatus.BAD_REQUEST
+
+
def test_get_collection_items_crs(config, api_):
    """Exercise the 'crs' query parameter on the 'norway_pop' collection.

    Checks rejection of invalid/unsupported CRS URIs, the Content-Crs
    response header for each supported CRS, and that returned
    coordinates match a pyproj reprojection of the storage-CRS
    geometries (to within 1e-5).
    """
    # Invalid CRS query parameter
    req = mock_request({'crs': '4326'})
    rsp_headers, code, response = api_.get_collection_items(req, 'norway_pop')

    assert code == HTTPStatus.BAD_REQUEST

    # Unsupported CRS
    req = mock_request({'crs': 'http://www.opengis.net/def/crs/EPSG/0/32633'})
    rsp_headers, code, response = api_.get_collection_items(req, 'norway_pop')

    assert code == HTTPStatus.BAD_REQUEST

    # Supported CRSs
    default_crs = 'http://www.opengis.net/def/crs/OGC/1.3/CRS84'
    storage_crs = 'http://www.opengis.net/def/crs/EPSG/0/25833'
    crs_4258 = 'http://www.opengis.net/def/crs/EPSG/0/4258'
    supported_crs_list = [default_crs, storage_crs, crs_4258]

    # Each supported CRS is accepted and echoed in Content-Crs
    for crs in supported_crs_list:
        req = mock_request({'crs': crs})
        rsp_headers, code, response = api_.get_collection_items(
            req, 'norway_pop',
        )

        assert code == HTTPStatus.OK
        assert rsp_headers['Content-Crs'] == f'<{crs}>'

    # With CRS query parameter, using storageCRS
    req = mock_request({'crs': storage_crs})
    rsp_headers, code, response = api_.get_collection_items(req, 'norway_pop')

    assert code == HTTPStatus.OK
    assert rsp_headers['Content-Crs'] == f'<{storage_crs}>'

    features_25833 = json.loads(response)

    # With CRS query parameter resulting in coordinates transformation
    req = mock_request({'crs': crs_4258})
    rsp_headers, code, response = api_.get_collection_items(req, 'norway_pop')

    assert code == HTTPStatus.OK
    assert rsp_headers['Content-Crs'] == f'<{crs_4258}>'

    features_4258 = json.loads(response)
    transform_func = pyproj.Transformer.from_crs(
        pyproj.CRS.from_epsg(25833),
        pyproj.CRS.from_epsg(4258),
        always_xy=False,
    ).transform
    # Every returned point must equal the reprojected storage point
    for feat_orig in features_25833['features']:
        id_ = feat_orig['id']
        x, y, *_ = feat_orig['geometry']['coordinates']
        loc_transf = Point(transform_func(x, y))
        for feat_out in features_4258['features']:
            if id_ == feat_out['id']:
                loc_out = Point(feat_out['geometry']['coordinates'][:2])

                assert loc_out.equals_exact(loc_transf, 1e-5)
                break

    # Without CRS query parameter: assume Transform to default WGS84 lon,lat
    req = mock_request({})
    rsp_headers, code, response = api_.get_collection_items(req, 'norway_pop')

    assert code == HTTPStatus.OK
    assert rsp_headers['Content-Crs'] == f'<{default_crs}>'

    features_wgs84 = json.loads(response)

    # Compare default-CRS output against a reprojection of the 4258 output
    transform_func = pyproj.Transformer.from_crs(
        pyproj.CRS.from_epsg(4258),
        get_crs_from_uri(default_crs),
        always_xy=False,
    ).transform
    for feat_orig in features_4258['features']:
        id_ = feat_orig['id']
        x, y, *_ = feat_orig['geometry']['coordinates']
        loc_transf = Point(transform_func(x, y))
        for feat_out in features_wgs84['features']:
            if id_ == feat_out['id']:
                loc_out = Point(feat_out['geometry']['coordinates'][:2])

                assert loc_out.equals_exact(loc_transf, 1e-5)
                break
+
+
def test_manage_collection_item_read_only_options_req(config, api_):
    """Test OPTIONS request on a read-only items endpoint.

    Unknown collections yield 404; a read-only collection advertises
    only HEAD and GET, both for the items list and a single item.
    """
    req = mock_request()
    _, code, _ = api_.manage_collection_item(req, 'options', 'foo')
    assert code == HTTPStatus.NOT_FOUND

    req = mock_request()
    rsp_headers, code, _ = api_.manage_collection_item(req, 'options', 'obs')
    assert code == HTTPStatus.OK
    assert rsp_headers['Allow'] == 'HEAD, GET'

    # Same Allow set when an item id is supplied
    req = mock_request()
    rsp_headers, code, _ = api_.manage_collection_item(
        req, 'options', 'obs', 'ressource_id')
    assert code == HTTPStatus.OK
    assert rsp_headers['Allow'] == 'HEAD, GET'
+
+
def test_manage_collection_item_editable_options_req(config):
    """Test OPTIONS request on a editable items endpoint.

    With the provider flagged editable, the items list additionally
    allows POST, and a single item allows PUT and DELETE.
    """
    # Deep-copy so the editable flag does not leak into other tests
    config = copy.deepcopy(config)
    config['resources']['obs']['providers'][0]['editable'] = True
    api_ = API(config, openapi)

    req = mock_request()
    rsp_headers, code, _ = api_.manage_collection_item(req, 'options', 'obs')
    assert code == HTTPStatus.OK
    assert rsp_headers['Allow'] == 'HEAD, GET, POST'

    req = mock_request()
    rsp_headers, code, _ = api_.manage_collection_item(
        req, 'options', 'obs', 'ressource_id')
    assert code == HTTPStatus.OK
    assert rsp_headers['Allow'] == 'HEAD, GET, PUT, DELETE'
+
+
def test_describe_collections_enclosures(config_enclosure, enclosure_api):
    """Check how configured 'enclosure' links are verified/updated.

    Compares the enclosure links from the configuration with those in
    the collection description: fully specified links (type + length)
    pass through untouched, while links missing a length get type and
    length filled in.
    """
    original_enclosures = {
        lnk['title']: lnk
        for lnk in config_enclosure['resources']['objects']['links']
        if lnk['rel'] == 'enclosure'
    }

    req = mock_request()
    _, _, response = enclosure_api.describe_collections(req, 'objects')
    features = json.loads(response)
    modified_enclosures = {
        lnk['title']: lnk for lnk in features['links']
        if lnk['rel'] == 'enclosure'
    }

    # If type and length is set, do not verify/update link
    assert original_enclosures['download link 1'] == \
        modified_enclosures['download link 1']
    # If length is missing, modify link type and length
    assert original_enclosures['download link 2']['type'] == \
        modified_enclosures['download link 2']['type']
    assert modified_enclosures['download link 2']['type'] == \
        modified_enclosures['download link 3']['type']
    assert 'length' not in original_enclosures['download link 2']
    assert modified_enclosures['download link 2']['length'] > 0
    assert modified_enclosures['download link 2']['length'] == \
        modified_enclosures['download link 3']['length']
    # Link 3's configured type was wrong and must have been corrected
    assert original_enclosures['download link 3']['type'] != \
        modified_enclosures['download link 3']['type']
+
+
def test_get_collection_items_json_ld(config, api_):
    """Verify the JSON-LD representation of an items (feature list) page.

    Checks the @context contents and that the expanded document lists
    exactly the requested number of features as schema:itemListElement.
    """
    req = mock_request({
        'f': 'jsonld',
        'limit': 2
    })
    rsp_headers, code, response = api_.get_collection_items(req, 'obs')

    assert rsp_headers['Content-Type'] == FORMAT_TYPES[F_JSONLD]
    # No language requested: return default from YAML
    assert rsp_headers['Content-Language'] == 'en-US'
    collection = json.loads(response)

    assert '@context' in collection
    assert all((f in collection['@context'][0] for
                f in ('schema', 'type', 'features', 'FeatureCollection')))
    assert len(collection['@context']) > 1
    assert collection['@context'][1]['schema'] == 'https://schema.org/'
    expanded = jsonld.expand(collection)[0]
    featuresUri = 'https://schema.org/itemListElement'
    # limit=2 -> exactly two features in the expanded list
    assert len(expanded[featuresUri]) == 2
+
+
def test_get_collection_item(config, api_):
    """Exercise retrieval of a single feature.

    Covers: bad format, items requested on a coverage-only collection,
    unknown collection, unknown item id, HTML output, and the JSON
    feature content including absence of prev/next links.
    """
    # Unsupported output format -> 400
    req = mock_request({'f': 'foo'})
    rsp_headers, code, response = api_.get_collection_item(req, 'obs', '371')

    assert code == HTTPStatus.BAD_REQUEST

    # 'gdps-temperature' is a coverage collection: no single-item access
    req = mock_request({'f': 'json'})
    rsp_headers, code, response = api_.get_collection_item(
        req, 'gdps-temperature', '371')

    assert code == HTTPStatus.BAD_REQUEST

    # Unknown collection -> 404
    req = mock_request()
    rsp_headers, code, response = api_.get_collection_item(req, 'foo', '371')

    assert code == HTTPStatus.NOT_FOUND

    # Unknown item id -> 404
    rsp_headers, code, response = api_.get_collection_item(
        req, 'obs', 'notfound')

    assert code == HTTPStatus.NOT_FOUND

    req = mock_request({'f': 'html'})
    rsp_headers, code, response = api_.get_collection_item(req, 'obs', '371')

    assert rsp_headers['Content-Type'] == FORMAT_TYPES[F_HTML]
    assert rsp_headers['Content-Language'] == 'en-US'

    req = mock_request()
    rsp_headers, code, response = api_.get_collection_item(req, 'obs', '371')
    feature = json.loads(response)

    assert feature['properties']['stn_id'] == 35
    # Single-feature responses carry no paging links
    assert 'prev' not in feature['links']
    assert 'next' not in feature['links']
+
+
def test_get_collection_item_json_ld(config, api_):
    """Verify the JSON-LD representation of single features.

    For each geometry type in the 'objects' collection (Point,
    MultiPoint, LineString, MultiLineString, Polygon, MultiPolygon),
    expands the JSON-LD and checks the GeoSPARQL WKT serialization and
    the corresponding schema.org geo property. Also checks language
    negotiation on the 'obs' collection.
    """
    req = mock_request({'f': 'jsonld'})
    rsp_headers, _, response = api_.get_collection_item(req, 'objects', '3')
    assert rsp_headers['Content-Type'] == FORMAT_TYPES[F_JSONLD]
    assert rsp_headers['Content-Language'] == 'en-US'
    feature = json.loads(response)
    assert '@context' in feature
    assert all((f in feature['@context'][0] for
                f in ('schema', 'type', 'gsp')))
    assert len(feature['@context']) == 1
    assert 'schema' in feature['@context'][0]
    assert feature['@context'][0]['schema'] == 'https://schema.org/'
    assert feature['id'] == 3
    expanded = jsonld.expand(feature)[0]

    # Point: WKT plus schema:latitude/longitude
    assert expanded['@id'].startswith('http://')
    assert expanded['@id'].endswith('/collections/objects/items/3')
    assert expanded['http://www.opengis.net/ont/geosparql#hasGeometry'][0][
        'http://www.opengis.net/ont/geosparql#asWKT'][0][
        '@value'] == 'POINT (-85 33)'
    assert expanded['https://schema.org/geo'][0][
        'https://schema.org/latitude'][0][
        '@value'] == 33
    assert expanded['https://schema.org/geo'][0][
        'https://schema.org/longitude'][0][
        '@value'] == -85

    # MultiPoint: schema geo is rendered as a polygon string
    _, _, response = api_.get_collection_item(req, 'objects', '2')
    feature = json.loads(response)
    assert feature['geometry']['type'] == 'MultiPoint'
    expanded = jsonld.expand(feature)[0]
    assert expanded['http://www.opengis.net/ont/geosparql#hasGeometry'][0][
        'http://www.opengis.net/ont/geosparql#asWKT'][0][
        '@value'] == 'MULTIPOINT (10 40, 40 30, 20 20, 30 10)'
    assert expanded['https://schema.org/geo'][0][
        'https://schema.org/polygon'][0][
        '@value'] == "10.0,40.0 40.0,30.0 20.0,20.0 30.0,10.0 10.0,40.0"

    # LineString -> schema:line
    _, _, response = api_.get_collection_item(req, 'objects', '1')
    feature = json.loads(response)
    expanded = jsonld.expand(feature)[0]
    assert expanded['http://www.opengis.net/ont/geosparql#hasGeometry'][0][
        'http://www.opengis.net/ont/geosparql#asWKT'][0][
        '@value'] == 'LINESTRING (30 10, 10 30, 40 40)'
    assert expanded['https://schema.org/geo'][0][
        'https://schema.org/line'][0][
        '@value'] == '30.0,10.0 10.0,30.0 40.0,40.0'

    # MultiLineString -> flattened schema:line
    _, _, response = api_.get_collection_item(req, 'objects', '4')
    feature = json.loads(response)
    expanded = jsonld.expand(feature)[0]
    assert expanded['http://www.opengis.net/ont/geosparql#hasGeometry'][0][
        'http://www.opengis.net/ont/geosparql#asWKT'][0][
        '@value'] == 'MULTILINESTRING ((10 10, 20 20, 10 40), ' \
                     '(40 40, 30 30, 40 20, 30 10))'
    assert expanded['https://schema.org/geo'][0][
        'https://schema.org/line'][0][
        '@value'] == '10.0,10.0 20.0,20.0 10.0,40.0 40.0,40.0 ' \
                     '30.0,30.0 40.0,20.0 30.0,10.0'

    # Polygon -> schema:polygon
    _, _, response = api_.get_collection_item(req, 'objects', '5')
    feature = json.loads(response)
    expanded = jsonld.expand(feature)[0]
    assert expanded['http://www.opengis.net/ont/geosparql#hasGeometry'][0][
        'http://www.opengis.net/ont/geosparql#asWKT'][0][
        '@value'] == 'POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))'
    assert expanded['https://schema.org/geo'][0][
        'https://schema.org/polygon'][0][
        '@value'] == '30.0,10.0 40.0,40.0 20.0,40.0 10.0,20.0 30.0,10.0'

    # MultiPolygon -> schema:polygon of the overall hull
    _, _, response = api_.get_collection_item(req, 'objects', '7')
    feature = json.loads(response)
    expanded = jsonld.expand(feature)[0]
    assert expanded['http://www.opengis.net/ont/geosparql#hasGeometry'][0][
        'http://www.opengis.net/ont/geosparql#asWKT'][0][
        '@value'] == 'MULTIPOLYGON (((30 20, 45 40, 10 40, 30 20)), '\
                     '((15 5, 40 10, 10 20, 5 10, 15 5)))'
    assert expanded['https://schema.org/geo'][0][
        'https://schema.org/polygon'][0][
        '@value'] == '15.0,5.0 5.0,10.0 10.0,40.0 '\
                     '45.0,40.0 40.0,10.0 15.0,5.0'

    # Requested language is honoured for JSON-LD too
    req = mock_request({'f': 'jsonld', 'lang': 'fr'})
    rsp_headers, code, response = api_.get_collection_item(req, 'obs', '371')
    assert rsp_headers['Content-Type'] == FORMAT_TYPES[F_JSONLD]
    assert rsp_headers['Content-Language'] == 'fr-CA'
+
+
def test_get_coverage_domainset(config, api_):
    """Verify the coverage DomainSet of 'gdps-temperature'.

    A non-coverage collection ('obs') is rejected; the coverage
    collection reports its general grid axes and grid limits.
    """
    req = mock_request()
    rsp_headers, code, response = api_.get_collection_coverage_domainset(
        req, 'obs')

    assert code == HTTPStatus.BAD_REQUEST

    rsp_headers, code, response = api_.get_collection_coverage_domainset(
        req, 'gdps-temperature')

    domainset = json.loads(response)

    assert domainset['type'] == 'DomainSet'
    assert domainset['generalGrid']['axisLabels'] == ['Long', 'Lat']
    assert domainset['generalGrid']['gridLimits']['axisLabels'] == ['i', 'j']
    assert domainset['generalGrid']['gridLimits']['axis'][0]['upperBound'] == 2400  # noqa
    assert domainset['generalGrid']['gridLimits']['axis'][1]['upperBound'] == 1201  # noqa
+
+
def test_get_collection_coverage_rangetype(config, api_):
    """Verify the coverage RangeType of 'gdps-temperature'.

    A non-coverage collection ('obs') is rejected; the coverage
    collection reports a single temperature field with its unit code.
    """
    req = mock_request()
    rsp_headers, code, response = api_.get_collection_coverage_rangetype(
        req, 'obs')

    assert code == HTTPStatus.BAD_REQUEST

    rsp_headers, code, response = api_.get_collection_coverage_rangetype(
        req, 'gdps-temperature')

    rangetype = json.loads(response)

    assert rangetype['type'] == 'DataRecord'
    assert len(rangetype['field']) == 1
    assert rangetype['field'][0]['id'] == 1
    assert rangetype['field'][0]['name'] == 'Temperature [C]'
    assert rangetype['field'][0]['uom']['code'] == '[C]'
+
+
def test_get_collection_coverage(config, api_):
    """Exercise coverage retrieval on 'gdps-temperature' and 'cmip5'.

    Covers: non-coverage collection, invalid properties/subset/format,
    HTML negotiation (unsupported for coverages), CoverageJSON output
    with subset and bbox, native GRIB output, and NetCDF via Accept
    header.
    """
    # 'obs' is not a coverage collection
    req = mock_request()
    rsp_headers, code, response = api_.get_collection_coverage(
        req, 'obs')

    assert code == HTTPStatus.BAD_REQUEST

    # Unknown range property
    req = mock_request({'properties': '12'})
    rsp_headers, code, response = api_.get_collection_coverage(
        req, 'gdps-temperature')

    assert code == HTTPStatus.BAD_REQUEST

    # Unknown subset axis name
    req = mock_request({'subset': 'bad_axis(10:20)'})
    rsp_headers, code, response = api_.get_collection_coverage(
        req, 'gdps-temperature')

    assert code == HTTPStatus.BAD_REQUEST

    req = mock_request({'f': 'blah'})
    rsp_headers, code, response = api_.get_collection_coverage(
        req, 'gdps-temperature')

    assert code == HTTPStatus.BAD_REQUEST

    # HTML is not an available coverage format; the error page is HTML
    req = mock_request({'f': 'html'})
    rsp_headers, code, response = api_.get_collection_coverage(
        req, 'gdps-temperature')

    assert code == HTTPStatus.BAD_REQUEST
    assert rsp_headers['Content-Type'] == 'text/html'

    req = mock_request(HTTP_ACCEPT='text/html')
    rsp_headers, code, response = api_.get_collection_coverage(
        req, 'gdps-temperature')

    # NOTE: This test used to assert the code to be 200 OK,
    # but it requested HTML, which is not available,
    # so it should be 400 Bad Request
    assert code == HTTPStatus.BAD_REQUEST
    assert rsp_headers['Content-Type'] == 'text/html'

    # Valid axis subset -> CoverageJSON
    req = mock_request({'subset': 'Lat(5:10),Long(5:10)'})
    rsp_headers, code, response = api_.get_collection_coverage(
        req, 'gdps-temperature')

    assert code == HTTPStatus.OK
    content = json.loads(response)

    assert content['domain']['axes']['x']['num'] == 35
    assert content['domain']['axes']['y']['num'] == 35
    assert 'TMP' in content['parameters']
    assert 'TMP' in content['ranges']
    assert content['ranges']['TMP']['axisNames'] == ['y', 'x']

    # bbox subsetting is reflected in the domain axis extents
    req = mock_request({'bbox': '-79,45,-75,49'})
    rsp_headers, code, response = api_.get_collection_coverage(
        req, 'gdps-temperature')

    assert code == HTTPStatus.OK
    content = json.loads(response)

    assert content['domain']['axes']['x']['start'] == -79.0
    assert content['domain']['axes']['x']['stop'] == -75.0
    assert content['domain']['axes']['y']['start'] == 49.0
    assert content['domain']['axes']['y']['stop'] == 45.0

    # Native (binary) GRIB output
    req = mock_request({
        'subset': 'Lat(5:10),Long(5:10)',
        'f': 'GRIB'
    })
    rsp_headers, code, response = api_.get_collection_coverage(
        req, 'gdps-temperature')

    assert code == HTTPStatus.OK
    assert isinstance(response, bytes)

    # NetCDF negotiated via the Accept header
    req = mock_request(HTTP_ACCEPT='application/x-netcdf')
    rsp_headers, code, response = api_.get_collection_coverage(
        req, 'cmip5')

    assert code == HTTPStatus.OK
    assert rsp_headers['Content-Type'] == 'application/x-netcdf'

    # req = mock_request({
    #     'subset': 'time("2006-07-01T06:00:00":"2007-07-01T06:00:00")'
    # })
    # rsp_headers, code, response = api_.get_collection_coverage(req, 'cmip5')
    #
    # assert code == HTTPStatus.OK
    # assert isinstance(json.loads(response), dict)

    # req = mock_request({'subset': 'lat(1:2'})
    # rsp_headers, code, response = api_.get_collection_coverage(req, 'cmip5')
    #
    # assert code == HTTPStatus.BAD_REQUEST
    #
    # req = mock_request({'subset': 'lat(1:2)'})
    # rsp_headers, code, response = api_.get_collection_coverage(req, 'cmip5')
    #
    # assert code == HTTPStatus.NO_CONTENT
+
+
def test_get_collection_map(config, api_):
    """Verify map retrieval: 404 for unknown collections, and a PNG
    byte stream for the configured MapServer world map.
    """
    req = mock_request()
    rsp_headers, code, response = api_.get_collection_map(req, 'notfound')
    assert code == HTTPStatus.NOT_FOUND

    req = mock_request()
    rsp_headers, code, response = api_.get_collection_map(
        req, 'mapserver_world_map')
    assert code == HTTPStatus.OK
    assert isinstance(response, bytes)
    # PNG magic number: bytes 1-3 of the signature spell 'PNG'
    assert response[1:4] == b'PNG'
+
+
def test_get_collection_tiles(config, api_):
    """Verify the tiles description endpoint.

    'obs' has no tile provider (400); 'naturalearth/lakes' returns a
    tileset document with links regardless of the requested language.
    """
    req = mock_request()
    rsp_headers, code, response = api_.get_collection_tiles(req, 'obs')
    assert code == HTTPStatus.BAD_REQUEST

    rsp_headers, code, response = api_.get_collection_tiles(
        req, 'naturalearth/lakes')
    assert code == HTTPStatus.OK

    # Language settings should be ignored (return system default)
    req = mock_request({'lang': 'fr'})
    rsp_headers, code, response = api_.get_collection_tiles(
        req, 'naturalearth/lakes')
    assert rsp_headers['Content-Language'] == 'en-US'
    content = json.loads(response)
    assert len(content['links']) > 0
    assert len(content['tilesets']) > 0
+
+
def test_describe_processes(config, api_):
    """Exercise the processes description endpoint.

    Covers: limit parameter, unknown process, full process list, the
    'hello-world' process document, content negotiation (HTML/JSON via
    header and query parameter), language negotiation (fr), and
    robustness when a process example is missing.
    """
    req = mock_request({'limit': 1})
    # Test for description of single processes
    rsp_headers, code, response = api_.describe_processes(req)
    data = json.loads(response)
    assert code == HTTPStatus.OK
    assert len(data['processes']) == 1
    assert len(data['links']) == 3

    req = mock_request()

    # Test for undefined process
    rsp_headers, code, response = api_.describe_processes(req, 'foo')
    data = json.loads(response)
    assert code == HTTPStatus.NOT_FOUND
    assert data['code'] == 'NoSuchProcess'

    # Test for description of all processes
    rsp_headers, code, response = api_.describe_processes(req)
    data = json.loads(response)
    assert code == HTTPStatus.OK
    assert len(data['processes']) == 2
    assert len(data['links']) == 3

    # Test for particular, defined process
    rsp_headers, code, response = api_.describe_processes(req, 'hello-world')
    process = json.loads(response)
    assert code == HTTPStatus.OK
    assert rsp_headers['Content-Type'] == FORMAT_TYPES[F_JSON]
    assert process['id'] == 'hello-world'
    assert process['version'] == '0.2.0'
    assert process['title'] == 'Hello World'
    assert len(process['keywords']) == 3
    assert len(process['links']) == 6
    assert len(process['inputs']) == 2
    assert len(process['outputs']) == 1
    assert len(process['outputTransmission']) == 1
    assert len(process['jobControlOptions']) == 2
    assert 'sync-execute' in process['jobControlOptions']
    assert 'async-execute' in process['jobControlOptions']

    # Check HTML response when requested in headers
    req = mock_request(HTTP_ACCEPT='text/html')
    rsp_headers, code, response = api_.describe_processes(req, 'hello-world')
    assert code == HTTPStatus.OK
    assert rsp_headers['Content-Type'] == FORMAT_TYPES[F_HTML]
    # No language requested: return default from YAML
    assert rsp_headers['Content-Language'] == 'en-US'

    # Check JSON response when requested in headers
    req = mock_request(HTTP_ACCEPT='application/json')
    rsp_headers, code, response = api_.describe_processes(req, 'hello-world')
    assert code == HTTPStatus.OK
    assert rsp_headers['Content-Type'] == FORMAT_TYPES[F_JSON]
    assert rsp_headers['Content-Language'] == 'en-US'

    # Check HTML response when requested with query parameter
    req = mock_request({'f': 'html'})
    rsp_headers, code, response = api_.describe_processes(req, 'hello-world')
    assert code == HTTPStatus.OK
    assert rsp_headers['Content-Type'] == FORMAT_TYPES[F_HTML]
    # No language requested: return default from YAML
    assert rsp_headers['Content-Language'] == 'en-US'

    # Check JSON response when requested with query parameter
    req = mock_request({'f': 'json'})
    rsp_headers, code, response = api_.describe_processes(req, 'hello-world')
    assert code == HTTPStatus.OK
    assert rsp_headers['Content-Type'] == FORMAT_TYPES[F_JSON]
    assert rsp_headers['Content-Language'] == 'en-US'

    # Check JSON response when requested with French language parameter
    req = mock_request({'lang': 'fr'})
    rsp_headers, code, response = api_.describe_processes(req, 'hello-world')
    assert code == HTTPStatus.OK
    assert rsp_headers['Content-Type'] == FORMAT_TYPES[F_JSON]
    assert rsp_headers['Content-Language'] == 'fr-CA'
    process = json.loads(response)
    assert process['title'] == 'Bonjour le Monde'

    # Check JSON response when language requested in headers
    req = mock_request(HTTP_ACCEPT_LANGUAGE='fr')
    rsp_headers, code, response = api_.describe_processes(req, 'hello-world')
    assert code == HTTPStatus.OK
    assert rsp_headers['Content-Type'] == FORMAT_TYPES[F_JSON]
    assert rsp_headers['Content-Language'] == 'fr-CA'

    # Test for undefined process
    req = mock_request()
    rsp_headers, code, response = api_.describe_processes(req, 'goodbye-world')
    data = json.loads(response)
    assert code == HTTPStatus.NOT_FOUND
    assert data['code'] == 'NoSuchProcess'
    assert rsp_headers['Content-Type'] == FORMAT_TYPES[F_JSON]

    # Test describe doesn't crash if example is missing
    req = mock_request()
    processor = api_.manager.get_processor("hello-world")
    # Temporarily remove the example, then restore it for other tests
    example = processor.metadata.pop("example")
    rsp_headers, code, response = api_.describe_processes(req)
    processor.metadata['example'] = example
    data = json.loads(response)
    assert code == HTTPStatus.OK
    assert len(data['processes']) == 2
+
+
def test_execute_process(config, api_):
    """
    Exercise API.execute_process() against the 'hello-world' process:
    empty/invalid payloads, raw vs. 'document' response styles, unknown
    process ids, failing inputs, and async execution via the HTTP Prefer
    header.  Every job created here is recorded in cleanup_jobs and
    deleted at the end of the test.
    """
    # valid minimal payload
    req_body_0 = {
        'inputs': {
            'name': 'Test'
        }
    }
    # valid payload requesting the 'document' response style
    req_body_1 = {
        'inputs': {
            'name': 'Test'
        },
        'response': 'document'
    }
    # non-ASCII input value
    req_body_2 = {
        'inputs': {
            'name': 'Tést'
        }
    }
    # both inputs supplied
    req_body_3 = {
        'inputs': {
            'name': 'Tést',
            'message': 'This is a test.'
        }
    }
    # unknown input key
    req_body_4 = {
        'inputs': {
            'foo': 'Tést'
        }
    }
    # no inputs at all
    req_body_5 = {
        'inputs': {}
    }
    # required input explicitly None
    req_body_6 = {
        'inputs': {
            'name': None
        }
    }

    # (process id, job id) pairs to delete at the end of the test
    cleanup_jobs = set()

    # Test posting empty payload to existing process
    req = mock_request(data='')
    rsp_headers, code, response = api_.execute_process(req, 'hello-world')
    assert rsp_headers['Content-Language'] == 'en-US'

    data = json.loads(response)
    assert code == HTTPStatus.BAD_REQUEST
    assert 'Location' not in rsp_headers
    assert data['code'] == 'MissingParameterValue'

    # unknown process id
    req = mock_request(data=req_body_0)
    rsp_headers, code, response = api_.execute_process(req, 'foo')

    data = json.loads(response)
    assert code == HTTPStatus.NOT_FOUND
    assert 'Location' not in rsp_headers
    assert data['code'] == 'NoSuchProcess'

    # successful sync execution, default (raw) response style
    rsp_headers, code, response = api_.execute_process(req, 'hello-world')

    data = json.loads(response)
    assert code == HTTPStatus.OK
    assert 'Location' in rsp_headers

    assert len(data.keys()) == 2
    assert data['id'] == 'echo'
    assert data['value'] == 'Hello Test!'

    cleanup_jobs.add(tuple(['hello-world',
                            rsp_headers['Location'].split('/')[-1]]))

    # 'document' response style wraps outputs in a list
    req = mock_request(data=req_body_1)
    rsp_headers, code, response = api_.execute_process(req, 'hello-world')

    data = json.loads(response)
    assert code == HTTPStatus.OK
    assert 'Location' in rsp_headers

    assert len(data.keys()) == 1
    assert data['outputs'][0]['id'] == 'echo'
    assert data['outputs'][0]['value'] == 'Hello Test!'

    cleanup_jobs.add(tuple(['hello-world',
                            rsp_headers['Location'].split('/')[-1]]))

    # non-ASCII input round-trips intact
    req = mock_request(data=req_body_2)
    rsp_headers, code, response = api_.execute_process(req, 'hello-world')

    data = json.loads(response)
    assert code == HTTPStatus.OK
    assert 'Location' in rsp_headers
    assert data['value'] == 'Hello Tést!'

    cleanup_jobs.add(tuple(['hello-world',
                            rsp_headers['Location'].split('/')[-1]]))

    # optional 'message' input is appended to the echo output
    req = mock_request(data=req_body_3)
    rsp_headers, code, response = api_.execute_process(req, 'hello-world')

    data = json.loads(response)
    assert code == HTTPStatus.OK
    assert 'Location' in rsp_headers
    assert data['value'] == 'Hello Tést! This is a test.'

    cleanup_jobs.add(tuple(['hello-world',
                            rsp_headers['Location'].split('/')[-1]]))

    # unknown input key: HTTP 200 but the job itself fails
    req = mock_request(data=req_body_4)
    rsp_headers, code, response = api_.execute_process(req, 'hello-world')

    data = json.loads(response)
    assert code == HTTPStatus.OK
    assert 'Location' in rsp_headers
    assert data['code'] == 'InvalidParameterValue'
    cleanup_jobs.add(tuple(['hello-world',
                            rsp_headers['Location'].split('/')[-1]]))

    # empty inputs: job fails while updating
    req = mock_request(data=req_body_5)
    rsp_headers, code, response = api_.execute_process(req, 'hello-world')
    data = json.loads(response)
    assert code == HTTPStatus.OK
    assert 'Location' in rsp_headers
    assert data['code'] == 'InvalidParameterValue'
    assert data['description'] == 'Error updating job'

    cleanup_jobs.add(tuple(['hello-world',
                            rsp_headers['Location'].split('/')[-1]]))

    # required input set to None behaves like empty inputs
    req = mock_request(data=req_body_6)
    rsp_headers, code, response = api_.execute_process(req, 'hello-world')

    data = json.loads(response)
    assert code == HTTPStatus.OK
    assert 'Location' in rsp_headers
    assert data['code'] == 'InvalidParameterValue'
    assert data['description'] == 'Error updating job'

    cleanup_jobs.add(tuple(['hello-world',
                            rsp_headers['Location'].split('/')[-1]]))

    # another unknown process id
    req = mock_request(data=req_body_0)
    rsp_headers, code, response = api_.execute_process(req, 'goodbye-world')

    response = json.loads(response)
    assert code == HTTPStatus.NOT_FOUND
    assert 'Location' not in rsp_headers
    assert response['code'] == 'NoSuchProcess'

    rsp_headers, code, response = api_.execute_process(req, 'hello-world')

    response = json.loads(response)
    assert code == HTTPStatus.OK

    cleanup_jobs.add(tuple(['hello-world',
                            rsp_headers['Location'].split('/')[-1]]))

    # async execution via the HTTP Prefer header returns 201 immediately
    req = mock_request(data=req_body_1, HTTP_Prefer='respond-async')
    rsp_headers, code, response = api_.execute_process(req, 'hello-world')

    assert 'Location' in rsp_headers
    response = json.loads(response)
    assert isinstance(response, dict)
    assert code == HTTPStatus.CREATED

    cleanup_jobs.add(tuple(['hello-world',
                            rsp_headers['Location'].split('/')[-1]]))

    # Cleanup
    time.sleep(2)  # Allow time for any outstanding async jobs
    for _, job_id in cleanup_jobs:
        rsp_headers, code, response = api_.delete_job(mock_request(), job_id)
        assert code == HTTPStatus.OK
+
+
def _execute_a_job(api_):
    """Run a synchronous 'hello-world' job and return its job id."""
    payload = {'inputs': {'name': 'Sync Test'}}

    headers, status, body = api_.execute_process(
        mock_request(data=payload), 'hello-world')

    # the job must have completed successfully before we hand back its id
    result = json.loads(body)
    assert status == HTTPStatus.OK
    assert 'Location' in headers
    assert result['value'] == 'Hello Sync Test!'

    return headers['Location'].split('/')[-1]
+
+
def test_delete_job(api_):
    """Jobs are deletable exactly once; unknown job ids yield 404."""
    # deleting a job that never existed
    rsp_headers, code, response = api_.delete_job(
        mock_request(), 'does-not-exist')
    assert code == HTTPStatus.NOT_FOUND

    # a synchronously executed job can be deleted once, then is gone
    job_id = _execute_a_job(api_)
    rsp_headers, code, response = api_.delete_job(mock_request(), job_id)
    assert code == HTTPStatus.OK

    rsp_headers, code, response = api_.delete_job(mock_request(), job_id)
    assert code == HTTPStatus.NOT_FOUND

    # same lifecycle for an asynchronously executed job
    async_payload = {
        'inputs': {
            'name': 'Async Test Deletion'
        }
    }
    req = mock_request(data=async_payload, HTTP_Prefer='respond-async')
    rsp_headers, code, response = api_.execute_process(req, 'hello-world')

    assert code == HTTPStatus.CREATED
    assert 'Location' in rsp_headers

    time.sleep(2)  # Allow time for async execution to complete
    job_id = rsp_headers['Location'].split('/')[-1]
    rsp_headers, code, response = api_.delete_job(mock_request(), job_id)
    assert code == HTTPStatus.OK

    rsp_headers, code, response = api_.delete_job(mock_request(), job_id)
    assert code == HTTPStatus.NOT_FOUND
+
+
def test_get_job_result(api_):
    """Job results are retrievable as HTML (default) and as JSON."""
    # unknown job id
    rsp_headers, code, response = api_.get_job_result(
        mock_request(), 'not-exist')
    assert code == HTTPStatus.NOT_FOUND

    job_id = _execute_a_job(api_)

    # HTML is the default representation
    rsp_headers, code, response = api_.get_job_result(mock_request(), job_id)
    assert code == HTTPStatus.OK
    assert rsp_headers['Content-Type'] == 'text/html'
    assert 'Hello Sync Test!' in response

    # explicit JSON via the f query parameter
    rsp_headers, code, response = api_.get_job_result(
        mock_request({'f': 'json'}), job_id)
    assert code == HTTPStatus.OK
    assert rsp_headers['Content-Type'] == 'application/json'
    assert json.loads(response)['value'] == 'Hello Sync Test!'
+
+
def test_get_collection_edr_query(config, api_):
    """
    Exercise API.get_collection_edr_query() position and cube queries
    against the 'icoads-sst' and 'usgs-prism' EDR collections: parameter
    validation, datetime filtering (instants, bounded and open-ended
    ranges) and the structure of the CoverageJSON response.
    """
    # edr resource
    req = mock_request()
    rsp_headers, code, response = api_.describe_collections(req, 'icoads-sst')
    collection = json.loads(response)
    parameter_names = list(collection['parameter_names'].keys())
    parameter_names.sort()
    assert len(parameter_names) == 4
    assert parameter_names == ['AIRT', 'SST', 'UWND', 'VWND']

    # no coords parameter
    rsp_headers, code, response = api_.get_collection_edr_query(
        req, 'icoads-sst', None, 'position')
    assert code == HTTPStatus.BAD_REQUEST

    # bad query type
    req = mock_request({'coords': 'POINT(11 11)'})
    rsp_headers, code, response = api_.get_collection_edr_query(
        req, 'icoads-sst', None, 'corridor')
    assert code == HTTPStatus.BAD_REQUEST

    # bad coords parameter
    req = mock_request({'coords': 'gah'})
    rsp_headers, code, response = api_.get_collection_edr_query(
        req, 'icoads-sst', None, 'position')
    assert code == HTTPStatus.BAD_REQUEST

    # bad parameter_names parameter
    req = mock_request({
        'coords': 'POINT(11 11)', 'parameter_names': 'bad'
    })
    rsp_headers, code, response = api_.get_collection_edr_query(
        req, 'icoads-sst', None, 'position')
    assert code == HTTPStatus.BAD_REQUEST

    # all parameters
    req = mock_request({'coords': 'POINT(11 11)'})
    rsp_headers, code, response = api_.get_collection_edr_query(
        req, 'icoads-sst', None, 'position')
    assert code == HTTPStatus.OK

    data = json.loads(response)

    # CoverageJSON domain axes for a point query: TIME plus x/y
    axes = list(data['domain']['axes'].keys())
    axes.sort()
    assert len(axes) == 3
    assert axes == ['TIME', 'x', 'y']

    assert data['domain']['axes']['x']['start'] == 11.0
    assert data['domain']['axes']['x']['stop'] == 11.0
    assert data['domain']['axes']['y']['start'] == 11.0
    assert data['domain']['axes']['y']['stop'] == 11.0

    parameters = list(data['parameters'].keys())
    parameters.sort()
    assert len(parameters) == 4
    assert parameters == ['AIRT', 'SST', 'UWND', 'VWND']

    # single parameter
    req = mock_request({
        'coords': 'POINT(11 11)', 'parameter_names': 'SST'
    })
    rsp_headers, code, response = api_.get_collection_edr_query(
        req, 'icoads-sst', None, 'position')
    assert code == HTTPStatus.OK

    data = json.loads(response)

    assert len(data['parameters'].keys()) == 1
    assert list(data['parameters'].keys())[0] == 'SST'

    # Zulu time zone
    req = mock_request({
        'coords': 'POINT(11 11)',
        'datetime': '2000-01-17T00:00:00Z/2000-06-16T23:00:00Z'
    })
    rsp_headers, code, response = api_.get_collection_edr_query(
        req, 'icoads-sst', None, 'position')
    assert code == HTTPStatus.OK

    # bounded date range
    req = mock_request({
        'coords': 'POINT(11 11)',
        'datetime': '2000-01-17/2000-06-16'
    })
    rsp_headers, code, response = api_.get_collection_edr_query(
        req, 'icoads-sst', None, 'position')
    assert code == HTTPStatus.OK

    data = json.loads(response)
    time_dict = data['domain']['axes']['TIME']

    # expected values come from the test dataset's time steps
    assert time_dict['start'] == '2000-02-15T16:29:05.999999999'
    assert time_dict['stop'] == '2000-06-16T10:25:30.000000000'
    assert time_dict['num'] == 5

    # unbounded date range - start
    req = mock_request({
        'coords': 'POINT(11 11)',
        'datetime': '../2000-06-16'
    })
    rsp_headers, code, response = api_.get_collection_edr_query(
        req, 'icoads-sst', None, 'position')
    assert code == HTTPStatus.OK

    data = json.loads(response)
    time_dict = data['domain']['axes']['TIME']

    assert time_dict['start'] == '2000-01-16T06:00:00.000000000'
    assert time_dict['stop'] == '2000-06-16T10:25:30.000000000'
    assert time_dict['num'] == 6

    # unbounded date range - end
    req = mock_request({
        'coords': 'POINT(11 11)',
        'datetime': '2000-06-16/..'
    })
    rsp_headers, code, response = api_.get_collection_edr_query(
        req, 'icoads-sst', None, 'position')
    assert code == HTTPStatus.OK

    data = json.loads(response)
    time_dict = data['domain']['axes']['TIME']

    assert time_dict['start'] == '2000-06-16T10:25:30.000000000'
    assert time_dict['stop'] == '2000-12-16T01:20:05.999999996'
    assert time_dict['num'] == 7

    # some data
    req = mock_request({
        'coords': 'POINT(11 11)', 'datetime': '2000-01-16'
    })
    rsp_headers, code, response = api_.get_collection_edr_query(
        req, 'icoads-sst', None, 'position')
    assert code == HTTPStatus.OK

    # no data
    req = mock_request({
        'coords': 'POINT(11 11)', 'datetime': '2000-01-17'
    })
    rsp_headers, code, response = api_.get_collection_edr_query(
        req, 'icoads-sst', None, 'position')
    assert code == HTTPStatus.NO_CONTENT

    # position no coords
    req = mock_request({
        'datetime': '2000-01-17'
    })
    rsp_headers, code, response = api_.get_collection_edr_query(
        req, 'icoads-sst', None, 'position')
    assert code == HTTPStatus.BAD_REQUEST

    # cube bbox parameter 4 dimensional
    req = mock_request({
        'bbox': '0,0,10,10'
    })
    rsp_headers, code, response = api_.get_collection_edr_query(
        req, 'icoads-sst', None, 'cube')
    assert code == HTTPStatus.OK

    # cube bad bbox parameter
    req = mock_request({
        'bbox': '0,0,10'
    })
    rsp_headers, code, response = api_.get_collection_edr_query(
        req, 'icoads-sst', None, 'cube')
    assert code == HTTPStatus.BAD_REQUEST

    # cube no bbox parameter
    req = mock_request({})
    rsp_headers, code, response = api_.get_collection_edr_query(
        req, 'icoads-sst', None, 'cube')
    assert code == HTTPStatus.BAD_REQUEST

    # cube decreasing latitude coords and S3
    req = mock_request({
        'bbox': '-100,40,-99,45',
        'parameter_names': 'tmn',
        'datetime': '1994-01-01/1994-12-31',
    })

    rsp_headers, code, response = api_.get_collection_edr_query(
        req, 'usgs-prism', None, 'cube')
    assert code == HTTPStatus.OK
+
+
def test_validate_bbox():
    """validate_bbox() parses 4/6-element bbox strings, rejects bad ones."""
    # integer and float coordinates, 2D and 3D
    assert validate_bbox('1,2,3,4') == [1, 2, 3, 4]
    assert validate_bbox('1,2,3,4,5,6') == [1, 2, 3, 4, 5, 6]
    assert validate_bbox('-142,42,-52,84') == [-142, 42, -52, 84]

    expected_2d = [-142.1, 42.12, -52.22, 84.4]
    assert validate_bbox('-142.1,42.12,-52.22,84.4') == expected_2d

    expected_3d = [-142.1, 42.12, -5.28, -52.22, 84.4, 7.39]
    assert validate_bbox('-142.1,42.12,-5.28,-52.22,84.4,7.39') == expected_3d

    # a bbox crossing the antimeridian (minx > maxx) is allowed
    expected_am = [177.0, 65.0, -177.0, 70.0]
    assert validate_bbox('177.0,65.0,-177.0,70.0') == expected_am

    # wrong element counts and inverted y/z axes all raise
    for bad_bbox in ('1,2,4', '1,2,4,5,6', '3,4,1,2', '1,2,6,4,5,3'):
        with pytest.raises(ValueError):
            validate_bbox(bad_bbox)
+
+
def test_validate_datetime():
    """
    validate_datetime() accepts time instants and envelopes within the
    configured temporal extent, normalizes open-ended envelopes
    ('2004/' -> '2004/..', '/2005' -> '../2005') and raises ValueError
    for values outside the extent.

    Fix: removed the two envelope assertions that were exact copy-paste
    duplicates of earlier ones ('2004-10/2005-10' and
    '2001-10-30/2002-10-30' were each asserted twice).
    """
    config = yaml_load('''
    temporal:
        begin: 2000-10-30T18:24:39Z
        end: 2007-10-30T08:57:29Z
    ''')

    # test time instant
    assert validate_datetime(config, '2004') == '2004'
    assert validate_datetime(config, '2004-10') == '2004-10'
    assert validate_datetime(config, '2001-10-30') == '2001-10-30'

    # instants outside the extent are rejected
    with pytest.raises(ValueError):
        _ = validate_datetime(config, '2009-10-30')
    with pytest.raises(ValueError):
        _ = validate_datetime(config, '2000-09-09')
    with pytest.raises(ValueError):
        _ = validate_datetime(config, '2000-10-30T17:24:39Z')
    with pytest.raises(ValueError):
        _ = validate_datetime(config, '2007-10-30T08:58:29Z')

    # test time envelope
    assert validate_datetime(config, '2004/2005') == '2004/2005'
    assert validate_datetime(config, '2004-10/2005-10') == '2004-10/2005-10'
    assert (validate_datetime(config, '2001-10-30/2002-10-30') ==
            '2001-10-30/2002-10-30')
    assert validate_datetime(config, '2004/..') == '2004/..'
    assert validate_datetime(config, '../2005') == '../2005'
    # open-ended envelopes are normalized to the '..' form
    assert validate_datetime(config, '2004/') == '2004/..'
    assert validate_datetime(config, '/2005') == '../2005'

    # envelopes extending outside the extent are rejected
    with pytest.raises(ValueError):
        _ = validate_datetime(config, '2007-11-01/..')
    with pytest.raises(ValueError):
        _ = validate_datetime(config, '2009/..')
    with pytest.raises(ValueError):
        _ = validate_datetime(config, '../2000-09')
    with pytest.raises(ValueError):
        _ = validate_datetime(config, '../1999')
+
+
@pytest.mark.parametrize("value, expected", [
    ('time(2000-11-11)', {'time': ['2000-11-11']}),
    ('time("2000-11-11")', {'time': ['2000-11-11']}),
    ('time("2000-11-11T00:11:11")', {'time': ['2000-11-11T00:11:11']}),
    ('time("2000-11-11T11:12:13":"2021-12-22T:13:33:33")', {'time': ['2000-11-11T11:12:13', '2021-12-22T:13:33:33']}),  # noqa
    ('lat(40)', {'lat': [40]}),
    ('lat(0:40)', {'lat': [0, 40]}),
    ('foo("bar")', {'foo': ['bar']}),
    ('foo("bar":"baz")', {'foo': ['bar', 'baz']})
])
def test_validate_subset(value, expected):
    """validate_subset() parses a subset expression into a name -> values dict,
    handling bare values, quoted values and low:high ranges."""
    assert validate_subset(value) == expected

    # NOTE(review): this negative check runs once per parametrized case;
    # an unterminated quote must always raise
    with pytest.raises(ValueError):
        validate_subset('foo("bar)')
+
+
+def test_get_exception(config, api_):
+ d = api_.get_exception(500, {}, 'json', 'NoApplicableCode', 'oops')
+ assert d[0] == {}
+ assert d[1] == 500
+ content = json.loads(d[2])
+ assert content['code'] == 'NoApplicableCode'
+ assert content['description'] == 'oops'
+
+ d = api_.get_exception(500, {}, 'html', 'NoApplicableCode', 'oops')
+ assert d[0] == {'Content-Type': 'text/html'}