From 568b1cf83828e6717f630b6b5b8fbbd94c80a6e4 Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Wed, 31 Jan 2024 18:17:02 +0800 Subject: [PATCH 01/33] create three packages scratch --- Dockerfile.dev | 1 + stac_fastapi/core/README.md | 0 stac_fastapi/core/setup.cfg | 2 + stac_fastapi/core/setup.py | 62 ++ .../core/stac_fastapi/core/__init__.py | 1 + .../stac_fastapi/core/base_database_logic.py | 54 ++ .../core/stac_fastapi/core/base_settings.py | 12 + stac_fastapi/core/stac_fastapi/core/core.py | 855 ++++++++++++++++++ .../core/stac_fastapi/core/models/__init__.py | 1 + .../core/stac_fastapi/core/models/links.py | 138 +++ .../core/stac_fastapi/core/models/search.py | 1 + .../core/stac_fastapi/core/serializers.py | 162 ++++ .../core/stac_fastapi/core/session.py | 25 + .../core/stac_fastapi/core/version.py | 2 + .../stac_fastapi/elasticsearch/app.py | 30 +- 15 files changed, 1339 insertions(+), 7 deletions(-) create mode 100644 stac_fastapi/core/README.md create mode 100644 stac_fastapi/core/setup.cfg create mode 100644 stac_fastapi/core/setup.py create mode 100644 stac_fastapi/core/stac_fastapi/core/__init__.py create mode 100644 stac_fastapi/core/stac_fastapi/core/base_database_logic.py create mode 100644 stac_fastapi/core/stac_fastapi/core/base_settings.py create mode 100644 stac_fastapi/core/stac_fastapi/core/core.py create mode 100644 stac_fastapi/core/stac_fastapi/core/models/__init__.py create mode 100644 stac_fastapi/core/stac_fastapi/core/models/links.py create mode 100644 stac_fastapi/core/stac_fastapi/core/models/search.py create mode 100644 stac_fastapi/core/stac_fastapi/core/serializers.py create mode 100644 stac_fastapi/core/stac_fastapi/core/session.py create mode 100644 stac_fastapi/core/stac_fastapi/core/version.py diff --git a/Dockerfile.dev b/Dockerfile.dev index 4e2f0f4b..a4248d39 100644 --- a/Dockerfile.dev +++ b/Dockerfile.dev @@ -15,4 +15,5 @@ WORKDIR /app COPY . 
"""stac_fastapi: core module.

Packaging script for ``stac-fastapi.core``, the shared library used by the
Elasticsearch and Opensearch stac-fastapi backends.
"""

from setuptools import find_namespace_packages, setup

# Long description comes from the package README; be explicit about the
# encoding so the build does not depend on the platform default.
with open("README.md", encoding="utf-8") as f:
    desc = f.read()

install_requires = [
    "fastapi",
    "attrs",
    "pydantic[dotenv]<2",
    "stac_pydantic==2.0.*",
    "stac-fastapi.types==2.4.9",
    "stac-fastapi.api==2.4.9",
    "stac-fastapi.extensions==2.4.9",
    "pystac[validation]",
    "orjson",
    "overrides",
    "geojson-pydantic",
    "pygeofilter==0.2.1",
]

setup(
    name="stac-fastapi.core",
    description="Core library for the Elasticsearch and Opensearch stac-fastapi backends.",
    long_description=desc,
    long_description_content_type="text/markdown",
    python_requires=">=3.8",
    classifiers=[
        "Intended Audience :: Developers",
        "Intended Audience :: Information Technology",
        "Intended Audience :: Science/Research",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: 3.11",
        "License :: OSI Approved :: MIT License",
    ],
    url="https://github.com/stac-utils/stac-fastapi-elasticsearch",
    license="MIT",
    packages=find_namespace_packages(),
    zip_safe=False,
    install_requires=install_requires,
)
+ """ + + @abc.abstractmethod + async def get_all_collections( + self, token: Optional[str], limit: int + ) -> Iterable[Dict[str, Any]]: + """Retrieve a list of all collections from the database.""" + pass + + @abc.abstractmethod + async def get_one_item(self, collection_id: str, item_id: str) -> Dict: + """Retrieve a single item from the database.""" + pass + + @abc.abstractmethod + async def create_item(self, item: Dict, refresh: bool = False) -> None: + """Create an item in the database.""" + pass + + @abc.abstractmethod + async def delete_item( + self, item_id: str, collection_id: str, refresh: bool = False + ) -> None: + """Delete an item from the database.""" + pass + + @abc.abstractmethod + async def create_collection(self, collection: Dict, refresh: bool = False) -> None: + """Create a collection in the database.""" + pass + + @abc.abstractmethod + async def find_collection(self, collection_id: str) -> Dict: + """Find a collection in the database.""" + pass + + @abc.abstractmethod + async def delete_collection( + self, collection_id: str, refresh: bool = False + ) -> None: + """Delete a collection from the database.""" + pass diff --git a/stac_fastapi/core/stac_fastapi/core/base_settings.py b/stac_fastapi/core/stac_fastapi/core/base_settings.py new file mode 100644 index 00000000..f30d07a4 --- /dev/null +++ b/stac_fastapi/core/stac_fastapi/core/base_settings.py @@ -0,0 +1,12 @@ +"""Base settings.""" + +from abc import ABC, abstractmethod + + +class ApiBaseSettings(ABC): + """Abstract base class for API settings.""" + + @abstractmethod + def create_client(self): + """Create a database client.""" + pass diff --git a/stac_fastapi/core/stac_fastapi/core/core.py b/stac_fastapi/core/stac_fastapi/core/core.py new file mode 100644 index 00000000..ffa81318 --- /dev/null +++ b/stac_fastapi/core/stac_fastapi/core/core.py @@ -0,0 +1,855 @@ +"""Item crud client.""" +import logging +import re +from base64 import urlsafe_b64encode +from datetime import datetime as 
"""Item crud client."""
import logging
import re
from base64 import urlsafe_b64encode
from datetime import datetime as datetime_type
from datetime import timezone
from typing import Any, Dict, List, Optional, Set, Type, Union
from urllib.parse import unquote_plus, urljoin

import attr
import orjson
import stac_pydantic
from fastapi import HTTPException, Request
from overrides import overrides
from pydantic import ValidationError
from pygeofilter.backends.cql2_json import to_cql2
from pygeofilter.parsers.cql2_text import parse as parse_cql2_text
from stac_pydantic.links import Relations
from stac_pydantic.shared import MimeTypes

from stac_fastapi.core.base_database_logic import BaseDatabaseLogic
from stac_fastapi.core.base_settings import ApiBaseSettings
from stac_fastapi.core.models.links import PagingLinks
from stac_fastapi.core.serializers import CollectionSerializer, ItemSerializer
from stac_fastapi.core.session import Session
from stac_fastapi.extensions.third_party.bulk_transactions import (
    BaseBulkTransactionsClient,
    BulkTransactionMethod,
    Items,
)
from stac_fastapi.types import stac as stac_types
from stac_fastapi.types.config import Settings
from stac_fastapi.types.core import (
    AsyncBaseCoreClient,
    AsyncBaseFiltersClient,
    AsyncBaseTransactionsClient,
)
from stac_fastapi.types.links import CollectionLinks
from stac_fastapi.types.search import BaseSearchPostRequest
from stac_fastapi.types.stac import Collection, Collections, Item, ItemCollection

logger = logging.getLogger(__name__)

NumType = Union[float, int]


@attr.s
class CoreClient(AsyncBaseCoreClient):
    """Client for core endpoints defined by the STAC specification.

    This class is an implementation of `AsyncBaseCoreClient` that implements the core endpoints
    defined by the STAC specification. It uses the injected database logic to interact with the
    database, and `ItemSerializer` and `CollectionSerializer` to convert between STAC objects and
    database records.

    Attributes:
        session (Session): A requests session instance to be used for all HTTP requests.
        item_serializer (Type[ItemSerializer]): A serializer class to be used to convert
            between STAC items and database records.
        collection_serializer (Type[CollectionSerializer]): A serializer class to be
            used to convert between STAC collections and database records.
        database (BaseDatabaseLogic): An instance of the database logic that is used to interact
            with the database.
    """

    database: BaseDatabaseLogic = attr.ib()

    session: Session = attr.ib(default=attr.Factory(Session.create_from_env))
    item_serializer: Type[ItemSerializer] = attr.ib(default=ItemSerializer)
    collection_serializer: Type[CollectionSerializer] = attr.ib(
        default=CollectionSerializer
    )

    @overrides
    async def all_collections(self, **kwargs) -> Collections:
        """Read all collections from the database.

        Returns:
            Collections: A `Collections` object containing all the collections in the database and
                links to various resources.

        Raises:
            Exception: If any error occurs while reading the collections from the database.
        """
        request: Request = kwargs["request"]
        base_url = str(request.base_url)

        limit = (
            int(request.query_params["limit"])
            if "limit" in request.query_params
            else 10
        )
        token = (
            request.query_params["token"] if "token" in request.query_params else None
        )

        hits = await self.database.get_all_collections(limit=limit, token=token)

        next_link = None
        # A full page implies there may be more results: encode the last hit's
        # sort values as an opaque pagination token.
        if len(hits) == limit:
            last_hit = hits[-1]
            next_search_after = last_hit["sort"]
            next_token = urlsafe_b64encode(
                ",".join(map(str, next_search_after)).encode()
            ).decode()
            paging_links = PagingLinks(next=next_token, request=request)
            next_link = paging_links.link_next()

        links = [
            {
                "rel": Relations.root.value,
                "type": MimeTypes.json,
                "href": base_url,
            },
            {
                "rel": Relations.parent.value,
                "type": MimeTypes.json,
                "href": base_url,
            },
            {
                "rel": Relations.self.value,
                "type": MimeTypes.json,
                "href": urljoin(base_url, "collections"),
            },
        ]

        if next_link:
            links.append(next_link)

        return Collections(
            collections=[
                self.collection_serializer.db_to_stac(c["_source"], base_url=base_url)
                for c in hits
            ],
            links=links,
        )

    @overrides
    async def get_collection(self, collection_id: str, **kwargs) -> Collection:
        """Get a collection from the database by its id.

        Args:
            collection_id (str): The id of the collection to retrieve.
            kwargs: Additional keyword arguments passed to the API call.

        Returns:
            Collection: A `Collection` object representing the requested collection.

        Raises:
            NotFoundError: If the collection with the given id cannot be found in the database.
        """
        base_url = str(kwargs["request"].base_url)
        collection = await self.database.find_collection(collection_id=collection_id)
        return self.collection_serializer.db_to_stac(collection, base_url)

    @overrides
    async def item_collection(
        self,
        collection_id: str,
        bbox: Optional[List[NumType]] = None,
        datetime: Union[str, datetime_type, None] = None,
        limit: int = 10,
        token: Optional[str] = None,
        **kwargs,
    ) -> ItemCollection:
        """Read items from a specific collection in the database.

        Args:
            collection_id (str): The identifier of the collection to read items from.
            bbox (Optional[List[NumType]]): The bounding box to filter items by.
            datetime (Union[str, datetime_type, None]): The datetime range to filter items by.
            limit (int): The maximum number of items to return. The default value is 10.
            token (Optional[str]): A token used for pagination.

        Returns:
            ItemCollection: An `ItemCollection` object containing the items from the specified collection that meet
                the filter criteria and links to various resources.

        Raises:
            HTTPException: If the specified collection is not found.
            Exception: If any error occurs while reading the items from the database.
        """
        request: Request = kwargs["request"]
        base_url = str(request.base_url)

        collection = await self.get_collection(
            collection_id=collection_id, request=request
        )
        collection_id = collection.get("id")
        if collection_id is None:
            raise HTTPException(status_code=404, detail="Collection not found")

        search = self.database.make_search()
        search = self.database.apply_collections_filter(
            search=search, collection_ids=[collection_id]
        )

        if datetime:
            datetime_search = self._return_date(datetime)
            search = self.database.apply_datetime_filter(
                search=search, datetime_search=datetime_search
            )

        if bbox:
            bbox = [float(x) for x in bbox]
            # A 6-element bbox carries elevation; drop the z components.
            if len(bbox) == 6:
                bbox = [bbox[0], bbox[1], bbox[3], bbox[4]]

            search = self.database.apply_bbox_filter(search=search, bbox=bbox)

        items, maybe_count, next_token = await self.database.execute_search(
            search=search,
            limit=limit,
            sort=None,
            token=token,  # type: ignore
            collection_ids=[collection_id],
        )

        items = [
            self.item_serializer.db_to_stac(item, base_url=base_url) for item in items
        ]

        context_obj = None
        if self.extension_is_enabled("ContextExtension"):
            context_obj = {
                "returned": len(items),
                "limit": limit,
            }
            if maybe_count is not None:
                context_obj["matched"] = maybe_count

        links = []
        if next_token:
            links = await PagingLinks(request=request, next=next_token).get_links()

        return ItemCollection(
            type="FeatureCollection",
            features=items,
            links=links,
            context=context_obj,
        )

    @overrides
    async def get_item(self, item_id: str, collection_id: str, **kwargs) -> Item:
        """Get an item from the database based on its id and collection id.

        Args:
            collection_id (str): The ID of the collection the item belongs to.
            item_id (str): The ID of the item to be retrieved.

        Returns:
            Item: An `Item` object representing the requested item.

        Raises:
            Exception: If any error occurs while getting the item from the database.
            NotFoundError: If the item does not exist in the specified collection.
        """
        base_url = str(kwargs["request"].base_url)
        item = await self.database.get_one_item(
            item_id=item_id, collection_id=collection_id
        )
        return self.item_serializer.db_to_stac(item, base_url)

    @staticmethod
    def _return_date(interval_str):
        """
        Convert a date interval string into a dictionary for filtering search results.

        The date interval string should be formatted as either a single date or a range of dates separated
        by "/". The date format should be ISO-8601 (YYYY-MM-DDTHH:MM:SSZ). If the interval string is a
        single date, it will be converted to a dictionary with a single "eq" key whose value is the date in
        the ISO-8601 format. If the interval string is a range of dates, it will be converted to a
        dictionary with "gte" (greater than or equal to) and "lte" (less than or equal to) keys. If an
        open-ended range uses "..", the open side is assigned a default value to encompass the entire
        possible date range.

        Args:
            interval_str (str): The date interval string to be converted.

        Returns:
            dict: A dictionary representing the date interval for use in filtering search results.
        """
        intervals = interval_str.split("/")
        if len(intervals) == 1:
            # Single instant: truncate to second precision and force UTC "Z".
            converted = f"{intervals[0][0:19]}Z"
            return {"eq": converted}
        else:
            start_date = intervals[0]
            end_date = intervals[1]
            if ".." not in intervals:
                start_date = f"{start_date[0:19]}Z"
                end_date = f"{end_date[0:19]}Z"
            elif start_date != "..":
                start_date = f"{start_date[0:19]}Z"
                end_date = "2200-12-01T12:31:12Z"
            elif end_date != "..":
                start_date = "1900-10-01T00:00:00Z"
                end_date = f"{end_date[0:19]}Z"
            else:
                start_date = "1900-10-01T00:00:00Z"
                end_date = "2200-12-01T12:31:12Z"

            return {"lte": end_date, "gte": start_date}

    async def get_search(
        self,
        request: Request,
        collections: Optional[List[str]] = None,
        ids: Optional[List[str]] = None,
        bbox: Optional[List[NumType]] = None,
        datetime: Optional[Union[str, datetime_type]] = None,
        limit: Optional[int] = 10,
        query: Optional[str] = None,
        token: Optional[str] = None,
        fields: Optional[List[str]] = None,
        sortby: Optional[str] = None,
        intersects: Optional[str] = None,
        filter: Optional[str] = None,
        filter_lang: Optional[str] = None,
        **kwargs,
    ) -> ItemCollection:
        """Get search results from the database.

        Args:
            collections (Optional[List[str]]): List of collection IDs to search in.
            ids (Optional[List[str]]): List of item IDs to search for.
            bbox (Optional[List[NumType]]): Bounding box to search in.
            datetime (Optional[Union[str, datetime_type]]): Filter items based on the datetime field.
            limit (Optional[int]): Maximum number of results to return.
            query (Optional[str]): Query string to filter the results.
            token (Optional[str]): Access token to use when searching the catalog.
            fields (Optional[List[str]]): Fields to include or exclude from the results.
            sortby (Optional[str]): Sorting options for the results.
            intersects (Optional[str]): GeoJSON geometry to search in.
            filter (Optional[str]): CQL2 filter expression (text or JSON).
            filter_lang (Optional[str]): The language of the ``filter`` expression.
            kwargs: Additional parameters to be passed to the API.

        Returns:
            ItemCollection: Collection of `Item` objects representing the search results.

        Raises:
            HTTPException: If any error occurs while searching the catalog.
        """
        base_args = {
            "collections": collections,
            "ids": ids,
            "bbox": bbox,
            "limit": limit,
            "token": token,
            "query": orjson.loads(query) if query else query,
        }

        # Borrowed from stac-fastapi-pgstac: the request factory does not
        # allow an alias for "filter-lang", so recover it from the raw
        # query string.
        query_params = str(request.query_params)
        if filter_lang is None:
            match = re.search(r"filter-lang=([a-z0-9-]+)", query_params, re.IGNORECASE)
            if match:
                filter_lang = match.group(1)

        if datetime:
            base_args["datetime"] = datetime

        if intersects:
            base_args["intersects"] = orjson.loads(unquote_plus(intersects))

        if sortby:
            # "-field" sorts descending; anything else ascending.
            base_args["sortby"] = [
                {
                    "field": sort[1:],
                    "direction": "desc" if sort[0] == "-" else "asc",
                }
                for sort in sortby
            ]

        if filter:
            if filter_lang == "cql2-json":
                base_args["filter-lang"] = "cql2-json"
                base_args["filter"] = orjson.loads(unquote_plus(filter))
            else:
                # cql2-text: translate to cql2-json before handing it on.
                base_args["filter-lang"] = "cql2-json"
                base_args["filter"] = orjson.loads(to_cql2(parse_cql2_text(filter)))

        if fields:
            includes = set()
            excludes = set()
            for field in fields:
                if field[0] == "-":
                    excludes.add(field[1:])
                elif field[0] == "+":
                    includes.add(field[1:])
                else:
                    includes.add(field)
            base_args["fields"] = {"include": includes, "exclude": excludes}

        # Do the request
        try:
            search_request = self.post_request_model(**base_args)
        except ValidationError:
            raise HTTPException(status_code=400, detail="Invalid parameters provided")
        resp = await self.post_search(search_request=search_request, request=request)

        return resp

    async def post_search(
        self, search_request: BaseSearchPostRequest, request: Request
    ) -> ItemCollection:
        """
        Perform a POST search on the catalog.

        Args:
            search_request (BaseSearchPostRequest): Request object that includes the parameters for the search.
            request (Request): The incoming request.

        Returns:
            ItemCollection: A collection of items matching the search criteria.

        Raises:
            HTTPException: If there is an error with the cql2_json filter.
        """
        base_url = str(request.base_url)

        search = self.database.make_search()

        if search_request.ids:
            search = self.database.apply_ids_filter(
                search=search, item_ids=search_request.ids
            )

        if search_request.collections:
            search = self.database.apply_collections_filter(
                search=search, collection_ids=search_request.collections
            )

        if search_request.datetime:
            datetime_search = self._return_date(search_request.datetime)
            search = self.database.apply_datetime_filter(
                search=search, datetime_search=datetime_search
            )

        if search_request.bbox:
            bbox = search_request.bbox
            # A 6-element bbox carries elevation; drop the z components.
            if len(bbox) == 6:
                bbox = [bbox[0], bbox[1], bbox[3], bbox[4]]

            search = self.database.apply_bbox_filter(search=search, bbox=bbox)

        if search_request.intersects:
            search = self.database.apply_intersects_filter(
                search=search, intersects=search_request.intersects
            )

        if search_request.query:
            for field_name, expr in search_request.query.items():
                field = "properties__" + field_name
                for op, value in expr.items():
                    search = self.database.apply_stacql_filter(
                        search=search, op=op, field=field, value=value
                    )

        # only cql2_json is supported here
        if hasattr(search_request, "filter"):
            cql2_filter = getattr(search_request, "filter", None)
            try:
                search = self.database.apply_cql2_filter(search, cql2_filter)
            except Exception as e:
                raise HTTPException(
                    status_code=400, detail=f"Error with cql2_json filter: {e}"
                )

        sort = None
        if search_request.sortby:
            sort = self.database.populate_sort(search_request.sortby)

        limit = 10
        if search_request.limit:
            limit = search_request.limit

        items, maybe_count, next_token = await self.database.execute_search(
            search=search,
            limit=limit,
            token=search_request.token,  # type: ignore
            sort=sort,
            collection_ids=search_request.collections,
        )

        items = [
            self.item_serializer.db_to_stac(item, base_url=base_url) for item in items
        ]

        if self.extension_is_enabled("FieldsExtension"):
            if search_request.query is not None:
                query_include: Set[str] = set(
                    [
                        k if k in Settings.get().indexed_fields else f"properties.{k}"
                        for k in search_request.query.keys()
                    ]
                )
                if not search_request.fields.include:
                    search_request.fields.include = query_include
                else:
                    # BUG FIX: set.union() returns a new set; the original
                    # code discarded its result, so queried fields were never
                    # merged into the includes. Update in place instead.
                    search_request.fields.include |= query_include

            filter_kwargs = search_request.fields.filter_fields

            items = [
                orjson.loads(
                    stac_pydantic.Item(**feat).json(**filter_kwargs, exclude_unset=True)
                )
                for feat in items
            ]

        context_obj = None
        if self.extension_is_enabled("ContextExtension"):
            context_obj = {
                "returned": len(items),
                "limit": limit,
            }
            if maybe_count is not None:
                context_obj["matched"] = maybe_count

        links = []
        if next_token:
            links = await PagingLinks(request=request, next=next_token).get_links()

        return ItemCollection(
            type="FeatureCollection",
            features=items,
            links=links,
            context=context_obj,
        )
+ + """ + base_url = str(kwargs["request"].base_url) + + # If a feature collection is posted + if item["type"] == "FeatureCollection": + bulk_client = BulkTransactionsClient( + database=self.database, settings=self.settings + ) + processed_items = [ + bulk_client.preprocess_item(item, base_url, BulkTransactionMethod.INSERT) for item in item["features"] # type: ignore + ] + + await self.database.bulk_async( + collection_id, processed_items, refresh=kwargs.get("refresh", False) + ) + + return None # type: ignore + else: + item = await self.database.prep_create_item(item=item, base_url=base_url) + await self.database.create_item(item, refresh=kwargs.get("refresh", False)) + return item + + @overrides + async def update_item( + self, collection_id: str, item_id: str, item: stac_types.Item, **kwargs + ) -> stac_types.Item: + """Update an item in the collection. + + Args: + collection_id (str): The ID of the collection the item belongs to. + item_id (str): The ID of the item to be updated. + item (stac_types.Item): The new item data. + kwargs: Other optional arguments, including the request object. + + Returns: + stac_types.Item: The updated item object. + + Raises: + NotFound: If the specified collection is not found in the database. + + """ + base_url = str(kwargs["request"].base_url) + now = datetime_type.now(timezone.utc).isoformat().replace("+00:00", "Z") + item["properties"]["updated"] = now + + await self.database.check_collection_exists(collection_id) + await self.delete_item(item_id=item_id, collection_id=collection_id) + await self.create_item(collection_id=collection_id, item=item, **kwargs) + + return ItemSerializer.db_to_stac(item, base_url) + + @overrides + async def delete_item( + self, item_id: str, collection_id: str, **kwargs + ) -> stac_types.Item: + """Delete an item from a collection. + + Args: + item_id (str): The identifier of the item to delete. + collection_id (str): The identifier of the collection that contains the item. 
+ + Returns: + Optional[stac_types.Item]: The deleted item, or `None` if the item was successfully deleted. + """ + await self.database.delete_item(item_id=item_id, collection_id=collection_id) + return None # type: ignore + + @overrides + async def create_collection( + self, collection: stac_types.Collection, **kwargs + ) -> stac_types.Collection: + """Create a new collection in the database. + + Args: + collection (stac_types.Collection): The collection to be created. + kwargs: Additional keyword arguments. + + Returns: + stac_types.Collection: The created collection object. + + Raises: + ConflictError: If the collection already exists. + """ + base_url = str(kwargs["request"].base_url) + collection_links = CollectionLinks( + collection_id=collection["id"], base_url=base_url + ).create_links() + collection["links"] = collection_links + await self.database.create_collection(collection=collection) + + return CollectionSerializer.db_to_stac(collection, base_url) + + @overrides + async def update_collection( + self, collection: stac_types.Collection, **kwargs + ) -> stac_types.Collection: + """ + Update a collection. + + This method updates an existing collection in the database by first finding + the collection by its id, then deleting the old version, and finally creating + a new version of the updated collection. The updated collection is then returned. + + Args: + collection: A STAC collection that needs to be updated. + kwargs: Additional keyword arguments. + + Returns: + A STAC collection that has been updated in the database. + + """ + base_url = str(kwargs["request"].base_url) + + await self.database.find_collection(collection_id=collection["id"]) + await self.delete_collection(collection["id"]) + await self.create_collection(collection, **kwargs) + + return CollectionSerializer.db_to_stac(collection, base_url) + + @overrides + async def delete_collection( + self, collection_id: str, **kwargs + ) -> stac_types.Collection: + """ + Delete a collection. 
+ + This method deletes an existing collection in the database. + + Args: + collection_id (str): The identifier of the collection that contains the item. + kwargs: Additional keyword arguments. + + Returns: + None. + + Raises: + NotFoundError: If the collection doesn't exist. + """ + await self.database.delete_collection(collection_id=collection_id) + return None # type: ignore + + +@attr.s +class BulkTransactionsClient(BaseBulkTransactionsClient): + """A client for posting bulk transactions to a Postgres database. + + Attributes: + session: An instance of `Session` to use for database connection. + database: An instance of `DatabaseLogic` to perform database operations. + """ + + database: BaseDatabaseLogic = attr.ib() + settings: ApiBaseSettings = attr.ib() + session: Session = attr.ib(default=attr.Factory(Session.create_from_env)) + # database = DatabaseLogic() + + def __attrs_post_init__(self): + """Create es engine.""" + # settings = BaseSettings() + self.client = self.settings.create_client + + def preprocess_item( + self, item: stac_types.Item, base_url, method: BulkTransactionMethod + ) -> stac_types.Item: + """Preprocess an item to match the data model. + + Args: + item: The item to preprocess. + base_url: The base URL of the request. + method: The bulk transaction method. + + Returns: + The preprocessed item. + """ + exist_ok = method == BulkTransactionMethod.UPSERT + return self.database.sync_prep_create_item( + item=item, base_url=base_url, exist_ok=exist_ok + ) + + @overrides + def bulk_item_insert( + self, items: Items, chunk_size: Optional[int] = None, **kwargs + ) -> str: + """Perform a bulk insertion of items into the database using Elasticsearch. + + Args: + items: The items to insert. + chunk_size: The size of each chunk for bulk processing. + **kwargs: Additional keyword arguments, such as `request` and `refresh`. + + Returns: + A string indicating the number of items successfully added. 
+ """ + request = kwargs.get("request") + if request: + base_url = str(request.base_url) + else: + base_url = "" + + processed_items = [ + self.preprocess_item(item, base_url, items.method) + for item in items.items.values() + ] + + # not a great way to get the collection_id-- should be part of the method signature + collection_id = processed_items[0]["collection"] + + self.database.bulk_sync( + collection_id, processed_items, refresh=kwargs.get("refresh", False) + ) + + return f"Successfully added {len(processed_items)} Items." + + +@attr.s +class EsAsyncBaseFiltersClient(AsyncBaseFiltersClient): + """Defines a pattern for implementing the STAC filter extension.""" + + # todo: use the ES _mapping endpoint to dynamically find what fields exist + async def get_queryables( + self, collection_id: Optional[str] = None, **kwargs + ) -> Dict[str, Any]: + """Get the queryables available for the given collection_id. + + If collection_id is None, returns the intersection of all + queryables over all collections. + + This base implementation returns a blank queryable schema. This is not allowed + under OGC CQL but it is allowed by the STAC API Filter Extension + + https://github.com/radiantearth/stac-api-spec/tree/master/fragments/filter#queryables + + Args: + collection_id (str, optional): The id of the collection to get queryables for. + **kwargs: additional keyword arguments + + Returns: + Dict[str, Any]: A dictionary containing the queryables for the given collection. 
+ """ + return { + "$schema": "https://json-schema.org/draft/2019-09/schema", + "$id": "https://stac-api.example.com/queryables", + "type": "object", + "title": "Queryables for Example STAC API", + "description": "Queryable names for the example STAC API Item Search filter.", + "properties": { + "id": { + "description": "ID", + "$ref": "https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/item.json#/definitions/core/allOf/2/properties/id", + }, + "collection": { + "description": "Collection", + "$ref": "https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/item.json#/definitions/core/allOf/2/then/properties/collection", + }, + "geometry": { + "description": "Geometry", + "$ref": "https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/item.json#/definitions/core/allOf/1/oneOf/0/properties/geometry", + }, + "datetime": { + "description": "Acquisition Timestamp", + "$ref": "https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/datetime.json#/properties/datetime", + }, + "created": { + "description": "Creation Timestamp", + "$ref": "https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/datetime.json#/properties/created", + }, + "updated": { + "description": "Creation Timestamp", + "$ref": "https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/datetime.json#/properties/updated", + }, + "cloud_cover": { + "description": "Cloud Cover", + "$ref": "https://stac-extensions.github.io/eo/v1.0.0/schema.json#/definitions/fields/properties/eo:cloud_cover", + }, + "cloud_shadow_percentage": { + "description": "Cloud Shadow Percentage", + "title": "Cloud Shadow Percentage", + "type": "number", + "minimum": 0, + "maximum": 100, + }, + "nodata_pixel_percentage": { + "description": "No Data Pixel Percentage", + "title": "No Data Pixel Percentage", + "type": "number", + "minimum": 0, + "maximum": 100, + }, + }, + "additionalProperties": True, + } diff --git a/stac_fastapi/core/stac_fastapi/core/models/__init__.py 
b/stac_fastapi/core/stac_fastapi/core/models/__init__.py new file mode 100644 index 00000000..d0748bcc --- /dev/null +++ b/stac_fastapi/core/stac_fastapi/core/models/__init__.py @@ -0,0 +1 @@ +"""stac_fastapi.elasticsearch.models module.""" diff --git a/stac_fastapi/core/stac_fastapi/core/models/links.py b/stac_fastapi/core/stac_fastapi/core/models/links.py new file mode 100644 index 00000000..3941a149 --- /dev/null +++ b/stac_fastapi/core/stac_fastapi/core/models/links.py @@ -0,0 +1,138 @@ +"""link helpers.""" + +from typing import Any, Dict, List, Optional +from urllib.parse import ParseResult, parse_qs, unquote, urlencode, urljoin, urlparse + +import attr +from stac_pydantic.links import Relations +from stac_pydantic.shared import MimeTypes +from starlette.requests import Request + +# Copied from pgstac links + +# These can be inferred from the item/collection, so they aren't included in the database +# Instead they are dynamically generated when querying the database using the classes defined below +INFERRED_LINK_RELS = ["self", "item", "parent", "collection", "root"] + + +def merge_params(url: str, newparams: Dict) -> str: + """Merge url parameters.""" + u = urlparse(url) + params = parse_qs(u.query) + params.update(newparams) + param_string = unquote(urlencode(params, True)) + + href = ParseResult( + scheme=u.scheme, + netloc=u.netloc, + path=u.path, + params=u.params, + query=param_string, + fragment=u.fragment, + ).geturl() + return href + + +@attr.s +class BaseLinks: + """Create inferred links common to collections and items.""" + + request: Request = attr.ib() + + @property + def base_url(self): + """Get the base url.""" + return str(self.request.base_url) + + @property + def url(self): + """Get the current request url.""" + return str(self.request.url) + + def resolve(self, url): + """Resolve url to the current request url.""" + return urljoin(str(self.base_url), str(url)) + + def link_self(self) -> Dict: + """Return the self link.""" + return 
dict(rel=Relations.self.value, type=MimeTypes.json.value, href=self.url) + + def link_root(self) -> Dict: + """Return the catalog root.""" + return dict( + rel=Relations.root.value, type=MimeTypes.json.value, href=self.base_url + ) + + def create_links(self) -> List[Dict[str, Any]]: + """Return all inferred links.""" + links = [] + for name in dir(self): + if name.startswith("link_") and callable(getattr(self, name)): + link = getattr(self, name)() + if link is not None: + links.append(link) + return links + + async def get_links( + self, extra_links: Optional[List[Dict[str, Any]]] = None + ) -> List[Dict[str, Any]]: + """ + Generate all the links. + + Get the links object for a stac resource by iterating through + available methods on this class that start with link_. + """ + # TODO: Pass request.json() into function so this doesn't need to be coroutine + if self.request.method == "POST": + self.request.postbody = await self.request.json() + # join passed in links with generated links + # and update relative paths + links = self.create_links() + + if extra_links: + # For extra links passed in, + # add links modified with a resolved href. + # Drop any links that are dynamically + # determined by the server (e.g. self, parent, etc.) + # Resolving the href allows for relative paths + # to be stored in pgstac and for the hrefs in the + # links of response STAC objects to be resolved + # to the request url. 
+ links += [ + {**link, "href": self.resolve(link["href"])} + for link in extra_links + if link["rel"] not in INFERRED_LINK_RELS + ] + + return links + + +@attr.s +class PagingLinks(BaseLinks): + """Create links for paging.""" + + next: Optional[str] = attr.ib(kw_only=True, default=None) + + def link_next(self) -> Optional[Dict[str, Any]]: + """Create link for next page.""" + if self.next is not None: + method = self.request.method + if method == "GET": + href = merge_params(self.url, {"token": self.next}) + link = dict( + rel=Relations.next.value, + type=MimeTypes.json.value, + method=method, + href=href, + ) + return link + if method == "POST": + return { + "rel": Relations.next, + "type": MimeTypes.json, + "method": method, + "href": f"{self.request.url}", + "body": {**self.request.postbody, "token": self.next}, + } + + return None diff --git a/stac_fastapi/core/stac_fastapi/core/models/search.py b/stac_fastapi/core/stac_fastapi/core/models/search.py new file mode 100644 index 00000000..33b73b68 --- /dev/null +++ b/stac_fastapi/core/stac_fastapi/core/models/search.py @@ -0,0 +1 @@ +"""Unused search model.""" diff --git a/stac_fastapi/core/stac_fastapi/core/serializers.py b/stac_fastapi/core/stac_fastapi/core/serializers.py new file mode 100644 index 00000000..725e8f65 --- /dev/null +++ b/stac_fastapi/core/stac_fastapi/core/serializers.py @@ -0,0 +1,162 @@ +"""Serializers.""" +import abc +from typing import Any + +import attr + +from stac_fastapi.elasticsearch.datetime_utils import now_to_rfc3339_str +from stac_fastapi.types import stac as stac_types +from stac_fastapi.types.links import CollectionLinks, ItemLinks, resolve_links + + +@attr.s +class Serializer(abc.ABC): + """Defines serialization methods between the API and the data model. + + This class is meant to be subclassed and implemented by specific serializers for different STAC objects (e.g. Item, Collection). 
+ """ + + @classmethod + @abc.abstractmethod + def db_to_stac(cls, item: dict, base_url: str) -> Any: + """Transform database model to STAC object. + + Arguments: + item (dict): A dictionary representing the database model. + base_url (str): The base URL of the STAC API. + + Returns: + Any: A STAC object, e.g. an `Item` or `Collection`, representing the input `item`. + """ + ... + + @classmethod + @abc.abstractmethod + def stac_to_db(cls, stac_object: Any, base_url: str) -> dict: + """Transform STAC object to database model. + + Arguments: + stac_object (Any): A STAC object, e.g. an `Item` or `Collection`. + base_url (str): The base URL of the STAC API. + + Returns: + dict: A dictionary representing the database model. + """ + ... + + +class ItemSerializer(Serializer): + """Serialization methods for STAC items.""" + + @classmethod + def stac_to_db(cls, stac_data: stac_types.Item, base_url: str) -> stac_types.Item: + """Transform STAC item to database-ready STAC item. + + Args: + stac_data (stac_types.Item): The STAC item object to be transformed. + base_url (str): The base URL for the STAC API. + + Returns: + stac_types.Item: The database-ready STAC item object. + """ + item_links = ItemLinks( + collection_id=stac_data["collection"], + item_id=stac_data["id"], + base_url=base_url, + ).create_links() + stac_data["links"] = item_links + + now = now_to_rfc3339_str() + if "created" not in stac_data["properties"]: + stac_data["properties"]["created"] = now + stac_data["properties"]["updated"] = now + return stac_data + + @classmethod + def db_to_stac(cls, item: dict, base_url: str) -> stac_types.Item: + """Transform database-ready STAC item to STAC item. + + Args: + item (dict): The database-ready STAC item to be transformed. + base_url (str): The base URL for the STAC API. + + Returns: + stac_types.Item: The STAC item object. 
+ """ + item_id = item["id"] + collection_id = item["collection"] + item_links = ItemLinks( + collection_id=collection_id, item_id=item_id, base_url=base_url + ).create_links() + + original_links = item.get("links", []) + if original_links: + item_links += resolve_links(original_links, base_url) + + return stac_types.Item( + type="Feature", + stac_version=item.get("stac_version", ""), + stac_extensions=item.get("stac_extensions", []), + id=item_id, + collection=item.get("collection", ""), + geometry=item.get("geometry", {}), + bbox=item.get("bbox", []), + properties=item.get("properties", {}), + links=item_links, + assets=item.get("assets", {}), + ) + + +class CollectionSerializer(Serializer): + """Serialization methods for STAC collections.""" + + @classmethod + def db_to_stac(cls, collection: dict, base_url: str) -> stac_types.Collection: + """Transform database model to STAC collection. + + Args: + collection (dict): The collection data in dictionary form, extracted from the database. + base_url (str): The base URL for the collection. + + Returns: + stac_types.Collection: The STAC collection object. 
+ """ + # Use dictionary unpacking to extract values from the collection dictionary + collection_id = collection.get("id") + stac_extensions = collection.get("stac_extensions", []) + stac_version = collection.get("stac_version", "") + title = collection.get("title", "") + description = collection.get("description", "") + keywords = collection.get("keywords", []) + license = collection.get("license", "") + providers = collection.get("providers", {}) + summaries = collection.get("summaries", {}) + extent = collection.get("extent", {}) + collection_assets = collection.get("assets", {}) + + # Create the collection links using CollectionLinks + collection_links = CollectionLinks( + collection_id=collection_id, base_url=base_url + ).create_links() + + # Add any additional links from the collection dictionary + original_links = collection.get("links") + if original_links: + collection_links += resolve_links(original_links, base_url) + + # Return the stac_types.Collection object + return stac_types.Collection( + type="Collection", + id=collection_id, + stac_extensions=stac_extensions, + stac_version=stac_version, + title=title, + description=description, + keywords=keywords, + license=license, + providers=providers, + summaries=summaries, + extent=extent, + links=collection_links, + assets=collection_assets, + ) diff --git a/stac_fastapi/core/stac_fastapi/core/session.py b/stac_fastapi/core/stac_fastapi/core/session.py new file mode 100644 index 00000000..d5a7aa3c --- /dev/null +++ b/stac_fastapi/core/stac_fastapi/core/session.py @@ -0,0 +1,25 @@ +"""database session management.""" +import logging + +import attr + +logger = logging.getLogger(__name__) + + +@attr.s +class Session: + """Database session management.""" + + @classmethod + def create_from_env(cls): + """Create from environment.""" + ... + + @classmethod + def create_from_settings(cls, settings): + """Create a Session object from settings.""" + ... 
+ + def __attrs_post_init__(self): + """Post init handler.""" + ... diff --git a/stac_fastapi/core/stac_fastapi/core/version.py b/stac_fastapi/core/stac_fastapi/core/version.py new file mode 100644 index 00000000..1eeef171 --- /dev/null +++ b/stac_fastapi/core/stac_fastapi/core/version.py @@ -0,0 +1,2 @@ +"""library version.""" +__version__ = "1.0.0" diff --git a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/app.py b/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/app.py index 8adcece4..d1b5ac63 100644 --- a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/app.py +++ b/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/app.py @@ -1,14 +1,17 @@ """FastAPI application.""" from stac_fastapi.api.app import StacApi from stac_fastapi.api.models import create_get_request_model, create_post_request_model -from stac_fastapi.elasticsearch.config import ElasticsearchSettings -from stac_fastapi.elasticsearch.core import ( +from stac_fastapi.core.core import ( BulkTransactionsClient, CoreClient, - EsAsyncBaseFiltersClient, TransactionsClient, ) -from stac_fastapi.elasticsearch.database_logic import create_collection_index +from stac_fastapi.elasticsearch.config import ElasticsearchSettings +from stac_fastapi.elasticsearch.core import EsAsyncBaseFiltersClient +from stac_fastapi.elasticsearch.database_logic import ( + DatabaseLogic, + create_collection_index, +) from stac_fastapi.elasticsearch.extensions import QueryExtension from stac_fastapi.elasticsearch.session import Session from stac_fastapi.extensions.core import ( @@ -29,9 +32,20 @@ "http://www.opengis.net/spec/cql2/1.0/conf/advanced-comparison-operators" ) +database_logic = DatabaseLogic() + extensions = [ - TransactionExtension(client=TransactionsClient(session=session), settings=settings), - BulkTransactionExtension(client=BulkTransactionsClient(session=session)), + TransactionExtension( + client=TransactionsClient(database=database_logic, session=session), + settings=settings, + ), + 
BulkTransactionExtension( + client=BulkTransactionsClient( + database=database_logic, + session=session, + settings=settings, + ) + ), FieldsExtension(), QueryExtension(), SortExtension(), @@ -45,7 +59,9 @@ api = StacApi( settings=settings, extensions=extensions, - client=CoreClient(session=session, post_request_model=post_request_model), + client=CoreClient( + database=database_logic, session=session, post_request_model=post_request_model + ), search_get_request_model=create_get_request_model(extensions), search_post_request_model=post_request_model, ) From c40e1229dc372e48d90a2ac520819620a00cd6f5 Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Wed, 31 Jan 2024 18:22:17 +0800 Subject: [PATCH 02/33] add scratch setup to opensearch folder --- stac_fastapi/opensearch/README.md | 0 stac_fastapi/opensearch/setup.cfg | 2 + stac_fastapi/opensearch/setup.py | 62 +++++++++++++++++++++++++++++++ 3 files changed, 64 insertions(+) create mode 100644 stac_fastapi/opensearch/README.md create mode 100644 stac_fastapi/opensearch/setup.cfg create mode 100644 stac_fastapi/opensearch/setup.py diff --git a/stac_fastapi/opensearch/README.md b/stac_fastapi/opensearch/README.md new file mode 100644 index 00000000..e69de29b diff --git a/stac_fastapi/opensearch/setup.cfg b/stac_fastapi/opensearch/setup.cfg new file mode 100644 index 00000000..1eb3fa49 --- /dev/null +++ b/stac_fastapi/opensearch/setup.cfg @@ -0,0 +1,2 @@ +[metadata] +version = attr: stac_fastapi.core.version.__version__ diff --git a/stac_fastapi/opensearch/setup.py b/stac_fastapi/opensearch/setup.py new file mode 100644 index 00000000..4046a1c2 --- /dev/null +++ b/stac_fastapi/opensearch/setup.py @@ -0,0 +1,62 @@ +"""stac_fastapi: elasticsearch module.""" + +from setuptools import find_namespace_packages, setup + +with open("README.md") as f: + desc = f.read() + +install_requires = [ + "fastapi", + "attrs", + "pydantic[dotenv]<2", + "stac_pydantic==2.0.*", + "stac-fastapi.types==2.4.9", + "stac-fastapi.api==2.4.9", + 
"stac-fastapi.extensions==2.4.9", + # "elasticsearch[async]==8.11.0", + # "elasticsearch-dsl==8.11.0", + "pystac[validation]", + # "uvicorn", + "orjson", + "overrides", + # "starlette", + "geojson-pydantic", + "pygeofilter==0.2.1", +] + +# extra_reqs = { +# "dev": [ +# "pytest", +# "pytest-cov", +# "pytest-asyncio", +# "pre-commit", +# "requests", +# "ciso8601", +# "httpx", +# ], +# "docs": ["mkdocs", "mkdocs-material", "pdocs"], +# "server": ["uvicorn[standard]==0.19.0"], +# } + +setup( + name="stac-fastapi.core", + description="Core library for the Elasticsearch and Opensearch stac-fastapi backends.", + long_description=desc, + long_description_content_type="text/markdown", + python_requires=">=3.8", + classifiers=[ + "Intended Audience :: Developers", + "Intended Audience :: Information Technology", + "Intended Audience :: Science/Research", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "License :: OSI Approved :: MIT License", + ], + url="https://github.com/stac-utils/stac-fastapi-elasticsearch", + license="MIT", + packages=find_namespace_packages(), + zip_safe=False, + install_requires=install_requires, +) From 5a14447b3ca9c2c1c7729f6dcf5e473095b6d48f Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Wed, 31 Jan 2024 23:00:58 +0800 Subject: [PATCH 03/33] reorg, working --- stac_fastapi/core/stac_fastapi/core/core.py | 134 +++++- .../core/stac_fastapi/core/types/core.py | 422 ++++++++++++++++++ .../stac_fastapi/elasticsearch/app.py | 4 +- stac_fastapi/elasticsearch/tests/conftest.py | 28 +- 4 files changed, 570 insertions(+), 18 deletions(-) create mode 100644 stac_fastapi/core/stac_fastapi/core/types/core.py diff --git a/stac_fastapi/core/stac_fastapi/core/core.py b/stac_fastapi/core/stac_fastapi/core/core.py index ffa81318..df180599 100644 --- a/stac_fastapi/core/stac_fastapi/core/core.py +++ 
b/stac_fastapi/core/stac_fastapi/core/core.py @@ -17,13 +17,18 @@ from pygeofilter.parsers.cql2_text import parse as parse_cql2_text from stac_pydantic.links import Relations from stac_pydantic.shared import MimeTypes +from stac_pydantic.version import STAC_VERSION -# from stac_fastapi.elasticsearch.config import ElasticsearchSettings from stac_fastapi.core.base_database_logic import BaseDatabaseLogic from stac_fastapi.core.base_settings import ApiBaseSettings from stac_fastapi.core.models.links import PagingLinks from stac_fastapi.core.serializers import CollectionSerializer, ItemSerializer from stac_fastapi.core.session import Session +from stac_fastapi.core.types.core import ( + AsyncBaseCoreClient, + AsyncBaseFiltersClient, + AsyncBaseTransactionsClient, +) from stac_fastapi.extensions.third_party.bulk_transactions import ( BaseBulkTransactionsClient, BulkTransactionMethod, @@ -31,12 +36,10 @@ ) from stac_fastapi.types import stac as stac_types from stac_fastapi.types.config import Settings -from stac_fastapi.types.core import ( - AsyncBaseCoreClient, - AsyncBaseFiltersClient, - AsyncBaseTransactionsClient, -) +from stac_fastapi.types.conformance import BASE_CONFORMANCE_CLASSES +from stac_fastapi.types.extension import ApiExtension from stac_fastapi.types.links import CollectionLinks +from stac_fastapi.types.requests import get_base_url from stac_fastapi.types.search import BaseSearchPostRequest from stac_fastapi.types.stac import Collection, Collections, Item, ItemCollection @@ -65,14 +68,128 @@ class CoreClient(AsyncBaseCoreClient): """ database: BaseDatabaseLogic = attr.ib() + base_conformance_classes: List[str] = attr.ib( + factory=lambda: BASE_CONFORMANCE_CLASSES + ) + extensions: List[ApiExtension] = attr.ib(default=attr.Factory(list)) session: Session = attr.ib(default=attr.Factory(Session.create_from_env)) item_serializer: Type[ItemSerializer] = attr.ib(default=ItemSerializer) collection_serializer: Type[CollectionSerializer] = attr.ib( 
default=CollectionSerializer ) + post_request_model = attr.ib(default=BaseSearchPostRequest) + stac_version: str = attr.ib(default=STAC_VERSION) + landing_page_id: str = attr.ib(default="stac-fastapi") + title: str = attr.ib(default="stac-fastapi") + description: str = attr.ib(default="stac-fastapi") + + def _landing_page( + self, + base_url: str, + conformance_classes: List[str], + extension_schemas: List[str], + ) -> stac_types.LandingPage: + landing_page = stac_types.LandingPage( + type="Catalog", + id=self.landing_page_id, + title=self.title, + description=self.description, + stac_version=self.stac_version, + conformsTo=conformance_classes, + links=[ + { + "rel": Relations.self.value, + "type": MimeTypes.json, + "href": base_url, + }, + { + "rel": Relations.root.value, + "type": MimeTypes.json, + "href": base_url, + }, + { + "rel": "data", + "type": MimeTypes.json, + "href": urljoin(base_url, "collections"), + }, + { + "rel": Relations.conformance.value, + "type": MimeTypes.json, + "title": "STAC/WFS3 conformance classes implemented by this server", + "href": urljoin(base_url, "conformance"), + }, + { + "rel": Relations.search.value, + "type": MimeTypes.geojson, + "title": "STAC search", + "href": urljoin(base_url, "search"), + "method": "GET", + }, + { + "rel": Relations.search.value, + "type": MimeTypes.geojson, + "title": "STAC search", + "href": urljoin(base_url, "search"), + "method": "POST", + }, + ], + stac_extensions=extension_schemas, + ) + return landing_page + + async def landing_page(self, **kwargs) -> stac_types.LandingPage: + """Landing page. + + Called with `GET /`. + + Returns: + API landing page, serving as an entry point to the API. 
+ """ + request: Request = kwargs["request"] + base_url = get_base_url(request) + landing_page = self._landing_page( + base_url=base_url, + conformance_classes=self.conformance_classes(), + extension_schemas=[], + ) + collections = await self.all_collections(request=kwargs["request"]) + for collection in collections["collections"]: + landing_page["links"].append( + { + "rel": Relations.child.value, + "type": MimeTypes.json.value, + "title": collection.get("title") or collection.get("id"), + "href": urljoin(base_url, f"collections/{collection['id']}"), + } + ) + + # Add OpenAPI URL + landing_page["links"].append( + { + "rel": "service-desc", + "type": "application/vnd.oai.openapi+json;version=3.0", + "title": "OpenAPI service description", + "href": urljoin( + str(request.base_url), request.app.openapi_url.lstrip("/") + ), + } + ) + + # Add human readable service-doc + landing_page["links"].append( + { + "rel": "service-doc", + "type": "text/html", + "title": "OpenAPI service documentation", + "href": urljoin( + str(request.base_url), request.app.docs_url.lstrip("/") + ), + } + ) + + return landing_page - @overrides async def all_collections(self, **kwargs) -> Collections: """Read all collections from the database. @@ -137,7 +254,6 @@ async def all_collections(self, **kwargs) -> Collections: links=links, ) - @overrides async def get_collection(self, collection_id: str, **kwargs) -> Collection: """Get a collection from the database by its id. 
@@ -155,7 +271,6 @@ async def get_collection(self, collection_id: str, **kwargs) -> Collection:
         collection = await self.database.find_collection(collection_id=collection_id)
         return self.collection_serializer.db_to_stac(collection, base_url)
 
-    @overrides
     async def item_collection(
         self,
         collection_id: str,
@@ -243,7 +358,6 @@ async def item_collection(
             context=context_obj,
         )
 
-    @overrides
     async def get_item(self, item_id: str, collection_id: str, **kwargs) -> Item:
         """Get an item from the database based on its id and collection id.
 
diff --git a/stac_fastapi/core/stac_fastapi/core/types/core.py b/stac_fastapi/core/stac_fastapi/core/types/core.py
new file mode 100644
index 00000000..d012dcea
--- /dev/null
+++ b/stac_fastapi/core/stac_fastapi/core/types/core.py
@@ -0,0 +1,422 @@
+"""Base clients. Taken from stac-fastapi.types.core v2.4.9."""
+import abc
+from datetime import datetime
+from typing import Any, Dict, List, Optional, Union
+
+import attr
+from starlette.responses import Response
+
+from stac_fastapi.core.base_database_logic import BaseDatabaseLogic
+from stac_fastapi.types import stac as stac_types
+from stac_fastapi.types.conformance import BASE_CONFORMANCE_CLASSES
+from stac_fastapi.types.extension import ApiExtension
+from stac_fastapi.types.search import BaseSearchPostRequest
+from stac_fastapi.types.stac import Conformance
+
+NumType = Union[float, int]
+StacType = Dict[str, Any]
+
+
+@attr.s  # type:ignore
+class AsyncBaseTransactionsClient(abc.ABC):
+    """Defines a pattern for implementing the STAC transaction extension."""
+
+    database = attr.ib(default=BaseDatabaseLogic)
+
+    @abc.abstractmethod
+    async def create_item(
+        self,
+        collection_id: str,
+        item: Union[stac_types.Item, stac_types.ItemCollection],
+        **kwargs,
+    ) -> Optional[Union[stac_types.Item, Response, None]]:
+        """Create a new item.
+
+        Called with `POST /collections/{collection_id}/items`.
+ + Args: + item: the item or item collection + collection_id: the id of the collection from the resource path + + Returns: + The item that was created or None if item collection. + """ + ... + + @abc.abstractmethod + async def update_item( + self, collection_id: str, item_id: str, item: stac_types.Item, **kwargs + ) -> Optional[Union[stac_types.Item, Response]]: + """Perform a complete update on an existing item. + + Called with `PUT /collections/{collection_id}/items`. It is expected + that this item already exists. The update should do a diff against the + saved item and perform any necessary updates. Partial updates are not + supported by the transactions extension. + + Args: + item: the item (must be complete) + + Returns: + The updated item. + """ + ... + + @abc.abstractmethod + async def delete_item( + self, item_id: str, collection_id: str, **kwargs + ) -> Optional[Union[stac_types.Item, Response]]: + """Delete an item from a collection. + + Called with `DELETE /collections/{collection_id}/items/{item_id}` + + Args: + item_id: id of the item. + collection_id: id of the collection. + + Returns: + The deleted item. + """ + ... + + @abc.abstractmethod + async def create_collection( + self, collection: stac_types.Collection, **kwargs + ) -> Optional[Union[stac_types.Collection, Response]]: + """Create a new collection. + + Called with `POST /collections`. + + Args: + collection: the collection + + Returns: + The collection that was created. + """ + ... + + @abc.abstractmethod + async def update_collection( + self, collection: stac_types.Collection, **kwargs + ) -> Optional[Union[stac_types.Collection, Response]]: + """Perform a complete update on an existing collection. + + Called with `PUT /collections`. It is expected that this item already + exists. The update should do a diff against the saved collection and + perform any necessary updates. Partial updates are not supported by the + transactions extension. 
+ + Args: + collection: the collection (must be complete) + + Returns: + The updated collection. + """ + ... + + @abc.abstractmethod + async def delete_collection( + self, collection_id: str, **kwargs + ) -> Optional[Union[stac_types.Collection, Response]]: + """Delete a collection. + + Called with `DELETE /collections/{collection_id}` + + Args: + collection_id: id of the collection. + + Returns: + The deleted collection. + """ + ... + + +# @attr.s +# class LandingPageMixin(abc.ABC): +# """Create a STAC landing page (GET /).""" + +# stac_version: str = attr.ib(default=STAC_VERSION) +# landing_page_id: str = attr.ib(default="stac-fastapi") +# title: str = attr.ib(default="stac-fastapi") +# description: str = attr.ib(default="stac-fastapi") + +# def _landing_page( +# self, +# base_url: str, +# conformance_classes: List[str], +# extension_schemas: List[str], +# ) -> stac_types.LandingPage: +# landing_page = stac_types.LandingPage( +# type="Catalog", +# id=self.landing_page_id, +# title=self.title, +# description=self.description, +# stac_version=self.stac_version, +# conformsTo=conformance_classes, +# links=[ +# { +# "rel": Relations.self.value, +# "type": MimeTypes.json, +# "href": base_url, +# }, +# { +# "rel": Relations.root.value, +# "type": MimeTypes.json, +# "href": base_url, +# }, +# { +# "rel": "data", +# "type": MimeTypes.json, +# "href": urljoin(base_url, "collections"), +# }, +# { +# "rel": Relations.conformance.value, +# "type": MimeTypes.json, +# "title": "STAC/WFS3 conformance classes implemented by this server", +# "href": urljoin(base_url, "conformance"), +# }, +# { +# "rel": Relations.search.value, +# "type": MimeTypes.geojson, +# "title": "STAC search", +# "href": urljoin(base_url, "search"), +# "method": "GET", +# }, +# { +# "rel": Relations.search.value, +# "type": MimeTypes.geojson, +# "title": "STAC search", +# "href": urljoin(base_url, "search"), +# "method": "POST", +# }, +# ], +# stac_extensions=extension_schemas, +# ) +# return landing_page + 
+ +@attr.s # type:ignore +class AsyncBaseCoreClient(abc.ABC): + """Defines a pattern for implementing STAC api core endpoints. + + Attributes: + extensions: list of registered api extensions. + """ + + database = attr.ib(default=BaseDatabaseLogic) + + base_conformance_classes: List[str] = attr.ib( + factory=lambda: BASE_CONFORMANCE_CLASSES + ) + extensions: List[ApiExtension] = attr.ib(default=attr.Factory(list)) + post_request_model = attr.ib(default=BaseSearchPostRequest) + + def conformance_classes(self) -> List[str]: + """Generate conformance classes.""" + conformance_classes = self.base_conformance_classes.copy() + + for extension in self.extensions: + extension_classes = getattr(extension, "conformance_classes", []) + conformance_classes.extend(extension_classes) + + return list(set(conformance_classes)) + + def extension_is_enabled(self, extension: str) -> bool: + """Check if an api extension is enabled.""" + return any([type(ext).__name__ == extension for ext in self.extensions]) + + # async def landing_page(self, **kwargs) -> stac_types.LandingPage: + # """Landing page. + + # Called with `GET /`. + + # Returns: + # API landing page, serving as an entry point to the API. 
+ # """ + # request: Request = kwargs["request"] + # base_url = get_base_url(request) + # landing_page = self._landing_page( + # base_url=base_url, + # conformance_classes=self.conformance_classes(), + # extension_schemas=[], + # ) + # collections = await self.all_collections(request=kwargs["request"]) + # for collection in collections["collections"]: + # landing_page["links"].append( + # { + # "rel": Relations.child.value, + # "type": MimeTypes.json.value, + # "title": collection.get("title") or collection.get("id"), + # "href": urljoin(base_url, f"collections/{collection['id']}"), + # } + # ) + + # # Add OpenAPI URL + # landing_page["links"].append( + # { + # "rel": "service-desc", + # "type": "application/vnd.oai.openapi+json;version=3.0", + # "title": "OpenAPI service description", + # "href": urljoin( + # str(request.base_url), request.app.openapi_url.lstrip("/") + # ), + # } + # ) + + # # Add human readable service-doc + # landing_page["links"].append( + # { + # "rel": "service-doc", + # "type": "text/html", + # "title": "OpenAPI service documentation", + # "href": urljoin( + # str(request.base_url), request.app.docs_url.lstrip("/") + # ), + # } + # ) + + # return landing_page + + async def conformance(self, **kwargs) -> stac_types.Conformance: + """Conformance classes. + + Called with `GET /conformance`. + + Returns: + Conformance classes which the server conforms to. + """ + return Conformance(conformsTo=self.conformance_classes()) + + @abc.abstractmethod + async def post_search( + self, search_request: BaseSearchPostRequest, **kwargs + ) -> stac_types.ItemCollection: + """Cross catalog search (POST). + + Called with `POST /search`. + + Args: + search_request: search request parameters. + + Returns: + ItemCollection containing items which match the search criteria. + """ + ... 
+ + @abc.abstractmethod + async def get_search( + self, + collections: Optional[List[str]] = None, + ids: Optional[List[str]] = None, + bbox: Optional[List[NumType]] = None, + datetime: Optional[Union[str, datetime]] = None, + limit: Optional[int] = 10, + query: Optional[str] = None, + token: Optional[str] = None, + fields: Optional[List[str]] = None, + sortby: Optional[str] = None, + intersects: Optional[str] = None, + **kwargs, + ) -> stac_types.ItemCollection: + """Cross catalog search (GET). + + Called with `GET /search`. + + Returns: + ItemCollection containing items which match the search criteria. + """ + ... + + @abc.abstractmethod + async def get_item( + self, item_id: str, collection_id: str, **kwargs + ) -> stac_types.Item: + """Get item by id. + + Called with `GET /collections/{collection_id}/items/{item_id}`. + + Args: + item_id: Id of the item. + collection_id: Id of the collection. + + Returns: + Item. + """ + ... + + @abc.abstractmethod + async def all_collections(self, **kwargs) -> stac_types.Collections: + """Get all available collections. + + Called with `GET /collections`. + + Returns: + A list of collections. + """ + ... + + @abc.abstractmethod + async def get_collection( + self, collection_id: str, **kwargs + ) -> stac_types.Collection: + """Get collection by id. + + Called with `GET /collections/{collection_id}`. + + Args: + collection_id: Id of the collection. + + Returns: + Collection. + """ + ... + + @abc.abstractmethod + async def item_collection( + self, + collection_id: str, + bbox: Optional[List[NumType]] = None, + datetime: Optional[Union[str, datetime]] = None, + limit: int = 10, + token: str = None, + **kwargs, + ) -> stac_types.ItemCollection: + """Get all items from a specific collection. + + Called with `GET /collections/{collection_id}/items` + + Args: + collection_id: id of the collection. + limit: number of items to return. + token: pagination token. + + Returns: + An ItemCollection. + """ + ... 
+ + +@attr.s +class AsyncBaseFiltersClient(abc.ABC): + """Defines a pattern for implementing the STAC filter extension.""" + + async def get_queryables( + self, collection_id: Optional[str] = None, **kwargs + ) -> Dict[str, Any]: + """Get the queryables available for the given collection_id. + + If collection_id is None, returns the intersection of all queryables over all + collections. + + This base implementation returns a blank queryable schema. This is not allowed + under OGC CQL but it is allowed by the STAC API Filter Extension + https://github.com/radiantearth/stac-api-spec/tree/master/fragments/filter#queryables + """ + return { + "$schema": "https://json-schema.org/draft/2019-09/schema", + "$id": "https://example.org/queryables", + "type": "object", + "title": "Queryables for Example STAC API", + "description": "Queryable names for the example STAC API Item Search filter.", + "properties": {}, + } diff --git a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/app.py b/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/app.py index d1b5ac63..760edb8f 100644 --- a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/app.py +++ b/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/app.py @@ -36,7 +36,9 @@ extensions = [ TransactionExtension( - client=TransactionsClient(database=database_logic, session=session), + client=TransactionsClient( + database=database_logic, session=session, settings=settings + ), settings=settings, ), BulkTransactionExtension( diff --git a/stac_fastapi/elasticsearch/tests/conftest.py b/stac_fastapi/elasticsearch/tests/conftest.py index fa093af2..6d4a8b5e 100644 --- a/stac_fastapi/elasticsearch/tests/conftest.py +++ b/stac_fastapi/elasticsearch/tests/conftest.py @@ -10,13 +10,19 @@ from stac_fastapi.api.app import StacApi from stac_fastapi.api.models import create_get_request_model, create_post_request_model -from stac_fastapi.elasticsearch.config import AsyncElasticsearchSettings -from stac_fastapi.elasticsearch.core import 
( +from stac_fastapi.core.core import ( BulkTransactionsClient, CoreClient, TransactionsClient, ) -from stac_fastapi.elasticsearch.database_logic import create_collection_index +from stac_fastapi.elasticsearch.config import ( + AsyncElasticsearchSettings, + ElasticsearchSettings, +) +from stac_fastapi.elasticsearch.database_logic import ( + DatabaseLogic, + create_collection_index, +) from stac_fastapi.elasticsearch.extensions import QueryExtension from stac_fastapi.extensions.core import ( # FieldsExtension, ContextExtension, @@ -143,19 +149,23 @@ async def ctx(txn_client: TransactionsClient, test_collection, test_item): await delete_collections_and_items(txn_client) +database = DatabaseLogic() +settings = ElasticsearchSettings() + + @pytest.fixture def core_client(): - return CoreClient(session=None) + return CoreClient(database=database, session=None) @pytest.fixture def txn_client(): - return TransactionsClient(session=None) + return TransactionsClient(database=database, session=None, settings=settings) @pytest.fixture def bulk_txn_client(): - return BulkTransactionsClient(session=None) + return BulkTransactionsClient(database=database, session=None, settings=settings) @pytest_asyncio.fixture(scope="session") @@ -163,7 +173,10 @@ async def app(): settings = AsyncElasticsearchSettings() extensions = [ TransactionExtension( - client=TransactionsClient(session=None), settings=settings + client=TransactionsClient( + database=database, session=None, settings=settings + ), + settings=settings, ), ContextExtension(), SortExtension(), @@ -178,6 +191,7 @@ async def app(): return StacApi( settings=settings, client=CoreClient( + database=database, session=None, extensions=extensions, post_request_model=post_request_model, From e5d71fd471a05c602c0f57ce679b4cfbe4fe97dc Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Wed, 31 Jan 2024 23:03:14 +0800 Subject: [PATCH 04/33] install core library in cicd --- .github/workflows/cicd.yml | 4 ++++ 1 file changed, 4 insertions(+) diff 
--git a/.github/workflows/cicd.yml b/.github/workflows/cicd.yml index 9a50e28f..154b5f26 100644 --- a/.github/workflows/cicd.yml +++ b/.github/workflows/cicd.yml @@ -68,6 +68,10 @@ jobs: run: | python -m pip install --upgrade pipenv wheel + - name: Install core library stac-fastapi + run: | + pip install ./stac_fastapi/core + - name: Install elasticsearch stac-fastapi run: | pip install ./stac_fastapi/elasticsearch[dev,server] From 33ddeb7f28ac47847d31a60e6096493f39a968ad Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Wed, 31 Jan 2024 23:19:39 +0800 Subject: [PATCH 05/33] remove core.py from es folder --- .../stac_fastapi/elasticsearch/core.py | 851 ------------------ .../elasticsearch/models/links.py | 138 --- .../tests/resources/test_item.py | 10 +- 3 files changed, 8 insertions(+), 991 deletions(-) delete mode 100644 stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/core.py delete mode 100644 stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/models/links.py diff --git a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/core.py b/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/core.py deleted file mode 100644 index 12cc6b2c..00000000 --- a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/core.py +++ /dev/null @@ -1,851 +0,0 @@ -"""Item crud client.""" -import logging -import re -from base64 import urlsafe_b64encode -from datetime import datetime as datetime_type -from datetime import timezone -from typing import Any, Dict, List, Optional, Set, Type, Union -from urllib.parse import unquote_plus, urljoin - -import attr -import orjson -import stac_pydantic -from fastapi import HTTPException, Request -from overrides import overrides -from pydantic import ValidationError -from pygeofilter.backends.cql2_json import to_cql2 -from pygeofilter.parsers.cql2_text import parse as parse_cql2_text -from stac_pydantic.links import Relations -from stac_pydantic.shared import MimeTypes - -from stac_fastapi.elasticsearch import serializers -from 
stac_fastapi.elasticsearch.config import ElasticsearchSettings -from stac_fastapi.elasticsearch.database_logic import DatabaseLogic -from stac_fastapi.elasticsearch.models.links import PagingLinks -from stac_fastapi.elasticsearch.serializers import CollectionSerializer, ItemSerializer -from stac_fastapi.elasticsearch.session import Session -from stac_fastapi.extensions.third_party.bulk_transactions import ( - BaseBulkTransactionsClient, - BulkTransactionMethod, - Items, -) -from stac_fastapi.types import stac as stac_types -from stac_fastapi.types.config import Settings -from stac_fastapi.types.core import ( - AsyncBaseCoreClient, - AsyncBaseFiltersClient, - AsyncBaseTransactionsClient, -) -from stac_fastapi.types.links import CollectionLinks -from stac_fastapi.types.search import BaseSearchPostRequest -from stac_fastapi.types.stac import Collection, Collections, Item, ItemCollection - -logger = logging.getLogger(__name__) - -NumType = Union[float, int] - - -@attr.s -class CoreClient(AsyncBaseCoreClient): - """Client for core endpoints defined by the STAC specification. - - This class is a implementation of `AsyncBaseCoreClient` that implements the core endpoints - defined by the STAC specification. It uses the `DatabaseLogic` class to interact with the - database, and `ItemSerializer` and `CollectionSerializer` to convert between STAC objects and - database records. - - Attributes: - session (Session): A requests session instance to be used for all HTTP requests. - item_serializer (Type[serializers.ItemSerializer]): A serializer class to be used to convert - between STAC items and database records. - collection_serializer (Type[serializers.CollectionSerializer]): A serializer class to be - used to convert between STAC collections and database records. - database (DatabaseLogic): An instance of the `DatabaseLogic` class that is used to interact - with the database. 
- """ - - session: Session = attr.ib(default=attr.Factory(Session.create_from_env)) - item_serializer: Type[serializers.ItemSerializer] = attr.ib( - default=serializers.ItemSerializer - ) - collection_serializer: Type[serializers.CollectionSerializer] = attr.ib( - default=serializers.CollectionSerializer - ) - database = DatabaseLogic() - - @overrides - async def all_collections(self, **kwargs) -> Collections: - """Read all collections from the database. - - Returns: - Collections: A `Collections` object containing all the collections in the database and - links to various resources. - - Raises: - Exception: If any error occurs while reading the collections from the database. - """ - request: Request = kwargs["request"] - base_url = str(kwargs["request"].base_url) - - limit = ( - int(request.query_params["limit"]) - if "limit" in request.query_params - else 10 - ) - token = ( - request.query_params["token"] if "token" in request.query_params else None - ) - - hits = await self.database.get_all_collections(limit=limit, token=token) - - next_search_after = None - next_link = None - if len(hits) == limit: - last_hit = hits[-1] - next_search_after = last_hit["sort"] - next_token = urlsafe_b64encode( - ",".join(map(str, next_search_after)).encode() - ).decode() - paging_links = PagingLinks(next=next_token, request=request) - next_link = paging_links.link_next() - - links = [ - { - "rel": Relations.root.value, - "type": MimeTypes.json, - "href": base_url, - }, - { - "rel": Relations.parent.value, - "type": MimeTypes.json, - "href": base_url, - }, - { - "rel": Relations.self.value, - "type": MimeTypes.json, - "href": urljoin(base_url, "collections"), - }, - ] - - if next_link: - links.append(next_link) - - return Collections( - collections=[ - self.collection_serializer.db_to_stac(c["_source"], base_url=base_url) - for c in hits - ], - links=links, - ) - - @overrides - async def get_collection(self, collection_id: str, **kwargs) -> Collection: - """Get a collection from 
the database by its id. - - Args: - collection_id (str): The id of the collection to retrieve. - kwargs: Additional keyword arguments passed to the API call. - - Returns: - Collection: A `Collection` object representing the requested collection. - - Raises: - NotFoundError: If the collection with the given id cannot be found in the database. - """ - base_url = str(kwargs["request"].base_url) - collection = await self.database.find_collection(collection_id=collection_id) - return self.collection_serializer.db_to_stac(collection, base_url) - - @overrides - async def item_collection( - self, - collection_id: str, - bbox: Optional[List[NumType]] = None, - datetime: Union[str, datetime_type, None] = None, - limit: int = 10, - token: str = None, - **kwargs, - ) -> ItemCollection: - """Read items from a specific collection in the database. - - Args: - collection_id (str): The identifier of the collection to read items from. - bbox (Optional[List[NumType]]): The bounding box to filter items by. - datetime (Union[str, datetime_type, None]): The datetime range to filter items by. - limit (int): The maximum number of items to return. The default value is 10. - token (str): A token used for pagination. - request (Request): The incoming request. - - Returns: - ItemCollection: An `ItemCollection` object containing the items from the specified collection that meet - the filter criteria and links to various resources. - - Raises: - HTTPException: If the specified collection is not found. - Exception: If any error occurs while reading the items from the database. 
- """ - request: Request = kwargs["request"] - base_url = str(request.base_url) - - collection = await self.get_collection( - collection_id=collection_id, request=request - ) - collection_id = collection.get("id") - if collection_id is None: - raise HTTPException(status_code=404, detail="Collection not found") - - search = self.database.make_search() - search = self.database.apply_collections_filter( - search=search, collection_ids=[collection_id] - ) - - if datetime: - datetime_search = self._return_date(datetime) - search = self.database.apply_datetime_filter( - search=search, datetime_search=datetime_search - ) - - if bbox: - bbox = [float(x) for x in bbox] - if len(bbox) == 6: - bbox = [bbox[0], bbox[1], bbox[3], bbox[4]] - - search = self.database.apply_bbox_filter(search=search, bbox=bbox) - - items, maybe_count, next_token = await self.database.execute_search( - search=search, - limit=limit, - sort=None, - token=token, # type: ignore - collection_ids=[collection_id], - ) - - items = [ - self.item_serializer.db_to_stac(item, base_url=base_url) for item in items - ] - - context_obj = None - if self.extension_is_enabled("ContextExtension"): - context_obj = { - "returned": len(items), - "limit": limit, - } - if maybe_count is not None: - context_obj["matched"] = maybe_count - - links = [] - if next_token: - links = await PagingLinks(request=request, next=next_token).get_links() - - return ItemCollection( - type="FeatureCollection", - features=items, - links=links, - context=context_obj, - ) - - @overrides - async def get_item(self, item_id: str, collection_id: str, **kwargs) -> Item: - """Get an item from the database based on its id and collection id. - - Args: - collection_id (str): The ID of the collection the item belongs to. - item_id (str): The ID of the item to be retrieved. - - Returns: - Item: An `Item` object representing the requested item. - - Raises: - Exception: If any error occurs while getting the item from the database. 
- NotFoundError: If the item does not exist in the specified collection. - """ - base_url = str(kwargs["request"].base_url) - item = await self.database.get_one_item( - item_id=item_id, collection_id=collection_id - ) - return self.item_serializer.db_to_stac(item, base_url) - - @staticmethod - def _return_date(interval_str): - """ - Convert a date interval string into a dictionary for filtering search results. - - The date interval string should be formatted as either a single date or a range of dates separated - by "/". The date format should be ISO-8601 (YYYY-MM-DDTHH:MM:SSZ). If the interval string is a - single date, it will be converted to a dictionary with a single "eq" key whose value is the date in - the ISO-8601 format. If the interval string is a range of dates, it will be converted to a - dictionary with "gte" (greater than or equal to) and "lte" (less than or equal to) keys. If the - interval string is a range of dates with ".." instead of "/", the start and end dates will be - assigned default values to encompass the entire possible date range. - - Args: - interval_str (str): The date interval string to be converted. - - Returns: - dict: A dictionary representing the date interval for use in filtering search results. - """ - intervals = interval_str.split("/") - if len(intervals) == 1: - datetime = f"{intervals[0][0:19]}Z" - return {"eq": datetime} - else: - start_date = intervals[0] - end_date = intervals[1] - if ".." 
not in intervals: - start_date = f"{start_date[0:19]}Z" - end_date = f"{end_date[0:19]}Z" - elif start_date != "..": - start_date = f"{start_date[0:19]}Z" - end_date = "2200-12-01T12:31:12Z" - elif end_date != "..": - start_date = "1900-10-01T00:00:00Z" - end_date = f"{end_date[0:19]}Z" - else: - start_date = "1900-10-01T00:00:00Z" - end_date = "2200-12-01T12:31:12Z" - - return {"lte": end_date, "gte": start_date} - - async def get_search( - self, - request: Request, - collections: Optional[List[str]] = None, - ids: Optional[List[str]] = None, - bbox: Optional[List[NumType]] = None, - datetime: Optional[Union[str, datetime_type]] = None, - limit: Optional[int] = 10, - query: Optional[str] = None, - token: Optional[str] = None, - fields: Optional[List[str]] = None, - sortby: Optional[str] = None, - intersects: Optional[str] = None, - filter: Optional[str] = None, - filter_lang: Optional[str] = None, - **kwargs, - ) -> ItemCollection: - """Get search results from the database. - - Args: - collections (Optional[List[str]]): List of collection IDs to search in. - ids (Optional[List[str]]): List of item IDs to search for. - bbox (Optional[List[NumType]]): Bounding box to search in. - datetime (Optional[Union[str, datetime_type]]): Filter items based on the datetime field. - limit (Optional[int]): Maximum number of results to return. - query (Optional[str]): Query string to filter the results. - token (Optional[str]): Access token to use when searching the catalog. - fields (Optional[List[str]]): Fields to include or exclude from the results. - sortby (Optional[str]): Sorting options for the results. - intersects (Optional[str]): GeoJSON geometry to search in. - kwargs: Additional parameters to be passed to the API. - - Returns: - ItemCollection: Collection of `Item` objects representing the search results. - - Raises: - HTTPException: If any error occurs while searching the catalog. 
- """ - base_args = { - "collections": collections, - "ids": ids, - "bbox": bbox, - "limit": limit, - "token": token, - "query": orjson.loads(query) if query else query, - } - - # this is borrowed from stac-fastapi-pgstac - # Kludgy fix because using factory does not allow alias for filter-lan - query_params = str(request.query_params) - if filter_lang is None: - match = re.search(r"filter-lang=([a-z0-9-]+)", query_params, re.IGNORECASE) - if match: - filter_lang = match.group(1) - - if datetime: - base_args["datetime"] = datetime - - if intersects: - base_args["intersects"] = orjson.loads(unquote_plus(intersects)) - - if sortby: - sort_param = [] - for sort in sortby: - sort_param.append( - { - "field": sort[1:], - "direction": "desc" if sort[0] == "-" else "asc", - } - ) - print(sort_param) - base_args["sortby"] = sort_param - - if filter: - if filter_lang == "cql2-json": - base_args["filter-lang"] = "cql2-json" - base_args["filter"] = orjson.loads(unquote_plus(filter)) - else: - base_args["filter-lang"] = "cql2-json" - base_args["filter"] = orjson.loads(to_cql2(parse_cql2_text(filter))) - - if fields: - includes = set() - excludes = set() - for field in fields: - if field[0] == "-": - excludes.add(field[1:]) - elif field[0] == "+": - includes.add(field[1:]) - else: - includes.add(field) - base_args["fields"] = {"include": includes, "exclude": excludes} - - # Do the request - try: - search_request = self.post_request_model(**base_args) - except ValidationError: - raise HTTPException(status_code=400, detail="Invalid parameters provided") - resp = await self.post_search(search_request=search_request, request=request) - - return resp - - async def post_search( - self, search_request: BaseSearchPostRequest, request: Request - ) -> ItemCollection: - """ - Perform a POST search on the catalog. - - Args: - search_request (BaseSearchPostRequest): Request object that includes the parameters for the search. - kwargs: Keyword arguments passed to the function. 
- - Returns: - ItemCollection: A collection of items matching the search criteria. - - Raises: - HTTPException: If there is an error with the cql2_json filter. - """ - base_url = str(request.base_url) - - search = self.database.make_search() - - if search_request.ids: - search = self.database.apply_ids_filter( - search=search, item_ids=search_request.ids - ) - - if search_request.collections: - search = self.database.apply_collections_filter( - search=search, collection_ids=search_request.collections - ) - - if search_request.datetime: - datetime_search = self._return_date(search_request.datetime) - search = self.database.apply_datetime_filter( - search=search, datetime_search=datetime_search - ) - - if search_request.bbox: - bbox = search_request.bbox - if len(bbox) == 6: - bbox = [bbox[0], bbox[1], bbox[3], bbox[4]] - - search = self.database.apply_bbox_filter(search=search, bbox=bbox) - - if search_request.intersects: - search = self.database.apply_intersects_filter( - search=search, intersects=search_request.intersects - ) - - if search_request.query: - for (field_name, expr) in search_request.query.items(): - field = "properties__" + field_name - for (op, value) in expr.items(): - search = self.database.apply_stacql_filter( - search=search, op=op, field=field, value=value - ) - - # only cql2_json is supported here - if hasattr(search_request, "filter"): - cql2_filter = getattr(search_request, "filter", None) - try: - search = self.database.apply_cql2_filter(search, cql2_filter) - except Exception as e: - raise HTTPException( - status_code=400, detail=f"Error with cql2_json filter: {e}" - ) - - sort = None - if search_request.sortby: - sort = self.database.populate_sort(search_request.sortby) - - limit = 10 - if search_request.limit: - limit = search_request.limit - - items, maybe_count, next_token = await self.database.execute_search( - search=search, - limit=limit, - token=search_request.token, # type: ignore - sort=sort, - 
collection_ids=search_request.collections, - ) - - items = [ - self.item_serializer.db_to_stac(item, base_url=base_url) for item in items - ] - - if self.extension_is_enabled("FieldsExtension"): - if search_request.query is not None: - query_include: Set[str] = set( - [ - k if k in Settings.get().indexed_fields else f"properties.{k}" - for k in search_request.query.keys() - ] - ) - if not search_request.fields.include: - search_request.fields.include = query_include - else: - search_request.fields.include.union(query_include) - - filter_kwargs = search_request.fields.filter_fields - - items = [ - orjson.loads( - stac_pydantic.Item(**feat).json(**filter_kwargs, exclude_unset=True) - ) - for feat in items - ] - - context_obj = None - if self.extension_is_enabled("ContextExtension"): - context_obj = { - "returned": len(items), - "limit": limit, - } - if maybe_count is not None: - context_obj["matched"] = maybe_count - - links = [] - if next_token: - links = await PagingLinks(request=request, next=next_token).get_links() - - return ItemCollection( - type="FeatureCollection", - features=items, - links=links, - context=context_obj, - ) - - -@attr.s -class TransactionsClient(AsyncBaseTransactionsClient): - """Transactions extension specific CRUD operations.""" - - session: Session = attr.ib(default=attr.Factory(Session.create_from_env)) - database = DatabaseLogic() - - @overrides - async def create_item( - self, collection_id: str, item: stac_types.Item, **kwargs - ) -> stac_types.Item: - """Create an item in the collection. - - Args: - collection_id (str): The id of the collection to add the item to. - item (stac_types.Item): The item to be added to the collection. - kwargs: Additional keyword arguments. - - Returns: - stac_types.Item: The created item. - - Raises: - NotFound: If the specified collection is not found in the database. - ConflictError: If the item in the specified collection already exists. 
- - """ - base_url = str(kwargs["request"].base_url) - - # If a feature collection is posted - if item["type"] == "FeatureCollection": - bulk_client = BulkTransactionsClient() - processed_items = [ - bulk_client.preprocess_item(item, base_url, BulkTransactionMethod.INSERT) for item in item["features"] # type: ignore - ] - - await self.database.bulk_async( - collection_id, processed_items, refresh=kwargs.get("refresh", False) - ) - - return None # type: ignore - else: - item = await self.database.prep_create_item(item=item, base_url=base_url) - await self.database.create_item(item, refresh=kwargs.get("refresh", False)) - return item - - @overrides - async def update_item( - self, collection_id: str, item_id: str, item: stac_types.Item, **kwargs - ) -> stac_types.Item: - """Update an item in the collection. - - Args: - collection_id (str): The ID of the collection the item belongs to. - item_id (str): The ID of the item to be updated. - item (stac_types.Item): The new item data. - kwargs: Other optional arguments, including the request object. - - Returns: - stac_types.Item: The updated item object. - - Raises: - NotFound: If the specified collection is not found in the database. - - """ - base_url = str(kwargs["request"].base_url) - now = datetime_type.now(timezone.utc).isoformat().replace("+00:00", "Z") - item["properties"]["updated"] = now - - await self.database.check_collection_exists(collection_id) - await self.delete_item(item_id=item_id, collection_id=collection_id) - await self.create_item(collection_id=collection_id, item=item, **kwargs) - - return ItemSerializer.db_to_stac(item, base_url) - - @overrides - async def delete_item( - self, item_id: str, collection_id: str, **kwargs - ) -> stac_types.Item: - """Delete an item from a collection. - - Args: - item_id (str): The identifier of the item to delete. - collection_id (str): The identifier of the collection that contains the item. 
- - Returns: - Optional[stac_types.Item]: The deleted item, or `None` if the item was successfully deleted. - """ - await self.database.delete_item(item_id=item_id, collection_id=collection_id) - return None # type: ignore - - @overrides - async def create_collection( - self, collection: stac_types.Collection, **kwargs - ) -> stac_types.Collection: - """Create a new collection in the database. - - Args: - collection (stac_types.Collection): The collection to be created. - kwargs: Additional keyword arguments. - - Returns: - stac_types.Collection: The created collection object. - - Raises: - ConflictError: If the collection already exists. - """ - base_url = str(kwargs["request"].base_url) - collection_links = CollectionLinks( - collection_id=collection["id"], base_url=base_url - ).create_links() - collection["links"] = collection_links - await self.database.create_collection(collection=collection) - - return CollectionSerializer.db_to_stac(collection, base_url) - - @overrides - async def update_collection( - self, collection: stac_types.Collection, **kwargs - ) -> stac_types.Collection: - """ - Update a collection. - - This method updates an existing collection in the database by first finding - the collection by its id, then deleting the old version, and finally creating - a new version of the updated collection. The updated collection is then returned. - - Args: - collection: A STAC collection that needs to be updated. - kwargs: Additional keyword arguments. - - Returns: - A STAC collection that has been updated in the database. - - """ - base_url = str(kwargs["request"].base_url) - - await self.database.find_collection(collection_id=collection["id"]) - await self.delete_collection(collection["id"]) - await self.create_collection(collection, **kwargs) - - return CollectionSerializer.db_to_stac(collection, base_url) - - @overrides - async def delete_collection( - self, collection_id: str, **kwargs - ) -> stac_types.Collection: - """ - Delete a collection. 
- - This method deletes an existing collection in the database. - - Args: - collection_id (str): The identifier of the collection that contains the item. - kwargs: Additional keyword arguments. - - Returns: - None. - - Raises: - NotFoundError: If the collection doesn't exist. - """ - await self.database.delete_collection(collection_id=collection_id) - return None # type: ignore - - -@attr.s -class BulkTransactionsClient(BaseBulkTransactionsClient): - """A client for posting bulk transactions to a Postgres database. - - Attributes: - session: An instance of `Session` to use for database connection. - database: An instance of `DatabaseLogic` to perform database operations. - """ - - session: Session = attr.ib(default=attr.Factory(Session.create_from_env)) - database = DatabaseLogic() - - def __attrs_post_init__(self): - """Create es engine.""" - settings = ElasticsearchSettings() - self.client = settings.create_client - - def preprocess_item( - self, item: stac_types.Item, base_url, method: BulkTransactionMethod - ) -> stac_types.Item: - """Preprocess an item to match the data model. - - Args: - item: The item to preprocess. - base_url: The base URL of the request. - method: The bulk transaction method. - - Returns: - The preprocessed item. - """ - exist_ok = method == BulkTransactionMethod.UPSERT - return self.database.sync_prep_create_item( - item=item, base_url=base_url, exist_ok=exist_ok - ) - - @overrides - def bulk_item_insert( - self, items: Items, chunk_size: Optional[int] = None, **kwargs - ) -> str: - """Perform a bulk insertion of items into the database using Elasticsearch. - - Args: - items: The items to insert. - chunk_size: The size of each chunk for bulk processing. - **kwargs: Additional keyword arguments, such as `request` and `refresh`. - - Returns: - A string indicating the number of items successfully added. 
- """ - request = kwargs.get("request") - if request: - base_url = str(request.base_url) - else: - base_url = "" - - processed_items = [ - self.preprocess_item(item, base_url, items.method) - for item in items.items.values() - ] - - # not a great way to get the collection_id-- should be part of the method signature - collection_id = processed_items[0]["collection"] - - self.database.bulk_sync( - collection_id, processed_items, refresh=kwargs.get("refresh", False) - ) - - return f"Successfully added {len(processed_items)} Items." - - -@attr.s -class EsAsyncBaseFiltersClient(AsyncBaseFiltersClient): - """Defines a pattern for implementing the STAC filter extension.""" - - # todo: use the ES _mapping endpoint to dynamically find what fields exist - async def get_queryables( - self, collection_id: Optional[str] = None, **kwargs - ) -> Dict[str, Any]: - """Get the queryables available for the given collection_id. - - If collection_id is None, returns the intersection of all - queryables over all collections. - - This base implementation returns a blank queryable schema. This is not allowed - under OGC CQL but it is allowed by the STAC API Filter Extension - - https://github.com/radiantearth/stac-api-spec/tree/master/fragments/filter#queryables - - Args: - collection_id (str, optional): The id of the collection to get queryables for. - **kwargs: additional keyword arguments - - Returns: - Dict[str, Any]: A dictionary containing the queryables for the given collection. 
- """ - return { - "$schema": "https://json-schema.org/draft/2019-09/schema", - "$id": "https://stac-api.example.com/queryables", - "type": "object", - "title": "Queryables for Example STAC API", - "description": "Queryable names for the example STAC API Item Search filter.", - "properties": { - "id": { - "description": "ID", - "$ref": "https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/item.json#/definitions/core/allOf/2/properties/id", - }, - "collection": { - "description": "Collection", - "$ref": "https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/item.json#/definitions/core/allOf/2/then/properties/collection", - }, - "geometry": { - "description": "Geometry", - "$ref": "https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/item.json#/definitions/core/allOf/1/oneOf/0/properties/geometry", - }, - "datetime": { - "description": "Acquisition Timestamp", - "$ref": "https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/datetime.json#/properties/datetime", - }, - "created": { - "description": "Creation Timestamp", - "$ref": "https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/datetime.json#/properties/created", - }, - "updated": { - "description": "Creation Timestamp", - "$ref": "https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/datetime.json#/properties/updated", - }, - "cloud_cover": { - "description": "Cloud Cover", - "$ref": "https://stac-extensions.github.io/eo/v1.0.0/schema.json#/definitions/fields/properties/eo:cloud_cover", - }, - "cloud_shadow_percentage": { - "description": "Cloud Shadow Percentage", - "title": "Cloud Shadow Percentage", - "type": "number", - "minimum": 0, - "maximum": 100, - }, - "nodata_pixel_percentage": { - "description": "No Data Pixel Percentage", - "title": "No Data Pixel Percentage", - "type": "number", - "minimum": 0, - "maximum": 100, - }, - }, - "additionalProperties": True, - } diff --git a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/models/links.py 
b/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/models/links.py deleted file mode 100644 index 3941a149..00000000 --- a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/models/links.py +++ /dev/null @@ -1,138 +0,0 @@ -"""link helpers.""" - -from typing import Any, Dict, List, Optional -from urllib.parse import ParseResult, parse_qs, unquote, urlencode, urljoin, urlparse - -import attr -from stac_pydantic.links import Relations -from stac_pydantic.shared import MimeTypes -from starlette.requests import Request - -# Copied from pgstac links - -# These can be inferred from the item/collection, so they aren't included in the database -# Instead they are dynamically generated when querying the database using the classes defined below -INFERRED_LINK_RELS = ["self", "item", "parent", "collection", "root"] - - -def merge_params(url: str, newparams: Dict) -> str: - """Merge url parameters.""" - u = urlparse(url) - params = parse_qs(u.query) - params.update(newparams) - param_string = unquote(urlencode(params, True)) - - href = ParseResult( - scheme=u.scheme, - netloc=u.netloc, - path=u.path, - params=u.params, - query=param_string, - fragment=u.fragment, - ).geturl() - return href - - -@attr.s -class BaseLinks: - """Create inferred links common to collections and items.""" - - request: Request = attr.ib() - - @property - def base_url(self): - """Get the base url.""" - return str(self.request.base_url) - - @property - def url(self): - """Get the current request url.""" - return str(self.request.url) - - def resolve(self, url): - """Resolve url to the current request url.""" - return urljoin(str(self.base_url), str(url)) - - def link_self(self) -> Dict: - """Return the self link.""" - return dict(rel=Relations.self.value, type=MimeTypes.json.value, href=self.url) - - def link_root(self) -> Dict: - """Return the catalog root.""" - return dict( - rel=Relations.root.value, type=MimeTypes.json.value, href=self.base_url - ) - - def create_links(self) -> List[Dict[str, 
Any]]: - """Return all inferred links.""" - links = [] - for name in dir(self): - if name.startswith("link_") and callable(getattr(self, name)): - link = getattr(self, name)() - if link is not None: - links.append(link) - return links - - async def get_links( - self, extra_links: Optional[List[Dict[str, Any]]] = None - ) -> List[Dict[str, Any]]: - """ - Generate all the links. - - Get the links object for a stac resource by iterating through - available methods on this class that start with link_. - """ - # TODO: Pass request.json() into function so this doesn't need to be coroutine - if self.request.method == "POST": - self.request.postbody = await self.request.json() - # join passed in links with generated links - # and update relative paths - links = self.create_links() - - if extra_links: - # For extra links passed in, - # add links modified with a resolved href. - # Drop any links that are dynamically - # determined by the server (e.g. self, parent, etc.) - # Resolving the href allows for relative paths - # to be stored in pgstac and for the hrefs in the - # links of response STAC objects to be resolved - # to the request url. 
- links += [ - {**link, "href": self.resolve(link["href"])} - for link in extra_links - if link["rel"] not in INFERRED_LINK_RELS - ] - - return links - - -@attr.s -class PagingLinks(BaseLinks): - """Create links for paging.""" - - next: Optional[str] = attr.ib(kw_only=True, default=None) - - def link_next(self) -> Optional[Dict[str, Any]]: - """Create link for next page.""" - if self.next is not None: - method = self.request.method - if method == "GET": - href = merge_params(self.url, {"token": self.next}) - link = dict( - rel=Relations.next.value, - type=MimeTypes.json.value, - method=method, - href=href, - ) - return link - if method == "POST": - return { - "rel": Relations.next, - "type": MimeTypes.json, - "method": method, - "href": f"{self.request.url}", - "body": {**self.request.postbody, "token": self.next}, - } - - return None diff --git a/stac_fastapi/elasticsearch/tests/resources/test_item.py b/stac_fastapi/elasticsearch/tests/resources/test_item.py index 5b382873..c63be048 100644 --- a/stac_fastapi/elasticsearch/tests/resources/test_item.py +++ b/stac_fastapi/elasticsearch/tests/resources/test_item.py @@ -12,7 +12,8 @@ from geojson_pydantic.geometries import Polygon from pystac.utils import datetime_to_str -from stac_fastapi.elasticsearch.core import CoreClient +from stac_fastapi.core.core import CoreClient +from stac_fastapi.elasticsearch.database_logic import DatabaseLogic from stac_fastapi.elasticsearch.datetime_utils import now_to_rfc3339_str from stac_fastapi.types.core import LandingPageMixin @@ -23,6 +24,9 @@ def rfc3339_str_to_datetime(s: str) -> datetime: return ciso8601.parse_rfc3339(s) +database_logic = DatabaseLogic() + + @pytest.mark.asyncio async def test_create_and_delete_item(app_client, ctx, txn_client): """Test creation and deletion of a single item (transactions extension)""" @@ -773,7 +777,9 @@ async def test_conformance_classes_configurable(): # Update environment to avoid key error on client instantiation 
os.environ["READER_CONN_STRING"] = "testing" os.environ["WRITER_CONN_STRING"] = "testing" - client = CoreClient(base_conformance_classes=["this is a test"]) + client = CoreClient( + database=database_logic, base_conformance_classes=["this is a test"] + ) assert client.conformance_classes()[0] == "this is a test" From 57cfbb7f15cb766682135645f633722b25a6561c Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Wed, 31 Jan 2024 23:23:23 +0800 Subject: [PATCH 06/33] remove models folder --- stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/app.py | 2 +- .../elasticsearch/stac_fastapi/elasticsearch/models/__init__.py | 1 - .../elasticsearch/stac_fastapi/elasticsearch/models/search.py | 1 - 3 files changed, 1 insertion(+), 3 deletions(-) delete mode 100644 stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/models/__init__.py delete mode 100644 stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/models/search.py diff --git a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/app.py b/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/app.py index 760edb8f..0dc30d00 100644 --- a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/app.py +++ b/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/app.py @@ -4,10 +4,10 @@ from stac_fastapi.core.core import ( BulkTransactionsClient, CoreClient, + EsAsyncBaseFiltersClient, TransactionsClient, ) from stac_fastapi.elasticsearch.config import ElasticsearchSettings -from stac_fastapi.elasticsearch.core import EsAsyncBaseFiltersClient from stac_fastapi.elasticsearch.database_logic import ( DatabaseLogic, create_collection_index, diff --git a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/models/__init__.py b/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/models/__init__.py deleted file mode 100644 index d0748bcc..00000000 --- a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/models/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""stac_fastapi.elasticsearch.models module.""" diff --git 
a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/models/search.py b/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/models/search.py deleted file mode 100644 index 33b73b68..00000000 --- a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/models/search.py +++ /dev/null @@ -1 +0,0 @@ -"""Unused search model.""" From 1561dec94bc41eecad526b27ee573898374d78b1 Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Wed, 31 Jan 2024 23:35:42 +0800 Subject: [PATCH 07/33] remove unused types.search.py --- .../elasticsearch/database_logic.py | 10 ++- .../elasticsearch/types/search.py | 65 ------------------- 2 files changed, 4 insertions(+), 71 deletions(-) delete mode 100644 stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/types/search.py diff --git a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/database_logic.py b/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/database_logic.py index 336c8d07..02cfefa1 100644 --- a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/database_logic.py +++ b/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/database_logic.py @@ -9,12 +9,12 @@ from elasticsearch_dsl import Q, Search from elasticsearch import exceptions, helpers # type: ignore -from stac_fastapi.elasticsearch import serializers from stac_fastapi.elasticsearch.config import AsyncElasticsearchSettings from stac_fastapi.elasticsearch.config import ( ElasticsearchSettings as SyncElasticsearchSettings, ) from stac_fastapi.elasticsearch.extensions import filter +from stac_fastapi.elasticsearch.serializers import CollectionSerializer, ItemSerializer from stac_fastapi.types.errors import ConflictError, NotFoundError from stac_fastapi.types.stac import Collection, Item @@ -296,11 +296,9 @@ class DatabaseLogic: client = AsyncElasticsearchSettings().create_client sync_client = SyncElasticsearchSettings().create_client - item_serializer: Type[serializers.ItemSerializer] = attr.ib( - default=serializers.ItemSerializer - ) - collection_serializer: 
Type[serializers.CollectionSerializer] = attr.ib( - default=serializers.CollectionSerializer + item_serializer: Type[ItemSerializer] = attr.ib(default=ItemSerializer) + collection_serializer: Type[CollectionSerializer] = attr.ib( + default=CollectionSerializer ) """CORE LOGIC""" diff --git a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/types/search.py b/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/types/search.py deleted file mode 100644 index 26a2dbb6..00000000 --- a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/types/search.py +++ /dev/null @@ -1,65 +0,0 @@ -"""stac_fastapi.types.search module. - -# TODO: replace with stac-pydantic -""" - -import logging -from typing import Dict, Optional, Set, Union - -from stac_pydantic.api.extensions.fields import FieldsExtension as FieldsBase - -from stac_fastapi.types.config import Settings - -logger = logging.getLogger("uvicorn") -logger.setLevel(logging.INFO) -# Be careful: https://github.com/samuelcolvin/pydantic/issues/1423#issuecomment-642797287 -NumType = Union[float, int] - - -class FieldsExtension(FieldsBase): - """FieldsExtension. - - Attributes: - include: set of fields to include. - exclude: set of fields to exclude. - """ - - include: Optional[Set[str]] = set() - exclude: Optional[Set[str]] = set() - - @staticmethod - def _get_field_dict(fields: Optional[Set[str]]) -> Dict: - """Pydantic include/excludes notation. - - Internal method to create a dictionary for advanced include or exclude of pydantic fields on model export - Ref: https://pydantic-docs.helpmanual.io/usage/exporting_models/#advanced-include-and-exclude - """ - field_dict = {} - for field in fields or []: - if "." in field: - parent, key = field.split(".") - if parent not in field_dict: - field_dict[parent] = {key} - else: - field_dict[parent].add(key) - else: - field_dict[field] = ... # type:ignore - return field_dict - - @property - def filter_fields(self) -> Dict: - """Create pydantic include/exclude expression. 
- - Create dictionary of fields to include/exclude on model export based on the included and excluded fields passed - to the API - Ref: https://pydantic-docs.helpmanual.io/usage/exporting_models/#advanced-include-and-exclude - """ - # Always include default_includes, even if they - # exist in the exclude list. - include = (self.include or set()) - (self.exclude or set()) - include |= Settings.get().default_includes or set() - - return { - "include": self._get_field_dict(include), - "exclude": self._get_field_dict(self.exclude), - } From cea47352b5c459be1912354628af824efc905737 Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Wed, 31 Jan 2024 23:39:38 +0800 Subject: [PATCH 08/33] move extensions folder --- .../stac_fastapi/core}/extensions/__init__.py | 0 .../stac_fastapi/core}/extensions/filter.py | 0 .../stac_fastapi/core}/extensions/query.py | 0 .../elasticsearch/stac_fastapi/elasticsearch/database_logic.py | 2 +- stac_fastapi/elasticsearch/tests/conftest.py | 2 +- 5 files changed, 2 insertions(+), 2 deletions(-) rename stac_fastapi/{elasticsearch/stac_fastapi/elasticsearch => core/stac_fastapi/core}/extensions/__init__.py (100%) rename stac_fastapi/{elasticsearch/stac_fastapi/elasticsearch => core/stac_fastapi/core}/extensions/filter.py (100%) rename stac_fastapi/{elasticsearch/stac_fastapi/elasticsearch => core/stac_fastapi/core}/extensions/query.py (100%) diff --git a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/extensions/__init__.py b/stac_fastapi/core/stac_fastapi/core/extensions/__init__.py similarity index 100% rename from stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/extensions/__init__.py rename to stac_fastapi/core/stac_fastapi/core/extensions/__init__.py diff --git a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/extensions/filter.py b/stac_fastapi/core/stac_fastapi/core/extensions/filter.py similarity index 100% rename from stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/extensions/filter.py rename to 
stac_fastapi/core/stac_fastapi/core/extensions/filter.py diff --git a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/extensions/query.py b/stac_fastapi/core/stac_fastapi/core/extensions/query.py similarity index 100% rename from stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/extensions/query.py rename to stac_fastapi/core/stac_fastapi/core/extensions/query.py diff --git a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/database_logic.py b/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/database_logic.py index 02cfefa1..ed812c46 100644 --- a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/database_logic.py +++ b/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/database_logic.py @@ -9,11 +9,11 @@ from elasticsearch_dsl import Q, Search from elasticsearch import exceptions, helpers # type: ignore +from stac_fastapi.core.extensions import filter from stac_fastapi.elasticsearch.config import AsyncElasticsearchSettings from stac_fastapi.elasticsearch.config import ( ElasticsearchSettings as SyncElasticsearchSettings, ) -from stac_fastapi.elasticsearch.extensions import filter from stac_fastapi.elasticsearch.serializers import CollectionSerializer, ItemSerializer from stac_fastapi.types.errors import ConflictError, NotFoundError from stac_fastapi.types.stac import Collection, Item diff --git a/stac_fastapi/elasticsearch/tests/conftest.py b/stac_fastapi/elasticsearch/tests/conftest.py index 6d4a8b5e..49afa9bc 100644 --- a/stac_fastapi/elasticsearch/tests/conftest.py +++ b/stac_fastapi/elasticsearch/tests/conftest.py @@ -15,6 +15,7 @@ CoreClient, TransactionsClient, ) +from stac_fastapi.core.extensions import QueryExtension from stac_fastapi.elasticsearch.config import ( AsyncElasticsearchSettings, ElasticsearchSettings, @@ -23,7 +24,6 @@ DatabaseLogic, create_collection_index, ) -from stac_fastapi.elasticsearch.extensions import QueryExtension from stac_fastapi.extensions.core import ( # FieldsExtension, ContextExtension, 
FieldsExtension, From 63aeb07251b7649ce2976e7749bf19afeaec41bd Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Wed, 31 Jan 2024 23:57:32 +0800 Subject: [PATCH 09/33] move serializers, update imports --- stac_fastapi/core/setup.py | 20 +-- .../stac_fastapi/core}/datetime_utils.py | 0 .../core/stac_fastapi/core/serializers.py | 2 +- .../stac_fastapi/elasticsearch/app.py | 2 +- .../elasticsearch/database_logic.py | 2 +- .../stac_fastapi/elasticsearch/serializers.py | 162 ------------------ .../tests/resources/test_item.py | 2 +- 7 files changed, 5 insertions(+), 185 deletions(-) rename stac_fastapi/{elasticsearch/stac_fastapi/elasticsearch => core/stac_fastapi/core}/datetime_utils.py (100%) delete mode 100644 stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/serializers.py diff --git a/stac_fastapi/core/setup.py b/stac_fastapi/core/setup.py index 4046a1c2..27b17be5 100644 --- a/stac_fastapi/core/setup.py +++ b/stac_fastapi/core/setup.py @@ -1,4 +1,4 @@ -"""stac_fastapi: elasticsearch module.""" +"""stac_fastapi: core elasticsearch/ opensearch module.""" from setuptools import find_namespace_packages, setup @@ -13,31 +13,13 @@ "stac-fastapi.types==2.4.9", "stac-fastapi.api==2.4.9", "stac-fastapi.extensions==2.4.9", - # "elasticsearch[async]==8.11.0", - # "elasticsearch-dsl==8.11.0", "pystac[validation]", - # "uvicorn", "orjson", "overrides", - # "starlette", "geojson-pydantic", "pygeofilter==0.2.1", ] -# extra_reqs = { -# "dev": [ -# "pytest", -# "pytest-cov", -# "pytest-asyncio", -# "pre-commit", -# "requests", -# "ciso8601", -# "httpx", -# ], -# "docs": ["mkdocs", "mkdocs-material", "pdocs"], -# "server": ["uvicorn[standard]==0.19.0"], -# } - setup( name="stac-fastapi.core", description="Core library for the Elasticsearch and Opensearch stac-fastapi backends.", diff --git a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/datetime_utils.py b/stac_fastapi/core/stac_fastapi/core/datetime_utils.py similarity index 100% rename from 
stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/datetime_utils.py rename to stac_fastapi/core/stac_fastapi/core/datetime_utils.py diff --git a/stac_fastapi/core/stac_fastapi/core/serializers.py b/stac_fastapi/core/stac_fastapi/core/serializers.py index 725e8f65..db7353c2 100644 --- a/stac_fastapi/core/stac_fastapi/core/serializers.py +++ b/stac_fastapi/core/stac_fastapi/core/serializers.py @@ -4,7 +4,7 @@ import attr -from stac_fastapi.elasticsearch.datetime_utils import now_to_rfc3339_str +from stac_fastapi.core.datetime_utils import now_to_rfc3339_str from stac_fastapi.types import stac as stac_types from stac_fastapi.types.links import CollectionLinks, ItemLinks, resolve_links diff --git a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/app.py b/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/app.py index 0dc30d00..c1bc2d3d 100644 --- a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/app.py +++ b/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/app.py @@ -7,12 +7,12 @@ EsAsyncBaseFiltersClient, TransactionsClient, ) +from stac_fastapi.core.extensions import QueryExtension from stac_fastapi.elasticsearch.config import ElasticsearchSettings from stac_fastapi.elasticsearch.database_logic import ( DatabaseLogic, create_collection_index, ) -from stac_fastapi.elasticsearch.extensions import QueryExtension from stac_fastapi.elasticsearch.session import Session from stac_fastapi.extensions.core import ( ContextExtension, diff --git a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/database_logic.py b/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/database_logic.py index ed812c46..933312f2 100644 --- a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/database_logic.py +++ b/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/database_logic.py @@ -10,11 +10,11 @@ from elasticsearch import exceptions, helpers # type: ignore from stac_fastapi.core.extensions import filter +from stac_fastapi.core.serializers import 
CollectionSerializer, ItemSerializer from stac_fastapi.elasticsearch.config import AsyncElasticsearchSettings from stac_fastapi.elasticsearch.config import ( ElasticsearchSettings as SyncElasticsearchSettings, ) -from stac_fastapi.elasticsearch.serializers import CollectionSerializer, ItemSerializer from stac_fastapi.types.errors import ConflictError, NotFoundError from stac_fastapi.types.stac import Collection, Item diff --git a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/serializers.py b/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/serializers.py deleted file mode 100644 index 725e8f65..00000000 --- a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/serializers.py +++ /dev/null @@ -1,162 +0,0 @@ -"""Serializers.""" -import abc -from typing import Any - -import attr - -from stac_fastapi.elasticsearch.datetime_utils import now_to_rfc3339_str -from stac_fastapi.types import stac as stac_types -from stac_fastapi.types.links import CollectionLinks, ItemLinks, resolve_links - - -@attr.s -class Serializer(abc.ABC): - """Defines serialization methods between the API and the data model. - - This class is meant to be subclassed and implemented by specific serializers for different STAC objects (e.g. Item, Collection). - """ - - @classmethod - @abc.abstractmethod - def db_to_stac(cls, item: dict, base_url: str) -> Any: - """Transform database model to STAC object. - - Arguments: - item (dict): A dictionary representing the database model. - base_url (str): The base URL of the STAC API. - - Returns: - Any: A STAC object, e.g. an `Item` or `Collection`, representing the input `item`. - """ - ... - - @classmethod - @abc.abstractmethod - def stac_to_db(cls, stac_object: Any, base_url: str) -> dict: - """Transform STAC object to database model. - - Arguments: - stac_object (Any): A STAC object, e.g. an `Item` or `Collection`. - base_url (str): The base URL of the STAC API. - - Returns: - dict: A dictionary representing the database model. - """ - ... 
- - -class ItemSerializer(Serializer): - """Serialization methods for STAC items.""" - - @classmethod - def stac_to_db(cls, stac_data: stac_types.Item, base_url: str) -> stac_types.Item: - """Transform STAC item to database-ready STAC item. - - Args: - stac_data (stac_types.Item): The STAC item object to be transformed. - base_url (str): The base URL for the STAC API. - - Returns: - stac_types.Item: The database-ready STAC item object. - """ - item_links = ItemLinks( - collection_id=stac_data["collection"], - item_id=stac_data["id"], - base_url=base_url, - ).create_links() - stac_data["links"] = item_links - - now = now_to_rfc3339_str() - if "created" not in stac_data["properties"]: - stac_data["properties"]["created"] = now - stac_data["properties"]["updated"] = now - return stac_data - - @classmethod - def db_to_stac(cls, item: dict, base_url: str) -> stac_types.Item: - """Transform database-ready STAC item to STAC item. - - Args: - item (dict): The database-ready STAC item to be transformed. - base_url (str): The base URL for the STAC API. - - Returns: - stac_types.Item: The STAC item object. 
- """ - item_id = item["id"] - collection_id = item["collection"] - item_links = ItemLinks( - collection_id=collection_id, item_id=item_id, base_url=base_url - ).create_links() - - original_links = item.get("links", []) - if original_links: - item_links += resolve_links(original_links, base_url) - - return stac_types.Item( - type="Feature", - stac_version=item.get("stac_version", ""), - stac_extensions=item.get("stac_extensions", []), - id=item_id, - collection=item.get("collection", ""), - geometry=item.get("geometry", {}), - bbox=item.get("bbox", []), - properties=item.get("properties", {}), - links=item_links, - assets=item.get("assets", {}), - ) - - -class CollectionSerializer(Serializer): - """Serialization methods for STAC collections.""" - - @classmethod - def db_to_stac(cls, collection: dict, base_url: str) -> stac_types.Collection: - """Transform database model to STAC collection. - - Args: - collection (dict): The collection data in dictionary form, extracted from the database. - base_url (str): The base URL for the collection. - - Returns: - stac_types.Collection: The STAC collection object. 
- """ - # Use dictionary unpacking to extract values from the collection dictionary - collection_id = collection.get("id") - stac_extensions = collection.get("stac_extensions", []) - stac_version = collection.get("stac_version", "") - title = collection.get("title", "") - description = collection.get("description", "") - keywords = collection.get("keywords", []) - license = collection.get("license", "") - providers = collection.get("providers", {}) - summaries = collection.get("summaries", {}) - extent = collection.get("extent", {}) - collection_assets = collection.get("assets", {}) - - # Create the collection links using CollectionLinks - collection_links = CollectionLinks( - collection_id=collection_id, base_url=base_url - ).create_links() - - # Add any additional links from the collection dictionary - original_links = collection.get("links") - if original_links: - collection_links += resolve_links(original_links, base_url) - - # Return the stac_types.Collection object - return stac_types.Collection( - type="Collection", - id=collection_id, - stac_extensions=stac_extensions, - stac_version=stac_version, - title=title, - description=description, - keywords=keywords, - license=license, - providers=providers, - summaries=summaries, - extent=extent, - links=collection_links, - assets=collection_assets, - ) diff --git a/stac_fastapi/elasticsearch/tests/resources/test_item.py b/stac_fastapi/elasticsearch/tests/resources/test_item.py index c63be048..e62da8b8 100644 --- a/stac_fastapi/elasticsearch/tests/resources/test_item.py +++ b/stac_fastapi/elasticsearch/tests/resources/test_item.py @@ -13,8 +13,8 @@ from pystac.utils import datetime_to_str from stac_fastapi.core.core import CoreClient +from stac_fastapi.core.datetime_utils import now_to_rfc3339_str from stac_fastapi.elasticsearch.database_logic import DatabaseLogic -from stac_fastapi.elasticsearch.datetime_utils import now_to_rfc3339_str from stac_fastapi.types.core import LandingPageMixin from ..conftest import 
create_item, refresh_indices From d82267d4284cd22f4163543fe95ae77b8f8503a2 Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Thu, 1 Feb 2024 00:16:32 +0800 Subject: [PATCH 10/33] add scratch database logic to opensearch folder --- .../stac_fastapi/opensearch/database_logic.py | 850 ++++++++++++++++++ 1 file changed, 850 insertions(+) create mode 100644 stac_fastapi/opensearch/stac_fastapi/opensearch/database_logic.py diff --git a/stac_fastapi/opensearch/stac_fastapi/opensearch/database_logic.py b/stac_fastapi/opensearch/stac_fastapi/opensearch/database_logic.py new file mode 100644 index 00000000..933312f2 --- /dev/null +++ b/stac_fastapi/opensearch/stac_fastapi/opensearch/database_logic.py @@ -0,0 +1,850 @@ +"""Database logic.""" +import asyncio +import logging +import os +from base64 import urlsafe_b64decode, urlsafe_b64encode +from typing import Any, Dict, Iterable, List, Optional, Protocol, Tuple, Type, Union + +import attr +from elasticsearch_dsl import Q, Search + +from elasticsearch import exceptions, helpers # type: ignore +from stac_fastapi.core.extensions import filter +from stac_fastapi.core.serializers import CollectionSerializer, ItemSerializer +from stac_fastapi.elasticsearch.config import AsyncElasticsearchSettings +from stac_fastapi.elasticsearch.config import ( + ElasticsearchSettings as SyncElasticsearchSettings, +) +from stac_fastapi.types.errors import ConflictError, NotFoundError +from stac_fastapi.types.stac import Collection, Item + +logger = logging.getLogger(__name__) + +NumType = Union[float, int] + +COLLECTIONS_INDEX = os.getenv("STAC_COLLECTIONS_INDEX", "collections") +ITEMS_INDEX_PREFIX = os.getenv("STAC_ITEMS_INDEX_PREFIX", "items_") +ES_INDEX_NAME_UNSUPPORTED_CHARS = { + "\\", + "/", + "*", + "?", + '"', + "<", + ">", + "|", + " ", + ",", + "#", + ":", +} + +ITEM_INDICES = f"{ITEMS_INDEX_PREFIX}*,-*kibana*,-{COLLECTIONS_INDEX}*" + +DEFAULT_SORT = { + "properties.datetime": {"order": "desc"}, + "id": {"order": "desc"}, + "collection": 
{"order": "desc"}, +} + +ES_ITEMS_SETTINGS = { + "index": { + "sort.field": list(DEFAULT_SORT.keys()), + "sort.order": [v["order"] for v in DEFAULT_SORT.values()], + } +} + +ES_MAPPINGS_DYNAMIC_TEMPLATES = [ + # Common https://github.com/radiantearth/stac-spec/blob/master/item-spec/common-metadata.md + { + "descriptions": { + "match_mapping_type": "string", + "match": "description", + "mapping": {"type": "text"}, + } + }, + { + "titles": { + "match_mapping_type": "string", + "match": "title", + "mapping": {"type": "text"}, + } + }, + # Projection Extension https://github.com/stac-extensions/projection + {"proj_epsg": {"match": "proj:epsg", "mapping": {"type": "integer"}}}, + { + "proj_projjson": { + "match": "proj:projjson", + "mapping": {"type": "object", "enabled": False}, + } + }, + { + "proj_centroid": { + "match": "proj:centroid", + "mapping": {"type": "geo_point"}, + } + }, + { + "proj_geometry": { + "match": "proj:geometry", + "mapping": {"type": "object", "enabled": False}, + } + }, + { + "no_index_href": { + "match": "href", + "mapping": {"type": "text", "index": False}, + } + }, + # Default all other strings not otherwise specified to keyword + {"strings": {"match_mapping_type": "string", "mapping": {"type": "keyword"}}}, + {"numerics": {"match_mapping_type": "long", "mapping": {"type": "float"}}}, +] + +ES_ITEMS_MAPPINGS = { + "numeric_detection": False, + "dynamic_templates": ES_MAPPINGS_DYNAMIC_TEMPLATES, + "properties": { + "id": {"type": "keyword"}, + "collection": {"type": "keyword"}, + "geometry": {"type": "geo_shape"}, + "assets": {"type": "object", "enabled": False}, + "links": {"type": "object", "enabled": False}, + "properties": { + "type": "object", + "properties": { + # Common https://github.com/radiantearth/stac-spec/blob/master/item-spec/common-metadata.md + "datetime": {"type": "date"}, + "start_datetime": {"type": "date"}, + "end_datetime": {"type": "date"}, + "created": {"type": "date"}, + "updated": {"type": "date"}, + # Satellite 
Extension https://github.com/stac-extensions/sat + "sat:absolute_orbit": {"type": "integer"}, + "sat:relative_orbit": {"type": "integer"}, + }, + }, + }, +} + +ES_COLLECTIONS_MAPPINGS = { + "numeric_detection": False, + "dynamic_templates": ES_MAPPINGS_DYNAMIC_TEMPLATES, + "properties": { + "extent.spatial.bbox": {"type": "long"}, + "extent.temporal.interval": {"type": "date"}, + "providers": {"type": "object", "enabled": False}, + "links": {"type": "object", "enabled": False}, + "item_assets": {"type": "object", "enabled": False}, + }, +} + + +def index_by_collection_id(collection_id: str) -> str: + """ + Translate a collection id into an Elasticsearch index name. + + Args: + collection_id (str): The collection id to translate into an index name. + + Returns: + str: The index name derived from the collection id. + """ + return f"{ITEMS_INDEX_PREFIX}{''.join(c for c in collection_id.lower() if c not in ES_INDEX_NAME_UNSUPPORTED_CHARS)}" + + +def indices(collection_ids: Optional[List[str]]) -> str: + """ + Get a comma-separated string of index names for a given list of collection ids. + + Args: + collection_ids: A list of collection ids. + + Returns: + A string of comma-separated index names. If `collection_ids` is None, returns the default indices. + """ + if collection_ids is None: + return ITEM_INDICES + else: + return ",".join([index_by_collection_id(c) for c in collection_ids]) + + +async def create_collection_index() -> None: + """ + Create the index for a Collection. + + Returns: + None + + """ + client = AsyncElasticsearchSettings().create_client + + await client.options(ignore_status=400).indices.create( + index=f"{COLLECTIONS_INDEX}-000001", + aliases={COLLECTIONS_INDEX: {}}, + mappings=ES_COLLECTIONS_MAPPINGS, + ) + await client.close() + + +async def create_item_index(collection_id: str): + """ + Create the index for Items. + + Args: + collection_id (str): Collection identifier. 
+ + Returns: + None + + """ + client = AsyncElasticsearchSettings().create_client + index_name = index_by_collection_id(collection_id) + + await client.options(ignore_status=400).indices.create( + index=f"{index_by_collection_id(collection_id)}-000001", + aliases={index_name: {}}, + mappings=ES_ITEMS_MAPPINGS, + settings=ES_ITEMS_SETTINGS, + ) + await client.close() + + +async def delete_item_index(collection_id: str): + """Delete the index for items in a collection. + + Args: + collection_id (str): The ID of the collection whose items index will be deleted. + """ + client = AsyncElasticsearchSettings().create_client + + name = index_by_collection_id(collection_id) + resolved = await client.indices.resolve_index(name=name) + if "aliases" in resolved and resolved["aliases"]: + [alias] = resolved["aliases"] + await client.indices.delete_alias(index=alias["indices"], name=alias["name"]) + await client.indices.delete(index=alias["indices"]) + else: + await client.indices.delete(index=name) + await client.close() + + +def bbox2polygon(b0: float, b1: float, b2: float, b3: float) -> List[List[List[float]]]: + """Transform a bounding box represented by its four coordinates `b0`, `b1`, `b2`, and `b3` into a polygon. + + Args: + b0 (float): The x-coordinate of the lower-left corner of the bounding box. + b1 (float): The y-coordinate of the lower-left corner of the bounding box. + b2 (float): The x-coordinate of the upper-right corner of the bounding box. + b3 (float): The y-coordinate of the upper-right corner of the bounding box. + + Returns: + List[List[List[float]]]: A polygon represented as a list of lists of coordinates. + """ + return [[[b0, b1], [b2, b1], [b2, b3], [b0, b3], [b0, b1]]] + + +def mk_item_id(item_id: str, collection_id: str): + """Create the document id for an Item in Elasticsearch. + + Args: + item_id (str): The id of the Item. + collection_id (str): The id of the Collection that the Item belongs to. 
+ + Returns: + str: The document id for the Item, combining the Item id and the Collection id, separated by a `|` character. + """ + return f"{item_id}|{collection_id}" + + +def mk_actions(collection_id: str, processed_items: List[Item]): + """Create Elasticsearch bulk actions for a list of processed items. + + Args: + collection_id (str): The identifier for the collection the items belong to. + processed_items (List[Item]): The list of processed items to be bulk indexed. + + Returns: + List[Dict[str, Union[str, Dict]]]: The list of bulk actions to be executed, + each action being a dictionary with the following keys: + - `_index`: the index to store the document in. + - `_id`: the document's identifier. + - `_source`: the source of the document. + """ + return [ + { + "_index": index_by_collection_id(collection_id), + "_id": mk_item_id(item["id"], item["collection"]), + "_source": item, + } + for item in processed_items + ] + + +# stac_pydantic classes extend _GeometryBase, which doesn't have a type field, +# So create our own Protocol for typing +# Union[ Point, MultiPoint, LineString, MultiLineString, Polygon, MultiPolygon, GeometryCollection] +class Geometry(Protocol): # noqa + type: str + coordinates: Any + + +@attr.s +class DatabaseLogic: + """Database logic.""" + + client = AsyncElasticsearchSettings().create_client + sync_client = SyncElasticsearchSettings().create_client + + item_serializer: Type[ItemSerializer] = attr.ib(default=ItemSerializer) + collection_serializer: Type[CollectionSerializer] = attr.ib( + default=CollectionSerializer + ) + + """CORE LOGIC""" + + async def get_all_collections( + self, token: Optional[str], limit: int + ) -> Iterable[Dict[str, Any]]: + """Retrieve a list of all collections from the database. + + Args: + token (Optional[str]): The token used to return the next set of results. 
+ limit (int): Number of results to return + + Returns: + collections (Iterable[Dict[str, Any]]): A list of dictionaries containing the source data for each collection. + + Notes: + The collections are retrieved from the Elasticsearch database using the `client.search` method, + with the `COLLECTIONS_INDEX` as the target index and `size=limit` to retrieve records. + The result is a generator of dictionaries containing the source data for each collection. + """ + search_after = None + if token: + search_after = urlsafe_b64decode(token.encode()).decode().split(",") + collections = await self.client.search( + index=COLLECTIONS_INDEX, + search_after=search_after, + size=limit, + sort={"id": {"order": "asc"}}, + ) + hits = collections["hits"]["hits"] + return hits + + async def get_one_item(self, collection_id: str, item_id: str) -> Dict: + """Retrieve a single item from the database. + + Args: + collection_id (str): The id of the Collection that the Item belongs to. + item_id (str): The id of the Item. + + Returns: + item (Dict): A dictionary containing the source data for the Item. + + Raises: + NotFoundError: If the specified Item does not exist in the Collection. + + Notes: + The Item is retrieved from the Elasticsearch database using the `client.get` method, + with the index for the Collection as the target index and the combined `mk_item_id` as the document id. 
+ """ + try: + item = await self.client.get( + index=index_by_collection_id(collection_id), + id=mk_item_id(item_id, collection_id), + ) + except exceptions.NotFoundError: + raise NotFoundError( + f"Item {item_id} does not exist in Collection {collection_id}" + ) + return item["_source"] + + @staticmethod + def make_search(): + """Database logic to create a Search instance.""" + return Search().sort(*DEFAULT_SORT) + + @staticmethod + def apply_ids_filter(search: Search, item_ids: List[str]): + """Database logic to search a list of STAC item ids.""" + return search.filter("terms", id=item_ids) + + @staticmethod + def apply_collections_filter(search: Search, collection_ids: List[str]): + """Database logic to search a list of STAC collection ids.""" + return search.filter("terms", collection=collection_ids) + + @staticmethod + def apply_datetime_filter(search: Search, datetime_search): + """Apply a filter to search based on datetime field. + + Args: + search (Search): The search object to filter. + datetime_search (dict): The datetime filter criteria. + + Returns: + Search: The filtered search object. + """ + if "eq" in datetime_search: + search = search.filter( + "term", **{"properties__datetime": datetime_search["eq"]} + ) + else: + search = search.filter( + "range", properties__datetime={"lte": datetime_search["lte"]} + ) + search = search.filter( + "range", properties__datetime={"gte": datetime_search["gte"]} + ) + return search + + @staticmethod + def apply_bbox_filter(search: Search, bbox: List): + """Filter search results based on bounding box. + + Args: + search (Search): The search object to apply the filter to. + bbox (List): The bounding box coordinates, represented as a list of four values [minx, miny, maxx, maxy]. + + Returns: + search (Search): The search object with the bounding box filter applied. 
+ + Notes: + The bounding box is transformed into a polygon using the `bbox2polygon` function and + a geo_shape filter is added to the search object, set to intersect with the specified polygon. + """ + return search.filter( + Q( + { + "geo_shape": { + "geometry": { + "shape": { + "type": "polygon", + "coordinates": bbox2polygon(*bbox), + }, + "relation": "intersects", + } + } + } + ) + ) + + @staticmethod + def apply_intersects_filter( + search: Search, + intersects: Geometry, + ): + """Filter search results based on intersecting geometry. + + Args: + search (Search): The search object to apply the filter to. + intersects (Geometry): The intersecting geometry, represented as a GeoJSON-like object. + + Returns: + search (Search): The search object with the intersecting geometry filter applied. + + Notes: + A geo_shape filter is added to the search object, set to intersect with the specified geometry. + """ + return search.filter( + Q( + { + "geo_shape": { + "geometry": { + "shape": { + "type": intersects.type.lower(), + "coordinates": intersects.coordinates, + }, + "relation": "intersects", + } + } + } + ) + ) + + @staticmethod + def apply_stacql_filter(search: Search, op: str, field: str, value: float): + """Filter search results based on a comparison between a field and a value. + + Args: + search (Search): The search object to apply the filter to. + op (str): The comparison operator to use. Can be 'eq' (equal), 'gt' (greater than), 'gte' (greater than or equal), + 'lt' (less than), or 'lte' (less than or equal). + field (str): The field to perform the comparison on. + value (float): The value to compare the field against. + + Returns: + search (Search): The search object with the specified filter applied. 
+ """ + if op != "eq": + key_filter = {field: {f"{op}": value}} + search = search.filter(Q("range", **key_filter)) + else: + search = search.filter("term", **{field: value}) + + return search + + @staticmethod + def apply_cql2_filter(search: Search, _filter: Optional[Dict[str, Any]]): + """Database logic to perform query for search endpoint.""" + if _filter is not None: + search = search.filter(filter.Clause.parse_obj(_filter).to_es()) + return search + + @staticmethod + def populate_sort(sortby: List) -> Optional[Dict[str, Dict[str, str]]]: + """Database logic to sort search instance.""" + if sortby: + return {s.field: {"order": s.direction} for s in sortby} + else: + return None + + async def execute_search( + self, + search: Search, + limit: int, + token: Optional[str], + sort: Optional[Dict[str, Dict[str, str]]], + collection_ids: Optional[List[str]], + ignore_unavailable: bool = True, + ) -> Tuple[Iterable[Dict[str, Any]], Optional[int], Optional[str]]: + """Execute a search query with limit and other optional parameters. + + Args: + search (Search): The search query to be executed. + limit (int): The maximum number of results to be returned. + token (Optional[str]): The token used to return the next set of results. + sort (Optional[Dict[str, Dict[str, str]]]): Specifies how the results should be sorted. + collection_ids (Optional[List[str]]): The collection ids to search. + ignore_unavailable (bool, optional): Whether to ignore unavailable collections. Defaults to True. + + Returns: + Tuple[Iterable[Dict[str, Any]], Optional[int], Optional[str]]: A tuple containing: + - An iterable of search results, where each result is a dictionary with keys and values representing the + fields and values of each document. + - The total number of results (if the count could be computed), or None if the count could not be + computed. + - The token to be used to retrieve the next set of results, or None if there are no more results. 
+ + Raises: + NotFoundError: If the collections specified in `collection_ids` do not exist. + """ + search_after = None + if token: + search_after = urlsafe_b64decode(token.encode()).decode().split(",") + + query = search.query.to_dict() if search.query else None + + index_param = indices(collection_ids) + + search_task = asyncio.create_task( + self.client.search( + index=index_param, + ignore_unavailable=ignore_unavailable, + query=query, + sort=sort or DEFAULT_SORT, + search_after=search_after, + size=limit, + ) + ) + + count_task = asyncio.create_task( + self.client.count( + index=index_param, + ignore_unavailable=ignore_unavailable, + body=search.to_dict(count=True), + ) + ) + + try: + es_response = await search_task + except exceptions.NotFoundError: + raise NotFoundError(f"Collections '{collection_ids}' do not exist") + + hits = es_response["hits"]["hits"] + items = (hit["_source"] for hit in hits) + + next_token = None + if hits and (sort_array := hits[-1].get("sort")): + next_token = urlsafe_b64encode( + ",".join([str(x) for x in sort_array]).encode() + ).decode() + + # (1) count should not block returning results, so don't wait for it to be done + # (2) don't cancel the task so that it will populate the ES cache for subsequent counts + maybe_count = None + if count_task.done(): + try: + maybe_count = count_task.result().get("count") + except Exception as e: + logger.error(f"Count task failed: {e}") + + return items, maybe_count, next_token + + """ TRANSACTION LOGIC """ + + async def check_collection_exists(self, collection_id: str): + """Database logic to check if a collection exists.""" + if not await self.client.exists(index=COLLECTIONS_INDEX, id=collection_id): + raise NotFoundError(f"Collection {collection_id} does not exist") + + async def prep_create_item( + self, item: Item, base_url: str, exist_ok: bool = False + ) -> Item: + """ + Preps an item for insertion into the database. + + Args: + item (Item): The item to be prepped for insertion. 
+ base_url (str): The base URL used to create the item's self URL. + exist_ok (bool): Indicates whether the item can exist already. + + Returns: + Item: The prepped item. + + Raises: + ConflictError: If the item already exists in the database. + + """ + await self.check_collection_exists(collection_id=item["collection"]) + + if not exist_ok and await self.client.exists( + index=index_by_collection_id(item["collection"]), + id=mk_item_id(item["id"], item["collection"]), + ): + raise ConflictError( + f"Item {item['id']} in collection {item['collection']} already exists" + ) + + return self.item_serializer.stac_to_db(item, base_url) + + def sync_prep_create_item( + self, item: Item, base_url: str, exist_ok: bool = False + ) -> Item: + """ + Prepare an item for insertion into the database. + + This method performs pre-insertion preparation on the given `item`, + such as checking if the collection the item belongs to exists, + and optionally verifying that an item with the same ID does not already exist in the database. + + Args: + item (Item): The item to be inserted into the database. + base_url (str): The base URL used for constructing URLs for the item. + exist_ok (bool): Indicates whether the item can exist already. + + Returns: + Item: The item after preparation is done. + + Raises: + NotFoundError: If the collection that the item belongs to does not exist in the database. + ConflictError: If an item with the same ID already exists in the collection. 
+ """ + item_id = item["id"] + collection_id = item["collection"] + if not self.sync_client.exists(index=COLLECTIONS_INDEX, id=collection_id): + raise NotFoundError(f"Collection {collection_id} does not exist") + + if not exist_ok and self.sync_client.exists( + index=index_by_collection_id(collection_id), + id=mk_item_id(item_id, collection_id), + ): + raise ConflictError( + f"Item {item_id} in collection {collection_id} already exists" + ) + + return self.item_serializer.stac_to_db(item, base_url) + + async def create_item(self, item: Item, refresh: bool = False): + """Database logic for creating one item. + + Args: + item (Item): The item to be created. + refresh (bool, optional): Refresh the index after performing the operation. Defaults to False. + + Raises: + ConflictError: If the item already exists in the database. + + Returns: + None + """ + # todo: check if collection exists, but cache + item_id = item["id"] + collection_id = item["collection"] + es_resp = await self.client.index( + index=index_by_collection_id(collection_id), + id=mk_item_id(item_id, collection_id), + document=item, + refresh=refresh, + ) + + if (meta := es_resp.get("meta")) and meta.get("status") == 409: + raise ConflictError( + f"Item {item_id} in collection {collection_id} already exists" + ) + + async def delete_item( + self, item_id: str, collection_id: str, refresh: bool = False + ): + """Delete a single item from the database. + + Args: + item_id (str): The id of the Item to be deleted. + collection_id (str): The id of the Collection that the Item belongs to. + refresh (bool, optional): Whether to refresh the index after the deletion. Default is False. + + Raises: + NotFoundError: If the Item does not exist in the database. 
+ """ + try: + await self.client.delete( + index=index_by_collection_id(collection_id), + id=mk_item_id(item_id, collection_id), + refresh=refresh, + ) + except exceptions.NotFoundError: + raise NotFoundError( + f"Item {item_id} in collection {collection_id} not found" + ) + + async def create_collection(self, collection: Collection, refresh: bool = False): + """Create a single collection in the database. + + Args: + collection (Collection): The Collection object to be created. + refresh (bool, optional): Whether to refresh the index after the creation. Default is False. + + Raises: + ConflictError: If a Collection with the same id already exists in the database. + + Notes: + A new index is created for the items in the Collection using the `create_item_index` function. + """ + collection_id = collection["id"] + + if await self.client.exists(index=COLLECTIONS_INDEX, id=collection_id): + raise ConflictError(f"Collection {collection_id} already exists") + + await self.client.index( + index=COLLECTIONS_INDEX, + id=collection_id, + document=collection, + refresh=refresh, + ) + + await create_item_index(collection_id) + + async def find_collection(self, collection_id: str) -> Collection: + """Find and return a collection from the database. + + Args: + self: The instance of the object calling this function. + collection_id (str): The ID of the collection to be found. + + Returns: + Collection: The found collection, represented as a `Collection` object. + + Raises: + NotFoundError: If the collection with the given `collection_id` is not found in the database. + + Notes: + This function searches for a collection in the database using the specified `collection_id` and returns the found + collection as a `Collection` object. If the collection is not found, a `NotFoundError` is raised. 
+ """ + try: + collection = await self.client.get( + index=COLLECTIONS_INDEX, id=collection_id + ) + except exceptions.NotFoundError: + raise NotFoundError(f"Collection {collection_id} not found") + + return collection["_source"] + + async def delete_collection(self, collection_id: str, refresh: bool = False): + """Delete a collection from the database. + + Parameters: + self: The instance of the object calling this function. + collection_id (str): The ID of the collection to be deleted. + refresh (bool): Whether to refresh the index after the deletion (default: False). + + Raises: + NotFoundError: If the collection with the given `collection_id` is not found in the database. + + Notes: + This function first verifies that the collection with the specified `collection_id` exists in the database, and then + deletes the collection. If `refresh` is set to True, the index is refreshed after the deletion. Additionally, this + function also calls `delete_item_index` to delete the index for the items in the collection. + """ + await self.find_collection(collection_id=collection_id) + await self.client.delete( + index=COLLECTIONS_INDEX, id=collection_id, refresh=refresh + ) + await delete_item_index(collection_id) + + async def bulk_async( + self, collection_id: str, processed_items: List[Item], refresh: bool = False + ) -> None: + """Perform a bulk insert of items into the database asynchronously. + + Args: + self: The instance of the object calling this function. + collection_id (str): The ID of the collection to which the items belong. + processed_items (List[Item]): A list of `Item` objects to be inserted into the database. + refresh (bool): Whether to refresh the index after the bulk insert (default: False). + + Notes: + This function performs a bulk insert of `processed_items` into the database using the specified `collection_id`. The + insert is performed asynchronously, and the event loop is used to run the operation in a separate executor. 
The + `mk_actions` function is called to generate a list of actions for the bulk insert. If `refresh` is set to True, the + index is refreshed after the bulk insert. The function does not return any value. + """ + await helpers.async_bulk( + self.client, + mk_actions(collection_id, processed_items), + refresh=refresh, + raise_on_error=False, + ) + + def bulk_sync( + self, collection_id: str, processed_items: List[Item], refresh: bool = False + ) -> None: + """Perform a bulk insert of items into the database synchronously. + + Args: + self: The instance of the object calling this function. + collection_id (str): The ID of the collection to which the items belong. + processed_items (List[Item]): A list of `Item` objects to be inserted into the database. + refresh (bool): Whether to refresh the index after the bulk insert (default: False). + + Notes: + This function performs a bulk insert of `processed_items` into the database using the specified `collection_id`. The + insert is performed synchronously and blocking, meaning that the function does not return until the insert has + completed. The `mk_actions` function is called to generate a list of actions for the bulk insert. If `refresh` is set to + True, the index is refreshed after the bulk insert. The function does not return any value. + """ + helpers.bulk( + self.sync_client, + mk_actions(collection_id, processed_items), + refresh=refresh, + raise_on_error=False, + ) + + # DANGER + async def delete_items(self) -> None: + """Danger. this is only for tests.""" + await self.client.delete_by_query( + index=ITEM_INDICES, + body={"query": {"match_all": {}}}, + wait_for_completion=True, + ) + + # DANGER + async def delete_collections(self) -> None: + """Danger. 
this is only for tests.""" + await self.client.delete_by_query( + index=COLLECTIONS_INDEX, + body={"query": {"match_all": {}}}, + wait_for_completion=True, + ) From 980125ef55ea3b9e428dde6148e83acfb1371bfa Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Fri, 2 Feb 2024 11:19:36 +0800 Subject: [PATCH 11/33] clean up --- .../core/stac_fastapi/core/types/core.py | 64 ------------------- 1 file changed, 64 deletions(-) diff --git a/stac_fastapi/core/stac_fastapi/core/types/core.py b/stac_fastapi/core/stac_fastapi/core/types/core.py index d012dcea..875060a2 100644 --- a/stac_fastapi/core/stac_fastapi/core/types/core.py +++ b/stac_fastapi/core/stac_fastapi/core/types/core.py @@ -131,70 +131,6 @@ async def delete_collection( ... -# @attr.s -# class LandingPageMixin(abc.ABC): -# """Create a STAC landing page (GET /).""" - -# stac_version: str = attr.ib(default=STAC_VERSION) -# landing_page_id: str = attr.ib(default="stac-fastapi") -# title: str = attr.ib(default="stac-fastapi") -# description: str = attr.ib(default="stac-fastapi") - -# def _landing_page( -# self, -# base_url: str, -# conformance_classes: List[str], -# extension_schemas: List[str], -# ) -> stac_types.LandingPage: -# landing_page = stac_types.LandingPage( -# type="Catalog", -# id=self.landing_page_id, -# title=self.title, -# description=self.description, -# stac_version=self.stac_version, -# conformsTo=conformance_classes, -# links=[ -# { -# "rel": Relations.self.value, -# "type": MimeTypes.json, -# "href": base_url, -# }, -# { -# "rel": Relations.root.value, -# "type": MimeTypes.json, -# "href": base_url, -# }, -# { -# "rel": "data", -# "type": MimeTypes.json, -# "href": urljoin(base_url, "collections"), -# }, -# { -# "rel": Relations.conformance.value, -# "type": MimeTypes.json, -# "title": "STAC/WFS3 conformance classes implemented by this server", -# "href": urljoin(base_url, "conformance"), -# }, -# { -# "rel": Relations.search.value, -# "type": MimeTypes.geojson, -# "title": "STAC search", -# "href": 
urljoin(base_url, "search"), -# "method": "GET", -# }, -# { -# "rel": Relations.search.value, -# "type": MimeTypes.geojson, -# "title": "STAC search", -# "href": urljoin(base_url, "search"), -# "method": "POST", -# }, -# ], -# stac_extensions=extension_schemas, -# ) -# return landing_page - - @attr.s # type:ignore class AsyncBaseCoreClient(abc.ABC): """Defines a pattern for implementing STAC api core endpoints. From d00ec1460999d50d89734ed970b5ed7a97dc36c3 Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Fri, 2 Feb 2024 12:45:41 +0800 Subject: [PATCH 12/33] add tests --- stac_fastapi/opensearch/pytest.ini | 4 + .../stac_fastapi/opensearch/config.py | 4 +- stac_fastapi/opensearch/tests/__init__.py | 0 stac_fastapi/opensearch/tests/api/__init__.py | 0 stac_fastapi/opensearch/tests/api/test_api.py | 447 ++++++++++ .../opensearch/tests/clients/__init__.py | 0 .../tests/clients/test_elasticsearch.py | 312 +++++++ stac_fastapi/opensearch/tests/conftest.py | 208 +++++ .../tests/data/test_collection.json | 99 +++ .../opensearch/tests/data/test_item.json | 505 +++++++++++ .../tests/extensions/cql2/example01.json | 9 + .../tests/extensions/cql2/example04.json | 32 + .../tests/extensions/cql2/example05a.json | 23 + .../tests/extensions/cql2/example06b.json | 41 + .../tests/extensions/cql2/example08.json | 79 ++ .../tests/extensions/cql2/example09.json | 9 + .../tests/extensions/cql2/example1.json | 39 + .../tests/extensions/cql2/example10.json | 9 + .../tests/extensions/cql2/example14.json | 9 + .../tests/extensions/cql2/example15.json | 23 + .../tests/extensions/cql2/example17.json | 37 + .../tests/extensions/cql2/example18.json | 28 + .../tests/extensions/cql2/example19.json | 9 + .../tests/extensions/cql2/example20.json | 10 + .../tests/extensions/cql2/example21.json | 33 + .../tests/extensions/cql2/example22.json | 13 + .../tests/extensions/test_filter.py | 402 +++++++++ .../opensearch/tests/resources/__init__.py | 0 .../tests/resources/test_collection.py | 123 +++ 
.../tests/resources/test_conformance.py | 72 ++ .../opensearch/tests/resources/test_item.py | 804 ++++++++++++++++++ .../opensearch/tests/resources/test_mgmt.py | 13 + 32 files changed, 3394 insertions(+), 2 deletions(-) create mode 100644 stac_fastapi/opensearch/pytest.ini create mode 100644 stac_fastapi/opensearch/tests/__init__.py create mode 100644 stac_fastapi/opensearch/tests/api/__init__.py create mode 100644 stac_fastapi/opensearch/tests/api/test_api.py create mode 100644 stac_fastapi/opensearch/tests/clients/__init__.py create mode 100644 stac_fastapi/opensearch/tests/clients/test_elasticsearch.py create mode 100644 stac_fastapi/opensearch/tests/conftest.py create mode 100644 stac_fastapi/opensearch/tests/data/test_collection.json create mode 100644 stac_fastapi/opensearch/tests/data/test_item.json create mode 100644 stac_fastapi/opensearch/tests/extensions/cql2/example01.json create mode 100644 stac_fastapi/opensearch/tests/extensions/cql2/example04.json create mode 100644 stac_fastapi/opensearch/tests/extensions/cql2/example05a.json create mode 100644 stac_fastapi/opensearch/tests/extensions/cql2/example06b.json create mode 100644 stac_fastapi/opensearch/tests/extensions/cql2/example08.json create mode 100644 stac_fastapi/opensearch/tests/extensions/cql2/example09.json create mode 100644 stac_fastapi/opensearch/tests/extensions/cql2/example1.json create mode 100644 stac_fastapi/opensearch/tests/extensions/cql2/example10.json create mode 100644 stac_fastapi/opensearch/tests/extensions/cql2/example14.json create mode 100644 stac_fastapi/opensearch/tests/extensions/cql2/example15.json create mode 100644 stac_fastapi/opensearch/tests/extensions/cql2/example17.json create mode 100644 stac_fastapi/opensearch/tests/extensions/cql2/example18.json create mode 100755 stac_fastapi/opensearch/tests/extensions/cql2/example19.json create mode 100755 stac_fastapi/opensearch/tests/extensions/cql2/example20.json create mode 100644 
stac_fastapi/opensearch/tests/extensions/cql2/example21.json create mode 100644 stac_fastapi/opensearch/tests/extensions/cql2/example22.json create mode 100644 stac_fastapi/opensearch/tests/extensions/test_filter.py create mode 100644 stac_fastapi/opensearch/tests/resources/__init__.py create mode 100644 stac_fastapi/opensearch/tests/resources/test_collection.py create mode 100644 stac_fastapi/opensearch/tests/resources/test_conformance.py create mode 100644 stac_fastapi/opensearch/tests/resources/test_item.py create mode 100644 stac_fastapi/opensearch/tests/resources/test_mgmt.py diff --git a/stac_fastapi/opensearch/pytest.ini b/stac_fastapi/opensearch/pytest.ini new file mode 100644 index 00000000..db0353ef --- /dev/null +++ b/stac_fastapi/opensearch/pytest.ini @@ -0,0 +1,4 @@ +[pytest] +testpaths = tests +addopts = -sv +asyncio_mode = auto \ No newline at end of file diff --git a/stac_fastapi/opensearch/stac_fastapi/opensearch/config.py b/stac_fastapi/opensearch/stac_fastapi/opensearch/config.py index 643b81ce..db08301f 100644 --- a/stac_fastapi/opensearch/stac_fastapi/opensearch/config.py +++ b/stac_fastapi/opensearch/stac_fastapi/opensearch/config.py @@ -46,7 +46,7 @@ def _es_config() -> Dict[str, Any]: _forbidden_fields: Set[str] = {"type"} -class SearchSettings(ApiSettings): +class OpensearchSettings(ApiSettings): """API settings.""" # Fields which are defined by STAC but not included in the database model @@ -59,7 +59,7 @@ def create_client(self): return OpenSearch(**_es_config()) -class AsyncSearchSettings(ApiSettings): +class AsyncOpensearchSettings(ApiSettings): """API settings.""" # Fields which are defined by STAC but not included in the database model diff --git a/stac_fastapi/opensearch/tests/__init__.py b/stac_fastapi/opensearch/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/stac_fastapi/opensearch/tests/api/__init__.py b/stac_fastapi/opensearch/tests/api/__init__.py new file mode 100644 index 00000000..e69de29b diff 
--git a/stac_fastapi/opensearch/tests/api/test_api.py b/stac_fastapi/opensearch/tests/api/test_api.py new file mode 100644 index 00000000..74f0bb55 --- /dev/null +++ b/stac_fastapi/opensearch/tests/api/test_api.py @@ -0,0 +1,447 @@ +import copy +import uuid +from datetime import datetime, timedelta + +import pytest + +from ..conftest import create_collection, create_item + +ROUTES = { + "GET /_mgmt/ping", + "GET /docs/oauth2-redirect", + "HEAD /docs/oauth2-redirect", + "GET /", + "GET /conformance", + "GET /api", + "GET /api.html", + "HEAD /api", + "HEAD /api.html", + "GET /queryables", + "GET /collections", + "GET /collections/{collection_id}", + "GET /collections/{collection_id}/queryables", + "GET /collections/{collection_id}/items", + "GET /collections/{collection_id}/items/{item_id}", + "GET /search", + "POST /search", + "DELETE /collections/{collection_id}", + "DELETE /collections/{collection_id}/items/{item_id}", + "POST /collections", + "POST /collections/{collection_id}/items", + "PUT /collections", + "PUT /collections/{collection_id}/items/{item_id}", +} + + +@pytest.mark.asyncio +async def test_post_search_content_type(app_client, ctx): + params = {"limit": 1} + resp = await app_client.post("/search", json=params) + assert resp.headers["content-type"] == "application/geo+json" + + +@pytest.mark.asyncio +async def test_get_search_content_type(app_client, ctx): + resp = await app_client.get("/search") + assert resp.headers["content-type"] == "application/geo+json" + + +@pytest.mark.asyncio +async def test_api_headers(app_client): + resp = await app_client.get("/api") + assert ( + resp.headers["content-type"] == "application/vnd.oai.openapi+json;version=3.0" + ) + assert resp.status_code == 200 + + +@pytest.mark.asyncio +async def test_router(app): + api_routes = set([f"{list(route.methods)[0]} {route.path}" for route in app.routes]) + assert len(api_routes - ROUTES) == 0 + + +@pytest.mark.asyncio +async def test_app_transaction_extension(app_client, ctx): 
+ item = copy.deepcopy(ctx.item) + item["id"] = str(uuid.uuid4()) + resp = await app_client.post(f"/collections/{item['collection']}/items", json=item) + assert resp.status_code == 200 + + await app_client.delete(f"/collections/{item['collection']}/items/{item['id']}") + + +@pytest.mark.asyncio +async def test_app_search_response(app_client, ctx): + resp = await app_client.get("/search", params={"ids": ["test-item"]}) + assert resp.status_code == 200 + resp_json = resp.json() + + assert resp_json.get("type") == "FeatureCollection" + # stac_version and stac_extensions were removed in v1.0.0-beta.3 + assert resp_json.get("stac_version") is None + assert resp_json.get("stac_extensions") is None + + +@pytest.mark.asyncio +async def test_app_context_extension(app_client, ctx, txn_client): + test_item = ctx.item + test_item["id"] = "test-item-2" + test_item["collection"] = "test-collection-2" + test_collection = ctx.collection + test_collection["id"] = "test-collection-2" + + await create_collection(txn_client, test_collection) + await create_item(txn_client, test_item) + + resp = await app_client.get( + f"/collections/{test_collection['id']}/items/{test_item['id']}" + ) + assert resp.status_code == 200 + resp_json = resp.json() + assert resp_json["id"] == test_item["id"] + assert resp_json["collection"] == test_item["collection"] + + resp = await app_client.get(f"/collections/{test_collection['id']}") + assert resp.status_code == 200 + resp_json = resp.json() + assert resp_json["id"] == test_collection["id"] + + resp = await app_client.post("/search", json={"collections": ["test-collection-2"]}) + assert resp.status_code == 200 + resp_json = resp.json() + assert len(resp_json["features"]) == 1 + assert "context" in resp_json + assert resp_json["context"]["returned"] == 1 + if matched := resp_json["context"].get("matched"): + assert matched == 1 + + +@pytest.mark.asyncio +async def test_app_fields_extension(app_client, ctx, txn_client): + resp = await 
def _one_day_earlier_copy(item):
    """Return a deep copy of *item* with id "another-item" and a datetime one day earlier.

    A deep copy is essential: the previous ``dict(item)`` shallow copy shared
    the nested ``properties`` dict with the source item, so updating the
    clone's datetime silently mutated the original test item as well.
    """
    from copy import deepcopy  # local import keeps this helper self-contained

    clone = deepcopy(item)
    clone["id"] = "another-item"
    earlier = datetime.strptime(
        item["properties"]["datetime"], "%Y-%m-%dT%H:%M:%SZ"
    ) - timedelta(days=1)
    clone["properties"]["datetime"] = earlier.strftime("%Y-%m-%dT%H:%M:%SZ")
    return clone


@pytest.mark.asyncio
async def test_app_fields_extension_query(app_client, ctx, txn_client):
    """Querying on a property limits returned properties to datetime + that field."""
    resp = await app_client.post(
        "/search",
        json={
            "query": {"proj:epsg": {"gte": ctx.item["properties"]["proj:epsg"]}},
            "collections": ["test-collection"],
        },
    )
    assert resp.status_code == 200
    resp_json = resp.json()
    assert list(resp_json["features"][0]["properties"]) == ["datetime", "proj:epsg"]


@pytest.mark.asyncio
async def test_app_fields_extension_no_properties_get(app_client, ctx, txn_client):
    """GET /search with ``fields=-properties`` drops the properties key entirely."""
    resp = await app_client.get(
        "/search", params={"collections": ["test-collection"], "fields": "-properties"}
    )
    assert resp.status_code == 200
    resp_json = resp.json()
    assert "properties" not in resp_json["features"][0]


@pytest.mark.asyncio
async def test_app_fields_extension_no_properties_post(app_client, ctx, txn_client):
    """POST /search excluding "properties" drops the properties key entirely."""
    resp = await app_client.post(
        "/search",
        json={
            "collections": ["test-collection"],
            "fields": {"exclude": ["properties"]},
        },
    )
    assert resp.status_code == 200
    resp_json = resp.json()
    assert "properties" not in resp_json["features"][0]


@pytest.mark.asyncio
async def test_app_fields_extension_no_null_fields(app_client, ctx, txn_client):
    """Serialized features must not carry explicit ``null`` members.

    Regression check for
    https://github.com/stac-utils/stac-fastapi-elasticsearch/issues/166
    """
    resp = await app_client.get("/search", params={"collections": ["test-collection"]})
    assert resp.status_code == 200
    resp_json = resp.json()
    for feature in resp_json["features"]:
        for link in feature["links"]:
            assert all(a not in link or link[a] is not None for a in ("title", "asset"))
        # NOTE(review): iterating feature["assets"] yields the asset *keys*
        # (strings), so the membership test below inspects key names, not
        # asset objects — presumably assets.values() was intended; kept
        # as-is to preserve behavior, confirm upstream.
        for asset in feature["assets"]:
            assert all(
                a not in asset or asset[a] is not None
                for a in ("start_datetime", "created")
            )


@pytest.mark.asyncio
async def test_app_fields_extension_return_all_properties(app_client, ctx, txn_client):
    """``fields=properties`` returns every item property."""
    item = ctx.item
    resp = await app_client.get(
        "/search", params={"collections": ["test-collection"], "fields": "properties"}
    )
    assert resp.status_code == 200
    resp_json = resp.json()
    feature = resp_json["features"][0]
    assert len(feature["properties"]) >= len(item["properties"])
    for expected_prop, expected_value in item["properties"].items():
        # Timestamps may come back with different sub-second formatting, so
        # compare only to whole seconds (first 19 chars of ISO-8601).
        if expected_prop in ("datetime", "created", "updated"):
            assert feature["properties"][expected_prop][0:19] == expected_value[0:19]
        else:
            assert feature["properties"][expected_prop] == expected_value


@pytest.mark.asyncio
async def test_app_query_extension_gt(app_client, ctx):
    """A strict ``gt`` query on the item's own value matches nothing."""
    params = {"query": {"proj:epsg": {"gt": ctx.item["properties"]["proj:epsg"]}}}
    resp = await app_client.post("/search", json=params)
    assert resp.status_code == 200
    assert len(resp.json()["features"]) == 0


@pytest.mark.asyncio
async def test_app_query_extension_gte(app_client, ctx):
    """A ``gte`` query on the item's own value matches the item."""
    params = {"query": {"proj:epsg": {"gte": ctx.item["properties"]["proj:epsg"]}}}
    resp = await app_client.post("/search", json=params)

    assert resp.status_code == 200
    assert len(resp.json()["features"]) == 1


@pytest.mark.asyncio
async def test_app_query_extension_limit_lt0(app_client):
    """Negative limits are rejected with HTTP 400."""
    assert (await app_client.post("/search", json={"limit": -1})).status_code == 400


@pytest.mark.asyncio
async def test_app_query_extension_limit_gt10000(app_client):
    """Limits above 10000 are capped to 10000 rather than rejected."""
    resp = await app_client.post("/search", json={"limit": 10001})
    assert resp.status_code == 200
    assert resp.json()["context"]["limit"] == 10000


@pytest.mark.asyncio
async def test_app_query_extension_limit_10000(app_client):
    """A limit of exactly 10000 is accepted."""
    params = {"limit": 10000}
    resp = await app_client.post("/search", json=params)
    assert resp.status_code == 200


@pytest.mark.asyncio
async def test_app_sort_extension_get_asc(app_client, txn_client, ctx):
    """GET ``sortby=+properties.datetime`` returns items oldest-first."""
    first_item = ctx.item
    second_item = _one_day_earlier_copy(first_item)
    await create_item(txn_client, second_item)

    resp = await app_client.get("/search?sortby=+properties.datetime")
    assert resp.status_code == 200
    resp_json = resp.json()
    assert resp_json["features"][0]["id"] == second_item["id"]
    assert resp_json["features"][1]["id"] == first_item["id"]


@pytest.mark.asyncio
async def test_app_sort_extension_get_desc(app_client, txn_client, ctx):
    """GET ``sortby=-properties.datetime`` returns items newest-first."""
    first_item = ctx.item
    second_item = _one_day_earlier_copy(first_item)
    await create_item(txn_client, second_item)

    resp = await app_client.get("/search?sortby=-properties.datetime")
    assert resp.status_code == 200
    resp_json = resp.json()
    assert resp_json["features"][0]["id"] == first_item["id"]
    assert resp_json["features"][1]["id"] == second_item["id"]


@pytest.mark.asyncio
async def test_app_sort_extension_post_asc(app_client, txn_client, ctx):
    """POST sortby ascending on datetime returns items oldest-first."""
    first_item = ctx.item
    second_item = _one_day_earlier_copy(first_item)
    await create_item(txn_client, second_item)

    params = {
        "collections": [first_item["collection"]],
        "sortby": [{"field": "properties.datetime", "direction": "asc"}],
    }
    resp = await app_client.post("/search", json=params)
    assert resp.status_code == 200
    resp_json = resp.json()
    assert resp_json["features"][0]["id"] == second_item["id"]
    assert resp_json["features"][1]["id"] == first_item["id"]


@pytest.mark.asyncio
async def test_app_sort_extension_post_desc(app_client, txn_client, ctx):
    """POST sortby descending on datetime returns items newest-first."""
    first_item = ctx.item
    second_item = _one_day_earlier_copy(first_item)
    await create_item(txn_client, second_item)

    params = {
        "collections": [first_item["collection"]],
        "sortby": [{"field": "properties.datetime", "direction": "desc"}],
    }
    resp = await app_client.post("/search", json=params)
    assert resp.status_code == 200
    resp_json = resp.json()
    assert resp_json["features"][0]["id"] == first_item["id"]
    assert resp_json["features"][1]["id"] == second_item["id"]


@pytest.mark.asyncio
async def test_search_invalid_date(app_client, ctx):
    """A malformed datetime string yields HTTP 400."""
    params = {
        "datetime": "2020-XX-01/2020-10-30",
        "collections": [ctx.item["collection"]],
    }

    resp = await app_client.post("/search", json=params)
    assert resp.status_code == 400


@pytest.mark.asyncio
async def test_search_point_intersects_get(app_client, ctx):
    """GET intersects with a point inside the item footprint finds the item."""
    resp = await app_client.get(
        '/search?intersects={"type":"Point","coordinates":[150.04,-33.14]}'
    )

    assert resp.status_code == 200
    resp_json = resp.json()
    assert len(resp_json["features"]) == 1


@pytest.mark.asyncio
async def test_search_polygon_intersects_get(app_client, ctx):
    """GET intersects with an overlapping polygon finds the item."""
    resp = await app_client.get(
        '/search?intersects={"type":"Polygon","coordinates":[[[149.04, -34.14],[149.04, -32.14],[151.04, -32.14],[151.04, -34.14],[149.04, -34.14]]]}'
    )

    assert resp.status_code == 200
    resp_json = resp.json()
    assert len(resp_json["features"]) == 1


@pytest.mark.asyncio
async def test_search_point_intersects_post(app_client, ctx):
    """POST intersects with a point inside the item footprint finds the item."""
    point = [150.04, -33.14]
    intersects = {"type": "Point", "coordinates": point}

    params = {
        "intersects": intersects,
        "collections": [ctx.item["collection"]],
    }
    resp = await app_client.post("/search", json=params)

    assert resp.status_code == 200
    resp_json = resp.json()
    assert len(resp_json["features"]) == 1


@pytest.mark.asyncio
async def test_search_point_does_not_intersect(app_client, ctx):
    """POST intersects with a point outside the footprint matches nothing."""
    point = [15.04, -3.14]
    intersects = {"type": "Point", "coordinates": point}

    params = {
        "intersects": intersects,
        "collections": [ctx.item["collection"]],
    }
    resp = await app_client.post("/search", json=params)

    assert resp.status_code == 200
    resp_json = resp.json()
    assert len(resp_json["features"]) == 0


@pytest.mark.asyncio
async def test_datetime_non_interval(app_client, ctx):
    """All accepted single-instant datetime spellings match the item."""
    dt_formats = [
        "2020-02-12T12:30:22+00:00",
        "2020-02-12T12:30:22.00Z",
        "2020-02-12T12:30:22Z",
        "2020-02-12T12:30:22.00+00:00",
    ]

    for dt in dt_formats:
        params = {
            "datetime": dt,
            "collections": [ctx.item["collection"]],
        }

        resp = await app_client.post("/search", json=params)
        assert resp.status_code == 200
        resp_json = resp.json()
        # datetime is returned in this format "2020-02-12T12:30:22Z", so
        # compare only the first 19 characters (to whole seconds).
        assert resp_json["features"][0]["properties"]["datetime"][0:19] == dt[0:19]


@pytest.mark.asyncio
async def test_bbox_3d(app_client, ctx):
    """A 3D (6-element) bbox covering the item footprint finds the item."""
    australia_bbox = [106.343365, -47.199523, 0.1, 168.218365, -19.437288, 0.1]
    params = {
        "bbox": australia_bbox,
        "collections": [ctx.item["collection"]],
    }
    resp = await app_client.post("/search", json=params)
    assert resp.status_code == 200
    resp_json = resp.json()
    assert len(resp_json["features"]) == 1


@pytest.mark.asyncio
async def test_search_line_string_intersects(app_client, ctx):
    """POST intersects with a LineString crossing the footprint finds the item."""
    line = [[150.04, -33.14], [150.22, -33.89]]
    intersects = {"type": "LineString", "coordinates": line}
    params = {
        "intersects": intersects,
        "collections": [ctx.item["collection"]],
    }

    resp = await app_client.post("/search", json=params)

    assert resp.status_code == 200

    resp_json = resp.json()
    assert len(resp_json["features"]) == 1
"""Integration tests for the CoreClient / TransactionsClient / BulkTransactionsClient
CRUD paths, exercised directly against the database-backed clients (no HTTP layer)."""
import uuid
from copy import deepcopy
from typing import Callable

import pytest
from stac_pydantic import Item

from stac_fastapi.extensions.third_party.bulk_transactions import Items
from stac_fastapi.types.errors import ConflictError, NotFoundError

from ..conftest import MockRequest, create_item


@pytest.mark.asyncio
async def test_create_collection(app_client, ctx, core_client, txn_client):
    """A created collection can be read back by id."""
    in_coll = deepcopy(ctx.collection)
    in_coll["id"] = str(uuid.uuid4())  # unique id so the create cannot conflict
    await txn_client.create_collection(in_coll, request=MockRequest)
    got_coll = await core_client.get_collection(in_coll["id"], request=MockRequest)
    assert got_coll["id"] == in_coll["id"]
    await txn_client.delete_collection(in_coll["id"])


@pytest.mark.asyncio
async def test_create_collection_already_exists(app_client, ctx, txn_client):
    """Re-creating a collection with an existing STAC id raises ConflictError."""
    data = deepcopy(ctx.collection)

    # change id to avoid elasticsearch duplicate key error
    # NOTE(review): this sets "_id", not "id" — the STAC "id" stays identical,
    # which is what makes create_collection below conflict; presumably
    # intentional, confirm against the database layer.
    data["_id"] = str(uuid.uuid4())

    with pytest.raises(ConflictError):
        await txn_client.create_collection(data, request=MockRequest)

    await txn_client.delete_collection(data["id"])


@pytest.mark.asyncio
async def test_update_collection(
    core_client,
    txn_client,
    load_test_data: Callable,
):
    """Updating a collection persists the change and leaves its items intact."""
    collection_data = load_test_data("test_collection.json")
    item_data = load_test_data("test_item.json")

    await txn_client.create_collection(collection_data, request=MockRequest)
    await txn_client.create_item(
        collection_id=collection_data["id"],
        item=item_data,
        request=MockRequest,
        refresh=True,  # force an index refresh so the item is immediately visible
    )

    collection_data["keywords"].append("new keyword")
    await txn_client.update_collection(collection_data, request=MockRequest)

    coll = await core_client.get_collection(collection_data["id"], request=MockRequest)
    assert "new keyword" in coll["keywords"]

    # The item must still be reachable under the (unchanged) collection id.
    item = await core_client.get_item(
        item_id=item_data["id"],
        collection_id=collection_data["id"],
        request=MockRequest,
    )
    assert item["id"] == item_data["id"]
    assert item["collection"] == item_data["collection"]

    await txn_client.delete_collection(collection_data["id"])


@pytest.mark.asyncio
async def test_update_collection_id(
    core_client,
    txn_client,
    load_test_data: Callable,
):
    """Renaming a collection id migrates its items and removes the old id."""
    collection_data = load_test_data("test_collection.json")
    item_data = load_test_data("test_item.json")
    new_collection_id = "new-test-collection"

    await txn_client.create_collection(collection_data, request=MockRequest)
    await txn_client.create_item(
        collection_id=collection_data["id"],
        item=item_data,
        request=MockRequest,
        refresh=True,
    )

    old_collection_id = collection_data["id"]
    collection_data["id"] = new_collection_id

    # The old id is communicated via the request's query params.
    await txn_client.update_collection(
        collection=collection_data,
        request=MockRequest(
            query_params={
                "collection_id": old_collection_id,
                "limit": "10",
            }
        ),
        refresh=True,
    )

    # The old collection id must no longer resolve...
    with pytest.raises(NotFoundError):
        await core_client.get_collection(old_collection_id, request=MockRequest)

    # ...while the new id does.
    coll = await core_client.get_collection(collection_data["id"], request=MockRequest)
    assert coll["id"] == new_collection_id

    # Items must have moved with the collection: gone under the old id,
    # present under the new one.
    with pytest.raises(NotFoundError):
        await core_client.get_item(
            item_id=item_data["id"],
            collection_id=old_collection_id,
            request=MockRequest,
        )

    item = await core_client.get_item(
        item_id=item_data["id"],
        collection_id=collection_data["id"],
        request=MockRequest,
        refresh=True,
    )

    assert item["id"] == item_data["id"]
    assert item["collection"] == new_collection_id

    await txn_client.delete_collection(collection_data["id"])


@pytest.mark.asyncio
async def test_delete_collection(
    core_client,
    txn_client,
    load_test_data: Callable,
):
    """A deleted collection can no longer be fetched."""
    data = load_test_data("test_collection.json")
    await txn_client.create_collection(data, request=MockRequest)

    await txn_client.delete_collection(data["id"])

    with pytest.raises(NotFoundError):
        await core_client.get_collection(data["id"], request=MockRequest)


@pytest.mark.asyncio
async def test_get_collection(
    core_client,
    txn_client,
    load_test_data: Callable,
):
    """get_collection returns the collection that was created."""
    data = load_test_data("test_collection.json")
    await txn_client.create_collection(data, request=MockRequest)
    coll = await core_client.get_collection(data["id"], request=MockRequest)
    assert coll["id"] == data["id"]

    await txn_client.delete_collection(data["id"])


@pytest.mark.asyncio
async def test_get_item(app_client, ctx, core_client):
    """get_item returns the fixture item by (item_id, collection_id)."""
    got_item = await core_client.get_item(
        item_id=ctx.item["id"],
        collection_id=ctx.item["collection"],
        request=MockRequest,
    )
    assert got_item["id"] == ctx.item["id"]
    assert got_item["collection"] == ctx.item["collection"]


@pytest.mark.asyncio
async def test_get_collection_items(app_client, ctx, core_client, txn_client):
    """item_collection returns every item created in the collection."""
    coll = ctx.collection
    num_of_items_to_create = 5
    for _ in range(num_of_items_to_create):
        item = deepcopy(ctx.item)
        item["id"] = str(uuid.uuid4())
        await txn_client.create_item(
            collection_id=item["collection"],
            item=item,
            request=MockRequest,
            refresh=True,
        )

    fc = await core_client.item_collection(coll["id"], request=MockRequest())
    assert len(fc["features"]) == num_of_items_to_create + 1  # ctx.item

    for item in fc["features"]:
        assert item["collection"] == coll["id"]


@pytest.mark.asyncio
async def test_create_item(ctx, core_client, txn_client):
    """The stored item round-trips, ignoring links and server-set timestamps."""
    resp = await core_client.get_item(
        ctx.item["id"], ctx.item["collection"], request=MockRequest
    )
    assert Item(**ctx.item).dict(
        exclude={"links": ..., "properties": {"created", "updated"}}
    ) == Item(**resp).dict(exclude={"links": ..., "properties": {"created", "updated"}})


@pytest.mark.asyncio
async def test_create_item_already_exists(ctx, txn_client):
    """Creating an item with a duplicate id raises ConflictError."""
    with pytest.raises(ConflictError):
        await txn_client.create_item(
            collection_id=ctx.item["collection"],
            item=ctx.item,
            request=MockRequest,
            refresh=True,
        )


@pytest.mark.asyncio
async def test_update_item(ctx, core_client, txn_client):
    """An updated property is visible on the next read."""
    ctx.item["properties"]["foo"] = "bar"
    collection_id = ctx.item["collection"]
    item_id = ctx.item["id"]
    await txn_client.update_item(
        collection_id=collection_id, item_id=item_id, item=ctx.item, request=MockRequest
    )

    updated_item = await core_client.get_item(
        item_id, collection_id, request=MockRequest
    )
    assert updated_item["properties"]["foo"] == "bar"


@pytest.mark.asyncio
async def test_update_geometry(ctx, core_client, txn_client):
    """An updated geometry is visible on the next read."""
    new_coordinates = [
        [
            [142.15052873427666, -33.82243006904891],
            [140.1000346138806, -34.257132625788756],
            [139.5776607193635, -32.514709769700254],
            [141.6262528041627, -32.08081674221862],
            [142.15052873427666, -33.82243006904891],
        ]
    ]

    ctx.item["geometry"]["coordinates"] = new_coordinates
    collection_id = ctx.item["collection"]
    item_id = ctx.item["id"]
    await txn_client.update_item(
        collection_id=collection_id, item_id=item_id, item=ctx.item, request=MockRequest
    )

    updated_item = await core_client.get_item(
        item_id, collection_id, request=MockRequest
    )
    assert updated_item["geometry"]["coordinates"] == new_coordinates


@pytest.mark.asyncio
async def test_delete_item(ctx, core_client, txn_client):
    """A deleted item can no longer be fetched."""
    await txn_client.delete_item(ctx.item["id"], ctx.item["collection"])

    with pytest.raises(NotFoundError):
        await core_client.get_item(
            ctx.item["id"], ctx.item["collection"], request=MockRequest
        )


@pytest.mark.asyncio
async def test_bulk_item_insert(ctx, core_client, txn_client, bulk_txn_client):
    """bulk_item_insert stores all submitted items (sync client, refresh forced)."""
    items = {}
    for _ in range(10):
        _item = deepcopy(ctx.item)
        _item["id"] = str(uuid.uuid4())
        items[_item["id"]] = _item

    # fc = es_core.item_collection(coll["id"], request=MockStarletteRequest)
    # assert len(fc["features"]) == 0

    bulk_txn_client.bulk_item_insert(Items(items=items), refresh=True)

    fc = await core_client.item_collection(ctx.collection["id"], request=MockRequest())
    assert len(fc["features"]) >= 10

    # for item in items:
    #     es_transactions.delete_item(
    #         item["id"], item["collection"], request=MockStarletteRequest
    #     )


@pytest.mark.asyncio
async def test_feature_collection_insert(
    core_client,
    txn_client,
    ctx,
):
    """create_item accepts a whole FeatureCollection and stores every feature."""
    features = []
    for _ in range(10):
        _item = deepcopy(ctx.item)
        _item["id"] = str(uuid.uuid4())
        features.append(_item)

    feature_collection = {"type": "FeatureCollection", "features": features}

    await create_item(txn_client, feature_collection)

    fc = await core_client.item_collection(ctx.collection["id"], request=MockRequest())
    assert len(fc["features"]) >= 10


@pytest.mark.asyncio
async def test_landing_page_no_collection_title(ctx, core_client, txn_client, app):
    """Landing-page links for a title-less collection still carry a title."""
    ctx.collection["id"] = "new_id"
    del ctx.collection["title"]  # simulate a collection created without a title
    await txn_client.create_collection(ctx.collection, request=MockRequest)

    landing_page = await core_client.landing_page(request=MockRequest(app=app))
    for link in landing_page["links"]:
        if link["href"].split("/")[-1] == ctx.collection["id"]:
            assert link["title"]
"""Pytest fixtures shared by the opensearch backend test suite."""
import asyncio
import copy
import json
import os
from typing import Any, Callable, Dict, Optional

import pytest
import pytest_asyncio
from httpx import AsyncClient

from stac_fastapi.api.app import StacApi
from stac_fastapi.api.models import create_get_request_model, create_post_request_model
from stac_fastapi.core.core import (
    BulkTransactionsClient,
    CoreClient,
    TransactionsClient,
)
from stac_fastapi.core.extensions import QueryExtension
from stac_fastapi.extensions.core import (  # FieldsExtension,
    ContextExtension,
    FieldsExtension,
    FilterExtension,
    SortExtension,
    TokenPaginationExtension,
    TransactionExtension,
)
from stac_fastapi.opensearch.config import AsyncOpensearchSettings, OpensearchSettings
from stac_fastapi.opensearch.database_logic import (
    DatabaseLogic,
    create_collection_index,
)
from stac_fastapi.types.config import Settings

# Directory holding the JSON fixtures (test_item.json / test_collection.json).
DATA_DIR = os.path.join(os.path.dirname(__file__), "data")


class Context:
    """Bundle of the item and collection created for one test run."""

    def __init__(self, item, collection):
        self.item = item
        self.collection = collection


class MockRequest:
    """Minimal stand-in for a Starlette ``Request`` passed to the clients."""

    base_url = "http://test-server"
    # Class-level fallback so the *class itself* can be passed as
    # ``request=MockRequest`` (several tests do exactly that).
    query_params = {}

    def __init__(
        self,
        method: str = "GET",
        url: str = "XXXX",
        app: Optional[Any] = None,
        query_params: Optional[Dict[str, Any]] = None,
    ):
        self.method = method
        # NOTE(review): placeholder URL kept from upstream; appears unused by
        # the clients under test — confirm.
        self.url = url
        self.app = app
        # Fix: the previous signature used the mutable default argument
        # ``query_params={"limit": "10"}``, which is a single dict shared by
        # every call and could leak mutations across tests.  Apply the same
        # default here from a None sentinel instead.
        self.query_params = {"limit": "10"} if query_params is None else query_params


class TestSettings(AsyncOpensearchSettings):
    """Async settings variant that reads the test environment file."""

    class Config:
        env_file = ".env.test"


settings = TestSettings()
Settings.set(settings)


@pytest.fixture(scope="session")
def event_loop():
    """Session-scoped event loop so async fixtures can share state."""
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    yield loop
    loop.close()


def _load_file(filename: str) -> Dict:
    """Load a JSON fixture from the test data directory."""
    with open(os.path.join(DATA_DIR, filename)) as file:
        return json.load(file)


# Parsed once at import time; fixtures hand out deep copies so tests can
# mutate them freely without affecting each other.
_test_item_prototype = _load_file("test_item.json")
_test_collection_prototype = _load_file("test_collection.json")


@pytest.fixture
def load_test_data() -> Callable[[str], Dict]:
    """Expose the raw loader for tests that want a fresh parse."""
    return _load_file


@pytest.fixture
def test_item() -> Dict:
    """A fresh, mutable copy of the prototype item."""
    return copy.deepcopy(_test_item_prototype)


@pytest.fixture
def test_collection() -> Dict:
    """A fresh, mutable copy of the prototype collection."""
    return copy.deepcopy(_test_collection_prototype)


async def create_collection(txn_client: TransactionsClient, collection: Dict) -> None:
    """Create *collection* and force an index refresh so it is visible at once."""
    await txn_client.create_collection(
        dict(collection), request=MockRequest, refresh=True
    )


async def create_item(txn_client: TransactionsClient, item: Dict) -> None:
    """Create a single item or a whole FeatureCollection, refreshing the index."""
    if "collection" in item:
        await txn_client.create_item(
            collection_id=item["collection"],
            item=item,
            request=MockRequest,
            refresh=True,
        )
    else:
        # FeatureCollection payload: derive the collection from the first feature.
        await txn_client.create_item(
            collection_id=item["features"][0]["collection"],
            item=item,
            request=MockRequest,
            refresh=True,
        )


async def delete_collections_and_items(txn_client: TransactionsClient) -> None:
    """Wipe all items and collections (test isolation helper)."""
    await refresh_indices(txn_client)
    await txn_client.database.delete_items()
    await txn_client.database.delete_collections()


async def refresh_indices(txn_client: TransactionsClient) -> None:
    """Best-effort refresh of every index; missing indices are not an error."""
    try:
        await txn_client.database.client.indices.refresh(index="_all")
    except Exception:
        # Deliberately swallowed: a refresh failure (e.g. no indices yet)
        # must not fail the calling test.
        pass


@pytest_asyncio.fixture()
async def ctx(txn_client: TransactionsClient, test_collection, test_item):
    """Provide one collection + one item, cleaning the database around the test."""
    # TODO: remove one of the two cleanups when all methods use this fixture.
    await delete_collections_and_items(txn_client)

    await create_collection(txn_client, test_collection)
    await create_item(txn_client, test_item)

    yield Context(item=test_item, collection=test_collection)

    await delete_collections_and_items(txn_client)


# Shared database handle plus *sync* settings for the transaction clients;
# the async variant is built separately inside the ``app`` fixture.
database = DatabaseLogic()
settings = OpensearchSettings()


@pytest.fixture
def core_client():
    """Read-only STAC client bound to the shared database."""
    return CoreClient(database=database, session=None)


@pytest.fixture
def txn_client():
    """Transactions client bound to the shared database."""
    return TransactionsClient(database=database, session=None, settings=settings)


@pytest.fixture
def bulk_txn_client():
    """Bulk-insert client bound to the shared database."""
    return BulkTransactionsClient(database=database, session=None, settings=settings)


@pytest_asyncio.fixture(scope="session")
async def app():
    """Build the full FastAPI application with every extension enabled."""
    settings = AsyncOpensearchSettings()
    extensions = [
        TransactionExtension(
            client=TransactionsClient(
                database=database, session=None, settings=settings
            ),
            settings=settings,
        ),
        ContextExtension(),
        SortExtension(),
        FieldsExtension(),
        QueryExtension(),
        TokenPaginationExtension(),
        FilterExtension(),
    ]

    post_request_model = create_post_request_model(extensions)

    return StacApi(
        settings=settings,
        client=CoreClient(
            database=database,
            session=None,
            extensions=extensions,
            post_request_model=post_request_model,
        ),
        extensions=extensions,
        search_get_request_model=create_get_request_model(extensions),
        search_post_request_model=post_request_model,
    ).app


@pytest_asyncio.fixture(scope="session")
async def app_client(app):
    """HTTP client against the in-process app, with the collection index ready."""
    await create_collection_index()

    async with AsyncClient(app=app, base_url="http://test-server") as c:
        yield c
"http://localhost:8081/collections/landsat-8-l1", + "rel": "self", + "type": "application/json" + }, + { + "href": "http://localhost:8081/", + "rel": "parent", + "type": "application/json" + }, + { + "href": "http://localhost:8081/collections/landsat-8-l1/items", + "rel": "item", + "type": "application/geo+json" + }, + { + "href": "http://localhost:8081/", + "rel": "root", + "type": "application/json" + } + ], + "title": "Landsat 8 L1", + "keywords": [ + "landsat", + "earth observation", + "usgs" + ], + "providers": [ + { + "name": "USGS", + "roles": [ + "producer" + ], + "url": "https://landsat.usgs.gov/" + }, + { + "name": "Planet Labs", + "roles": [ + "processor" + ], + "url": "https://github.com/landsat-pds/landsat_ingestor" + }, + { + "name": "AWS", + "roles": [ + "host" + ], + "url": "https://landsatonaws.com/" + }, + { + "name": "Development Seed", + "roles": [ + "processor" + ], + "url": "https://github.com/sat-utils/sat-api" + }, + { + "name": "Earth Search by Element84", + "description": "API of Earth on AWS datasets", + "roles": [ + "host" + ], + "url": "https://element84.com" + } + ] +} \ No newline at end of file diff --git a/stac_fastapi/opensearch/tests/data/test_item.json b/stac_fastapi/opensearch/tests/data/test_item.json new file mode 100644 index 00000000..2b7fdd86 --- /dev/null +++ b/stac_fastapi/opensearch/tests/data/test_item.json @@ -0,0 +1,505 @@ +{ + "type": "Feature", + "id": "test-item", + "stac_version": "1.0.0", + "stac_extensions": [ + "https://stac-extensions.github.io/eo/v1.0.0/schema.json", + "https://stac-extensions.github.io/projection/v1.0.0/schema.json" + ], + "geometry": { + "coordinates": [ + [ + [ + 152.15052873427666, + -33.82243006904891 + ], + [ + 150.1000346138806, + -34.257132625788756 + ], + [ + 149.5776607193635, + -32.514709769700254 + ], + [ + 151.6262528041627, + -32.08081674221862 + ], + [ + 152.15052873427666, + -33.82243006904891 + ] + ] + ], + "type": "Polygon" + }, + "properties": { + "datetime": 
"2020-02-12T12:30:22Z", + "landsat:scene_id": "LC82081612020043LGN00", + "landsat:row": "161", + "gsd": 15, + "eo:bands": [ + { + "gsd": 30, + "name": "B1", + "common_name": "coastal", + "center_wavelength": 0.44, + "full_width_half_max": 0.02 + }, + { + "gsd": 30, + "name": "B2", + "common_name": "blue", + "center_wavelength": 0.48, + "full_width_half_max": 0.06 + }, + { + "gsd": 30, + "name": "B3", + "common_name": "green", + "center_wavelength": 0.56, + "full_width_half_max": 0.06 + }, + { + "gsd": 30, + "name": "B4", + "common_name": "red", + "center_wavelength": 0.65, + "full_width_half_max": 0.04 + }, + { + "gsd": 30, + "name": "B5", + "common_name": "nir", + "center_wavelength": 0.86, + "full_width_half_max": 0.03 + }, + { + "gsd": 30, + "name": "B6", + "common_name": "swir16", + "center_wavelength": 1.6, + "full_width_half_max": 0.08 + }, + { + "gsd": 30, + "name": "B7", + "common_name": "swir22", + "center_wavelength": 2.2, + "full_width_half_max": 0.2 + }, + { + "gsd": 15, + "name": "B8", + "common_name": "pan", + "center_wavelength": 0.59, + "full_width_half_max": 0.18 + }, + { + "gsd": 30, + "name": "B9", + "common_name": "cirrus", + "center_wavelength": 1.37, + "full_width_half_max": 0.02 + }, + { + "gsd": 100, + "name": "B10", + "common_name": "lwir11", + "center_wavelength": 10.9, + "full_width_half_max": 0.8 + }, + { + "gsd": 100, + "name": "B11", + "common_name": "lwir12", + "center_wavelength": 12, + "full_width_half_max": 1 + } + ], + "landsat:revision": "00", + "view:sun_azimuth": -148.83296771, + "instrument": "OLI_TIRS", + "landsat:product_id": "LC08_L1GT_208161_20200212_20200212_01_RT", + "eo:cloud_cover": 0, + "landsat:tier": "RT", + "landsat:processing_level": "L1GT", + "landsat:column": "208", + "platform": "landsat-8", + "proj:epsg": 32756, + "view:sun_elevation": -37.30791534, + "view:off_nadir": 0, + "height": 2500, + "width": 2500 + }, + "bbox": [ + 149.57574, + -34.25796, + 152.15194, + -32.07915 + ], + "collection": 
"test-collection", + "assets": { + "ANG": { + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ANG.txt", + "type": "text/plain", + "title": "Angle Coefficients File", + "description": "Collection 2 Level-1 Angle Coefficients File (ANG)" + }, + "SR_B1": { + "gsd": 30, + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B1.TIF", + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "title": "Coastal/Aerosol Band (B1)", + "eo:bands": [ + { + "gsd": 30, + "name": "SR_B1", + "common_name": "coastal", + "center_wavelength": 0.44, + "full_width_half_max": 0.02 + } + ], + "proj:shape": [ + 7731, + 7591 + ], + "description": "Collection 2 Level-2 Coastal/Aerosol Band (B1) Surface Reflectance", + "proj:transform": [ + 30, + 0, + 304185, + 0, + -30, + -843585 + ] + }, + "SR_B2": { + "gsd": 30, + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B2.TIF", + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "title": "Blue Band (B2)", + "eo:bands": [ + { + "gsd": 30, + "name": "SR_B2", + "common_name": "blue", + "center_wavelength": 0.48, + "full_width_half_max": 0.06 + } + ], + "proj:shape": [ + 7731, + 7591 + ], + "description": "Collection 2 Level-2 Blue Band (B2) Surface Reflectance", + "proj:transform": [ + 30, + 0, + 304185, + 0, + -30, + -843585 + ] + }, + "SR_B3": { + "gsd": 30, + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B3.TIF", + "type": "image/tiff; application=geotiff; 
profile=cloud-optimized", + "title": "Green Band (B3)", + "eo:bands": [ + { + "gsd": 30, + "name": "SR_B3", + "common_name": "green", + "center_wavelength": 0.56, + "full_width_half_max": 0.06 + } + ], + "proj:shape": [ + 7731, + 7591 + ], + "description": "Collection 2 Level-2 Green Band (B3) Surface Reflectance", + "proj:transform": [ + 30, + 0, + 304185, + 0, + -30, + -843585 + ] + }, + "SR_B4": { + "gsd": 30, + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B4.TIF", + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "title": "Red Band (B4)", + "eo:bands": [ + { + "gsd": 30, + "name": "SR_B4", + "common_name": "red", + "center_wavelength": 0.65, + "full_width_half_max": 0.04 + } + ], + "proj:shape": [ + 7731, + 7591 + ], + "description": "Collection 2 Level-2 Red Band (B4) Surface Reflectance", + "proj:transform": [ + 30, + 0, + 304185, + 0, + -30, + -843585 + ] + }, + "SR_B5": { + "gsd": 30, + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B5.TIF", + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "title": "Near Infrared Band 0.8 (B5)", + "eo:bands": [ + { + "gsd": 30, + "name": "SR_B5", + "common_name": "nir08", + "center_wavelength": 0.86, + "full_width_half_max": 0.03 + } + ], + "proj:shape": [ + 7731, + 7591 + ], + "description": "Collection 2 Level-2 Near Infrared Band 0.8 (B5) Surface Reflectance", + "proj:transform": [ + 30, + 0, + 304185, + 0, + -30, + -843585 + ] + }, + "SR_B6": { + "gsd": 30, + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B6.TIF", + "type": "image/tiff; application=geotiff; 
profile=cloud-optimized", + "title": "Short-wave Infrared Band 1.6 (B6)", + "eo:bands": [ + { + "gsd": 30, + "name": "SR_B6", + "common_name": "swir16", + "center_wavelength": 1.6, + "full_width_half_max": 0.08 + } + ], + "proj:shape": [ + 7731, + 7591 + ], + "description": "Collection 2 Level-2 Short-wave Infrared Band 1.6 (B6) Surface Reflectance", + "proj:transform": [ + 30, + 0, + 304185, + 0, + -30, + -843585 + ] + }, + "SR_B7": { + "gsd": 30, + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B7.TIF", + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "title": "Short-wave Infrared Band 2.2 (B7)", + "eo:bands": [ + { + "gsd": 30, + "name": "SR_B7", + "common_name": "swir22", + "center_wavelength": 2.2, + "full_width_half_max": 0.2 + } + ], + "proj:shape": [ + 7731, + 7591 + ], + "description": "Collection 2 Level-2 Short-wave Infrared Band 2.2 (B7) Surface Reflectance", + "proj:transform": [ + 30, + 0, + 304185, + 0, + -30, + -843585 + ] + }, + "ST_QA": { + "gsd": 30, + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ST_QA.TIF", + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "title": "Surface Temperature Quality Assessment Band", + "proj:shape": [ + 7731, + 7591 + ], + "description": "Landsat Collection 2 Level-2 Surface Temperature Band Surface Temperature Product", + "proj:transform": [ + 30, + 0, + 304185, + 0, + -30, + -843585 + ] + }, + "ST_B10": { + "gsd": 100, + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ST_B10.TIF", + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "title": 
"Surface Temperature Band (B10)", + "eo:bands": [ + { + "gsd": 100, + "name": "ST_B10", + "common_name": "lwir11", + "center_wavelength": 10.9, + "full_width_half_max": 0.8 + } + ], + "proj:shape": [ + 7731, + 7591 + ], + "description": "Landsat Collection 2 Level-2 Surface Temperature Band (B10) Surface Temperature Product", + "proj:transform": [ + 30, + 0, + 304185, + 0, + -30, + -843585 + ] + }, + "MTL.txt": { + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_MTL.txt", + "type": "text/plain", + "title": "Product Metadata File", + "description": "Collection 2 Level-1 Product Metadata File (MTL)" + }, + "MTL.xml": { + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_MTL.xml", + "type": "application/xml", + "title": "Product Metadata File (xml)", + "description": "Collection 2 Level-1 Product Metadata File (xml)" + }, + "ST_DRAD": { + "gsd": 30, + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ST_DRAD.TIF", + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "title": "Downwelled Radiance Band", + "eo:bands": [ + { + "gsd": 30, + "name": "ST_DRAD", + "description": "downwelled radiance" + } + ], + "proj:shape": [ + 7731, + 7591 + ], + "description": "Landsat Collection 2 Level-2 Downwelled Radiance Band Surface Temperature Product", + "proj:transform": [ + 30, + 0, + 304185, + 0, + -30, + -843585 + ] + }, + "ST_EMIS": { + "gsd": 30, + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ST_EMIS.TIF", + "type": 
"image/tiff; application=geotiff; profile=cloud-optimized", + "title": "Emissivity Band", + "eo:bands": [ + { + "gsd": 30, + "name": "ST_EMIS", + "description": "emissivity" + } + ], + "proj:shape": [ + 7731, + 7591 + ], + "description": "Landsat Collection 2 Level-2 Emissivity Band Surface Temperature Product", + "proj:transform": [ + 30, + 0, + 304185, + 0, + -30, + -843585 + ] + }, + "ST_EMSD": { + "gsd": 30, + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ST_EMSD.TIF", + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "title": "Emissivity Standard Deviation Band", + "eo:bands": [ + { + "gsd": 30, + "name": "ST_EMSD", + "description": "emissivity standard deviation" + } + ], + "proj:shape": [ + 7731, + 7591 + ], + "description": "Landsat Collection 2 Level-2 Emissivity Standard Deviation Band Surface Temperature Product", + "proj:transform": [ + 30, + 0, + 304185, + 0, + -30, + -843585 + ] + } + }, + "links": [ + { + "href": "http://localhost:8081/collections/landsat-8-l1/items/LC82081612020043", + "rel": "self", + "type": "application/geo+json" + }, + { + "href": "http://localhost:8081/collections/landsat-8-l1", + "rel": "parent", + "type": "application/json" + }, + { + "href": "http://localhost:8081/collections/landsat-8-l1", + "rel": "collection", + "type": "application/json" + }, + { + "href": "http://localhost:8081/", + "rel": "root", + "type": "application/json" + } + ] +} \ No newline at end of file diff --git a/stac_fastapi/opensearch/tests/extensions/cql2/example01.json b/stac_fastapi/opensearch/tests/extensions/cql2/example01.json new file mode 100644 index 00000000..a70bd0d3 --- /dev/null +++ b/stac_fastapi/opensearch/tests/extensions/cql2/example01.json @@ -0,0 +1,9 @@ +{ + "op": "=", + "args": [ + { + "property": "scene_id" + }, + "LC82030282019133LGN00" + ] +} diff --git 
a/stac_fastapi/opensearch/tests/extensions/cql2/example04.json b/stac_fastapi/opensearch/tests/extensions/cql2/example04.json new file mode 100644 index 00000000..e087504c --- /dev/null +++ b/stac_fastapi/opensearch/tests/extensions/cql2/example04.json @@ -0,0 +1,32 @@ +{ + "op": "and", + "args": [ + { + "op": "<", + "args": [ + { + "property": "cloud_cover" + }, + 0.1 + ] + }, + { + "op": "=", + "args": [ + { + "property": "landsat:wrs_row" + }, + 28 + ] + }, + { + "op": "=", + "args": [ + { + "property": "landsat:wrs_path" + }, + 203 + ] + } + ] +} diff --git a/stac_fastapi/opensearch/tests/extensions/cql2/example05a.json b/stac_fastapi/opensearch/tests/extensions/cql2/example05a.json new file mode 100644 index 00000000..b5bd7a94 --- /dev/null +++ b/stac_fastapi/opensearch/tests/extensions/cql2/example05a.json @@ -0,0 +1,23 @@ +{ + "op": "or", + "args": [ + { + "op": "=", + "args": [ + { + "property": "cloud_cover" + }, + 0.1 + ] + }, + { + "op": "=", + "args": [ + { + "property": "cloud_cover" + }, + 0.2 + ] + } + ] +} diff --git a/stac_fastapi/opensearch/tests/extensions/cql2/example06b.json b/stac_fastapi/opensearch/tests/extensions/cql2/example06b.json new file mode 100644 index 00000000..fc2a7e56 --- /dev/null +++ b/stac_fastapi/opensearch/tests/extensions/cql2/example06b.json @@ -0,0 +1,41 @@ +{ + "op": "and", + "args": [ + { + "op": ">=", + "args": [ + { + "property": "cloud_cover" + }, + 0.1 + ] + }, + { + "op": "<=", + "args": [ + { + "property": "cloud_cover" + }, + 0.2 + ] + }, + { + "op": "=", + "args": [ + { + "property": "landsat:wrs_row" + }, + 28 + ] + }, + { + "op": "=", + "args": [ + { + "property": "landsat:wrs_path" + }, + 203 + ] + } + ] +} diff --git a/stac_fastapi/opensearch/tests/extensions/cql2/example08.json b/stac_fastapi/opensearch/tests/extensions/cql2/example08.json new file mode 100644 index 00000000..2f06413f --- /dev/null +++ b/stac_fastapi/opensearch/tests/extensions/cql2/example08.json @@ -0,0 +1,79 @@ +{ + "op": "and", + 
"args": [ + { + "op": "=", + "args": [ + { + "property": "beamMode" + }, + "ScanSAR Narrow" + ] + }, + { + "op": "=", + "args": [ + { + "property": "swathDirection" + }, + "ascending" + ] + }, + { + "op": "=", + "args": [ + { + "property": "polarization" + }, + "HH+VV+HV+VH" + ] + }, + { + "op": "s_intersects", + "args": [ + { + "property": "footprint" + }, + { + "type": "Polygon", + "coordinates": [ + [ + [ + -77.117938, + 38.93686 + ], + [ + -77.040604, + 39.995648 + ], + [ + -76.910536, + 38.892912 + ], + [ + -77.039359, + 38.791753 + ], + [ + -77.047906, + 38.841462 + ], + [ + -77.034183, + 38.840655 + ], + [ + -77.033142, + 38.85749 + ], + [ + -77.117938, + 38.93686 + ] + ] + ] + } + ] + } + ] +} diff --git a/stac_fastapi/opensearch/tests/extensions/cql2/example09.json b/stac_fastapi/opensearch/tests/extensions/cql2/example09.json new file mode 100644 index 00000000..9f562fb4 --- /dev/null +++ b/stac_fastapi/opensearch/tests/extensions/cql2/example09.json @@ -0,0 +1,9 @@ +{ + "op": ">", + "args": [ + { + "property": "floors" + }, + 5 + ] +} diff --git a/stac_fastapi/opensearch/tests/extensions/cql2/example1.json b/stac_fastapi/opensearch/tests/extensions/cql2/example1.json new file mode 100644 index 00000000..48483548 --- /dev/null +++ b/stac_fastapi/opensearch/tests/extensions/cql2/example1.json @@ -0,0 +1,39 @@ + { + "op": "and", + "args": [ + { + "op": "=", + "args": [ + {"property": "id"}, + "LC08_L1TP_060247_20180905_20180912_01_T1_L1TP" + ] + }, + {"op": "=", "args": [{"property": "collection"}, "landsat8_l1tp"]}, + { + "op": ">", + "args": [ + {"property": "properties.datetime"}, + {"timestamp": "2022-04-29T00:00:00Z"} + ] + }, + {"op": "<", "args": [{"property": "properties.eo:cloud_cover"}, 10]}, + { + "op": "s_intersects", + "args": [ + {"property": "geometry"}, + { + "type": "Polygon", + "coordinates": [ + [ + [36.319836, 32.288087], + [36.320041, 32.288032], + [36.320210, 32.288402], + [36.320008, 32.288458], + [36.319836, 32.288087] + ] + ] + } + 
] + } + ] +} \ No newline at end of file diff --git a/stac_fastapi/opensearch/tests/extensions/cql2/example10.json b/stac_fastapi/opensearch/tests/extensions/cql2/example10.json new file mode 100644 index 00000000..870303ea --- /dev/null +++ b/stac_fastapi/opensearch/tests/extensions/cql2/example10.json @@ -0,0 +1,9 @@ +{ + "op": "<=", + "args": [ + { + "property": "taxes" + }, + 500 + ] +} diff --git a/stac_fastapi/opensearch/tests/extensions/cql2/example14.json b/stac_fastapi/opensearch/tests/extensions/cql2/example14.json new file mode 100644 index 00000000..fad45d48 --- /dev/null +++ b/stac_fastapi/opensearch/tests/extensions/cql2/example14.json @@ -0,0 +1,9 @@ +{ + "op": "=", + "args": [ + { + "property": "swimming_pool" + }, + true + ] +} diff --git a/stac_fastapi/opensearch/tests/extensions/cql2/example15.json b/stac_fastapi/opensearch/tests/extensions/cql2/example15.json new file mode 100644 index 00000000..98f96797 --- /dev/null +++ b/stac_fastapi/opensearch/tests/extensions/cql2/example15.json @@ -0,0 +1,23 @@ +{ + "op": "and", + "args": [ + { + "op": ">", + "args": [ + { + "property": "floor" + }, + 5 + ] + }, + { + "op": "=", + "args": [ + { + "property": "swimming_pool" + }, + true + ] + } + ] +} diff --git a/stac_fastapi/opensearch/tests/extensions/cql2/example17.json b/stac_fastapi/opensearch/tests/extensions/cql2/example17.json new file mode 100644 index 00000000..9b215273 --- /dev/null +++ b/stac_fastapi/opensearch/tests/extensions/cql2/example17.json @@ -0,0 +1,37 @@ +{ + "op": "or", + "args": [ + { + "op": "and", + "args": [ + { + "op": ">", + "args": [ + { + "property": "floors" + }, + 5 + ] + }, + { + "op": "=", + "args": [ + { + "property": "material" + }, + "brick" + ] + } + ] + }, + { + "op": "=", + "args": [ + { + "property": "swimming_pool" + }, + true + ] + } + ] +} diff --git a/stac_fastapi/opensearch/tests/extensions/cql2/example18.json b/stac_fastapi/opensearch/tests/extensions/cql2/example18.json new file mode 100644 index 
00000000..7087a151 --- /dev/null +++ b/stac_fastapi/opensearch/tests/extensions/cql2/example18.json @@ -0,0 +1,28 @@ +{ + "op": "or", + "args": [ + { + "op": "not", + "args": [ + { + "op": "<", + "args": [ + { + "property": "floors" + }, + 5 + ] + } + ] + }, + { + "op": "=", + "args": [ + { + "property": "swimming_pool" + }, + true + ] + } + ] +} diff --git a/stac_fastapi/opensearch/tests/extensions/cql2/example19.json b/stac_fastapi/opensearch/tests/extensions/cql2/example19.json new file mode 100755 index 00000000..0e4306fb --- /dev/null +++ b/stac_fastapi/opensearch/tests/extensions/cql2/example19.json @@ -0,0 +1,9 @@ +{ + "op": "like", + "args": [ + { + "property": "scene_id" + }, + "LC82030282019133%" + ] +} diff --git a/stac_fastapi/opensearch/tests/extensions/cql2/example20.json b/stac_fastapi/opensearch/tests/extensions/cql2/example20.json new file mode 100755 index 00000000..f7412fc0 --- /dev/null +++ b/stac_fastapi/opensearch/tests/extensions/cql2/example20.json @@ -0,0 +1,10 @@ +{ + "op": "like", + "args": [ + { + "property": "scene_id" + }, + "LC82030282019133LGN0_" + ] + } + \ No newline at end of file diff --git a/stac_fastapi/opensearch/tests/extensions/cql2/example21.json b/stac_fastapi/opensearch/tests/extensions/cql2/example21.json new file mode 100644 index 00000000..175b8732 --- /dev/null +++ b/stac_fastapi/opensearch/tests/extensions/cql2/example21.json @@ -0,0 +1,33 @@ +{ + "op": "and", + "args": [ + { + "op": "between", + "args": [ + { + "property": "cloud_cover" + }, + 0.1, + 0.2 + ] + }, + { + "op": "=", + "args": [ + { + "property": "landsat:wrs_row" + }, + 28 + ] + }, + { + "op": "=", + "args": [ + { + "property": "landsat:wrs_path" + }, + 203 + ] + } + ] +} \ No newline at end of file diff --git a/stac_fastapi/opensearch/tests/extensions/cql2/example22.json b/stac_fastapi/opensearch/tests/extensions/cql2/example22.json new file mode 100644 index 00000000..880c90c3 --- /dev/null +++ 
b/stac_fastapi/opensearch/tests/extensions/cql2/example22.json @@ -0,0 +1,13 @@ +{ + "op": "and", + "args": [ + { + "op": "in", + "args": [ + {"property": "id"}, + ["LC08_L1TP_060247_20180905_20180912_01_T1_L1TP"] + ] + }, + {"op": "=", "args": [{"property": "collection"}, "landsat8_l1tp"]} + ] +} \ No newline at end of file diff --git a/stac_fastapi/opensearch/tests/extensions/test_filter.py b/stac_fastapi/opensearch/tests/extensions/test_filter.py new file mode 100644 index 00000000..72cea59f --- /dev/null +++ b/stac_fastapi/opensearch/tests/extensions/test_filter.py @@ -0,0 +1,402 @@ +import json +import os +from os import listdir +from os.path import isfile, join + +import pytest + +THIS_DIR = os.path.dirname(os.path.abspath(__file__)) + + +@pytest.mark.asyncio +async def test_search_filters_post(app_client, ctx): + + filters = [] + pwd = f"{THIS_DIR}/cql2" + for fn in [fn for f in listdir(pwd) if isfile(fn := join(pwd, f))]: + with open(fn) as f: + filters.append(json.loads(f.read())) + + for _filter in filters: + resp = await app_client.post("/search", json={"filter": _filter}) + assert resp.status_code == 200 + + +@pytest.mark.asyncio +async def test_search_filter_extension_eq_get(app_client, ctx): + resp = await app_client.get( + '/search?filter-lang=cql2-json&filter={"op":"=","args":[{"property":"id"},"test-item"]}' + ) + assert resp.status_code == 200 + resp_json = resp.json() + assert len(resp_json["features"]) == 1 + + +@pytest.mark.asyncio +async def test_search_filter_extension_eq_post(app_client, ctx): + params = {"filter": {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}} + resp = await app_client.post("/search", json=params) + assert resp.status_code == 200 + resp_json = resp.json() + assert len(resp_json["features"]) == 1 + + +@pytest.mark.asyncio +async def test_search_filter_extension_gte_get(app_client, ctx): + # there's one item that can match, so one of these queries should match it and the other shouldn't + resp = await 
app_client.get( + '/search?filter-lang=cql2-json&filter={"op":"<=","args":[{"property": "properties.proj:epsg"},32756]}' + ) + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 1 + + resp = await app_client.get( + '/search?filter-lang=cql2-json&filter={"op":">","args":[{"property": "properties.proj:epsg"},32756]}' + ) + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 0 + + +@pytest.mark.asyncio +async def test_search_filter_extension_gte_post(app_client, ctx): + # there's one item that can match, so one of these queries should match it and the other shouldn't + params = { + "filter": { + "op": "<=", + "args": [ + {"property": "properties.proj:epsg"}, + ctx.item["properties"]["proj:epsg"], + ], + } + } + resp = await app_client.post("/search", json=params) + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 1 + + params = { + "filter": { + "op": ">", + "args": [ + {"property": "properties.proj:epsg"}, + ctx.item["properties"]["proj:epsg"], + ], + } + } + resp = await app_client.post("/search", json=params) + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 0 + + +@pytest.mark.asyncio +async def test_search_filter_ext_and_get(app_client, ctx): + resp = await app_client.get( + '/search?filter-lang=cql2-json&filter={"op":"and","args":[{"op":"<=","args":[{"property":"properties.proj:epsg"},32756]},{"op":"=","args":[{"property":"id"},"test-item"]}]}' + ) + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 1 + + +@pytest.mark.asyncio +async def test_search_filter_ext_and_get_id(app_client, ctx): + collection = ctx.item["collection"] + id = ctx.item["id"] + filter = f"id='{id}' AND collection='{collection}'" + resp = await app_client.get(f"/search?&filter={filter}") + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 1 + + +@pytest.mark.asyncio +async def test_search_filter_ext_and_get_cql2text_id(app_client, ctx): + collection 
= ctx.item["collection"] + id = ctx.item["id"] + filter = f"id='{id}' AND collection='{collection}'" + resp = await app_client.get(f"/search?filter-lang=cql2-text&filter={filter}") + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 1 + + +@pytest.mark.asyncio +async def test_search_filter_ext_and_get_cql2text_cloud_cover(app_client, ctx): + collection = ctx.item["collection"] + cloud_cover = ctx.item["properties"]["eo:cloud_cover"] + filter = f"cloud_cover={cloud_cover} AND collection='{collection}'" + resp = await app_client.get(f"/search?filter-lang=cql2-text&filter={filter}") + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 1 + + +@pytest.mark.asyncio +async def test_search_filter_ext_and_get_cql2text_cloud_cover_no_results( + app_client, ctx +): + collection = ctx.item["collection"] + cloud_cover = ctx.item["properties"]["eo:cloud_cover"] + 1 + filter = f"cloud_cover={cloud_cover} AND collection='{collection}'" + resp = await app_client.get(f"/search?filter-lang=cql2-text&filter={filter}") + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 0 + + +@pytest.mark.asyncio +async def test_search_filter_ext_and_post(app_client, ctx): + params = { + "filter": { + "op": "and", + "args": [ + { + "op": "<=", + "args": [ + {"property": "properties.proj:epsg"}, + ctx.item["properties"]["proj:epsg"], + ], + }, + {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}, + ], + } + } + resp = await app_client.post("/search", json=params) + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 1 + + +@pytest.mark.asyncio +async def test_search_filter_extension_floats_get(app_client, ctx): + resp = await app_client.get( + 
"""/search?filter-lang=cql2-json&filter={"op":"and","args":[{"op":"=","args":[{"property":"id"},"test-item"]},{"op":">","args":[{"property":"properties.view:sun_elevation"},"-37.30891534"]},{"op":"<","args":[{"property":"properties.view:sun_elevation"},"-37.30691534"]}]}""" + ) + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 1 + + resp = await app_client.get( + """/search?filter-lang=cql2-json&filter={"op":"and","args":[{"op":"=","args":[{"property":"id"},"test-item-7"]},{"op":">","args":[{"property":"properties.view:sun_elevation"},"-37.30891534"]},{"op":"<","args":[{"property":"properties.view:sun_elevation"},"-37.30691534"]}]}""" + ) + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 0 + + resp = await app_client.get( + """/search?filter-lang=cql2-json&filter={"op":"and","args":[{"op":"=","args":[{"property":"id"},"test-item"]},{"op":">","args":[{"property":"properties.view:sun_elevation"},"-37.30591534"]},{"op":"<","args":[{"property":"properties.view:sun_elevation"},"-37.30491534"]}]}""" + ) + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 0 + + +@pytest.mark.asyncio +async def test_search_filter_extension_floats_post(app_client, ctx): + sun_elevation = ctx.item["properties"]["view:sun_elevation"] + + params = { + "filter": { + "op": "and", + "args": [ + {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}, + { + "op": ">", + "args": [ + {"property": "properties.view:sun_elevation"}, + sun_elevation - 0.01, + ], + }, + { + "op": "<", + "args": [ + {"property": "properties.view:sun_elevation"}, + sun_elevation + 0.01, + ], + }, + ], + } + } + resp = await app_client.post("/search", json=params) + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 1 + + +@pytest.mark.asyncio +async def test_search_filter_extension_wildcard_cql2(app_client, ctx): + single_char = ctx.item["id"][:-1] + "_" + multi_char = ctx.item["id"][:-3] + "%" + + params = { + "filter": { 
+ "op": "and", + "args": [ + {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}, + { + "op": "like", + "args": [ + {"property": "id"}, + single_char, + ], + }, + { + "op": "like", + "args": [ + {"property": "id"}, + multi_char, + ], + }, + ], + } + } + + resp = await app_client.post("/search", json=params) + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 1 + + +@pytest.mark.asyncio +async def test_search_filter_extension_wildcard_es(app_client, ctx): + single_char = ctx.item["id"][:-1] + "?" + multi_char = ctx.item["id"][:-3] + "*" + + params = { + "filter": { + "op": "and", + "args": [ + {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}, + { + "op": "like", + "args": [ + {"property": "id"}, + single_char, + ], + }, + { + "op": "like", + "args": [ + {"property": "id"}, + multi_char, + ], + }, + ], + } + } + + resp = await app_client.post("/search", json=params) + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 1 + + +@pytest.mark.asyncio +async def test_search_filter_extension_escape_chars(app_client, ctx): + esc_chars = ( + ctx.item["properties"]["landsat:product_id"].replace("_", "\\_")[:-1] + "_" + ) + + params = { + "filter": { + "op": "and", + "args": [ + {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}, + { + "op": "like", + "args": [ + {"property": "properties.landsat:product_id"}, + esc_chars, + ], + }, + ], + } + } + + resp = await app_client.post("/search", json=params) + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 1 + + +@pytest.mark.asyncio +async def test_search_filter_extension_in(app_client, ctx): + product_id = ctx.item["properties"]["landsat:product_id"] + + params = { + "filter": { + "op": "and", + "args": [ + {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}, + { + "op": "in", + "args": [ + {"property": "properties.landsat:product_id"}, + [product_id], + ], + }, + ], + } + } + + resp = await app_client.post("/search", json=params) + 
+ assert resp.status_code == 200 + assert len(resp.json()["features"]) == 1 + + +@pytest.mark.asyncio +async def test_search_filter_extension_in_no_list(app_client, ctx): + product_id = ctx.item["properties"]["landsat:product_id"] + + params = { + "filter": { + "op": "and", + "args": [ + {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}, + { + "op": "in", + "args": [ + {"property": "properties.landsat:product_id"}, + product_id, + ], + }, + ], + } + } + + resp = await app_client.post("/search", json=params) + + assert resp.status_code == 400 + assert resp.json() == { + "detail": f"Error with cql2_json filter: Arg {product_id} is not a list" + } + + +@pytest.mark.asyncio +async def test_search_filter_extension_between(app_client, ctx): + sun_elevation = ctx.item["properties"]["view:sun_elevation"] + + params = { + "filter": { + "op": "and", + "args": [ + {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}, + { + "op": "between", + "args": [ + {"property": "properties.view:sun_elevation"}, + sun_elevation - 0.01, + sun_elevation + 0.01, + ], + }, + ], + } + } + resp = await app_client.post("/search", json=params) + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 1 diff --git a/stac_fastapi/opensearch/tests/resources/__init__.py b/stac_fastapi/opensearch/tests/resources/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/stac_fastapi/opensearch/tests/resources/test_collection.py b/stac_fastapi/opensearch/tests/resources/test_collection.py new file mode 100644 index 00000000..9061ac1e --- /dev/null +++ b/stac_fastapi/opensearch/tests/resources/test_collection.py @@ -0,0 +1,123 @@ +import uuid + +import pystac +import pytest + +from ..conftest import create_collection, delete_collections_and_items, refresh_indices + + +@pytest.mark.asyncio +async def test_create_and_delete_collection(app_client, load_test_data): + """Test creation and deletion of a collection""" + test_collection = 
load_test_data("test_collection.json") + test_collection["id"] = "test" + + resp = await app_client.post("/collections", json=test_collection) + assert resp.status_code == 200 + + resp = await app_client.delete(f"/collections/{test_collection['id']}") + assert resp.status_code == 204 + + +@pytest.mark.asyncio +async def test_create_collection_conflict(app_client, ctx): + """Test creation of a collection which already exists""" + # This collection ID is created in the fixture, so this should be a conflict + resp = await app_client.post("/collections", json=ctx.collection) + assert resp.status_code == 409 + + +@pytest.mark.asyncio +async def test_delete_missing_collection(app_client): + """Test deletion of a collection which does not exist""" + resp = await app_client.delete("/collections/missing-collection") + assert resp.status_code == 404 + + +@pytest.mark.asyncio +async def test_update_collection_already_exists(ctx, app_client): + """Test updating a collection which already exists""" + ctx.collection["keywords"].append("test") + resp = await app_client.put("/collections", json=ctx.collection) + assert resp.status_code == 200 + + resp = await app_client.get(f"/collections/{ctx.collection['id']}") + assert resp.status_code == 200 + resp_json = resp.json() + assert "test" in resp_json["keywords"] + + +@pytest.mark.asyncio +async def test_update_new_collection(app_client, load_test_data): + """Test updating a collection which does not exist (same as creation)""" + test_collection = load_test_data("test_collection.json") + test_collection["id"] = "new-test-collection" + + resp = await app_client.put("/collections", json=test_collection) + assert resp.status_code == 404 + + +@pytest.mark.asyncio +async def test_collection_not_found(app_client): + """Test read a collection which does not exist""" + resp = await app_client.get("/collections/does-not-exist") + assert resp.status_code == 404 + + +@pytest.mark.asyncio +async def test_returns_valid_collection(ctx, 
app_client): + """Test validates fetched collection with jsonschema""" + resp = await app_client.put("/collections", json=ctx.collection) + assert resp.status_code == 200 + + resp = await app_client.get(f"/collections/{ctx.collection['id']}") + assert resp.status_code == 200 + resp_json = resp.json() + + # Mock root to allow validation + mock_root = pystac.Catalog( + id="test", description="test desc", href="https://example.com" + ) + collection = pystac.Collection.from_dict( + resp_json, root=mock_root, preserve_dict=False + ) + collection.validate() + + +@pytest.mark.asyncio +async def test_pagination_collection(app_client, ctx, txn_client): + """Test collection pagination links""" + + # Clear existing collections if necessary + await delete_collections_and_items(txn_client) + + # Ingest 6 collections + ids = set() + for _ in range(6): + ctx.collection["id"] = str(uuid.uuid4()) + await create_collection(txn_client, collection=ctx.collection) + ids.add(ctx.collection["id"]) + + await refresh_indices(txn_client) + + # Paginate through all 6 collections with a limit of 1 + collection_ids = set() + page = await app_client.get("/collections", params={"limit": 1}) + while True: + page_data = page.json() + assert ( + len(page_data["collections"]) <= 1 + ) # Each page should have 1 or 0 collections + collection_ids.update(coll["id"] for coll in page_data["collections"]) + + next_link = next( + (link for link in page_data["links"] if link["rel"] == "next"), None + ) + if not next_link: + break # No more pages + + href = next_link["href"][len("http://test-server") :] + page = await app_client.get(href) + + # Confirm we have paginated through all collections + assert collection_ids == ids diff --git a/stac_fastapi/opensearch/tests/resources/test_conformance.py b/stac_fastapi/opensearch/tests/resources/test_conformance.py new file mode 100644 index 00000000..d93d8b81 --- /dev/null +++ b/stac_fastapi/opensearch/tests/resources/test_conformance.py @@ -0,0 +1,72 @@ +import 
urllib.parse + +import pytest +import pytest_asyncio + + +@pytest_asyncio.fixture +async def response(app_client): + return await app_client.get("/") + + +@pytest.fixture +def response_json(response): + return response.json() + + +def get_link(landing_page, rel_type): + return next( + filter(lambda link: link["rel"] == rel_type, landing_page["links"]), None + ) + + +@pytest.mark.asyncio +async def test_landing_page_health(response): + """Test landing page""" + assert response.status_code == 200 + assert response.headers["content-type"] == "application/json" + + +# Parameters for test_landing_page_links test below. +# Each tuple has the following values (in this order): +# - Rel type of link to test +# - Expected MIME/Media Type +# - Expected relative path +link_tests = [ + ("root", "application/json", "/"), + ("conformance", "application/json", "/conformance"), + ("service-doc", "text/html", "/api.html"), + ("service-desc", "application/vnd.oai.openapi+json;version=3.0", "/api"), +] + + +@pytest.mark.asyncio +@pytest.mark.parametrize("rel_type,expected_media_type,expected_path", link_tests) +async def test_landing_page_links( + response_json, app_client, rel_type, expected_media_type, expected_path +): + link = get_link(response_json, rel_type) + + assert link is not None, f"Missing {rel_type} link in landing page" + assert link.get("type") == expected_media_type + + link_path = urllib.parse.urlsplit(link.get("href")).path + assert link_path == expected_path + + resp = await app_client.get(link_path) + assert resp.status_code == 200 + + +# This endpoint currently returns a 404 for empty result sets, but testing for this response +# code here seems meaningless since it would be the same as if the endpoint did not exist. Once +# https://github.com/stac-utils/stac-fastapi/pull/227 has been merged we can add this to the +# parameterized tests above. 
+@pytest.mark.asyncio +async def test_search_link(response_json): + search_link = get_link(response_json, "search") + + assert search_link is not None + assert search_link.get("type") == "application/geo+json" + + search_path = urllib.parse.urlsplit(search_link.get("href")).path + assert search_path == "/search" diff --git a/stac_fastapi/opensearch/tests/resources/test_item.py b/stac_fastapi/opensearch/tests/resources/test_item.py new file mode 100644 index 00000000..e62da8b8 --- /dev/null +++ b/stac_fastapi/opensearch/tests/resources/test_item.py @@ -0,0 +1,804 @@ +import json +import os +import uuid +from copy import deepcopy +from datetime import datetime, timedelta +from random import randint +from urllib.parse import parse_qs, urlparse, urlsplit + +import ciso8601 +import pystac +import pytest +from geojson_pydantic.geometries import Polygon +from pystac.utils import datetime_to_str + +from stac_fastapi.core.core import CoreClient +from stac_fastapi.core.datetime_utils import now_to_rfc3339_str +from stac_fastapi.elasticsearch.database_logic import DatabaseLogic +from stac_fastapi.types.core import LandingPageMixin + +from ..conftest import create_item, refresh_indices + + +def rfc3339_str_to_datetime(s: str) -> datetime: + return ciso8601.parse_rfc3339(s) + + +database_logic = DatabaseLogic() + + +@pytest.mark.asyncio +async def test_create_and_delete_item(app_client, ctx, txn_client): + """Test creation and deletion of a single item (transactions extension)""" + + test_item = ctx.item + + resp = await app_client.get( + f"/collections/{test_item['collection']}/items/{test_item['id']}" + ) + assert resp.status_code == 200 + + resp = await app_client.delete( + f"/collections/{test_item['collection']}/items/{test_item['id']}" + ) + assert resp.status_code == 204 + + await refresh_indices(txn_client) + + resp = await app_client.get( + f"/collections/{test_item['collection']}/items/{test_item['id']}" + ) + assert resp.status_code == 404 + + +@pytest.mark.asyncio 
+async def test_create_item_conflict(app_client, ctx): + """Test creation of an item which already exists (transactions extension)""" + + test_item = ctx.item + + resp = await app_client.post( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 409 + + +@pytest.mark.asyncio +async def test_delete_missing_item(app_client, load_test_data): + """Test deletion of an item which does not exist (transactions extension)""" + test_item = load_test_data("test_item.json") + resp = await app_client.delete( + f"/collections/{test_item['collection']}/items/hijosh" + ) + assert resp.status_code == 404 + + +@pytest.mark.asyncio +async def test_create_item_missing_collection(app_client, ctx): + """Test creation of an item without a parent collection (transactions extension)""" + ctx.item["collection"] = "stac_is_cool" + resp = await app_client.post( + f"/collections/{ctx.item['collection']}/items", json=ctx.item + ) + assert resp.status_code == 404 + + +@pytest.mark.asyncio +async def test_create_uppercase_collection_with_item(app_client, ctx, txn_client): + """Test creation of a collection and item with uppercase collection ID (transactions extension)""" + collection_id = "UPPERCASE" + ctx.item["collection"] = collection_id + ctx.collection["id"] = collection_id + resp = await app_client.post("/collections", json=ctx.collection) + assert resp.status_code == 200 + await refresh_indices(txn_client) + resp = await app_client.post(f"/collections/{collection_id}/items", json=ctx.item) + assert resp.status_code == 200 + + +@pytest.mark.asyncio +async def test_update_item_already_exists(app_client, ctx): + """Test updating an item which already exists (transactions extension)""" + + assert ctx.item["properties"]["gsd"] != 16 + ctx.item["properties"]["gsd"] = 16 + await app_client.put( + f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}", json=ctx.item + ) + resp = await app_client.get( + 
f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}" + ) + updated_item = resp.json() + assert updated_item["properties"]["gsd"] == 16 + + await app_client.delete( + f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}" + ) + + +@pytest.mark.asyncio +async def test_update_new_item(app_client, ctx): + """Test updating an item which does not exist (transactions extension)""" + test_item = ctx.item + test_item["id"] = "a" + + resp = await app_client.put( + f"/collections/{test_item['collection']}/items/{test_item['id']}", + json=test_item, + ) + assert resp.status_code == 404 + + +@pytest.mark.asyncio +async def test_update_item_missing_collection(app_client, ctx): + """Test updating an item without a parent collection (transactions extension)""" + # Try to update collection of the item + ctx.item["collection"] = "stac_is_cool" + resp = await app_client.put( + f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}", json=ctx.item + ) + assert resp.status_code == 404 + + +@pytest.mark.asyncio +async def test_update_item_geometry(app_client, ctx): + ctx.item["id"] = "update_test_item_1" + + # Create the item + resp = await app_client.post( + f"/collections/{ctx.item['collection']}/items", json=ctx.item + ) + assert resp.status_code == 200 + + new_coordinates = [ + [ + [142.15052873427666, -33.82243006904891], + [140.1000346138806, -34.257132625788756], + [139.5776607193635, -32.514709769700254], + [141.6262528041627, -32.08081674221862], + [142.15052873427666, -33.82243006904891], + ] + ] + + # Update the geometry of the item + ctx.item["geometry"]["coordinates"] = new_coordinates + resp = await app_client.put( + f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}", json=ctx.item + ) + assert resp.status_code == 200 + + # Fetch the updated item + resp = await app_client.get( + f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}" + ) + assert resp.status_code == 200 + assert resp.json()["geometry"]["coordinates"] == 
new_coordinates + + +@pytest.mark.asyncio +async def test_get_item(app_client, ctx): + """Test read an item by id (core)""" + get_item = await app_client.get( + f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}" + ) + assert get_item.status_code == 200 + + +@pytest.mark.asyncio +async def test_returns_valid_item(app_client, ctx): + """Test validates fetched item with jsonschema""" + test_item = ctx.item + get_item = await app_client.get( + f"/collections/{test_item['collection']}/items/{test_item['id']}" + ) + assert get_item.status_code == 200 + item_dict = get_item.json() + # Mock root to allow validation + mock_root = pystac.Catalog( + id="test", description="test desc", href="https://example.com" + ) + item = pystac.Item.from_dict(item_dict, preserve_dict=False, root=mock_root) + item.validate() + + +@pytest.mark.asyncio +async def test_get_item_collection(app_client, ctx, txn_client): + """Test read an item collection (core)""" + item_count = randint(1, 4) + + for idx in range(item_count): + ctx.item["id"] = f'{ctx.item["id"]}{idx}' + await create_item(txn_client, ctx.item) + + resp = await app_client.get(f"/collections/{ctx.item['collection']}/items") + assert resp.status_code == 200 + + item_collection = resp.json() + if matched := item_collection["context"].get("matched"): + assert matched == item_count + 1 + + +@pytest.mark.asyncio +async def test_item_collection_filter_bbox(app_client, ctx): + item = ctx.item + collection = item["collection"] + + bbox = "100,-50,170,-20" + resp = await app_client.get( + f"/collections/{collection}/items", params={"bbox": bbox} + ) + assert resp.status_code == 200 + resp_json = resp.json() + assert len(resp_json["features"]) == 1 + + bbox = "1,2,3,4" + resp = await app_client.get( + f"/collections/{collection}/items", params={"bbox": bbox} + ) + assert resp.status_code == 200 + resp_json = resp.json() + assert len(resp_json["features"]) == 0 + + +@pytest.mark.asyncio +async def 
test_item_collection_filter_datetime(app_client, ctx): + item = ctx.item + collection = item["collection"] + + datetime_range = "2020-01-01T00:00:00.00Z/.." + resp = await app_client.get( + f"/collections/{collection}/items", params={"datetime": datetime_range} + ) + assert resp.status_code == 200 + resp_json = resp.json() + assert len(resp_json["features"]) == 1 + + datetime_range = "2018-01-01T00:00:00.00Z/2019-01-01T00:00:00.00Z" + resp = await app_client.get( + f"/collections/{collection}/items", params={"datetime": datetime_range} + ) + assert resp.status_code == 200 + resp_json = resp.json() + assert len(resp_json["features"]) == 0 + + +@pytest.mark.asyncio +@pytest.mark.skip(reason="Pagination extension not implemented") +async def test_pagination(app_client, load_test_data): + """Test item collection pagination (paging extension)""" + item_count = 10 + test_item = load_test_data("test_item.json") + + for idx in range(item_count): + _test_item = deepcopy(test_item) + _test_item["id"] = test_item["id"] + str(idx) + resp = await app_client.post( + f"/collections/{test_item['collection']}/items", json=_test_item + ) + assert resp.status_code == 200 + + resp = await app_client.get( + f"/collections/{test_item['collection']}/items", params={"limit": 3} + ) + assert resp.status_code == 200 + first_page = resp.json() + assert first_page["context"]["returned"] == 3 + + url_components = urlsplit(first_page["links"][0]["href"]) + resp = await app_client.get(f"{url_components.path}?{url_components.query}") + assert resp.status_code == 200 + second_page = resp.json() + assert second_page["context"]["returned"] == 3 + + +@pytest.mark.asyncio +async def test_item_timestamps(app_client, ctx): + """Test created and updated timestamps (common metadata)""" + # start_time = now_to_rfc3339_str() + + created_dt = ctx.item["properties"]["created"] + + # todo, check lower bound + # assert start_time < created_dt < now_to_rfc3339_str() + assert created_dt < now_to_rfc3339_str() + + 
# Confirm `updated` timestamp + ctx.item["properties"]["proj:epsg"] = 4326 + resp = await app_client.put( + f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}", + json=dict(ctx.item), + ) + assert resp.status_code == 200 + updated_item = resp.json() + + # Created shouldn't change on update + assert ctx.item["properties"]["created"] == updated_item["properties"]["created"] + assert updated_item["properties"]["updated"] > created_dt + + await app_client.delete( + f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}" + ) + + +@pytest.mark.asyncio +async def test_item_search_by_id_post(app_client, ctx, txn_client): + """Test POST search by item id (core)""" + ids = ["test1", "test2", "test3"] + for _id in ids: + ctx.item["id"] = _id + await create_item(txn_client, ctx.item) + + params = {"collections": [ctx.item["collection"]], "ids": ids} + resp = await app_client.post("/search", json=params) + assert resp.status_code == 200 + resp_json = resp.json() + assert len(resp_json["features"]) == len(ids) + assert set([feat["id"] for feat in resp_json["features"]]) == set(ids) + + +@pytest.mark.asyncio +async def test_item_search_spatial_query_post(app_client, ctx): + """Test POST search with spatial query (core)""" + test_item = ctx.item + + params = { + "collections": [test_item["collection"]], + "intersects": test_item["geometry"], + } + resp = await app_client.post("/search", json=params) + assert resp.status_code == 200 + resp_json = resp.json() + assert resp_json["features"][0]["id"] == test_item["id"] + + +@pytest.mark.asyncio +async def test_item_search_temporal_query_post(app_client, ctx): + """Test POST search with single-tailed spatio-temporal query (core)""" + + test_item = ctx.item + + item_date = rfc3339_str_to_datetime(test_item["properties"]["datetime"]) + item_date = item_date + timedelta(seconds=1) + + params = { + "collections": [test_item["collection"]], + "intersects": test_item["geometry"], + "datetime": 
f"../{datetime_to_str(item_date)}", + } + resp = await app_client.post("/search", json=params) + resp_json = resp.json() + assert resp_json["features"][0]["id"] == test_item["id"] + + +@pytest.mark.asyncio +async def test_item_search_temporal_window_post(app_client, ctx): + """Test POST search with two-tailed spatio-temporal query (core)""" + test_item = ctx.item + + item_date = rfc3339_str_to_datetime(test_item["properties"]["datetime"]) + item_date_before = item_date - timedelta(seconds=1) + item_date_after = item_date + timedelta(seconds=1) + + params = { + "collections": [test_item["collection"]], + "intersects": test_item["geometry"], + "datetime": f"{datetime_to_str(item_date_before)}/{datetime_to_str(item_date_after)}", + } + resp = await app_client.post("/search", json=params) + resp_json = resp.json() + assert resp_json["features"][0]["id"] == test_item["id"] + + +@pytest.mark.asyncio +@pytest.mark.skip(reason="KeyError: 'features") +async def test_item_search_temporal_open_window(app_client, ctx): + """Test POST search with open spatio-temporal query (core)""" + test_item = ctx.item + params = { + "collections": [test_item["collection"]], + "intersects": test_item["geometry"], + "datetime": "../..", + } + resp = await app_client.post("/search", json=params) + resp_json = resp.json() + assert resp_json["features"][0]["id"] == test_item["id"] + + +@pytest.mark.asyncio +async def test_item_search_by_id_get(app_client, ctx, txn_client): + """Test GET search by item id (core)""" + ids = ["test1", "test2", "test3"] + for _id in ids: + ctx.item["id"] = _id + await create_item(txn_client, ctx.item) + + params = {"collections": ctx.item["collection"], "ids": ",".join(ids)} + resp = await app_client.get("/search", params=params) + assert resp.status_code == 200 + resp_json = resp.json() + assert len(resp_json["features"]) == len(ids) + assert set([feat["id"] for feat in resp_json["features"]]) == set(ids) + + +@pytest.mark.asyncio +async def 
test_item_search_bbox_get(app_client, ctx): + """Test GET search with spatial query (core)""" + params = { + "collections": ctx.item["collection"], + "bbox": ",".join([str(coord) for coord in ctx.item["bbox"]]), + } + resp = await app_client.get("/search", params=params) + assert resp.status_code == 200 + resp_json = resp.json() + assert resp_json["features"][0]["id"] == ctx.item["id"] + + +@pytest.mark.asyncio +async def test_item_search_get_without_collections(app_client, ctx): + """Test GET search without specifying collections""" + + params = { + "bbox": ",".join([str(coord) for coord in ctx.item["bbox"]]), + } + resp = await app_client.get("/search", params=params) + assert resp.status_code == 200 + + +@pytest.mark.asyncio +async def test_item_search_get_with_non_existent_collections(app_client, ctx): + """Test GET search with non-existent collections""" + + params = {"collections": "non-existent-collection,or-this-one"} + resp = await app_client.get("/search", params=params) + assert resp.status_code == 200 + + +@pytest.mark.asyncio +async def test_item_search_temporal_window_get(app_client, ctx): + """Test GET search with spatio-temporal query (core)""" + test_item = ctx.item + item_date = rfc3339_str_to_datetime(test_item["properties"]["datetime"]) + item_date_before = item_date - timedelta(seconds=1) + item_date_after = item_date + timedelta(seconds=1) + + params = { + "collections": test_item["collection"], + "bbox": ",".join([str(coord) for coord in test_item["bbox"]]), + "datetime": f"{datetime_to_str(item_date_before)}/{datetime_to_str(item_date_after)}", + } + resp = await app_client.get("/search", params=params) + resp_json = resp.json() + assert resp_json["features"][0]["id"] == test_item["id"] + + +@pytest.mark.asyncio +async def test_item_search_post_without_collection(app_client, ctx): + """Test POST search without specifying a collection""" + test_item = ctx.item + params = { + "bbox": test_item["bbox"], + } + resp = await 
app_client.post("/search", json=params) + assert resp.status_code == 200 + + +@pytest.mark.asyncio +async def test_item_search_properties_es(app_client, ctx): + """Test POST search with JSONB query (query extension)""" + + test_item = ctx.item + # EPSG is a JSONB key + params = {"query": {"proj:epsg": {"gt": test_item["properties"]["proj:epsg"] + 1}}} + resp = await app_client.post("/search", json=params) + assert resp.status_code == 200 + resp_json = resp.json() + assert len(resp_json["features"]) == 0 + + +@pytest.mark.asyncio +async def test_item_search_properties_field(app_client): + """Test POST search indexed field with query (query extension)""" + + # Orientation is an indexed field + params = {"query": {"orientation": {"eq": "south"}}} + resp = await app_client.post("/search", json=params) + assert resp.status_code == 200 + resp_json = resp.json() + assert len(resp_json["features"]) == 0 + + +@pytest.mark.asyncio +async def test_item_search_get_query_extension(app_client, ctx): + """Test GET search with JSONB query (query extension)""" + + test_item = ctx.item + + params = { + "collections": [test_item["collection"]], + "query": json.dumps( + {"proj:epsg": {"gt": test_item["properties"]["proj:epsg"] + 1}} + ), + } + resp = await app_client.get("/search", params=params) + assert resp.json()["context"]["returned"] == 0 + + params["query"] = json.dumps( + {"proj:epsg": {"eq": test_item["properties"]["proj:epsg"]}} + ) + resp = await app_client.get("/search", params=params) + resp_json = resp.json() + assert resp_json["context"]["returned"] == 1 + assert ( + resp_json["features"][0]["properties"]["proj:epsg"] + == test_item["properties"]["proj:epsg"] + ) + + +@pytest.mark.asyncio +async def test_get_missing_item_collection(app_client): + """Test reading a collection which does not exist""" + resp = await app_client.get("/collections/invalid-collection/items") + assert resp.status_code == 404 + + +@pytest.mark.asyncio +async def 
test_pagination_item_collection(app_client, ctx, txn_client): + """Test item collection pagination links (paging extension)""" + ids = [ctx.item["id"]] + + # Ingest 5 items + for _ in range(5): + ctx.item["id"] = str(uuid.uuid4()) + await create_item(txn_client, item=ctx.item) + ids.append(ctx.item["id"]) + + # Paginate through all 6 items with a limit of 1 (expecting 7 requests) + page = await app_client.get( + f"/collections/{ctx.item['collection']}/items", params={"limit": 1} + ) + + item_ids = [] + idx = 0 + for idx in range(100): + page_data = page.json() + next_link = list(filter(lambda link: link["rel"] == "next", page_data["links"])) + if not next_link: + assert not page_data["features"] + break + + assert len(page_data["features"]) == 1 + item_ids.append(page_data["features"][0]["id"]) + + href = next_link[0]["href"][len("http://test-server") :] + page = await app_client.get(href) + + assert idx == len(ids) + + # Confirm we have paginated through all items + assert not set(item_ids) - set(ids) + + +@pytest.mark.asyncio +async def test_pagination_post(app_client, ctx, txn_client): + """Test POST pagination (paging extension)""" + ids = [ctx.item["id"]] + + # Ingest 5 items + for _ in range(5): + ctx.item["id"] = str(uuid.uuid4()) + await create_item(txn_client, ctx.item) + ids.append(ctx.item["id"]) + + # Paginate through all 5 items with a limit of 1 (expecting 5 requests) + request_body = {"ids": ids, "limit": 1} + page = await app_client.post("/search", json=request_body) + idx = 0 + item_ids = [] + for _ in range(100): + idx += 1 + page_data = page.json() + next_link = list(filter(lambda link: link["rel"] == "next", page_data["links"])) + if not next_link: + break + + item_ids.append(page_data["features"][0]["id"]) + + # Merge request bodies + request_body.update(next_link[0]["body"]) + page = await app_client.post("/search", json=request_body) + + # Our limit is 1, so we expect len(ids) number of requests before we run out of pages + assert idx == 
len(ids) + 1 + + # Confirm we have paginated through all items + assert not set(item_ids) - set(ids) + + +@pytest.mark.asyncio +async def test_pagination_token_idempotent(app_client, ctx, txn_client): + """Test that pagination tokens are idempotent (paging extension)""" + ids = [ctx.item["id"]] + + # Ingest 5 items + for _ in range(5): + ctx.item["id"] = str(uuid.uuid4()) + await create_item(txn_client, ctx.item) + ids.append(ctx.item["id"]) + + page = await app_client.get("/search", params={"ids": ",".join(ids), "limit": 3}) + page_data = page.json() + next_link = list(filter(lambda link: link["rel"] == "next", page_data["links"])) + + # Confirm token is idempotent + resp1 = await app_client.get( + "/search", params=parse_qs(urlparse(next_link[0]["href"]).query) + ) + resp2 = await app_client.get( + "/search", params=parse_qs(urlparse(next_link[0]["href"]).query) + ) + resp1_data = resp1.json() + resp2_data = resp2.json() + + # Two different requests with the same pagination token should return the same items + assert [item["id"] for item in resp1_data["features"]] == [ + item["id"] for item in resp2_data["features"] + ] + + +@pytest.mark.asyncio +async def test_field_extension_get_includes(app_client, ctx): + """Test GET search with included fields (fields extension)""" + test_item = ctx.item + params = { + "ids": [test_item["id"]], + "fields": "+properties.proj:epsg,+properties.gsd", + } + resp = await app_client.get("/search", params=params) + feat_properties = resp.json()["features"][0]["properties"] + assert not set(feat_properties) - {"proj:epsg", "gsd", "datetime"} + + +@pytest.mark.asyncio +async def test_field_extension_get_excludes(app_client, ctx): + """Test GET search with included fields (fields extension)""" + test_item = ctx.item + params = { + "ids": [test_item["id"]], + "fields": "-properties.proj:epsg,-properties.gsd", + } + resp = await app_client.get("/search", params=params) + resp_json = resp.json() + assert "proj:epsg" not in 
resp_json["features"][0]["properties"].keys() + assert "gsd" not in resp_json["features"][0]["properties"].keys() + + +@pytest.mark.asyncio +async def test_field_extension_post(app_client, ctx): + """Test POST search with included and excluded fields (fields extension)""" + test_item = ctx.item + body = { + "ids": [test_item["id"]], + "fields": { + "exclude": ["assets.B1"], + "include": ["properties.eo:cloud_cover", "properties.orientation"], + }, + } + + resp = await app_client.post("/search", json=body) + resp_json = resp.json() + assert "B1" not in resp_json["features"][0]["assets"].keys() + assert not set(resp_json["features"][0]["properties"]) - { + "orientation", + "eo:cloud_cover", + "datetime", + } + + +@pytest.mark.asyncio +async def test_field_extension_exclude_and_include(app_client, ctx): + """Test POST search including/excluding same field (fields extension)""" + test_item = ctx.item + body = { + "ids": [test_item["id"]], + "fields": { + "exclude": ["properties.eo:cloud_cover"], + "include": ["properties.eo:cloud_cover"], + }, + } + + resp = await app_client.post("/search", json=body) + resp_json = resp.json() + assert "eo:cloud_cover" not in resp_json["features"][0]["properties"] + + +@pytest.mark.asyncio +async def test_field_extension_exclude_default_includes(app_client, ctx): + """Test POST search excluding a forbidden field (fields extension)""" + test_item = ctx.item + body = {"ids": [test_item["id"]], "fields": {"exclude": ["gsd"]}} + + resp = await app_client.post("/search", json=body) + resp_json = resp.json() + assert "gsd" not in resp_json["features"][0] + + +@pytest.mark.asyncio +async def test_search_intersects_and_bbox(app_client): + """Test POST search intersects and bbox are mutually exclusive (core)""" + bbox = [-118, 34, -117, 35] + geoj = Polygon.from_bounds(*bbox).dict(exclude_none=True) + params = {"bbox": bbox, "intersects": geoj} + resp = await app_client.post("/search", json=params) + assert resp.status_code == 400 + + 
+@pytest.mark.asyncio +async def test_get_missing_item(app_client, load_test_data): + """Test read item which does not exist (transactions extension)""" + test_coll = load_test_data("test_collection.json") + resp = await app_client.get(f"/collections/{test_coll['id']}/items/invalid-item") + assert resp.status_code == 404 + + +@pytest.mark.asyncio +@pytest.mark.skip(reason="invalid queries not implemented") +async def test_search_invalid_query_field(app_client): + body = {"query": {"gsd": {"lt": 100}, "invalid-field": {"eq": 50}}} + resp = await app_client.post("/search", json=body) + assert resp.status_code == 400 + + +@pytest.mark.asyncio +async def test_search_bbox_errors(app_client): + body = {"query": {"bbox": [0]}} + resp = await app_client.post("/search", json=body) + assert resp.status_code == 400 + + body = {"query": {"bbox": [100.0, 0.0, 0.0, 105.0, 1.0, 1.0]}} + resp = await app_client.post("/search", json=body) + assert resp.status_code == 400 + + params = {"bbox": "100.0,0.0,0.0,105.0"} + resp = await app_client.get("/search", params=params) + assert resp.status_code == 400 + + +@pytest.mark.asyncio +async def test_conformance_classes_configurable(): + """Test conformance class configurability""" + landing = LandingPageMixin() + landing_page = landing._landing_page( + base_url="http://test/test", + conformance_classes=["this is a test"], + extension_schemas=[], + ) + assert landing_page["conformsTo"][0] == "this is a test" + + # Update environment to avoid key error on client instantiation + os.environ["READER_CONN_STRING"] = "testing" + os.environ["WRITER_CONN_STRING"] = "testing" + client = CoreClient( + database=database_logic, base_conformance_classes=["this is a test"] + ) + assert client.conformance_classes()[0] == "this is a test" + + +@pytest.mark.asyncio +async def test_search_datetime_validation_errors(app_client): + bad_datetimes = [ + "37-01-01T12:00:27.87Z", + "1985-13-12T23:20:50.52Z", + "1985-12-32T23:20:50.52Z", + 
"1985-12-01T25:20:50.52Z", + "1985-12-01T00:60:50.52Z", + "1985-12-01T00:06:61.52Z", + "1990-12-31T23:59:61Z", + "1986-04-12T23:20:50.52Z/1985-04-12T23:20:50.52Z", + ] + for dt in bad_datetimes: + body = {"query": {"datetime": dt}} + resp = await app_client.post("/search", json=body) + assert resp.status_code == 400 + + resp = await app_client.get("/search?datetime={}".format(dt)) + assert resp.status_code == 400 diff --git a/stac_fastapi/opensearch/tests/resources/test_mgmt.py b/stac_fastapi/opensearch/tests/resources/test_mgmt.py new file mode 100644 index 00000000..2b7d9728 --- /dev/null +++ b/stac_fastapi/opensearch/tests/resources/test_mgmt.py @@ -0,0 +1,13 @@ +import pytest + + +@pytest.mark.asyncio +async def test_ping_no_param(app_client): + """ + Test ping endpoint with a mocked client. + Args: + app_client (TestClient): mocked client fixture + """ + res = await app_client.get("/_mgmt/ping") + assert res.status_code == 200 + assert res.json() == {"message": "PONG"} From 322ff19415f0b1a0c68be71514865b5eada56242 Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Fri, 2 Feb 2024 12:49:17 +0800 Subject: [PATCH 13/33] update workflow, dockerfiles --- .github/workflows/cicd.yml | 6 +++++- Dockerfile.dev => Dockerfile.dev.es | 0 Dockerfile.dev.os | 19 +++++++++++++++++++ docker-compose.yml | 4 ++-- 4 files changed, 26 insertions(+), 3 deletions(-) rename Dockerfile.dev => Dockerfile.dev.es (100%) create mode 100644 Dockerfile.dev.os diff --git a/.github/workflows/cicd.yml b/.github/workflows/cicd.yml index bf9ad019..d438979d 100644 --- a/.github/workflows/cicd.yml +++ b/.github/workflows/cicd.yml @@ -92,6 +92,10 @@ jobs: run: | pip install ./stac_fastapi/elasticsearch[dev,server] + - name: Install opensearch stac-fastapi + run: | + pip install ./stac_fastapi/opensearch[dev,server] + - name: Run test suite against Elasticsearch 7.x run: | cd stac_fastapi/elasticsearch && pipenv run pytest -svvv @@ -114,7 +118,7 @@ jobs: - name: Run test suite against OpenSearch 2.11.1 
run: | - cd stac_fastapi/elasticsearch && pipenv run pytest -svvv + cd stac_fastapi/opensearch && pipenv run pytest -svvv env: ENVIRONMENT: testing ES_PORT: 9202 diff --git a/Dockerfile.dev b/Dockerfile.dev.es similarity index 100% rename from Dockerfile.dev rename to Dockerfile.dev.es diff --git a/Dockerfile.dev.os b/Dockerfile.dev.os new file mode 100644 index 00000000..a4248d39 --- /dev/null +++ b/Dockerfile.dev.os @@ -0,0 +1,19 @@ +FROM python:3.10-slim + + +# update apt pkgs, and install build-essential for ciso8601 +RUN apt-get update && \ + apt-get -y upgrade && \ + apt-get install -y build-essential && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* + +# update certs used by Requests +ENV CURL_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt + +WORKDIR /app + +COPY . /app + +RUN pip install --no-cache-dir -e ./stac_fastapi/core +RUN pip install --no-cache-dir -e ./stac_fastapi/elasticsearch[dev,server] diff --git a/docker-compose.yml b/docker-compose.yml index 2010cd08..02f4235f 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -7,7 +7,7 @@ services: restart: always build: context: . - dockerfile: Dockerfile.dev + dockerfile: Dockerfile.dev.es environment: - APP_HOST=0.0.0.0 - APP_PORT=8080 @@ -36,7 +36,7 @@ services: restart: always build: context: . 
- dockerfile: Dockerfile.dev + dockerfile: Dockerfile.dev.os environment: - APP_HOST=0.0.0.0 - APP_PORT=8082 From 0ff356f5256d365ab0317df04cd7a39167bdcc8e Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Fri, 2 Feb 2024 13:53:16 +0800 Subject: [PATCH 14/33] fix dockerfiles, imports --- Dockerfile.dev.os | 2 +- docker-compose.yml | 2 +- stac_fastapi/opensearch/setup.py | 2 +- .../opensearch/stac_fastapi/opensearch/app.py | 16 ++--- .../stac_fastapi/opensearch/database_logic.py | 58 +++++++++++++++++-- .../opensearch/tests/resources/test_item.py | 2 +- 6 files changed, 65 insertions(+), 17 deletions(-) diff --git a/Dockerfile.dev.os b/Dockerfile.dev.os index a4248d39..d9dc8b0a 100644 --- a/Dockerfile.dev.os +++ b/Dockerfile.dev.os @@ -16,4 +16,4 @@ WORKDIR /app COPY . /app RUN pip install --no-cache-dir -e ./stac_fastapi/core -RUN pip install --no-cache-dir -e ./stac_fastapi/elasticsearch[dev,server] +RUN pip install --no-cache-dir -e ./stac_fastapi/opensearch[dev,server] diff --git a/docker-compose.yml b/docker-compose.yml index 02f4235f..7db5312a 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -57,7 +57,7 @@ services: depends_on: - opensearch command: - bash -c "./scripts/wait-for-it-es.sh os-container:9202 && python -m stac_fastapi.elasticsearch.app" + bash -c "./scripts/wait-for-it-es.sh os-container:9202 && python -m stac_fastapi.opensearch.app" elasticsearch: container_name: es-container diff --git a/stac_fastapi/opensearch/setup.py b/stac_fastapi/opensearch/setup.py index 07c143df..71319637 100644 --- a/stac_fastapi/opensearch/setup.py +++ b/stac_fastapi/opensearch/setup.py @@ -1,4 +1,4 @@ -"""stac_fastapi: opencsearch module.""" +"""stac_fastapi: opensearch module.""" from setuptools import find_namespace_packages, setup diff --git a/stac_fastapi/opensearch/stac_fastapi/opensearch/app.py b/stac_fastapi/opensearch/stac_fastapi/opensearch/app.py index 75d60684..ebb2921e 100644 --- a/stac_fastapi/opensearch/stac_fastapi/opensearch/app.py +++ 
b/stac_fastapi/opensearch/stac_fastapi/opensearch/app.py @@ -9,12 +9,7 @@ TransactionsClient, ) from stac_fastapi.core.extensions import QueryExtension -from stac_fastapi.elasticsearch.config import ElasticsearchSettings -from stac_fastapi.elasticsearch.database_logic import ( - DatabaseLogic, - create_collection_index, -) -from stac_fastapi.elasticsearch.session import Session +from stac_fastapi.core.session import Session from stac_fastapi.extensions.core import ( ContextExtension, FieldsExtension, @@ -24,8 +19,13 @@ TransactionExtension, ) from stac_fastapi.extensions.third_party import BulkTransactionExtension +from stac_fastapi.opensearch.config import OpensearchSettings +from stac_fastapi.opensearch.database_logic import ( + DatabaseLogic, + create_collection_index, +) -settings = ElasticsearchSettings() +settings = OpensearchSettings() session = Session.create_from_settings(settings) filter_extension = FilterExtension(client=EsAsyncBaseFiltersClient()) @@ -82,7 +82,7 @@ def run() -> None: import uvicorn uvicorn.run( - "stac_fastapi.elasticsearch.app:app", + "stac_fastapi.opensearch.app:app", host=settings.app_host, port=settings.app_port, log_level="info", diff --git a/stac_fastapi/opensearch/stac_fastapi/opensearch/database_logic.py b/stac_fastapi/opensearch/stac_fastapi/opensearch/database_logic.py index 3f8ec4a3..55d0fcf6 100644 --- a/stac_fastapi/opensearch/stac_fastapi/opensearch/database_logic.py +++ b/stac_fastapi/opensearch/stac_fastapi/opensearch/database_logic.py @@ -12,12 +12,12 @@ from opensearchpy.helpers.search import Search from stac_fastapi.core import serializers -from stac_fastapi.elasticsearch.config.config_opensearch import AsyncSearchSettings -from stac_fastapi.elasticsearch.config.config_opensearch import ( - SearchSettings as SyncSearchSettings, +from stac_fastapi.core.extensions import filter +from stac_fastapi.core.utilities import bbox2polygon +from stac_fastapi.opensearch.config import ( + AsyncOpensearchSettings as 
AsyncSearchSettings, ) -from stac_fastapi.elasticsearch.extensions import filter -from stac_fastapi.elasticsearch.utilities import bbox2polygon +from stac_fastapi.opensearch.config import OpensearchSettings as SyncSearchSettings from stac_fastapi.types.errors import ConflictError, NotFoundError from stac_fastapi.types.stac import Collection, Item @@ -772,6 +772,54 @@ async def find_collection(self, collection_id: str) -> Collection: return collection["_source"] + # this is copied from stac-fastapi-elasticseach and the logic needs to be updated for opensearch + async def update_collection( + self, collection_id: str, collection: Collection, refresh: bool = False + ): + """Update a collection from the database. + + Args: + self: The instance of the object calling this function. + collection_id (str): The ID of the collection to be updated. + collection (Collection): The Collection object to be used for the update. + + Raises: + NotFoundError: If the collection with the given `collection_id` is not + found in the database. + + Notes: + This function updates the collection in the database using the specified + `collection_id` and with the collection specified in the `Collection` object. + If the collection is not found, a `NotFoundError` is raised. 
+ """ + await self.find_collection(collection_id=collection_id) + + if collection_id != collection["id"]: + await self.create_collection(collection, refresh=refresh) + + await self.client.reindex( + body={ + "dest": {"index": f"{ITEMS_INDEX_PREFIX}{collection['id']}"}, + "source": {"index": f"{ITEMS_INDEX_PREFIX}{collection_id}"}, + "script": { + "lang": "painless", + "source": f"""ctx._id = ctx._id.replace('{collection_id}', '{collection["id"]}'); ctx._source.collection = '{collection["id"]}' ;""", + }, + }, + wait_for_completion=True, + refresh=refresh, + ) + + await self.delete_collection(collection_id) + + else: + await self.client.index( + index=COLLECTIONS_INDEX, + id=collection_id, + document=collection, + refresh=refresh, + ) + async def delete_collection(self, collection_id: str, refresh: bool = False): """Delete a collection from the database. diff --git a/stac_fastapi/opensearch/tests/resources/test_item.py b/stac_fastapi/opensearch/tests/resources/test_item.py index e62da8b8..ab4bb123 100644 --- a/stac_fastapi/opensearch/tests/resources/test_item.py +++ b/stac_fastapi/opensearch/tests/resources/test_item.py @@ -14,7 +14,7 @@ from stac_fastapi.core.core import CoreClient from stac_fastapi.core.datetime_utils import now_to_rfc3339_str -from stac_fastapi.elasticsearch.database_logic import DatabaseLogic +from stac_fastapi.opensearch.database_logic import DatabaseLogic from stac_fastapi.types.core import LandingPageMixin from ..conftest import create_item, refresh_indices From 70924efae925b364afdaa7e2841f1bbba56f7a1d Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Sat, 3 Feb 2024 12:20:50 +0800 Subject: [PATCH 15/33] update update collection method --- .../opensearch/stac_fastapi/opensearch/database_logic.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/stac_fastapi/opensearch/stac_fastapi/opensearch/database_logic.py b/stac_fastapi/opensearch/stac_fastapi/opensearch/database_logic.py index 55d0fcf6..a946f82f 100644 --- 
a/stac_fastapi/opensearch/stac_fastapi/opensearch/database_logic.py +++ b/stac_fastapi/opensearch/stac_fastapi/opensearch/database_logic.py @@ -772,7 +772,6 @@ async def find_collection(self, collection_id: str) -> Collection: return collection["_source"] - # this is copied from stac-fastapi-elasticseach and the logic needs to be updated for opensearch async def update_collection( self, collection_id: str, collection: Collection, refresh: bool = False ): @@ -816,7 +815,7 @@ async def update_collection( await self.client.index( index=COLLECTIONS_INDEX, id=collection_id, - document=collection, + body=collection, refresh=refresh, ) From ae9d6ea89e8b4e1d4bce6b1668d6ec4c503438b8 Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Sat, 3 Feb 2024 12:26:35 +0800 Subject: [PATCH 16/33] update changelog --- CHANGELOG.md | 20 +++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e76fecef..62f8531a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,8 +9,21 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. 
### Added -- OpenSearch 2.11.1 support [#187](https://github.com/stac-utils/stac-fastapi-elasticsearch/pull/187) +- Added core library package for common logic [#186]https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/pull/186 + +### Changed + +- Moved Elasticsearch and Opensearch backends into separate packages [#186]https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/pull/186 + +### Fixed + +## [v1.1.0] + +### Added + - Advanced comparison (LIKE, IN, BETWEEN) operators to the Filter extension [#178](https://github.com/stac-utils/stac-fastapi-elasticsearch/pull/178) +- Collection update endpoint no longer delete all sub items [#177](https://github.com/stac-utils/stac-fastapi-elasticsearch/pull/177) +- OpenSearch 2.11.1 support [#188](https://github.com/stac-utils/stac-fastapi-elasticsearch/pull/188) ### Changed @@ -100,8 +113,9 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. - Added db_to_stac serializer to item_collection method in core.py. -[Unreleased]: +[Unreleased]: +[v1.1.0]: [v1.0.0]: [v0.3.0]: [v0.2.0]: -[v0.1.0]: +[v0.1.0]: \ No newline at end of file From 31dcae9ba927089fb8d307f88344e4c39cb4721c Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Sat, 3 Feb 2024 19:13:18 +0800 Subject: [PATCH 17/33] update makefile --- Makefile | 39 ++++++++++++++++++++++++++++----------- 1 file changed, 28 insertions(+), 11 deletions(-) diff --git a/Makefile b/Makefile index 068c86d1..78cbbb79 100644 --- a/Makefile +++ b/Makefile @@ -27,9 +27,13 @@ run_os = docker-compose \ -e APP_PORT=${OS_APP_PORT} \ app-opensearch -.PHONY: image-deploy -image-deploy: - docker build -f Dockerfile.deploy -t stac-fastapi-elasticsearch:latest . +.PHONY: image-deploy-es +image-deploy-es: + docker build -f Dockerfile.deploy.es -t stac-fastapi-elasticsearch:latest . + +.PHONY: image-deploy-os +image-deploy-os: + docker build -f Dockerfile.deploy.os -t stac-fastapi-opensearch:latest . 
.PHONY: run-deploy-locally run-deploy-locally: @@ -44,14 +48,22 @@ run-deploy-locally: image-dev: docker-compose build -.PHONY: docker-run -docker-run: image-dev +.PHONY: docker-run-es +docker-run-es: image-dev $(run_es) -.PHONY: docker-shell +.PHONY: docker-run-os +docker-run-os: image-dev + $(run_os) + +.PHONY: docker-shell-es -docker-shell: +docker-shell-es: $(run_es) /bin/bash +.PHONY: docker-shell-os +docker-shell-os: + $(run_os) /bin/bash + .PHONY: test-elasticsearch test: -$(run_es) /bin/bash -c 'export && ./scripts/wait-for-it-es.sh elasticsearch:9200 && cd /app/stac_fastapi/elasticsearch/tests/ && pytest' @@ -59,7 +71,7 @@ test: .PHONY: test-opensearch test-opensearch: - -$(run_os) /bin/bash -c 'export && ./scripts/wait-for-it-es.sh opensearch:9202 && cd /app/stac_fastapi/elasticsearch/tests/ && pytest' + -$(run_os) /bin/bash -c 'export && ./scripts/wait-for-it-es.sh opensearch:9202 && cd /app/stac_fastapi/opensearch/tests/ && pytest' docker-compose down .PHONY: test @@ -67,7 +79,7 @@ test: -$(run_es) /bin/bash -c 'export && ./scripts/wait-for-it-es.sh elasticsearch:9200 && cd /app/stac_fastapi/elasticsearch/tests/ && pytest' docker-compose down - -$(run_os) /bin/bash -c 'export && ./scripts/wait-for-it-es.sh opensearch:9202 && cd /app/stac_fastapi/elasticsearch/tests/ && pytest' + -$(run_os) /bin/bash -c 'export && ./scripts/wait-for-it-es.sh opensearch:9202 && cd /app/stac_fastapi/opensearch/tests/ && pytest' docker-compose down .PHONY: run-database-es @@ -83,12 +95,17 @@ pybase-install: pip install wheel && \ pip install -e ./stac_fastapi/api[dev] && \ pip install -e ./stac_fastapi/types[dev] && \ - pip install -e ./stac_fastapi/extensions[dev] + pip install -e ./stac_fastapi/extensions[dev] && \ + pip install -e ./stac_fastapi/core -.PHONY: install -install: pybase-install +.PHONY: install-es +install-es: pybase-install pip install -e ./stac_fastapi/elasticsearch[dev,server] +.PHONY: install-os +install-os: pybase-install + pip install -e
./stac_fastapi/opensearch[dev,server] + .PHONY: ingest ingest: python3 data_loader/data_loader.py From aeabc347a4a31e08cb188b700f9b8020db72efc7 Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Sat, 3 Feb 2024 19:14:02 +0800 Subject: [PATCH 18/33] separate dockerfiles --- Dockerfile.deploy => Dockerfile.deploy.es | 1 + Dockerfile.deploy.os | 20 ++++++++++++++++++++ 2 files changed, 21 insertions(+) rename Dockerfile.deploy => Dockerfile.deploy.es (88%) create mode 100644 Dockerfile.deploy.os diff --git a/Dockerfile.deploy b/Dockerfile.deploy.es similarity index 88% rename from Dockerfile.deploy rename to Dockerfile.deploy.es index 85d540fc..2eab7b9d 100644 --- a/Dockerfile.deploy +++ b/Dockerfile.deploy.es @@ -12,6 +12,7 @@ WORKDIR /app COPY . /app +RUN pip install --no-cache-dir -e ./stac_fastapi/core RUN pip install --no-cache-dir ./stac_fastapi/elasticsearch[server] EXPOSE 8080 diff --git a/Dockerfile.deploy.os b/Dockerfile.deploy.os new file mode 100644 index 00000000..64999bbb --- /dev/null +++ b/Dockerfile.deploy.os @@ -0,0 +1,20 @@ +FROM python:3.10-slim + +RUN apt-get update && \ + apt-get -y upgrade && \ + apt-get -y install gcc && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* + +ENV CURL_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt + +WORKDIR /app + +COPY . 
/app + +RUN pip install --no-cache-dir -e ./stac_fastapi/core +RUN pip install --no-cache-dir ./stac_fastapi/opensearch[server] + +EXPOSE 8080 + +CMD ["uvicorn", "stac_fastapi.opensearch.app:app", "--host", "0.0.0.0", "--port", "8080"] From 3c1151b12eacd91eb4c6288efda084bea2aeb138 Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Sat, 3 Feb 2024 20:24:26 +0800 Subject: [PATCH 19/33] update readme --- README.md | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index 9ae86aae..593bdac3 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,8 @@ -# STAC FastAPI Elasticsearch (sfes) +# stac-fastapi-elasticsearch-opensearch (sfeos) -## Elasticsearch backend for stac-fastapi +## Elasticsearch and Opensearch backends for the stac-fastapi project -#### Join our [Gitter](https://gitter.im/stac-fastapi-elasticsearch/community) page -#### Check out the public Postman documentation [Postman](https://documenter.getpostman.com/view/12888943/2s8ZDSdRHA) - -#### Check out the examples folder for deployment options, ex. running sfes from pip in docker #### For changes, see the [Changelog](CHANGELOG.md) @@ -19,6 +15,13 @@ To install the classes in your local Python env, run: pip install -e 'stac_fastapi/elasticsearch[dev]' ``` +or + +```shell +pip install -e 'stac_fastapi/opensearch[dev]' +``` + + ### Pre-commit Install [pre-commit](https://pre-commit.com/#install). From e530e060e77d75c38d4ac35a3a602e01700f6264 Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Sat, 3 Feb 2024 23:13:34 +0800 Subject: [PATCH 20/33] revert changelog --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9cbb2160..62f8531a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,8 +9,12 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
### Added +- Added core library package for common logic [#186](https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/pull/186) + ### Changed +- Moved Elasticsearch and Opensearch backends into separate packages [#186](https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/pull/186) + ### Fixed ## [v1.1.0] From 73fc73ad6d8b786e3a3798ced25e0bade5f7b53b Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Sat, 3 Feb 2024 23:18:33 +0800 Subject: [PATCH 21/33] revert bbox to polygon --- .../stac_fastapi/elasticsearch/database_logic.py | 16 +--------------- 1 file changed, 1 insertion(+), 15 deletions(-) diff --git a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/database_logic.py b/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/database_logic.py index 8ee036e9..9a60bfd7 100644 --- a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/database_logic.py +++ b/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/database_logic.py @@ -11,6 +11,7 @@ from elasticsearch import exceptions, helpers # type: ignore from stac_fastapi.core.extensions import filter from stac_fastapi.core.serializers import CollectionSerializer, ItemSerializer +from stac_fastapi.core.utilities import bbox2polygon from stac_fastapi.elasticsearch.config import AsyncElasticsearchSettings from stac_fastapi.elasticsearch.config import ( ElasticsearchSettings as SyncElasticsearchSettings, ) @@ -229,21 +230,6 @@ async def delete_item_index(collection_id: str): await client.close() -def bbox2polygon(b0: float, b1: float, b2: float, b3: float) -> List[List[List[float]]]: - """Transform a bounding box represented by its four coordinates `b0`, `b1`, `b2`, and `b3` into a polygon. - - Args: - b0 (float): The x-coordinate of the lower-left corner of the bounding box. - b1 (float): The y-coordinate of the lower-left corner of the bounding box. - b2 (float): The x-coordinate of the upper-right corner of the bounding box.
- b3 (float): The y-coordinate of the upper-right corner of the bounding box. - - Returns: - List[List[List[float]]]: A polygon represented as a list of lists of coordinates. - """ - return [[[b0, b1], [b2, b1], [b2, b3], [b0, b3], [b0, b1]]] - - def mk_item_id(item_id: str, collection_id: str): """Create the document id for an Item in Elasticsearch. From dff0a6cdfdaa741928d9b9ed9bcb7e31e0154ab9 Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Sun, 4 Feb 2024 12:25:22 +0800 Subject: [PATCH 22/33] update, clean dependencies --- .github/workflows/cicd.yml | 8 ++++---- examples/pip_docker/Dockerfile | 2 +- ...ac-fastapi-elasticsearch.postman_collection.json | 0 stac_fastapi/core/README.md | 1 + stac_fastapi/core/setup.cfg | 2 +- stac_fastapi/elasticsearch/setup.py | 13 +------------ stac_fastapi/opensearch/README.md | 1 + stac_fastapi/opensearch/setup.cfg | 2 +- stac_fastapi/opensearch/setup.py | 13 +------------ 9 files changed, 11 insertions(+), 31 deletions(-) rename {postman_collections => examples/postman_collections}/stac-fastapi-elasticsearch.postman_collection.json (100%) diff --git a/.github/workflows/cicd.yml b/.github/workflows/cicd.yml index d438979d..24112faf 100644 --- a/.github/workflows/cicd.yml +++ b/.github/workflows/cicd.yml @@ -84,10 +84,6 @@ jobs: run: | python -m pip install --upgrade pipenv wheel - - name: Install core library stac-fastapi - run: | - pip install ./stac_fastapi/core - - name: Install elasticsearch stac-fastapi run: | pip install ./stac_fastapi/elasticsearch[dev,server] @@ -96,6 +92,10 @@ jobs: run: | pip install ./stac_fastapi/opensearch[dev,server] + - name: Install core library stac-fastapi + run: | + pip install ./stac_fastapi/core + - name: Run test suite against Elasticsearch 7.x run: | cd stac_fastapi/elasticsearch && pipenv run pytest -svvv diff --git a/examples/pip_docker/Dockerfile b/examples/pip_docker/Dockerfile index 609ada8c..1db773f5 100644 --- a/examples/pip_docker/Dockerfile +++ b/examples/pip_docker/Dockerfile @@ 
-15,4 +15,4 @@ WORKDIR /app COPY . /app -RUN pip install stac-fastapi.elasticsearch==0.3.0 \ No newline at end of file +RUN pip install stac-fastapi.elasticsearch==1.1.0 \ No newline at end of file diff --git a/postman_collections/stac-fastapi-elasticsearch.postman_collection.json b/examples/postman_collections/stac-fastapi-elasticsearch.postman_collection.json similarity index 100% rename from postman_collections/stac-fastapi-elasticsearch.postman_collection.json rename to examples/postman_collections/stac-fastapi-elasticsearch.postman_collection.json diff --git a/stac_fastapi/core/README.md b/stac_fastapi/core/README.md index e69de29b..02f4e35a 100644 --- a/stac_fastapi/core/README.md +++ b/stac_fastapi/core/README.md @@ -0,0 +1 @@ +# stac-fastapi core library for Elasticsearch and Opensearch backends \ No newline at end of file diff --git a/stac_fastapi/core/setup.cfg b/stac_fastapi/core/setup.cfg index 1eb3fa49..1f4f20aa 100644 --- a/stac_fastapi/core/setup.cfg +++ b/stac_fastapi/core/setup.cfg @@ -1,2 +1,2 @@ [metadata] -version = attr: stac_fastapi.core.version.__version__ +version = 0.1.0 diff --git a/stac_fastapi/elasticsearch/setup.py b/stac_fastapi/elasticsearch/setup.py index f2de8fa3..80a7cf25 100644 --- a/stac_fastapi/elasticsearch/setup.py +++ b/stac_fastapi/elasticsearch/setup.py @@ -6,22 +6,11 @@ desc = f.read() install_requires = [ - "fastapi", - "attrs", - "pydantic[dotenv]<2", - "stac_pydantic==2.0.*", - "stac-fastapi.types==2.4.9", - "stac-fastapi.api==2.4.9", - "stac-fastapi.extensions==2.4.9", + "stac-fastapi.core==0.1.0", "elasticsearch[async]==8.11.0", "elasticsearch-dsl==8.11.0", - "pystac[validation]", "uvicorn", - "orjson", - "overrides", "starlette", - "geojson-pydantic", - "pygeofilter==0.2.1", ] extra_reqs = { diff --git a/stac_fastapi/opensearch/README.md b/stac_fastapi/opensearch/README.md index e69de29b..6b1f8391 100644 --- a/stac_fastapi/opensearch/README.md +++ b/stac_fastapi/opensearch/README.md @@ -0,0 +1 @@ +# 
stac-fastapi-opensearch \ No newline at end of file diff --git a/stac_fastapi/opensearch/setup.cfg b/stac_fastapi/opensearch/setup.cfg index 1eb3fa49..1f4f20aa 100644 --- a/stac_fastapi/opensearch/setup.cfg +++ b/stac_fastapi/opensearch/setup.cfg @@ -1,2 +1,2 @@ [metadata] -version = attr: stac_fastapi.core.version.__version__ +version = 0.1.0 diff --git a/stac_fastapi/opensearch/setup.py b/stac_fastapi/opensearch/setup.py index 71319637..8710e5c8 100644 --- a/stac_fastapi/opensearch/setup.py +++ b/stac_fastapi/opensearch/setup.py @@ -6,22 +6,11 @@ desc = f.read() install_requires = [ - "fastapi", - "attrs", - "pydantic[dotenv]<2", - "stac_pydantic==2.0.*", - "stac-fastapi.types==2.4.9", - "stac-fastapi.api==2.4.9", - "stac-fastapi.extensions==2.4.9", + "stac-fastapi.core==0.1.0", "opensearch-py==2.4.2", "opensearch-py[async]==2.4.2", - "pystac[validation]", "uvicorn", - "orjson", - "overrides", "starlette", - "geojson-pydantic", - "pygeofilter==0.2.1", ] extra_reqs = { From 59306373b6fb92450ec77a03401d0cb66c375565 Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Mon, 5 Feb 2024 18:15:41 +0800 Subject: [PATCH 23/33] update setup url --- stac_fastapi/core/setup.py | 2 +- stac_fastapi/elasticsearch/setup.py | 2 +- stac_fastapi/opensearch/setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/stac_fastapi/core/setup.py b/stac_fastapi/core/setup.py index 27b17be5..68ba8f70 100644 --- a/stac_fastapi/core/setup.py +++ b/stac_fastapi/core/setup.py @@ -36,7 +36,7 @@ "Programming Language :: Python :: 3.11", "License :: OSI Approved :: MIT License", ], - url="https://github.com/stac-utils/stac-fastapi-elasticsearch", + url="https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch", license="MIT", packages=find_namespace_packages(), zip_safe=False, diff --git a/stac_fastapi/elasticsearch/setup.py b/stac_fastapi/elasticsearch/setup.py index 80a7cf25..587c1aee 100644 --- a/stac_fastapi/elasticsearch/setup.py +++ 
b/stac_fastapi/elasticsearch/setup.py @@ -43,7 +43,7 @@ "Programming Language :: Python :: 3.11", "License :: OSI Approved :: MIT License", ], - url="https://github.com/stac-utils/stac-fastapi-elasticsearch", + url="https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch", license="MIT", packages=find_namespace_packages(exclude=["alembic", "tests", "scripts"]), zip_safe=False, diff --git a/stac_fastapi/opensearch/setup.py b/stac_fastapi/opensearch/setup.py index 8710e5c8..f6a11e57 100644 --- a/stac_fastapi/opensearch/setup.py +++ b/stac_fastapi/opensearch/setup.py @@ -43,7 +43,7 @@ "Programming Language :: Python :: 3.11", "License :: OSI Approved :: MIT License", ], - url="https://github.com/stac-utils/stac-fastapi-elasticsearch", + url="https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch", license="MIT", packages=find_namespace_packages(), zip_safe=False, From 7e5b618427e3bb6dcaf6e353acb9c26e49848832 Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Mon, 5 Feb 2024 18:22:52 +0800 Subject: [PATCH 24/33] remove type ignores, clean up --- stac_fastapi/core/stac_fastapi/core/core.py | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/stac_fastapi/core/stac_fastapi/core/core.py b/stac_fastapi/core/stac_fastapi/core/core.py index 5a6dd291..63c43944 100644 --- a/stac_fastapi/core/stac_fastapi/core/core.py +++ b/stac_fastapi/core/stac_fastapi/core/core.py @@ -661,7 +661,7 @@ class TransactionsClient(AsyncBaseTransactionsClient): @overrides async def create_item( self, collection_id: str, item: stac_types.Item, **kwargs - ) -> stac_types.Item: + ) -> Optional[stac_types.Item]: """Create an item in the collection. 
Args: @@ -692,7 +692,7 @@ async def create_item( collection_id, processed_items, refresh=kwargs.get("refresh", False) ) - return None # type: ignore + return None else: item = await self.database.prep_create_item(item=item, base_url=base_url) await self.database.create_item(item, refresh=kwargs.get("refresh", False)) @@ -730,7 +730,7 @@ async def update_item( @overrides async def delete_item( self, item_id: str, collection_id: str, **kwargs - ) -> stac_types.Item: + ) -> Optional[stac_types.Item]: """Delete an item from a collection. Args: @@ -741,7 +741,7 @@ async def delete_item( Optional[stac_types.Item]: The deleted item, or `None` if the item was successfully deleted. """ await self.database.delete_item(item_id=item_id, collection_id=collection_id) - return None # type: ignore + return None @overrides async def create_collection( @@ -810,7 +810,7 @@ async def update_collection( @overrides async def delete_collection( self, collection_id: str, **kwargs - ) -> stac_types.Collection: + ) -> Optional[stac_types.Collection]: """ Delete a collection. @@ -827,7 +827,7 @@ async def delete_collection( NotFoundError: If the collection doesn't exist. 
""" await self.database.delete_collection(collection_id=collection_id) - return None # type: ignore + return None @attr.s @@ -842,14 +842,10 @@ class BulkTransactionsClient(BaseBulkTransactionsClient): database: BaseDatabaseLogic = attr.ib() settings: ApiBaseSettings = attr.ib() session: Session = attr.ib(default=attr.Factory(Session.create_from_env)) - # database = DatabaseLogic() def __attrs_post_init__(self): """Create es engine.""" - # settings = BaseSettings() self.client = self.settings.create_client - # settings = SearchSettings() - # self.client = settings.create_client def preprocess_item( self, item: stac_types.Item, base_url, method: BulkTransactionMethod From 0322a8ab83e5f343e3667baf24a85d423179bf40 Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Mon, 5 Feb 2024 18:26:31 +0800 Subject: [PATCH 25/33] remove type ignore, clean up types.core --- .../core/stac_fastapi/core/types/core.py | 54 +------------------ 1 file changed, 1 insertion(+), 53 deletions(-) diff --git a/stac_fastapi/core/stac_fastapi/core/types/core.py b/stac_fastapi/core/stac_fastapi/core/types/core.py index 875060a2..1212619c 100644 --- a/stac_fastapi/core/stac_fastapi/core/types/core.py +++ b/stac_fastapi/core/stac_fastapi/core/types/core.py @@ -17,7 +17,7 @@ StacType = Dict[str, Any] -@attr.s # type:ignore +@attr.s class AsyncBaseTransactionsClient(abc.ABC): """Defines a pattern for implementing the STAC transaction extension.""" @@ -161,58 +161,6 @@ def extension_is_enabled(self, extension: str) -> bool: """Check if an api extension is enabled.""" return any([type(ext).__name__ == extension for ext in self.extensions]) - # async def landing_page(self, **kwargs) -> stac_types.LandingPage: - # """Landing page. - - # Called with `GET /`. - - # Returns: - # API landing page, serving as an entry point to the API. 
- # """ - # request: Request = kwargs["request"] - # base_url = get_base_url(request) - # landing_page = self._landing_page( - # base_url=base_url, - # conformance_classes=self.conformance_classes(), - # extension_schemas=[], - # ) - # collections = await self.all_collections(request=kwargs["request"]) - # for collection in collections["collections"]: - # landing_page["links"].append( - # { - # "rel": Relations.child.value, - # "type": MimeTypes.json.value, - # "title": collection.get("title") or collection.get("id"), - # "href": urljoin(base_url, f"collections/{collection['id']}"), - # } - # ) - - # # Add OpenAPI URL - # landing_page["links"].append( - # { - # "rel": "service-desc", - # "type": "application/vnd.oai.openapi+json;version=3.0", - # "title": "OpenAPI service description", - # "href": urljoin( - # str(request.base_url), request.app.openapi_url.lstrip("/") - # ), - # } - # ) - - # # Add human readable service-doc - # landing_page["links"].append( - # { - # "rel": "service-doc", - # "type": "text/html", - # "title": "OpenAPI service documentation", - # "href": urljoin( - # str(request.base_url), request.app.docs_url.lstrip("/") - # ), - # } - # ) - - # return landing_page - async def conformance(self, **kwargs) -> stac_types.Conformance: """Conformance classes. 
From def6cf7988c00f56a080d4bfcf8156c225037d40 Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Mon, 5 Feb 2024 18:32:45 +0800 Subject: [PATCH 26/33] fix package versions --- stac_fastapi/core/setup.cfg | 2 +- stac_fastapi/core/stac_fastapi/core/version.py | 2 +- stac_fastapi/elasticsearch/setup.cfg | 2 +- .../elasticsearch/stac_fastapi/elasticsearch/version.py | 2 +- stac_fastapi/opensearch/setup.cfg | 2 +- stac_fastapi/opensearch/stac_fastapi/opensearch/version.py | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/stac_fastapi/core/setup.cfg b/stac_fastapi/core/setup.cfg index 1f4f20aa..1eb3fa49 100644 --- a/stac_fastapi/core/setup.cfg +++ b/stac_fastapi/core/setup.cfg @@ -1,2 +1,2 @@ [metadata] -version = 0.1.0 +version = attr: stac_fastapi.core.version.__version__ diff --git a/stac_fastapi/core/stac_fastapi/core/version.py b/stac_fastapi/core/stac_fastapi/core/version.py index 1eeef171..04a6346d 100644 --- a/stac_fastapi/core/stac_fastapi/core/version.py +++ b/stac_fastapi/core/stac_fastapi/core/version.py @@ -1,2 +1,2 @@ """library version.""" -__version__ = "1.0.0" +__version__ = "0.1.0" diff --git a/stac_fastapi/elasticsearch/setup.cfg b/stac_fastapi/elasticsearch/setup.cfg index ad4714c2..7a42432c 100644 --- a/stac_fastapi/elasticsearch/setup.cfg +++ b/stac_fastapi/elasticsearch/setup.cfg @@ -1,2 +1,2 @@ [metadata] -version = 1.1.0 +version = attr: stac_fastapi.elasticsearch.version.__version__ diff --git a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/version.py b/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/version.py index 1eeef171..6249d737 100644 --- a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/version.py +++ b/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/version.py @@ -1,2 +1,2 @@ """library version.""" -__version__ = "1.0.0" +__version__ = "1.1.0" diff --git a/stac_fastapi/opensearch/setup.cfg b/stac_fastapi/opensearch/setup.cfg index 1f4f20aa..9f0be4b7 100644 --- a/stac_fastapi/opensearch/setup.cfg 
+++ b/stac_fastapi/opensearch/setup.cfg @@ -1,2 +1,2 @@ [metadata] -version = 0.1.0 +version = attr: stac_fastapi.opensearch.version.__version__ diff --git a/stac_fastapi/opensearch/stac_fastapi/opensearch/version.py b/stac_fastapi/opensearch/stac_fastapi/opensearch/version.py index 1eeef171..04a6346d 100644 --- a/stac_fastapi/opensearch/stac_fastapi/opensearch/version.py +++ b/stac_fastapi/opensearch/stac_fastapi/opensearch/version.py @@ -1,2 +1,2 @@ """library version.""" -__version__ = "1.0.0" +__version__ = "0.1.0" From 1ae8e702da57e0e68370c52805180979fe6affe4 Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Mon, 5 Feb 2024 18:57:54 +0800 Subject: [PATCH 27/33] move tests to common folder --- .github/workflows/cicd.yml | 8 +- stac_fastapi/tests/__init__.py | 0 stac_fastapi/tests/api/__init__.py | 0 stac_fastapi/tests/api/test_api.py | 447 ++++++++++ stac_fastapi/tests/clients/__init__.py | 0 .../tests/clients/test_elasticsearch.py | 312 +++++++ stac_fastapi/tests/conftest.py | 221 +++++ stac_fastapi/tests/data/test_collection.json | 99 +++ stac_fastapi/tests/data/test_item.json | 505 +++++++++++ .../tests/extensions/cql2/example01.json | 9 + .../tests/extensions/cql2/example04.json | 32 + .../tests/extensions/cql2/example05a.json | 23 + .../tests/extensions/cql2/example06b.json | 41 + .../tests/extensions/cql2/example08.json | 79 ++ .../tests/extensions/cql2/example09.json | 9 + .../tests/extensions/cql2/example1.json | 39 + .../tests/extensions/cql2/example10.json | 9 + .../tests/extensions/cql2/example14.json | 9 + .../tests/extensions/cql2/example15.json | 23 + .../tests/extensions/cql2/example17.json | 37 + .../tests/extensions/cql2/example18.json | 28 + .../tests/extensions/cql2/example19.json | 9 + .../tests/extensions/cql2/example20.json | 10 + .../tests/extensions/cql2/example21.json | 33 + .../tests/extensions/cql2/example22.json | 13 + stac_fastapi/tests/extensions/test_filter.py | 402 +++++++++ stac_fastapi/tests/resources/__init__.py | 0 
.../tests/resources/test_collection.py | 123 +++ .../tests/resources/test_conformance.py | 72 ++ stac_fastapi/tests/resources/test_item.py | 804 ++++++++++++++++++ stac_fastapi/tests/resources/test_mgmt.py | 13 + 31 files changed, 3406 insertions(+), 3 deletions(-) create mode 100644 stac_fastapi/tests/__init__.py create mode 100644 stac_fastapi/tests/api/__init__.py create mode 100644 stac_fastapi/tests/api/test_api.py create mode 100644 stac_fastapi/tests/clients/__init__.py create mode 100644 stac_fastapi/tests/clients/test_elasticsearch.py create mode 100644 stac_fastapi/tests/conftest.py create mode 100644 stac_fastapi/tests/data/test_collection.json create mode 100644 stac_fastapi/tests/data/test_item.json create mode 100644 stac_fastapi/tests/extensions/cql2/example01.json create mode 100644 stac_fastapi/tests/extensions/cql2/example04.json create mode 100644 stac_fastapi/tests/extensions/cql2/example05a.json create mode 100644 stac_fastapi/tests/extensions/cql2/example06b.json create mode 100644 stac_fastapi/tests/extensions/cql2/example08.json create mode 100644 stac_fastapi/tests/extensions/cql2/example09.json create mode 100644 stac_fastapi/tests/extensions/cql2/example1.json create mode 100644 stac_fastapi/tests/extensions/cql2/example10.json create mode 100644 stac_fastapi/tests/extensions/cql2/example14.json create mode 100644 stac_fastapi/tests/extensions/cql2/example15.json create mode 100644 stac_fastapi/tests/extensions/cql2/example17.json create mode 100644 stac_fastapi/tests/extensions/cql2/example18.json create mode 100755 stac_fastapi/tests/extensions/cql2/example19.json create mode 100755 stac_fastapi/tests/extensions/cql2/example20.json create mode 100644 stac_fastapi/tests/extensions/cql2/example21.json create mode 100644 stac_fastapi/tests/extensions/cql2/example22.json create mode 100644 stac_fastapi/tests/extensions/test_filter.py create mode 100644 stac_fastapi/tests/resources/__init__.py create mode 100644 
stac_fastapi/tests/resources/test_collection.py create mode 100644 stac_fastapi/tests/resources/test_conformance.py create mode 100644 stac_fastapi/tests/resources/test_item.py create mode 100644 stac_fastapi/tests/resources/test_mgmt.py diff --git a/.github/workflows/cicd.yml b/.github/workflows/cicd.yml index 24112faf..2a6ca861 100644 --- a/.github/workflows/cicd.yml +++ b/.github/workflows/cicd.yml @@ -98,27 +98,29 @@ jobs: - name: Run test suite against Elasticsearch 7.x run: | - cd stac_fastapi/elasticsearch && pipenv run pytest -svvv + pipenv run pytest -svvv env: ENVIRONMENT: testing ES_PORT: 9200 ES_HOST: 172.17.0.1 ES_USE_SSL: false ES_VERIFY_CERTS: false + BACKEND: elasticsearch - name: Run test suite against Elasticsearch 8.x run: | - cd stac_fastapi/elasticsearch && pipenv run pytest -svvv + pipenv run pytest -svvv env: ENVIRONMENT: testing ES_PORT: 9400 ES_HOST: 172.17.0.1 ES_USE_SSL: false ES_VERIFY_CERTS: false + BACKEND: elasticsearch - name: Run test suite against OpenSearch 2.11.1 run: | - cd stac_fastapi/opensearch && pipenv run pytest -svvv + pipenv run pytest -svvv env: ENVIRONMENT: testing ES_PORT: 9202 diff --git a/stac_fastapi/tests/__init__.py b/stac_fastapi/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/stac_fastapi/tests/api/__init__.py b/stac_fastapi/tests/api/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/stac_fastapi/tests/api/test_api.py b/stac_fastapi/tests/api/test_api.py new file mode 100644 index 00000000..74f0bb55 --- /dev/null +++ b/stac_fastapi/tests/api/test_api.py @@ -0,0 +1,447 @@ +import copy +import uuid +from datetime import datetime, timedelta + +import pytest + +from ..conftest import create_collection, create_item + +ROUTES = { + "GET /_mgmt/ping", + "GET /docs/oauth2-redirect", + "HEAD /docs/oauth2-redirect", + "GET /", + "GET /conformance", + "GET /api", + "GET /api.html", + "HEAD /api", + "HEAD /api.html", + "GET /queryables", + "GET /collections", + "GET 
/collections/{collection_id}", + "GET /collections/{collection_id}/queryables", + "GET /collections/{collection_id}/items", + "GET /collections/{collection_id}/items/{item_id}", + "GET /search", + "POST /search", + "DELETE /collections/{collection_id}", + "DELETE /collections/{collection_id}/items/{item_id}", + "POST /collections", + "POST /collections/{collection_id}/items", + "PUT /collections", + "PUT /collections/{collection_id}/items/{item_id}", +} + + +@pytest.mark.asyncio +async def test_post_search_content_type(app_client, ctx): + params = {"limit": 1} + resp = await app_client.post("/search", json=params) + assert resp.headers["content-type"] == "application/geo+json" + + +@pytest.mark.asyncio +async def test_get_search_content_type(app_client, ctx): + resp = await app_client.get("/search") + assert resp.headers["content-type"] == "application/geo+json" + + +@pytest.mark.asyncio +async def test_api_headers(app_client): + resp = await app_client.get("/api") + assert ( + resp.headers["content-type"] == "application/vnd.oai.openapi+json;version=3.0" + ) + assert resp.status_code == 200 + + +@pytest.mark.asyncio +async def test_router(app): + api_routes = set([f"{list(route.methods)[0]} {route.path}" for route in app.routes]) + assert len(api_routes - ROUTES) == 0 + + +@pytest.mark.asyncio +async def test_app_transaction_extension(app_client, ctx): + item = copy.deepcopy(ctx.item) + item["id"] = str(uuid.uuid4()) + resp = await app_client.post(f"/collections/{item['collection']}/items", json=item) + assert resp.status_code == 200 + + await app_client.delete(f"/collections/{item['collection']}/items/{item['id']}") + + +@pytest.mark.asyncio +async def test_app_search_response(app_client, ctx): + resp = await app_client.get("/search", params={"ids": ["test-item"]}) + assert resp.status_code == 200 + resp_json = resp.json() + + assert resp_json.get("type") == "FeatureCollection" + # stac_version and stac_extensions were removed in v1.0.0-beta.3 + assert 
resp_json.get("stac_version") is None + assert resp_json.get("stac_extensions") is None + + +@pytest.mark.asyncio +async def test_app_context_extension(app_client, ctx, txn_client): + test_item = ctx.item + test_item["id"] = "test-item-2" + test_item["collection"] = "test-collection-2" + test_collection = ctx.collection + test_collection["id"] = "test-collection-2" + + await create_collection(txn_client, test_collection) + await create_item(txn_client, test_item) + + resp = await app_client.get( + f"/collections/{test_collection['id']}/items/{test_item['id']}" + ) + assert resp.status_code == 200 + resp_json = resp.json() + assert resp_json["id"] == test_item["id"] + assert resp_json["collection"] == test_item["collection"] + + resp = await app_client.get(f"/collections/{test_collection['id']}") + assert resp.status_code == 200 + resp_json = resp.json() + assert resp_json["id"] == test_collection["id"] + + resp = await app_client.post("/search", json={"collections": ["test-collection-2"]}) + assert resp.status_code == 200 + resp_json = resp.json() + assert len(resp_json["features"]) == 1 + assert "context" in resp_json + assert resp_json["context"]["returned"] == 1 + if matched := resp_json["context"].get("matched"): + assert matched == 1 + + +@pytest.mark.asyncio +async def test_app_fields_extension(app_client, ctx, txn_client): + resp = await app_client.get("/search", params={"collections": ["test-collection"]}) + assert resp.status_code == 200 + resp_json = resp.json() + assert list(resp_json["features"][0]["properties"]) == ["datetime"] + + +@pytest.mark.asyncio +async def test_app_fields_extension_query(app_client, ctx, txn_client): + resp = await app_client.post( + "/search", + json={ + "query": {"proj:epsg": {"gte": ctx.item["properties"]["proj:epsg"]}}, + "collections": ["test-collection"], + }, + ) + assert resp.status_code == 200 + resp_json = resp.json() + assert list(resp_json["features"][0]["properties"]) == ["datetime", "proj:epsg"] + + 
+@pytest.mark.asyncio +async def test_app_fields_extension_no_properties_get(app_client, ctx, txn_client): + resp = await app_client.get( + "/search", params={"collections": ["test-collection"], "fields": "-properties"} + ) + assert resp.status_code == 200 + resp_json = resp.json() + assert "properties" not in resp_json["features"][0] + + +@pytest.mark.asyncio +async def test_app_fields_extension_no_properties_post(app_client, ctx, txn_client): + resp = await app_client.post( + "/search", + json={ + "collections": ["test-collection"], + "fields": {"exclude": ["properties"]}, + }, + ) + assert resp.status_code == 200 + resp_json = resp.json() + assert "properties" not in resp_json["features"][0] + + +@pytest.mark.asyncio +async def test_app_fields_extension_no_null_fields(app_client, ctx, txn_client): + resp = await app_client.get("/search", params={"collections": ["test-collection"]}) + assert resp.status_code == 200 + resp_json = resp.json() + # check if no null fields: https://github.com/stac-utils/stac-fastapi-elasticsearch/issues/166 + for feature in resp_json["features"]: + # assert "bbox" not in feature["geometry"] + for link in feature["links"]: + assert all(a not in link or link[a] is not None for a in ("title", "asset")) + for asset in feature["assets"]: + assert all( + a not in asset or asset[a] is not None + for a in ("start_datetime", "created") + ) + + +@pytest.mark.asyncio +async def test_app_fields_extension_return_all_properties(app_client, ctx, txn_client): + item = ctx.item + resp = await app_client.get( + "/search", params={"collections": ["test-collection"], "fields": "properties"} + ) + assert resp.status_code == 200 + resp_json = resp.json() + feature = resp_json["features"][0] + assert len(feature["properties"]) >= len(item["properties"]) + for expected_prop, expected_value in item["properties"].items(): + if expected_prop in ("datetime", "created", "updated"): + assert feature["properties"][expected_prop][0:19] == expected_value[0:19] + 
else: + assert feature["properties"][expected_prop] == expected_value + + +@pytest.mark.asyncio +async def test_app_query_extension_gt(app_client, ctx): + params = {"query": {"proj:epsg": {"gt": ctx.item["properties"]["proj:epsg"]}}} + resp = await app_client.post("/search", json=params) + assert resp.status_code == 200 + resp_json = resp.json() + assert len(resp_json["features"]) == 0 + + +@pytest.mark.asyncio +async def test_app_query_extension_gte(app_client, ctx): + params = {"query": {"proj:epsg": {"gte": ctx.item["properties"]["proj:epsg"]}}} + resp = await app_client.post("/search", json=params) + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 1 + + +@pytest.mark.asyncio +async def test_app_query_extension_limit_lt0(app_client): + assert (await app_client.post("/search", json={"limit": -1})).status_code == 400 + + +@pytest.mark.asyncio +async def test_app_query_extension_limit_gt10000(app_client): + resp = await app_client.post("/search", json={"limit": 10001}) + assert resp.status_code == 200 + assert resp.json()["context"]["limit"] == 10000 + + +@pytest.mark.asyncio +async def test_app_query_extension_limit_10000(app_client): + params = {"limit": 10000} + resp = await app_client.post("/search", json=params) + assert resp.status_code == 200 + + +@pytest.mark.asyncio +async def test_app_sort_extension_get_asc(app_client, txn_client, ctx): + first_item = ctx.item + item_date = datetime.strptime( + first_item["properties"]["datetime"], "%Y-%m-%dT%H:%M:%SZ" + ) + + second_item = dict(first_item) + second_item["id"] = "another-item" + another_item_date = item_date - timedelta(days=1) + second_item["properties"]["datetime"] = another_item_date.strftime( + "%Y-%m-%dT%H:%M:%SZ" + ) + await create_item(txn_client, second_item) + + resp = await app_client.get("/search?sortby=+properties.datetime") + assert resp.status_code == 200 + resp_json = resp.json() + assert resp_json["features"][1]["id"] == first_item["id"] + assert 
resp_json["features"][0]["id"] == second_item["id"] + + +@pytest.mark.asyncio +async def test_app_sort_extension_get_desc(app_client, txn_client, ctx): + first_item = ctx.item + item_date = datetime.strptime( + first_item["properties"]["datetime"], "%Y-%m-%dT%H:%M:%SZ" + ) + + second_item = dict(first_item) + second_item["id"] = "another-item" + another_item_date = item_date - timedelta(days=1) + second_item["properties"]["datetime"] = another_item_date.strftime( + "%Y-%m-%dT%H:%M:%SZ" + ) + await create_item(txn_client, second_item) + + resp = await app_client.get("/search?sortby=-properties.datetime") + assert resp.status_code == 200 + resp_json = resp.json() + assert resp_json["features"][0]["id"] == first_item["id"] + assert resp_json["features"][1]["id"] == second_item["id"] + + +@pytest.mark.asyncio +async def test_app_sort_extension_post_asc(app_client, txn_client, ctx): + first_item = ctx.item + item_date = datetime.strptime( + first_item["properties"]["datetime"], "%Y-%m-%dT%H:%M:%SZ" + ) + + second_item = dict(first_item) + second_item["id"] = "another-item" + another_item_date = item_date - timedelta(days=1) + second_item["properties"]["datetime"] = another_item_date.strftime( + "%Y-%m-%dT%H:%M:%SZ" + ) + await create_item(txn_client, second_item) + + params = { + "collections": [first_item["collection"]], + "sortby": [{"field": "properties.datetime", "direction": "asc"}], + } + resp = await app_client.post("/search", json=params) + assert resp.status_code == 200 + resp_json = resp.json() + assert resp_json["features"][1]["id"] == first_item["id"] + assert resp_json["features"][0]["id"] == second_item["id"] + + +@pytest.mark.asyncio +async def test_app_sort_extension_post_desc(app_client, txn_client, ctx): + first_item = ctx.item + item_date = datetime.strptime( + first_item["properties"]["datetime"], "%Y-%m-%dT%H:%M:%SZ" + ) + + second_item = dict(first_item) + second_item["id"] = "another-item" + another_item_date = item_date - timedelta(days=1) + 
second_item["properties"]["datetime"] = another_item_date.strftime( + "%Y-%m-%dT%H:%M:%SZ" + ) + await create_item(txn_client, second_item) + + params = { + "collections": [first_item["collection"]], + "sortby": [{"field": "properties.datetime", "direction": "desc"}], + } + resp = await app_client.post("/search", json=params) + assert resp.status_code == 200 + resp_json = resp.json() + assert resp_json["features"][0]["id"] == first_item["id"] + assert resp_json["features"][1]["id"] == second_item["id"] + + +@pytest.mark.asyncio +async def test_search_invalid_date(app_client, ctx): + params = { + "datetime": "2020-XX-01/2020-10-30", + "collections": [ctx.item["collection"]], + } + + resp = await app_client.post("/search", json=params) + assert resp.status_code == 400 + + +@pytest.mark.asyncio +async def test_search_point_intersects_get(app_client, ctx): + resp = await app_client.get( + '/search?intersects={"type":"Point","coordinates":[150.04,-33.14]}' + ) + + assert resp.status_code == 200 + resp_json = resp.json() + assert len(resp_json["features"]) == 1 + + +@pytest.mark.asyncio +async def test_search_polygon_intersects_get(app_client, ctx): + resp = await app_client.get( + '/search?intersects={"type":"Polygon","coordinates":[[[149.04, -34.14],[149.04, -32.14],[151.04, -32.14],[151.04, -34.14],[149.04, -34.14]]]}' + ) + + assert resp.status_code == 200 + resp_json = resp.json() + assert len(resp_json["features"]) == 1 + + +@pytest.mark.asyncio +async def test_search_point_intersects_post(app_client, ctx): + point = [150.04, -33.14] + intersects = {"type": "Point", "coordinates": point} + + params = { + "intersects": intersects, + "collections": [ctx.item["collection"]], + } + resp = await app_client.post("/search", json=params) + + assert resp.status_code == 200 + resp_json = resp.json() + assert len(resp_json["features"]) == 1 + + +@pytest.mark.asyncio +async def test_search_point_does_not_intersect(app_client, ctx): + point = [15.04, -3.14] + intersects = 
{"type": "Point", "coordinates": point} + + params = { + "intersects": intersects, + "collections": [ctx.item["collection"]], + } + resp = await app_client.post("/search", json=params) + + assert resp.status_code == 200 + resp_json = resp.json() + assert len(resp_json["features"]) == 0 + + +@pytest.mark.asyncio +async def test_datetime_non_interval(app_client, ctx): + dt_formats = [ + "2020-02-12T12:30:22+00:00", + "2020-02-12T12:30:22.00Z", + "2020-02-12T12:30:22Z", + "2020-02-12T12:30:22.00+00:00", + ] + + for dt in dt_formats: + params = { + "datetime": dt, + "collections": [ctx.item["collection"]], + } + + resp = await app_client.post("/search", json=params) + assert resp.status_code == 200 + resp_json = resp.json() + # datetime is returned in this format "2020-02-12T12:30:22Z" + assert resp_json["features"][0]["properties"]["datetime"][0:19] == dt[0:19] + + +@pytest.mark.asyncio +async def test_bbox_3d(app_client, ctx): + australia_bbox = [106.343365, -47.199523, 0.1, 168.218365, -19.437288, 0.1] + params = { + "bbox": australia_bbox, + "collections": [ctx.item["collection"]], + } + resp = await app_client.post("/search", json=params) + assert resp.status_code == 200 + resp_json = resp.json() + assert len(resp_json["features"]) == 1 + + +@pytest.mark.asyncio +async def test_search_line_string_intersects(app_client, ctx): + line = [[150.04, -33.14], [150.22, -33.89]] + intersects = {"type": "LineString", "coordinates": line} + params = { + "intersects": intersects, + "collections": [ctx.item["collection"]], + } + + resp = await app_client.post("/search", json=params) + + assert resp.status_code == 200 + + resp_json = resp.json() + assert len(resp_json["features"]) == 1 diff --git a/stac_fastapi/tests/clients/__init__.py b/stac_fastapi/tests/clients/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/stac_fastapi/tests/clients/test_elasticsearch.py b/stac_fastapi/tests/clients/test_elasticsearch.py new file mode 100644 index 00000000..41fcf26d 
--- /dev/null +++ b/stac_fastapi/tests/clients/test_elasticsearch.py @@ -0,0 +1,312 @@ +import uuid +from copy import deepcopy +from typing import Callable + +import pytest +from stac_pydantic import Item + +from stac_fastapi.extensions.third_party.bulk_transactions import Items +from stac_fastapi.types.errors import ConflictError, NotFoundError + +from ..conftest import MockRequest, create_item + + +@pytest.mark.asyncio +async def test_create_collection(app_client, ctx, core_client, txn_client): + in_coll = deepcopy(ctx.collection) + in_coll["id"] = str(uuid.uuid4()) + await txn_client.create_collection(in_coll, request=MockRequest) + got_coll = await core_client.get_collection(in_coll["id"], request=MockRequest) + assert got_coll["id"] == in_coll["id"] + await txn_client.delete_collection(in_coll["id"]) + + +@pytest.mark.asyncio +async def test_create_collection_already_exists(app_client, ctx, txn_client): + data = deepcopy(ctx.collection) + + # change id to avoid elasticsearch duplicate key error + data["_id"] = str(uuid.uuid4()) + + with pytest.raises(ConflictError): + await txn_client.create_collection(data, request=MockRequest) + + await txn_client.delete_collection(data["id"]) + + +@pytest.mark.asyncio +async def test_update_collection( + core_client, + txn_client, + load_test_data: Callable, +): + collection_data = load_test_data("test_collection.json") + item_data = load_test_data("test_item.json") + + await txn_client.create_collection(collection_data, request=MockRequest) + await txn_client.create_item( + collection_id=collection_data["id"], + item=item_data, + request=MockRequest, + refresh=True, + ) + + collection_data["keywords"].append("new keyword") + await txn_client.update_collection(collection_data, request=MockRequest) + + coll = await core_client.get_collection(collection_data["id"], request=MockRequest) + assert "new keyword" in coll["keywords"] + + item = await core_client.get_item( + item_id=item_data["id"], + 
collection_id=collection_data["id"], + request=MockRequest, + ) + assert item["id"] == item_data["id"] + assert item["collection"] == item_data["collection"] + + await txn_client.delete_collection(collection_data["id"]) + + +@pytest.mark.asyncio +async def test_update_collection_id( + core_client, + txn_client, + load_test_data: Callable, +): + collection_data = load_test_data("test_collection.json") + item_data = load_test_data("test_item.json") + new_collection_id = "new-test-collection" + + await txn_client.create_collection(collection_data, request=MockRequest) + await txn_client.create_item( + collection_id=collection_data["id"], + item=item_data, + request=MockRequest, + refresh=True, + ) + + old_collection_id = collection_data["id"] + collection_data["id"] = new_collection_id + + await txn_client.update_collection( + collection=collection_data, + request=MockRequest( + query_params={ + "collection_id": old_collection_id, + "limit": "10", + } + ), + refresh=True, + ) + + with pytest.raises(NotFoundError): + await core_client.get_collection(old_collection_id, request=MockRequest) + + coll = await core_client.get_collection(collection_data["id"], request=MockRequest) + assert coll["id"] == new_collection_id + + with pytest.raises(NotFoundError): + await core_client.get_item( + item_id=item_data["id"], + collection_id=old_collection_id, + request=MockRequest, + ) + + item = await core_client.get_item( + item_id=item_data["id"], + collection_id=collection_data["id"], + request=MockRequest, + refresh=True, + ) + + assert item["id"] == item_data["id"] + assert item["collection"] == new_collection_id + + await txn_client.delete_collection(collection_data["id"]) + + +@pytest.mark.asyncio +async def test_delete_collection( + core_client, + txn_client, + load_test_data: Callable, +): + data = load_test_data("test_collection.json") + await txn_client.create_collection(data, request=MockRequest) + + await txn_client.delete_collection(data["id"]) + + with 
pytest.raises(NotFoundError): + await core_client.get_collection(data["id"], request=MockRequest) + + +@pytest.mark.asyncio +async def test_get_collection( + core_client, + txn_client, + load_test_data: Callable, +): + data = load_test_data("test_collection.json") + await txn_client.create_collection(data, request=MockRequest) + coll = await core_client.get_collection(data["id"], request=MockRequest) + assert coll["id"] == data["id"] + + await txn_client.delete_collection(data["id"]) + + +@pytest.mark.asyncio +async def test_get_item(app_client, ctx, core_client): + got_item = await core_client.get_item( + item_id=ctx.item["id"], + collection_id=ctx.item["collection"], + request=MockRequest, + ) + assert got_item["id"] == ctx.item["id"] + assert got_item["collection"] == ctx.item["collection"] + + +@pytest.mark.asyncio +async def test_get_collection_items(app_client, ctx, core_client, txn_client): + coll = ctx.collection + num_of_items_to_create = 5 + for _ in range(num_of_items_to_create): + item = deepcopy(ctx.item) + item["id"] = str(uuid.uuid4()) + await txn_client.create_item( + collection_id=item["collection"], + item=item, + request=MockRequest, + refresh=True, + ) + + fc = await core_client.item_collection(coll["id"], request=MockRequest()) + assert len(fc["features"]) == num_of_items_to_create + 1 # ctx.item + + for item in fc["features"]: + assert item["collection"] == coll["id"] + + +@pytest.mark.asyncio +async def test_create_item(ctx, core_client, txn_client): + resp = await core_client.get_item( + ctx.item["id"], ctx.item["collection"], request=MockRequest + ) + assert Item(**ctx.item).dict( + exclude={"links": ..., "properties": {"created", "updated"}} + ) == Item(**resp).dict(exclude={"links": ..., "properties": {"created", "updated"}}) + + +@pytest.mark.asyncio +async def test_create_item_already_exists(ctx, txn_client): + with pytest.raises(ConflictError): + await txn_client.create_item( + collection_id=ctx.item["collection"], + item=ctx.item, + 
request=MockRequest, + refresh=True, + ) + + +@pytest.mark.asyncio +async def test_update_item(ctx, core_client, txn_client): + ctx.item["properties"]["foo"] = "bar" + collection_id = ctx.item["collection"] + item_id = ctx.item["id"] + await txn_client.update_item( + collection_id=collection_id, item_id=item_id, item=ctx.item, request=MockRequest + ) + + updated_item = await core_client.get_item( + item_id, collection_id, request=MockRequest + ) + assert updated_item["properties"]["foo"] == "bar" + + +@pytest.mark.asyncio +async def test_update_geometry(ctx, core_client, txn_client): + new_coordinates = [ + [ + [142.15052873427666, -33.82243006904891], + [140.1000346138806, -34.257132625788756], + [139.5776607193635, -32.514709769700254], + [141.6262528041627, -32.08081674221862], + [142.15052873427666, -33.82243006904891], + ] + ] + + ctx.item["geometry"]["coordinates"] = new_coordinates + collection_id = ctx.item["collection"] + item_id = ctx.item["id"] + await txn_client.update_item( + collection_id=collection_id, item_id=item_id, item=ctx.item, request=MockRequest + ) + + updated_item = await core_client.get_item( + item_id, collection_id, request=MockRequest + ) + assert updated_item["geometry"]["coordinates"] == new_coordinates + + +@pytest.mark.asyncio +async def test_delete_item(ctx, core_client, txn_client): + await txn_client.delete_item(ctx.item["id"], ctx.item["collection"]) + + with pytest.raises(NotFoundError): + await core_client.get_item( + ctx.item["id"], ctx.item["collection"], request=MockRequest + ) + + +@pytest.mark.asyncio +async def test_bulk_item_insert(ctx, core_client, txn_client, bulk_txn_client): + items = {} + for _ in range(10): + _item = deepcopy(ctx.item) + _item["id"] = str(uuid.uuid4()) + items[_item["id"]] = _item + + # fc = es_core.item_collection(coll["id"], request=MockStarletteRequest) + # assert len(fc["features"]) == 0 + + bulk_txn_client.bulk_item_insert(Items(items=items), refresh=True) + + fc = await 
core_client.item_collection(ctx.collection["id"], request=MockRequest()) + assert len(fc["features"]) >= 10 + + # for item in items: + # es_transactions.delete_item( + # item["id"], item["collection"], request=MockStarletteRequest + # ) + + +@pytest.mark.asyncio +async def test_feature_collection_insert( + core_client, + txn_client, + ctx, +): + features = [] + for _ in range(10): + _item = deepcopy(ctx.item) + _item["id"] = str(uuid.uuid4()) + features.append(_item) + + feature_collection = {"type": "FeatureCollection", "features": features} + + await create_item(txn_client, feature_collection) + + fc = await core_client.item_collection(ctx.collection["id"], request=MockRequest()) + assert len(fc["features"]) >= 10 + + +@pytest.mark.asyncio +async def test_landing_page_no_collection_title(ctx, core_client, txn_client, app): + ctx.collection["id"] = "new_id" + del ctx.collection["title"] + await txn_client.create_collection(ctx.collection, request=MockRequest) + + landing_page = await core_client.landing_page(request=MockRequest(app=app)) + for link in landing_page["links"]: + if link["href"].split("/")[-1] == ctx.collection["id"]: + assert link["title"] diff --git a/stac_fastapi/tests/conftest.py b/stac_fastapi/tests/conftest.py new file mode 100644 index 00000000..01160ee1 --- /dev/null +++ b/stac_fastapi/tests/conftest.py @@ -0,0 +1,221 @@ +import asyncio +import copy +import json +import os +from typing import Any, Callable, Dict, Optional + +import pytest +import pytest_asyncio +from httpx import AsyncClient + +from stac_fastapi.api.app import StacApi +from stac_fastapi.api.models import create_get_request_model, create_post_request_model +from stac_fastapi.core.core import ( + BulkTransactionsClient, + CoreClient, + TransactionsClient, +) +from stac_fastapi.core.extensions import QueryExtension + +if os.getenv("BACKEND", "elasticsearch").lower() == "opensearch": + from stac_fastapi.opensearch.config import AsyncOpensearchSettings as AsyncSettings + from 
stac_fastapi.opensearch.config import OpensearchSettings as SearchSettings + from stac_fastapi.opensearch.database_logic import ( + DatabaseLogic, + create_collection_index, + ) +else: + from stac_fastapi.elasticsearch.config import ( + ElasticsearchSettings as SearchSettings, + AsyncElasticsearchSettings as AsyncSettings, + ) + from stac_fastapi.elasticsearch.database_logic import ( + DatabaseLogic, + create_collection_index, + ) + +from stac_fastapi.extensions.core import ( # FieldsExtension, + ContextExtension, + FieldsExtension, + FilterExtension, + SortExtension, + TokenPaginationExtension, + TransactionExtension, +) +from stac_fastapi.types.config import Settings + +DATA_DIR = os.path.join(os.path.dirname(__file__), "data") + + +class Context: + def __init__(self, item, collection): + self.item = item + self.collection = collection + + +class MockRequest: + base_url = "http://test-server" + query_params = {} + + def __init__( + self, + method: str = "GET", + url: str = "XXXX", + app: Optional[Any] = None, + query_params: Dict[str, Any] = {"limit": "10"}, + ): + self.method = method + self.url = url + self.app = app + self.query_params = query_params + + +class TestSettings(AsyncSettings): + class Config: + env_file = ".env.test" + + +settings = TestSettings() +Settings.set(settings) + + +@pytest.fixture(scope="session") +def event_loop(): + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + yield loop + loop.close() + + +def _load_file(filename: str) -> Dict: + with open(os.path.join(DATA_DIR, filename)) as file: + return json.load(file) + + +_test_item_prototype = _load_file("test_item.json") +_test_collection_prototype = _load_file("test_collection.json") + + +@pytest.fixture +def load_test_data() -> Callable[[str], Dict]: + return _load_file + + +@pytest.fixture +def test_item() -> Dict: + return copy.deepcopy(_test_item_prototype) + + +@pytest.fixture +def test_collection() -> Dict: + return copy.deepcopy(_test_collection_prototype) + + 
+async def create_collection(txn_client: TransactionsClient, collection: Dict) -> None: + await txn_client.create_collection( + dict(collection), request=MockRequest, refresh=True + ) + + +async def create_item(txn_client: TransactionsClient, item: Dict) -> None: + if "collection" in item: + await txn_client.create_item( + collection_id=item["collection"], + item=item, + request=MockRequest, + refresh=True, + ) + else: + await txn_client.create_item( + collection_id=item["features"][0]["collection"], + item=item, + request=MockRequest, + refresh=True, + ) + + +async def delete_collections_and_items(txn_client: TransactionsClient) -> None: + await refresh_indices(txn_client) + await txn_client.database.delete_items() + await txn_client.database.delete_collections() + + +async def refresh_indices(txn_client: TransactionsClient) -> None: + try: + await txn_client.database.client.indices.refresh(index="_all") + except Exception: + pass + + +@pytest_asyncio.fixture() +async def ctx(txn_client: TransactionsClient, test_collection, test_item): + # todo remove one of these when all methods use it + await delete_collections_and_items(txn_client) + + await create_collection(txn_client, test_collection) + await create_item(txn_client, test_item) + + yield Context(item=test_item, collection=test_collection) + + await delete_collections_and_items(txn_client) + + +database = DatabaseLogic() +settings = SearchSettings() + + +@pytest.fixture +def core_client(): + return CoreClient(database=database, session=None) + + +@pytest.fixture +def txn_client(): + return TransactionsClient(database=database, session=None, settings=settings) + + +@pytest.fixture +def bulk_txn_client(): + return BulkTransactionsClient(database=database, session=None, settings=settings) + + +@pytest_asyncio.fixture(scope="session") +async def app(): + settings = AsyncSettings() + extensions = [ + TransactionExtension( + client=TransactionsClient( + database=database, session=None, settings=settings + ), + 
settings=settings, + ), + ContextExtension(), + SortExtension(), + FieldsExtension(), + QueryExtension(), + TokenPaginationExtension(), + FilterExtension(), + ] + + post_request_model = create_post_request_model(extensions) + + return StacApi( + settings=settings, + client=CoreClient( + database=database, + session=None, + extensions=extensions, + post_request_model=post_request_model, + ), + extensions=extensions, + search_get_request_model=create_get_request_model(extensions), + search_post_request_model=post_request_model, + ).app + + +@pytest_asyncio.fixture(scope="session") +async def app_client(app): + await create_collection_index() + + async with AsyncClient(app=app, base_url="http://test-server") as c: + yield c diff --git a/stac_fastapi/tests/data/test_collection.json b/stac_fastapi/tests/data/test_collection.json new file mode 100644 index 00000000..391b906c --- /dev/null +++ b/stac_fastapi/tests/data/test_collection.json @@ -0,0 +1,99 @@ +{ + "id": "test-collection", + "stac_extensions": ["https://stac-extensions.github.io/eo/v1.0.0/schema.json"], + "type": "Collection", + "description": "Landat 8 imagery radiometrically calibrated and orthorectified using gound points and Digital Elevation Model (DEM) data to correct relief displacement.", + "stac_version": "1.0.0", + "license": "PDDL-1.0", + "summaries": { + "platform": ["landsat-8"], + "instruments": ["oli", "tirs"], + "gsd": [30] + }, + "extent": { + "spatial": { + "bbox": [ + [ + -180.0, + -90.0, + 180.0, + 90.0 + ] + ] + }, + "temporal": { + "interval": [ + [ + "2013-06-01", + null + ] + ] + } + }, + "links": [ + { + "href": "http://localhost:8081/collections/landsat-8-l1", + "rel": "self", + "type": "application/json" + }, + { + "href": "http://localhost:8081/", + "rel": "parent", + "type": "application/json" + }, + { + "href": "http://localhost:8081/collections/landsat-8-l1/items", + "rel": "item", + "type": "application/geo+json" + }, + { + "href": "http://localhost:8081/", + "rel": "root", + 
"type": "application/json" + } + ], + "title": "Landsat 8 L1", + "keywords": [ + "landsat", + "earth observation", + "usgs" + ], + "providers": [ + { + "name": "USGS", + "roles": [ + "producer" + ], + "url": "https://landsat.usgs.gov/" + }, + { + "name": "Planet Labs", + "roles": [ + "processor" + ], + "url": "https://github.com/landsat-pds/landsat_ingestor" + }, + { + "name": "AWS", + "roles": [ + "host" + ], + "url": "https://landsatonaws.com/" + }, + { + "name": "Development Seed", + "roles": [ + "processor" + ], + "url": "https://github.com/sat-utils/sat-api" + }, + { + "name": "Earth Search by Element84", + "description": "API of Earth on AWS datasets", + "roles": [ + "host" + ], + "url": "https://element84.com" + } + ] +} \ No newline at end of file diff --git a/stac_fastapi/tests/data/test_item.json b/stac_fastapi/tests/data/test_item.json new file mode 100644 index 00000000..2b7fdd86 --- /dev/null +++ b/stac_fastapi/tests/data/test_item.json @@ -0,0 +1,505 @@ +{ + "type": "Feature", + "id": "test-item", + "stac_version": "1.0.0", + "stac_extensions": [ + "https://stac-extensions.github.io/eo/v1.0.0/schema.json", + "https://stac-extensions.github.io/projection/v1.0.0/schema.json" + ], + "geometry": { + "coordinates": [ + [ + [ + 152.15052873427666, + -33.82243006904891 + ], + [ + 150.1000346138806, + -34.257132625788756 + ], + [ + 149.5776607193635, + -32.514709769700254 + ], + [ + 151.6262528041627, + -32.08081674221862 + ], + [ + 152.15052873427666, + -33.82243006904891 + ] + ] + ], + "type": "Polygon" + }, + "properties": { + "datetime": "2020-02-12T12:30:22Z", + "landsat:scene_id": "LC82081612020043LGN00", + "landsat:row": "161", + "gsd": 15, + "eo:bands": [ + { + "gsd": 30, + "name": "B1", + "common_name": "coastal", + "center_wavelength": 0.44, + "full_width_half_max": 0.02 + }, + { + "gsd": 30, + "name": "B2", + "common_name": "blue", + "center_wavelength": 0.48, + "full_width_half_max": 0.06 + }, + { + "gsd": 30, + "name": "B3", + "common_name": 
"green", + "center_wavelength": 0.56, + "full_width_half_max": 0.06 + }, + { + "gsd": 30, + "name": "B4", + "common_name": "red", + "center_wavelength": 0.65, + "full_width_half_max": 0.04 + }, + { + "gsd": 30, + "name": "B5", + "common_name": "nir", + "center_wavelength": 0.86, + "full_width_half_max": 0.03 + }, + { + "gsd": 30, + "name": "B6", + "common_name": "swir16", + "center_wavelength": 1.6, + "full_width_half_max": 0.08 + }, + { + "gsd": 30, + "name": "B7", + "common_name": "swir22", + "center_wavelength": 2.2, + "full_width_half_max": 0.2 + }, + { + "gsd": 15, + "name": "B8", + "common_name": "pan", + "center_wavelength": 0.59, + "full_width_half_max": 0.18 + }, + { + "gsd": 30, + "name": "B9", + "common_name": "cirrus", + "center_wavelength": 1.37, + "full_width_half_max": 0.02 + }, + { + "gsd": 100, + "name": "B10", + "common_name": "lwir11", + "center_wavelength": 10.9, + "full_width_half_max": 0.8 + }, + { + "gsd": 100, + "name": "B11", + "common_name": "lwir12", + "center_wavelength": 12, + "full_width_half_max": 1 + } + ], + "landsat:revision": "00", + "view:sun_azimuth": -148.83296771, + "instrument": "OLI_TIRS", + "landsat:product_id": "LC08_L1GT_208161_20200212_20200212_01_RT", + "eo:cloud_cover": 0, + "landsat:tier": "RT", + "landsat:processing_level": "L1GT", + "landsat:column": "208", + "platform": "landsat-8", + "proj:epsg": 32756, + "view:sun_elevation": -37.30791534, + "view:off_nadir": 0, + "height": 2500, + "width": 2500 + }, + "bbox": [ + 149.57574, + -34.25796, + 152.15194, + -32.07915 + ], + "collection": "test-collection", + "assets": { + "ANG": { + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ANG.txt", + "type": "text/plain", + "title": "Angle Coefficients File", + "description": "Collection 2 Level-1 Angle Coefficients File (ANG)" + }, + "SR_B1": { + "gsd": 30, + "href": 
"https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B1.TIF", + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "title": "Coastal/Aerosol Band (B1)", + "eo:bands": [ + { + "gsd": 30, + "name": "SR_B1", + "common_name": "coastal", + "center_wavelength": 0.44, + "full_width_half_max": 0.02 + } + ], + "proj:shape": [ + 7731, + 7591 + ], + "description": "Collection 2 Level-2 Coastal/Aerosol Band (B1) Surface Reflectance", + "proj:transform": [ + 30, + 0, + 304185, + 0, + -30, + -843585 + ] + }, + "SR_B2": { + "gsd": 30, + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B2.TIF", + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "title": "Blue Band (B2)", + "eo:bands": [ + { + "gsd": 30, + "name": "SR_B2", + "common_name": "blue", + "center_wavelength": 0.48, + "full_width_half_max": 0.06 + } + ], + "proj:shape": [ + 7731, + 7591 + ], + "description": "Collection 2 Level-2 Blue Band (B2) Surface Reflectance", + "proj:transform": [ + 30, + 0, + 304185, + 0, + -30, + -843585 + ] + }, + "SR_B3": { + "gsd": 30, + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B3.TIF", + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "title": "Green Band (B3)", + "eo:bands": [ + { + "gsd": 30, + "name": "SR_B3", + "common_name": "green", + "center_wavelength": 0.56, + "full_width_half_max": 0.06 + } + ], + "proj:shape": [ + 7731, + 7591 + ], + "description": "Collection 2 Level-2 Green Band (B3) Surface Reflectance", + "proj:transform": [ + 30, + 0, + 304185, + 0, + -30, + -843585 + ] + }, + "SR_B4": { + "gsd": 30, + "href": 
"https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B4.TIF", + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "title": "Red Band (B4)", + "eo:bands": [ + { + "gsd": 30, + "name": "SR_B4", + "common_name": "red", + "center_wavelength": 0.65, + "full_width_half_max": 0.04 + } + ], + "proj:shape": [ + 7731, + 7591 + ], + "description": "Collection 2 Level-2 Red Band (B4) Surface Reflectance", + "proj:transform": [ + 30, + 0, + 304185, + 0, + -30, + -843585 + ] + }, + "SR_B5": { + "gsd": 30, + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B5.TIF", + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "title": "Near Infrared Band 0.8 (B5)", + "eo:bands": [ + { + "gsd": 30, + "name": "SR_B5", + "common_name": "nir08", + "center_wavelength": 0.86, + "full_width_half_max": 0.03 + } + ], + "proj:shape": [ + 7731, + 7591 + ], + "description": "Collection 2 Level-2 Near Infrared Band 0.8 (B5) Surface Reflectance", + "proj:transform": [ + 30, + 0, + 304185, + 0, + -30, + -843585 + ] + }, + "SR_B6": { + "gsd": 30, + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B6.TIF", + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "title": "Short-wave Infrared Band 1.6 (B6)", + "eo:bands": [ + { + "gsd": 30, + "name": "SR_B6", + "common_name": "swir16", + "center_wavelength": 1.6, + "full_width_half_max": 0.08 + } + ], + "proj:shape": [ + 7731, + 7591 + ], + "description": "Collection 2 Level-2 Short-wave Infrared Band 1.6 (B6) Surface Reflectance", + "proj:transform": [ + 30, + 0, + 304185, + 0, + -30, + -843585 + ] + }, + 
"SR_B7": { + "gsd": 30, + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B7.TIF", + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "title": "Short-wave Infrared Band 2.2 (B7)", + "eo:bands": [ + { + "gsd": 30, + "name": "SR_B7", + "common_name": "swir22", + "center_wavelength": 2.2, + "full_width_half_max": 0.2 + } + ], + "proj:shape": [ + 7731, + 7591 + ], + "description": "Collection 2 Level-2 Short-wave Infrared Band 2.2 (B7) Surface Reflectance", + "proj:transform": [ + 30, + 0, + 304185, + 0, + -30, + -843585 + ] + }, + "ST_QA": { + "gsd": 30, + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ST_QA.TIF", + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "title": "Surface Temperature Quality Assessment Band", + "proj:shape": [ + 7731, + 7591 + ], + "description": "Landsat Collection 2 Level-2 Surface Temperature Band Surface Temperature Product", + "proj:transform": [ + 30, + 0, + 304185, + 0, + -30, + -843585 + ] + }, + "ST_B10": { + "gsd": 100, + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ST_B10.TIF", + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "title": "Surface Temperature Band (B10)", + "eo:bands": [ + { + "gsd": 100, + "name": "ST_B10", + "common_name": "lwir11", + "center_wavelength": 10.9, + "full_width_half_max": 0.8 + } + ], + "proj:shape": [ + 7731, + 7591 + ], + "description": "Landsat Collection 2 Level-2 Surface Temperature Band (B10) Surface Temperature Product", + "proj:transform": [ + 30, + 0, + 304185, + 0, + -30, + -843585 + ] + }, + "MTL.txt": { + "href": 
"https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_MTL.txt", + "type": "text/plain", + "title": "Product Metadata File", + "description": "Collection 2 Level-1 Product Metadata File (MTL)" + }, + "MTL.xml": { + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_MTL.xml", + "type": "application/xml", + "title": "Product Metadata File (xml)", + "description": "Collection 2 Level-1 Product Metadata File (xml)" + }, + "ST_DRAD": { + "gsd": 30, + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ST_DRAD.TIF", + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "title": "Downwelled Radiance Band", + "eo:bands": [ + { + "gsd": 30, + "name": "ST_DRAD", + "description": "downwelled radiance" + } + ], + "proj:shape": [ + 7731, + 7591 + ], + "description": "Landsat Collection 2 Level-2 Downwelled Radiance Band Surface Temperature Product", + "proj:transform": [ + 30, + 0, + 304185, + 0, + -30, + -843585 + ] + }, + "ST_EMIS": { + "gsd": 30, + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ST_EMIS.TIF", + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "title": "Emissivity Band", + "eo:bands": [ + { + "gsd": 30, + "name": "ST_EMIS", + "description": "emissivity" + } + ], + "proj:shape": [ + 7731, + 7591 + ], + "description": "Landsat Collection 2 Level-2 Emissivity Band Surface Temperature Product", + "proj:transform": [ + 30, + 0, + 304185, + 0, + -30, + -843585 + ] + }, + "ST_EMSD": { + "gsd": 30, + "href": 
"https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ST_EMSD.TIF", + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "title": "Emissivity Standard Deviation Band", + "eo:bands": [ + { + "gsd": 30, + "name": "ST_EMSD", + "description": "emissivity standard deviation" + } + ], + "proj:shape": [ + 7731, + 7591 + ], + "description": "Landsat Collection 2 Level-2 Emissivity Standard Deviation Band Surface Temperature Product", + "proj:transform": [ + 30, + 0, + 304185, + 0, + -30, + -843585 + ] + } + }, + "links": [ + { + "href": "http://localhost:8081/collections/landsat-8-l1/items/LC82081612020043", + "rel": "self", + "type": "application/geo+json" + }, + { + "href": "http://localhost:8081/collections/landsat-8-l1", + "rel": "parent", + "type": "application/json" + }, + { + "href": "http://localhost:8081/collections/landsat-8-l1", + "rel": "collection", + "type": "application/json" + }, + { + "href": "http://localhost:8081/", + "rel": "root", + "type": "application/json" + } + ] +} \ No newline at end of file diff --git a/stac_fastapi/tests/extensions/cql2/example01.json b/stac_fastapi/tests/extensions/cql2/example01.json new file mode 100644 index 00000000..a70bd0d3 --- /dev/null +++ b/stac_fastapi/tests/extensions/cql2/example01.json @@ -0,0 +1,9 @@ +{ + "op": "=", + "args": [ + { + "property": "scene_id" + }, + "LC82030282019133LGN00" + ] +} diff --git a/stac_fastapi/tests/extensions/cql2/example04.json b/stac_fastapi/tests/extensions/cql2/example04.json new file mode 100644 index 00000000..e087504c --- /dev/null +++ b/stac_fastapi/tests/extensions/cql2/example04.json @@ -0,0 +1,32 @@ +{ + "op": "and", + "args": [ + { + "op": "<", + "args": [ + { + "property": "cloud_cover" + }, + 0.1 + ] + }, + { + "op": "=", + "args": [ + { + "property": "landsat:wrs_row" + }, + 28 + ] + }, + { + "op": "=", + "args": [ + { + "property": 
"landsat:wrs_path" + }, + 203 + ] + } + ] +} diff --git a/stac_fastapi/tests/extensions/cql2/example05a.json b/stac_fastapi/tests/extensions/cql2/example05a.json new file mode 100644 index 00000000..b5bd7a94 --- /dev/null +++ b/stac_fastapi/tests/extensions/cql2/example05a.json @@ -0,0 +1,23 @@ +{ + "op": "or", + "args": [ + { + "op": "=", + "args": [ + { + "property": "cloud_cover" + }, + 0.1 + ] + }, + { + "op": "=", + "args": [ + { + "property": "cloud_cover" + }, + 0.2 + ] + } + ] +} diff --git a/stac_fastapi/tests/extensions/cql2/example06b.json b/stac_fastapi/tests/extensions/cql2/example06b.json new file mode 100644 index 00000000..fc2a7e56 --- /dev/null +++ b/stac_fastapi/tests/extensions/cql2/example06b.json @@ -0,0 +1,41 @@ +{ + "op": "and", + "args": [ + { + "op": ">=", + "args": [ + { + "property": "cloud_cover" + }, + 0.1 + ] + }, + { + "op": "<=", + "args": [ + { + "property": "cloud_cover" + }, + 0.2 + ] + }, + { + "op": "=", + "args": [ + { + "property": "landsat:wrs_row" + }, + 28 + ] + }, + { + "op": "=", + "args": [ + { + "property": "landsat:wrs_path" + }, + 203 + ] + } + ] +} diff --git a/stac_fastapi/tests/extensions/cql2/example08.json b/stac_fastapi/tests/extensions/cql2/example08.json new file mode 100644 index 00000000..2f06413f --- /dev/null +++ b/stac_fastapi/tests/extensions/cql2/example08.json @@ -0,0 +1,79 @@ +{ + "op": "and", + "args": [ + { + "op": "=", + "args": [ + { + "property": "beamMode" + }, + "ScanSAR Narrow" + ] + }, + { + "op": "=", + "args": [ + { + "property": "swathDirection" + }, + "ascending" + ] + }, + { + "op": "=", + "args": [ + { + "property": "polarization" + }, + "HH+VV+HV+VH" + ] + }, + { + "op": "s_intersects", + "args": [ + { + "property": "footprint" + }, + { + "type": "Polygon", + "coordinates": [ + [ + [ + -77.117938, + 38.93686 + ], + [ + -77.040604, + 39.995648 + ], + [ + -76.910536, + 38.892912 + ], + [ + -77.039359, + 38.791753 + ], + [ + -77.047906, + 38.841462 + ], + [ + -77.034183, + 38.840655 + ], 
+ [ + -77.033142, + 38.85749 + ], + [ + -77.117938, + 38.93686 + ] + ] + ] + } + ] + } + ] +} diff --git a/stac_fastapi/tests/extensions/cql2/example09.json b/stac_fastapi/tests/extensions/cql2/example09.json new file mode 100644 index 00000000..9f562fb4 --- /dev/null +++ b/stac_fastapi/tests/extensions/cql2/example09.json @@ -0,0 +1,9 @@ +{ + "op": ">", + "args": [ + { + "property": "floors" + }, + 5 + ] +} diff --git a/stac_fastapi/tests/extensions/cql2/example1.json b/stac_fastapi/tests/extensions/cql2/example1.json new file mode 100644 index 00000000..48483548 --- /dev/null +++ b/stac_fastapi/tests/extensions/cql2/example1.json @@ -0,0 +1,39 @@ + { + "op": "and", + "args": [ + { + "op": "=", + "args": [ + {"property": "id"}, + "LC08_L1TP_060247_20180905_20180912_01_T1_L1TP" + ] + }, + {"op": "=", "args": [{"property": "collection"}, "landsat8_l1tp"]}, + { + "op": ">", + "args": [ + {"property": "properties.datetime"}, + {"timestamp": "2022-04-29T00:00:00Z"} + ] + }, + {"op": "<", "args": [{"property": "properties.eo:cloud_cover"}, 10]}, + { + "op": "s_intersects", + "args": [ + {"property": "geometry"}, + { + "type": "Polygon", + "coordinates": [ + [ + [36.319836, 32.288087], + [36.320041, 32.288032], + [36.320210, 32.288402], + [36.320008, 32.288458], + [36.319836, 32.288087] + ] + ] + } + ] + } + ] +} \ No newline at end of file diff --git a/stac_fastapi/tests/extensions/cql2/example10.json b/stac_fastapi/tests/extensions/cql2/example10.json new file mode 100644 index 00000000..870303ea --- /dev/null +++ b/stac_fastapi/tests/extensions/cql2/example10.json @@ -0,0 +1,9 @@ +{ + "op": "<=", + "args": [ + { + "property": "taxes" + }, + 500 + ] +} diff --git a/stac_fastapi/tests/extensions/cql2/example14.json b/stac_fastapi/tests/extensions/cql2/example14.json new file mode 100644 index 00000000..fad45d48 --- /dev/null +++ b/stac_fastapi/tests/extensions/cql2/example14.json @@ -0,0 +1,9 @@ +{ + "op": "=", + "args": [ + { + "property": "swimming_pool" + }, + true + 
] +} diff --git a/stac_fastapi/tests/extensions/cql2/example15.json b/stac_fastapi/tests/extensions/cql2/example15.json new file mode 100644 index 00000000..98f96797 --- /dev/null +++ b/stac_fastapi/tests/extensions/cql2/example15.json @@ -0,0 +1,23 @@ +{ + "op": "and", + "args": [ + { + "op": ">", + "args": [ + { + "property": "floor" + }, + 5 + ] + }, + { + "op": "=", + "args": [ + { + "property": "swimming_pool" + }, + true + ] + } + ] +} diff --git a/stac_fastapi/tests/extensions/cql2/example17.json b/stac_fastapi/tests/extensions/cql2/example17.json new file mode 100644 index 00000000..9b215273 --- /dev/null +++ b/stac_fastapi/tests/extensions/cql2/example17.json @@ -0,0 +1,37 @@ +{ + "op": "or", + "args": [ + { + "op": "and", + "args": [ + { + "op": ">", + "args": [ + { + "property": "floors" + }, + 5 + ] + }, + { + "op": "=", + "args": [ + { + "property": "material" + }, + "brick" + ] + } + ] + }, + { + "op": "=", + "args": [ + { + "property": "swimming_pool" + }, + true + ] + } + ] +} diff --git a/stac_fastapi/tests/extensions/cql2/example18.json b/stac_fastapi/tests/extensions/cql2/example18.json new file mode 100644 index 00000000..7087a151 --- /dev/null +++ b/stac_fastapi/tests/extensions/cql2/example18.json @@ -0,0 +1,28 @@ +{ + "op": "or", + "args": [ + { + "op": "not", + "args": [ + { + "op": "<", + "args": [ + { + "property": "floors" + }, + 5 + ] + } + ] + }, + { + "op": "=", + "args": [ + { + "property": "swimming_pool" + }, + true + ] + } + ] +} diff --git a/stac_fastapi/tests/extensions/cql2/example19.json b/stac_fastapi/tests/extensions/cql2/example19.json new file mode 100755 index 00000000..0e4306fb --- /dev/null +++ b/stac_fastapi/tests/extensions/cql2/example19.json @@ -0,0 +1,9 @@ +{ + "op": "like", + "args": [ + { + "property": "scene_id" + }, + "LC82030282019133%" + ] +} diff --git a/stac_fastapi/tests/extensions/cql2/example20.json b/stac_fastapi/tests/extensions/cql2/example20.json new file mode 100755 index 00000000..f7412fc0 --- 
/dev/null +++ b/stac_fastapi/tests/extensions/cql2/example20.json @@ -0,0 +1,10 @@ +{ + "op": "like", + "args": [ + { + "property": "scene_id" + }, + "LC82030282019133LGN0_" + ] + } + \ No newline at end of file diff --git a/stac_fastapi/tests/extensions/cql2/example21.json b/stac_fastapi/tests/extensions/cql2/example21.json new file mode 100644 index 00000000..175b8732 --- /dev/null +++ b/stac_fastapi/tests/extensions/cql2/example21.json @@ -0,0 +1,33 @@ +{ + "op": "and", + "args": [ + { + "op": "between", + "args": [ + { + "property": "cloud_cover" + }, + 0.1, + 0.2 + ] + }, + { + "op": "=", + "args": [ + { + "property": "landsat:wrs_row" + }, + 28 + ] + }, + { + "op": "=", + "args": [ + { + "property": "landsat:wrs_path" + }, + 203 + ] + } + ] +} \ No newline at end of file diff --git a/stac_fastapi/tests/extensions/cql2/example22.json b/stac_fastapi/tests/extensions/cql2/example22.json new file mode 100644 index 00000000..880c90c3 --- /dev/null +++ b/stac_fastapi/tests/extensions/cql2/example22.json @@ -0,0 +1,13 @@ +{ + "op": "and", + "args": [ + { + "op": "in", + "args": [ + {"property": "id"}, + ["LC08_L1TP_060247_20180905_20180912_01_T1_L1TP"] + ] + }, + {"op": "=", "args": [{"property": "collection"}, "landsat8_l1tp"]} + ] +} \ No newline at end of file diff --git a/stac_fastapi/tests/extensions/test_filter.py b/stac_fastapi/tests/extensions/test_filter.py new file mode 100644 index 00000000..72cea59f --- /dev/null +++ b/stac_fastapi/tests/extensions/test_filter.py @@ -0,0 +1,402 @@ +import json +import os +from os import listdir +from os.path import isfile, join + +import pytest + +THIS_DIR = os.path.dirname(os.path.abspath(__file__)) + + +@pytest.mark.asyncio +async def test_search_filters_post(app_client, ctx): + + filters = [] + pwd = f"{THIS_DIR}/cql2" + for fn in [fn for f in listdir(pwd) if isfile(fn := join(pwd, f))]: + with open(fn) as f: + filters.append(json.loads(f.read())) + + for _filter in filters: + resp = await app_client.post("/search", 
json={"filter": _filter}) + assert resp.status_code == 200 + + +@pytest.mark.asyncio +async def test_search_filter_extension_eq_get(app_client, ctx): + resp = await app_client.get( + '/search?filter-lang=cql2-json&filter={"op":"=","args":[{"property":"id"},"test-item"]}' + ) + assert resp.status_code == 200 + resp_json = resp.json() + assert len(resp_json["features"]) == 1 + + +@pytest.mark.asyncio +async def test_search_filter_extension_eq_post(app_client, ctx): + params = {"filter": {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}} + resp = await app_client.post("/search", json=params) + assert resp.status_code == 200 + resp_json = resp.json() + assert len(resp_json["features"]) == 1 + + +@pytest.mark.asyncio +async def test_search_filter_extension_gte_get(app_client, ctx): + # there's one item that can match, so one of these queries should match it and the other shouldn't + resp = await app_client.get( + '/search?filter-lang=cql2-json&filter={"op":"<=","args":[{"property": "properties.proj:epsg"},32756]}' + ) + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 1 + + resp = await app_client.get( + '/search?filter-lang=cql2-json&filter={"op":">","args":[{"property": "properties.proj:epsg"},32756]}' + ) + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 0 + + +@pytest.mark.asyncio +async def test_search_filter_extension_gte_post(app_client, ctx): + # there's one item that can match, so one of these queries should match it and the other shouldn't + params = { + "filter": { + "op": "<=", + "args": [ + {"property": "properties.proj:epsg"}, + ctx.item["properties"]["proj:epsg"], + ], + } + } + resp = await app_client.post("/search", json=params) + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 1 + + params = { + "filter": { + "op": ">", + "args": [ + {"property": "properties.proj:epsg"}, + ctx.item["properties"]["proj:epsg"], + ], + } + } + resp = await app_client.post("/search", 
json=params) + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 0 + + +@pytest.mark.asyncio +async def test_search_filter_ext_and_get(app_client, ctx): + resp = await app_client.get( + '/search?filter-lang=cql2-json&filter={"op":"and","args":[{"op":"<=","args":[{"property":"properties.proj:epsg"},32756]},{"op":"=","args":[{"property":"id"},"test-item"]}]}' + ) + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 1 + + +@pytest.mark.asyncio +async def test_search_filter_ext_and_get_id(app_client, ctx): + collection = ctx.item["collection"] + id = ctx.item["id"] + filter = f"id='{id}' AND collection='{collection}'" + resp = await app_client.get(f"/search?&filter={filter}") + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 1 + + +@pytest.mark.asyncio +async def test_search_filter_ext_and_get_cql2text_id(app_client, ctx): + collection = ctx.item["collection"] + id = ctx.item["id"] + filter = f"id='{id}' AND collection='{collection}'" + resp = await app_client.get(f"/search?filter-lang=cql2-text&filter={filter}") + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 1 + + +@pytest.mark.asyncio +async def test_search_filter_ext_and_get_cql2text_cloud_cover(app_client, ctx): + collection = ctx.item["collection"] + cloud_cover = ctx.item["properties"]["eo:cloud_cover"] + filter = f"cloud_cover={cloud_cover} AND collection='{collection}'" + resp = await app_client.get(f"/search?filter-lang=cql2-text&filter={filter}") + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 1 + + +@pytest.mark.asyncio +async def test_search_filter_ext_and_get_cql2text_cloud_cover_no_results( + app_client, ctx +): + collection = ctx.item["collection"] + cloud_cover = ctx.item["properties"]["eo:cloud_cover"] + 1 + filter = f"cloud_cover={cloud_cover} AND collection='{collection}'" + resp = await app_client.get(f"/search?filter-lang=cql2-text&filter={filter}") + + assert 
resp.status_code == 200 + assert len(resp.json()["features"]) == 0 + + +@pytest.mark.asyncio +async def test_search_filter_ext_and_post(app_client, ctx): + params = { + "filter": { + "op": "and", + "args": [ + { + "op": "<=", + "args": [ + {"property": "properties.proj:epsg"}, + ctx.item["properties"]["proj:epsg"], + ], + }, + {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}, + ], + } + } + resp = await app_client.post("/search", json=params) + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 1 + + +@pytest.mark.asyncio +async def test_search_filter_extension_floats_get(app_client, ctx): + resp = await app_client.get( + """/search?filter-lang=cql2-json&filter={"op":"and","args":[{"op":"=","args":[{"property":"id"},"test-item"]},{"op":">","args":[{"property":"properties.view:sun_elevation"},"-37.30891534"]},{"op":"<","args":[{"property":"properties.view:sun_elevation"},"-37.30691534"]}]}""" + ) + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 1 + + resp = await app_client.get( + """/search?filter-lang=cql2-json&filter={"op":"and","args":[{"op":"=","args":[{"property":"id"},"test-item-7"]},{"op":">","args":[{"property":"properties.view:sun_elevation"},"-37.30891534"]},{"op":"<","args":[{"property":"properties.view:sun_elevation"},"-37.30691534"]}]}""" + ) + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 0 + + resp = await app_client.get( + """/search?filter-lang=cql2-json&filter={"op":"and","args":[{"op":"=","args":[{"property":"id"},"test-item"]},{"op":">","args":[{"property":"properties.view:sun_elevation"},"-37.30591534"]},{"op":"<","args":[{"property":"properties.view:sun_elevation"},"-37.30491534"]}]}""" + ) + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 0 + + +@pytest.mark.asyncio +async def test_search_filter_extension_floats_post(app_client, ctx): + sun_elevation = ctx.item["properties"]["view:sun_elevation"] + + params = { + "filter": { + 
"op": "and", + "args": [ + {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}, + { + "op": ">", + "args": [ + {"property": "properties.view:sun_elevation"}, + sun_elevation - 0.01, + ], + }, + { + "op": "<", + "args": [ + {"property": "properties.view:sun_elevation"}, + sun_elevation + 0.01, + ], + }, + ], + } + } + resp = await app_client.post("/search", json=params) + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 1 + + +@pytest.mark.asyncio +async def test_search_filter_extension_wildcard_cql2(app_client, ctx): + single_char = ctx.item["id"][:-1] + "_" + multi_char = ctx.item["id"][:-3] + "%" + + params = { + "filter": { + "op": "and", + "args": [ + {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}, + { + "op": "like", + "args": [ + {"property": "id"}, + single_char, + ], + }, + { + "op": "like", + "args": [ + {"property": "id"}, + multi_char, + ], + }, + ], + } + } + + resp = await app_client.post("/search", json=params) + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 1 + + +@pytest.mark.asyncio +async def test_search_filter_extension_wildcard_es(app_client, ctx): + single_char = ctx.item["id"][:-1] + "?" 
+ multi_char = ctx.item["id"][:-3] + "*" + + params = { + "filter": { + "op": "and", + "args": [ + {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}, + { + "op": "like", + "args": [ + {"property": "id"}, + single_char, + ], + }, + { + "op": "like", + "args": [ + {"property": "id"}, + multi_char, + ], + }, + ], + } + } + + resp = await app_client.post("/search", json=params) + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 1 + + +@pytest.mark.asyncio +async def test_search_filter_extension_escape_chars(app_client, ctx): + esc_chars = ( + ctx.item["properties"]["landsat:product_id"].replace("_", "\\_")[:-1] + "_" + ) + + params = { + "filter": { + "op": "and", + "args": [ + {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}, + { + "op": "like", + "args": [ + {"property": "properties.landsat:product_id"}, + esc_chars, + ], + }, + ], + } + } + + resp = await app_client.post("/search", json=params) + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 1 + + +@pytest.mark.asyncio +async def test_search_filter_extension_in(app_client, ctx): + product_id = ctx.item["properties"]["landsat:product_id"] + + params = { + "filter": { + "op": "and", + "args": [ + {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}, + { + "op": "in", + "args": [ + {"property": "properties.landsat:product_id"}, + [product_id], + ], + }, + ], + } + } + + resp = await app_client.post("/search", json=params) + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 1 + + +@pytest.mark.asyncio +async def test_search_filter_extension_in_no_list(app_client, ctx): + product_id = ctx.item["properties"]["landsat:product_id"] + + params = { + "filter": { + "op": "and", + "args": [ + {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}, + { + "op": "in", + "args": [ + {"property": "properties.landsat:product_id"}, + product_id, + ], + }, + ], + } + } + + resp = await app_client.post("/search", json=params) + + assert 
resp.status_code == 400 + assert resp.json() == { + "detail": f"Error with cql2_json filter: Arg {product_id} is not a list" + } + + +@pytest.mark.asyncio +async def test_search_filter_extension_between(app_client, ctx): + sun_elevation = ctx.item["properties"]["view:sun_elevation"] + + params = { + "filter": { + "op": "and", + "args": [ + {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}, + { + "op": "between", + "args": [ + {"property": "properties.view:sun_elevation"}, + sun_elevation - 0.01, + sun_elevation + 0.01, + ], + }, + ], + } + } + resp = await app_client.post("/search", json=params) + + assert resp.status_code == 200 + assert len(resp.json()["features"]) == 1 diff --git a/stac_fastapi/tests/resources/__init__.py b/stac_fastapi/tests/resources/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/stac_fastapi/tests/resources/test_collection.py b/stac_fastapi/tests/resources/test_collection.py new file mode 100644 index 00000000..9061ac1e --- /dev/null +++ b/stac_fastapi/tests/resources/test_collection.py @@ -0,0 +1,123 @@ +import uuid + +import pystac +import pytest + +from ..conftest import create_collection, delete_collections_and_items, refresh_indices + + +@pytest.mark.asyncio +async def test_create_and_delete_collection(app_client, load_test_data): + """Test creation and deletion of a collection""" + test_collection = load_test_data("test_collection.json") + test_collection["id"] = "test" + + resp = await app_client.post("/collections", json=test_collection) + assert resp.status_code == 200 + + resp = await app_client.delete(f"/collections/{test_collection['id']}") + assert resp.status_code == 204 + + +@pytest.mark.asyncio +async def test_create_collection_conflict(app_client, ctx): + """Test creation of a collection which already exists""" + # This collection ID is created in the fixture, so this should be a conflict + resp = await app_client.post("/collections", json=ctx.collection) + assert resp.status_code == 409 + + 
+@pytest.mark.asyncio +async def test_delete_missing_collection(app_client): + """Test deletion of a collection which does not exist""" + resp = await app_client.delete("/collections/missing-collection") + assert resp.status_code == 404 + + +@pytest.mark.asyncio +async def test_update_collection_already_exists(ctx, app_client): + """Test updating a collection which already exists""" + ctx.collection["keywords"].append("test") + resp = await app_client.put("/collections", json=ctx.collection) + assert resp.status_code == 200 + + resp = await app_client.get(f"/collections/{ctx.collection['id']}") + assert resp.status_code == 200 + resp_json = resp.json() + assert "test" in resp_json["keywords"] + + +@pytest.mark.asyncio +async def test_update_new_collection(app_client, load_test_data): + """Test updating a collection which does not exist (same as creation)""" + test_collection = load_test_data("test_collection.json") + test_collection["id"] = "new-test-collection" + + resp = await app_client.put("/collections", json=test_collection) + assert resp.status_code == 404 + + +@pytest.mark.asyncio +async def test_collection_not_found(app_client): + """Test read a collection which does not exist""" + resp = await app_client.get("/collections/does-not-exist") + assert resp.status_code == 404 + + +@pytest.mark.asyncio +async def test_returns_valid_collection(ctx, app_client): + """Test validates fetched collection with jsonschema""" + resp = await app_client.put("/collections", json=ctx.collection) + assert resp.status_code == 200 + + resp = await app_client.get(f"/collections/{ctx.collection['id']}") + assert resp.status_code == 200 + resp_json = resp.json() + + # Mock root to allow validation + mock_root = pystac.Catalog( + id="test", description="test desc", href="https://example.com" + ) + collection = pystac.Collection.from_dict( + resp_json, root=mock_root, preserve_dict=False + ) + collection.validate() + + +@pytest.mark.asyncio +async def 
test_pagination_collection(app_client, ctx, txn_client): + """Test collection pagination links""" + + # Clear existing collections if necessary + await delete_collections_and_items(txn_client) + + # Ingest 6 collections + ids = set() + for _ in range(6): + ctx.collection["id"] = str(uuid.uuid4()) + await create_collection(txn_client, collection=ctx.collection) + ids.add(ctx.collection["id"]) + + await refresh_indices(txn_client) + + # Paginate through all 6 collections with a limit of 1 + collection_ids = set() + page = await app_client.get("/collections", params={"limit": 1}) + while True: + page_data = page.json() + assert ( + len(page_data["collections"]) <= 1 + ) # Each page should have 1 or 0 collections + collection_ids.update(coll["id"] for coll in page_data["collections"]) + + next_link = next( + (link for link in page_data["links"] if link["rel"] == "next"), None + ) + if not next_link: + break # No more pages + + href = next_link["href"][len("http://test-server") :] + page = await app_client.get(href) + + # Confirm we have paginated through all collections + assert collection_ids == ids diff --git a/stac_fastapi/tests/resources/test_conformance.py b/stac_fastapi/tests/resources/test_conformance.py new file mode 100644 index 00000000..d93d8b81 --- /dev/null +++ b/stac_fastapi/tests/resources/test_conformance.py @@ -0,0 +1,72 @@ +import urllib.parse + +import pytest +import pytest_asyncio + + +@pytest_asyncio.fixture +async def response(app_client): + return await app_client.get("/") + + +@pytest.fixture +def response_json(response): + return response.json() + + +def get_link(landing_page, rel_type): + return next( + filter(lambda link: link["rel"] == rel_type, landing_page["links"]), None + ) + + +@pytest.mark.asyncio +async def test_landing_page_health(response): + """Test landing page""" + assert response.status_code == 200 + assert response.headers["content-type"] == "application/json" + + +# Parameters for test_landing_page_links test below. 
+# Each tuple has the following values (in this order): +# - Rel type of link to test +# - Expected MIME/Media Type +# - Expected relative path +link_tests = [ + ("root", "application/json", "/"), + ("conformance", "application/json", "/conformance"), + ("service-doc", "text/html", "/api.html"), + ("service-desc", "application/vnd.oai.openapi+json;version=3.0", "/api"), +] + + +@pytest.mark.asyncio +@pytest.mark.parametrize("rel_type,expected_media_type,expected_path", link_tests) +async def test_landing_page_links( + response_json, app_client, rel_type, expected_media_type, expected_path +): + link = get_link(response_json, rel_type) + + assert link is not None, f"Missing {rel_type} link in landing page" + assert link.get("type") == expected_media_type + + link_path = urllib.parse.urlsplit(link.get("href")).path + assert link_path == expected_path + + resp = await app_client.get(link_path) + assert resp.status_code == 200 + + +# This endpoint currently returns a 404 for empty result sets, but testing for this response +# code here seems meaningless since it would be the same as if the endpoint did not exist. Once +# https://github.com/stac-utils/stac-fastapi/pull/227 has been merged we can add this to the +# parameterized tests above. 
+@pytest.mark.asyncio +async def test_search_link(response_json): + search_link = get_link(response_json, "search") + + assert search_link is not None + assert search_link.get("type") == "application/geo+json" + + search_path = urllib.parse.urlsplit(search_link.get("href")).path + assert search_path == "/search" diff --git a/stac_fastapi/tests/resources/test_item.py b/stac_fastapi/tests/resources/test_item.py new file mode 100644 index 00000000..e62da8b8 --- /dev/null +++ b/stac_fastapi/tests/resources/test_item.py @@ -0,0 +1,804 @@ +import json +import os +import uuid +from copy import deepcopy +from datetime import datetime, timedelta +from random import randint +from urllib.parse import parse_qs, urlparse, urlsplit + +import ciso8601 +import pystac +import pytest +from geojson_pydantic.geometries import Polygon +from pystac.utils import datetime_to_str + +from stac_fastapi.core.core import CoreClient +from stac_fastapi.core.datetime_utils import now_to_rfc3339_str +from stac_fastapi.elasticsearch.database_logic import DatabaseLogic +from stac_fastapi.types.core import LandingPageMixin + +from ..conftest import create_item, refresh_indices + + +def rfc3339_str_to_datetime(s: str) -> datetime: + return ciso8601.parse_rfc3339(s) + + +database_logic = DatabaseLogic() + + +@pytest.mark.asyncio +async def test_create_and_delete_item(app_client, ctx, txn_client): + """Test creation and deletion of a single item (transactions extension)""" + + test_item = ctx.item + + resp = await app_client.get( + f"/collections/{test_item['collection']}/items/{test_item['id']}" + ) + assert resp.status_code == 200 + + resp = await app_client.delete( + f"/collections/{test_item['collection']}/items/{test_item['id']}" + ) + assert resp.status_code == 204 + + await refresh_indices(txn_client) + + resp = await app_client.get( + f"/collections/{test_item['collection']}/items/{test_item['id']}" + ) + assert resp.status_code == 404 + + +@pytest.mark.asyncio +async def 
test_create_item_conflict(app_client, ctx): + """Test creation of an item which already exists (transactions extension)""" + + test_item = ctx.item + + resp = await app_client.post( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 409 + + +@pytest.mark.asyncio +async def test_delete_missing_item(app_client, load_test_data): + """Test deletion of an item which does not exist (transactions extension)""" + test_item = load_test_data("test_item.json") + resp = await app_client.delete( + f"/collections/{test_item['collection']}/items/hijosh" + ) + assert resp.status_code == 404 + + +@pytest.mark.asyncio +async def test_create_item_missing_collection(app_client, ctx): + """Test creation of an item without a parent collection (transactions extension)""" + ctx.item["collection"] = "stac_is_cool" + resp = await app_client.post( + f"/collections/{ctx.item['collection']}/items", json=ctx.item + ) + assert resp.status_code == 404 + + +@pytest.mark.asyncio +async def test_create_uppercase_collection_with_item(app_client, ctx, txn_client): + """Test creation of a collection and item with uppercase collection ID (transactions extension)""" + collection_id = "UPPERCASE" + ctx.item["collection"] = collection_id + ctx.collection["id"] = collection_id + resp = await app_client.post("/collections", json=ctx.collection) + assert resp.status_code == 200 + await refresh_indices(txn_client) + resp = await app_client.post(f"/collections/{collection_id}/items", json=ctx.item) + assert resp.status_code == 200 + + +@pytest.mark.asyncio +async def test_update_item_already_exists(app_client, ctx): + """Test updating an item which already exists (transactions extension)""" + + assert ctx.item["properties"]["gsd"] != 16 + ctx.item["properties"]["gsd"] = 16 + await app_client.put( + f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}", json=ctx.item + ) + resp = await app_client.get( + 
f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}" + ) + updated_item = resp.json() + assert updated_item["properties"]["gsd"] == 16 + + await app_client.delete( + f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}" + ) + + +@pytest.mark.asyncio +async def test_update_new_item(app_client, ctx): + """Test updating an item which does not exist (transactions extension)""" + test_item = ctx.item + test_item["id"] = "a" + + resp = await app_client.put( + f"/collections/{test_item['collection']}/items/{test_item['id']}", + json=test_item, + ) + assert resp.status_code == 404 + + +@pytest.mark.asyncio +async def test_update_item_missing_collection(app_client, ctx): + """Test updating an item without a parent collection (transactions extension)""" + # Try to update collection of the item + ctx.item["collection"] = "stac_is_cool" + resp = await app_client.put( + f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}", json=ctx.item + ) + assert resp.status_code == 404 + + +@pytest.mark.asyncio +async def test_update_item_geometry(app_client, ctx): + ctx.item["id"] = "update_test_item_1" + + # Create the item + resp = await app_client.post( + f"/collections/{ctx.item['collection']}/items", json=ctx.item + ) + assert resp.status_code == 200 + + new_coordinates = [ + [ + [142.15052873427666, -33.82243006904891], + [140.1000346138806, -34.257132625788756], + [139.5776607193635, -32.514709769700254], + [141.6262528041627, -32.08081674221862], + [142.15052873427666, -33.82243006904891], + ] + ] + + # Update the geometry of the item + ctx.item["geometry"]["coordinates"] = new_coordinates + resp = await app_client.put( + f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}", json=ctx.item + ) + assert resp.status_code == 200 + + # Fetch the updated item + resp = await app_client.get( + f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}" + ) + assert resp.status_code == 200 + assert resp.json()["geometry"]["coordinates"] == 
new_coordinates + + +@pytest.mark.asyncio +async def test_get_item(app_client, ctx): + """Test read an item by id (core)""" + get_item = await app_client.get( + f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}" + ) + assert get_item.status_code == 200 + + +@pytest.mark.asyncio +async def test_returns_valid_item(app_client, ctx): + """Test validates fetched item with jsonschema""" + test_item = ctx.item + get_item = await app_client.get( + f"/collections/{test_item['collection']}/items/{test_item['id']}" + ) + assert get_item.status_code == 200 + item_dict = get_item.json() + # Mock root to allow validation + mock_root = pystac.Catalog( + id="test", description="test desc", href="https://example.com" + ) + item = pystac.Item.from_dict(item_dict, preserve_dict=False, root=mock_root) + item.validate() + + +@pytest.mark.asyncio +async def test_get_item_collection(app_client, ctx, txn_client): + """Test read an item collection (core)""" + item_count = randint(1, 4) + + for idx in range(item_count): + ctx.item["id"] = f'{ctx.item["id"]}{idx}' + await create_item(txn_client, ctx.item) + + resp = await app_client.get(f"/collections/{ctx.item['collection']}/items") + assert resp.status_code == 200 + + item_collection = resp.json() + if matched := item_collection["context"].get("matched"): + assert matched == item_count + 1 + + +@pytest.mark.asyncio +async def test_item_collection_filter_bbox(app_client, ctx): + item = ctx.item + collection = item["collection"] + + bbox = "100,-50,170,-20" + resp = await app_client.get( + f"/collections/{collection}/items", params={"bbox": bbox} + ) + assert resp.status_code == 200 + resp_json = resp.json() + assert len(resp_json["features"]) == 1 + + bbox = "1,2,3,4" + resp = await app_client.get( + f"/collections/{collection}/items", params={"bbox": bbox} + ) + assert resp.status_code == 200 + resp_json = resp.json() + assert len(resp_json["features"]) == 0 + + +@pytest.mark.asyncio +async def 
test_item_collection_filter_datetime(app_client, ctx): + item = ctx.item + collection = item["collection"] + + datetime_range = "2020-01-01T00:00:00.00Z/.." + resp = await app_client.get( + f"/collections/{collection}/items", params={"datetime": datetime_range} + ) + assert resp.status_code == 200 + resp_json = resp.json() + assert len(resp_json["features"]) == 1 + + datetime_range = "2018-01-01T00:00:00.00Z/2019-01-01T00:00:00.00Z" + resp = await app_client.get( + f"/collections/{collection}/items", params={"datetime": datetime_range} + ) + assert resp.status_code == 200 + resp_json = resp.json() + assert len(resp_json["features"]) == 0 + + +@pytest.mark.asyncio +@pytest.mark.skip(reason="Pagination extension not implemented") +async def test_pagination(app_client, load_test_data): + """Test item collection pagination (paging extension)""" + item_count = 10 + test_item = load_test_data("test_item.json") + + for idx in range(item_count): + _test_item = deepcopy(test_item) + _test_item["id"] = test_item["id"] + str(idx) + resp = await app_client.post( + f"/collections/{test_item['collection']}/items", json=_test_item + ) + assert resp.status_code == 200 + + resp = await app_client.get( + f"/collections/{test_item['collection']}/items", params={"limit": 3} + ) + assert resp.status_code == 200 + first_page = resp.json() + assert first_page["context"]["returned"] == 3 + + url_components = urlsplit(first_page["links"][0]["href"]) + resp = await app_client.get(f"{url_components.path}?{url_components.query}") + assert resp.status_code == 200 + second_page = resp.json() + assert second_page["context"]["returned"] == 3 + + +@pytest.mark.asyncio +async def test_item_timestamps(app_client, ctx): + """Test created and updated timestamps (common metadata)""" + # start_time = now_to_rfc3339_str() + + created_dt = ctx.item["properties"]["created"] + + # todo, check lower bound + # assert start_time < created_dt < now_to_rfc3339_str() + assert created_dt < now_to_rfc3339_str() + + 
# Confirm `updated` timestamp + ctx.item["properties"]["proj:epsg"] = 4326 + resp = await app_client.put( + f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}", + json=dict(ctx.item), + ) + assert resp.status_code == 200 + updated_item = resp.json() + + # Created shouldn't change on update + assert ctx.item["properties"]["created"] == updated_item["properties"]["created"] + assert updated_item["properties"]["updated"] > created_dt + + await app_client.delete( + f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}" + ) + + +@pytest.mark.asyncio +async def test_item_search_by_id_post(app_client, ctx, txn_client): + """Test POST search by item id (core)""" + ids = ["test1", "test2", "test3"] + for _id in ids: + ctx.item["id"] = _id + await create_item(txn_client, ctx.item) + + params = {"collections": [ctx.item["collection"]], "ids": ids} + resp = await app_client.post("/search", json=params) + assert resp.status_code == 200 + resp_json = resp.json() + assert len(resp_json["features"]) == len(ids) + assert set([feat["id"] for feat in resp_json["features"]]) == set(ids) + + +@pytest.mark.asyncio +async def test_item_search_spatial_query_post(app_client, ctx): + """Test POST search with spatial query (core)""" + test_item = ctx.item + + params = { + "collections": [test_item["collection"]], + "intersects": test_item["geometry"], + } + resp = await app_client.post("/search", json=params) + assert resp.status_code == 200 + resp_json = resp.json() + assert resp_json["features"][0]["id"] == test_item["id"] + + +@pytest.mark.asyncio +async def test_item_search_temporal_query_post(app_client, ctx): + """Test POST search with single-tailed spatio-temporal query (core)""" + + test_item = ctx.item + + item_date = rfc3339_str_to_datetime(test_item["properties"]["datetime"]) + item_date = item_date + timedelta(seconds=1) + + params = { + "collections": [test_item["collection"]], + "intersects": test_item["geometry"], + "datetime": 
f"../{datetime_to_str(item_date)}", + } + resp = await app_client.post("/search", json=params) + resp_json = resp.json() + assert resp_json["features"][0]["id"] == test_item["id"] + + +@pytest.mark.asyncio +async def test_item_search_temporal_window_post(app_client, ctx): + """Test POST search with two-tailed spatio-temporal query (core)""" + test_item = ctx.item + + item_date = rfc3339_str_to_datetime(test_item["properties"]["datetime"]) + item_date_before = item_date - timedelta(seconds=1) + item_date_after = item_date + timedelta(seconds=1) + + params = { + "collections": [test_item["collection"]], + "intersects": test_item["geometry"], + "datetime": f"{datetime_to_str(item_date_before)}/{datetime_to_str(item_date_after)}", + } + resp = await app_client.post("/search", json=params) + resp_json = resp.json() + assert resp_json["features"][0]["id"] == test_item["id"] + + +@pytest.mark.asyncio +@pytest.mark.skip(reason="KeyError: 'features") +async def test_item_search_temporal_open_window(app_client, ctx): + """Test POST search with open spatio-temporal query (core)""" + test_item = ctx.item + params = { + "collections": [test_item["collection"]], + "intersects": test_item["geometry"], + "datetime": "../..", + } + resp = await app_client.post("/search", json=params) + resp_json = resp.json() + assert resp_json["features"][0]["id"] == test_item["id"] + + +@pytest.mark.asyncio +async def test_item_search_by_id_get(app_client, ctx, txn_client): + """Test GET search by item id (core)""" + ids = ["test1", "test2", "test3"] + for _id in ids: + ctx.item["id"] = _id + await create_item(txn_client, ctx.item) + + params = {"collections": ctx.item["collection"], "ids": ",".join(ids)} + resp = await app_client.get("/search", params=params) + assert resp.status_code == 200 + resp_json = resp.json() + assert len(resp_json["features"]) == len(ids) + assert set([feat["id"] for feat in resp_json["features"]]) == set(ids) + + +@pytest.mark.asyncio +async def 
test_item_search_bbox_get(app_client, ctx): + """Test GET search with spatial query (core)""" + params = { + "collections": ctx.item["collection"], + "bbox": ",".join([str(coord) for coord in ctx.item["bbox"]]), + } + resp = await app_client.get("/search", params=params) + assert resp.status_code == 200 + resp_json = resp.json() + assert resp_json["features"][0]["id"] == ctx.item["id"] + + +@pytest.mark.asyncio +async def test_item_search_get_without_collections(app_client, ctx): + """Test GET search without specifying collections""" + + params = { + "bbox": ",".join([str(coord) for coord in ctx.item["bbox"]]), + } + resp = await app_client.get("/search", params=params) + assert resp.status_code == 200 + + +@pytest.mark.asyncio +async def test_item_search_get_with_non_existent_collections(app_client, ctx): + """Test GET search with non-existent collections""" + + params = {"collections": "non-existent-collection,or-this-one"} + resp = await app_client.get("/search", params=params) + assert resp.status_code == 200 + + +@pytest.mark.asyncio +async def test_item_search_temporal_window_get(app_client, ctx): + """Test GET search with spatio-temporal query (core)""" + test_item = ctx.item + item_date = rfc3339_str_to_datetime(test_item["properties"]["datetime"]) + item_date_before = item_date - timedelta(seconds=1) + item_date_after = item_date + timedelta(seconds=1) + + params = { + "collections": test_item["collection"], + "bbox": ",".join([str(coord) for coord in test_item["bbox"]]), + "datetime": f"{datetime_to_str(item_date_before)}/{datetime_to_str(item_date_after)}", + } + resp = await app_client.get("/search", params=params) + resp_json = resp.json() + assert resp_json["features"][0]["id"] == test_item["id"] + + +@pytest.mark.asyncio +async def test_item_search_post_without_collection(app_client, ctx): + """Test POST search without specifying a collection""" + test_item = ctx.item + params = { + "bbox": test_item["bbox"], + } + resp = await 
app_client.post("/search", json=params) + assert resp.status_code == 200 + + +@pytest.mark.asyncio +async def test_item_search_properties_es(app_client, ctx): + """Test POST search with JSONB query (query extension)""" + + test_item = ctx.item + # EPSG is a JSONB key + params = {"query": {"proj:epsg": {"gt": test_item["properties"]["proj:epsg"] + 1}}} + resp = await app_client.post("/search", json=params) + assert resp.status_code == 200 + resp_json = resp.json() + assert len(resp_json["features"]) == 0 + + +@pytest.mark.asyncio +async def test_item_search_properties_field(app_client): + """Test POST search indexed field with query (query extension)""" + + # Orientation is an indexed field + params = {"query": {"orientation": {"eq": "south"}}} + resp = await app_client.post("/search", json=params) + assert resp.status_code == 200 + resp_json = resp.json() + assert len(resp_json["features"]) == 0 + + +@pytest.mark.asyncio +async def test_item_search_get_query_extension(app_client, ctx): + """Test GET search with JSONB query (query extension)""" + + test_item = ctx.item + + params = { + "collections": [test_item["collection"]], + "query": json.dumps( + {"proj:epsg": {"gt": test_item["properties"]["proj:epsg"] + 1}} + ), + } + resp = await app_client.get("/search", params=params) + assert resp.json()["context"]["returned"] == 0 + + params["query"] = json.dumps( + {"proj:epsg": {"eq": test_item["properties"]["proj:epsg"]}} + ) + resp = await app_client.get("/search", params=params) + resp_json = resp.json() + assert resp_json["context"]["returned"] == 1 + assert ( + resp_json["features"][0]["properties"]["proj:epsg"] + == test_item["properties"]["proj:epsg"] + ) + + +@pytest.mark.asyncio +async def test_get_missing_item_collection(app_client): + """Test reading a collection which does not exist""" + resp = await app_client.get("/collections/invalid-collection/items") + assert resp.status_code == 404 + + +@pytest.mark.asyncio +async def 
test_pagination_item_collection(app_client, ctx, txn_client): +    """Test item collection pagination links (paging extension)""" +    ids = [ctx.item["id"]] + +    # Ingest 5 items +    for _ in range(5): +        ctx.item["id"] = str(uuid.uuid4()) +        await create_item(txn_client, item=ctx.item) +        ids.append(ctx.item["id"]) + +    # Paginate through all 6 items with a limit of 1 (expecting 7 requests) +    page = await app_client.get( +        f"/collections/{ctx.item['collection']}/items", params={"limit": 1} +    ) + +    item_ids = [] +    idx = 0 +    for idx in range(100): +        page_data = page.json() +        next_link = list(filter(lambda link: link["rel"] == "next", page_data["links"])) +        if not next_link: +            assert not page_data["features"] +            break + +        assert len(page_data["features"]) == 1 +        item_ids.append(page_data["features"][0]["id"]) + +        href = next_link[0]["href"][len("http://test-server") :] +        page = await app_client.get(href) + +    assert idx == len(ids) + +    # Confirm we have paginated through all items +    assert not set(item_ids) - set(ids) + + +@pytest.mark.asyncio +async def test_pagination_post(app_client, ctx, txn_client): +    """Test POST pagination (paging extension)""" +    ids = [ctx.item["id"]] + +    # Ingest 5 items +    for _ in range(5): +        ctx.item["id"] = str(uuid.uuid4()) +        await create_item(txn_client, ctx.item) +        ids.append(ctx.item["id"]) + +    # Paginate through all 6 items with a limit of 1 (expecting 7 requests) +    request_body = {"ids": ids, "limit": 1} +    page = await app_client.post("/search", json=request_body) +    idx = 0 +    item_ids = [] +    for _ in range(100): +        idx += 1 +        page_data = page.json() +        next_link = list(filter(lambda link: link["rel"] == "next", page_data["links"])) +        if not next_link: +            break + +        item_ids.append(page_data["features"][0]["id"]) + +        # Merge request bodies +        request_body.update(next_link[0]["body"]) +        page = await app_client.post("/search", json=request_body) + +    # Our limit is 1, so we expect len(ids) + 1 requests before we run out of pages +    assert idx == 
len(ids) + 1 + +    # Confirm we have paginated through all items +    assert not set(item_ids) - set(ids) + + +@pytest.mark.asyncio +async def test_pagination_token_idempotent(app_client, ctx, txn_client): +    """Test that pagination tokens are idempotent (paging extension)""" +    ids = [ctx.item["id"]] + +    # Ingest 5 items +    for _ in range(5): +        ctx.item["id"] = str(uuid.uuid4()) +        await create_item(txn_client, ctx.item) +        ids.append(ctx.item["id"]) + +    page = await app_client.get("/search", params={"ids": ",".join(ids), "limit": 3}) +    page_data = page.json() +    next_link = list(filter(lambda link: link["rel"] == "next", page_data["links"])) + +    # Confirm token is idempotent +    resp1 = await app_client.get( +        "/search", params=parse_qs(urlparse(next_link[0]["href"]).query) +    ) +    resp2 = await app_client.get( +        "/search", params=parse_qs(urlparse(next_link[0]["href"]).query) +    ) +    resp1_data = resp1.json() +    resp2_data = resp2.json() + +    # Two different requests with the same pagination token should return the same items +    assert [item["id"] for item in resp1_data["features"]] == [ +        item["id"] for item in resp2_data["features"] +    ] + + +@pytest.mark.asyncio +async def test_field_extension_get_includes(app_client, ctx): +    """Test GET search with included fields (fields extension)""" +    test_item = ctx.item +    params = { +        "ids": [test_item["id"]], +        "fields": "+properties.proj:epsg,+properties.gsd", +    } +    resp = await app_client.get("/search", params=params) +    feat_properties = resp.json()["features"][0]["properties"] +    assert not set(feat_properties) - {"proj:epsg", "gsd", "datetime"} + + +@pytest.mark.asyncio +async def test_field_extension_get_excludes(app_client, ctx): +    """Test GET search with excluded fields (fields extension)""" +    test_item = ctx.item +    params = { +        "ids": [test_item["id"]], +        "fields": "-properties.proj:epsg,-properties.gsd", +    } +    resp = await app_client.get("/search", params=params) +    resp_json = resp.json() +    assert "proj:epsg" not in 
resp_json["features"][0]["properties"].keys() + assert "gsd" not in resp_json["features"][0]["properties"].keys() + + +@pytest.mark.asyncio +async def test_field_extension_post(app_client, ctx): + """Test POST search with included and excluded fields (fields extension)""" + test_item = ctx.item + body = { + "ids": [test_item["id"]], + "fields": { + "exclude": ["assets.B1"], + "include": ["properties.eo:cloud_cover", "properties.orientation"], + }, + } + + resp = await app_client.post("/search", json=body) + resp_json = resp.json() + assert "B1" not in resp_json["features"][0]["assets"].keys() + assert not set(resp_json["features"][0]["properties"]) - { + "orientation", + "eo:cloud_cover", + "datetime", + } + + +@pytest.mark.asyncio +async def test_field_extension_exclude_and_include(app_client, ctx): + """Test POST search including/excluding same field (fields extension)""" + test_item = ctx.item + body = { + "ids": [test_item["id"]], + "fields": { + "exclude": ["properties.eo:cloud_cover"], + "include": ["properties.eo:cloud_cover"], + }, + } + + resp = await app_client.post("/search", json=body) + resp_json = resp.json() + assert "eo:cloud_cover" not in resp_json["features"][0]["properties"] + + +@pytest.mark.asyncio +async def test_field_extension_exclude_default_includes(app_client, ctx): + """Test POST search excluding a forbidden field (fields extension)""" + test_item = ctx.item + body = {"ids": [test_item["id"]], "fields": {"exclude": ["gsd"]}} + + resp = await app_client.post("/search", json=body) + resp_json = resp.json() + assert "gsd" not in resp_json["features"][0] + + +@pytest.mark.asyncio +async def test_search_intersects_and_bbox(app_client): + """Test POST search intersects and bbox are mutually exclusive (core)""" + bbox = [-118, 34, -117, 35] + geoj = Polygon.from_bounds(*bbox).dict(exclude_none=True) + params = {"bbox": bbox, "intersects": geoj} + resp = await app_client.post("/search", json=params) + assert resp.status_code == 400 + + 
+@pytest.mark.asyncio +async def test_get_missing_item(app_client, load_test_data): + """Test read item which does not exist (transactions extension)""" + test_coll = load_test_data("test_collection.json") + resp = await app_client.get(f"/collections/{test_coll['id']}/items/invalid-item") + assert resp.status_code == 404 + + +@pytest.mark.asyncio +@pytest.mark.skip(reason="invalid queries not implemented") +async def test_search_invalid_query_field(app_client): + body = {"query": {"gsd": {"lt": 100}, "invalid-field": {"eq": 50}}} + resp = await app_client.post("/search", json=body) + assert resp.status_code == 400 + + +@pytest.mark.asyncio +async def test_search_bbox_errors(app_client): + body = {"query": {"bbox": [0]}} + resp = await app_client.post("/search", json=body) + assert resp.status_code == 400 + + body = {"query": {"bbox": [100.0, 0.0, 0.0, 105.0, 1.0, 1.0]}} + resp = await app_client.post("/search", json=body) + assert resp.status_code == 400 + + params = {"bbox": "100.0,0.0,0.0,105.0"} + resp = await app_client.get("/search", params=params) + assert resp.status_code == 400 + + +@pytest.mark.asyncio +async def test_conformance_classes_configurable(): + """Test conformance class configurability""" + landing = LandingPageMixin() + landing_page = landing._landing_page( + base_url="http://test/test", + conformance_classes=["this is a test"], + extension_schemas=[], + ) + assert landing_page["conformsTo"][0] == "this is a test" + + # Update environment to avoid key error on client instantiation + os.environ["READER_CONN_STRING"] = "testing" + os.environ["WRITER_CONN_STRING"] = "testing" + client = CoreClient( + database=database_logic, base_conformance_classes=["this is a test"] + ) + assert client.conformance_classes()[0] == "this is a test" + + +@pytest.mark.asyncio +async def test_search_datetime_validation_errors(app_client): + bad_datetimes = [ + "37-01-01T12:00:27.87Z", + "1985-13-12T23:20:50.52Z", + "1985-12-32T23:20:50.52Z", + 
"1985-12-01T25:20:50.52Z", + "1985-12-01T00:60:50.52Z", + "1985-12-01T00:06:61.52Z", + "1990-12-31T23:59:61Z", + "1986-04-12T23:20:50.52Z/1985-04-12T23:20:50.52Z", + ] + for dt in bad_datetimes: + body = {"query": {"datetime": dt}} + resp = await app_client.post("/search", json=body) + assert resp.status_code == 400 + + resp = await app_client.get("/search?datetime={}".format(dt)) + assert resp.status_code == 400 diff --git a/stac_fastapi/tests/resources/test_mgmt.py b/stac_fastapi/tests/resources/test_mgmt.py new file mode 100644 index 00000000..2b7d9728 --- /dev/null +++ b/stac_fastapi/tests/resources/test_mgmt.py @@ -0,0 +1,13 @@ +import pytest + + +@pytest.mark.asyncio +async def test_ping_no_param(app_client): + """ + Test ping endpoint with a mocked client. + Args: + app_client (TestClient): mocked client fixture + """ + res = await app_client.get("/_mgmt/ping") + assert res.status_code == 200 + assert res.json() == {"message": "PONG"} From 9f54c9b09612f9002acc5f86544ade16a1f2aa33 Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Mon, 5 Feb 2024 19:44:25 +0800 Subject: [PATCH 28/33] delete old test files --- .../stac_fastapi/elasticsearch/app.py | 2 +- stac_fastapi/elasticsearch/tests/__init__.py | 0 .../elasticsearch/tests/api/__init__.py | 0 .../elasticsearch/tests/api/test_api.py | 447 ---------- .../elasticsearch/tests/clients/__init__.py | 0 .../tests/clients/test_elasticsearch.py | 312 ------- stac_fastapi/elasticsearch/tests/conftest.py | 211 ----- .../tests/data/test_collection.json | 99 --- .../elasticsearch/tests/data/test_item.json | 505 ----------- .../tests/extensions/cql2/example01.json | 9 - .../tests/extensions/cql2/example04.json | 32 - .../tests/extensions/cql2/example05a.json | 23 - .../tests/extensions/cql2/example06b.json | 41 - .../tests/extensions/cql2/example08.json | 79 -- .../tests/extensions/cql2/example09.json | 9 - .../tests/extensions/cql2/example1.json | 39 - .../tests/extensions/cql2/example10.json | 9 - 
.../tests/extensions/cql2/example14.json | 9 - .../tests/extensions/cql2/example15.json | 23 - .../tests/extensions/cql2/example17.json | 37 - .../tests/extensions/cql2/example18.json | 28 - .../tests/extensions/cql2/example19.json | 9 - .../tests/extensions/cql2/example20.json | 10 - .../tests/extensions/cql2/example21.json | 33 - .../tests/extensions/cql2/example22.json | 13 - .../tests/extensions/test_filter.py | 402 --------- .../elasticsearch/tests/resources/__init__.py | 0 .../tests/resources/test_collection.py | 123 --- .../tests/resources/test_conformance.py | 72 -- .../tests/resources/test_item.py | 804 ------------------ .../tests/resources/test_mgmt.py | 13 - stac_fastapi/opensearch/tests/__init__.py | 0 stac_fastapi/opensearch/tests/api/__init__.py | 0 stac_fastapi/opensearch/tests/api/test_api.py | 447 ---------- .../opensearch/tests/clients/__init__.py | 0 .../tests/clients/test_elasticsearch.py | 312 ------- stac_fastapi/opensearch/tests/conftest.py | 208 ----- .../tests/data/test_collection.json | 99 --- .../opensearch/tests/data/test_item.json | 505 ----------- .../tests/extensions/cql2/example01.json | 9 - .../tests/extensions/cql2/example04.json | 32 - .../tests/extensions/cql2/example05a.json | 23 - .../tests/extensions/cql2/example06b.json | 41 - .../tests/extensions/cql2/example08.json | 79 -- .../tests/extensions/cql2/example09.json | 9 - .../tests/extensions/cql2/example1.json | 39 - .../tests/extensions/cql2/example10.json | 9 - .../tests/extensions/cql2/example14.json | 9 - .../tests/extensions/cql2/example15.json | 23 - .../tests/extensions/cql2/example17.json | 37 - .../tests/extensions/cql2/example18.json | 28 - .../tests/extensions/cql2/example19.json | 9 - .../tests/extensions/cql2/example20.json | 10 - .../tests/extensions/cql2/example21.json | 33 - .../tests/extensions/cql2/example22.json | 13 - .../tests/extensions/test_filter.py | 402 --------- .../opensearch/tests/resources/__init__.py | 0 .../tests/resources/test_collection.py | 
123 --- .../tests/resources/test_conformance.py | 72 -- .../opensearch/tests/resources/test_item.py | 804 ------------------ .../opensearch/tests/resources/test_mgmt.py | 13 - 61 files changed, 1 insertion(+), 6780 deletions(-) delete mode 100644 stac_fastapi/elasticsearch/tests/__init__.py delete mode 100644 stac_fastapi/elasticsearch/tests/api/__init__.py delete mode 100644 stac_fastapi/elasticsearch/tests/api/test_api.py delete mode 100644 stac_fastapi/elasticsearch/tests/clients/__init__.py delete mode 100644 stac_fastapi/elasticsearch/tests/clients/test_elasticsearch.py delete mode 100644 stac_fastapi/elasticsearch/tests/conftest.py delete mode 100644 stac_fastapi/elasticsearch/tests/data/test_collection.json delete mode 100644 stac_fastapi/elasticsearch/tests/data/test_item.json delete mode 100644 stac_fastapi/elasticsearch/tests/extensions/cql2/example01.json delete mode 100644 stac_fastapi/elasticsearch/tests/extensions/cql2/example04.json delete mode 100644 stac_fastapi/elasticsearch/tests/extensions/cql2/example05a.json delete mode 100644 stac_fastapi/elasticsearch/tests/extensions/cql2/example06b.json delete mode 100644 stac_fastapi/elasticsearch/tests/extensions/cql2/example08.json delete mode 100644 stac_fastapi/elasticsearch/tests/extensions/cql2/example09.json delete mode 100644 stac_fastapi/elasticsearch/tests/extensions/cql2/example1.json delete mode 100644 stac_fastapi/elasticsearch/tests/extensions/cql2/example10.json delete mode 100644 stac_fastapi/elasticsearch/tests/extensions/cql2/example14.json delete mode 100644 stac_fastapi/elasticsearch/tests/extensions/cql2/example15.json delete mode 100644 stac_fastapi/elasticsearch/tests/extensions/cql2/example17.json delete mode 100644 stac_fastapi/elasticsearch/tests/extensions/cql2/example18.json delete mode 100755 stac_fastapi/elasticsearch/tests/extensions/cql2/example19.json delete mode 100755 stac_fastapi/elasticsearch/tests/extensions/cql2/example20.json delete mode 100644 
stac_fastapi/elasticsearch/tests/extensions/cql2/example21.json delete mode 100644 stac_fastapi/elasticsearch/tests/extensions/cql2/example22.json delete mode 100644 stac_fastapi/elasticsearch/tests/extensions/test_filter.py delete mode 100644 stac_fastapi/elasticsearch/tests/resources/__init__.py delete mode 100644 stac_fastapi/elasticsearch/tests/resources/test_collection.py delete mode 100644 stac_fastapi/elasticsearch/tests/resources/test_conformance.py delete mode 100644 stac_fastapi/elasticsearch/tests/resources/test_item.py delete mode 100644 stac_fastapi/elasticsearch/tests/resources/test_mgmt.py delete mode 100644 stac_fastapi/opensearch/tests/__init__.py delete mode 100644 stac_fastapi/opensearch/tests/api/__init__.py delete mode 100644 stac_fastapi/opensearch/tests/api/test_api.py delete mode 100644 stac_fastapi/opensearch/tests/clients/__init__.py delete mode 100644 stac_fastapi/opensearch/tests/clients/test_elasticsearch.py delete mode 100644 stac_fastapi/opensearch/tests/conftest.py delete mode 100644 stac_fastapi/opensearch/tests/data/test_collection.json delete mode 100644 stac_fastapi/opensearch/tests/data/test_item.json delete mode 100644 stac_fastapi/opensearch/tests/extensions/cql2/example01.json delete mode 100644 stac_fastapi/opensearch/tests/extensions/cql2/example04.json delete mode 100644 stac_fastapi/opensearch/tests/extensions/cql2/example05a.json delete mode 100644 stac_fastapi/opensearch/tests/extensions/cql2/example06b.json delete mode 100644 stac_fastapi/opensearch/tests/extensions/cql2/example08.json delete mode 100644 stac_fastapi/opensearch/tests/extensions/cql2/example09.json delete mode 100644 stac_fastapi/opensearch/tests/extensions/cql2/example1.json delete mode 100644 stac_fastapi/opensearch/tests/extensions/cql2/example10.json delete mode 100644 stac_fastapi/opensearch/tests/extensions/cql2/example14.json delete mode 100644 stac_fastapi/opensearch/tests/extensions/cql2/example15.json delete mode 100644 
stac_fastapi/opensearch/tests/extensions/cql2/example17.json delete mode 100644 stac_fastapi/opensearch/tests/extensions/cql2/example18.json delete mode 100755 stac_fastapi/opensearch/tests/extensions/cql2/example19.json delete mode 100755 stac_fastapi/opensearch/tests/extensions/cql2/example20.json delete mode 100644 stac_fastapi/opensearch/tests/extensions/cql2/example21.json delete mode 100644 stac_fastapi/opensearch/tests/extensions/cql2/example22.json delete mode 100644 stac_fastapi/opensearch/tests/extensions/test_filter.py delete mode 100644 stac_fastapi/opensearch/tests/resources/__init__.py delete mode 100644 stac_fastapi/opensearch/tests/resources/test_collection.py delete mode 100644 stac_fastapi/opensearch/tests/resources/test_conformance.py delete mode 100644 stac_fastapi/opensearch/tests/resources/test_item.py delete mode 100644 stac_fastapi/opensearch/tests/resources/test_mgmt.py diff --git a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/app.py b/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/app.py index 75d60684..0d896534 100644 --- a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/app.py +++ b/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/app.py @@ -9,12 +9,12 @@ TransactionsClient, ) from stac_fastapi.core.extensions import QueryExtension +from stac_fastapi.core.session import Session from stac_fastapi.elasticsearch.config import ElasticsearchSettings from stac_fastapi.elasticsearch.database_logic import ( DatabaseLogic, create_collection_index, ) -from stac_fastapi.elasticsearch.session import Session from stac_fastapi.extensions.core import ( ContextExtension, FieldsExtension, diff --git a/stac_fastapi/elasticsearch/tests/__init__.py b/stac_fastapi/elasticsearch/tests/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/stac_fastapi/elasticsearch/tests/api/__init__.py b/stac_fastapi/elasticsearch/tests/api/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git 
a/stac_fastapi/elasticsearch/tests/api/test_api.py b/stac_fastapi/elasticsearch/tests/api/test_api.py deleted file mode 100644 index 74f0bb55..00000000 --- a/stac_fastapi/elasticsearch/tests/api/test_api.py +++ /dev/null @@ -1,447 +0,0 @@ -import copy -import uuid -from datetime import datetime, timedelta - -import pytest - -from ..conftest import create_collection, create_item - -ROUTES = { - "GET /_mgmt/ping", - "GET /docs/oauth2-redirect", - "HEAD /docs/oauth2-redirect", - "GET /", - "GET /conformance", - "GET /api", - "GET /api.html", - "HEAD /api", - "HEAD /api.html", - "GET /queryables", - "GET /collections", - "GET /collections/{collection_id}", - "GET /collections/{collection_id}/queryables", - "GET /collections/{collection_id}/items", - "GET /collections/{collection_id}/items/{item_id}", - "GET /search", - "POST /search", - "DELETE /collections/{collection_id}", - "DELETE /collections/{collection_id}/items/{item_id}", - "POST /collections", - "POST /collections/{collection_id}/items", - "PUT /collections", - "PUT /collections/{collection_id}/items/{item_id}", -} - - -@pytest.mark.asyncio -async def test_post_search_content_type(app_client, ctx): - params = {"limit": 1} - resp = await app_client.post("/search", json=params) - assert resp.headers["content-type"] == "application/geo+json" - - -@pytest.mark.asyncio -async def test_get_search_content_type(app_client, ctx): - resp = await app_client.get("/search") - assert resp.headers["content-type"] == "application/geo+json" - - -@pytest.mark.asyncio -async def test_api_headers(app_client): - resp = await app_client.get("/api") - assert ( - resp.headers["content-type"] == "application/vnd.oai.openapi+json;version=3.0" - ) - assert resp.status_code == 200 - - -@pytest.mark.asyncio -async def test_router(app): - api_routes = set([f"{list(route.methods)[0]} {route.path}" for route in app.routes]) - assert len(api_routes - ROUTES) == 0 - - -@pytest.mark.asyncio -async def test_app_transaction_extension(app_client, 
ctx): - item = copy.deepcopy(ctx.item) - item["id"] = str(uuid.uuid4()) - resp = await app_client.post(f"/collections/{item['collection']}/items", json=item) - assert resp.status_code == 200 - - await app_client.delete(f"/collections/{item['collection']}/items/{item['id']}") - - -@pytest.mark.asyncio -async def test_app_search_response(app_client, ctx): - resp = await app_client.get("/search", params={"ids": ["test-item"]}) - assert resp.status_code == 200 - resp_json = resp.json() - - assert resp_json.get("type") == "FeatureCollection" - # stac_version and stac_extensions were removed in v1.0.0-beta.3 - assert resp_json.get("stac_version") is None - assert resp_json.get("stac_extensions") is None - - -@pytest.mark.asyncio -async def test_app_context_extension(app_client, ctx, txn_client): - test_item = ctx.item - test_item["id"] = "test-item-2" - test_item["collection"] = "test-collection-2" - test_collection = ctx.collection - test_collection["id"] = "test-collection-2" - - await create_collection(txn_client, test_collection) - await create_item(txn_client, test_item) - - resp = await app_client.get( - f"/collections/{test_collection['id']}/items/{test_item['id']}" - ) - assert resp.status_code == 200 - resp_json = resp.json() - assert resp_json["id"] == test_item["id"] - assert resp_json["collection"] == test_item["collection"] - - resp = await app_client.get(f"/collections/{test_collection['id']}") - assert resp.status_code == 200 - resp_json = resp.json() - assert resp_json["id"] == test_collection["id"] - - resp = await app_client.post("/search", json={"collections": ["test-collection-2"]}) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 1 - assert "context" in resp_json - assert resp_json["context"]["returned"] == 1 - if matched := resp_json["context"].get("matched"): - assert matched == 1 - - -@pytest.mark.asyncio -async def test_app_fields_extension(app_client, ctx, txn_client): - resp = await 
app_client.get("/search", params={"collections": ["test-collection"]}) - assert resp.status_code == 200 - resp_json = resp.json() - assert list(resp_json["features"][0]["properties"]) == ["datetime"] - - -@pytest.mark.asyncio -async def test_app_fields_extension_query(app_client, ctx, txn_client): - resp = await app_client.post( - "/search", - json={ - "query": {"proj:epsg": {"gte": ctx.item["properties"]["proj:epsg"]}}, - "collections": ["test-collection"], - }, - ) - assert resp.status_code == 200 - resp_json = resp.json() - assert list(resp_json["features"][0]["properties"]) == ["datetime", "proj:epsg"] - - -@pytest.mark.asyncio -async def test_app_fields_extension_no_properties_get(app_client, ctx, txn_client): - resp = await app_client.get( - "/search", params={"collections": ["test-collection"], "fields": "-properties"} - ) - assert resp.status_code == 200 - resp_json = resp.json() - assert "properties" not in resp_json["features"][0] - - -@pytest.mark.asyncio -async def test_app_fields_extension_no_properties_post(app_client, ctx, txn_client): - resp = await app_client.post( - "/search", - json={ - "collections": ["test-collection"], - "fields": {"exclude": ["properties"]}, - }, - ) - assert resp.status_code == 200 - resp_json = resp.json() - assert "properties" not in resp_json["features"][0] - - -@pytest.mark.asyncio -async def test_app_fields_extension_no_null_fields(app_client, ctx, txn_client): - resp = await app_client.get("/search", params={"collections": ["test-collection"]}) - assert resp.status_code == 200 - resp_json = resp.json() - # check if no null fields: https://github.com/stac-utils/stac-fastapi-elasticsearch/issues/166 - for feature in resp_json["features"]: - # assert "bbox" not in feature["geometry"] - for link in feature["links"]: - assert all(a not in link or link[a] is not None for a in ("title", "asset")) - for asset in feature["assets"]: - assert all( - a not in asset or asset[a] is not None - for a in ("start_datetime", "created") - 
) - - -@pytest.mark.asyncio -async def test_app_fields_extension_return_all_properties(app_client, ctx, txn_client): - item = ctx.item - resp = await app_client.get( - "/search", params={"collections": ["test-collection"], "fields": "properties"} - ) - assert resp.status_code == 200 - resp_json = resp.json() - feature = resp_json["features"][0] - assert len(feature["properties"]) >= len(item["properties"]) - for expected_prop, expected_value in item["properties"].items(): - if expected_prop in ("datetime", "created", "updated"): - assert feature["properties"][expected_prop][0:19] == expected_value[0:19] - else: - assert feature["properties"][expected_prop] == expected_value - - -@pytest.mark.asyncio -async def test_app_query_extension_gt(app_client, ctx): - params = {"query": {"proj:epsg": {"gt": ctx.item["properties"]["proj:epsg"]}}} - resp = await app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 0 - - -@pytest.mark.asyncio -async def test_app_query_extension_gte(app_client, ctx): - params = {"query": {"proj:epsg": {"gte": ctx.item["properties"]["proj:epsg"]}}} - resp = await app_client.post("/search", json=params) - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 1 - - -@pytest.mark.asyncio -async def test_app_query_extension_limit_lt0(app_client): - assert (await app_client.post("/search", json={"limit": -1})).status_code == 400 - - -@pytest.mark.asyncio -async def test_app_query_extension_limit_gt10000(app_client): - resp = await app_client.post("/search", json={"limit": 10001}) - assert resp.status_code == 200 - assert resp.json()["context"]["limit"] == 10000 - - -@pytest.mark.asyncio -async def test_app_query_extension_limit_10000(app_client): - params = {"limit": 10000} - resp = await app_client.post("/search", json=params) - assert resp.status_code == 200 - - -@pytest.mark.asyncio -async def test_app_sort_extension_get_asc(app_client, 
txn_client, ctx): - first_item = ctx.item - item_date = datetime.strptime( - first_item["properties"]["datetime"], "%Y-%m-%dT%H:%M:%SZ" - ) - - second_item = dict(first_item) - second_item["id"] = "another-item" - another_item_date = item_date - timedelta(days=1) - second_item["properties"]["datetime"] = another_item_date.strftime( - "%Y-%m-%dT%H:%M:%SZ" - ) - await create_item(txn_client, second_item) - - resp = await app_client.get("/search?sortby=+properties.datetime") - assert resp.status_code == 200 - resp_json = resp.json() - assert resp_json["features"][1]["id"] == first_item["id"] - assert resp_json["features"][0]["id"] == second_item["id"] - - -@pytest.mark.asyncio -async def test_app_sort_extension_get_desc(app_client, txn_client, ctx): - first_item = ctx.item - item_date = datetime.strptime( - first_item["properties"]["datetime"], "%Y-%m-%dT%H:%M:%SZ" - ) - - second_item = dict(first_item) - second_item["id"] = "another-item" - another_item_date = item_date - timedelta(days=1) - second_item["properties"]["datetime"] = another_item_date.strftime( - "%Y-%m-%dT%H:%M:%SZ" - ) - await create_item(txn_client, second_item) - - resp = await app_client.get("/search?sortby=-properties.datetime") - assert resp.status_code == 200 - resp_json = resp.json() - assert resp_json["features"][0]["id"] == first_item["id"] - assert resp_json["features"][1]["id"] == second_item["id"] - - -@pytest.mark.asyncio -async def test_app_sort_extension_post_asc(app_client, txn_client, ctx): - first_item = ctx.item - item_date = datetime.strptime( - first_item["properties"]["datetime"], "%Y-%m-%dT%H:%M:%SZ" - ) - - second_item = dict(first_item) - second_item["id"] = "another-item" - another_item_date = item_date - timedelta(days=1) - second_item["properties"]["datetime"] = another_item_date.strftime( - "%Y-%m-%dT%H:%M:%SZ" - ) - await create_item(txn_client, second_item) - - params = { - "collections": [first_item["collection"]], - "sortby": [{"field": "properties.datetime", 
"direction": "asc"}], - } - resp = await app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert resp_json["features"][1]["id"] == first_item["id"] - assert resp_json["features"][0]["id"] == second_item["id"] - - -@pytest.mark.asyncio -async def test_app_sort_extension_post_desc(app_client, txn_client, ctx): - first_item = ctx.item - item_date = datetime.strptime( - first_item["properties"]["datetime"], "%Y-%m-%dT%H:%M:%SZ" - ) - - second_item = dict(first_item) - second_item["id"] = "another-item" - another_item_date = item_date - timedelta(days=1) - second_item["properties"]["datetime"] = another_item_date.strftime( - "%Y-%m-%dT%H:%M:%SZ" - ) - await create_item(txn_client, second_item) - - params = { - "collections": [first_item["collection"]], - "sortby": [{"field": "properties.datetime", "direction": "desc"}], - } - resp = await app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert resp_json["features"][0]["id"] == first_item["id"] - assert resp_json["features"][1]["id"] == second_item["id"] - - -@pytest.mark.asyncio -async def test_search_invalid_date(app_client, ctx): - params = { - "datetime": "2020-XX-01/2020-10-30", - "collections": [ctx.item["collection"]], - } - - resp = await app_client.post("/search", json=params) - assert resp.status_code == 400 - - -@pytest.mark.asyncio -async def test_search_point_intersects_get(app_client, ctx): - resp = await app_client.get( - '/search?intersects={"type":"Point","coordinates":[150.04,-33.14]}' - ) - - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 1 - - -@pytest.mark.asyncio -async def test_search_polygon_intersects_get(app_client, ctx): - resp = await app_client.get( - '/search?intersects={"type":"Polygon","coordinates":[[[149.04, -34.14],[149.04, -32.14],[151.04, -32.14],[151.04, -34.14],[149.04, -34.14]]]}' - ) - - assert resp.status_code == 200 - resp_json 
= resp.json() - assert len(resp_json["features"]) == 1 - - -@pytest.mark.asyncio -async def test_search_point_intersects_post(app_client, ctx): - point = [150.04, -33.14] - intersects = {"type": "Point", "coordinates": point} - - params = { - "intersects": intersects, - "collections": [ctx.item["collection"]], - } - resp = await app_client.post("/search", json=params) - - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 1 - - -@pytest.mark.asyncio -async def test_search_point_does_not_intersect(app_client, ctx): - point = [15.04, -3.14] - intersects = {"type": "Point", "coordinates": point} - - params = { - "intersects": intersects, - "collections": [ctx.item["collection"]], - } - resp = await app_client.post("/search", json=params) - - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 0 - - -@pytest.mark.asyncio -async def test_datetime_non_interval(app_client, ctx): - dt_formats = [ - "2020-02-12T12:30:22+00:00", - "2020-02-12T12:30:22.00Z", - "2020-02-12T12:30:22Z", - "2020-02-12T12:30:22.00+00:00", - ] - - for dt in dt_formats: - params = { - "datetime": dt, - "collections": [ctx.item["collection"]], - } - - resp = await app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - # datetime is returned in this format "2020-02-12T12:30:22Z" - assert resp_json["features"][0]["properties"]["datetime"][0:19] == dt[0:19] - - -@pytest.mark.asyncio -async def test_bbox_3d(app_client, ctx): - australia_bbox = [106.343365, -47.199523, 0.1, 168.218365, -19.437288, 0.1] - params = { - "bbox": australia_bbox, - "collections": [ctx.item["collection"]], - } - resp = await app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 1 - - -@pytest.mark.asyncio -async def test_search_line_string_intersects(app_client, ctx): - line = [[150.04, -33.14], [150.22, -33.89]] 
- intersects = {"type": "LineString", "coordinates": line} - params = { - "intersects": intersects, - "collections": [ctx.item["collection"]], - } - - resp = await app_client.post("/search", json=params) - - assert resp.status_code == 200 - - resp_json = resp.json() - assert len(resp_json["features"]) == 1 diff --git a/stac_fastapi/elasticsearch/tests/clients/__init__.py b/stac_fastapi/elasticsearch/tests/clients/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/stac_fastapi/elasticsearch/tests/clients/test_elasticsearch.py b/stac_fastapi/elasticsearch/tests/clients/test_elasticsearch.py deleted file mode 100644 index 41fcf26d..00000000 --- a/stac_fastapi/elasticsearch/tests/clients/test_elasticsearch.py +++ /dev/null @@ -1,312 +0,0 @@ -import uuid -from copy import deepcopy -from typing import Callable - -import pytest -from stac_pydantic import Item - -from stac_fastapi.extensions.third_party.bulk_transactions import Items -from stac_fastapi.types.errors import ConflictError, NotFoundError - -from ..conftest import MockRequest, create_item - - -@pytest.mark.asyncio -async def test_create_collection(app_client, ctx, core_client, txn_client): - in_coll = deepcopy(ctx.collection) - in_coll["id"] = str(uuid.uuid4()) - await txn_client.create_collection(in_coll, request=MockRequest) - got_coll = await core_client.get_collection(in_coll["id"], request=MockRequest) - assert got_coll["id"] == in_coll["id"] - await txn_client.delete_collection(in_coll["id"]) - - -@pytest.mark.asyncio -async def test_create_collection_already_exists(app_client, ctx, txn_client): - data = deepcopy(ctx.collection) - - # change id to avoid elasticsearch duplicate key error - data["_id"] = str(uuid.uuid4()) - - with pytest.raises(ConflictError): - await txn_client.create_collection(data, request=MockRequest) - - await txn_client.delete_collection(data["id"]) - - -@pytest.mark.asyncio -async def test_update_collection( - core_client, - txn_client, - load_test_data: 
Callable, -): - collection_data = load_test_data("test_collection.json") - item_data = load_test_data("test_item.json") - - await txn_client.create_collection(collection_data, request=MockRequest) - await txn_client.create_item( - collection_id=collection_data["id"], - item=item_data, - request=MockRequest, - refresh=True, - ) - - collection_data["keywords"].append("new keyword") - await txn_client.update_collection(collection_data, request=MockRequest) - - coll = await core_client.get_collection(collection_data["id"], request=MockRequest) - assert "new keyword" in coll["keywords"] - - item = await core_client.get_item( - item_id=item_data["id"], - collection_id=collection_data["id"], - request=MockRequest, - ) - assert item["id"] == item_data["id"] - assert item["collection"] == item_data["collection"] - - await txn_client.delete_collection(collection_data["id"]) - - -@pytest.mark.asyncio -async def test_update_collection_id( - core_client, - txn_client, - load_test_data: Callable, -): - collection_data = load_test_data("test_collection.json") - item_data = load_test_data("test_item.json") - new_collection_id = "new-test-collection" - - await txn_client.create_collection(collection_data, request=MockRequest) - await txn_client.create_item( - collection_id=collection_data["id"], - item=item_data, - request=MockRequest, - refresh=True, - ) - - old_collection_id = collection_data["id"] - collection_data["id"] = new_collection_id - - await txn_client.update_collection( - collection=collection_data, - request=MockRequest( - query_params={ - "collection_id": old_collection_id, - "limit": "10", - } - ), - refresh=True, - ) - - with pytest.raises(NotFoundError): - await core_client.get_collection(old_collection_id, request=MockRequest) - - coll = await core_client.get_collection(collection_data["id"], request=MockRequest) - assert coll["id"] == new_collection_id - - with pytest.raises(NotFoundError): - await core_client.get_item( - item_id=item_data["id"], - 
collection_id=old_collection_id, - request=MockRequest, - ) - - item = await core_client.get_item( - item_id=item_data["id"], - collection_id=collection_data["id"], - request=MockRequest, - refresh=True, - ) - - assert item["id"] == item_data["id"] - assert item["collection"] == new_collection_id - - await txn_client.delete_collection(collection_data["id"]) - - -@pytest.mark.asyncio -async def test_delete_collection( - core_client, - txn_client, - load_test_data: Callable, -): - data = load_test_data("test_collection.json") - await txn_client.create_collection(data, request=MockRequest) - - await txn_client.delete_collection(data["id"]) - - with pytest.raises(NotFoundError): - await core_client.get_collection(data["id"], request=MockRequest) - - -@pytest.mark.asyncio -async def test_get_collection( - core_client, - txn_client, - load_test_data: Callable, -): - data = load_test_data("test_collection.json") - await txn_client.create_collection(data, request=MockRequest) - coll = await core_client.get_collection(data["id"], request=MockRequest) - assert coll["id"] == data["id"] - - await txn_client.delete_collection(data["id"]) - - -@pytest.mark.asyncio -async def test_get_item(app_client, ctx, core_client): - got_item = await core_client.get_item( - item_id=ctx.item["id"], - collection_id=ctx.item["collection"], - request=MockRequest, - ) - assert got_item["id"] == ctx.item["id"] - assert got_item["collection"] == ctx.item["collection"] - - -@pytest.mark.asyncio -async def test_get_collection_items(app_client, ctx, core_client, txn_client): - coll = ctx.collection - num_of_items_to_create = 5 - for _ in range(num_of_items_to_create): - item = deepcopy(ctx.item) - item["id"] = str(uuid.uuid4()) - await txn_client.create_item( - collection_id=item["collection"], - item=item, - request=MockRequest, - refresh=True, - ) - - fc = await core_client.item_collection(coll["id"], request=MockRequest()) - assert len(fc["features"]) == num_of_items_to_create + 1 # ctx.item - - 
for item in fc["features"]: - assert item["collection"] == coll["id"] - - -@pytest.mark.asyncio -async def test_create_item(ctx, core_client, txn_client): - resp = await core_client.get_item( - ctx.item["id"], ctx.item["collection"], request=MockRequest - ) - assert Item(**ctx.item).dict( - exclude={"links": ..., "properties": {"created", "updated"}} - ) == Item(**resp).dict(exclude={"links": ..., "properties": {"created", "updated"}}) - - -@pytest.mark.asyncio -async def test_create_item_already_exists(ctx, txn_client): - with pytest.raises(ConflictError): - await txn_client.create_item( - collection_id=ctx.item["collection"], - item=ctx.item, - request=MockRequest, - refresh=True, - ) - - -@pytest.mark.asyncio -async def test_update_item(ctx, core_client, txn_client): - ctx.item["properties"]["foo"] = "bar" - collection_id = ctx.item["collection"] - item_id = ctx.item["id"] - await txn_client.update_item( - collection_id=collection_id, item_id=item_id, item=ctx.item, request=MockRequest - ) - - updated_item = await core_client.get_item( - item_id, collection_id, request=MockRequest - ) - assert updated_item["properties"]["foo"] == "bar" - - -@pytest.mark.asyncio -async def test_update_geometry(ctx, core_client, txn_client): - new_coordinates = [ - [ - [142.15052873427666, -33.82243006904891], - [140.1000346138806, -34.257132625788756], - [139.5776607193635, -32.514709769700254], - [141.6262528041627, -32.08081674221862], - [142.15052873427666, -33.82243006904891], - ] - ] - - ctx.item["geometry"]["coordinates"] = new_coordinates - collection_id = ctx.item["collection"] - item_id = ctx.item["id"] - await txn_client.update_item( - collection_id=collection_id, item_id=item_id, item=ctx.item, request=MockRequest - ) - - updated_item = await core_client.get_item( - item_id, collection_id, request=MockRequest - ) - assert updated_item["geometry"]["coordinates"] == new_coordinates - - -@pytest.mark.asyncio -async def test_delete_item(ctx, core_client, txn_client): - 
await txn_client.delete_item(ctx.item["id"], ctx.item["collection"]) - - with pytest.raises(NotFoundError): - await core_client.get_item( - ctx.item["id"], ctx.item["collection"], request=MockRequest - ) - - -@pytest.mark.asyncio -async def test_bulk_item_insert(ctx, core_client, txn_client, bulk_txn_client): - items = {} - for _ in range(10): - _item = deepcopy(ctx.item) - _item["id"] = str(uuid.uuid4()) - items[_item["id"]] = _item - - # fc = es_core.item_collection(coll["id"], request=MockStarletteRequest) - # assert len(fc["features"]) == 0 - - bulk_txn_client.bulk_item_insert(Items(items=items), refresh=True) - - fc = await core_client.item_collection(ctx.collection["id"], request=MockRequest()) - assert len(fc["features"]) >= 10 - - # for item in items: - # es_transactions.delete_item( - # item["id"], item["collection"], request=MockStarletteRequest - # ) - - -@pytest.mark.asyncio -async def test_feature_collection_insert( - core_client, - txn_client, - ctx, -): - features = [] - for _ in range(10): - _item = deepcopy(ctx.item) - _item["id"] = str(uuid.uuid4()) - features.append(_item) - - feature_collection = {"type": "FeatureCollection", "features": features} - - await create_item(txn_client, feature_collection) - - fc = await core_client.item_collection(ctx.collection["id"], request=MockRequest()) - assert len(fc["features"]) >= 10 - - -@pytest.mark.asyncio -async def test_landing_page_no_collection_title(ctx, core_client, txn_client, app): - ctx.collection["id"] = "new_id" - del ctx.collection["title"] - await txn_client.create_collection(ctx.collection, request=MockRequest) - - landing_page = await core_client.landing_page(request=MockRequest(app=app)) - for link in landing_page["links"]: - if link["href"].split("/")[-1] == ctx.collection["id"]: - assert link["title"] diff --git a/stac_fastapi/elasticsearch/tests/conftest.py b/stac_fastapi/elasticsearch/tests/conftest.py deleted file mode 100644 index 61bfcb0e..00000000 --- 
a/stac_fastapi/elasticsearch/tests/conftest.py +++ /dev/null @@ -1,211 +0,0 @@ -import asyncio -import copy -import json -import os -from typing import Any, Callable, Dict, Optional - -import pytest -import pytest_asyncio -from httpx import AsyncClient - -from stac_fastapi.api.app import StacApi -from stac_fastapi.api.models import create_get_request_model, create_post_request_model -from stac_fastapi.core.core import ( - BulkTransactionsClient, - CoreClient, - TransactionsClient, -) -from stac_fastapi.core.extensions import QueryExtension -from stac_fastapi.elasticsearch.config import ( - AsyncElasticsearchSettings, - ElasticsearchSettings, -) -from stac_fastapi.elasticsearch.database_logic import ( - DatabaseLogic, - create_collection_index, -) -from stac_fastapi.extensions.core import ( # FieldsExtension, - ContextExtension, - FieldsExtension, - FilterExtension, - SortExtension, - TokenPaginationExtension, - TransactionExtension, -) -from stac_fastapi.types.config import Settings - -DATA_DIR = os.path.join(os.path.dirname(__file__), "data") - - -class Context: - def __init__(self, item, collection): - self.item = item - self.collection = collection - - -class MockRequest: - base_url = "http://test-server" - query_params = {} - - def __init__( - self, - method: str = "GET", - url: str = "XXXX", - app: Optional[Any] = None, - query_params: Dict[str, Any] = {"limit": "10"}, - ): - self.method = method - self.url = url - self.app = app - self.query_params = query_params - - -class TestSettings(AsyncElasticsearchSettings): - class Config: - env_file = ".env.test" - - -settings = TestSettings() -Settings.set(settings) - - -@pytest.fixture(scope="session") -def event_loop(): - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - yield loop - loop.close() - - -def _load_file(filename: str) -> Dict: - with open(os.path.join(DATA_DIR, filename)) as file: - return json.load(file) - - -_test_item_prototype = _load_file("test_item.json") 
-_test_collection_prototype = _load_file("test_collection.json") - - -@pytest.fixture -def load_test_data() -> Callable[[str], Dict]: - return _load_file - - -@pytest.fixture -def test_item() -> Dict: - return copy.deepcopy(_test_item_prototype) - - -@pytest.fixture -def test_collection() -> Dict: - return copy.deepcopy(_test_collection_prototype) - - -async def create_collection(txn_client: TransactionsClient, collection: Dict) -> None: - await txn_client.create_collection( - dict(collection), request=MockRequest, refresh=True - ) - - -async def create_item(txn_client: TransactionsClient, item: Dict) -> None: - if "collection" in item: - await txn_client.create_item( - collection_id=item["collection"], - item=item, - request=MockRequest, - refresh=True, - ) - else: - await txn_client.create_item( - collection_id=item["features"][0]["collection"], - item=item, - request=MockRequest, - refresh=True, - ) - - -async def delete_collections_and_items(txn_client: TransactionsClient) -> None: - await refresh_indices(txn_client) - await txn_client.database.delete_items() - await txn_client.database.delete_collections() - - -async def refresh_indices(txn_client: TransactionsClient) -> None: - try: - await txn_client.database.client.indices.refresh(index="_all") - except Exception: - pass - - -@pytest_asyncio.fixture() -async def ctx(txn_client: TransactionsClient, test_collection, test_item): - # todo remove one of these when all methods use it - await delete_collections_and_items(txn_client) - - await create_collection(txn_client, test_collection) - await create_item(txn_client, test_item) - - yield Context(item=test_item, collection=test_collection) - - await delete_collections_and_items(txn_client) - - -database = DatabaseLogic() -settings = ElasticsearchSettings() - - -@pytest.fixture -def core_client(): - return CoreClient(database=database, session=None) - - -@pytest.fixture -def txn_client(): - return TransactionsClient(database=database, session=None, 
settings=settings) - - -@pytest.fixture -def bulk_txn_client(): - return BulkTransactionsClient(database=database, session=None, settings=settings) - - -@pytest_asyncio.fixture(scope="session") -async def app(): - settings = AsyncElasticsearchSettings() - extensions = [ - TransactionExtension( - client=TransactionsClient( - database=database, session=None, settings=settings - ), - settings=settings, - ), - ContextExtension(), - SortExtension(), - FieldsExtension(), - QueryExtension(), - TokenPaginationExtension(), - FilterExtension(), - ] - - post_request_model = create_post_request_model(extensions) - - return StacApi( - settings=settings, - client=CoreClient( - database=database, - session=None, - extensions=extensions, - post_request_model=post_request_model, - ), - extensions=extensions, - search_get_request_model=create_get_request_model(extensions), - search_post_request_model=post_request_model, - ).app - - -@pytest_asyncio.fixture(scope="session") -async def app_client(app): - await create_collection_index() - - async with AsyncClient(app=app, base_url="http://test-server") as c: - yield c diff --git a/stac_fastapi/elasticsearch/tests/data/test_collection.json b/stac_fastapi/elasticsearch/tests/data/test_collection.json deleted file mode 100644 index 391b906c..00000000 --- a/stac_fastapi/elasticsearch/tests/data/test_collection.json +++ /dev/null @@ -1,99 +0,0 @@ -{ - "id": "test-collection", - "stac_extensions": ["https://stac-extensions.github.io/eo/v1.0.0/schema.json"], - "type": "Collection", - "description": "Landat 8 imagery radiometrically calibrated and orthorectified using gound points and Digital Elevation Model (DEM) data to correct relief displacement.", - "stac_version": "1.0.0", - "license": "PDDL-1.0", - "summaries": { - "platform": ["landsat-8"], - "instruments": ["oli", "tirs"], - "gsd": [30] - }, - "extent": { - "spatial": { - "bbox": [ - [ - -180.0, - -90.0, - 180.0, - 90.0 - ] - ] - }, - "temporal": { - "interval": [ - [ - "2013-06-01", 
- null - ] - ] - } - }, - "links": [ - { - "href": "http://localhost:8081/collections/landsat-8-l1", - "rel": "self", - "type": "application/json" - }, - { - "href": "http://localhost:8081/", - "rel": "parent", - "type": "application/json" - }, - { - "href": "http://localhost:8081/collections/landsat-8-l1/items", - "rel": "item", - "type": "application/geo+json" - }, - { - "href": "http://localhost:8081/", - "rel": "root", - "type": "application/json" - } - ], - "title": "Landsat 8 L1", - "keywords": [ - "landsat", - "earth observation", - "usgs" - ], - "providers": [ - { - "name": "USGS", - "roles": [ - "producer" - ], - "url": "https://landsat.usgs.gov/" - }, - { - "name": "Planet Labs", - "roles": [ - "processor" - ], - "url": "https://github.com/landsat-pds/landsat_ingestor" - }, - { - "name": "AWS", - "roles": [ - "host" - ], - "url": "https://landsatonaws.com/" - }, - { - "name": "Development Seed", - "roles": [ - "processor" - ], - "url": "https://github.com/sat-utils/sat-api" - }, - { - "name": "Earth Search by Element84", - "description": "API of Earth on AWS datasets", - "roles": [ - "host" - ], - "url": "https://element84.com" - } - ] -} \ No newline at end of file diff --git a/stac_fastapi/elasticsearch/tests/data/test_item.json b/stac_fastapi/elasticsearch/tests/data/test_item.json deleted file mode 100644 index 2b7fdd86..00000000 --- a/stac_fastapi/elasticsearch/tests/data/test_item.json +++ /dev/null @@ -1,505 +0,0 @@ -{ - "type": "Feature", - "id": "test-item", - "stac_version": "1.0.0", - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "geometry": { - "coordinates": [ - [ - [ - 152.15052873427666, - -33.82243006904891 - ], - [ - 150.1000346138806, - -34.257132625788756 - ], - [ - 149.5776607193635, - -32.514709769700254 - ], - [ - 151.6262528041627, - -32.08081674221862 - ], - [ - 152.15052873427666, - -33.82243006904891 - ] - ] - ], - 
"type": "Polygon" - }, - "properties": { - "datetime": "2020-02-12T12:30:22Z", - "landsat:scene_id": "LC82081612020043LGN00", - "landsat:row": "161", - "gsd": 15, - "eo:bands": [ - { - "gsd": 30, - "name": "B1", - "common_name": "coastal", - "center_wavelength": 0.44, - "full_width_half_max": 0.02 - }, - { - "gsd": 30, - "name": "B2", - "common_name": "blue", - "center_wavelength": 0.48, - "full_width_half_max": 0.06 - }, - { - "gsd": 30, - "name": "B3", - "common_name": "green", - "center_wavelength": 0.56, - "full_width_half_max": 0.06 - }, - { - "gsd": 30, - "name": "B4", - "common_name": "red", - "center_wavelength": 0.65, - "full_width_half_max": 0.04 - }, - { - "gsd": 30, - "name": "B5", - "common_name": "nir", - "center_wavelength": 0.86, - "full_width_half_max": 0.03 - }, - { - "gsd": 30, - "name": "B6", - "common_name": "swir16", - "center_wavelength": 1.6, - "full_width_half_max": 0.08 - }, - { - "gsd": 30, - "name": "B7", - "common_name": "swir22", - "center_wavelength": 2.2, - "full_width_half_max": 0.2 - }, - { - "gsd": 15, - "name": "B8", - "common_name": "pan", - "center_wavelength": 0.59, - "full_width_half_max": 0.18 - }, - { - "gsd": 30, - "name": "B9", - "common_name": "cirrus", - "center_wavelength": 1.37, - "full_width_half_max": 0.02 - }, - { - "gsd": 100, - "name": "B10", - "common_name": "lwir11", - "center_wavelength": 10.9, - "full_width_half_max": 0.8 - }, - { - "gsd": 100, - "name": "B11", - "common_name": "lwir12", - "center_wavelength": 12, - "full_width_half_max": 1 - } - ], - "landsat:revision": "00", - "view:sun_azimuth": -148.83296771, - "instrument": "OLI_TIRS", - "landsat:product_id": "LC08_L1GT_208161_20200212_20200212_01_RT", - "eo:cloud_cover": 0, - "landsat:tier": "RT", - "landsat:processing_level": "L1GT", - "landsat:column": "208", - "platform": "landsat-8", - "proj:epsg": 32756, - "view:sun_elevation": -37.30791534, - "view:off_nadir": 0, - "height": 2500, - "width": 2500 - }, - "bbox": [ - 149.57574, - -34.25796, - 
152.15194, - -32.07915 - ], - "collection": "test-collection", - "assets": { - "ANG": { - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ANG.txt", - "type": "text/plain", - "title": "Angle Coefficients File", - "description": "Collection 2 Level-1 Angle Coefficients File (ANG)" - }, - "SR_B1": { - "gsd": 30, - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B1.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Coastal/Aerosol Band (B1)", - "eo:bands": [ - { - "gsd": 30, - "name": "SR_B1", - "common_name": "coastal", - "center_wavelength": 0.44, - "full_width_half_max": 0.02 - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Collection 2 Level-2 Coastal/Aerosol Band (B1) Surface Reflectance", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - }, - "SR_B2": { - "gsd": 30, - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B2.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Blue Band (B2)", - "eo:bands": [ - { - "gsd": 30, - "name": "SR_B2", - "common_name": "blue", - "center_wavelength": 0.48, - "full_width_half_max": 0.06 - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Collection 2 Level-2 Blue Band (B2) Surface Reflectance", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - }, - "SR_B3": { - "gsd": 30, - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B3.TIF", - "type": 
"image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Green Band (B3)", - "eo:bands": [ - { - "gsd": 30, - "name": "SR_B3", - "common_name": "green", - "center_wavelength": 0.56, - "full_width_half_max": 0.06 - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Collection 2 Level-2 Green Band (B3) Surface Reflectance", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - }, - "SR_B4": { - "gsd": 30, - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B4.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Red Band (B4)", - "eo:bands": [ - { - "gsd": 30, - "name": "SR_B4", - "common_name": "red", - "center_wavelength": 0.65, - "full_width_half_max": 0.04 - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Collection 2 Level-2 Red Band (B4) Surface Reflectance", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - }, - "SR_B5": { - "gsd": 30, - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B5.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Near Infrared Band 0.8 (B5)", - "eo:bands": [ - { - "gsd": 30, - "name": "SR_B5", - "common_name": "nir08", - "center_wavelength": 0.86, - "full_width_half_max": 0.03 - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Collection 2 Level-2 Near Infrared Band 0.8 (B5) Surface Reflectance", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - }, - "SR_B6": { - "gsd": 30, - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B6.TIF", - "type": 
"image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Short-wave Infrared Band 1.6 (B6)", - "eo:bands": [ - { - "gsd": 30, - "name": "SR_B6", - "common_name": "swir16", - "center_wavelength": 1.6, - "full_width_half_max": 0.08 - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Collection 2 Level-2 Short-wave Infrared Band 1.6 (B6) Surface Reflectance", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - }, - "SR_B7": { - "gsd": 30, - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B7.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Short-wave Infrared Band 2.2 (B7)", - "eo:bands": [ - { - "gsd": 30, - "name": "SR_B7", - "common_name": "swir22", - "center_wavelength": 2.2, - "full_width_half_max": 0.2 - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Collection 2 Level-2 Short-wave Infrared Band 2.2 (B7) Surface Reflectance", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - }, - "ST_QA": { - "gsd": 30, - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ST_QA.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Surface Temperature Quality Assessment Band", - "proj:shape": [ - 7731, - 7591 - ], - "description": "Landsat Collection 2 Level-2 Surface Temperature Band Surface Temperature Product", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - }, - "ST_B10": { - "gsd": 100, - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ST_B10.TIF", - "type": "image/tiff; application=geotiff; 
profile=cloud-optimized", - "title": "Surface Temperature Band (B10)", - "eo:bands": [ - { - "gsd": 100, - "name": "ST_B10", - "common_name": "lwir11", - "center_wavelength": 10.9, - "full_width_half_max": 0.8 - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Landsat Collection 2 Level-2 Surface Temperature Band (B10) Surface Temperature Product", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - }, - "MTL.txt": { - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_MTL.txt", - "type": "text/plain", - "title": "Product Metadata File", - "description": "Collection 2 Level-1 Product Metadata File (MTL)" - }, - "MTL.xml": { - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_MTL.xml", - "type": "application/xml", - "title": "Product Metadata File (xml)", - "description": "Collection 2 Level-1 Product Metadata File (xml)" - }, - "ST_DRAD": { - "gsd": 30, - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ST_DRAD.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Downwelled Radiance Band", - "eo:bands": [ - { - "gsd": 30, - "name": "ST_DRAD", - "description": "downwelled radiance" - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Landsat Collection 2 Level-2 Downwelled Radiance Band Surface Temperature Product", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - }, - "ST_EMIS": { - "gsd": 30, - "href": 
"https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ST_EMIS.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Emissivity Band", - "eo:bands": [ - { - "gsd": 30, - "name": "ST_EMIS", - "description": "emissivity" - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Landsat Collection 2 Level-2 Emissivity Band Surface Temperature Product", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - }, - "ST_EMSD": { - "gsd": 30, - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ST_EMSD.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Emissivity Standard Deviation Band", - "eo:bands": [ - { - "gsd": 30, - "name": "ST_EMSD", - "description": "emissivity standard deviation" - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Landsat Collection 2 Level-2 Emissivity Standard Deviation Band Surface Temperature Product", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - } - }, - "links": [ - { - "href": "http://localhost:8081/collections/landsat-8-l1/items/LC82081612020043", - "rel": "self", - "type": "application/geo+json" - }, - { - "href": "http://localhost:8081/collections/landsat-8-l1", - "rel": "parent", - "type": "application/json" - }, - { - "href": "http://localhost:8081/collections/landsat-8-l1", - "rel": "collection", - "type": "application/json" - }, - { - "href": "http://localhost:8081/", - "rel": "root", - "type": "application/json" - } - ] -} \ No newline at end of file diff --git a/stac_fastapi/elasticsearch/tests/extensions/cql2/example01.json b/stac_fastapi/elasticsearch/tests/extensions/cql2/example01.json deleted file mode 100644 index a70bd0d3..00000000 --- 
a/stac_fastapi/elasticsearch/tests/extensions/cql2/example01.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "op": "=", - "args": [ - { - "property": "scene_id" - }, - "LC82030282019133LGN00" - ] -} diff --git a/stac_fastapi/elasticsearch/tests/extensions/cql2/example04.json b/stac_fastapi/elasticsearch/tests/extensions/cql2/example04.json deleted file mode 100644 index e087504c..00000000 --- a/stac_fastapi/elasticsearch/tests/extensions/cql2/example04.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "op": "and", - "args": [ - { - "op": "<", - "args": [ - { - "property": "cloud_cover" - }, - 0.1 - ] - }, - { - "op": "=", - "args": [ - { - "property": "landsat:wrs_row" - }, - 28 - ] - }, - { - "op": "=", - "args": [ - { - "property": "landsat:wrs_path" - }, - 203 - ] - } - ] -} diff --git a/stac_fastapi/elasticsearch/tests/extensions/cql2/example05a.json b/stac_fastapi/elasticsearch/tests/extensions/cql2/example05a.json deleted file mode 100644 index b5bd7a94..00000000 --- a/stac_fastapi/elasticsearch/tests/extensions/cql2/example05a.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "op": "or", - "args": [ - { - "op": "=", - "args": [ - { - "property": "cloud_cover" - }, - 0.1 - ] - }, - { - "op": "=", - "args": [ - { - "property": "cloud_cover" - }, - 0.2 - ] - } - ] -} diff --git a/stac_fastapi/elasticsearch/tests/extensions/cql2/example06b.json b/stac_fastapi/elasticsearch/tests/extensions/cql2/example06b.json deleted file mode 100644 index fc2a7e56..00000000 --- a/stac_fastapi/elasticsearch/tests/extensions/cql2/example06b.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "op": "and", - "args": [ - { - "op": ">=", - "args": [ - { - "property": "cloud_cover" - }, - 0.1 - ] - }, - { - "op": "<=", - "args": [ - { - "property": "cloud_cover" - }, - 0.2 - ] - }, - { - "op": "=", - "args": [ - { - "property": "landsat:wrs_row" - }, - 28 - ] - }, - { - "op": "=", - "args": [ - { - "property": "landsat:wrs_path" - }, - 203 - ] - } - ] -} diff --git 
a/stac_fastapi/elasticsearch/tests/extensions/cql2/example08.json b/stac_fastapi/elasticsearch/tests/extensions/cql2/example08.json deleted file mode 100644 index 2f06413f..00000000 --- a/stac_fastapi/elasticsearch/tests/extensions/cql2/example08.json +++ /dev/null @@ -1,79 +0,0 @@ -{ - "op": "and", - "args": [ - { - "op": "=", - "args": [ - { - "property": "beamMode" - }, - "ScanSAR Narrow" - ] - }, - { - "op": "=", - "args": [ - { - "property": "swathDirection" - }, - "ascending" - ] - }, - { - "op": "=", - "args": [ - { - "property": "polarization" - }, - "HH+VV+HV+VH" - ] - }, - { - "op": "s_intersects", - "args": [ - { - "property": "footprint" - }, - { - "type": "Polygon", - "coordinates": [ - [ - [ - -77.117938, - 38.93686 - ], - [ - -77.040604, - 39.995648 - ], - [ - -76.910536, - 38.892912 - ], - [ - -77.039359, - 38.791753 - ], - [ - -77.047906, - 38.841462 - ], - [ - -77.034183, - 38.840655 - ], - [ - -77.033142, - 38.85749 - ], - [ - -77.117938, - 38.93686 - ] - ] - ] - } - ] - } - ] -} diff --git a/stac_fastapi/elasticsearch/tests/extensions/cql2/example09.json b/stac_fastapi/elasticsearch/tests/extensions/cql2/example09.json deleted file mode 100644 index 9f562fb4..00000000 --- a/stac_fastapi/elasticsearch/tests/extensions/cql2/example09.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "op": ">", - "args": [ - { - "property": "floors" - }, - 5 - ] -} diff --git a/stac_fastapi/elasticsearch/tests/extensions/cql2/example1.json b/stac_fastapi/elasticsearch/tests/extensions/cql2/example1.json deleted file mode 100644 index 48483548..00000000 --- a/stac_fastapi/elasticsearch/tests/extensions/cql2/example1.json +++ /dev/null @@ -1,39 +0,0 @@ - { - "op": "and", - "args": [ - { - "op": "=", - "args": [ - {"property": "id"}, - "LC08_L1TP_060247_20180905_20180912_01_T1_L1TP" - ] - }, - {"op": "=", "args": [{"property": "collection"}, "landsat8_l1tp"]}, - { - "op": ">", - "args": [ - {"property": "properties.datetime"}, - {"timestamp": "2022-04-29T00:00:00Z"} - ] - }, - 
{"op": "<", "args": [{"property": "properties.eo:cloud_cover"}, 10]}, - { - "op": "s_intersects", - "args": [ - {"property": "geometry"}, - { - "type": "Polygon", - "coordinates": [ - [ - [36.319836, 32.288087], - [36.320041, 32.288032], - [36.320210, 32.288402], - [36.320008, 32.288458], - [36.319836, 32.288087] - ] - ] - } - ] - } - ] -} \ No newline at end of file diff --git a/stac_fastapi/elasticsearch/tests/extensions/cql2/example10.json b/stac_fastapi/elasticsearch/tests/extensions/cql2/example10.json deleted file mode 100644 index 870303ea..00000000 --- a/stac_fastapi/elasticsearch/tests/extensions/cql2/example10.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "op": "<=", - "args": [ - { - "property": "taxes" - }, - 500 - ] -} diff --git a/stac_fastapi/elasticsearch/tests/extensions/cql2/example14.json b/stac_fastapi/elasticsearch/tests/extensions/cql2/example14.json deleted file mode 100644 index fad45d48..00000000 --- a/stac_fastapi/elasticsearch/tests/extensions/cql2/example14.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "op": "=", - "args": [ - { - "property": "swimming_pool" - }, - true - ] -} diff --git a/stac_fastapi/elasticsearch/tests/extensions/cql2/example15.json b/stac_fastapi/elasticsearch/tests/extensions/cql2/example15.json deleted file mode 100644 index 98f96797..00000000 --- a/stac_fastapi/elasticsearch/tests/extensions/cql2/example15.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "op": "and", - "args": [ - { - "op": ">", - "args": [ - { - "property": "floor" - }, - 5 - ] - }, - { - "op": "=", - "args": [ - { - "property": "swimming_pool" - }, - true - ] - } - ] -} diff --git a/stac_fastapi/elasticsearch/tests/extensions/cql2/example17.json b/stac_fastapi/elasticsearch/tests/extensions/cql2/example17.json deleted file mode 100644 index 9b215273..00000000 --- a/stac_fastapi/elasticsearch/tests/extensions/cql2/example17.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "op": "or", - "args": [ - { - "op": "and", - "args": [ - { - "op": ">", - "args": [ - { - "property": 
"floors" - }, - 5 - ] - }, - { - "op": "=", - "args": [ - { - "property": "material" - }, - "brick" - ] - } - ] - }, - { - "op": "=", - "args": [ - { - "property": "swimming_pool" - }, - true - ] - } - ] -} diff --git a/stac_fastapi/elasticsearch/tests/extensions/cql2/example18.json b/stac_fastapi/elasticsearch/tests/extensions/cql2/example18.json deleted file mode 100644 index 7087a151..00000000 --- a/stac_fastapi/elasticsearch/tests/extensions/cql2/example18.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "op": "or", - "args": [ - { - "op": "not", - "args": [ - { - "op": "<", - "args": [ - { - "property": "floors" - }, - 5 - ] - } - ] - }, - { - "op": "=", - "args": [ - { - "property": "swimming_pool" - }, - true - ] - } - ] -} diff --git a/stac_fastapi/elasticsearch/tests/extensions/cql2/example19.json b/stac_fastapi/elasticsearch/tests/extensions/cql2/example19.json deleted file mode 100755 index 0e4306fb..00000000 --- a/stac_fastapi/elasticsearch/tests/extensions/cql2/example19.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "op": "like", - "args": [ - { - "property": "scene_id" - }, - "LC82030282019133%" - ] -} diff --git a/stac_fastapi/elasticsearch/tests/extensions/cql2/example20.json b/stac_fastapi/elasticsearch/tests/extensions/cql2/example20.json deleted file mode 100755 index f7412fc0..00000000 --- a/stac_fastapi/elasticsearch/tests/extensions/cql2/example20.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "op": "like", - "args": [ - { - "property": "scene_id" - }, - "LC82030282019133LGN0_" - ] - } - \ No newline at end of file diff --git a/stac_fastapi/elasticsearch/tests/extensions/cql2/example21.json b/stac_fastapi/elasticsearch/tests/extensions/cql2/example21.json deleted file mode 100644 index 175b8732..00000000 --- a/stac_fastapi/elasticsearch/tests/extensions/cql2/example21.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "op": "and", - "args": [ - { - "op": "between", - "args": [ - { - "property": "cloud_cover" - }, - 0.1, - 0.2 - ] - }, - { - "op": "=", - "args": [ - { - 
"property": "landsat:wrs_row" - }, - 28 - ] - }, - { - "op": "=", - "args": [ - { - "property": "landsat:wrs_path" - }, - 203 - ] - } - ] -} \ No newline at end of file diff --git a/stac_fastapi/elasticsearch/tests/extensions/cql2/example22.json b/stac_fastapi/elasticsearch/tests/extensions/cql2/example22.json deleted file mode 100644 index 880c90c3..00000000 --- a/stac_fastapi/elasticsearch/tests/extensions/cql2/example22.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "op": "and", - "args": [ - { - "op": "in", - "args": [ - {"property": "id"}, - ["LC08_L1TP_060247_20180905_20180912_01_T1_L1TP"] - ] - }, - {"op": "=", "args": [{"property": "collection"}, "landsat8_l1tp"]} - ] -} \ No newline at end of file diff --git a/stac_fastapi/elasticsearch/tests/extensions/test_filter.py b/stac_fastapi/elasticsearch/tests/extensions/test_filter.py deleted file mode 100644 index 72cea59f..00000000 --- a/stac_fastapi/elasticsearch/tests/extensions/test_filter.py +++ /dev/null @@ -1,402 +0,0 @@ -import json -import os -from os import listdir -from os.path import isfile, join - -import pytest - -THIS_DIR = os.path.dirname(os.path.abspath(__file__)) - - -@pytest.mark.asyncio -async def test_search_filters_post(app_client, ctx): - - filters = [] - pwd = f"{THIS_DIR}/cql2" - for fn in [fn for f in listdir(pwd) if isfile(fn := join(pwd, f))]: - with open(fn) as f: - filters.append(json.loads(f.read())) - - for _filter in filters: - resp = await app_client.post("/search", json={"filter": _filter}) - assert resp.status_code == 200 - - -@pytest.mark.asyncio -async def test_search_filter_extension_eq_get(app_client, ctx): - resp = await app_client.get( - '/search?filter-lang=cql2-json&filter={"op":"=","args":[{"property":"id"},"test-item"]}' - ) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 1 - - -@pytest.mark.asyncio -async def test_search_filter_extension_eq_post(app_client, ctx): - params = {"filter": {"op": "=", "args": [{"property": 
"id"}, ctx.item["id"]]}} - resp = await app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 1 - - -@pytest.mark.asyncio -async def test_search_filter_extension_gte_get(app_client, ctx): - # there's one item that can match, so one of these queries should match it and the other shouldn't - resp = await app_client.get( - '/search?filter-lang=cql2-json&filter={"op":"<=","args":[{"property": "properties.proj:epsg"},32756]}' - ) - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 1 - - resp = await app_client.get( - '/search?filter-lang=cql2-json&filter={"op":">","args":[{"property": "properties.proj:epsg"},32756]}' - ) - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 0 - - -@pytest.mark.asyncio -async def test_search_filter_extension_gte_post(app_client, ctx): - # there's one item that can match, so one of these queries should match it and the other shouldn't - params = { - "filter": { - "op": "<=", - "args": [ - {"property": "properties.proj:epsg"}, - ctx.item["properties"]["proj:epsg"], - ], - } - } - resp = await app_client.post("/search", json=params) - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 1 - - params = { - "filter": { - "op": ">", - "args": [ - {"property": "properties.proj:epsg"}, - ctx.item["properties"]["proj:epsg"], - ], - } - } - resp = await app_client.post("/search", json=params) - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 0 - - -@pytest.mark.asyncio -async def test_search_filter_ext_and_get(app_client, ctx): - resp = await app_client.get( - '/search?filter-lang=cql2-json&filter={"op":"and","args":[{"op":"<=","args":[{"property":"properties.proj:epsg"},32756]},{"op":"=","args":[{"property":"id"},"test-item"]}]}' - ) - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 1 - - -@pytest.mark.asyncio -async def 
test_search_filter_ext_and_get_id(app_client, ctx): - collection = ctx.item["collection"] - id = ctx.item["id"] - filter = f"id='{id}' AND collection='{collection}'" - resp = await app_client.get(f"/search?&filter={filter}") - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 1 - - -@pytest.mark.asyncio -async def test_search_filter_ext_and_get_cql2text_id(app_client, ctx): - collection = ctx.item["collection"] - id = ctx.item["id"] - filter = f"id='{id}' AND collection='{collection}'" - resp = await app_client.get(f"/search?filter-lang=cql2-text&filter={filter}") - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 1 - - -@pytest.mark.asyncio -async def test_search_filter_ext_and_get_cql2text_cloud_cover(app_client, ctx): - collection = ctx.item["collection"] - cloud_cover = ctx.item["properties"]["eo:cloud_cover"] - filter = f"cloud_cover={cloud_cover} AND collection='{collection}'" - resp = await app_client.get(f"/search?filter-lang=cql2-text&filter={filter}") - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 1 - - -@pytest.mark.asyncio -async def test_search_filter_ext_and_get_cql2text_cloud_cover_no_results( - app_client, ctx -): - collection = ctx.item["collection"] - cloud_cover = ctx.item["properties"]["eo:cloud_cover"] + 1 - filter = f"cloud_cover={cloud_cover} AND collection='{collection}'" - resp = await app_client.get(f"/search?filter-lang=cql2-text&filter={filter}") - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 0 - - -@pytest.mark.asyncio -async def test_search_filter_ext_and_post(app_client, ctx): - params = { - "filter": { - "op": "and", - "args": [ - { - "op": "<=", - "args": [ - {"property": "properties.proj:epsg"}, - ctx.item["properties"]["proj:epsg"], - ], - }, - {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}, - ], - } - } - resp = await app_client.post("/search", json=params) - - assert resp.status_code == 200 - assert 
len(resp.json()["features"]) == 1 - - -@pytest.mark.asyncio -async def test_search_filter_extension_floats_get(app_client, ctx): - resp = await app_client.get( - """/search?filter-lang=cql2-json&filter={"op":"and","args":[{"op":"=","args":[{"property":"id"},"test-item"]},{"op":">","args":[{"property":"properties.view:sun_elevation"},"-37.30891534"]},{"op":"<","args":[{"property":"properties.view:sun_elevation"},"-37.30691534"]}]}""" - ) - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 1 - - resp = await app_client.get( - """/search?filter-lang=cql2-json&filter={"op":"and","args":[{"op":"=","args":[{"property":"id"},"test-item-7"]},{"op":">","args":[{"property":"properties.view:sun_elevation"},"-37.30891534"]},{"op":"<","args":[{"property":"properties.view:sun_elevation"},"-37.30691534"]}]}""" - ) - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 0 - - resp = await app_client.get( - """/search?filter-lang=cql2-json&filter={"op":"and","args":[{"op":"=","args":[{"property":"id"},"test-item"]},{"op":">","args":[{"property":"properties.view:sun_elevation"},"-37.30591534"]},{"op":"<","args":[{"property":"properties.view:sun_elevation"},"-37.30491534"]}]}""" - ) - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 0 - - -@pytest.mark.asyncio -async def test_search_filter_extension_floats_post(app_client, ctx): - sun_elevation = ctx.item["properties"]["view:sun_elevation"] - - params = { - "filter": { - "op": "and", - "args": [ - {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}, - { - "op": ">", - "args": [ - {"property": "properties.view:sun_elevation"}, - sun_elevation - 0.01, - ], - }, - { - "op": "<", - "args": [ - {"property": "properties.view:sun_elevation"}, - sun_elevation + 0.01, - ], - }, - ], - } - } - resp = await app_client.post("/search", json=params) - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 1 - - -@pytest.mark.asyncio -async def 
test_search_filter_extension_wildcard_cql2(app_client, ctx): - single_char = ctx.item["id"][:-1] + "_" - multi_char = ctx.item["id"][:-3] + "%" - - params = { - "filter": { - "op": "and", - "args": [ - {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}, - { - "op": "like", - "args": [ - {"property": "id"}, - single_char, - ], - }, - { - "op": "like", - "args": [ - {"property": "id"}, - multi_char, - ], - }, - ], - } - } - - resp = await app_client.post("/search", json=params) - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 1 - - -@pytest.mark.asyncio -async def test_search_filter_extension_wildcard_es(app_client, ctx): - single_char = ctx.item["id"][:-1] + "?" - multi_char = ctx.item["id"][:-3] + "*" - - params = { - "filter": { - "op": "and", - "args": [ - {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}, - { - "op": "like", - "args": [ - {"property": "id"}, - single_char, - ], - }, - { - "op": "like", - "args": [ - {"property": "id"}, - multi_char, - ], - }, - ], - } - } - - resp = await app_client.post("/search", json=params) - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 1 - - -@pytest.mark.asyncio -async def test_search_filter_extension_escape_chars(app_client, ctx): - esc_chars = ( - ctx.item["properties"]["landsat:product_id"].replace("_", "\\_")[:-1] + "_" - ) - - params = { - "filter": { - "op": "and", - "args": [ - {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}, - { - "op": "like", - "args": [ - {"property": "properties.landsat:product_id"}, - esc_chars, - ], - }, - ], - } - } - - resp = await app_client.post("/search", json=params) - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 1 - - -@pytest.mark.asyncio -async def test_search_filter_extension_in(app_client, ctx): - product_id = ctx.item["properties"]["landsat:product_id"] - - params = { - "filter": { - "op": "and", - "args": [ - {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}, - { 
- "op": "in", - "args": [ - {"property": "properties.landsat:product_id"}, - [product_id], - ], - }, - ], - } - } - - resp = await app_client.post("/search", json=params) - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 1 - - -@pytest.mark.asyncio -async def test_search_filter_extension_in_no_list(app_client, ctx): - product_id = ctx.item["properties"]["landsat:product_id"] - - params = { - "filter": { - "op": "and", - "args": [ - {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}, - { - "op": "in", - "args": [ - {"property": "properties.landsat:product_id"}, - product_id, - ], - }, - ], - } - } - - resp = await app_client.post("/search", json=params) - - assert resp.status_code == 400 - assert resp.json() == { - "detail": f"Error with cql2_json filter: Arg {product_id} is not a list" - } - - -@pytest.mark.asyncio -async def test_search_filter_extension_between(app_client, ctx): - sun_elevation = ctx.item["properties"]["view:sun_elevation"] - - params = { - "filter": { - "op": "and", - "args": [ - {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}, - { - "op": "between", - "args": [ - {"property": "properties.view:sun_elevation"}, - sun_elevation - 0.01, - sun_elevation + 0.01, - ], - }, - ], - } - } - resp = await app_client.post("/search", json=params) - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 1 diff --git a/stac_fastapi/elasticsearch/tests/resources/__init__.py b/stac_fastapi/elasticsearch/tests/resources/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/stac_fastapi/elasticsearch/tests/resources/test_collection.py b/stac_fastapi/elasticsearch/tests/resources/test_collection.py deleted file mode 100644 index 9061ac1e..00000000 --- a/stac_fastapi/elasticsearch/tests/resources/test_collection.py +++ /dev/null @@ -1,123 +0,0 @@ -import uuid - -import pystac -import pytest - -from ..conftest import create_collection, delete_collections_and_items, refresh_indices - - 
-@pytest.mark.asyncio -async def test_create_and_delete_collection(app_client, load_test_data): - """Test creation and deletion of a collection""" - test_collection = load_test_data("test_collection.json") - test_collection["id"] = "test" - - resp = await app_client.post("/collections", json=test_collection) - assert resp.status_code == 200 - - resp = await app_client.delete(f"/collections/{test_collection['id']}") - assert resp.status_code == 204 - - -@pytest.mark.asyncio -async def test_create_collection_conflict(app_client, ctx): - """Test creation of a collection which already exists""" - # This collection ID is created in the fixture, so this should be a conflict - resp = await app_client.post("/collections", json=ctx.collection) - assert resp.status_code == 409 - - -@pytest.mark.asyncio -async def test_delete_missing_collection(app_client): - """Test deletion of a collection which does not exist""" - resp = await app_client.delete("/collections/missing-collection") - assert resp.status_code == 404 - - -@pytest.mark.asyncio -async def test_update_collection_already_exists(ctx, app_client): - """Test updating a collection which already exists""" - ctx.collection["keywords"].append("test") - resp = await app_client.put("/collections", json=ctx.collection) - assert resp.status_code == 200 - - resp = await app_client.get(f"/collections/{ctx.collection['id']}") - assert resp.status_code == 200 - resp_json = resp.json() - assert "test" in resp_json["keywords"] - - -@pytest.mark.asyncio -async def test_update_new_collection(app_client, load_test_data): - """Test updating a collection which does not exist (same as creation)""" - test_collection = load_test_data("test_collection.json") - test_collection["id"] = "new-test-collection" - - resp = await app_client.put("/collections", json=test_collection) - assert resp.status_code == 404 - - -@pytest.mark.asyncio -async def test_collection_not_found(app_client): - """Test read a collection which does not exist""" - resp = 
await app_client.get("/collections/does-not-exist") - assert resp.status_code == 404 - - -@pytest.mark.asyncio -async def test_returns_valid_collection(ctx, app_client): - """Test validates fetched collection with jsonschema""" - resp = await app_client.put("/collections", json=ctx.collection) - assert resp.status_code == 200 - - resp = await app_client.get(f"/collections/{ctx.collection['id']}") - assert resp.status_code == 200 - resp_json = resp.json() - - # Mock root to allow validation - mock_root = pystac.Catalog( - id="test", description="test desc", href="https://example.com" - ) - collection = pystac.Collection.from_dict( - resp_json, root=mock_root, preserve_dict=False - ) - collection.validate() - - -@pytest.mark.asyncio -async def test_pagination_collection(app_client, ctx, txn_client): - """Test collection pagination links""" - - # Clear existing collections if necessary - await delete_collections_and_items(txn_client) - - # Ingest 6 collections - ids = set() - for _ in range(6): - ctx.collection["id"] = str(uuid.uuid4()) - await create_collection(txn_client, collection=ctx.collection) - ids.add(ctx.collection["id"]) - - await refresh_indices(txn_client) - - # Paginate through all 6 collections with a limit of 1 - collection_ids = set() - page = await app_client.get("/collections", params={"limit": 1}) - while True: - page_data = page.json() - assert ( - len(page_data["collections"]) <= 1 - ) # Each page should have 1 or 0 collections - collection_ids.update(coll["id"] for coll in page_data["collections"]) - - next_link = next( - (link for link in page_data["links"] if link["rel"] == "next"), None - ) - if not next_link: - break # No more pages - - href = next_link["href"][len("http://test-server") :] - page = await app_client.get(href) - - # Confirm we have paginated through all collections - assert collection_ids == ids diff --git a/stac_fastapi/elasticsearch/tests/resources/test_conformance.py 
b/stac_fastapi/elasticsearch/tests/resources/test_conformance.py deleted file mode 100644 index d93d8b81..00000000 --- a/stac_fastapi/elasticsearch/tests/resources/test_conformance.py +++ /dev/null @@ -1,72 +0,0 @@ -import urllib.parse - -import pytest -import pytest_asyncio - - -@pytest_asyncio.fixture -async def response(app_client): - return await app_client.get("/") - - -@pytest.fixture -def response_json(response): - return response.json() - - -def get_link(landing_page, rel_type): - return next( - filter(lambda link: link["rel"] == rel_type, landing_page["links"]), None - ) - - -@pytest.mark.asyncio -async def test_landing_page_health(response): - """Test landing page""" - assert response.status_code == 200 - assert response.headers["content-type"] == "application/json" - - -# Parameters for test_landing_page_links test below. -# Each tuple has the following values (in this order): -# - Rel type of link to test -# - Expected MIME/Media Type -# - Expected relative path -link_tests = [ - ("root", "application/json", "/"), - ("conformance", "application/json", "/conformance"), - ("service-doc", "text/html", "/api.html"), - ("service-desc", "application/vnd.oai.openapi+json;version=3.0", "/api"), -] - - -@pytest.mark.asyncio -@pytest.mark.parametrize("rel_type,expected_media_type,expected_path", link_tests) -async def test_landing_page_links( - response_json, app_client, rel_type, expected_media_type, expected_path -): - link = get_link(response_json, rel_type) - - assert link is not None, f"Missing {rel_type} link in landing page" - assert link.get("type") == expected_media_type - - link_path = urllib.parse.urlsplit(link.get("href")).path - assert link_path == expected_path - - resp = await app_client.get(link_path) - assert resp.status_code == 200 - - -# This endpoint currently returns a 404 for empty result sets, but testing for this response -# code here seems meaningless since it would be the same as if the endpoint did not exist. 
Once -# https://github.com/stac-utils/stac-fastapi/pull/227 has been merged we can add this to the -# parameterized tests above. -@pytest.mark.asyncio -async def test_search_link(response_json): - search_link = get_link(response_json, "search") - - assert search_link is not None - assert search_link.get("type") == "application/geo+json" - - search_path = urllib.parse.urlsplit(search_link.get("href")).path - assert search_path == "/search" diff --git a/stac_fastapi/elasticsearch/tests/resources/test_item.py b/stac_fastapi/elasticsearch/tests/resources/test_item.py deleted file mode 100644 index e62da8b8..00000000 --- a/stac_fastapi/elasticsearch/tests/resources/test_item.py +++ /dev/null @@ -1,804 +0,0 @@ -import json -import os -import uuid -from copy import deepcopy -from datetime import datetime, timedelta -from random import randint -from urllib.parse import parse_qs, urlparse, urlsplit - -import ciso8601 -import pystac -import pytest -from geojson_pydantic.geometries import Polygon -from pystac.utils import datetime_to_str - -from stac_fastapi.core.core import CoreClient -from stac_fastapi.core.datetime_utils import now_to_rfc3339_str -from stac_fastapi.elasticsearch.database_logic import DatabaseLogic -from stac_fastapi.types.core import LandingPageMixin - -from ..conftest import create_item, refresh_indices - - -def rfc3339_str_to_datetime(s: str) -> datetime: - return ciso8601.parse_rfc3339(s) - - -database_logic = DatabaseLogic() - - -@pytest.mark.asyncio -async def test_create_and_delete_item(app_client, ctx, txn_client): - """Test creation and deletion of a single item (transactions extension)""" - - test_item = ctx.item - - resp = await app_client.get( - f"/collections/{test_item['collection']}/items/{test_item['id']}" - ) - assert resp.status_code == 200 - - resp = await app_client.delete( - f"/collections/{test_item['collection']}/items/{test_item['id']}" - ) - assert resp.status_code == 204 - - await refresh_indices(txn_client) - - resp = await 
app_client.get( - f"/collections/{test_item['collection']}/items/{test_item['id']}" - ) - assert resp.status_code == 404 - - -@pytest.mark.asyncio -async def test_create_item_conflict(app_client, ctx): - """Test creation of an item which already exists (transactions extension)""" - - test_item = ctx.item - - resp = await app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 409 - - -@pytest.mark.asyncio -async def test_delete_missing_item(app_client, load_test_data): - """Test deletion of an item which does not exist (transactions extension)""" - test_item = load_test_data("test_item.json") - resp = await app_client.delete( - f"/collections/{test_item['collection']}/items/hijosh" - ) - assert resp.status_code == 404 - - -@pytest.mark.asyncio -async def test_create_item_missing_collection(app_client, ctx): - """Test creation of an item without a parent collection (transactions extension)""" - ctx.item["collection"] = "stac_is_cool" - resp = await app_client.post( - f"/collections/{ctx.item['collection']}/items", json=ctx.item - ) - assert resp.status_code == 404 - - -@pytest.mark.asyncio -async def test_create_uppercase_collection_with_item(app_client, ctx, txn_client): - """Test creation of a collection and item with uppercase collection ID (transactions extension)""" - collection_id = "UPPERCASE" - ctx.item["collection"] = collection_id - ctx.collection["id"] = collection_id - resp = await app_client.post("/collections", json=ctx.collection) - assert resp.status_code == 200 - await refresh_indices(txn_client) - resp = await app_client.post(f"/collections/{collection_id}/items", json=ctx.item) - assert resp.status_code == 200 - - -@pytest.mark.asyncio -async def test_update_item_already_exists(app_client, ctx): - """Test updating an item which already exists (transactions extension)""" - - assert ctx.item["properties"]["gsd"] != 16 - ctx.item["properties"]["gsd"] = 16 - await app_client.put( - 
f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}", json=ctx.item - ) - resp = await app_client.get( - f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}" - ) - updated_item = resp.json() - assert updated_item["properties"]["gsd"] == 16 - - await app_client.delete( - f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}" - ) - - -@pytest.mark.asyncio -async def test_update_new_item(app_client, ctx): - """Test updating an item which does not exist (transactions extension)""" - test_item = ctx.item - test_item["id"] = "a" - - resp = await app_client.put( - f"/collections/{test_item['collection']}/items/{test_item['id']}", - json=test_item, - ) - assert resp.status_code == 404 - - -@pytest.mark.asyncio -async def test_update_item_missing_collection(app_client, ctx): - """Test updating an item without a parent collection (transactions extension)""" - # Try to update collection of the item - ctx.item["collection"] = "stac_is_cool" - resp = await app_client.put( - f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}", json=ctx.item - ) - assert resp.status_code == 404 - - -@pytest.mark.asyncio -async def test_update_item_geometry(app_client, ctx): - ctx.item["id"] = "update_test_item_1" - - # Create the item - resp = await app_client.post( - f"/collections/{ctx.item['collection']}/items", json=ctx.item - ) - assert resp.status_code == 200 - - new_coordinates = [ - [ - [142.15052873427666, -33.82243006904891], - [140.1000346138806, -34.257132625788756], - [139.5776607193635, -32.514709769700254], - [141.6262528041627, -32.08081674221862], - [142.15052873427666, -33.82243006904891], - ] - ] - - # Update the geometry of the item - ctx.item["geometry"]["coordinates"] = new_coordinates - resp = await app_client.put( - f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}", json=ctx.item - ) - assert resp.status_code == 200 - - # Fetch the updated item - resp = await app_client.get( - 
f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}" - ) - assert resp.status_code == 200 - assert resp.json()["geometry"]["coordinates"] == new_coordinates - - -@pytest.mark.asyncio -async def test_get_item(app_client, ctx): - """Test read an item by id (core)""" - get_item = await app_client.get( - f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}" - ) - assert get_item.status_code == 200 - - -@pytest.mark.asyncio -async def test_returns_valid_item(app_client, ctx): - """Test validates fetched item with jsonschema""" - test_item = ctx.item - get_item = await app_client.get( - f"/collections/{test_item['collection']}/items/{test_item['id']}" - ) - assert get_item.status_code == 200 - item_dict = get_item.json() - # Mock root to allow validation - mock_root = pystac.Catalog( - id="test", description="test desc", href="https://example.com" - ) - item = pystac.Item.from_dict(item_dict, preserve_dict=False, root=mock_root) - item.validate() - - -@pytest.mark.asyncio -async def test_get_item_collection(app_client, ctx, txn_client): - """Test read an item collection (core)""" - item_count = randint(1, 4) - - for idx in range(item_count): - ctx.item["id"] = f'{ctx.item["id"]}{idx}' - await create_item(txn_client, ctx.item) - - resp = await app_client.get(f"/collections/{ctx.item['collection']}/items") - assert resp.status_code == 200 - - item_collection = resp.json() - if matched := item_collection["context"].get("matched"): - assert matched == item_count + 1 - - -@pytest.mark.asyncio -async def test_item_collection_filter_bbox(app_client, ctx): - item = ctx.item - collection = item["collection"] - - bbox = "100,-50,170,-20" - resp = await app_client.get( - f"/collections/{collection}/items", params={"bbox": bbox} - ) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 1 - - bbox = "1,2,3,4" - resp = await app_client.get( - f"/collections/{collection}/items", params={"bbox": bbox} - ) - assert 
resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 0 - - -@pytest.mark.asyncio -async def test_item_collection_filter_datetime(app_client, ctx): - item = ctx.item - collection = item["collection"] - - datetime_range = "2020-01-01T00:00:00.00Z/.." - resp = await app_client.get( - f"/collections/{collection}/items", params={"datetime": datetime_range} - ) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 1 - - datetime_range = "2018-01-01T00:00:00.00Z/2019-01-01T00:00:00.00Z" - resp = await app_client.get( - f"/collections/{collection}/items", params={"datetime": datetime_range} - ) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 0 - - -@pytest.mark.asyncio -@pytest.mark.skip(reason="Pagination extension not implemented") -async def test_pagination(app_client, load_test_data): - """Test item collection pagination (paging extension)""" - item_count = 10 - test_item = load_test_data("test_item.json") - - for idx in range(item_count): - _test_item = deepcopy(test_item) - _test_item["id"] = test_item["id"] + str(idx) - resp = await app_client.post( - f"/collections/{test_item['collection']}/items", json=_test_item - ) - assert resp.status_code == 200 - - resp = await app_client.get( - f"/collections/{test_item['collection']}/items", params={"limit": 3} - ) - assert resp.status_code == 200 - first_page = resp.json() - assert first_page["context"]["returned"] == 3 - - url_components = urlsplit(first_page["links"][0]["href"]) - resp = await app_client.get(f"{url_components.path}?{url_components.query}") - assert resp.status_code == 200 - second_page = resp.json() - assert second_page["context"]["returned"] == 3 - - -@pytest.mark.asyncio -async def test_item_timestamps(app_client, ctx): - """Test created and updated timestamps (common metadata)""" - # start_time = now_to_rfc3339_str() - - created_dt = ctx.item["properties"]["created"] - - # 
todo, check lower bound - # assert start_time < created_dt < now_to_rfc3339_str() - assert created_dt < now_to_rfc3339_str() - - # Confirm `updated` timestamp - ctx.item["properties"]["proj:epsg"] = 4326 - resp = await app_client.put( - f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}", - json=dict(ctx.item), - ) - assert resp.status_code == 200 - updated_item = resp.json() - - # Created shouldn't change on update - assert ctx.item["properties"]["created"] == updated_item["properties"]["created"] - assert updated_item["properties"]["updated"] > created_dt - - await app_client.delete( - f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}" - ) - - -@pytest.mark.asyncio -async def test_item_search_by_id_post(app_client, ctx, txn_client): - """Test POST search by item id (core)""" - ids = ["test1", "test2", "test3"] - for _id in ids: - ctx.item["id"] = _id - await create_item(txn_client, ctx.item) - - params = {"collections": [ctx.item["collection"]], "ids": ids} - resp = await app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == len(ids) - assert set([feat["id"] for feat in resp_json["features"]]) == set(ids) - - -@pytest.mark.asyncio -async def test_item_search_spatial_query_post(app_client, ctx): - """Test POST search with spatial query (core)""" - test_item = ctx.item - - params = { - "collections": [test_item["collection"]], - "intersects": test_item["geometry"], - } - resp = await app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert resp_json["features"][0]["id"] == test_item["id"] - - -@pytest.mark.asyncio -async def test_item_search_temporal_query_post(app_client, ctx): - """Test POST search with single-tailed spatio-temporal query (core)""" - - test_item = ctx.item - - item_date = rfc3339_str_to_datetime(test_item["properties"]["datetime"]) - item_date = item_date + timedelta(seconds=1) - - params 
= { - "collections": [test_item["collection"]], - "intersects": test_item["geometry"], - "datetime": f"../{datetime_to_str(item_date)}", - } - resp = await app_client.post("/search", json=params) - resp_json = resp.json() - assert resp_json["features"][0]["id"] == test_item["id"] - - -@pytest.mark.asyncio -async def test_item_search_temporal_window_post(app_client, ctx): - """Test POST search with two-tailed spatio-temporal query (core)""" - test_item = ctx.item - - item_date = rfc3339_str_to_datetime(test_item["properties"]["datetime"]) - item_date_before = item_date - timedelta(seconds=1) - item_date_after = item_date + timedelta(seconds=1) - - params = { - "collections": [test_item["collection"]], - "intersects": test_item["geometry"], - "datetime": f"{datetime_to_str(item_date_before)}/{datetime_to_str(item_date_after)}", - } - resp = await app_client.post("/search", json=params) - resp_json = resp.json() - assert resp_json["features"][0]["id"] == test_item["id"] - - -@pytest.mark.asyncio -@pytest.mark.skip(reason="KeyError: 'features") -async def test_item_search_temporal_open_window(app_client, ctx): - """Test POST search with open spatio-temporal query (core)""" - test_item = ctx.item - params = { - "collections": [test_item["collection"]], - "intersects": test_item["geometry"], - "datetime": "../..", - } - resp = await app_client.post("/search", json=params) - resp_json = resp.json() - assert resp_json["features"][0]["id"] == test_item["id"] - - -@pytest.mark.asyncio -async def test_item_search_by_id_get(app_client, ctx, txn_client): - """Test GET search by item id (core)""" - ids = ["test1", "test2", "test3"] - for _id in ids: - ctx.item["id"] = _id - await create_item(txn_client, ctx.item) - - params = {"collections": ctx.item["collection"], "ids": ",".join(ids)} - resp = await app_client.get("/search", params=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == len(ids) - assert set([feat["id"] for 
feat in resp_json["features"]]) == set(ids) - - -@pytest.mark.asyncio -async def test_item_search_bbox_get(app_client, ctx): - """Test GET search with spatial query (core)""" - params = { - "collections": ctx.item["collection"], - "bbox": ",".join([str(coord) for coord in ctx.item["bbox"]]), - } - resp = await app_client.get("/search", params=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert resp_json["features"][0]["id"] == ctx.item["id"] - - -@pytest.mark.asyncio -async def test_item_search_get_without_collections(app_client, ctx): - """Test GET search without specifying collections""" - - params = { - "bbox": ",".join([str(coord) for coord in ctx.item["bbox"]]), - } - resp = await app_client.get("/search", params=params) - assert resp.status_code == 200 - - -@pytest.mark.asyncio -async def test_item_search_get_with_non_existent_collections(app_client, ctx): - """Test GET search with non-existent collections""" - - params = {"collections": "non-existent-collection,or-this-one"} - resp = await app_client.get("/search", params=params) - assert resp.status_code == 200 - - -@pytest.mark.asyncio -async def test_item_search_temporal_window_get(app_client, ctx): - """Test GET search with spatio-temporal query (core)""" - test_item = ctx.item - item_date = rfc3339_str_to_datetime(test_item["properties"]["datetime"]) - item_date_before = item_date - timedelta(seconds=1) - item_date_after = item_date + timedelta(seconds=1) - - params = { - "collections": test_item["collection"], - "bbox": ",".join([str(coord) for coord in test_item["bbox"]]), - "datetime": f"{datetime_to_str(item_date_before)}/{datetime_to_str(item_date_after)}", - } - resp = await app_client.get("/search", params=params) - resp_json = resp.json() - assert resp_json["features"][0]["id"] == test_item["id"] - - -@pytest.mark.asyncio -async def test_item_search_post_without_collection(app_client, ctx): - """Test POST search without specifying a collection""" - test_item = ctx.item - 
params = { - "bbox": test_item["bbox"], - } - resp = await app_client.post("/search", json=params) - assert resp.status_code == 200 - - -@pytest.mark.asyncio -async def test_item_search_properties_es(app_client, ctx): - """Test POST search with JSONB query (query extension)""" - - test_item = ctx.item - # EPSG is a JSONB key - params = {"query": {"proj:epsg": {"gt": test_item["properties"]["proj:epsg"] + 1}}} - resp = await app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 0 - - -@pytest.mark.asyncio -async def test_item_search_properties_field(app_client): - """Test POST search indexed field with query (query extension)""" - - # Orientation is an indexed field - params = {"query": {"orientation": {"eq": "south"}}} - resp = await app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 0 - - -@pytest.mark.asyncio -async def test_item_search_get_query_extension(app_client, ctx): - """Test GET search with JSONB query (query extension)""" - - test_item = ctx.item - - params = { - "collections": [test_item["collection"]], - "query": json.dumps( - {"proj:epsg": {"gt": test_item["properties"]["proj:epsg"] + 1}} - ), - } - resp = await app_client.get("/search", params=params) - assert resp.json()["context"]["returned"] == 0 - - params["query"] = json.dumps( - {"proj:epsg": {"eq": test_item["properties"]["proj:epsg"]}} - ) - resp = await app_client.get("/search", params=params) - resp_json = resp.json() - assert resp_json["context"]["returned"] == 1 - assert ( - resp_json["features"][0]["properties"]["proj:epsg"] - == test_item["properties"]["proj:epsg"] - ) - - -@pytest.mark.asyncio -async def test_get_missing_item_collection(app_client): - """Test reading a collection which does not exist""" - resp = await app_client.get("/collections/invalid-collection/items") - assert resp.status_code == 404 - - 
-@pytest.mark.asyncio -async def test_pagination_item_collection(app_client, ctx, txn_client): - """Test item collection pagination links (paging extension)""" - ids = [ctx.item["id"]] - - # Ingest 5 items - for _ in range(5): - ctx.item["id"] = str(uuid.uuid4()) - await create_item(txn_client, item=ctx.item) - ids.append(ctx.item["id"]) - - # Paginate through all 6 items with a limit of 1 (expecting 7 requests) - page = await app_client.get( - f"/collections/{ctx.item['collection']}/items", params={"limit": 1} - ) - - item_ids = [] - idx = 0 - for idx in range(100): - page_data = page.json() - next_link = list(filter(lambda link: link["rel"] == "next", page_data["links"])) - if not next_link: - assert not page_data["features"] - break - - assert len(page_data["features"]) == 1 - item_ids.append(page_data["features"][0]["id"]) - - href = next_link[0]["href"][len("http://test-server") :] - page = await app_client.get(href) - - assert idx == len(ids) - - # Confirm we have paginated through all items - assert not set(item_ids) - set(ids) - - -@pytest.mark.asyncio -async def test_pagination_post(app_client, ctx, txn_client): - """Test POST pagination (paging extension)""" - ids = [ctx.item["id"]] - - # Ingest 5 items - for _ in range(5): - ctx.item["id"] = str(uuid.uuid4()) - await create_item(txn_client, ctx.item) - ids.append(ctx.item["id"]) - - # Paginate through all 5 items with a limit of 1 (expecting 5 requests) - request_body = {"ids": ids, "limit": 1} - page = await app_client.post("/search", json=request_body) - idx = 0 - item_ids = [] - for _ in range(100): - idx += 1 - page_data = page.json() - next_link = list(filter(lambda link: link["rel"] == "next", page_data["links"])) - if not next_link: - break - - item_ids.append(page_data["features"][0]["id"]) - - # Merge request bodies - request_body.update(next_link[0]["body"]) - page = await app_client.post("/search", json=request_body) - - # Our limit is 1, so we expect len(ids) number of requests before we run 
out of pages - assert idx == len(ids) + 1 - - # Confirm we have paginated through all items - assert not set(item_ids) - set(ids) - - -@pytest.mark.asyncio -async def test_pagination_token_idempotent(app_client, ctx, txn_client): - """Test that pagination tokens are idempotent (paging extension)""" - ids = [ctx.item["id"]] - - # Ingest 5 items - for _ in range(5): - ctx.item["id"] = str(uuid.uuid4()) - await create_item(txn_client, ctx.item) - ids.append(ctx.item["id"]) - - page = await app_client.get("/search", params={"ids": ",".join(ids), "limit": 3}) - page_data = page.json() - next_link = list(filter(lambda link: link["rel"] == "next", page_data["links"])) - - # Confirm token is idempotent - resp1 = await app_client.get( - "/search", params=parse_qs(urlparse(next_link[0]["href"]).query) - ) - resp2 = await app_client.get( - "/search", params=parse_qs(urlparse(next_link[0]["href"]).query) - ) - resp1_data = resp1.json() - resp2_data = resp2.json() - - # Two different requests with the same pagination token should return the same items - assert [item["id"] for item in resp1_data["features"]] == [ - item["id"] for item in resp2_data["features"] - ] - - -@pytest.mark.asyncio -async def test_field_extension_get_includes(app_client, ctx): - """Test GET search with included fields (fields extension)""" - test_item = ctx.item - params = { - "ids": [test_item["id"]], - "fields": "+properties.proj:epsg,+properties.gsd", - } - resp = await app_client.get("/search", params=params) - feat_properties = resp.json()["features"][0]["properties"] - assert not set(feat_properties) - {"proj:epsg", "gsd", "datetime"} - - -@pytest.mark.asyncio -async def test_field_extension_get_excludes(app_client, ctx): - """Test GET search with included fields (fields extension)""" - test_item = ctx.item - params = { - "ids": [test_item["id"]], - "fields": "-properties.proj:epsg,-properties.gsd", - } - resp = await app_client.get("/search", params=params) - resp_json = resp.json() - assert 
"proj:epsg" not in resp_json["features"][0]["properties"].keys() - assert "gsd" not in resp_json["features"][0]["properties"].keys() - - -@pytest.mark.asyncio -async def test_field_extension_post(app_client, ctx): - """Test POST search with included and excluded fields (fields extension)""" - test_item = ctx.item - body = { - "ids": [test_item["id"]], - "fields": { - "exclude": ["assets.B1"], - "include": ["properties.eo:cloud_cover", "properties.orientation"], - }, - } - - resp = await app_client.post("/search", json=body) - resp_json = resp.json() - assert "B1" not in resp_json["features"][0]["assets"].keys() - assert not set(resp_json["features"][0]["properties"]) - { - "orientation", - "eo:cloud_cover", - "datetime", - } - - -@pytest.mark.asyncio -async def test_field_extension_exclude_and_include(app_client, ctx): - """Test POST search including/excluding same field (fields extension)""" - test_item = ctx.item - body = { - "ids": [test_item["id"]], - "fields": { - "exclude": ["properties.eo:cloud_cover"], - "include": ["properties.eo:cloud_cover"], - }, - } - - resp = await app_client.post("/search", json=body) - resp_json = resp.json() - assert "eo:cloud_cover" not in resp_json["features"][0]["properties"] - - -@pytest.mark.asyncio -async def test_field_extension_exclude_default_includes(app_client, ctx): - """Test POST search excluding a forbidden field (fields extension)""" - test_item = ctx.item - body = {"ids": [test_item["id"]], "fields": {"exclude": ["gsd"]}} - - resp = await app_client.post("/search", json=body) - resp_json = resp.json() - assert "gsd" not in resp_json["features"][0] - - -@pytest.mark.asyncio -async def test_search_intersects_and_bbox(app_client): - """Test POST search intersects and bbox are mutually exclusive (core)""" - bbox = [-118, 34, -117, 35] - geoj = Polygon.from_bounds(*bbox).dict(exclude_none=True) - params = {"bbox": bbox, "intersects": geoj} - resp = await app_client.post("/search", json=params) - assert resp.status_code 
== 400 - - -@pytest.mark.asyncio -async def test_get_missing_item(app_client, load_test_data): - """Test read item which does not exist (transactions extension)""" - test_coll = load_test_data("test_collection.json") - resp = await app_client.get(f"/collections/{test_coll['id']}/items/invalid-item") - assert resp.status_code == 404 - - -@pytest.mark.asyncio -@pytest.mark.skip(reason="invalid queries not implemented") -async def test_search_invalid_query_field(app_client): - body = {"query": {"gsd": {"lt": 100}, "invalid-field": {"eq": 50}}} - resp = await app_client.post("/search", json=body) - assert resp.status_code == 400 - - -@pytest.mark.asyncio -async def test_search_bbox_errors(app_client): - body = {"query": {"bbox": [0]}} - resp = await app_client.post("/search", json=body) - assert resp.status_code == 400 - - body = {"query": {"bbox": [100.0, 0.0, 0.0, 105.0, 1.0, 1.0]}} - resp = await app_client.post("/search", json=body) - assert resp.status_code == 400 - - params = {"bbox": "100.0,0.0,0.0,105.0"} - resp = await app_client.get("/search", params=params) - assert resp.status_code == 400 - - -@pytest.mark.asyncio -async def test_conformance_classes_configurable(): - """Test conformance class configurability""" - landing = LandingPageMixin() - landing_page = landing._landing_page( - base_url="http://test/test", - conformance_classes=["this is a test"], - extension_schemas=[], - ) - assert landing_page["conformsTo"][0] == "this is a test" - - # Update environment to avoid key error on client instantiation - os.environ["READER_CONN_STRING"] = "testing" - os.environ["WRITER_CONN_STRING"] = "testing" - client = CoreClient( - database=database_logic, base_conformance_classes=["this is a test"] - ) - assert client.conformance_classes()[0] == "this is a test" - - -@pytest.mark.asyncio -async def test_search_datetime_validation_errors(app_client): - bad_datetimes = [ - "37-01-01T12:00:27.87Z", - "1985-13-12T23:20:50.52Z", - "1985-12-32T23:20:50.52Z", - 
"1985-12-01T25:20:50.52Z", - "1985-12-01T00:60:50.52Z", - "1985-12-01T00:06:61.52Z", - "1990-12-31T23:59:61Z", - "1986-04-12T23:20:50.52Z/1985-04-12T23:20:50.52Z", - ] - for dt in bad_datetimes: - body = {"query": {"datetime": dt}} - resp = await app_client.post("/search", json=body) - assert resp.status_code == 400 - - resp = await app_client.get("/search?datetime={}".format(dt)) - assert resp.status_code == 400 diff --git a/stac_fastapi/elasticsearch/tests/resources/test_mgmt.py b/stac_fastapi/elasticsearch/tests/resources/test_mgmt.py deleted file mode 100644 index 2b7d9728..00000000 --- a/stac_fastapi/elasticsearch/tests/resources/test_mgmt.py +++ /dev/null @@ -1,13 +0,0 @@ -import pytest - - -@pytest.mark.asyncio -async def test_ping_no_param(app_client): - """ - Test ping endpoint with a mocked client. - Args: - app_client (TestClient): mocked client fixture - """ - res = await app_client.get("/_mgmt/ping") - assert res.status_code == 200 - assert res.json() == {"message": "PONG"} diff --git a/stac_fastapi/opensearch/tests/__init__.py b/stac_fastapi/opensearch/tests/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/stac_fastapi/opensearch/tests/api/__init__.py b/stac_fastapi/opensearch/tests/api/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/stac_fastapi/opensearch/tests/api/test_api.py b/stac_fastapi/opensearch/tests/api/test_api.py deleted file mode 100644 index 74f0bb55..00000000 --- a/stac_fastapi/opensearch/tests/api/test_api.py +++ /dev/null @@ -1,447 +0,0 @@ -import copy -import uuid -from datetime import datetime, timedelta - -import pytest - -from ..conftest import create_collection, create_item - -ROUTES = { - "GET /_mgmt/ping", - "GET /docs/oauth2-redirect", - "HEAD /docs/oauth2-redirect", - "GET /", - "GET /conformance", - "GET /api", - "GET /api.html", - "HEAD /api", - "HEAD /api.html", - "GET /queryables", - "GET /collections", - "GET /collections/{collection_id}", - "GET 
/collections/{collection_id}/queryables", - "GET /collections/{collection_id}/items", - "GET /collections/{collection_id}/items/{item_id}", - "GET /search", - "POST /search", - "DELETE /collections/{collection_id}", - "DELETE /collections/{collection_id}/items/{item_id}", - "POST /collections", - "POST /collections/{collection_id}/items", - "PUT /collections", - "PUT /collections/{collection_id}/items/{item_id}", -} - - -@pytest.mark.asyncio -async def test_post_search_content_type(app_client, ctx): - params = {"limit": 1} - resp = await app_client.post("/search", json=params) - assert resp.headers["content-type"] == "application/geo+json" - - -@pytest.mark.asyncio -async def test_get_search_content_type(app_client, ctx): - resp = await app_client.get("/search") - assert resp.headers["content-type"] == "application/geo+json" - - -@pytest.mark.asyncio -async def test_api_headers(app_client): - resp = await app_client.get("/api") - assert ( - resp.headers["content-type"] == "application/vnd.oai.openapi+json;version=3.0" - ) - assert resp.status_code == 200 - - -@pytest.mark.asyncio -async def test_router(app): - api_routes = set([f"{list(route.methods)[0]} {route.path}" for route in app.routes]) - assert len(api_routes - ROUTES) == 0 - - -@pytest.mark.asyncio -async def test_app_transaction_extension(app_client, ctx): - item = copy.deepcopy(ctx.item) - item["id"] = str(uuid.uuid4()) - resp = await app_client.post(f"/collections/{item['collection']}/items", json=item) - assert resp.status_code == 200 - - await app_client.delete(f"/collections/{item['collection']}/items/{item['id']}") - - -@pytest.mark.asyncio -async def test_app_search_response(app_client, ctx): - resp = await app_client.get("/search", params={"ids": ["test-item"]}) - assert resp.status_code == 200 - resp_json = resp.json() - - assert resp_json.get("type") == "FeatureCollection" - # stac_version and stac_extensions were removed in v1.0.0-beta.3 - assert resp_json.get("stac_version") is None - assert 
resp_json.get("stac_extensions") is None - - -@pytest.mark.asyncio -async def test_app_context_extension(app_client, ctx, txn_client): - test_item = ctx.item - test_item["id"] = "test-item-2" - test_item["collection"] = "test-collection-2" - test_collection = ctx.collection - test_collection["id"] = "test-collection-2" - - await create_collection(txn_client, test_collection) - await create_item(txn_client, test_item) - - resp = await app_client.get( - f"/collections/{test_collection['id']}/items/{test_item['id']}" - ) - assert resp.status_code == 200 - resp_json = resp.json() - assert resp_json["id"] == test_item["id"] - assert resp_json["collection"] == test_item["collection"] - - resp = await app_client.get(f"/collections/{test_collection['id']}") - assert resp.status_code == 200 - resp_json = resp.json() - assert resp_json["id"] == test_collection["id"] - - resp = await app_client.post("/search", json={"collections": ["test-collection-2"]}) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 1 - assert "context" in resp_json - assert resp_json["context"]["returned"] == 1 - if matched := resp_json["context"].get("matched"): - assert matched == 1 - - -@pytest.mark.asyncio -async def test_app_fields_extension(app_client, ctx, txn_client): - resp = await app_client.get("/search", params={"collections": ["test-collection"]}) - assert resp.status_code == 200 - resp_json = resp.json() - assert list(resp_json["features"][0]["properties"]) == ["datetime"] - - -@pytest.mark.asyncio -async def test_app_fields_extension_query(app_client, ctx, txn_client): - resp = await app_client.post( - "/search", - json={ - "query": {"proj:epsg": {"gte": ctx.item["properties"]["proj:epsg"]}}, - "collections": ["test-collection"], - }, - ) - assert resp.status_code == 200 - resp_json = resp.json() - assert list(resp_json["features"][0]["properties"]) == ["datetime", "proj:epsg"] - - -@pytest.mark.asyncio -async def 
test_app_fields_extension_no_properties_get(app_client, ctx, txn_client): - resp = await app_client.get( - "/search", params={"collections": ["test-collection"], "fields": "-properties"} - ) - assert resp.status_code == 200 - resp_json = resp.json() - assert "properties" not in resp_json["features"][0] - - -@pytest.mark.asyncio -async def test_app_fields_extension_no_properties_post(app_client, ctx, txn_client): - resp = await app_client.post( - "/search", - json={ - "collections": ["test-collection"], - "fields": {"exclude": ["properties"]}, - }, - ) - assert resp.status_code == 200 - resp_json = resp.json() - assert "properties" not in resp_json["features"][0] - - -@pytest.mark.asyncio -async def test_app_fields_extension_no_null_fields(app_client, ctx, txn_client): - resp = await app_client.get("/search", params={"collections": ["test-collection"]}) - assert resp.status_code == 200 - resp_json = resp.json() - # check if no null fields: https://github.com/stac-utils/stac-fastapi-elasticsearch/issues/166 - for feature in resp_json["features"]: - # assert "bbox" not in feature["geometry"] - for link in feature["links"]: - assert all(a not in link or link[a] is not None for a in ("title", "asset")) - for asset in feature["assets"]: - assert all( - a not in asset or asset[a] is not None - for a in ("start_datetime", "created") - ) - - -@pytest.mark.asyncio -async def test_app_fields_extension_return_all_properties(app_client, ctx, txn_client): - item = ctx.item - resp = await app_client.get( - "/search", params={"collections": ["test-collection"], "fields": "properties"} - ) - assert resp.status_code == 200 - resp_json = resp.json() - feature = resp_json["features"][0] - assert len(feature["properties"]) >= len(item["properties"]) - for expected_prop, expected_value in item["properties"].items(): - if expected_prop in ("datetime", "created", "updated"): - assert feature["properties"][expected_prop][0:19] == expected_value[0:19] - else: - assert 
feature["properties"][expected_prop] == expected_value - - -@pytest.mark.asyncio -async def test_app_query_extension_gt(app_client, ctx): - params = {"query": {"proj:epsg": {"gt": ctx.item["properties"]["proj:epsg"]}}} - resp = await app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 0 - - -@pytest.mark.asyncio -async def test_app_query_extension_gte(app_client, ctx): - params = {"query": {"proj:epsg": {"gte": ctx.item["properties"]["proj:epsg"]}}} - resp = await app_client.post("/search", json=params) - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 1 - - -@pytest.mark.asyncio -async def test_app_query_extension_limit_lt0(app_client): - assert (await app_client.post("/search", json={"limit": -1})).status_code == 400 - - -@pytest.mark.asyncio -async def test_app_query_extension_limit_gt10000(app_client): - resp = await app_client.post("/search", json={"limit": 10001}) - assert resp.status_code == 200 - assert resp.json()["context"]["limit"] == 10000 - - -@pytest.mark.asyncio -async def test_app_query_extension_limit_10000(app_client): - params = {"limit": 10000} - resp = await app_client.post("/search", json=params) - assert resp.status_code == 200 - - -@pytest.mark.asyncio -async def test_app_sort_extension_get_asc(app_client, txn_client, ctx): - first_item = ctx.item - item_date = datetime.strptime( - first_item["properties"]["datetime"], "%Y-%m-%dT%H:%M:%SZ" - ) - - second_item = dict(first_item) - second_item["id"] = "another-item" - another_item_date = item_date - timedelta(days=1) - second_item["properties"]["datetime"] = another_item_date.strftime( - "%Y-%m-%dT%H:%M:%SZ" - ) - await create_item(txn_client, second_item) - - resp = await app_client.get("/search?sortby=+properties.datetime") - assert resp.status_code == 200 - resp_json = resp.json() - assert resp_json["features"][1]["id"] == first_item["id"] - assert resp_json["features"][0]["id"] 
== second_item["id"] - - -@pytest.mark.asyncio -async def test_app_sort_extension_get_desc(app_client, txn_client, ctx): - first_item = ctx.item - item_date = datetime.strptime( - first_item["properties"]["datetime"], "%Y-%m-%dT%H:%M:%SZ" - ) - - second_item = dict(first_item) - second_item["id"] = "another-item" - another_item_date = item_date - timedelta(days=1) - second_item["properties"]["datetime"] = another_item_date.strftime( - "%Y-%m-%dT%H:%M:%SZ" - ) - await create_item(txn_client, second_item) - - resp = await app_client.get("/search?sortby=-properties.datetime") - assert resp.status_code == 200 - resp_json = resp.json() - assert resp_json["features"][0]["id"] == first_item["id"] - assert resp_json["features"][1]["id"] == second_item["id"] - - -@pytest.mark.asyncio -async def test_app_sort_extension_post_asc(app_client, txn_client, ctx): - first_item = ctx.item - item_date = datetime.strptime( - first_item["properties"]["datetime"], "%Y-%m-%dT%H:%M:%SZ" - ) - - second_item = dict(first_item) - second_item["id"] = "another-item" - another_item_date = item_date - timedelta(days=1) - second_item["properties"]["datetime"] = another_item_date.strftime( - "%Y-%m-%dT%H:%M:%SZ" - ) - await create_item(txn_client, second_item) - - params = { - "collections": [first_item["collection"]], - "sortby": [{"field": "properties.datetime", "direction": "asc"}], - } - resp = await app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert resp_json["features"][1]["id"] == first_item["id"] - assert resp_json["features"][0]["id"] == second_item["id"] - - -@pytest.mark.asyncio -async def test_app_sort_extension_post_desc(app_client, txn_client, ctx): - first_item = ctx.item - item_date = datetime.strptime( - first_item["properties"]["datetime"], "%Y-%m-%dT%H:%M:%SZ" - ) - - second_item = dict(first_item) - second_item["id"] = "another-item" - another_item_date = item_date - timedelta(days=1) - 
second_item["properties"]["datetime"] = another_item_date.strftime( - "%Y-%m-%dT%H:%M:%SZ" - ) - await create_item(txn_client, second_item) - - params = { - "collections": [first_item["collection"]], - "sortby": [{"field": "properties.datetime", "direction": "desc"}], - } - resp = await app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert resp_json["features"][0]["id"] == first_item["id"] - assert resp_json["features"][1]["id"] == second_item["id"] - - -@pytest.mark.asyncio -async def test_search_invalid_date(app_client, ctx): - params = { - "datetime": "2020-XX-01/2020-10-30", - "collections": [ctx.item["collection"]], - } - - resp = await app_client.post("/search", json=params) - assert resp.status_code == 400 - - -@pytest.mark.asyncio -async def test_search_point_intersects_get(app_client, ctx): - resp = await app_client.get( - '/search?intersects={"type":"Point","coordinates":[150.04,-33.14]}' - ) - - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 1 - - -@pytest.mark.asyncio -async def test_search_polygon_intersects_get(app_client, ctx): - resp = await app_client.get( - '/search?intersects={"type":"Polygon","coordinates":[[[149.04, -34.14],[149.04, -32.14],[151.04, -32.14],[151.04, -34.14],[149.04, -34.14]]]}' - ) - - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 1 - - -@pytest.mark.asyncio -async def test_search_point_intersects_post(app_client, ctx): - point = [150.04, -33.14] - intersects = {"type": "Point", "coordinates": point} - - params = { - "intersects": intersects, - "collections": [ctx.item["collection"]], - } - resp = await app_client.post("/search", json=params) - - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 1 - - -@pytest.mark.asyncio -async def test_search_point_does_not_intersect(app_client, ctx): - point = [15.04, -3.14] - intersects = 
{"type": "Point", "coordinates": point} - - params = { - "intersects": intersects, - "collections": [ctx.item["collection"]], - } - resp = await app_client.post("/search", json=params) - - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 0 - - -@pytest.mark.asyncio -async def test_datetime_non_interval(app_client, ctx): - dt_formats = [ - "2020-02-12T12:30:22+00:00", - "2020-02-12T12:30:22.00Z", - "2020-02-12T12:30:22Z", - "2020-02-12T12:30:22.00+00:00", - ] - - for dt in dt_formats: - params = { - "datetime": dt, - "collections": [ctx.item["collection"]], - } - - resp = await app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - # datetime is returned in this format "2020-02-12T12:30:22Z" - assert resp_json["features"][0]["properties"]["datetime"][0:19] == dt[0:19] - - -@pytest.mark.asyncio -async def test_bbox_3d(app_client, ctx): - australia_bbox = [106.343365, -47.199523, 0.1, 168.218365, -19.437288, 0.1] - params = { - "bbox": australia_bbox, - "collections": [ctx.item["collection"]], - } - resp = await app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 1 - - -@pytest.mark.asyncio -async def test_search_line_string_intersects(app_client, ctx): - line = [[150.04, -33.14], [150.22, -33.89]] - intersects = {"type": "LineString", "coordinates": line} - params = { - "intersects": intersects, - "collections": [ctx.item["collection"]], - } - - resp = await app_client.post("/search", json=params) - - assert resp.status_code == 200 - - resp_json = resp.json() - assert len(resp_json["features"]) == 1 diff --git a/stac_fastapi/opensearch/tests/clients/__init__.py b/stac_fastapi/opensearch/tests/clients/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/stac_fastapi/opensearch/tests/clients/test_elasticsearch.py 
b/stac_fastapi/opensearch/tests/clients/test_elasticsearch.py deleted file mode 100644 index 41fcf26d..00000000 --- a/stac_fastapi/opensearch/tests/clients/test_elasticsearch.py +++ /dev/null @@ -1,312 +0,0 @@ -import uuid -from copy import deepcopy -from typing import Callable - -import pytest -from stac_pydantic import Item - -from stac_fastapi.extensions.third_party.bulk_transactions import Items -from stac_fastapi.types.errors import ConflictError, NotFoundError - -from ..conftest import MockRequest, create_item - - -@pytest.mark.asyncio -async def test_create_collection(app_client, ctx, core_client, txn_client): - in_coll = deepcopy(ctx.collection) - in_coll["id"] = str(uuid.uuid4()) - await txn_client.create_collection(in_coll, request=MockRequest) - got_coll = await core_client.get_collection(in_coll["id"], request=MockRequest) - assert got_coll["id"] == in_coll["id"] - await txn_client.delete_collection(in_coll["id"]) - - -@pytest.mark.asyncio -async def test_create_collection_already_exists(app_client, ctx, txn_client): - data = deepcopy(ctx.collection) - - # change id to avoid elasticsearch duplicate key error - data["_id"] = str(uuid.uuid4()) - - with pytest.raises(ConflictError): - await txn_client.create_collection(data, request=MockRequest) - - await txn_client.delete_collection(data["id"]) - - -@pytest.mark.asyncio -async def test_update_collection( - core_client, - txn_client, - load_test_data: Callable, -): - collection_data = load_test_data("test_collection.json") - item_data = load_test_data("test_item.json") - - await txn_client.create_collection(collection_data, request=MockRequest) - await txn_client.create_item( - collection_id=collection_data["id"], - item=item_data, - request=MockRequest, - refresh=True, - ) - - collection_data["keywords"].append("new keyword") - await txn_client.update_collection(collection_data, request=MockRequest) - - coll = await core_client.get_collection(collection_data["id"], request=MockRequest) - assert "new 
keyword" in coll["keywords"] - - item = await core_client.get_item( - item_id=item_data["id"], - collection_id=collection_data["id"], - request=MockRequest, - ) - assert item["id"] == item_data["id"] - assert item["collection"] == item_data["collection"] - - await txn_client.delete_collection(collection_data["id"]) - - -@pytest.mark.asyncio -async def test_update_collection_id( - core_client, - txn_client, - load_test_data: Callable, -): - collection_data = load_test_data("test_collection.json") - item_data = load_test_data("test_item.json") - new_collection_id = "new-test-collection" - - await txn_client.create_collection(collection_data, request=MockRequest) - await txn_client.create_item( - collection_id=collection_data["id"], - item=item_data, - request=MockRequest, - refresh=True, - ) - - old_collection_id = collection_data["id"] - collection_data["id"] = new_collection_id - - await txn_client.update_collection( - collection=collection_data, - request=MockRequest( - query_params={ - "collection_id": old_collection_id, - "limit": "10", - } - ), - refresh=True, - ) - - with pytest.raises(NotFoundError): - await core_client.get_collection(old_collection_id, request=MockRequest) - - coll = await core_client.get_collection(collection_data["id"], request=MockRequest) - assert coll["id"] == new_collection_id - - with pytest.raises(NotFoundError): - await core_client.get_item( - item_id=item_data["id"], - collection_id=old_collection_id, - request=MockRequest, - ) - - item = await core_client.get_item( - item_id=item_data["id"], - collection_id=collection_data["id"], - request=MockRequest, - refresh=True, - ) - - assert item["id"] == item_data["id"] - assert item["collection"] == new_collection_id - - await txn_client.delete_collection(collection_data["id"]) - - -@pytest.mark.asyncio -async def test_delete_collection( - core_client, - txn_client, - load_test_data: Callable, -): - data = load_test_data("test_collection.json") - await txn_client.create_collection(data, 
request=MockRequest) - - await txn_client.delete_collection(data["id"]) - - with pytest.raises(NotFoundError): - await core_client.get_collection(data["id"], request=MockRequest) - - -@pytest.mark.asyncio -async def test_get_collection( - core_client, - txn_client, - load_test_data: Callable, -): - data = load_test_data("test_collection.json") - await txn_client.create_collection(data, request=MockRequest) - coll = await core_client.get_collection(data["id"], request=MockRequest) - assert coll["id"] == data["id"] - - await txn_client.delete_collection(data["id"]) - - -@pytest.mark.asyncio -async def test_get_item(app_client, ctx, core_client): - got_item = await core_client.get_item( - item_id=ctx.item["id"], - collection_id=ctx.item["collection"], - request=MockRequest, - ) - assert got_item["id"] == ctx.item["id"] - assert got_item["collection"] == ctx.item["collection"] - - -@pytest.mark.asyncio -async def test_get_collection_items(app_client, ctx, core_client, txn_client): - coll = ctx.collection - num_of_items_to_create = 5 - for _ in range(num_of_items_to_create): - item = deepcopy(ctx.item) - item["id"] = str(uuid.uuid4()) - await txn_client.create_item( - collection_id=item["collection"], - item=item, - request=MockRequest, - refresh=True, - ) - - fc = await core_client.item_collection(coll["id"], request=MockRequest()) - assert len(fc["features"]) == num_of_items_to_create + 1 # ctx.item - - for item in fc["features"]: - assert item["collection"] == coll["id"] - - -@pytest.mark.asyncio -async def test_create_item(ctx, core_client, txn_client): - resp = await core_client.get_item( - ctx.item["id"], ctx.item["collection"], request=MockRequest - ) - assert Item(**ctx.item).dict( - exclude={"links": ..., "properties": {"created", "updated"}} - ) == Item(**resp).dict(exclude={"links": ..., "properties": {"created", "updated"}}) - - -@pytest.mark.asyncio -async def test_create_item_already_exists(ctx, txn_client): - with pytest.raises(ConflictError): - await 
txn_client.create_item( - collection_id=ctx.item["collection"], - item=ctx.item, - request=MockRequest, - refresh=True, - ) - - -@pytest.mark.asyncio -async def test_update_item(ctx, core_client, txn_client): - ctx.item["properties"]["foo"] = "bar" - collection_id = ctx.item["collection"] - item_id = ctx.item["id"] - await txn_client.update_item( - collection_id=collection_id, item_id=item_id, item=ctx.item, request=MockRequest - ) - - updated_item = await core_client.get_item( - item_id, collection_id, request=MockRequest - ) - assert updated_item["properties"]["foo"] == "bar" - - -@pytest.mark.asyncio -async def test_update_geometry(ctx, core_client, txn_client): - new_coordinates = [ - [ - [142.15052873427666, -33.82243006904891], - [140.1000346138806, -34.257132625788756], - [139.5776607193635, -32.514709769700254], - [141.6262528041627, -32.08081674221862], - [142.15052873427666, -33.82243006904891], - ] - ] - - ctx.item["geometry"]["coordinates"] = new_coordinates - collection_id = ctx.item["collection"] - item_id = ctx.item["id"] - await txn_client.update_item( - collection_id=collection_id, item_id=item_id, item=ctx.item, request=MockRequest - ) - - updated_item = await core_client.get_item( - item_id, collection_id, request=MockRequest - ) - assert updated_item["geometry"]["coordinates"] == new_coordinates - - -@pytest.mark.asyncio -async def test_delete_item(ctx, core_client, txn_client): - await txn_client.delete_item(ctx.item["id"], ctx.item["collection"]) - - with pytest.raises(NotFoundError): - await core_client.get_item( - ctx.item["id"], ctx.item["collection"], request=MockRequest - ) - - -@pytest.mark.asyncio -async def test_bulk_item_insert(ctx, core_client, txn_client, bulk_txn_client): - items = {} - for _ in range(10): - _item = deepcopy(ctx.item) - _item["id"] = str(uuid.uuid4()) - items[_item["id"]] = _item - - # fc = es_core.item_collection(coll["id"], request=MockStarletteRequest) - # assert len(fc["features"]) == 0 - - 
bulk_txn_client.bulk_item_insert(Items(items=items), refresh=True) - - fc = await core_client.item_collection(ctx.collection["id"], request=MockRequest()) - assert len(fc["features"]) >= 10 - - # for item in items: - # es_transactions.delete_item( - # item["id"], item["collection"], request=MockStarletteRequest - # ) - - -@pytest.mark.asyncio -async def test_feature_collection_insert( - core_client, - txn_client, - ctx, -): - features = [] - for _ in range(10): - _item = deepcopy(ctx.item) - _item["id"] = str(uuid.uuid4()) - features.append(_item) - - feature_collection = {"type": "FeatureCollection", "features": features} - - await create_item(txn_client, feature_collection) - - fc = await core_client.item_collection(ctx.collection["id"], request=MockRequest()) - assert len(fc["features"]) >= 10 - - -@pytest.mark.asyncio -async def test_landing_page_no_collection_title(ctx, core_client, txn_client, app): - ctx.collection["id"] = "new_id" - del ctx.collection["title"] - await txn_client.create_collection(ctx.collection, request=MockRequest) - - landing_page = await core_client.landing_page(request=MockRequest(app=app)) - for link in landing_page["links"]: - if link["href"].split("/")[-1] == ctx.collection["id"]: - assert link["title"] diff --git a/stac_fastapi/opensearch/tests/conftest.py b/stac_fastapi/opensearch/tests/conftest.py deleted file mode 100644 index 8ebab272..00000000 --- a/stac_fastapi/opensearch/tests/conftest.py +++ /dev/null @@ -1,208 +0,0 @@ -import asyncio -import copy -import json -import os -from typing import Any, Callable, Dict, Optional - -import pytest -import pytest_asyncio -from httpx import AsyncClient - -from stac_fastapi.api.app import StacApi -from stac_fastapi.api.models import create_get_request_model, create_post_request_model -from stac_fastapi.core.core import ( - BulkTransactionsClient, - CoreClient, - TransactionsClient, -) -from stac_fastapi.core.extensions import QueryExtension -from stac_fastapi.extensions.core import ( # 
FieldsExtension, - ContextExtension, - FieldsExtension, - FilterExtension, - SortExtension, - TokenPaginationExtension, - TransactionExtension, -) -from stac_fastapi.opensearch.config import AsyncOpensearchSettings, OpensearchSettings -from stac_fastapi.opensearch.database_logic import ( - DatabaseLogic, - create_collection_index, -) -from stac_fastapi.types.config import Settings - -DATA_DIR = os.path.join(os.path.dirname(__file__), "data") - - -class Context: - def __init__(self, item, collection): - self.item = item - self.collection = collection - - -class MockRequest: - base_url = "http://test-server" - query_params = {} - - def __init__( - self, - method: str = "GET", - url: str = "XXXX", - app: Optional[Any] = None, - query_params: Dict[str, Any] = {"limit": "10"}, - ): - self.method = method - self.url = url - self.app = app - self.query_params = query_params - - -class TestSettings(AsyncOpensearchSettings): - class Config: - env_file = ".env.test" - - -settings = TestSettings() -Settings.set(settings) - - -@pytest.fixture(scope="session") -def event_loop(): - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - yield loop - loop.close() - - -def _load_file(filename: str) -> Dict: - with open(os.path.join(DATA_DIR, filename)) as file: - return json.load(file) - - -_test_item_prototype = _load_file("test_item.json") -_test_collection_prototype = _load_file("test_collection.json") - - -@pytest.fixture -def load_test_data() -> Callable[[str], Dict]: - return _load_file - - -@pytest.fixture -def test_item() -> Dict: - return copy.deepcopy(_test_item_prototype) - - -@pytest.fixture -def test_collection() -> Dict: - return copy.deepcopy(_test_collection_prototype) - - -async def create_collection(txn_client: TransactionsClient, collection: Dict) -> None: - await txn_client.create_collection( - dict(collection), request=MockRequest, refresh=True - ) - - -async def create_item(txn_client: TransactionsClient, item: Dict) -> None: - if "collection" in 
item: - await txn_client.create_item( - collection_id=item["collection"], - item=item, - request=MockRequest, - refresh=True, - ) - else: - await txn_client.create_item( - collection_id=item["features"][0]["collection"], - item=item, - request=MockRequest, - refresh=True, - ) - - -async def delete_collections_and_items(txn_client: TransactionsClient) -> None: - await refresh_indices(txn_client) - await txn_client.database.delete_items() - await txn_client.database.delete_collections() - - -async def refresh_indices(txn_client: TransactionsClient) -> None: - try: - await txn_client.database.client.indices.refresh(index="_all") - except Exception: - pass - - -@pytest_asyncio.fixture() -async def ctx(txn_client: TransactionsClient, test_collection, test_item): - # todo remove one of these when all methods use it - await delete_collections_and_items(txn_client) - - await create_collection(txn_client, test_collection) - await create_item(txn_client, test_item) - - yield Context(item=test_item, collection=test_collection) - - await delete_collections_and_items(txn_client) - - -database = DatabaseLogic() -settings = OpensearchSettings() - - -@pytest.fixture -def core_client(): - return CoreClient(database=database, session=None) - - -@pytest.fixture -def txn_client(): - return TransactionsClient(database=database, session=None, settings=settings) - - -@pytest.fixture -def bulk_txn_client(): - return BulkTransactionsClient(database=database, session=None, settings=settings) - - -@pytest_asyncio.fixture(scope="session") -async def app(): - settings = AsyncOpensearchSettings() - extensions = [ - TransactionExtension( - client=TransactionsClient( - database=database, session=None, settings=settings - ), - settings=settings, - ), - ContextExtension(), - SortExtension(), - FieldsExtension(), - QueryExtension(), - TokenPaginationExtension(), - FilterExtension(), - ] - - post_request_model = create_post_request_model(extensions) - - return StacApi( - settings=settings, - 
client=CoreClient( - database=database, - session=None, - extensions=extensions, - post_request_model=post_request_model, - ), - extensions=extensions, - search_get_request_model=create_get_request_model(extensions), - search_post_request_model=post_request_model, - ).app - - -@pytest_asyncio.fixture(scope="session") -async def app_client(app): - await create_collection_index() - - async with AsyncClient(app=app, base_url="http://test-server") as c: - yield c diff --git a/stac_fastapi/opensearch/tests/data/test_collection.json b/stac_fastapi/opensearch/tests/data/test_collection.json deleted file mode 100644 index 391b906c..00000000 --- a/stac_fastapi/opensearch/tests/data/test_collection.json +++ /dev/null @@ -1,99 +0,0 @@ -{ - "id": "test-collection", - "stac_extensions": ["https://stac-extensions.github.io/eo/v1.0.0/schema.json"], - "type": "Collection", - "description": "Landat 8 imagery radiometrically calibrated and orthorectified using gound points and Digital Elevation Model (DEM) data to correct relief displacement.", - "stac_version": "1.0.0", - "license": "PDDL-1.0", - "summaries": { - "platform": ["landsat-8"], - "instruments": ["oli", "tirs"], - "gsd": [30] - }, - "extent": { - "spatial": { - "bbox": [ - [ - -180.0, - -90.0, - 180.0, - 90.0 - ] - ] - }, - "temporal": { - "interval": [ - [ - "2013-06-01", - null - ] - ] - } - }, - "links": [ - { - "href": "http://localhost:8081/collections/landsat-8-l1", - "rel": "self", - "type": "application/json" - }, - { - "href": "http://localhost:8081/", - "rel": "parent", - "type": "application/json" - }, - { - "href": "http://localhost:8081/collections/landsat-8-l1/items", - "rel": "item", - "type": "application/geo+json" - }, - { - "href": "http://localhost:8081/", - "rel": "root", - "type": "application/json" - } - ], - "title": "Landsat 8 L1", - "keywords": [ - "landsat", - "earth observation", - "usgs" - ], - "providers": [ - { - "name": "USGS", - "roles": [ - "producer" - ], - "url": 
"https://landsat.usgs.gov/" - }, - { - "name": "Planet Labs", - "roles": [ - "processor" - ], - "url": "https://github.com/landsat-pds/landsat_ingestor" - }, - { - "name": "AWS", - "roles": [ - "host" - ], - "url": "https://landsatonaws.com/" - }, - { - "name": "Development Seed", - "roles": [ - "processor" - ], - "url": "https://github.com/sat-utils/sat-api" - }, - { - "name": "Earth Search by Element84", - "description": "API of Earth on AWS datasets", - "roles": [ - "host" - ], - "url": "https://element84.com" - } - ] -} \ No newline at end of file diff --git a/stac_fastapi/opensearch/tests/data/test_item.json b/stac_fastapi/opensearch/tests/data/test_item.json deleted file mode 100644 index 2b7fdd86..00000000 --- a/stac_fastapi/opensearch/tests/data/test_item.json +++ /dev/null @@ -1,505 +0,0 @@ -{ - "type": "Feature", - "id": "test-item", - "stac_version": "1.0.0", - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "geometry": { - "coordinates": [ - [ - [ - 152.15052873427666, - -33.82243006904891 - ], - [ - 150.1000346138806, - -34.257132625788756 - ], - [ - 149.5776607193635, - -32.514709769700254 - ], - [ - 151.6262528041627, - -32.08081674221862 - ], - [ - 152.15052873427666, - -33.82243006904891 - ] - ] - ], - "type": "Polygon" - }, - "properties": { - "datetime": "2020-02-12T12:30:22Z", - "landsat:scene_id": "LC82081612020043LGN00", - "landsat:row": "161", - "gsd": 15, - "eo:bands": [ - { - "gsd": 30, - "name": "B1", - "common_name": "coastal", - "center_wavelength": 0.44, - "full_width_half_max": 0.02 - }, - { - "gsd": 30, - "name": "B2", - "common_name": "blue", - "center_wavelength": 0.48, - "full_width_half_max": 0.06 - }, - { - "gsd": 30, - "name": "B3", - "common_name": "green", - "center_wavelength": 0.56, - "full_width_half_max": 0.06 - }, - { - "gsd": 30, - "name": "B4", - "common_name": "red", - "center_wavelength": 0.65, - 
"full_width_half_max": 0.04 - }, - { - "gsd": 30, - "name": "B5", - "common_name": "nir", - "center_wavelength": 0.86, - "full_width_half_max": 0.03 - }, - { - "gsd": 30, - "name": "B6", - "common_name": "swir16", - "center_wavelength": 1.6, - "full_width_half_max": 0.08 - }, - { - "gsd": 30, - "name": "B7", - "common_name": "swir22", - "center_wavelength": 2.2, - "full_width_half_max": 0.2 - }, - { - "gsd": 15, - "name": "B8", - "common_name": "pan", - "center_wavelength": 0.59, - "full_width_half_max": 0.18 - }, - { - "gsd": 30, - "name": "B9", - "common_name": "cirrus", - "center_wavelength": 1.37, - "full_width_half_max": 0.02 - }, - { - "gsd": 100, - "name": "B10", - "common_name": "lwir11", - "center_wavelength": 10.9, - "full_width_half_max": 0.8 - }, - { - "gsd": 100, - "name": "B11", - "common_name": "lwir12", - "center_wavelength": 12, - "full_width_half_max": 1 - } - ], - "landsat:revision": "00", - "view:sun_azimuth": -148.83296771, - "instrument": "OLI_TIRS", - "landsat:product_id": "LC08_L1GT_208161_20200212_20200212_01_RT", - "eo:cloud_cover": 0, - "landsat:tier": "RT", - "landsat:processing_level": "L1GT", - "landsat:column": "208", - "platform": "landsat-8", - "proj:epsg": 32756, - "view:sun_elevation": -37.30791534, - "view:off_nadir": 0, - "height": 2500, - "width": 2500 - }, - "bbox": [ - 149.57574, - -34.25796, - 152.15194, - -32.07915 - ], - "collection": "test-collection", - "assets": { - "ANG": { - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ANG.txt", - "type": "text/plain", - "title": "Angle Coefficients File", - "description": "Collection 2 Level-1 Angle Coefficients File (ANG)" - }, - "SR_B1": { - "gsd": 30, - "href": 
"https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B1.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Coastal/Aerosol Band (B1)", - "eo:bands": [ - { - "gsd": 30, - "name": "SR_B1", - "common_name": "coastal", - "center_wavelength": 0.44, - "full_width_half_max": 0.02 - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Collection 2 Level-2 Coastal/Aerosol Band (B1) Surface Reflectance", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - }, - "SR_B2": { - "gsd": 30, - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B2.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Blue Band (B2)", - "eo:bands": [ - { - "gsd": 30, - "name": "SR_B2", - "common_name": "blue", - "center_wavelength": 0.48, - "full_width_half_max": 0.06 - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Collection 2 Level-2 Blue Band (B2) Surface Reflectance", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - }, - "SR_B3": { - "gsd": 30, - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B3.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Green Band (B3)", - "eo:bands": [ - { - "gsd": 30, - "name": "SR_B3", - "common_name": "green", - "center_wavelength": 0.56, - "full_width_half_max": 0.06 - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Collection 2 Level-2 Green Band (B3) Surface Reflectance", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - }, - "SR_B4": { - "gsd": 30, - "href": 
"https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B4.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Red Band (B4)", - "eo:bands": [ - { - "gsd": 30, - "name": "SR_B4", - "common_name": "red", - "center_wavelength": 0.65, - "full_width_half_max": 0.04 - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Collection 2 Level-2 Red Band (B4) Surface Reflectance", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - }, - "SR_B5": { - "gsd": 30, - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B5.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Near Infrared Band 0.8 (B5)", - "eo:bands": [ - { - "gsd": 30, - "name": "SR_B5", - "common_name": "nir08", - "center_wavelength": 0.86, - "full_width_half_max": 0.03 - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Collection 2 Level-2 Near Infrared Band 0.8 (B5) Surface Reflectance", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - }, - "SR_B6": { - "gsd": 30, - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B6.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Short-wave Infrared Band 1.6 (B6)", - "eo:bands": [ - { - "gsd": 30, - "name": "SR_B6", - "common_name": "swir16", - "center_wavelength": 1.6, - "full_width_half_max": 0.08 - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Collection 2 Level-2 Short-wave Infrared Band 1.6 (B6) Surface Reflectance", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - }, - 
"SR_B7": { - "gsd": 30, - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B7.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Short-wave Infrared Band 2.2 (B7)", - "eo:bands": [ - { - "gsd": 30, - "name": "SR_B7", - "common_name": "swir22", - "center_wavelength": 2.2, - "full_width_half_max": 0.2 - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Collection 2 Level-2 Short-wave Infrared Band 2.2 (B7) Surface Reflectance", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - }, - "ST_QA": { - "gsd": 30, - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ST_QA.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Surface Temperature Quality Assessment Band", - "proj:shape": [ - 7731, - 7591 - ], - "description": "Landsat Collection 2 Level-2 Surface Temperature Band Surface Temperature Product", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - }, - "ST_B10": { - "gsd": 100, - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ST_B10.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Surface Temperature Band (B10)", - "eo:bands": [ - { - "gsd": 100, - "name": "ST_B10", - "common_name": "lwir11", - "center_wavelength": 10.9, - "full_width_half_max": 0.8 - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Landsat Collection 2 Level-2 Surface Temperature Band (B10) Surface Temperature Product", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - }, - "MTL.txt": { - "href": 
"https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_MTL.txt", - "type": "text/plain", - "title": "Product Metadata File", - "description": "Collection 2 Level-1 Product Metadata File (MTL)" - }, - "MTL.xml": { - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_MTL.xml", - "type": "application/xml", - "title": "Product Metadata File (xml)", - "description": "Collection 2 Level-1 Product Metadata File (xml)" - }, - "ST_DRAD": { - "gsd": 30, - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ST_DRAD.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Downwelled Radiance Band", - "eo:bands": [ - { - "gsd": 30, - "name": "ST_DRAD", - "description": "downwelled radiance" - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Landsat Collection 2 Level-2 Downwelled Radiance Band Surface Temperature Product", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - }, - "ST_EMIS": { - "gsd": 30, - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ST_EMIS.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Emissivity Band", - "eo:bands": [ - { - "gsd": 30, - "name": "ST_EMIS", - "description": "emissivity" - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Landsat Collection 2 Level-2 Emissivity Band Surface Temperature Product", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - }, - "ST_EMSD": { - "gsd": 30, - "href": 
"https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ST_EMSD.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Emissivity Standard Deviation Band", - "eo:bands": [ - { - "gsd": 30, - "name": "ST_EMSD", - "description": "emissivity standard deviation" - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Landsat Collection 2 Level-2 Emissivity Standard Deviation Band Surface Temperature Product", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - } - }, - "links": [ - { - "href": "http://localhost:8081/collections/landsat-8-l1/items/LC82081612020043", - "rel": "self", - "type": "application/geo+json" - }, - { - "href": "http://localhost:8081/collections/landsat-8-l1", - "rel": "parent", - "type": "application/json" - }, - { - "href": "http://localhost:8081/collections/landsat-8-l1", - "rel": "collection", - "type": "application/json" - }, - { - "href": "http://localhost:8081/", - "rel": "root", - "type": "application/json" - } - ] -} \ No newline at end of file diff --git a/stac_fastapi/opensearch/tests/extensions/cql2/example01.json b/stac_fastapi/opensearch/tests/extensions/cql2/example01.json deleted file mode 100644 index a70bd0d3..00000000 --- a/stac_fastapi/opensearch/tests/extensions/cql2/example01.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "op": "=", - "args": [ - { - "property": "scene_id" - }, - "LC82030282019133LGN00" - ] -} diff --git a/stac_fastapi/opensearch/tests/extensions/cql2/example04.json b/stac_fastapi/opensearch/tests/extensions/cql2/example04.json deleted file mode 100644 index e087504c..00000000 --- a/stac_fastapi/opensearch/tests/extensions/cql2/example04.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "op": "and", - "args": [ - { - "op": "<", - "args": [ - { - "property": "cloud_cover" - }, - 0.1 - ] - }, - { - "op": "=", - "args": [ - { - "property": 
"landsat:wrs_row" - }, - 28 - ] - }, - { - "op": "=", - "args": [ - { - "property": "landsat:wrs_path" - }, - 203 - ] - } - ] -} diff --git a/stac_fastapi/opensearch/tests/extensions/cql2/example05a.json b/stac_fastapi/opensearch/tests/extensions/cql2/example05a.json deleted file mode 100644 index b5bd7a94..00000000 --- a/stac_fastapi/opensearch/tests/extensions/cql2/example05a.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "op": "or", - "args": [ - { - "op": "=", - "args": [ - { - "property": "cloud_cover" - }, - 0.1 - ] - }, - { - "op": "=", - "args": [ - { - "property": "cloud_cover" - }, - 0.2 - ] - } - ] -} diff --git a/stac_fastapi/opensearch/tests/extensions/cql2/example06b.json b/stac_fastapi/opensearch/tests/extensions/cql2/example06b.json deleted file mode 100644 index fc2a7e56..00000000 --- a/stac_fastapi/opensearch/tests/extensions/cql2/example06b.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "op": "and", - "args": [ - { - "op": ">=", - "args": [ - { - "property": "cloud_cover" - }, - 0.1 - ] - }, - { - "op": "<=", - "args": [ - { - "property": "cloud_cover" - }, - 0.2 - ] - }, - { - "op": "=", - "args": [ - { - "property": "landsat:wrs_row" - }, - 28 - ] - }, - { - "op": "=", - "args": [ - { - "property": "landsat:wrs_path" - }, - 203 - ] - } - ] -} diff --git a/stac_fastapi/opensearch/tests/extensions/cql2/example08.json b/stac_fastapi/opensearch/tests/extensions/cql2/example08.json deleted file mode 100644 index 2f06413f..00000000 --- a/stac_fastapi/opensearch/tests/extensions/cql2/example08.json +++ /dev/null @@ -1,79 +0,0 @@ -{ - "op": "and", - "args": [ - { - "op": "=", - "args": [ - { - "property": "beamMode" - }, - "ScanSAR Narrow" - ] - }, - { - "op": "=", - "args": [ - { - "property": "swathDirection" - }, - "ascending" - ] - }, - { - "op": "=", - "args": [ - { - "property": "polarization" - }, - "HH+VV+HV+VH" - ] - }, - { - "op": "s_intersects", - "args": [ - { - "property": "footprint" - }, - { - "type": "Polygon", - "coordinates": [ - [ - [ - 
-77.117938, - 38.93686 - ], - [ - -77.040604, - 39.995648 - ], - [ - -76.910536, - 38.892912 - ], - [ - -77.039359, - 38.791753 - ], - [ - -77.047906, - 38.841462 - ], - [ - -77.034183, - 38.840655 - ], - [ - -77.033142, - 38.85749 - ], - [ - -77.117938, - 38.93686 - ] - ] - ] - } - ] - } - ] -} diff --git a/stac_fastapi/opensearch/tests/extensions/cql2/example09.json b/stac_fastapi/opensearch/tests/extensions/cql2/example09.json deleted file mode 100644 index 9f562fb4..00000000 --- a/stac_fastapi/opensearch/tests/extensions/cql2/example09.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "op": ">", - "args": [ - { - "property": "floors" - }, - 5 - ] -} diff --git a/stac_fastapi/opensearch/tests/extensions/cql2/example1.json b/stac_fastapi/opensearch/tests/extensions/cql2/example1.json deleted file mode 100644 index 48483548..00000000 --- a/stac_fastapi/opensearch/tests/extensions/cql2/example1.json +++ /dev/null @@ -1,39 +0,0 @@ - { - "op": "and", - "args": [ - { - "op": "=", - "args": [ - {"property": "id"}, - "LC08_L1TP_060247_20180905_20180912_01_T1_L1TP" - ] - }, - {"op": "=", "args": [{"property": "collection"}, "landsat8_l1tp"]}, - { - "op": ">", - "args": [ - {"property": "properties.datetime"}, - {"timestamp": "2022-04-29T00:00:00Z"} - ] - }, - {"op": "<", "args": [{"property": "properties.eo:cloud_cover"}, 10]}, - { - "op": "s_intersects", - "args": [ - {"property": "geometry"}, - { - "type": "Polygon", - "coordinates": [ - [ - [36.319836, 32.288087], - [36.320041, 32.288032], - [36.320210, 32.288402], - [36.320008, 32.288458], - [36.319836, 32.288087] - ] - ] - } - ] - } - ] -} \ No newline at end of file diff --git a/stac_fastapi/opensearch/tests/extensions/cql2/example10.json b/stac_fastapi/opensearch/tests/extensions/cql2/example10.json deleted file mode 100644 index 870303ea..00000000 --- a/stac_fastapi/opensearch/tests/extensions/cql2/example10.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "op": "<=", - "args": [ - { - "property": "taxes" - }, - 500 - ] -} diff 
--git a/stac_fastapi/opensearch/tests/extensions/cql2/example14.json b/stac_fastapi/opensearch/tests/extensions/cql2/example14.json deleted file mode 100644 index fad45d48..00000000 --- a/stac_fastapi/opensearch/tests/extensions/cql2/example14.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "op": "=", - "args": [ - { - "property": "swimming_pool" - }, - true - ] -} diff --git a/stac_fastapi/opensearch/tests/extensions/cql2/example15.json b/stac_fastapi/opensearch/tests/extensions/cql2/example15.json deleted file mode 100644 index 98f96797..00000000 --- a/stac_fastapi/opensearch/tests/extensions/cql2/example15.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "op": "and", - "args": [ - { - "op": ">", - "args": [ - { - "property": "floor" - }, - 5 - ] - }, - { - "op": "=", - "args": [ - { - "property": "swimming_pool" - }, - true - ] - } - ] -} diff --git a/stac_fastapi/opensearch/tests/extensions/cql2/example17.json b/stac_fastapi/opensearch/tests/extensions/cql2/example17.json deleted file mode 100644 index 9b215273..00000000 --- a/stac_fastapi/opensearch/tests/extensions/cql2/example17.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "op": "or", - "args": [ - { - "op": "and", - "args": [ - { - "op": ">", - "args": [ - { - "property": "floors" - }, - 5 - ] - }, - { - "op": "=", - "args": [ - { - "property": "material" - }, - "brick" - ] - } - ] - }, - { - "op": "=", - "args": [ - { - "property": "swimming_pool" - }, - true - ] - } - ] -} diff --git a/stac_fastapi/opensearch/tests/extensions/cql2/example18.json b/stac_fastapi/opensearch/tests/extensions/cql2/example18.json deleted file mode 100644 index 7087a151..00000000 --- a/stac_fastapi/opensearch/tests/extensions/cql2/example18.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "op": "or", - "args": [ - { - "op": "not", - "args": [ - { - "op": "<", - "args": [ - { - "property": "floors" - }, - 5 - ] - } - ] - }, - { - "op": "=", - "args": [ - { - "property": "swimming_pool" - }, - true - ] - } - ] -} diff --git 
a/stac_fastapi/opensearch/tests/extensions/cql2/example19.json b/stac_fastapi/opensearch/tests/extensions/cql2/example19.json deleted file mode 100755 index 0e4306fb..00000000 --- a/stac_fastapi/opensearch/tests/extensions/cql2/example19.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "op": "like", - "args": [ - { - "property": "scene_id" - }, - "LC82030282019133%" - ] -} diff --git a/stac_fastapi/opensearch/tests/extensions/cql2/example20.json b/stac_fastapi/opensearch/tests/extensions/cql2/example20.json deleted file mode 100755 index f7412fc0..00000000 --- a/stac_fastapi/opensearch/tests/extensions/cql2/example20.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "op": "like", - "args": [ - { - "property": "scene_id" - }, - "LC82030282019133LGN0_" - ] - } - \ No newline at end of file diff --git a/stac_fastapi/opensearch/tests/extensions/cql2/example21.json b/stac_fastapi/opensearch/tests/extensions/cql2/example21.json deleted file mode 100644 index 175b8732..00000000 --- a/stac_fastapi/opensearch/tests/extensions/cql2/example21.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "op": "and", - "args": [ - { - "op": "between", - "args": [ - { - "property": "cloud_cover" - }, - 0.1, - 0.2 - ] - }, - { - "op": "=", - "args": [ - { - "property": "landsat:wrs_row" - }, - 28 - ] - }, - { - "op": "=", - "args": [ - { - "property": "landsat:wrs_path" - }, - 203 - ] - } - ] -} \ No newline at end of file diff --git a/stac_fastapi/opensearch/tests/extensions/cql2/example22.json b/stac_fastapi/opensearch/tests/extensions/cql2/example22.json deleted file mode 100644 index 880c90c3..00000000 --- a/stac_fastapi/opensearch/tests/extensions/cql2/example22.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "op": "and", - "args": [ - { - "op": "in", - "args": [ - {"property": "id"}, - ["LC08_L1TP_060247_20180905_20180912_01_T1_L1TP"] - ] - }, - {"op": "=", "args": [{"property": "collection"}, "landsat8_l1tp"]} - ] -} \ No newline at end of file diff --git a/stac_fastapi/opensearch/tests/extensions/test_filter.py 
b/stac_fastapi/opensearch/tests/extensions/test_filter.py deleted file mode 100644 index 72cea59f..00000000 --- a/stac_fastapi/opensearch/tests/extensions/test_filter.py +++ /dev/null @@ -1,402 +0,0 @@ -import json -import os -from os import listdir -from os.path import isfile, join - -import pytest - -THIS_DIR = os.path.dirname(os.path.abspath(__file__)) - - -@pytest.mark.asyncio -async def test_search_filters_post(app_client, ctx): - - filters = [] - pwd = f"{THIS_DIR}/cql2" - for fn in [fn for f in listdir(pwd) if isfile(fn := join(pwd, f))]: - with open(fn) as f: - filters.append(json.loads(f.read())) - - for _filter in filters: - resp = await app_client.post("/search", json={"filter": _filter}) - assert resp.status_code == 200 - - -@pytest.mark.asyncio -async def test_search_filter_extension_eq_get(app_client, ctx): - resp = await app_client.get( - '/search?filter-lang=cql2-json&filter={"op":"=","args":[{"property":"id"},"test-item"]}' - ) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 1 - - -@pytest.mark.asyncio -async def test_search_filter_extension_eq_post(app_client, ctx): - params = {"filter": {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}} - resp = await app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 1 - - -@pytest.mark.asyncio -async def test_search_filter_extension_gte_get(app_client, ctx): - # there's one item that can match, so one of these queries should match it and the other shouldn't - resp = await app_client.get( - '/search?filter-lang=cql2-json&filter={"op":"<=","args":[{"property": "properties.proj:epsg"},32756]}' - ) - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 1 - - resp = await app_client.get( - '/search?filter-lang=cql2-json&filter={"op":">","args":[{"property": "properties.proj:epsg"},32756]}' - ) - - assert resp.status_code == 200 - assert 
len(resp.json()["features"]) == 0 - - -@pytest.mark.asyncio -async def test_search_filter_extension_gte_post(app_client, ctx): - # there's one item that can match, so one of these queries should match it and the other shouldn't - params = { - "filter": { - "op": "<=", - "args": [ - {"property": "properties.proj:epsg"}, - ctx.item["properties"]["proj:epsg"], - ], - } - } - resp = await app_client.post("/search", json=params) - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 1 - - params = { - "filter": { - "op": ">", - "args": [ - {"property": "properties.proj:epsg"}, - ctx.item["properties"]["proj:epsg"], - ], - } - } - resp = await app_client.post("/search", json=params) - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 0 - - -@pytest.mark.asyncio -async def test_search_filter_ext_and_get(app_client, ctx): - resp = await app_client.get( - '/search?filter-lang=cql2-json&filter={"op":"and","args":[{"op":"<=","args":[{"property":"properties.proj:epsg"},32756]},{"op":"=","args":[{"property":"id"},"test-item"]}]}' - ) - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 1 - - -@pytest.mark.asyncio -async def test_search_filter_ext_and_get_id(app_client, ctx): - collection = ctx.item["collection"] - id = ctx.item["id"] - filter = f"id='{id}' AND collection='{collection}'" - resp = await app_client.get(f"/search?&filter={filter}") - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 1 - - -@pytest.mark.asyncio -async def test_search_filter_ext_and_get_cql2text_id(app_client, ctx): - collection = ctx.item["collection"] - id = ctx.item["id"] - filter = f"id='{id}' AND collection='{collection}'" - resp = await app_client.get(f"/search?filter-lang=cql2-text&filter={filter}") - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 1 - - -@pytest.mark.asyncio -async def test_search_filter_ext_and_get_cql2text_cloud_cover(app_client, ctx): - collection = 
ctx.item["collection"] - cloud_cover = ctx.item["properties"]["eo:cloud_cover"] - filter = f"cloud_cover={cloud_cover} AND collection='{collection}'" - resp = await app_client.get(f"/search?filter-lang=cql2-text&filter={filter}") - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 1 - - -@pytest.mark.asyncio -async def test_search_filter_ext_and_get_cql2text_cloud_cover_no_results( - app_client, ctx -): - collection = ctx.item["collection"] - cloud_cover = ctx.item["properties"]["eo:cloud_cover"] + 1 - filter = f"cloud_cover={cloud_cover} AND collection='{collection}'" - resp = await app_client.get(f"/search?filter-lang=cql2-text&filter={filter}") - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 0 - - -@pytest.mark.asyncio -async def test_search_filter_ext_and_post(app_client, ctx): - params = { - "filter": { - "op": "and", - "args": [ - { - "op": "<=", - "args": [ - {"property": "properties.proj:epsg"}, - ctx.item["properties"]["proj:epsg"], - ], - }, - {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}, - ], - } - } - resp = await app_client.post("/search", json=params) - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 1 - - -@pytest.mark.asyncio -async def test_search_filter_extension_floats_get(app_client, ctx): - resp = await app_client.get( - """/search?filter-lang=cql2-json&filter={"op":"and","args":[{"op":"=","args":[{"property":"id"},"test-item"]},{"op":">","args":[{"property":"properties.view:sun_elevation"},"-37.30891534"]},{"op":"<","args":[{"property":"properties.view:sun_elevation"},"-37.30691534"]}]}""" - ) - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 1 - - resp = await app_client.get( - 
"""/search?filter-lang=cql2-json&filter={"op":"and","args":[{"op":"=","args":[{"property":"id"},"test-item-7"]},{"op":">","args":[{"property":"properties.view:sun_elevation"},"-37.30891534"]},{"op":"<","args":[{"property":"properties.view:sun_elevation"},"-37.30691534"]}]}""" - ) - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 0 - - resp = await app_client.get( - """/search?filter-lang=cql2-json&filter={"op":"and","args":[{"op":"=","args":[{"property":"id"},"test-item"]},{"op":">","args":[{"property":"properties.view:sun_elevation"},"-37.30591534"]},{"op":"<","args":[{"property":"properties.view:sun_elevation"},"-37.30491534"]}]}""" - ) - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 0 - - -@pytest.mark.asyncio -async def test_search_filter_extension_floats_post(app_client, ctx): - sun_elevation = ctx.item["properties"]["view:sun_elevation"] - - params = { - "filter": { - "op": "and", - "args": [ - {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}, - { - "op": ">", - "args": [ - {"property": "properties.view:sun_elevation"}, - sun_elevation - 0.01, - ], - }, - { - "op": "<", - "args": [ - {"property": "properties.view:sun_elevation"}, - sun_elevation + 0.01, - ], - }, - ], - } - } - resp = await app_client.post("/search", json=params) - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 1 - - -@pytest.mark.asyncio -async def test_search_filter_extension_wildcard_cql2(app_client, ctx): - single_char = ctx.item["id"][:-1] + "_" - multi_char = ctx.item["id"][:-3] + "%" - - params = { - "filter": { - "op": "and", - "args": [ - {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}, - { - "op": "like", - "args": [ - {"property": "id"}, - single_char, - ], - }, - { - "op": "like", - "args": [ - {"property": "id"}, - multi_char, - ], - }, - ], - } - } - - resp = await app_client.post("/search", json=params) - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 1 
- - -@pytest.mark.asyncio -async def test_search_filter_extension_wildcard_es(app_client, ctx): - single_char = ctx.item["id"][:-1] + "?" - multi_char = ctx.item["id"][:-3] + "*" - - params = { - "filter": { - "op": "and", - "args": [ - {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}, - { - "op": "like", - "args": [ - {"property": "id"}, - single_char, - ], - }, - { - "op": "like", - "args": [ - {"property": "id"}, - multi_char, - ], - }, - ], - } - } - - resp = await app_client.post("/search", json=params) - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 1 - - -@pytest.mark.asyncio -async def test_search_filter_extension_escape_chars(app_client, ctx): - esc_chars = ( - ctx.item["properties"]["landsat:product_id"].replace("_", "\\_")[:-1] + "_" - ) - - params = { - "filter": { - "op": "and", - "args": [ - {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}, - { - "op": "like", - "args": [ - {"property": "properties.landsat:product_id"}, - esc_chars, - ], - }, - ], - } - } - - resp = await app_client.post("/search", json=params) - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 1 - - -@pytest.mark.asyncio -async def test_search_filter_extension_in(app_client, ctx): - product_id = ctx.item["properties"]["landsat:product_id"] - - params = { - "filter": { - "op": "and", - "args": [ - {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}, - { - "op": "in", - "args": [ - {"property": "properties.landsat:product_id"}, - [product_id], - ], - }, - ], - } - } - - resp = await app_client.post("/search", json=params) - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 1 - - -@pytest.mark.asyncio -async def test_search_filter_extension_in_no_list(app_client, ctx): - product_id = ctx.item["properties"]["landsat:product_id"] - - params = { - "filter": { - "op": "and", - "args": [ - {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}, - { - "op": "in", - "args": [ - {"property": 
"properties.landsat:product_id"}, - product_id, - ], - }, - ], - } - } - - resp = await app_client.post("/search", json=params) - - assert resp.status_code == 400 - assert resp.json() == { - "detail": f"Error with cql2_json filter: Arg {product_id} is not a list" - } - - -@pytest.mark.asyncio -async def test_search_filter_extension_between(app_client, ctx): - sun_elevation = ctx.item["properties"]["view:sun_elevation"] - - params = { - "filter": { - "op": "and", - "args": [ - {"op": "=", "args": [{"property": "id"}, ctx.item["id"]]}, - { - "op": "between", - "args": [ - {"property": "properties.view:sun_elevation"}, - sun_elevation - 0.01, - sun_elevation + 0.01, - ], - }, - ], - } - } - resp = await app_client.post("/search", json=params) - - assert resp.status_code == 200 - assert len(resp.json()["features"]) == 1 diff --git a/stac_fastapi/opensearch/tests/resources/__init__.py b/stac_fastapi/opensearch/tests/resources/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/stac_fastapi/opensearch/tests/resources/test_collection.py b/stac_fastapi/opensearch/tests/resources/test_collection.py deleted file mode 100644 index 9061ac1e..00000000 --- a/stac_fastapi/opensearch/tests/resources/test_collection.py +++ /dev/null @@ -1,123 +0,0 @@ -import uuid - -import pystac -import pytest - -from ..conftest import create_collection, delete_collections_and_items, refresh_indices - - -@pytest.mark.asyncio -async def test_create_and_delete_collection(app_client, load_test_data): - """Test creation and deletion of a collection""" - test_collection = load_test_data("test_collection.json") - test_collection["id"] = "test" - - resp = await app_client.post("/collections", json=test_collection) - assert resp.status_code == 200 - - resp = await app_client.delete(f"/collections/{test_collection['id']}") - assert resp.status_code == 204 - - -@pytest.mark.asyncio -async def test_create_collection_conflict(app_client, ctx): - """Test creation of a collection which 
already exists""" - # This collection ID is created in the fixture, so this should be a conflict - resp = await app_client.post("/collections", json=ctx.collection) - assert resp.status_code == 409 - - -@pytest.mark.asyncio -async def test_delete_missing_collection(app_client): - """Test deletion of a collection which does not exist""" - resp = await app_client.delete("/collections/missing-collection") - assert resp.status_code == 404 - - -@pytest.mark.asyncio -async def test_update_collection_already_exists(ctx, app_client): - """Test updating a collection which already exists""" - ctx.collection["keywords"].append("test") - resp = await app_client.put("/collections", json=ctx.collection) - assert resp.status_code == 200 - - resp = await app_client.get(f"/collections/{ctx.collection['id']}") - assert resp.status_code == 200 - resp_json = resp.json() - assert "test" in resp_json["keywords"] - - -@pytest.mark.asyncio -async def test_update_new_collection(app_client, load_test_data): - """Test updating a collection which does not exist (same as creation)""" - test_collection = load_test_data("test_collection.json") - test_collection["id"] = "new-test-collection" - - resp = await app_client.put("/collections", json=test_collection) - assert resp.status_code == 404 - - -@pytest.mark.asyncio -async def test_collection_not_found(app_client): - """Test read a collection which does not exist""" - resp = await app_client.get("/collections/does-not-exist") - assert resp.status_code == 404 - - -@pytest.mark.asyncio -async def test_returns_valid_collection(ctx, app_client): - """Test validates fetched collection with jsonschema""" - resp = await app_client.put("/collections", json=ctx.collection) - assert resp.status_code == 200 - - resp = await app_client.get(f"/collections/{ctx.collection['id']}") - assert resp.status_code == 200 - resp_json = resp.json() - - # Mock root to allow validation - mock_root = pystac.Catalog( - id="test", description="test desc", 
href="https://example.com" - ) - collection = pystac.Collection.from_dict( - resp_json, root=mock_root, preserve_dict=False - ) - collection.validate() - - -@pytest.mark.asyncio -async def test_pagination_collection(app_client, ctx, txn_client): - """Test collection pagination links""" - - # Clear existing collections if necessary - await delete_collections_and_items(txn_client) - - # Ingest 6 collections - ids = set() - for _ in range(6): - ctx.collection["id"] = str(uuid.uuid4()) - await create_collection(txn_client, collection=ctx.collection) - ids.add(ctx.collection["id"]) - - await refresh_indices(txn_client) - - # Paginate through all 6 collections with a limit of 1 - collection_ids = set() - page = await app_client.get("/collections", params={"limit": 1}) - while True: - page_data = page.json() - assert ( - len(page_data["collections"]) <= 1 - ) # Each page should have 1 or 0 collections - collection_ids.update(coll["id"] for coll in page_data["collections"]) - - next_link = next( - (link for link in page_data["links"] if link["rel"] == "next"), None - ) - if not next_link: - break # No more pages - - href = next_link["href"][len("http://test-server") :] - page = await app_client.get(href) - - # Confirm we have paginated through all collections - assert collection_ids == ids diff --git a/stac_fastapi/opensearch/tests/resources/test_conformance.py b/stac_fastapi/opensearch/tests/resources/test_conformance.py deleted file mode 100644 index d93d8b81..00000000 --- a/stac_fastapi/opensearch/tests/resources/test_conformance.py +++ /dev/null @@ -1,72 +0,0 @@ -import urllib.parse - -import pytest -import pytest_asyncio - - -@pytest_asyncio.fixture -async def response(app_client): - return await app_client.get("/") - - -@pytest.fixture -def response_json(response): - return response.json() - - -def get_link(landing_page, rel_type): - return next( - filter(lambda link: link["rel"] == rel_type, landing_page["links"]), None - ) - - -@pytest.mark.asyncio -async def 
test_landing_page_health(response): - """Test landing page""" - assert response.status_code == 200 - assert response.headers["content-type"] == "application/json" - - -# Parameters for test_landing_page_links test below. -# Each tuple has the following values (in this order): -# - Rel type of link to test -# - Expected MIME/Media Type -# - Expected relative path -link_tests = [ - ("root", "application/json", "/"), - ("conformance", "application/json", "/conformance"), - ("service-doc", "text/html", "/api.html"), - ("service-desc", "application/vnd.oai.openapi+json;version=3.0", "/api"), -] - - -@pytest.mark.asyncio -@pytest.mark.parametrize("rel_type,expected_media_type,expected_path", link_tests) -async def test_landing_page_links( - response_json, app_client, rel_type, expected_media_type, expected_path -): - link = get_link(response_json, rel_type) - - assert link is not None, f"Missing {rel_type} link in landing page" - assert link.get("type") == expected_media_type - - link_path = urllib.parse.urlsplit(link.get("href")).path - assert link_path == expected_path - - resp = await app_client.get(link_path) - assert resp.status_code == 200 - - -# This endpoint currently returns a 404 for empty result sets, but testing for this response -# code here seems meaningless since it would be the same as if the endpoint did not exist. Once -# https://github.com/stac-utils/stac-fastapi/pull/227 has been merged we can add this to the -# parameterized tests above. 
-@pytest.mark.asyncio -async def test_search_link(response_json): - search_link = get_link(response_json, "search") - - assert search_link is not None - assert search_link.get("type") == "application/geo+json" - - search_path = urllib.parse.urlsplit(search_link.get("href")).path - assert search_path == "/search" diff --git a/stac_fastapi/opensearch/tests/resources/test_item.py b/stac_fastapi/opensearch/tests/resources/test_item.py deleted file mode 100644 index ab4bb123..00000000 --- a/stac_fastapi/opensearch/tests/resources/test_item.py +++ /dev/null @@ -1,804 +0,0 @@ -import json -import os -import uuid -from copy import deepcopy -from datetime import datetime, timedelta -from random import randint -from urllib.parse import parse_qs, urlparse, urlsplit - -import ciso8601 -import pystac -import pytest -from geojson_pydantic.geometries import Polygon -from pystac.utils import datetime_to_str - -from stac_fastapi.core.core import CoreClient -from stac_fastapi.core.datetime_utils import now_to_rfc3339_str -from stac_fastapi.opensearch.database_logic import DatabaseLogic -from stac_fastapi.types.core import LandingPageMixin - -from ..conftest import create_item, refresh_indices - - -def rfc3339_str_to_datetime(s: str) -> datetime: - return ciso8601.parse_rfc3339(s) - - -database_logic = DatabaseLogic() - - -@pytest.mark.asyncio -async def test_create_and_delete_item(app_client, ctx, txn_client): - """Test creation and deletion of a single item (transactions extension)""" - - test_item = ctx.item - - resp = await app_client.get( - f"/collections/{test_item['collection']}/items/{test_item['id']}" - ) - assert resp.status_code == 200 - - resp = await app_client.delete( - f"/collections/{test_item['collection']}/items/{test_item['id']}" - ) - assert resp.status_code == 204 - - await refresh_indices(txn_client) - - resp = await app_client.get( - f"/collections/{test_item['collection']}/items/{test_item['id']}" - ) - assert resp.status_code == 404 - - -@pytest.mark.asyncio 
-async def test_create_item_conflict(app_client, ctx): - """Test creation of an item which already exists (transactions extension)""" - - test_item = ctx.item - - resp = await app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 409 - - -@pytest.mark.asyncio -async def test_delete_missing_item(app_client, load_test_data): - """Test deletion of an item which does not exist (transactions extension)""" - test_item = load_test_data("test_item.json") - resp = await app_client.delete( - f"/collections/{test_item['collection']}/items/hijosh" - ) - assert resp.status_code == 404 - - -@pytest.mark.asyncio -async def test_create_item_missing_collection(app_client, ctx): - """Test creation of an item without a parent collection (transactions extension)""" - ctx.item["collection"] = "stac_is_cool" - resp = await app_client.post( - f"/collections/{ctx.item['collection']}/items", json=ctx.item - ) - assert resp.status_code == 404 - - -@pytest.mark.asyncio -async def test_create_uppercase_collection_with_item(app_client, ctx, txn_client): - """Test creation of a collection and item with uppercase collection ID (transactions extension)""" - collection_id = "UPPERCASE" - ctx.item["collection"] = collection_id - ctx.collection["id"] = collection_id - resp = await app_client.post("/collections", json=ctx.collection) - assert resp.status_code == 200 - await refresh_indices(txn_client) - resp = await app_client.post(f"/collections/{collection_id}/items", json=ctx.item) - assert resp.status_code == 200 - - -@pytest.mark.asyncio -async def test_update_item_already_exists(app_client, ctx): - """Test updating an item which already exists (transactions extension)""" - - assert ctx.item["properties"]["gsd"] != 16 - ctx.item["properties"]["gsd"] = 16 - await app_client.put( - f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}", json=ctx.item - ) - resp = await app_client.get( - 
f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}" - ) - updated_item = resp.json() - assert updated_item["properties"]["gsd"] == 16 - - await app_client.delete( - f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}" - ) - - -@pytest.mark.asyncio -async def test_update_new_item(app_client, ctx): - """Test updating an item which does not exist (transactions extension)""" - test_item = ctx.item - test_item["id"] = "a" - - resp = await app_client.put( - f"/collections/{test_item['collection']}/items/{test_item['id']}", - json=test_item, - ) - assert resp.status_code == 404 - - -@pytest.mark.asyncio -async def test_update_item_missing_collection(app_client, ctx): - """Test updating an item without a parent collection (transactions extension)""" - # Try to update collection of the item - ctx.item["collection"] = "stac_is_cool" - resp = await app_client.put( - f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}", json=ctx.item - ) - assert resp.status_code == 404 - - -@pytest.mark.asyncio -async def test_update_item_geometry(app_client, ctx): - ctx.item["id"] = "update_test_item_1" - - # Create the item - resp = await app_client.post( - f"/collections/{ctx.item['collection']}/items", json=ctx.item - ) - assert resp.status_code == 200 - - new_coordinates = [ - [ - [142.15052873427666, -33.82243006904891], - [140.1000346138806, -34.257132625788756], - [139.5776607193635, -32.514709769700254], - [141.6262528041627, -32.08081674221862], - [142.15052873427666, -33.82243006904891], - ] - ] - - # Update the geometry of the item - ctx.item["geometry"]["coordinates"] = new_coordinates - resp = await app_client.put( - f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}", json=ctx.item - ) - assert resp.status_code == 200 - - # Fetch the updated item - resp = await app_client.get( - f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}" - ) - assert resp.status_code == 200 - assert resp.json()["geometry"]["coordinates"] == 
new_coordinates - - -@pytest.mark.asyncio -async def test_get_item(app_client, ctx): - """Test read an item by id (core)""" - get_item = await app_client.get( - f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}" - ) - assert get_item.status_code == 200 - - -@pytest.mark.asyncio -async def test_returns_valid_item(app_client, ctx): - """Test validates fetched item with jsonschema""" - test_item = ctx.item - get_item = await app_client.get( - f"/collections/{test_item['collection']}/items/{test_item['id']}" - ) - assert get_item.status_code == 200 - item_dict = get_item.json() - # Mock root to allow validation - mock_root = pystac.Catalog( - id="test", description="test desc", href="https://example.com" - ) - item = pystac.Item.from_dict(item_dict, preserve_dict=False, root=mock_root) - item.validate() - - -@pytest.mark.asyncio -async def test_get_item_collection(app_client, ctx, txn_client): - """Test read an item collection (core)""" - item_count = randint(1, 4) - - for idx in range(item_count): - ctx.item["id"] = f'{ctx.item["id"]}{idx}' - await create_item(txn_client, ctx.item) - - resp = await app_client.get(f"/collections/{ctx.item['collection']}/items") - assert resp.status_code == 200 - - item_collection = resp.json() - if matched := item_collection["context"].get("matched"): - assert matched == item_count + 1 - - -@pytest.mark.asyncio -async def test_item_collection_filter_bbox(app_client, ctx): - item = ctx.item - collection = item["collection"] - - bbox = "100,-50,170,-20" - resp = await app_client.get( - f"/collections/{collection}/items", params={"bbox": bbox} - ) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 1 - - bbox = "1,2,3,4" - resp = await app_client.get( - f"/collections/{collection}/items", params={"bbox": bbox} - ) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 0 - - -@pytest.mark.asyncio -async def 
test_item_collection_filter_datetime(app_client, ctx): - item = ctx.item - collection = item["collection"] - - datetime_range = "2020-01-01T00:00:00.00Z/.." - resp = await app_client.get( - f"/collections/{collection}/items", params={"datetime": datetime_range} - ) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 1 - - datetime_range = "2018-01-01T00:00:00.00Z/2019-01-01T00:00:00.00Z" - resp = await app_client.get( - f"/collections/{collection}/items", params={"datetime": datetime_range} - ) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 0 - - -@pytest.mark.asyncio -@pytest.mark.skip(reason="Pagination extension not implemented") -async def test_pagination(app_client, load_test_data): - """Test item collection pagination (paging extension)""" - item_count = 10 - test_item = load_test_data("test_item.json") - - for idx in range(item_count): - _test_item = deepcopy(test_item) - _test_item["id"] = test_item["id"] + str(idx) - resp = await app_client.post( - f"/collections/{test_item['collection']}/items", json=_test_item - ) - assert resp.status_code == 200 - - resp = await app_client.get( - f"/collections/{test_item['collection']}/items", params={"limit": 3} - ) - assert resp.status_code == 200 - first_page = resp.json() - assert first_page["context"]["returned"] == 3 - - url_components = urlsplit(first_page["links"][0]["href"]) - resp = await app_client.get(f"{url_components.path}?{url_components.query}") - assert resp.status_code == 200 - second_page = resp.json() - assert second_page["context"]["returned"] == 3 - - -@pytest.mark.asyncio -async def test_item_timestamps(app_client, ctx): - """Test created and updated timestamps (common metadata)""" - # start_time = now_to_rfc3339_str() - - created_dt = ctx.item["properties"]["created"] - - # todo, check lower bound - # assert start_time < created_dt < now_to_rfc3339_str() - assert created_dt < now_to_rfc3339_str() - - 
# Confirm `updated` timestamp - ctx.item["properties"]["proj:epsg"] = 4326 - resp = await app_client.put( - f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}", - json=dict(ctx.item), - ) - assert resp.status_code == 200 - updated_item = resp.json() - - # Created shouldn't change on update - assert ctx.item["properties"]["created"] == updated_item["properties"]["created"] - assert updated_item["properties"]["updated"] > created_dt - - await app_client.delete( - f"/collections/{ctx.item['collection']}/items/{ctx.item['id']}" - ) - - -@pytest.mark.asyncio -async def test_item_search_by_id_post(app_client, ctx, txn_client): - """Test POST search by item id (core)""" - ids = ["test1", "test2", "test3"] - for _id in ids: - ctx.item["id"] = _id - await create_item(txn_client, ctx.item) - - params = {"collections": [ctx.item["collection"]], "ids": ids} - resp = await app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == len(ids) - assert set([feat["id"] for feat in resp_json["features"]]) == set(ids) - - -@pytest.mark.asyncio -async def test_item_search_spatial_query_post(app_client, ctx): - """Test POST search with spatial query (core)""" - test_item = ctx.item - - params = { - "collections": [test_item["collection"]], - "intersects": test_item["geometry"], - } - resp = await app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert resp_json["features"][0]["id"] == test_item["id"] - - -@pytest.mark.asyncio -async def test_item_search_temporal_query_post(app_client, ctx): - """Test POST search with single-tailed spatio-temporal query (core)""" - - test_item = ctx.item - - item_date = rfc3339_str_to_datetime(test_item["properties"]["datetime"]) - item_date = item_date + timedelta(seconds=1) - - params = { - "collections": [test_item["collection"]], - "intersects": test_item["geometry"], - "datetime": 
f"../{datetime_to_str(item_date)}", - } - resp = await app_client.post("/search", json=params) - resp_json = resp.json() - assert resp_json["features"][0]["id"] == test_item["id"] - - -@pytest.mark.asyncio -async def test_item_search_temporal_window_post(app_client, ctx): - """Test POST search with two-tailed spatio-temporal query (core)""" - test_item = ctx.item - - item_date = rfc3339_str_to_datetime(test_item["properties"]["datetime"]) - item_date_before = item_date - timedelta(seconds=1) - item_date_after = item_date + timedelta(seconds=1) - - params = { - "collections": [test_item["collection"]], - "intersects": test_item["geometry"], - "datetime": f"{datetime_to_str(item_date_before)}/{datetime_to_str(item_date_after)}", - } - resp = await app_client.post("/search", json=params) - resp_json = resp.json() - assert resp_json["features"][0]["id"] == test_item["id"] - - -@pytest.mark.asyncio -@pytest.mark.skip(reason="KeyError: 'features") -async def test_item_search_temporal_open_window(app_client, ctx): - """Test POST search with open spatio-temporal query (core)""" - test_item = ctx.item - params = { - "collections": [test_item["collection"]], - "intersects": test_item["geometry"], - "datetime": "../..", - } - resp = await app_client.post("/search", json=params) - resp_json = resp.json() - assert resp_json["features"][0]["id"] == test_item["id"] - - -@pytest.mark.asyncio -async def test_item_search_by_id_get(app_client, ctx, txn_client): - """Test GET search by item id (core)""" - ids = ["test1", "test2", "test3"] - for _id in ids: - ctx.item["id"] = _id - await create_item(txn_client, ctx.item) - - params = {"collections": ctx.item["collection"], "ids": ",".join(ids)} - resp = await app_client.get("/search", params=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == len(ids) - assert set([feat["id"] for feat in resp_json["features"]]) == set(ids) - - -@pytest.mark.asyncio -async def 
test_item_search_bbox_get(app_client, ctx): - """Test GET search with spatial query (core)""" - params = { - "collections": ctx.item["collection"], - "bbox": ",".join([str(coord) for coord in ctx.item["bbox"]]), - } - resp = await app_client.get("/search", params=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert resp_json["features"][0]["id"] == ctx.item["id"] - - -@pytest.mark.asyncio -async def test_item_search_get_without_collections(app_client, ctx): - """Test GET search without specifying collections""" - - params = { - "bbox": ",".join([str(coord) for coord in ctx.item["bbox"]]), - } - resp = await app_client.get("/search", params=params) - assert resp.status_code == 200 - - -@pytest.mark.asyncio -async def test_item_search_get_with_non_existent_collections(app_client, ctx): - """Test GET search with non-existent collections""" - - params = {"collections": "non-existent-collection,or-this-one"} - resp = await app_client.get("/search", params=params) - assert resp.status_code == 200 - - -@pytest.mark.asyncio -async def test_item_search_temporal_window_get(app_client, ctx): - """Test GET search with spatio-temporal query (core)""" - test_item = ctx.item - item_date = rfc3339_str_to_datetime(test_item["properties"]["datetime"]) - item_date_before = item_date - timedelta(seconds=1) - item_date_after = item_date + timedelta(seconds=1) - - params = { - "collections": test_item["collection"], - "bbox": ",".join([str(coord) for coord in test_item["bbox"]]), - "datetime": f"{datetime_to_str(item_date_before)}/{datetime_to_str(item_date_after)}", - } - resp = await app_client.get("/search", params=params) - resp_json = resp.json() - assert resp_json["features"][0]["id"] == test_item["id"] - - -@pytest.mark.asyncio -async def test_item_search_post_without_collection(app_client, ctx): - """Test POST search without specifying a collection""" - test_item = ctx.item - params = { - "bbox": test_item["bbox"], - } - resp = await 
app_client.post("/search", json=params) - assert resp.status_code == 200 - - -@pytest.mark.asyncio -async def test_item_search_properties_es(app_client, ctx): - """Test POST search with JSONB query (query extension)""" - - test_item = ctx.item - # EPSG is a JSONB key - params = {"query": {"proj:epsg": {"gt": test_item["properties"]["proj:epsg"] + 1}}} - resp = await app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 0 - - -@pytest.mark.asyncio -async def test_item_search_properties_field(app_client): - """Test POST search indexed field with query (query extension)""" - - # Orientation is an indexed field - params = {"query": {"orientation": {"eq": "south"}}} - resp = await app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 0 - - -@pytest.mark.asyncio -async def test_item_search_get_query_extension(app_client, ctx): - """Test GET search with JSONB query (query extension)""" - - test_item = ctx.item - - params = { - "collections": [test_item["collection"]], - "query": json.dumps( - {"proj:epsg": {"gt": test_item["properties"]["proj:epsg"] + 1}} - ), - } - resp = await app_client.get("/search", params=params) - assert resp.json()["context"]["returned"] == 0 - - params["query"] = json.dumps( - {"proj:epsg": {"eq": test_item["properties"]["proj:epsg"]}} - ) - resp = await app_client.get("/search", params=params) - resp_json = resp.json() - assert resp_json["context"]["returned"] == 1 - assert ( - resp_json["features"][0]["properties"]["proj:epsg"] - == test_item["properties"]["proj:epsg"] - ) - - -@pytest.mark.asyncio -async def test_get_missing_item_collection(app_client): - """Test reading a collection which does not exist""" - resp = await app_client.get("/collections/invalid-collection/items") - assert resp.status_code == 404 - - -@pytest.mark.asyncio -async def 
test_pagination_item_collection(app_client, ctx, txn_client): - """Test item collection pagination links (paging extension)""" - ids = [ctx.item["id"]] - - # Ingest 5 items - for _ in range(5): - ctx.item["id"] = str(uuid.uuid4()) - await create_item(txn_client, item=ctx.item) - ids.append(ctx.item["id"]) - - # Paginate through all 6 items with a limit of 1 (expecting 7 requests) - page = await app_client.get( - f"/collections/{ctx.item['collection']}/items", params={"limit": 1} - ) - - item_ids = [] - idx = 0 - for idx in range(100): - page_data = page.json() - next_link = list(filter(lambda link: link["rel"] == "next", page_data["links"])) - if not next_link: - assert not page_data["features"] - break - - assert len(page_data["features"]) == 1 - item_ids.append(page_data["features"][0]["id"]) - - href = next_link[0]["href"][len("http://test-server") :] - page = await app_client.get(href) - - assert idx == len(ids) - - # Confirm we have paginated through all items - assert not set(item_ids) - set(ids) - - -@pytest.mark.asyncio -async def test_pagination_post(app_client, ctx, txn_client): - """Test POST pagination (paging extension)""" - ids = [ctx.item["id"]] - - # Ingest 5 items - for _ in range(5): - ctx.item["id"] = str(uuid.uuid4()) - await create_item(txn_client, ctx.item) - ids.append(ctx.item["id"]) - - # Paginate through all 5 items with a limit of 1 (expecting 5 requests) - request_body = {"ids": ids, "limit": 1} - page = await app_client.post("/search", json=request_body) - idx = 0 - item_ids = [] - for _ in range(100): - idx += 1 - page_data = page.json() - next_link = list(filter(lambda link: link["rel"] == "next", page_data["links"])) - if not next_link: - break - - item_ids.append(page_data["features"][0]["id"]) - - # Merge request bodies - request_body.update(next_link[0]["body"]) - page = await app_client.post("/search", json=request_body) - - # Our limit is 1, so we expect len(ids) number of requests before we run out of pages - assert idx == 
len(ids) + 1 - - # Confirm we have paginated through all items - assert not set(item_ids) - set(ids) - - -@pytest.mark.asyncio -async def test_pagination_token_idempotent(app_client, ctx, txn_client): - """Test that pagination tokens are idempotent (paging extension)""" - ids = [ctx.item["id"]] - - # Ingest 5 items - for _ in range(5): - ctx.item["id"] = str(uuid.uuid4()) - await create_item(txn_client, ctx.item) - ids.append(ctx.item["id"]) - - page = await app_client.get("/search", params={"ids": ",".join(ids), "limit": 3}) - page_data = page.json() - next_link = list(filter(lambda link: link["rel"] == "next", page_data["links"])) - - # Confirm token is idempotent - resp1 = await app_client.get( - "/search", params=parse_qs(urlparse(next_link[0]["href"]).query) - ) - resp2 = await app_client.get( - "/search", params=parse_qs(urlparse(next_link[0]["href"]).query) - ) - resp1_data = resp1.json() - resp2_data = resp2.json() - - # Two different requests with the same pagination token should return the same items - assert [item["id"] for item in resp1_data["features"]] == [ - item["id"] for item in resp2_data["features"] - ] - - -@pytest.mark.asyncio -async def test_field_extension_get_includes(app_client, ctx): - """Test GET search with included fields (fields extension)""" - test_item = ctx.item - params = { - "ids": [test_item["id"]], - "fields": "+properties.proj:epsg,+properties.gsd", - } - resp = await app_client.get("/search", params=params) - feat_properties = resp.json()["features"][0]["properties"] - assert not set(feat_properties) - {"proj:epsg", "gsd", "datetime"} - - -@pytest.mark.asyncio -async def test_field_extension_get_excludes(app_client, ctx): - """Test GET search with included fields (fields extension)""" - test_item = ctx.item - params = { - "ids": [test_item["id"]], - "fields": "-properties.proj:epsg,-properties.gsd", - } - resp = await app_client.get("/search", params=params) - resp_json = resp.json() - assert "proj:epsg" not in 
resp_json["features"][0]["properties"].keys() - assert "gsd" not in resp_json["features"][0]["properties"].keys() - - -@pytest.mark.asyncio -async def test_field_extension_post(app_client, ctx): - """Test POST search with included and excluded fields (fields extension)""" - test_item = ctx.item - body = { - "ids": [test_item["id"]], - "fields": { - "exclude": ["assets.B1"], - "include": ["properties.eo:cloud_cover", "properties.orientation"], - }, - } - - resp = await app_client.post("/search", json=body) - resp_json = resp.json() - assert "B1" not in resp_json["features"][0]["assets"].keys() - assert not set(resp_json["features"][0]["properties"]) - { - "orientation", - "eo:cloud_cover", - "datetime", - } - - -@pytest.mark.asyncio -async def test_field_extension_exclude_and_include(app_client, ctx): - """Test POST search including/excluding same field (fields extension)""" - test_item = ctx.item - body = { - "ids": [test_item["id"]], - "fields": { - "exclude": ["properties.eo:cloud_cover"], - "include": ["properties.eo:cloud_cover"], - }, - } - - resp = await app_client.post("/search", json=body) - resp_json = resp.json() - assert "eo:cloud_cover" not in resp_json["features"][0]["properties"] - - -@pytest.mark.asyncio -async def test_field_extension_exclude_default_includes(app_client, ctx): - """Test POST search excluding a forbidden field (fields extension)""" - test_item = ctx.item - body = {"ids": [test_item["id"]], "fields": {"exclude": ["gsd"]}} - - resp = await app_client.post("/search", json=body) - resp_json = resp.json() - assert "gsd" not in resp_json["features"][0] - - -@pytest.mark.asyncio -async def test_search_intersects_and_bbox(app_client): - """Test POST search intersects and bbox are mutually exclusive (core)""" - bbox = [-118, 34, -117, 35] - geoj = Polygon.from_bounds(*bbox).dict(exclude_none=True) - params = {"bbox": bbox, "intersects": geoj} - resp = await app_client.post("/search", json=params) - assert resp.status_code == 400 - - 
-@pytest.mark.asyncio -async def test_get_missing_item(app_client, load_test_data): - """Test read item which does not exist (transactions extension)""" - test_coll = load_test_data("test_collection.json") - resp = await app_client.get(f"/collections/{test_coll['id']}/items/invalid-item") - assert resp.status_code == 404 - - -@pytest.mark.asyncio -@pytest.mark.skip(reason="invalid queries not implemented") -async def test_search_invalid_query_field(app_client): - body = {"query": {"gsd": {"lt": 100}, "invalid-field": {"eq": 50}}} - resp = await app_client.post("/search", json=body) - assert resp.status_code == 400 - - -@pytest.mark.asyncio -async def test_search_bbox_errors(app_client): - body = {"query": {"bbox": [0]}} - resp = await app_client.post("/search", json=body) - assert resp.status_code == 400 - - body = {"query": {"bbox": [100.0, 0.0, 0.0, 105.0, 1.0, 1.0]}} - resp = await app_client.post("/search", json=body) - assert resp.status_code == 400 - - params = {"bbox": "100.0,0.0,0.0,105.0"} - resp = await app_client.get("/search", params=params) - assert resp.status_code == 400 - - -@pytest.mark.asyncio -async def test_conformance_classes_configurable(): - """Test conformance class configurability""" - landing = LandingPageMixin() - landing_page = landing._landing_page( - base_url="http://test/test", - conformance_classes=["this is a test"], - extension_schemas=[], - ) - assert landing_page["conformsTo"][0] == "this is a test" - - # Update environment to avoid key error on client instantiation - os.environ["READER_CONN_STRING"] = "testing" - os.environ["WRITER_CONN_STRING"] = "testing" - client = CoreClient( - database=database_logic, base_conformance_classes=["this is a test"] - ) - assert client.conformance_classes()[0] == "this is a test" - - -@pytest.mark.asyncio -async def test_search_datetime_validation_errors(app_client): - bad_datetimes = [ - "37-01-01T12:00:27.87Z", - "1985-13-12T23:20:50.52Z", - "1985-12-32T23:20:50.52Z", - 
"1985-12-01T25:20:50.52Z", - "1985-12-01T00:60:50.52Z", - "1985-12-01T00:06:61.52Z", - "1990-12-31T23:59:61Z", - "1986-04-12T23:20:50.52Z/1985-04-12T23:20:50.52Z", - ] - for dt in bad_datetimes: - body = {"query": {"datetime": dt}} - resp = await app_client.post("/search", json=body) - assert resp.status_code == 400 - - resp = await app_client.get("/search?datetime={}".format(dt)) - assert resp.status_code == 400 diff --git a/stac_fastapi/opensearch/tests/resources/test_mgmt.py b/stac_fastapi/opensearch/tests/resources/test_mgmt.py deleted file mode 100644 index 2b7d9728..00000000 --- a/stac_fastapi/opensearch/tests/resources/test_mgmt.py +++ /dev/null @@ -1,13 +0,0 @@ -import pytest - - -@pytest.mark.asyncio -async def test_ping_no_param(app_client): - """ - Test ping endpoint with a mocked client. - Args: - app_client (TestClient): mocked client fixture - """ - res = await app_client.get("/_mgmt/ping") - assert res.status_code == 200 - assert res.json() == {"message": "PONG"} From ef05b938f7301a5660be38365935e7b4521f9435 Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Mon, 5 Feb 2024 19:58:04 +0800 Subject: [PATCH 29/33] update readme --- README.md | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 593bdac3..7c662480 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,17 @@ ## Elasticsearch and Opensearch backends for the stac-fastapi project +[![PyPI version](https://badge.fury.io/py/stac-fastapi.elasticsearch.svg)](https://badge.fury.io/py/stac-fastapi.elasticsearch) +To install from PyPI: + +```shell +pip install stac_fastapi.elasticsearch +``` +or +``` +pip install stac_fastapi.opensearch +``` #### For changes, see the [Changelog](CHANGELOG.md) @@ -32,17 +42,17 @@ Prior to commit, run: pre-commit run --all-files ``` - -## Building +## Build Elasticsearh API backend ```shell -docker-compose build +docker-compose up elasticsearch +docker-compose build app-elasticsearch ``` -## Running API on 
localhost:8080 +## Running Elasticsearch API on localhost:8080 ```shell -docker-compose up +docker-compose up app-elasticsearch ``` By default, docker-compose uses Elasticsearch 8.x and OpenSearch 2.11.1. From a0b0940882eeb83396193cea78bdba6b79597181 Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Tue, 6 Feb 2024 11:12:32 +0800 Subject: [PATCH 30/33] changelog fix --- CHANGELOG.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3d5df7f9..03d6ee5a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -17,6 +17,8 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ### Fixed +- Allow additional top-level properties on collections [#191](https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/pull/191) + ## [v1.1.0] ### Added @@ -32,7 +34,6 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ### Fixed -- Allow additional top-level properties on collections [#191](https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/pull/191) - Exclude unset fields in search response [#166](https://github.com/stac-utils/stac-fastapi-elasticsearch/issues/166) - Upgrade stac-fastapi to v2.4.9 [#172](https://github.com/stac-utils/stac-fastapi-elasticsearch/pull/172) - Set correct default filter-lang for GET /search requests [#179](https://github.com/stac-utils/stac-fastapi-elasticsearch/issues/179) From 3cd75ec6857483060be8498617a9352f1abbe3dd Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Tue, 6 Feb 2024 11:19:44 +0800 Subject: [PATCH 31/33] fix brackets --- CHANGELOG.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 03d6ee5a..432c01cd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,11 +9,11 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
### Added -- Added core library package for common logic [#186]https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/pull/186 +- Added core library package for common logic [#186](https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/pull/186) ### Changed -- Moved Elasticsearch and Opensearch backends into separate packages [#186]https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/pull/186 +- Moved Elasticsearch and Opensearch backends into separate packages [#186](https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/pull/186) ### Fixed From 6c2df64ffd9a3de3e8dabe55e33f8755cb3fd8c1 Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Tue, 6 Feb 2024 12:49:27 +0800 Subject: [PATCH 32/33] makefile fixes --- Dockerfile.deploy.os | 2 +- Makefile | 23 +++++++++++------------ stac_fastapi/opensearch/setup.py | 4 ++++ stac_fastapi/tests/resources/test_item.py | 8 +++++++- 4 files changed, 23 insertions(+), 14 deletions(-) diff --git a/Dockerfile.deploy.os b/Dockerfile.deploy.os index 64999bbb..035b181e 100644 --- a/Dockerfile.deploy.os +++ b/Dockerfile.deploy.os @@ -13,7 +13,7 @@ WORKDIR /app COPY . /app RUN pip install --no-cache-dir -e ./stac_fastapi/core -RUN pip install --no-cache-dir ./stac_fastapi/elasticsearch[server] +RUN pip install --no-cache-dir ./stac_fastapi/opensearch[server] EXPOSE 8080 diff --git a/Makefile b/Makefile index 78cbbb79..545d2311 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,5 @@ #!make APP_HOST ?= 0.0.0.0 -ES_APP_PORT ?= 8080 EXTERNAL_APP_PORT ?= ${APP_PORT} ES_APP_PORT ?= 8080 @@ -8,7 +7,7 @@ ES_HOST ?= docker.for.mac.localhost ES_PORT ?= 9200 OS_APP_PORT ?= 8082 -ES_HOST ?= docker.for.mac.localhost +OS_HOST ?= docker.for.mac.localhost OS_PORT ?= 9202 run_es = docker-compose \ @@ -29,11 +28,11 @@ run_os = docker-compose \ .PHONY: image-deploy-es image-deploy-es: - docker build -f Dockerfile.deploy.es -t stac-fastapi-elasticsearch:latest . 
+ docker build -f Dockerfile.dev.es -t stac-fastapi-elasticsearch:latest . .PHONY: image-deploy-os image-deploy-os: - docker build -f Dockerfile.deploy.os -t stac-fastapi-opensearch:latest . + docker build -f Dockerfile.dev.os -t stac-fastapi-opensearch:latest . .PHONY: run-deploy-locally run-deploy-locally: @@ -53,33 +52,33 @@ docker-run-es: image-dev $(run_es) .PHONY: docker-run-os -docker-run-es: image-dev +docker-run-os: image-dev $(run_os) .PHONY: docker-shell-es -docker-shell: +docker-shell-es: $(run_es) /bin/bash .PHONY: docker-shell-os -docker-shell: +docker-shell-os: $(run_os) /bin/bash .PHONY: test-elasticsearch -test: - -$(run_es) /bin/bash -c 'export && ./scripts/wait-for-it-es.sh elasticsearch:9200 && cd /app/stac_fastapi/elasticsearch/tests/ && pytest' +test-elasticsearch: + -$(run_es) /bin/bash -c 'export && ./scripts/wait-for-it-es.sh elasticsearch:9200 && cd stac_fastapi/tests/ && pytest' docker-compose down .PHONY: test-opensearch test-opensearch: - -$(run_os) /bin/bash -c 'export && ./scripts/wait-for-it-es.sh opensearch:9202 && cd /app/stac_fastapi/opensearch/tests/ && pytest' + -$(run_os) /bin/bash -c 'export && ./scripts/wait-for-it-es.sh opensearch:9202 && cd stac_fastapi/tests/ && pytest' docker-compose down .PHONY: test test: - -$(run_es) /bin/bash -c 'export && ./scripts/wait-for-it-es.sh elasticsearch:9200 && cd /app/stac_fastapi/elasticsearch/tests/ && pytest' + -$(run_es) /bin/bash -c 'export && ./scripts/wait-for-it-es.sh elasticsearch:9200 && cd stac_fastapi/tests/ && pytest' docker-compose down - -$(run_os) /bin/bash -c 'export && ./scripts/wait-for-it-es.sh opensearch:9202 && cd /app/stac_fastapi/opensearch/tests/ && pytest' + -$(run_os) /bin/bash -c 'export && ./scripts/wait-for-it-es.sh opensearch:9202 && cd stac_fastapi/tests/ && pytest' docker-compose down .PHONY: run-database-es diff --git a/stac_fastapi/opensearch/setup.py b/stac_fastapi/opensearch/setup.py index f6a11e57..9811c2ad 100644 --- a/stac_fastapi/opensearch/setup.py 
+++ b/stac_fastapi/opensearch/setup.py @@ -48,4 +48,8 @@ packages=find_namespace_packages(), zip_safe=False, install_requires=install_requires, + extras_require=extra_reqs, + entry_points={ + "console_scripts": ["stac-fastapi-opensearch=stac_fastapi.opensearch.app:run"] + }, ) diff --git a/stac_fastapi/tests/resources/test_item.py b/stac_fastapi/tests/resources/test_item.py index e62da8b8..377f5aa1 100644 --- a/stac_fastapi/tests/resources/test_item.py +++ b/stac_fastapi/tests/resources/test_item.py @@ -14,11 +14,17 @@ from stac_fastapi.core.core import CoreClient from stac_fastapi.core.datetime_utils import now_to_rfc3339_str -from stac_fastapi.elasticsearch.database_logic import DatabaseLogic + +# from stac_fastapi.core.base_database_logic import BaseDatabaseLogic from stac_fastapi.types.core import LandingPageMixin from ..conftest import create_item, refresh_indices +if os.getenv("BACKEND", "elasticsearch").lower() == "opensearch": + from stac_fastapi.opensearch.database_logic import DatabaseLogic +else: + from stac_fastapi.elasticsearch.database_logic import DatabaseLogic + def rfc3339_str_to_datetime(s: str) -> datetime: return ciso8601.parse_rfc3339(s) From d530cb8b41ae62ab3b8b420af99782954e31c573 Mon Sep 17 00:00:00 2001 From: jonhealy1 Date: Tue, 6 Feb 2024 12:54:09 +0800 Subject: [PATCH 33/33] remove comment --- stac_fastapi/tests/resources/test_item.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/stac_fastapi/tests/resources/test_item.py b/stac_fastapi/tests/resources/test_item.py index 377f5aa1..958d0703 100644 --- a/stac_fastapi/tests/resources/test_item.py +++ b/stac_fastapi/tests/resources/test_item.py @@ -14,8 +14,6 @@ from stac_fastapi.core.core import CoreClient from stac_fastapi.core.datetime_utils import now_to_rfc3339_str - -# from stac_fastapi.core.base_database_logic import BaseDatabaseLogic from stac_fastapi.types.core import LandingPageMixin from ..conftest import create_item, refresh_indices