Skip to content

Commit 2f980df

Browse files
committed
remove db logic
1 parent 86885c0 commit 2f980df

File tree

3 files changed

+67
-85
lines changed

3 files changed

+67
-85
lines changed

stac_fastapi/core/stac_fastapi/core/core.py

Lines changed: 16 additions & 47 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
"""Item crud client."""
22
import logging
33
import re
4-
from base64 import urlsafe_b64encode
54
from datetime import datetime as datetime_type
65
from datetime import timezone
76
from typing import Any, Dict, List, Optional, Set, Type, Union
@@ -193,66 +192,36 @@ async def landing_page(self, **kwargs) -> stac_types.LandingPage:
193192
async def all_collections(self, **kwargs) -> Collections:
194193
"""Read all collections from the database.
195194
196-
Returns:
197-
Collections: A `Collections` object containing all the collections in the database and
198-
links to various resources.
195+
Args:
196+
**kwargs: Keyword arguments from the request.
199197
200-
Raises:
201-
Exception: If any error occurs while reading the collections from the database.
198+
Returns:
199+
A Collections object containing all the collections in the database and links to various resources.
202200
"""
203-
request: Request = kwargs["request"]
204-
base_url = str(kwargs["request"].base_url)
201+
request = kwargs["request"]
202+
base_url = str(request.base_url)
203+
limit = int(request.query_params.get("limit", 10))
204+
token = request.query_params.get("token")
205205

206-
limit = (
207-
int(request.query_params["limit"])
208-
if "limit" in request.query_params
209-
else 10
210-
)
211-
token = (
212-
request.query_params["token"] if "token" in request.query_params else None
206+
collections, next_token = await self.database.get_all_collections(
207+
token=token, limit=limit
213208
)
214209

215-
hits = await self.database.get_all_collections(limit=limit, token=token)
216-
217-
next_search_after = None
218-
next_link = None
219-
if len(hits) == limit:
220-
last_hit = hits[-1]
221-
next_search_after = last_hit["sort"]
222-
next_token = urlsafe_b64encode(
223-
",".join(map(str, next_search_after)).encode()
224-
).decode()
225-
paging_links = PagingLinks(next=next_token, request=request)
226-
next_link = paging_links.link_next()
227-
228210
links = [
229-
{
230-
"rel": Relations.root.value,
231-
"type": MimeTypes.json,
232-
"href": base_url,
233-
},
234-
{
235-
"rel": Relations.parent.value,
236-
"type": MimeTypes.json,
237-
"href": base_url,
238-
},
211+
{"rel": Relations.root.value, "type": MimeTypes.json, "href": base_url},
212+
{"rel": Relations.parent.value, "type": MimeTypes.json, "href": base_url},
239213
{
240214
"rel": Relations.self.value,
241215
"type": MimeTypes.json,
242-
"href": urljoin(base_url, "collections"),
216+
"href": f"{base_url}collections",
243217
},
244218
]
245219

246-
if next_link:
220+
if next_token:
221+
next_link = PagingLinks(next=next_token, request=request).link_next()
247222
links.append(next_link)
248223

249-
return Collections(
250-
collections=[
251-
self.collection_serializer.db_to_stac(c["_source"], base_url=base_url)
252-
for c in hits
253-
],
254-
links=links,
255-
)
224+
return Collections(collections=collections, links=links)
256225

257226
async def get_collection(self, collection_id: str, **kwargs) -> Collection:
258227
"""Get a collection from the database by its id.

stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/database_logic.py

Lines changed: 22 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -291,32 +291,37 @@ class DatabaseLogic:
291291

292292
async def get_all_collections(
293293
self, token: Optional[str], limit: int
294-
) -> Iterable[Dict[str, Any]]:
295-
"""Retrieve a list of all collections from the database.
294+
) -> Tuple[List[Dict[str, Any]], Optional[str]]:
295+
"""Retrieve a list of all collections from Elasticsearch, supporting pagination.
296296
297297
Args:
298-
token (Optional[str]): The token used to return the next set of results.
299-
limit (int): Number of results to return
298+
token (Optional[str]): The pagination token.
299+
limit (int): The number of results to return.
300300
301301
Returns:
302-
collections (Iterable[Dict[str, Any]]): A list of dictionaries containing the source data for each collection.
303-
304-
Notes:
305-
The collections are retrieved from the Elasticsearch database using the `client.search` method,
306-
with the `COLLECTIONS_INDEX` as the target index and `size=limit` to retrieve records.
307-
The result is a generator of dictionaries containing the source data for each collection.
302+
A tuple of (collections, next pagination token if any).
308303
"""
309304
search_after = None
310305
if token:
311-
search_after = urlsafe_b64decode(token.encode()).decode().split(",")
312-
collections = await self.client.search(
306+
search_after = [token]
307+
308+
response = await self.client.search(
313309
index=COLLECTIONS_INDEX,
314-
search_after=search_after,
315-
size=limit,
316-
sort={"id": {"order": "asc"}},
310+
body={
311+
"sort": [{"id": {"order": "asc"}}],
312+
"size": limit,
313+
"search_after": search_after,
314+
},
317315
)
318-
hits = collections["hits"]["hits"]
319-
return hits
316+
317+
hits = response["hits"]["hits"]
318+
collections = [hit["_source"] for hit in hits]
319+
320+
next_token = None
321+
if len(hits) == limit:
322+
next_token = hits[-1]["sort"][0]
323+
324+
return collections, next_token
320325

321326
async def get_one_item(self, collection_id: str, item_id: str) -> Dict:
322327
"""Retrieve a single item from the database.

stac_fastapi/opensearch/stac_fastapi/opensearch/database_logic.py

Lines changed: 29 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -311,36 +311,44 @@ class DatabaseLogic:
311311
"""CORE LOGIC"""
312312

313313
async def get_all_collections(
314-
self,
315-
token: Optional[str],
316-
limit: int,
317-
) -> Iterable[Dict[str, Any]]:
318-
"""Retrieve a list of all collections from the database.
314+
self, token: Optional[str], limit: int
315+
) -> Tuple[List[Dict[str, Any]], Optional[str]]:
316+
"""
317+
Retrieve a list of all collections from OpenSearch, supporting pagination.
319318
320319
Args:
321-
token (Optional[str]): The token used to return the next set of results.
322-
limit (int): Number of results to return
320+
token (Optional[str]): The pagination token.
321+
limit (int): The number of results to return.
323322
324323
Returns:
325-
collections (Iterable[Dict[str, Any]]): A list of dictionaries containing the source data for each collection.
326-
327-
Notes:
328-
The collections are retrieved from the Elasticsearch database using the `client.search` method,
329-
with the `COLLECTIONS_INDEX` as the target index and `size=limit` to retrieve records.
330-
The result is a generator of dictionaries containing the source data for each collection.
324+
A tuple of (collections, next pagination token if any).
331325
"""
332-
search_body: Dict[str, Any] = {}
326+
search_body = {
327+
"sort": [{"id": {"order": "asc"}}],
328+
"size": limit,
329+
}
330+
331+
# Only add search_after to the query if token is not None and not empty
333332
if token:
334-
search_after = urlsafe_b64decode(token.encode()).decode().split(",")
333+
search_after = [token]
335334
search_body["search_after"] = search_after
336335

337-
search_body["sort"] = {"id": {"order": "asc"}}
338-
339-
collections = await self.client.search(
340-
index=COLLECTIONS_INDEX, body=search_body, size=limit
336+
response = await self.client.search(
337+
index="collections",
338+
body=search_body,
341339
)
342-
hits = collections["hits"]["hits"]
343-
return hits
340+
341+
hits = response["hits"]["hits"]
342+
collections = [hit["_source"] for hit in hits]
343+
344+
next_token = None
345+
if len(hits) == limit:
346+
# Ensure we have a valid sort value for next_token
347+
next_token_values = hits[-1].get("sort")
348+
if next_token_values:
349+
next_token = next_token_values[0]
350+
351+
return collections, next_token
344352

345353
async def get_one_item(self, collection_id: str, item_id: str) -> Dict:
346354
"""Retrieve a single item from the database.

0 commit comments

Comments (0)