
Commit fb4988d

remove base_url from elasticsearch
1 parent 2108963 · commit fb4988d

File tree: 2 files changed (4 additions & 13 deletions)
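
Taken together, the two files narrow the Elasticsearch database layer so that it no longer needs the request's base URL; STAC serialization stays in the API layer. Below is a minimal sketch of the resulting execute_search signature, with parameter names and the return type taken from the diff; the Item alias, the untyped search argument, and the omitted body are assumptions for illustration only.

from typing import Any, Dict, List, Optional, Tuple

# Stand-in for stac_fastapi.types.stac.Item (a TypedDict in the real package).
Item = Dict[str, Any]


async def execute_search(
    search: Any,  # the prepared Elasticsearch query object (type assumed)
    limit: int,
    token: Optional[str],
    sort: Optional[Dict[str, Dict[str, str]]],
) -> Tuple[List[Item], Optional[int], Optional[str]]:
    """Run the search and return (items, matched count, next pagination token)."""
    ...  # body omitted; see database_logic.py below for the actual implementation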

stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/core.py

Lines changed: 2 additions & 6 deletions
@@ -53,7 +53,7 @@ async def all_collections(self, **kwargs) -> Collections:
         """Read all collections from the database."""
         base_url = str(kwargs["request"].base_url)
         collection_list = await self.database.get_all_collections()
-        collection_list = [
+        collection_list = [
             self.collection_serializer.db_to_stac(c, base_url=base_url)
             for c in collection_list
         ]
@@ -91,7 +91,6 @@ async def item_collection(
     ) -> ItemCollection:
         """Read an item collection from the database."""
         request: Request = kwargs["request"]
-        base_url = str(request.base_url)

         items, maybe_count, next_token = await self.database.execute_search(
             search=self.database.apply_collections_filter(
@@ -100,7 +99,6 @@ async def item_collection(
             limit=limit,
             token=token,
             sort=None,
-            base_url=base_url,
         )

         context_obj = None
@@ -273,12 +271,10 @@ async def post_search(
             limit=limit,
             token=search_request.token,  # type: ignore
             sort=sort,
-            base_url=base_url,
         )

         items = [
-            self.item_serializer.db_to_stac(item, base_url=base_url)
-            for item in items
+            self.item_serializer.db_to_stac(item, base_url=base_url) for item in items
         ]

         # if self.extension_is_enabled("FieldsExtension"):
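
After this change, core.py derives base_url from the incoming request only where serialization happens and passes plain database results to the serializers. A hedged sketch of that call pattern follows; the serializer class is a simplified stand-in for the package's ItemSerializer.db_to_stac, and the link it builds is purely illustrative.

from typing import Any, Dict, List

StacItem = Dict[str, Any]


class ItemSerializer:
    """Simplified stand-in for the serializer used in core.py."""

    @staticmethod
    def db_to_stac(item: Dict[str, Any], base_url: str) -> StacItem:
        # Illustrative only: attach a self link built from the request's base URL.
        links = item.setdefault("links", [])
        links.append(
            {
                "rel": "self",
                "href": f"{base_url}collections/{item['collection']}/items/{item['id']}",
            }
        )
        return item


def serialize_items(items: List[Dict[str, Any]], base_url: str) -> List[StacItem]:
    # Mirrors the post_search hunk: serialization happens in the API layer,
    # after execute_search has returned raw documents.
    return [ItemSerializer.db_to_stac(item, base_url=base_url) for item in items]


# In the real handlers, base_url would come from str(request.base_url).
print(serialize_items([{"id": "item-1", "collection": "demo"}], "http://localhost:8080/"))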

stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/database_logic.py

Lines changed: 2 additions & 7 deletions
@@ -71,9 +71,7 @@ async def get_all_collections(self) -> List[Collection]:
         # https://github.com/stac-utils/stac-fastapi-elasticsearch/issues/65
         # collections should be paginated, but at least return more than the default 10 for now
         collections = await self.client.search(index=COLLECTIONS_INDEX, size=1000)
-        return [
-            c["_source"] for c in collections["hits"]["hits"]
-        ]
+        return [c["_source"] for c in collections["hits"]["hits"]]

     async def get_one_item(self, collection_id: str, item_id: str) -> Dict:
         """Database logic to retrieve a single item."""
@@ -192,7 +190,6 @@ async def execute_search(
         limit: int,
         token: Optional[str],
         sort: Optional[Dict[str, Dict[str, str]]],
-        base_url: str,
     ) -> Tuple[List[Item], Optional[int], Optional[str]]:
         """Database logic to execute search with limit."""
         search_after = None
@@ -218,9 +215,7 @@ async def execute_search(
         es_response = await search_task

         hits = es_response["hits"]["hits"]
-        items = [
-            hit["_source"] for hit in hits
-        ]
+        items = [hit["_source"] for hit in hits]

         next_token = None
         if hits and (sort_array := hits[-1].get("sort")):
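
With base_url gone from execute_search, the database layer simply unpacks the raw _source documents and derives the pagination token from the last hit's sort values. A small self-contained illustration using a fake Elasticsearch response is below; the response shape follows the client's search() output, but the comma-joined token encoding is an assumption, not the package's actual format.

from typing import Any, Dict, List, Optional

# Fake Elasticsearch response body, shaped like the client's search() output.
es_response: Dict[str, Any] = {
    "hits": {
        "hits": [
            {"_source": {"id": "item-1", "collection": "demo"}, "sort": [1647000000000, "item-1"]},
            {"_source": {"id": "item-2", "collection": "demo"}, "sort": [1647000001000, "item-2"]},
        ]
    }
}

hits = es_response["hits"]["hits"]
items: List[Dict[str, Any]] = [hit["_source"] for hit in hits]  # same one-liner as the diff

# Pagination: the next token comes from the last hit's "sort" values
# (search_after semantics); the encoding shown here is illustrative only.
next_token: Optional[str] = None
if hits and (sort_array := hits[-1].get("sort")):
    next_token = ",".join(str(value) for value in sort_array)

print(items, next_token)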
