Skip to content

Commit

Permalink
Make multi-version queries valid.
Browse files Browse the repository at this point in the history
  • Loading branch information
antarcticrainforest committed Feb 26, 2025
1 parent d8a359c commit 5053fbd
Show file tree
Hide file tree
Showing 4 changed files with 161 additions and 108 deletions.
48 changes: 21 additions & 27 deletions freva-rest/src/freva_rest/databrowser_api/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -311,9 +311,7 @@ def primary_keys(self) -> list[str]:
if v == "primary"
]
else:
_keys = [
k for (k, v) in self._freva_facets.items() if v == "primary"
]
_keys = [k for (k, v) in self._freva_facets.items() if v == "primary"]
if self.flavour in ("cordex",):
for key in self.cordex_keys:
_keys.append(key)
Expand Down Expand Up @@ -535,9 +533,7 @@ async def _session_post(self) -> AsyncIterator[Tuple[int, Dict[str, Any]]]:
)
async with aiohttp.ClientSession(timeout=self.timeout) as session:
try:
async with session.post(
self._post_url, json=self.payload
) as res:
async with session.post(self._post_url, json=self.payload) as res:
try:
await self.check_for_status(res)
logger.info(
Expand All @@ -546,9 +542,7 @@ async def _session_post(self) -> AsyncIterator[Tuple[int, Dict[str, Any]]]:
)
response_data = await res.json()
except HTTPException: # pragma: no cover
logger.error(
"POST request failed: %s", await res.text()
)
logger.error("POST request failed: %s", await res.text())
response_data = {}
except Exception as error:
logger.error("Connection to %s failed: %s", self.url, error)
Expand Down Expand Up @@ -592,10 +586,13 @@ async def validate_parameters(
Translate the output to the required DRS flavour.
"""
translator = Translator(flavour, translate)
valid_facets = translator.valid_facets
if multi_version:
valid_facets = translator.valid_facets + ["version"]
for key in query:
key = key.lower().replace("_not_", "")
if (
key not in translator.valid_facets
key not in valid_facets
and key not in ("time_select",) + cls.uniq_keys
):
raise HTTPException(
Expand Down Expand Up @@ -781,12 +778,10 @@ async def _insert_to_mongo(
)
if bulk_operations:
try:
result = (
await self._config.mongo_collection_userdata.bulk_write(
bulk_operations,
ordered=False,
bypass_document_validation=False,
)
result = await self._config.mongo_collection_userdata.bulk_write(
bulk_operations,
ordered=False,
bypass_document_validation=False,
)
successful_upsert = (
result.upserted_count
Expand Down Expand Up @@ -827,9 +822,7 @@ async def _insert_to_mongo(
nMatched,
)
except Exception as error:
logger.exception(
"[MONGO] Could not insert metadata: %s", error
)
logger.exception("[MONGO] Could not insert metadata: %s", error)

@ensure_future
async def store_results(self, num_results: int, status: int) -> None:
Expand Down Expand Up @@ -919,15 +912,20 @@ async def extended_search(
-------
int: status code of the apache solr query.
"""
search_facets = [f for f in facets if f not in ("*", "all")]
search_facets = [f for f in facets if f not in ("*", "all")] or [
f for f in self._config.solr_fields
]
if self.multi_version:
search_facets.append("version")

self.query["facet"] = "true"
self.query["rows"] = max_results
self.query["facet.sort"] = "index"
self.query["facet.mincount"] = "1"
self.query["facet.limit"] = "-1"
self.query["wt"] = "json"
self.query["facet.field"] = self.translator.translate_facets(
search_facets or self._config.solr_fields, backwards=True
search_facets, backwards=True
)
self.query["fl"] = [self.uniq_key, "fs_type"]
logger.info(
Expand Down Expand Up @@ -977,9 +975,7 @@ async def init_stream(self) -> Tuple[int, int]:
search_status, search = res
return search_status, search.get("response", {}).get("numFound", 0)

def _join_facet_queries(
self, key: str, facets: List[str]
) -> Tuple[str, str]:
def _join_facet_queries(self, key: str, facets: List[str]) -> Tuple[str, str]:
"""Create lucene search contain and NOT contain search queries"""

negative, positive = [], []
Expand Down Expand Up @@ -1244,9 +1240,7 @@ async def _process_metadata(
)
if not is_duplicate:
new_querie.append(metadata)
return [
dict(t) for t in {tuple(sorted(d.items())) for d in new_querie}
]
return [dict(t) for t in {tuple(sorted(d.items())) for d in new_querie}]

async def _purge_user_data(
self, search_keys: Dict[str, Union[str, int]]
Expand Down
17 changes: 5 additions & 12 deletions freva-rest/src/freva_rest/databrowser_api/endpoints.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,9 +91,7 @@ async def metadata_search(
uniq_key: Literal["file", "uri"],
multi_version: Annotated[bool, SolrSchema.params["multi_version"]] = False,
translate: Annotated[bool, SolrSchema.params["translate"]] = True,
facets: Annotated[
Union[List[str], None], SolrSchema.params["facets"]
] = None,
facets: Annotated[Union[List[str], None], SolrSchema.params["facets"]] = None,
request: Request = Required,
) -> JSONResponse:
"""Query the available metadata.
Expand Down Expand Up @@ -232,9 +230,7 @@ async def extended_search(
multi_version: Annotated[bool, SolrSchema.params["multi_version"]] = False,
translate: Annotated[bool, SolrSchema.params["translate"]] = True,
max_results: Annotated[int, SolrSchema.params["batch_size"]] = 150,
facets: Annotated[
Union[List[str], None], SolrSchema.params["facets"]
] = None,
facets: Annotated[Union[List[str], None], SolrSchema.params["facets"]] = None,
request: Request = Required,
) -> JSONResponse:
"""This endpoint is used by the databrowser web ui client."""
Expand Down Expand Up @@ -276,8 +272,7 @@ async def load_data(
title="Catalogue type",
alias="catalogue-type",
description=(
"Set the type of catalogue you want to create from this"
"query"
"Set the type of catalogue you want to create from this" "query"
),
),
] = None,
Expand Down Expand Up @@ -345,10 +340,8 @@ async def post_user_data(
solr_instance = Solr(server_config)
try:
try:
validated_user_metadata = (
await solr_instance._validate_user_metadata(
request.user_metadata
)
validated_user_metadata = await solr_instance._validate_user_metadata(
request.user_metadata
)
except HTTPException as error:
raise HTTPException(
Expand Down
Loading

0 comments on commit 5053fbd

Please sign in to comment.