Revert "[Performance] Add ES query timeout" #307

Merged 1 commit on Nov 6, 2023
aio/aio-proxy/aio_proxy/decorators/http_exception.py (15 changes: 0 additions, 15 deletions)
@@ -19,20 +19,6 @@ def inner_function(*args, **kwargs):
                     text=serialize_error_text(str(error)),
                     content_type="application/json",
                 )
-        except elasticsearch.exceptions.ConnectionTimeout as error:
-            with push_scope() as scope:
-                # group value errors together based on their response
-                # (Bad request)
-                scope.fingerprint = ["Timeout query"]
-                # capture_exception(error)
-                logging.warning(f"Query too slow: {str(error)}")
-                response_text = serialize_error_text(
-                    "La requête de recherche est trop lente. "
-                    "Veuillez affiner votre requête ou réessayer ultérieurement."
-                )
-                return web.Response(
-                    text=response_text, content_type="application/json", status=504
-                )
         except ValidationError as err:
             with push_scope() as scope:
                 # group value errors together based on their response
@@ -48,7 +34,6 @@
                     ),
                     content_type="application/json",
                 )
-
         except BaseException as error:
             # capture error in Sentry
             capture_exception(error)
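
The deleted handler mapped Elasticsearch connection timeouts to an HTTP 504 response. Below is a minimal, self-contained sketch of that pattern, not the project's exact code: it omits the Sentry scope handling and assumes a hypothetical serialize_error_text JSON helper. The French message translates to "The search query is too slow. Please refine your query or try again later."

import json
import logging
from functools import wraps

import elasticsearch
from aiohttp import web


def serialize_error_text(text: str) -> str:
    # Hypothetical helper: wrap the error message in a JSON envelope.
    return json.dumps({"erreur": text})


def http_exception_handler(func):
    # Sketch of the decorator pattern removed by this revert: any
    # Elasticsearch connection timeout becomes a 504 Gateway Timeout.
    @wraps(func)
    def inner_function(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except elasticsearch.exceptions.ConnectionTimeout as error:
            logging.warning(f"Query too slow: {str(error)}")
            return web.Response(
                text=serialize_error_text(
                    "La requête de recherche est trop lente. "
                    "Veuillez affiner votre requête ou réessayer ultérieurement."
                ),
                content_type="application/json",
                status=504,
            )

    return inner_function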
aio/aio-proxy/aio_proxy/search/es_search_runner.py (12 changes: 4 additions, 8 deletions)
@@ -2,7 +2,6 @@
 from datetime import timedelta
 
 from aio_proxy.request.search_type import SearchType
-from aio_proxy.response.helpers import is_dev_env
 from aio_proxy.search.es_index import StructureMapping
 from aio_proxy.search.geo_search import build_es_search_geo_query
 from aio_proxy.search.helpers.helpers import (
@@ -20,7 +19,7 @@
 
 class ElasticSearchRunner:
     def __init__(self, search_params, search_type):
-        self.es_search_client = StructureMapping.search().params(request_timeout=1)
+        self.es_search_client = StructureMapping.search()
         self.search_type = search_type
         self.search_params = search_params
         self.has_full_text_query = False
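
For context, .params() on an elasticsearch-dsl Search forwards its keyword arguments to the underlying client call, so request_timeout=1 capped every query at one second and raised ConnectionTimeout beyond that. A rough sketch of the before/after behaviour, with an illustrative index name and connection rather than the project's setup:

from elasticsearch_dsl import Search, connections

# Illustrative connection; the project wires its own client elsewhere.
connections.create_connection(hosts=["http://localhost:9200"])

# Behaviour being reverted: the transport gives up after 1 second and raises
# elasticsearch.exceptions.ConnectionTimeout, which the (also reverted)
# decorator above turned into an HTTP 504.
timed_search = Search(index="structures").params(request_timeout=1)

# Behaviour after the revert: rely on the client's default timeout.
plain_search = Search(index="structures")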
@@ -69,12 +68,9 @@ def execute_and_format_es_search(self):
             self.es_search_results.append(matching_structure_dict)
 
     def sort_and_execute_es_search_query(self):
-        self.es_search_client = self.es_search_client.extra(track_scores=True)
-
-        # explain query result in dev env
-        if is_dev_env():
-            self.es_search_client = self.es_search_client.extra(explain=True)
-
+        self.es_search_client = self.es_search_client.extra(
+            track_scores=True, explain=True
+        )
         # Collapse is used to aggregate the results by siren. It is the consequence of
         # separating large documents into smaller ones
 
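
The .extra() call merges keys into the raw request body, so the revert simply sets track_scores and explain together instead of gating explain behind is_dev_env(). A small illustration with assumed index and field names, not the project's code:

from elasticsearch_dsl import Search

s = Search(index="structures").query("match", nom_complet="example")

# .extra() adds top-level keys to the request body alongside the query.
s = s.extra(track_scores=True, explain=True)

# The serialized body now carries both flags:
# {'query': {'match': {'nom_complet': 'example'}},
#  'track_scores': True, 'explain': True}
print(s.to_dict())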