chore(on-call): Properly handling query validation errors (#6019)
* properly handling query validation

* update test

* fix typing
enochtangg authored Jun 14, 2024
1 parent 4fe5e18 commit 38a902e
Showing 4 changed files with 90 additions and 66 deletions.
6 changes: 3 additions & 3 deletions snuba/querylog/__init__.py
@@ -212,7 +212,7 @@ def record_invalid_request(
    it records failures during parsing/validation.
    This is for client errors.
    """
-    _record_failure_building_request(
+    _record_failure_metric_with_status(
        QueryStatus.INVALID_REQUEST, request_status, timer, referrer, exception_name
    )

@@ -228,12 +228,12 @@ def record_error_building_request(
    it records failures during parsing/validation.
    This is for system errors during parsing/validation.
    """
-    _record_failure_building_request(
+    _record_failure_metric_with_status(
        QueryStatus.ERROR, request_status, timer, referrer, exception_name
    )


-def _record_failure_building_request(
+def _record_failure_metric_with_status(
    status: QueryStatus,
    request_status: Status,
    timer: Timer,
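The only change in this file is the helper's name: both public entry points still funnel into one private function that tags the failure with a QueryStatus alongside the newer request Status. A minimal, self-contained sketch of that delegation pattern (the enum values, parameter types, and the metric "emission" below are stand-ins for illustration, not Snuba's actual implementation):

from enum import Enum
from typing import Optional


class QueryStatus(Enum):
    # Illustrative values only; the real enum lives in Snuba's querylog code.
    ERROR = "error"
    INVALID_REQUEST = "invalid_request"


def _record_failure_metric_with_status(
    status: QueryStatus,
    request_status: str,
    timer_tags: dict,
    referrer: str,
    exception_name: Optional[str],
) -> None:
    # Stand-in for the real metric emission: one shared code path, tagged
    # with the failure classification and the exception class name.
    timer_tags.update(
        {
            "status": status.value,
            "request_status": request_status,
            "referrer": referrer,
            "exception": exception_name or "unknown",
        }
    )


def record_invalid_request(timer_tags, request_status, referrer, exception_name=None):
    # Client errors: parsing/validation failures caused by the request itself.
    _record_failure_metric_with_status(
        QueryStatus.INVALID_REQUEST, request_status, timer_tags, referrer, exception_name
    )


def record_error_building_request(timer_tags, request_status, referrer, exception_name=None):
    # System errors hit while parsing/validating the request.
    _record_failure_metric_with_status(
        QueryStatus.ERROR, request_status, timer_tags, referrer, exception_name
    )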
15 changes: 12 additions & 3 deletions snuba/web/query.py
@@ -12,9 +12,9 @@
    EntityProcessingStage,
    StorageProcessingStage,
)
-from snuba.query.exceptions import QueryPlanException
-from snuba.querylog import record_query
-from snuba.querylog.query_metadata import SnubaQueryMetadata
+from snuba.query.exceptions import InvalidQueryException, QueryPlanException
+from snuba.querylog import record_invalid_request, record_query
+from snuba.querylog.query_metadata import SnubaQueryMetadata, get_request_status
from snuba.request import Request
from snuba.utils.metrics.gauge import Gauge
from snuba.utils.metrics.timer import Timer

@@ -79,6 +79,15 @@ def run_query(
        if not request.query_settings.get_dry_run():
            record_query(request, timer, query_metadata, result)
        _set_query_final(request, result.extra)
+    except InvalidQueryException as error:
+        request_status = get_request_status(error)
+        record_invalid_request(
+            timer,
+            request_status,
+            request.attribution_info.referrer,
+            str(type(error).__name__),
+        )
+        raise error
    except QueryException as error:
        _set_query_final(request, error.extra)
        record_query(request, timer, query_metadata, error)
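Previously an InvalidQueryException raised inside run_query fell through to generic handling; now it is recorded as an invalid request and re-raised so the HTTP layer can map it to a 400. A self-contained sketch of that control flow (the fake validation check and the metric recorders below are simplified stand-ins for Snuba's internals):

class InvalidQueryException(Exception):
    """Client error raised during query parsing/validation."""


class QueryException(Exception):
    """System error raised while executing the query."""


def record_invalid_request(referrer: str, exception_name: str) -> None:
    # Stand-in for snuba.querylog.record_invalid_request.
    print(f"invalid_request: referrer={referrer} exception={exception_name}")


def record_query_error(referrer: str, exception_name: str) -> None:
    # Stand-in for the existing querylog path on execution errors.
    print(f"query_error: referrer={referrer} exception={exception_name}")


def run_query(referrer: str, query: str) -> str:
    try:
        # Pretend validation: tag conditions must compare against strings.
        if "tags[count] = 419" in query:
            raise InvalidQueryException("419 must be a string")
        return "result"
    except InvalidQueryException as error:
        # New branch: record the failure as a client error, then re-raise
        # so the caller can translate it into an HTTP 400.
        record_invalid_request(referrer, type(error).__name__)
        raise
    except QueryException as error:
        # Existing branch: execution failures keep their querylog entry.
        record_query_error(referrer, type(error).__name__)
        raise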
15 changes: 13 additions & 2 deletions snuba/web/views.py
@@ -373,10 +373,21 @@ def dataset_query(
    try:
        result = run_query(dataset, request, timer)
        assert result.extra["stats"]
+    except InvalidQueryException as exception:
+        details: Mapping[str, Any]
+        details = {
+            "type": "invalid_query",
+            "message": str(exception),
+        }
+        return Response(
+            json.dumps(
+                {"error": details},
+            ),
+            400,
+            {"Content-Type": "application/json"},
+        )
    except QueryException as exception:
        status = 500
-        details: Mapping[str, Any]
-
        cause = exception.__cause__
        if isinstance(cause, (RateLimitExceeded, AllocationPolicyViolations)):
            status = 429
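From a caller's perspective, an invalid query now comes back as a structured 400 instead of surfacing through the generic error path. A hedged example of how a client might branch on the new payload (the host, port, and use of the requests library are assumptions for illustration, not part of the commit):

import json

import requests  # any HTTP client works; used here for brevity

# Hypothetical local Snuba API endpoint; adjust host/port to your deployment.
resp = requests.post(
    "http://127.0.0.1:1218/discover/snql",
    data=json.dumps({"query": "MATCH (discover) SELECT count() AS `count`"}),
)

if resp.status_code == 400 and resp.json()["error"]["type"] == "invalid_query":
    # Client-side problem: fix the query rather than retrying.
    print("invalid query:", resp.json()["error"]["message"])
elif resp.status_code == 429:
    # Rate limit / allocation policy violations keep their existing handling.
    print("throttled, retry later")
else:
    resp.raise_for_status()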
120 changes: 62 additions & 58 deletions tests/test_snql_api.py
@@ -1136,67 +1136,71 @@ def test_clickhouse_illegal_type_error(self) -> None:
        assert response.status_code == 400

    def test_invalid_tag_queries(self) -> None:
-        response = self.post(
-            "/discover/snql",
-            data=json.dumps(
-                {
-                    "query": f"""MATCH (discover)
-                    SELECT count() AS `count`
-                    BY time, tags[error_code] AS `error_code`, tags[count] AS `count`
-                    WHERE ifNull(tags[user_flow], '') = 'buy'
-                    AND timestamp >= toDateTime('{self.base_time.isoformat()}')
-                    AND timestamp < toDateTime('{self.next_time.isoformat()}')
-                    AND project_id IN array({self.project_id})
-                    AND environment = 'www.something.com'
-                    AND tags[error_code] = '2300'
-                    AND tags[count] = 419
-                    ORDER BY time ASC LIMIT 10000
-                    GRANULARITY 3600""",
-                    "turbo": False,
-                    "consistent": True,
-                    "debug": True,
-                }
-            ),
-        )
+        with patch(
+            "snuba.querylog._record_failure_metric_with_status"
+        ) as record_failure_metric_mock:
+            response = self.post(
+                "/discover/snql",
+                data=json.dumps(
+                    {
+                        "query": f"""MATCH (discover)
+                        SELECT count() AS `count`
+                        BY time, tags[error_code] AS `error_code`, tags[count] AS `count`
+                        WHERE ifNull(tags[user_flow], '') = 'buy'
+                        AND timestamp >= toDateTime('{self.base_time.isoformat()}')
+                        AND timestamp < toDateTime('{self.next_time.isoformat()}')
+                        AND project_id IN array({self.project_id})
+                        AND environment = 'www.something.com'
+                        AND tags[error_code] = '2300'
+                        AND tags[count] = 419
+                        ORDER BY time ASC LIMIT 10000
+                        GRANULARITY 3600""",
+                        "turbo": False,
+                        "consistent": True,
+                        "debug": True,
+                    }
+                ),
+            )

-        assert response.status_code == 400
-        data = response.json
-        assert data["error"]["type"] == "invalid_query"
-        assert (
-            data["error"]["message"]
-            == "validation failed for entity discover: invalid tag condition on 'tags[count]': 419 must be a string"
-        )
+            assert response.status_code == 400
+            data = response.json
+            assert data["error"]["type"] == "invalid_query"
+            assert (
+                data["error"]["message"]
+                == "validation failed for entity discover: invalid tag condition on 'tags[count]': 419 must be a string"
+            )

-        response = self.post(
-            "/discover/snql",
-            data=json.dumps(
-                {
-                    "query": f"""MATCH (discover)
-                    SELECT count() AS `count`
-                    BY time, tags[error_code] AS `error_code`, tags[count] AS `count`
-                    WHERE ifNull(tags[user_flow], '') = 'buy'
-                    AND timestamp >= toDateTime('{self.base_time.isoformat()}')
-                    AND timestamp < toDateTime('{self.next_time.isoformat()}')
-                    AND project_id IN array({self.project_id})
-                    AND environment = 'www.something.com'
-                    AND tags[error_code] IN array('2300')
-                    AND tags[count] IN array(419, 70, 175, 181, 58)
-                    ORDER BY time ASC LIMIT 10000
-                    GRANULARITY 3600""",
-                    "turbo": False,
-                    "consistent": True,
-                    "debug": True,
-                }
-            ),
-        )
+            response = self.post(
+                "/discover/snql",
+                data=json.dumps(
+                    {
+                        "query": f"""MATCH (discover)
+                        SELECT count() AS `count`
+                        BY time, tags[error_code] AS `error_code`, tags[count] AS `count`
+                        WHERE ifNull(tags[user_flow], '') = 'buy'
+                        AND timestamp >= toDateTime('{self.base_time.isoformat()}')
+                        AND timestamp < toDateTime('{self.next_time.isoformat()}')
+                        AND project_id IN array({self.project_id})
+                        AND environment = 'www.something.com'
+                        AND tags[error_code] IN array('2300')
+                        AND tags[count] IN array(419, 70, 175, 181, 58)
+                        ORDER BY time ASC LIMIT 10000
+                        GRANULARITY 3600""",
+                        "turbo": False,
+                        "consistent": True,
+                        "debug": True,
+                    }
+                ),
+            )

-        assert response.status_code == 400
-        data = response.json
-        assert data["error"]["type"] == "invalid_query"
-        assert (
-            data["error"]["message"]
-            == "validation failed for entity discover: invalid tag condition on 'tags[count]': array literal 419 must be a string"
-        )
+            assert response.status_code == 400
+            data = response.json
+            assert data["error"]["type"] == "invalid_query"
+            assert (
+                data["error"]["message"]
+                == "validation failed for entity discover: invalid tag condition on 'tags[count]': array literal 419 must be a string"
+            )
+            assert record_failure_metric_mock.call_count == 2

    def test_datetime_condition_types(self) -> None:
        response = self.post(
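The reworked test wraps both POSTs in a patch() context manager so it can also assert that the private metric helper fired once per rejected query. A stripped-down, self-contained version of that pattern (the module-local stand-ins below replace Snuba's test client and querylog module):

from unittest import mock


def _record_failure_metric_with_status(status: str, referrer: str) -> None:
    # Stand-in for snuba.querylog._record_failure_metric_with_status.
    pass


def record_invalid_request(referrer: str) -> None:
    # Looks the helper up as a module global, so mock.patch can intercept it.
    _record_failure_metric_with_status("invalid_request", referrer)


def test_metric_recorded_for_each_invalid_query() -> None:
    # Patch the helper at its import location and count calls afterwards,
    # mirroring the assertion added at the end of test_invalid_tag_queries.
    with mock.patch(
        f"{__name__}._record_failure_metric_with_status"
    ) as record_failure_metric_mock:
        record_invalid_request("api.discover")
        record_invalid_request("api.discover")
    assert record_failure_metric_mock.call_count == 2


if __name__ == "__main__":
    test_metric_recorded_for_each_invalid_query()
    print("ok")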
