diff --git a/superset-frontend/src/components/DatabaseSelector/index.tsx b/superset-frontend/src/components/DatabaseSelector/index.tsx
index c1b13f6f70d8a..a65ae5ef3a6a2 100644
--- a/superset-frontend/src/components/DatabaseSelector/index.tsx
+++ b/superset-frontend/src/components/DatabaseSelector/index.tsx
@@ -24,6 +24,10 @@ import Label from 'src/components/Label';
 import { FormLabel } from 'src/components/Form';
 import RefreshLabel from 'src/components/RefreshLabel';
 import { useToasts } from 'src/components/MessageToasts/withToasts';
+import {
+  getClientErrorMessage,
+  getClientErrorObject,
+} from 'src/utils/getClientErrorObject';
 
 const DatabaseSelectorWrapper = styled.div`
   ${({ theme }) => `
@@ -238,9 +242,16 @@ export default function DatabaseSelector({
           setLoadingSchemas(false);
           if (refresh > 0) addSuccessToast('List refreshed');
         })
-        .catch(() => {
+        .catch(err => {
           setLoadingSchemas(false);
-          handleError(t('There was an error loading the schemas'));
+          getClientErrorObject(err).then(clientError => {
+            handleError(
+              getClientErrorMessage(
+                t('There was an error loading the schemas'),
+                clientError,
+              ),
+            );
+          });
         });
     }
   }, [currentDb, onSchemasLoad, refresh]);
diff --git a/superset-frontend/src/components/TableSelector/index.tsx b/superset-frontend/src/components/TableSelector/index.tsx
index a0f6e5366bf32..e41696b7e62e0 100644
--- a/superset-frontend/src/components/TableSelector/index.tsx
+++ b/superset-frontend/src/components/TableSelector/index.tsx
@@ -38,6 +38,10 @@ import WarningIconWithTooltip from 'src/components/WarningIconWithTooltip';
 import { useToasts } from 'src/components/MessageToasts/withToasts';
 import { SchemaOption } from 'src/SqlLab/types';
 import { useTables, Table } from 'src/hooks/apiResources';
+import {
+  getClientErrorMessage,
+  getClientErrorObject,
+} from 'src/utils/getClientErrorObject';
 
 const REFRESH_WIDTH = 30;
 
@@ -187,7 +191,16 @@ const TableSelector: FunctionComponent<TableSelectorProps> = ({
         addSuccessToast(t('List updated'));
       }
     },
-    onError: () => handleError(t('There was an error loading the tables')),
+    onError: (err: Response) => {
+      getClientErrorObject(err).then(clientError => {
+        handleError(
+          getClientErrorMessage(
+            t('There was an error loading the tables'),
+            clientError,
+          ),
+        );
+      });
+    },
   });
 
   const tableOptions = useMemo(
diff --git a/superset-frontend/src/explore/ExplorePage.tsx b/superset-frontend/src/explore/ExplorePage.tsx
index 6084cee7826e3..cea43560afb69 100644
--- a/superset-frontend/src/explore/ExplorePage.tsx
+++ b/superset-frontend/src/explore/ExplorePage.tsx
@@ -42,10 +42,8 @@ import { fallbackExploreInitialData } from './fixtures';
 import { getItem, LocalStorageKeys } from '../utils/localStorageHelpers';
 import { getFormDataWithDashboardContext } from './controlUtils/getFormDataWithDashboardContext';
 
-const isResult = (rv: JsonObject): rv is ExploreResponsePayload =>
-  rv?.result?.form_data &&
-  rv?.result?.dataset &&
-  isDefined(rv?.result?.dataset?.id);
+const isValidResult = (rv: JsonObject): boolean =>
+  rv?.result?.form_data && isDefined(rv?.result?.dataset?.id);
 
 const fetchExploreData = async (exploreUrlParams: URLSearchParams) => {
   try {
@@ -53,10 +51,15 @@ const fetchExploreData = async (exploreUrlParams: URLSearchParams) => {
       method: 'GET',
       endpoint: 'api/v1/explore/',
     })(exploreUrlParams);
-    if (isResult(rv)) {
+    if (isValidResult(rv)) {
       return rv;
     }
-    throw new Error(t('Failed to load chart data.'));
+    let message = t('Failed to load chart data');
+    const responseError = rv?.result?.message;
+    if (responseError) {
+      message = `${message}:\n${responseError}`;
+    }
+    throw new Error(message);
   } catch (err) {
     // todo: encapsulate the error handler
     const clientError = await getClientErrorObject(err);
diff --git a/superset-frontend/src/utils/getClientErrorObject.ts b/superset-frontend/src/utils/getClientErrorObject.ts
index b6b6c58995dab..2706031e8266d 100644
--- a/superset-frontend/src/utils/getClientErrorObject.ts
+++ b/superset-frontend/src/utils/getClientErrorObject.ts
@@ -171,3 +171,15 @@ export function getClientErrorObject(
     });
   });
 }
+
+export function getClientErrorMessage(
+  message: string,
+  clientError?: ClientErrorObject,
+) {
+  let finalMessage = message;
+  const errorMessage = clientError?.message || clientError?.error;
+  if (errorMessage) {
+    finalMessage = `${finalMessage}:\n${errorMessage}`;
+  }
+  return finalMessage;
+}
diff --git a/superset/databases/api.py b/superset/databases/api.py
index 07253a5548eb1..c3efda9501d4b 100644
--- a/superset/databases/api.py
+++ b/superset/databases/api.py
@@ -75,7 +75,7 @@
 from superset.databases.utils import get_table_metadata
 from superset.db_engine_specs import get_available_engine_specs
 from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
-from superset.exceptions import SupersetErrorsException
+from superset.exceptions import SupersetErrorsException, SupersetException
 from superset.extensions import security_manager
 from superset.models.core import Database
 from superset.superset_typing import FlaskResponse
@@ -293,6 +293,8 @@ def post(self) -> FlaskResponse:
                 exc_info=True,
             )
             return self.response_422(message=str(ex))
+        except SupersetException as ex:
+            return self.response(ex.status, message=ex.message)
 
     @expose("/<int:pk>", methods=["PUT"])
     @protect()
@@ -486,6 +488,8 @@ def schemas(self, pk: int, **kwargs: Any) -> FlaskResponse:
             return self.response(
                 500, message="There was an error connecting to the database"
             )
+        except SupersetException as ex:
+            return self.response(ex.status, message=ex.message)
 
     @expose("/<int:pk>/table/<table_name>/<schema_name>/", methods=["GET"])
     @protect()
@@ -544,6 +548,9 @@ def table_metadata(
         except SQLAlchemyError as ex:
             self.incr_stats("error", self.table_metadata.__name__)
             return self.response_422(error_msg_from_exception(ex))
+        except SupersetException as ex:
+            return self.response(ex.status, message=ex.message)
+
         self.incr_stats("success", self.table_metadata.__name__)
         return self.response(200, **table_info)
diff --git a/superset/db_engine_specs/base.py b/superset/db_engine_specs/base.py
index dabed0c7aeeec..55a026541ade3 100644
--- a/superset/db_engine_specs/base.py
+++ b/superset/db_engine_specs/base.py
@@ -639,7 +639,7 @@ def fetch_data(
                 return cursor.fetchmany(limit)
             return cursor.fetchall()
         except Exception as ex:
-            raise cls.get_dbapi_mapped_exception(ex)
+            raise cls.get_dbapi_mapped_exception(ex) from ex
 
     @classmethod
     def expand_data(
@@ -1025,7 +1025,11 @@ def get_table_names(  # pylint: disable=unused-argument
         :param schema: Schema to inspect. If omitted, uses default schema for database
         :return: All tables in schema
         """
-        tables = inspector.get_table_names(schema)
+        try:
+            tables = inspector.get_table_names(schema)
+        except Exception as ex:
+            raise cls.get_dbapi_mapped_exception(ex) from ex
+
         if schema and cls.try_remove_schema_from_table_name:
             tables = [re.sub(f"^{schema}\\.", "", table) for table in tables]
         return sorted(tables)
@@ -1045,7 +1049,11 @@ def get_view_names(  # pylint: disable=unused-argument
         :param schema: Schema name. If omitted, uses default schema for database
         :return: All views in schema
         """
-        views = inspector.get_view_names(schema)
+        try:
+            views = inspector.get_view_names(schema)
+        except Exception as ex:
+            raise cls.get_dbapi_mapped_exception(ex) from ex
+
         if schema and cls.try_remove_schema_from_table_name:
             views = [re.sub(f"^{schema}\\.", "", view) for view in views]
         return sorted(views)
@@ -1326,7 +1334,7 @@ def execute(  # pylint: disable=unused-argument
         try:
             cursor.execute(query)
         except Exception as ex:
-            raise cls.get_dbapi_mapped_exception(ex)
+            raise cls.get_dbapi_mapped_exception(ex) from ex
 
     @classmethod
     def make_label_compatible(cls, label: str) -> Union[str, quoted_name]:
diff --git a/superset/db_engine_specs/bigquery.py b/superset/db_engine_specs/bigquery.py
index 59ed8e1bc9e1f..ffeff31b17787 100644
--- a/superset/db_engine_specs/bigquery.py
+++ b/superset/db_engine_specs/bigquery.py
@@ -35,7 +35,7 @@
 from superset.databases.schemas import encrypted_field_properties, EncryptedString
 from superset.databases.utils import make_url_safe
 from superset.db_engine_specs.base import BaseEngineSpec, BasicPropertiesType
-from superset.db_engine_specs.exceptions import SupersetDBAPIDisconnectionError
+from superset.db_engine_specs.exceptions import SupersetDBAPIConnectionError
 from superset.errors import SupersetError, SupersetErrorType
 from superset.sql_parse import Table
 from superset.utils import core as utils
@@ -449,7 +449,7 @@ def get_dbapi_exception_mapping(cls) -> Dict[Type[Exception], Type[Exception]]:
         # pylint: disable=import-outside-toplevel
         from google.auth.exceptions import DefaultCredentialsError
 
-        return {DefaultCredentialsError: SupersetDBAPIDisconnectionError}
+        return {DefaultCredentialsError: SupersetDBAPIConnectionError}
 
     @classmethod
     def validate_parameters(
diff --git a/superset/db_engine_specs/druid.py b/superset/db_engine_specs/druid.py
index ff0a438d7e51b..1484429bafbce 100644
--- a/superset/db_engine_specs/druid.py
+++ b/superset/db_engine_specs/druid.py
@@ -17,13 +17,14 @@
 import json
 import logging
 from datetime import datetime
-from typing import Any, Dict, List, Optional, TYPE_CHECKING
+from typing import Any, Dict, List, Optional, Type, TYPE_CHECKING
 
 from sqlalchemy import types
 from sqlalchemy.engine.reflection import Inspector
 
 from superset import is_feature_enabled
 from superset.db_engine_specs.base import BaseEngineSpec
+from superset.db_engine_specs.exceptions import SupersetDBAPIConnectionError
 from superset.exceptions import SupersetException
 from superset.utils import core as utils
 
@@ -136,3 +137,12 @@ def get_columns(
             type_map["complex"] = types.BLOB
 
         return super().get_columns(inspector, table_name, schema)
+
+    @classmethod
+    def get_dbapi_exception_mapping(cls) -> Dict[Type[Exception], Type[Exception]]:
+        # pylint: disable=import-error,import-outside-toplevel
+        from requests import exceptions as requests_exceptions
+
+        return {
+            requests_exceptions.ConnectionError: SupersetDBAPIConnectionError,
+        }
diff --git a/superset/db_engine_specs/exceptions.py b/superset/db_engine_specs/exceptions.py
index 6b4fb5549dd20..56e354d62a574 100644
--- a/superset/db_engine_specs/exceptions.py
+++ b/superset/db_engine_specs/exceptions.py
@@ -29,7 +29,7 @@ class SupersetDBAPIDatabaseError(SupersetDBAPIError):
     pass
 
 
-class SupersetDBAPIDisconnectionError(SupersetDBAPIError):
+class SupersetDBAPIConnectionError(SupersetDBAPIError):
     pass
 
 
diff --git a/superset/db_engine_specs/trino.py b/superset/db_engine_specs/trino.py
index 0ba6eb1d1f3e6..490a36ab2bbc2 100644
--- a/superset/db_engine_specs/trino.py
+++ b/superset/db_engine_specs/trino.py
@@ -17,7 +17,7 @@
 from __future__ import annotations
 
 import logging
-from typing import Any, Dict, Optional, TYPE_CHECKING
+from typing import Any, Dict, Optional, Type, TYPE_CHECKING
 
 import simplejson as json
 from flask import current_app
@@ -27,6 +27,7 @@
 from superset.constants import USER_AGENT
 from superset.databases.utils import make_url_safe
 from superset.db_engine_specs.base import BaseEngineSpec
+from superset.db_engine_specs.exceptions import SupersetDBAPIConnectionError
 from superset.db_engine_specs.presto import PrestoBaseEngineSpec
 from superset.models.sql_lab import Query
 from superset.utils import core as utils
@@ -220,3 +221,12 @@ def update_params_from_encrypted_extra(
         except json.JSONDecodeError as ex:
             logger.error(ex, exc_info=True)
             raise ex
+
+    @classmethod
+    def get_dbapi_exception_mapping(cls) -> Dict[Type[Exception], Type[Exception]]:
+        # pylint: disable=import-error,import-outside-toplevel
+        from requests import exceptions as requests_exceptions
+
+        return {
+            requests_exceptions.ConnectionError: SupersetDBAPIConnectionError,
+        }
diff --git a/superset/explore/commands/get.py b/superset/explore/commands/get.py
index 331f662767ca9..5bd42e2dc4a88 100644
--- a/superset/explore/commands/get.py
+++ b/superset/explore/commands/get.py
@@ -144,7 +144,8 @@ def run(self) -> Optional[Dict[str, Any]]:
             utils.merge_extra_filters(form_data)
             utils.merge_request_params(form_data, request.args)
 
-        dummy_dataset_data: Dict[str, Any] = {
+        # TODO: this is a dummy placeholder - should be refactored to being just `None`
+        dataset_data: Dict[str, Any] = {
             "type": self._dataset_type,
             "name": dataset_name,
             "columns": [],
@@ -152,9 +153,12 @@ def run(self) -> Optional[Dict[str, Any]]:
             "database": {"id": 0, "backend": ""},
         }
         try:
-            dataset_data = dataset.data if dataset else dummy_dataset_data
-        except (SupersetException, SQLAlchemyError):
-            dataset_data = dummy_dataset_data
+            if dataset:
+                dataset_data = dataset.data
+        except SupersetException as ex:
+            message = ex.message
+        except SQLAlchemyError:
+            message = "SQLAlchemy error"
 
         metadata = None
 
diff --git a/superset/models/core.py b/superset/models/core.py
index d0a32a1864c2b..36832a977b0ec 100755
--- a/superset/models/core.py
+++ b/superset/models/core.py
@@ -563,9 +563,8 @@ def get_all_table_names_in_schema(  # pylint: disable=unused-argument
                 database=self, inspector=self.inspector, schema=schema
             )
             return [(table, schema) for table in tables]
-        except Exception:  # pylint: disable=broad-except
-            logger.warning("Get all table names in schema failed", exc_info=True)
-            return []
+        except Exception as ex:  # pylint: disable=broad-except
+            raise self.db_engine_spec.get_dbapi_mapped_exception(ex)
 
     @cache_util.memoized_func(
         key="db:{self.id}:schema:{schema}:view_list",
@@ -594,9 +593,8 @@ def get_all_view_names_in_schema(  # pylint: disable=unused-argument
                 database=self, inspector=self.inspector, schema=schema
             )
             return [(view, schema) for view in views]
-        except Exception:  # pylint: disable=broad-except
-            logger.warning("Get all view names failed", exc_info=True)
-            return []
+        except Exception as ex:  # pylint: disable=broad-except
+            raise self.db_engine_spec.get_dbapi_mapped_exception(ex)
 
     @cache_util.memoized_func(
         key="db:{self.id}:schema_list",
@@ -618,7 +616,10 @@ def get_all_schema_names(  # pylint: disable=unused-argument
         :param force: whether to force refresh the cache
         :return: schema list
         """
-        return self.db_engine_spec.get_schema_names(self.inspector)
+        try:
+            return self.db_engine_spec.get_schema_names(self.inspector)
+        except Exception as ex:
+            raise self.db_engine_spec.get_dbapi_mapped_exception(ex) from ex
 
     @property
     def db_engine_spec(self) -> Type[db_engine_specs.BaseEngineSpec]:
diff --git a/superset/views/core.py b/superset/views/core.py
index 60ce1edfd2dc6..371a632f29535 100755
--- a/superset/views/core.py
+++ b/superset/views/core.py
@@ -1165,35 +1165,40 @@ def tables(  # pylint: disable=no-self-use
         )
 
         if not database:
-            return json_error_response("Database not found", status=404)
-
-        tables = security_manager.get_datasources_accessible_by_user(
-            database=database,
-            schema=schema_parsed,
-            datasource_names=[
-                utils.DatasourceName(*datasource_name)
-                for datasource_name in database.get_all_table_names_in_schema(
-                    schema=schema_parsed,
-                    force=force_refresh_parsed,
-                    cache=database.table_cache_enabled,
-                    cache_timeout=database.table_cache_timeout,
-                )
-            ],
-        )
+            return json_error_response(
+                __("Database not found: %(id)s", id=db_id), status=404
+            )
 
-        views = security_manager.get_datasources_accessible_by_user(
-            database=database,
-            schema=schema_parsed,
-            datasource_names=[
-                utils.DatasourceName(*datasource_name)
-                for datasource_name in database.get_all_view_names_in_schema(
-                    schema=schema_parsed,
-                    force=force_refresh_parsed,
-                    cache=database.table_cache_enabled,
-                    cache_timeout=database.table_cache_timeout,
-                )
-            ],
-        )
+        try:
+            tables = security_manager.get_datasources_accessible_by_user(
+                database=database,
+                schema=schema_parsed,
+                datasource_names=[
+                    utils.DatasourceName(*datasource_name)
+                    for datasource_name in database.get_all_table_names_in_schema(
+                        schema=schema_parsed,
+                        force=force_refresh_parsed,
+                        cache=database.table_cache_enabled,
+                        cache_timeout=database.table_cache_timeout,
+                    )
+                ],
+            )
+
+            views = security_manager.get_datasources_accessible_by_user(
+                database=database,
+                schema=schema_parsed,
+                datasource_names=[
+                    utils.DatasourceName(*datasource_name)
+                    for datasource_name in database.get_all_view_names_in_schema(
+                        schema=schema_parsed,
+                        force=force_refresh_parsed,
+                        cache=database.table_cache_enabled,
+                        cache_timeout=database.table_cache_timeout,
+                    )
+                ],
+            )
+        except SupersetException as ex:
+            return json_error_response(ex.message, ex.status)
 
         extra_dict_by_name = {
             table.name: table.extra_dict