diff --git a/superset-frontend/cypress-base/cypress/integration/explore/_skip.AdhocFilters.test.ts b/superset-frontend/cypress-base/cypress/integration/explore/_skip.AdhocFilters.test.ts index 8eb12a95b14f7..a4e9c8fe46888 100644 --- a/superset-frontend/cypress-base/cypress/integration/explore/_skip.AdhocFilters.test.ts +++ b/superset-frontend/cypress-base/cypress/integration/explore/_skip.AdhocFilters.test.ts @@ -18,7 +18,9 @@ */ describe.skip('AdhocFilters', () => { beforeEach(() => { - cy.intercept('GET', '/superset/filter/table/*/name').as('filterValues'); + cy.intercept('GET', '/api/v1/datasource/table/*/column/name/values').as( + 'filterValues', + ); cy.intercept('POST', '/superset/explore_json/**').as('postJson'); cy.intercept('GET', '/superset/explore_json/**').as('getJson'); cy.visitChartByName('Boys'); // a table chart diff --git a/superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterEditPopoverSimpleTabContent/index.tsx b/superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterEditPopoverSimpleTabContent/index.tsx index c15123ef3498b..f0b05d8f1f6fe 100644 --- a/superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterEditPopoverSimpleTabContent/index.tsx +++ b/superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterEditPopoverSimpleTabContent/index.tsx @@ -410,14 +410,16 @@ const AdhocFilterEditPopoverSimpleTabContent: React.FC = props => { setLoadingComparatorSuggestions(true); SupersetClient.get({ signal, - endpoint: `/superset/filter/${datasource.type}/${datasource.id}/${col}/`, + endpoint: `/api/v1/datasource/${datasource.type}/${datasource.id}/column/${col}/values/`, }) .then(({ json }) => { setSuggestions( - json.map((suggestion: null | number | boolean | string) => ({ - value: suggestion, - label: optionLabel(suggestion), - })), + json.result.map( + (suggestion: null | number | boolean | string) => ({ + value: suggestion, + label: optionLabel(suggestion), + }), + ), ); 
setLoadingComparatorSuggestions(false); }) diff --git a/superset/datasource/api.py b/superset/datasource/api.py new file mode 100644 index 0000000000000..be246c915d535 --- /dev/null +++ b/superset/datasource/api.py @@ -0,0 +1,130 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+import logging + +from flask_appbuilder.api import expose, protect, safe + +from superset import app, db, event_logger +from superset.dao.exceptions import DatasourceNotFound, DatasourceTypeNotSupportedError +from superset.datasource.dao import DatasourceDAO +from superset.exceptions import SupersetSecurityException +from superset.superset_typing import FlaskResponse +from superset.utils.core import apply_max_row_limit, DatasourceType +from superset.views.base_api import BaseSupersetApi, statsd_metrics + +logger = logging.getLogger(__name__) + + +class DatasourceRestApi(BaseSupersetApi): + allow_browser_login = True + class_permission_name = "Datasource" + resource_name = "datasource" + openapi_spec_tag = "Datasources" + + @expose( + "/<datasource_type>/<int:datasource_id>/column/<column_name>/values/", + methods=["GET"], + ) + @protect() + @safe + @statsd_metrics + @event_logger.log_this_with_context( + action=lambda self, *args, **kwargs: f"{self.__class__.__name__}" + f".get_column_values", + log_to_statsd=False, + ) + def get_column_values( + self, datasource_type: str, datasource_id: int, column_name: str + ) -> FlaskResponse: + """Get possible values for a datasource column + --- + get: + summary: Get possible values for a datasource column + parameters: + - in: path + schema: + type: string + name: datasource_type + description: The type of datasource + - in: path + schema: + type: integer + name: datasource_id + description: The id of the datasource + - in: path + schema: + type: string + name: column_name + description: The name of the column to get values for + responses: + 200: + description: A List of distinct values for the column + content: + application/json: + schema: + type: object + properties: + result: + type: array + items: + oneOf: + - type: string + - type: integer + - type: number + - type: boolean + - type: object + 400: + $ref: '#/components/responses/400' + 401: + $ref: '#/components/responses/401' + 403: + $ref: '#/components/responses/403' + 404: + $ref: '#/components/responses/404'
+ 500: + $ref: '#/components/responses/500' + """ + try: + datasource = DatasourceDAO.get_datasource( + db.session, DatasourceType(datasource_type), datasource_id + ) + datasource.raise_for_access() + except ValueError: + return self.response( + 400, message=f"Invalid datasource type: {datasource_type}" + ) + except DatasourceTypeNotSupportedError as ex: + return self.response(400, message=ex.message) + except DatasourceNotFound as ex: + return self.response(404, message=ex.message) + except SupersetSecurityException as ex: + return self.response(403, message=ex.message) + + row_limit = apply_max_row_limit(app.config["FILTER_SELECT_ROW_LIMIT"]) + try: + payload = datasource.values_for_column( + column_name=column_name, limit=row_limit + ) + return self.response(200, result=payload) + except NotImplementedError: + return self.response( + 400, + message=( + "Unable to get column values for " + f"datasource type: {datasource_type}" + ), + ) diff --git a/superset/initialization/__init__.py b/superset/initialization/__init__.py index bcfaf7839f9e6..870b066787c1c 100644 --- a/superset/initialization/__init__.py +++ b/superset/initialization/__init__.py @@ -138,6 +138,7 @@ def init_views(self) -> None: from superset.datasets.api import DatasetRestApi from superset.datasets.columns.api import DatasetColumnsRestApi from superset.datasets.metrics.api import DatasetMetricRestApi + from superset.datasource.api import DatasourceRestApi from superset.embedded.api import EmbeddedDashboardRestApi from superset.embedded.view import EmbeddedView from superset.explore.api import ExploreRestApi @@ -207,6 +208,7 @@ def init_views(self) -> None: appbuilder.add_api(DatasetRestApi) appbuilder.add_api(DatasetColumnsRestApi) appbuilder.add_api(DatasetMetricRestApi) + appbuilder.add_api(DatasourceRestApi) appbuilder.add_api(EmbeddedDashboardRestApi) appbuilder.add_api(ExploreRestApi) appbuilder.add_api(ExploreFormDataRestApi) diff --git a/superset/security/manager.py 
b/superset/security/manager.py index 9fe07ec16c6fd..b1be13b782701 100644 --- a/superset/security/manager.py +++ b/superset/security/manager.py @@ -406,8 +406,10 @@ def get_datasource_access_error_msg(datasource: "BaseDatasource") -> str: :returns: The error message """ - return f"""This endpoint requires the datasource {datasource.name}, database or - `all_datasource_access` permission""" + return ( + f"This endpoint requires the datasource {datasource.name}, " + "database or `all_datasource_access` permission" + ) @staticmethod def get_datasource_access_link( # pylint: disable=unused-argument diff --git a/superset/views/core.py b/superset/views/core.py index 9733ea2910a09..83dc003295f40 100755 --- a/superset/views/core.py +++ b/superset/views/core.py @@ -999,6 +999,7 @@ def explore( @has_access_api @event_logger.log_this @expose("/filter/<datasource_type>/<int:datasource_id>/<column>/") + @deprecated() def filter( # pylint: disable=no-self-use self, datasource_type: str, datasource_id: int, column: str ) -> FlaskResponse: diff --git a/tests/integration_tests/core_tests.py b/tests/integration_tests/core_tests.py index 1b35d81f8394a..deedfb5e5d786 100644 --- a/tests/integration_tests/core_tests.py +++ b/tests/integration_tests/core_tests.py @@ -325,21 +325,13 @@ def test_save_slice(self): @pytest.mark.usefixtures("load_energy_table_with_slice") def test_filter_endpoint(self): self.login(username="admin") - slice_name = "Energy Sankey" - slice_id = self.get_slice(slice_name, db.session).id - db.session.commit() tbl_id = self.table_ids.get("energy_usage") table = db.session.query(SqlaTable).filter(SqlaTable.id == tbl_id) table.filter_select_enabled = True - url = ( - "/superset/filter/table/{}/target/?viz_type=sankey&groupby=source" - "&metric=sum__value&flt_col_0=source&flt_op_0=in&flt_eq_0=&" - "slice_id={}&datasource_name=energy_usage&" - "datasource_id=1&datasource_type=table" - ) + url = "/superset/filter/table/{}/target/" # Changing name - resp = self.get_resp(url.format(tbl_id, slice_id)) + resp = 
self.get_resp(url.format(tbl_id)) assert len(resp) > 0 assert "energy_target0" in resp diff --git a/tests/integration_tests/datasource/__init__.py b/tests/integration_tests/datasource/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/datasource/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/datasource/api_tests.py b/tests/integration_tests/datasource/api_tests.py new file mode 100644 index 0000000000000..522aa33383e62 --- /dev/null +++ b/tests/integration_tests/datasource/api_tests.py @@ -0,0 +1,137 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import json +from unittest.mock import Mock, patch + +import pytest + +from superset import db, security_manager +from superset.connectors.sqla.models import SqlaTable +from superset.dao.exceptions import DatasourceTypeNotSupportedError +from tests.integration_tests.base_tests import SupersetTestCase + + +class TestDatasourceApi(SupersetTestCase): + def get_virtual_dataset(self): + return ( + db.session.query(SqlaTable) + .filter(SqlaTable.table_name == "virtual_dataset") + .one() + ) + + @pytest.mark.usefixtures("app_context", "virtual_dataset") + def test_get_column_values_ints(self): + self.login(username="admin") + table = self.get_virtual_dataset() + rv = self.client.get(f"api/v1/datasource/table/{table.id}/column/col1/values/") + self.assertEqual(rv.status_code, 200) + response = json.loads(rv.data.decode("utf-8")) + for val in range(10): + assert val in response["result"] + + @pytest.mark.usefixtures("app_context", "virtual_dataset") + def test_get_column_values_strs(self): + self.login(username="admin") + table = self.get_virtual_dataset() + rv = self.client.get(f"api/v1/datasource/table/{table.id}/column/col2/values/") + self.assertEqual(rv.status_code, 200) + response = json.loads(rv.data.decode("utf-8")) + for val in ["a", "b", "c", "d", "e", "f", "g", "h", "i", "j"]: + assert val in response["result"] + + @pytest.mark.usefixtures("app_context", "virtual_dataset") + def test_get_column_values_floats(self): + self.login(username="admin") + table = self.get_virtual_dataset() + rv = 
self.client.get(f"api/v1/datasource/table/{table.id}/column/col3/values/") + self.assertEqual(rv.status_code, 200) + response = json.loads(rv.data.decode("utf-8")) + for val in [1.0, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 1.9]: + assert val in response["result"] + + @pytest.mark.usefixtures("app_context", "virtual_dataset") + def test_get_column_values_nulls(self): + self.login(username="admin") + table = self.get_virtual_dataset() + rv = self.client.get(f"api/v1/datasource/table/{table.id}/column/col4/values/") + self.assertEqual(rv.status_code, 200) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(response["result"], [None]) + + @pytest.mark.usefixtures("app_context", "virtual_dataset") + def test_get_column_values_invalid_datasource_type(self): + self.login(username="admin") + table = self.get_virtual_dataset() + rv = self.client.get( + f"api/v1/datasource/not_table/{table.id}/column/col1/values/" + ) + self.assertEqual(rv.status_code, 400) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(response["message"], "Invalid datasource type: not_table") + + @patch("superset.datasource.api.DatasourceDAO.get_datasource") + def test_get_column_values_datasource_type_not_supported(self, get_datasource_mock): + get_datasource_mock.side_effect = DatasourceTypeNotSupportedError + self.login(username="admin") + rv = self.client.get("api/v1/datasource/table/1/column/col1/values/") + self.assertEqual(rv.status_code, 400) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual( + response["message"], "DAO datasource query source type is not supported" + ) + + def test_get_column_values_datasource_not_found(self): + self.login(username="admin") + rv = self.client.get("api/v1/datasource/table/999/column/col1/values/") + self.assertEqual(rv.status_code, 404) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(response["message"], "Datasource does not exist") + + @pytest.mark.usefixtures("app_context", 
"virtual_dataset") + def test_get_column_values_no_datasource_access(self): + # Allow gamma user to use this endpoint, but does not have datasource access + perm = security_manager.find_permission_view_menu( + "can_get_column_values", "Datasource" + ) + gamma_role = security_manager.find_role("Gamma") + security_manager.add_permission_role(gamma_role, perm) + + self.login(username="gamma") + table = self.get_virtual_dataset() + rv = self.client.get(f"api/v1/datasource/table/{table.id}/column/col1/values/") + self.assertEqual(rv.status_code, 403) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual( + response["message"], + "This endpoint requires the datasource virtual_dataset, " + "database or `all_datasource_access` permission", + ) + + @patch("superset.datasource.api.DatasourceDAO.get_datasource") + def test_get_column_values_not_implemented_error(self, get_datasource_mock): + datasource = Mock() + datasource.values_for_column.side_effect = NotImplementedError + get_datasource_mock.return_value = datasource + + self.login(username="admin") + rv = self.client.get("api/v1/datasource/sl_table/1/column/col1/values/") + self.assertEqual(rv.status_code, 400) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual( + response["message"], + "Unable to get column values for datasource type: sl_table", + )