feat: Backend Validation for Creation Method #16375

Merged
2 changes: 1 addition & 1 deletion superset/models/slice.py
@@ -53,7 +53,7 @@
logger = logging.getLogger(__name__)


class Slice( # pylint: disable=too-many-public-methods
class Slice( # pylint: disable=too-many-public-methods, too-many-instance-attributes
Model, AuditMixinNullable, ImportExportMixin
):
"""A slice is essentially a report or a view on data"""
1 change: 0 additions & 1 deletion superset/reports/api.py
@@ -271,7 +271,6 @@ def delete(self, pk: int) -> Response:

@expose("/", methods=["POST"])
@protect()
@safe
@statsd_metrics
@permission_name("post")
def post(self) -> Response:
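Note on the `@safe` removal: FAB's `safe` decorator catches any uncaught exception in the handler and answers with a generic 500, which would mask the new 409 raised further down in the create command. A simplified sketch of that behaviour follows (illustrative only, not the actual `flask_appbuilder.api.safe` implementation; `response_500` is a hypothetical helper on the API class):

```python
# Simplified, illustrative sketch of a @safe-style decorator: any uncaught
# exception becomes a generic 500, so an exception carrying status=409 would
# never surface with its intended status code.
import functools
import logging
from typing import Any, Callable

logger = logging.getLogger(__name__)


def safe_like(f: Callable[..., Any]) -> Callable[..., Any]:
    @functools.wraps(f)
    def wrapper(self: Any, *args: Any, **kwargs: Any) -> Any:
        try:
            return f(self, *args, **kwargs)
        except Exception:  # deliberately broad, like @safe
            logger.exception("Unhandled exception in API handler")
            return self.response_500()  # hypothetical response helper

    return wrapper
```

With the decorator gone, the uniqueness error added below can propagate and, presumably via Superset's command-error handling, come back as the 409 payload asserted in the integration tests.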
17 changes: 16 additions & 1 deletion superset/reports/commands/create.py
@@ -25,12 +25,13 @@
from superset.commands.base import CreateMixin
from superset.dao.exceptions import DAOCreateFailedError
from superset.databases.dao import DatabaseDAO
from superset.models.reports import ReportScheduleType
from superset.models.reports import ReportCreationMethodType, ReportScheduleType
from superset.reports.commands.base import BaseReportScheduleCommand
from superset.reports.commands.exceptions import (
DatabaseNotFoundValidationError,
ReportScheduleAlertRequiredDatabaseValidationError,
ReportScheduleCreateFailedError,
ReportScheduleCreationMethodUniquenessValidationError,
ReportScheduleInvalidError,
ReportScheduleNameUniquenessValidationError,
ReportScheduleRequiredTypeValidationError,
@@ -59,6 +60,10 @@ def validate(self) -> None:
owner_ids: Optional[List[int]] = self._properties.get("owners")
name = self._properties.get("name", "")
report_type = self._properties.get("type")
creation_method = self._properties.get("creation_method")
chart_id = self._properties.get("chart")
dashboard_id = self._properties.get("dashboard")
user_id = self._actor.id

# Validate type is required
if not report_type:
@@ -84,6 +89,16 @@ def validate(self) -> None:
# Validate chart or dashboard relations
self.validate_chart_dashboard(exceptions)

# Validate that the chart or dashboard does not already have a report
# created by this user through the same creation method.
if (
creation_method != ReportCreationMethodType.ALERTS_REPORTS
and not ReportScheduleDAO.validate_unique_creation_method(
user_id, dashboard_id, chart_id
)
):
raise ReportScheduleCreationMethodUniquenessValidationError()

if "validator_config_json" in self._properties:
self._properties["validator_config_json"] = json.dumps(
self._properties["validator_config_json"]
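`ReportCreationMethodType` comes from `superset/models/reports.py`, which is outside this diff. A minimal sketch of the kind of string enum the check above assumes — only the member names CHARTS, DASHBOARDS and ALERTS_REPORTS are confirmed by this diff; the string values below are assumptions:

```python
# Hypothetical sketch of the creation-method enum used above; member names
# match this diff, string values are assumed for illustration.
import enum


class ReportCreationMethodType(str, enum.Enum):
    CHARTS = "charts"
    DASHBOARDS = "dashboards"
    ALERTS_REPORTS = "alerts_reports"
```

Reports created through the Alerts & Reports flow (ALERTS_REPORTS) skip the uniqueness check, so only the chart- and dashboard-embedded email-report flows are limited to one report per user per resource.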
5 changes: 5 additions & 0 deletions superset/reports/commands/exceptions.py
@@ -167,6 +167,11 @@ def __init__(self) -> None:
super().__init__([_("Name must be unique")], field_name="name")


class ReportScheduleCreationMethodUniquenessValidationError(CommandException):
status = 409
message = "Resource already has an attached report."


class AlertQueryMultipleRowsError(CommandException):

message = _("Alert query returned more then one row.")
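The new exception carries `status = 409`, which is what turns this validation failure into an HTTP 409 Conflict rather than the default error status. A self-contained sketch of that pattern follows; it is not Superset's actual exception hierarchy or error handler, and every name in it is illustrative:

```python
# Illustrative only: a status attribute on the exception class drives the
# HTTP code of the error response; Superset's real CommandException and
# handlers live elsewhere.
from typing import Any, Dict, Tuple


class CommandError(Exception):
    status: int = 500  # assumed default
    message: str = "Command failed"


class ResourceAlreadyHasReportError(CommandError):
    status = 409
    message = "Resource already has an attached report."


def to_response(ex: CommandError) -> Tuple[Dict[str, Any], int]:
    """Map a command error to a (payload, http_status) pair."""
    payload = {"errors": [{"message": ex.message, "error_type": "GENERIC_COMMAND_ERROR"}]}
    return payload, ex.status
```

The integration tests below assert exactly that shape: status 409 and a GENERIC_COMMAND_ERROR entry carrying issue code 1010.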
18 changes: 18 additions & 0 deletions superset/reports/dao.py
@@ -113,6 +113,24 @@ def bulk_delete(
db.session.rollback()
raise DAODeleteFailedError(str(ex)) from ex

@staticmethod
def validate_unique_creation_method(
user_id: int, dashboard_id: Optional[int] = None, chart_id: Optional[int] = None
) -> bool:
"""
Validate that the user does not already have a report attached to the
given chart or dashboard from the self-subscribe reports flow
"""

query = db.session.query(ReportSchedule).filter_by(created_by_fk=user_id)
if dashboard_id is not None:
query = query.filter(ReportSchedule.dashboard_id == dashboard_id)

if chart_id is not None:
query = query.filter(ReportSchedule.chart_id == chart_id)

return not db.session.query(query.exists()).scalar()

@staticmethod
def validate_update_uniqueness(
name: str, report_type: str, report_schedule_id: Optional[int] = None
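The DAO check leans on SQL `EXISTS` instead of fetching rows. A stand-alone sketch of the same `session.query(query.exists()).scalar()` pattern against a toy model (the model, engine and data here are all assumptions for illustration):

```python
# Stand-alone illustration of the EXISTS pattern used by
# validate_unique_creation_method; the model and engine here are toys.
from sqlalchemy import Column, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class ToyReportSchedule(Base):
    __tablename__ = "toy_report_schedule"
    id = Column(Integer, primary_key=True)
    created_by_fk = Column(Integer)
    chart_id = Column(Integer, nullable=True)
    dashboard_id = Column(Integer, nullable=True)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(ToyReportSchedule(created_by_fk=1, chart_id=10))
    session.commit()

    query = session.query(ToyReportSchedule).filter_by(created_by_fk=1)
    query = query.filter(ToyReportSchedule.chart_id == 10)

    # Renders as SELECT EXISTS (...) and returns a boolean without loading rows.
    already_attached = session.query(query.exists()).scalar()
    print(already_attached)       # True: user 1 already has a report on chart 10
    print(not already_attached)   # False: mirrors the DAO's return value
```

Because the method returns `not ... exists()`, True means "no conflicting report yet", which is why the create command raises when it comes back False.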
118 changes: 117 additions & 1 deletion tests/integration_tests/reports/api_tests.py
@@ -768,7 +768,7 @@ def test_unsaved_report_schedule_schema(self):
)
def test_no_dashboard_report_schedule_schema(self):
"""
ReportSchedule Api: Test create report schedule with not dashboard id
ReportSchedule Api: Test create report schedule with no dashboard id
"""
self.login(username="admin")
chart = db.session.query(Slice).first()
@@ -790,6 +790,122 @@ def test_no_dashboard_report_schedule_schema(self):
== "Please save your dashboard first, then try creating a new email report."
)

@pytest.mark.usefixtures(
"load_birth_names_dashboard_with_slices", "create_report_schedules"
)
def test_create_multiple_creation_method_report_schedule_charts(self):
"""
ReportSchedule Api: Test create multiple reports with the same creation method
"""
self.login(username="admin")
chart = db.session.query(Slice).first()
report_schedule_data = {
"type": ReportScheduleType.REPORT,
"name": "name4",
"description": "description",
"creation_method": ReportCreationMethodType.CHARTS,
"crontab": "0 9 * * *",
"working_timeout": 3600,
"chart": chart.id,
}
uri = "api/v1/report/"
rv = self.client.post(uri, json=report_schedule_data)
data = json.loads(rv.data.decode("utf-8"))
assert rv.status_code == 201

# The second request should fail because the chart already has a report
# attached with the same creation method by the same user.
report_schedule_data = {
"type": ReportScheduleType.REPORT,
"name": "name5",
"description": "description",
"creation_method": ReportCreationMethodType.CHARTS,
"crontab": "0 9 * * *",
"working_timeout": 3600,
"chart": chart.id,
}
uri = "api/v1/report/"
rv = self.client.post(uri, json=report_schedule_data)
data = json.loads(rv.data.decode("utf-8"))
assert rv.status_code == 409
assert data == {
"errors": [
{
"message": "Resource already has an attached report.",
"error_type": "GENERIC_COMMAND_ERROR",
"level": "warning",
"extra": {
"issue_codes": [
{
"code": 1010,
"message": "Issue 1010 - Superset encountered an error while running a command.",
}
]
},
}
]
}

@pytest.mark.usefixtures(
"load_birth_names_dashboard_with_slices", "create_report_schedules"
)
def test_create_multiple_creation_method_report_schedule_dashboards(self):
"""
ReportSchedule Api: Test create multiple reports with the same creation method
"""
self.login(username="admin")
dashboard = db.session.query(Dashboard).first()
report_schedule_data = {
"type": ReportScheduleType.REPORT,
"name": "name4",
"description": "description",
"creation_method": ReportCreationMethodType.DASHBOARDS,
"crontab": "0 9 * * *",
"working_timeout": 3600,
"dashboard": dashboard.id,
}
uri = "api/v1/report/"
rv = self.client.post(uri, json=report_schedule_data)
data = json.loads(rv.data.decode("utf-8"))
assert rv.status_code == 201

# The second request should fail because the dashboard already has a report
# attached with the same creation method by the same user.
report_schedule_data = {
"type": ReportScheduleType.REPORT,
"name": "name5",
"description": "description",
"creation_method": ReportCreationMethodType.DASHBOARDS,
"crontab": "0 9 * * *",
"working_timeout": 3600,
"dashboard": dashboard.id,
}
uri = "api/v1/report/"
rv = self.client.post(uri, json=report_schedule_data)
data = json.loads(rv.data.decode("utf-8"))
assert rv.status_code == 409
assert data == {
"errors": [
{
"message": "Resource already has an attached report.",
"error_type": "GENERIC_COMMAND_ERROR",
"level": "warning",
"extra": {
"issue_codes": [
{
"code": 1010,
"message": "Issue 1010 - Superset encountered an error while running a command.",
}
]
},
}
]
}

@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_create_report_schedule_chart_dash_validation(self):
"""
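For completeness, a hedged client-side sketch of the flow these two tests exercise: the first POST to `api/v1/report/` is accepted with 201, and a second report for the same chart and creation method by the same user is rejected with 409. The base URL, authentication, and the literal enum string values are placeholders/assumptions mirroring the test payloads:

```python
# Hypothetical client sketch of the behaviour asserted above; base URL,
# authentication and the enum string values are assumptions, not Superset
# documentation.
import requests

BASE_URL = "http://localhost:8088"  # assumed local Superset instance
session = requests.Session()
# ... authenticate the session here (login or access token) ...

payload = {
    "type": "Report",             # assumed value of ReportScheduleType.REPORT
    "name": "weekly chart email",
    "creation_method": "charts",  # assumed value of ReportCreationMethodType.CHARTS
    "crontab": "0 9 * * *",
    "working_timeout": 3600,
    "chart": 42,                  # placeholder chart id
}

first = session.post(f"{BASE_URL}/api/v1/report/", json=payload)
print(first.status_code)  # expected: 201

payload["name"] = "another chart email"
second = session.post(f"{BASE_URL}/api/v1/report/", json=payload)
print(second.status_code)  # expected: 409
if second.status_code == 409:
    print(second.json()["errors"][0]["message"])
    # "Resource already has an attached report."
```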