Commit 6854d0d

Update scope

bogdan-dbx committed Dec 16, 2021
1 parent d0f18dd

Showing 7 changed files with 59 additions and 54 deletions.

tests/integration_tests/access_tests.py (9 changes: 6 additions & 3 deletions)

@@ -24,13 +24,16 @@
 from sqlalchemy import inspect

 from tests.integration_tests.fixtures.birth_names_dashboard import (
-    load_birth_names_dashboard_with_slices, load_birth_names_data
+    load_birth_names_dashboard_with_slices,
+    load_birth_names_data,
 )
 from tests.integration_tests.fixtures.world_bank_dashboard import (
-    load_world_bank_dashboard_with_slices, load_world_bank_data
+    load_world_bank_dashboard_with_slices,
+    load_world_bank_data,
 )
 from tests.integration_tests.fixtures.energy_dashboard import (
-    load_energy_table_with_slice, load_energy_table_data
+    load_energy_table_with_slice,
+    load_energy_table_data,
 )
 from tests.integration_tests.test_app import app  # isort:skip
 from superset import db, security_manager

tests/integration_tests/dashboard_utils.py (12 changes: 6 additions & 6 deletions)

@@ -30,15 +30,15 @@


 def get_table(
-    table_name: str,
-    database: Database,
-    schema: Optional[str] = None,
+    table_name: str, database: Database, schema: Optional[str] = None,
 ):
     schema = schema or get_example_default_schema()
     table_source = ConnectorRegistry.sources["table"]
-    return db.session.query(table_source).filter_by(
-        database_id=database.id, schema=schema, table_name=table_name
-    ).one_or_none()
+    return (
+        db.session.query(table_source)
+        .filter_by(database_id=database.id, schema=schema, table_name=table_name)
+        .one_or_none()
+    )


 def create_table_for_dashboard(
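
For reference, a hypothetical caller of the reworked get_table helper could look like the sketch below; the wrapper name and error handling are illustrative and not part of the commit.

from superset.utils.core import get_example_database
from tests.integration_tests.dashboard_utils import get_table


def fetch_required_table(table_name: str):  # hypothetical wrapper, not in the commit
    # get_table() ends in .one_or_none(), so a missing table comes back as None
    # rather than raising; callers that need the table should check for that.
    table = get_table(table_name, get_example_database())
    if table is None:
        raise ValueError(f"Example table {table_name!r} has not been loaded")
    return table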

tests/integration_tests/fixtures/birth_names_dashboard.py (15 changes: 9 additions & 6 deletions)

@@ -31,8 +31,10 @@
 from superset.models.dashboard import Dashboard
 from superset.models.slice import Slice
 from superset.utils.core import get_example_database, get_example_default_schema
-from tests.integration_tests.dashboard_utils import create_table_for_dashboard, \
-    get_table
+from tests.integration_tests.dashboard_utils import (
+    create_table_for_dashboard,
+    get_table,
+)
 from tests.integration_tests.test_app import app

 BIRTH_NAMES_TBL_NAME = "birth_names"
@@ -60,22 +62,23 @@ def load_birth_names_data():

 @pytest.fixture()
 def load_birth_names_dashboard_with_slices(load_birth_names_data):
-    dash_id_to_delete, slices_ids_to_delete = _create_dashboards()
-    yield
     with app.app_context():
+        dash_id_to_delete, slices_ids_to_delete = _create_dashboards()
+        yield
         _cleanup(dash_id_to_delete, slices_ids_to_delete)


 @pytest.fixture(scope="module")
 def load_birth_names_dashboard_with_slices_module_scope(load_birth_names_data):
-    dash_id_to_delete, slices_ids_to_delete = _create_dashboards()
-    yield
     with app.app_context():
+        dash_id_to_delete, slices_ids_to_delete = _create_dashboards()
+        yield
         _cleanup(dash_id_to_delete, slices_ids_to_delete)


 def _create_dashboards():
     from superset.examples.birth_names import create_dashboard, create_slices

     table = get_table(BIRTH_NAMES_TBL_NAME, get_example_database())
     slices, _ = create_slices(table, admin_owner=False)
     dash = create_dashboard(slices)
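
The fixture change above, repeated in the energy, unicode, and world bank fixtures below, keeps setup, the yield to the test, and cleanup inside a single Flask application context. A minimal sketch of that shape, with stand-in helpers replacing the real ones from this file:

import pytest

from tests.integration_tests.test_app import app


def _create_example_dashboard():  # stand-in for a builder such as _create_dashboards()
    return 1, [1, 2]


def _cleanup_example_dashboard(dash_id, slice_ids):  # stand-in for the real _cleanup()
    pass


@pytest.fixture()
def example_dashboard():
    with app.app_context():
        dash_id, slice_ids = _create_example_dashboard()  # setup runs inside the context
        yield  # the test body executes here, while the context is still active
        _cleanup_example_dashboard(dash_id, slice_ids)  # teardown reuses the same context


def test_example_dashboard_loaded(example_dashboard):
    # Runs between setup and cleanup, inside the application context opened above.
    pass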

tests/integration_tests/fixtures/energy_dashboard.py (18 changes: 10 additions & 8 deletions)

@@ -27,7 +27,8 @@
 from superset.utils.core import get_example_database
 from tests.integration_tests.dashboard_utils import (
     create_slice,
-    create_table_for_dashboard, get_table,
+    create_table_for_dashboard,
+    get_table,
 )
 from tests.integration_tests.test_app import app

@@ -39,13 +40,14 @@

 @pytest.fixture(scope="session")
 def load_energy_table_data():
-    database = get_example_database()
-    table_description = "Energy consumption"
-    df = _get_dataframe()
-    schema = {"source": String(255), "target": String(255), "value": Float()}
-    create_table_for_dashboard(
-        df, ENERGY_USAGE_TBL_NAME, database, schema, table_description
-    )
+    with app.app_context():
+        database = get_example_database()
+        table_description = "Energy consumption"
+        df = _get_dataframe()
+        schema = {"source": String(255), "target": String(255), "value": Float()}
+        create_table_for_dashboard(
+            df, ENERGY_USAGE_TBL_NAME, database, schema, table_description
+        )


 @pytest.fixture()

tests/integration_tests/fixtures/unicode_dashboard.py (22 changes: 10 additions & 12 deletions)

@@ -26,22 +26,23 @@
 from tests.integration_tests.dashboard_utils import (
     create_dashboard,
     create_slice,
-    create_table_for_dashboard, get_table,
+    create_table_for_dashboard,
+    get_table,
 )
 from tests.integration_tests.test_app import app
-

 UNICODE_TBL_NAME = "unicode_test"


 @pytest.fixture(scope="session")
 def load_unicode_data():
-    database = get_example_database()
-    dtype = {
-        "phrase": String(500),
-    }
-    df = _get_dataframe()
-    create_table_for_dashboard(df, UNICODE_TBL_NAME, database, dtype)
+    with app.app_context():
+        database = get_example_database()
+        dtype = {
+            "phrase": String(500),
+        }
+        df = _get_dataframe()
+        create_table_for_dashboard(df, UNICODE_TBL_NAME, database, dtype)


 @pytest.fixture()
@@ -50,7 +51,6 @@ def load_unicode_dashboard_with_slice(load_unicode_data):
     with app.app_context():
         dash = _create_unicode_dashboard(slice_name, None)
         yield
-
         _cleanup(dash, slice_name)


@@ -82,9 +82,7 @@ def _get_unicode_data():
 ]


-def _create_unicode_dashboard(
-    slice_title: str, position: str
-) -> Dashboard:
+def _create_unicode_dashboard(slice_title: str, position: str) -> Dashboard:
     table = get_table(UNICODE_TBL_NAME, get_example_database())
     table.fetch_metadata()


tests/integration_tests/fixtures/world_bank_dashboard.py (35 changes: 17 additions & 18 deletions)

@@ -32,46 +32,45 @@
 from superset.utils.core import get_example_database
 from tests.integration_tests.dashboard_utils import (
     create_dashboard,
-    create_table_for_dashboard, get_table,
+    create_table_for_dashboard,
+    get_table,
 )
 from tests.integration_tests.test_app import app
-

 WB_HEALTH_POPULATION = "wb_health_population"


 @pytest.fixture(scope="session")
 def load_world_bank_data():
-    database = get_example_database()
-    df = _get_dataframe(database)
-    dtype = {
-        "year": DateTime if database.backend != "presto" else String(255),
-        "country_code": String(3),
-        "country_name": String(255),
-        "region": String(255),
-    }
-    create_table_for_dashboard(
-        df, WB_HEALTH_POPULATION, database, dtype
-    )
+    with app.app_context():
+        database = get_example_database()
+        df = _get_dataframe(database)
+        dtype = {
+            "year": DateTime if database.backend != "presto" else String(255),
+            "country_code": String(3),
+            "country_name": String(255),
+            "region": String(255),
+        }
+        create_table_for_dashboard(df, WB_HEALTH_POPULATION, database, dtype)


 @pytest.fixture()
 def load_world_bank_dashboard_with_slices(load_world_bank_data):
-    dash_id_to_delete, slices_ids_to_delete = create_dashboard()
-    yield
     with app.app_context():
+        dash_id_to_delete, slices_ids_to_delete = create_dashboard_for_loaded_data()
+        yield
         _cleanup(dash_id_to_delete, slices_ids_to_delete)


 @pytest.fixture(scope="module")
 def load_world_bank_dashboard_with_slices_module_scope(load_world_bank_data):
-    dash_id_to_delete, slices_ids_to_delete = create_dashboard()
-    yield
     with app.app_context():
+        dash_id_to_delete, slices_ids_to_delete = create_dashboard_for_loaded_data()
+        yield
         _cleanup(dash_id_to_delete, slices_ids_to_delete)


-def create_dashboard():
+def create_dashboard_for_loaded_data():
     with app.app_context():
         table = get_table(WB_HEALTH_POPULATION, get_example_database())
         slices = _create_world_bank_slices(table)

tox.ini (2 changes: 1 addition & 1 deletion)

@@ -37,7 +37,7 @@ setenv =
     postgres: SUPERSET__SQLALCHEMY_DATABASE_URI = postgresql+psycopg2://superset:superset@localhost/test
     sqlite: SUPERSET__SQLALCHEMY_DATABASE_URI = sqlite:////{envtmpdir}/superset.db
     mysql-presto: SUPERSET__SQLALCHEMY_DATABASE_URI = mysql://mysqluser:mysqluserpassword@localhost/superset?charset=utf8
-    # docker run -p 8080:8080 --name presto prestosql/presto
+    # docker run -p 8080:8080 --name presto starburstdata/presto
     mysql-presto: SUPERSET__SQLALCHEMY_EXAMPLES_URI = presto://localhost:8080/memory/default
     # based on https://github.com/big-data-europe/docker-hadoop
     # clone the repo & run docker-compose up -d to test locally
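
For local runs, the Presto container started by the docker run command above can be smoke-tested against the examples URI with a short SQLAlchemy check. This sketch is not part of the commit and assumes SQLAlchemy plus a Presto dialect (for example PyHive) are installed:

from sqlalchemy import create_engine, text

# Same URI as SUPERSET__SQLALCHEMY_EXAMPLES_URI in the mysql-presto tox environment.
engine = create_engine("presto://localhost:8080/memory/default")

with engine.connect() as conn:
    # A trivial round trip confirms the container is reachable on port 8080.
    print(conn.execute(text("SELECT 1")).scalar())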
