From 24212052b13d3c0b5e2a4090a3c4e4156f25b830 Mon Sep 17 00:00:00 2001 From: rguerraZero Date: Mon, 4 Sep 2023 14:11:49 -0300 Subject: [PATCH] add tests REF: ZFE-75510 Add test to drone update . . . . .. . . . . . . . . . . --- .drone.yml | 20 +- superset-embedded-sdk/.gitignore | 3 + superset-embedded-sdk/CONTRIBUTING.md | 77 + superset-embedded-sdk/README.md | 96 + superset-embedded-sdk/babel.config.js | 26 + superset-embedded-sdk/package-lock.json | 14335 ++++++++++++++++ superset-embedded-sdk/package.json | 62 + superset-embedded-sdk/release-if-necessary.js | 62 + superset-embedded-sdk/src/const.ts | 24 + .../src/guestTokenRefresh.test.ts | 96 + .../src/guestTokenRefresh.ts | 33 + superset-embedded-sdk/src/index.ts | 190 + superset-embedded-sdk/tsconfig.json | 29 + superset-embedded-sdk/webpack.config.js | 49 + tests/__init__.py | 16 + tests/common/__init__.py | 16 + tests/common/logger_utils.py | 174 + tests/common/query_context_generator.py | 272 + tests/conftest.py | 111 + tests/consts/__init__.py | 16 + tests/consts/birth_names.py | 26 + tests/consts/us_states.py | 86 + tests/example_data/__init__.py | 16 + tests/example_data/data_generator/__init__.py | 25 + .../data_generator/base_generator.py | 24 + .../data_generator/birth_names/__init__.py | 16 + .../birth_names/birth_names_generator.py | 81 + .../birth_names_generator_factory.py | 65 + tests/example_data/data_generator/consts.py | 69 + .../data_generator/string_generator.py | 33 + .../string_generator_factory.py | 46 + .../data_generator/tests/__init__.py | 25 + .../tests/test_string_generator.py | 35 + tests/example_data/data_loading/__init__.py | 16 + .../data_loading/base_data_loader.py | 33 + .../data_loading/data_definitions/__init__.py | 16 + .../data_definitions/birth_names.py | 64 + .../data_loading/data_definitions/types.py | 53 + .../data_loading/pandas/__init__.py | 16 + .../data_loading/pandas/pandas_data_loader.py | 84 + .../pandas/pands_data_loading_conf.py | 64 + .../data_loading/pandas/table_df_convertor.py | 48 + tests/fixtures/__init__.py | 18 + tests/fixtures/birth_names.py | 52 + tests/integration_tests/__init__.py | 16 + tests/integration_tests/access_tests.py | 598 + .../advanced_data_type/__init__.py | 16 + .../advanced_data_type/api_tests.py | 135 + .../annotation_layers/__init__.py | 16 + .../annotation_layers/api_tests.py | 749 + .../annotation_layers/fixtures.py | 105 + .../async_events/__init__.py | 16 + .../async_events/api_tests.py | 120 + .../available_domains/__init__.py | 16 + .../available_domains/api_tests.py | 30 + tests/integration_tests/base_api_tests.py | 425 + tests/integration_tests/base_tests.py | 531 + tests/integration_tests/cache_tests.py | 104 + tests/integration_tests/cachekeys/__init__.py | 16 + .../integration_tests/cachekeys/api_tests.py | 167 + tests/integration_tests/celery_tests.py | 502 + tests/integration_tests/charts/__init__.py | 16 + tests/integration_tests/charts/api_tests.py | 1533 ++ .../charts/commands_tests.py | 397 + .../integration_tests/charts/data/__init__.py | 16 + .../charts/data/api_tests.py | 1282 ++ .../integration_tests/charts/schema_tests.py | 79 + tests/integration_tests/cli_tests.py | 519 + tests/integration_tests/commands_test.py | 202 + tests/integration_tests/conftest.py | 449 + tests/integration_tests/core_tests.py | 1685 ++ .../css_templates/__init__.py | 16 + .../css_templates/api_tests.py | 382 + tests/integration_tests/csv_upload_tests.py | 516 + tests/integration_tests/dashboard_tests.py | 569 + tests/integration_tests/dashboard_utils.py | 97 
+ .../integration_tests/dashboards/__init__.py | 16 + .../integration_tests/dashboards/api_tests.py | 2066 +++ .../integration_tests/dashboards/base_case.py | 118 + .../dashboards/commands_tests.py | 663 + tests/integration_tests/dashboards/consts.py | 43 + .../integration_tests/dashboards/dao_tests.py | 129 + .../dashboards/dashboard_test_utils.py | 121 + .../dashboards/filter_sets/__init__.py | 16 + .../dashboards/filter_sets/conftest.py | 285 + .../dashboards/filter_sets/consts.py | 22 + .../filter_sets/create_api_tests.py | 629 + .../filter_sets/delete_api_tests.py | 210 + .../dashboards/filter_sets/get_api_tests.py | 132 + .../filter_sets/update_api_tests.py | 520 + .../dashboards/filter_sets/utils.py | 102 + .../dashboards/filter_state/__init__.py | 16 + .../dashboards/filter_state/api_tests.py | 294 + .../dashboards/permalink/__init__.py | 16 + .../dashboards/permalink/api_tests.py | 114 + .../dashboards/security/__init__.py | 16 + .../dashboards/security/base_case.py | 54 + .../security/security_dataset_tests.py | 236 + .../security/security_rbac_tests.py | 352 + .../dashboards/superset_factory_util.py | 333 + tests/integration_tests/databases/__init__.py | 16 + .../integration_tests/databases/api_tests.py | 3173 ++++ .../databases/commands_tests.py | 962 ++ .../databases/ssh_tunnel/__init__.py | 16 + .../databases/ssh_tunnel/commands/__init__.py | 16 + .../ssh_tunnel/commands/commands_tests.py | 78 + tests/integration_tests/datasets/__init__.py | 16 + tests/integration_tests/datasets/api_tests.py | 2414 +++ .../datasets/commands_tests.py | 570 + .../integration_tests/datasource/__init__.py | 16 + .../integration_tests/datasource/api_tests.py | 137 + tests/integration_tests/datasource_tests.py | 689 + .../db_engine_specs/__init__.py | 16 + .../db_engine_specs/ascend_tests.py | 32 + .../db_engine_specs/base_engine_spec_tests.py | 522 + .../db_engine_specs/base_tests.py | 38 + .../db_engine_specs/bigquery_tests.py | 366 + .../db_engine_specs/databricks_tests.py | 61 + .../db_engine_specs/gsheets_tests.py | 44 + .../db_engine_specs/hive_tests.py | 434 + .../db_engine_specs/mysql_tests.py | 197 + .../db_engine_specs/pinot_tests.py | 89 + .../db_engine_specs/postgres_tests.py | 516 + .../db_engine_specs/presto_tests.py | 1034 ++ .../db_engine_specs/redshift_tests.py | 185 + .../dict_import_export_tests.py | 273 + .../dynamic_plugins_tests.py | 40 + tests/integration_tests/email_tests.py | 233 + tests/integration_tests/embedded/__init__.py | 16 + tests/integration_tests/embedded/api_tests.py | 53 + tests/integration_tests/embedded/dao_tests.py | 51 + tests/integration_tests/embedded/test_view.py | 72 + tests/integration_tests/event_logger_tests.py | 232 + tests/integration_tests/explore/__init__.py | 16 + tests/integration_tests/explore/api_tests.py | 240 + .../explore/form_data/__init__.py | 16 + .../explore/form_data/api_tests.py | 407 + .../explore/form_data/commands_tests.py | 348 + .../explore/permalink/__init__.py | 16 + .../explore/permalink/api_tests.py | 136 + .../explore/permalink/commands_tests.py | 172 + .../integration_tests/extensions/__init__.py | 16 + .../extensions/metastore_cache_test.py | 77 + tests/integration_tests/fixtures/__init__.py | 31 + .../fixtures/birth_names_dashboard.py | 108 + .../fixtures/certificates.py | 38 + tests/integration_tests/fixtures/client.py | 26 + tests/integration_tests/fixtures/database.py | 22 + .../integration_tests/fixtures/datasource.py | 211 + .../fixtures/deck_geojson_form_data.json | 47 + .../fixtures/deck_path_form_data.json | 49 + 
.../fixtures/energy_dashboard.py | 195 + .../fixtures/importexport.py | 516 + .../integration_tests/fixtures/public_role.py | 44 + tests/integration_tests/fixtures/pyodbcRow.py | 25 + .../fixtures/query_context.py | 50 + tests/integration_tests/fixtures/sample.png | Bin 0 -> 4481 bytes .../fixtures/tabbed_dashboard.py | 141 + tests/integration_tests/fixtures/tags.py | 33 + tests/integration_tests/fixtures/trends.csv | 3 + .../fixtures/unicode_dashboard.py | 120 + .../fixtures/world_bank_dashboard.py | 505 + tests/integration_tests/form_tests.py | 36 + .../integration_tests/import_export_tests.py | 676 + .../importexport/__init__.py | 16 + .../importexport/commands_tests.py | 48 + tests/integration_tests/insert_chart_mixin.py | 66 + tests/integration_tests/key_value/__init__.py | 16 + .../key_value/commands/__init__.py | 16 + .../key_value/commands/create_test.py | 63 + .../key_value/commands/delete_test.py | 82 + .../key_value/commands/fixtures.py | 63 + .../key_value/commands/get_test.py | 101 + .../key_value/commands/update_test.py | 96 + .../key_value/commands/upsert_test.py | 102 + tests/integration_tests/log_api_tests.py | 335 + .../integration_tests/log_model_view_tests.py | 37 + .../logging_configurator_tests.py | 55 + ...06e1e70058c7_migrate_legacy_area__tests.py | 99 + .../integration_tests/migrations/__init__.py | 16 + ...rm_time_range_endpoints_from_qc_3__test.py | 138 + ...78868b6_migrating_legacy_treemap__tests.py | 91 + ...ate_native_filters_to_new_schema__tests.py | 98 + .../fb13d49b72f9_better_filters__tests.py | 39 + ...igrate_filter_sets_to_new_format__tests.py | 367 + tests/integration_tests/model_tests.py | 648 + tests/integration_tests/queries/__init__.py | 16 + tests/integration_tests/queries/api_tests.py | 505 + .../queries/saved_queries/__init__.py | 16 + .../queries/saved_queries/api_tests.py | 804 + .../queries/saved_queries/commands_tests.py | 240 + .../integration_tests/query_context_tests.py | 1102 ++ tests/integration_tests/reports/__init__.py | 16 + .../integration_tests/reports/alert_tests.py | 200 + tests/integration_tests/reports/api_tests.py | 1670 ++ .../commands/create_dashboard_report_tests.py | 91 + .../execute_dashboard_report_tests.py | 113 + .../reports/commands_tests.py | 2036 +++ .../reports/scheduler_tests.py | 213 + tests/integration_tests/reports/utils.py | 201 + tests/integration_tests/result_set_tests.py | 250 + tests/integration_tests/security/__init__.py | 16 + .../security/analytics_db_safety_tests.py | 43 + tests/integration_tests/security/api_tests.py | 126 + .../security/guest_token_security_tests.py | 206 + .../security/migrate_roles_tests.py | 280 + .../security/row_level_security_tests.py | 408 + tests/integration_tests/security_tests.py | 2032 +++ tests/integration_tests/sql_lab/__init__.py | 16 + tests/integration_tests/sql_lab/api_tests.py | 215 + .../sql_lab/commands_tests.py | 293 + tests/integration_tests/sql_lab/conftest.py | 71 + .../sql_lab/test_execute_sql_statements.py | 56 + .../integration_tests/sql_validator_tests.py | 285 + tests/integration_tests/sqla_models_tests.py | 888 + tests/integration_tests/sqllab_tests.py | 1027 ++ tests/integration_tests/stats_logger_tests.py | 49 + tests/integration_tests/strategy_tests.py | 153 + .../integration_tests/superset_test_config.py | 146 + ..._test_config_sqllab_backend_persist_off.py | 24 + .../superset_test_config_thumbnails.py | 88 + ...uperset_test_custom_template_processors.py | 59 + tests/integration_tests/tagging_tests.py | 309 + tests/integration_tests/tasks/__init__.py 
| 16 + .../tasks/async_queries_tests.py | 190 + tests/integration_tests/test_app.py | 36 + tests/integration_tests/test_jinja_context.py | 207 + tests/integration_tests/thumbnails_tests.py | 416 + tests/integration_tests/users/__init__.py | 17 + tests/integration_tests/users/api_tests.py | 64 + tests/integration_tests/utils/__init__.py | 29 + .../utils/cache_manager_tests.py | 49 + tests/integration_tests/utils/core_tests.py | 84 + tests/integration_tests/utils/csv_tests.py | 83 + .../integration_tests/utils/encrypt_tests.py | 63 + .../integration_tests/utils/get_dashboards.py | 28 + .../integration_tests/utils/hashing_tests.py | 96 + .../utils/machine_auth_tests.py | 56 + .../utils/public_interfaces_test.py | 103 + tests/integration_tests/utils_tests.py | 1134 ++ tests/integration_tests/viz_tests.py | 1540 ++ tests/unit_tests/__init__.py | 16 + .../unit_tests/advanced_data_type/__init__.py | 16 + .../advanced_data_type/types_tests.py | 515 + .../unit_tests/annotation_layers/fixtures.py | 24 + .../annotation_layers/schema_tests.py | 157 + tests/unit_tests/charts/__init__.py | 16 + tests/unit_tests/charts/commands/__init__.py | 16 + .../charts/commands/importers/__init__.py | 16 + .../charts/commands/importers/v1/__init__.py | 16 + .../commands/importers/v1/import_test.py | 69 + tests/unit_tests/charts/dao/__init__.py | 16 + tests/unit_tests/charts/dao/dao_tests.py | 67 + .../unit_tests/charts/test_post_processing.py | 2031 +++ tests/unit_tests/columns/__init__.py | 16 + tests/unit_tests/columns/test_models.py | 57 + tests/unit_tests/commands/__init__.py | 16 + tests/unit_tests/commands/export_test.py | 94 + .../unit_tests/commands/importers/__init__.py | 16 + .../commands/importers/v1/__init__.py | 16 + .../commands/importers/v1/assets_test.py | 131 + tests/unit_tests/common/__init__.py | 16 + .../unit_tests/common/test_dataframe_utils.py | 50 + .../common/test_query_object_factory.py | 114 + .../common/test_time_range_utils.py | 94 + tests/unit_tests/config_test.py | 330 + tests/unit_tests/conftest.py | 165 + tests/unit_tests/core_tests.py | 235 + tests/unit_tests/dao/queries_test.py | 274 + tests/unit_tests/dashboards/__init__.py | 16 + .../dashboards/commands/__init__.py | 16 + .../dashboards/commands/importers/__init__.py | 16 + .../commands/importers/v1/__init__.py | 16 + .../commands/importers/v1/import_test.py | 65 + .../commands/importers/v1/utils_test.py | 123 + tests/unit_tests/databases/__init__.py | 16 + tests/unit_tests/databases/api_test.py | 345 + .../unit_tests/databases/commands/__init__.py | 16 + .../databases/commands/importers/__init__.py | 16 + .../commands/importers/v1/__init__.py | 16 + .../commands/importers/v1/import_test.py | 78 + .../commands/test_connection_test.py | 32 + tests/unit_tests/databases/dao/__init__.py | 16 + tests/unit_tests/databases/dao/dao_tests.py | 69 + tests/unit_tests/databases/schema_tests.py | 227 + .../databases/ssh_tunnel/__init__.py | 16 + .../databases/ssh_tunnel/commands/__init__.py | 16 + .../ssh_tunnel/commands/create_test.py | 68 + .../ssh_tunnel/commands/delete_test.py | 73 + .../ssh_tunnel/commands/update_test.py | 93 + .../databases/ssh_tunnel/dao_tests.py | 43 + tests/unit_tests/databases/utils_test.py | 40 + tests/unit_tests/dataframe_test.py | 205 + tests/unit_tests/datasets/__init__.py | 16 + .../unit_tests/datasets/commands/__init__.py | 16 + .../datasets/commands/export_test.py | 208 + .../datasets/commands/importers/__init__.py | 16 + .../commands/importers/v1/__init__.py | 16 + .../commands/importers/v1/import_test.py | 
486 + tests/unit_tests/datasets/conftest.py | 124 + tests/unit_tests/datasets/dao/__init__.py | 16 + tests/unit_tests/datasets/dao/dao_tests.py | 103 + tests/unit_tests/datasource/dao_tests.py | 212 + .../unit_tests/db_engine_specs/test_athena.py | 87 + tests/unit_tests/db_engine_specs/test_base.py | 140 + .../db_engine_specs/test_bigquery.py | 314 + .../db_engine_specs/test_clickhouse.py | 213 + .../unit_tests/db_engine_specs/test_crate.py | 71 + .../db_engine_specs/test_databricks.py | 246 + .../unit_tests/db_engine_specs/test_dremio.py | 42 + .../unit_tests/db_engine_specs/test_drill.py | 108 + .../unit_tests/db_engine_specs/test_druid.py | 95 + .../unit_tests/db_engine_specs/test_duckdb.py | 40 + .../db_engine_specs/test_dynamodb.py | 40 + .../db_engine_specs/test_elasticsearch.py | 106 + .../db_engine_specs/test_firebird.py | 102 + .../db_engine_specs/test_firebolt.py | 57 + .../db_engine_specs/test_gsheets.py | 309 + tests/unit_tests/db_engine_specs/test_hana.py | 43 + tests/unit_tests/db_engine_specs/test_hive.py | 44 + .../unit_tests/db_engine_specs/test_impala.py | 40 + tests/unit_tests/db_engine_specs/test_init.py | 80 + .../unit_tests/db_engine_specs/test_kusto.py | 144 + .../unit_tests/db_engine_specs/test_kylin.py | 40 + .../unit_tests/db_engine_specs/test_mssql.py | 432 + .../unit_tests/db_engine_specs/test_mysql.py | 150 + .../unit_tests/db_engine_specs/test_oracle.py | 113 + .../db_engine_specs/test_postgres.py | 91 + .../unit_tests/db_engine_specs/test_presto.py | 84 + .../db_engine_specs/test_rockset.py | 41 + .../db_engine_specs/test_snowflake.py | 171 + .../unit_tests/db_engine_specs/test_sqlite.py | 99 + .../db_engine_specs/test_teradata.py | 43 + .../unit_tests/db_engine_specs/test_trino.py | 368 + tests/unit_tests/db_engine_specs/utils.py | 67 + tests/unit_tests/explore/__init__.py | 16 + tests/unit_tests/explore/api_test.py | 30 + tests/unit_tests/explore/utils_test.py | 289 + tests/unit_tests/extension_tests.py | 51 + tests/unit_tests/feature_flag_test.py | 65 + tests/unit_tests/fixtures/__init__.py | 16 + tests/unit_tests/fixtures/assets_configs.py | 260 + tests/unit_tests/fixtures/common.py | 25 + tests/unit_tests/fixtures/dataframes.py | 197 + tests/unit_tests/fixtures/datasets.py | 206 + .../fixtures/static/assets/manifest.json | 20 + tests/unit_tests/importexport/__init__.py | 16 + tests/unit_tests/importexport/api_test.py | 245 + tests/unit_tests/jinja_context_test.py | 126 + tests/unit_tests/key_value/__init__.py | 16 + tests/unit_tests/key_value/utils_test.py | 60 + tests/unit_tests/models/__init__.py | 16 + tests/unit_tests/models/core_test.py | 145 + tests/unit_tests/notifications/email_tests.py | 54 + .../pandas_postprocessing/__init__.py | 16 + .../pandas_postprocessing/test_aggregate.py | 40 + .../pandas_postprocessing/test_boxplot.py | 151 + .../pandas_postprocessing/test_compare.py | 231 + .../test_contribution.py | 80 + .../pandas_postprocessing/test_cum.py | 164 + .../pandas_postprocessing/test_diff.py | 51 + .../pandas_postprocessing/test_flatten.py | 177 + .../pandas_postprocessing/test_geography.py | 90 + .../pandas_postprocessing/test_pivot.py | 205 + .../pandas_postprocessing/test_prophet.py | 190 + .../pandas_postprocessing/test_rename.py | 175 + .../pandas_postprocessing/test_resample.py | 208 + .../pandas_postprocessing/test_rolling.py | 222 + .../pandas_postprocessing/test_select.py | 55 + .../pandas_postprocessing/test_sort.py | 53 + .../pandas_postprocessing/test_utils.py | 30 + .../unit_tests/pandas_postprocessing/utils.py | 55 + 
tests/unit_tests/result_set_test.py | 142 + tests/unit_tests/sql_lab_test.py | 218 + tests/unit_tests/sql_parse_tests.py | 1508 ++ tests/unit_tests/tables/__init__.py | 16 + tests/unit_tests/tables/test_models.py | 56 + tests/unit_tests/tasks/__init__.py | 16 + tests/unit_tests/tasks/test_cron_util.py | 212 + tests/unit_tests/tasks/test_utils.py | 323 + tests/unit_tests/test_jinja_context.py | 267 + tests/unit_tests/thumbnails/__init__.py | 16 + tests/unit_tests/thumbnails/test_digest.py | 258 + tests/unit_tests/utils/cache_test.py | 52 + tests/unit_tests/utils/date_parser_tests.py | 358 + tests/unit_tests/utils/db.py | 30 + tests/unit_tests/utils/log_tests.py | 37 + tests/unit_tests/utils/test_core.py | 86 + tests/unit_tests/utils/test_decorators.py | 87 + tests/unit_tests/utils/test_file.py | 44 + tests/unit_tests/utils/urls_tests.py | 66 + tests/unit_tests/views/__init__.py | 16 + 392 files changed, 91856 insertions(+), 1 deletion(-) create mode 100644 superset-embedded-sdk/.gitignore create mode 100644 superset-embedded-sdk/CONTRIBUTING.md create mode 100644 superset-embedded-sdk/README.md create mode 100644 superset-embedded-sdk/babel.config.js create mode 100644 superset-embedded-sdk/package-lock.json create mode 100644 superset-embedded-sdk/package.json create mode 100644 superset-embedded-sdk/release-if-necessary.js create mode 100644 superset-embedded-sdk/src/const.ts create mode 100644 superset-embedded-sdk/src/guestTokenRefresh.test.ts create mode 100644 superset-embedded-sdk/src/guestTokenRefresh.ts create mode 100644 superset-embedded-sdk/src/index.ts create mode 100644 superset-embedded-sdk/tsconfig.json create mode 100644 superset-embedded-sdk/webpack.config.js create mode 100644 tests/__init__.py create mode 100644 tests/common/__init__.py create mode 100644 tests/common/logger_utils.py create mode 100644 tests/common/query_context_generator.py create mode 100644 tests/conftest.py create mode 100644 tests/consts/__init__.py create mode 100644 tests/consts/birth_names.py create mode 100644 tests/consts/us_states.py create mode 100644 tests/example_data/__init__.py create mode 100644 tests/example_data/data_generator/__init__.py create mode 100644 tests/example_data/data_generator/base_generator.py create mode 100644 tests/example_data/data_generator/birth_names/__init__.py create mode 100644 tests/example_data/data_generator/birth_names/birth_names_generator.py create mode 100644 tests/example_data/data_generator/birth_names/birth_names_generator_factory.py create mode 100644 tests/example_data/data_generator/consts.py create mode 100644 tests/example_data/data_generator/string_generator.py create mode 100644 tests/example_data/data_generator/string_generator_factory.py create mode 100644 tests/example_data/data_generator/tests/__init__.py create mode 100644 tests/example_data/data_generator/tests/test_string_generator.py create mode 100644 tests/example_data/data_loading/__init__.py create mode 100644 tests/example_data/data_loading/base_data_loader.py create mode 100644 tests/example_data/data_loading/data_definitions/__init__.py create mode 100644 tests/example_data/data_loading/data_definitions/birth_names.py create mode 100644 tests/example_data/data_loading/data_definitions/types.py create mode 100644 tests/example_data/data_loading/pandas/__init__.py create mode 100644 tests/example_data/data_loading/pandas/pandas_data_loader.py create mode 100644 tests/example_data/data_loading/pandas/pands_data_loading_conf.py create mode 100644 
tests/example_data/data_loading/pandas/table_df_convertor.py create mode 100644 tests/fixtures/__init__.py create mode 100644 tests/fixtures/birth_names.py create mode 100644 tests/integration_tests/__init__.py create mode 100644 tests/integration_tests/access_tests.py create mode 100644 tests/integration_tests/advanced_data_type/__init__.py create mode 100644 tests/integration_tests/advanced_data_type/api_tests.py create mode 100644 tests/integration_tests/annotation_layers/__init__.py create mode 100644 tests/integration_tests/annotation_layers/api_tests.py create mode 100644 tests/integration_tests/annotation_layers/fixtures.py create mode 100644 tests/integration_tests/async_events/__init__.py create mode 100644 tests/integration_tests/async_events/api_tests.py create mode 100644 tests/integration_tests/available_domains/__init__.py create mode 100644 tests/integration_tests/available_domains/api_tests.py create mode 100644 tests/integration_tests/base_api_tests.py create mode 100644 tests/integration_tests/base_tests.py create mode 100644 tests/integration_tests/cache_tests.py create mode 100644 tests/integration_tests/cachekeys/__init__.py create mode 100644 tests/integration_tests/cachekeys/api_tests.py create mode 100644 tests/integration_tests/celery_tests.py create mode 100644 tests/integration_tests/charts/__init__.py create mode 100644 tests/integration_tests/charts/api_tests.py create mode 100644 tests/integration_tests/charts/commands_tests.py create mode 100644 tests/integration_tests/charts/data/__init__.py create mode 100644 tests/integration_tests/charts/data/api_tests.py create mode 100644 tests/integration_tests/charts/schema_tests.py create mode 100644 tests/integration_tests/cli_tests.py create mode 100644 tests/integration_tests/commands_test.py create mode 100644 tests/integration_tests/conftest.py create mode 100644 tests/integration_tests/core_tests.py create mode 100644 tests/integration_tests/css_templates/__init__.py create mode 100644 tests/integration_tests/css_templates/api_tests.py create mode 100644 tests/integration_tests/csv_upload_tests.py create mode 100644 tests/integration_tests/dashboard_tests.py create mode 100644 tests/integration_tests/dashboard_utils.py create mode 100644 tests/integration_tests/dashboards/__init__.py create mode 100644 tests/integration_tests/dashboards/api_tests.py create mode 100644 tests/integration_tests/dashboards/base_case.py create mode 100644 tests/integration_tests/dashboards/commands_tests.py create mode 100644 tests/integration_tests/dashboards/consts.py create mode 100644 tests/integration_tests/dashboards/dao_tests.py create mode 100644 tests/integration_tests/dashboards/dashboard_test_utils.py create mode 100644 tests/integration_tests/dashboards/filter_sets/__init__.py create mode 100644 tests/integration_tests/dashboards/filter_sets/conftest.py create mode 100644 tests/integration_tests/dashboards/filter_sets/consts.py create mode 100644 tests/integration_tests/dashboards/filter_sets/create_api_tests.py create mode 100644 tests/integration_tests/dashboards/filter_sets/delete_api_tests.py create mode 100644 tests/integration_tests/dashboards/filter_sets/get_api_tests.py create mode 100644 tests/integration_tests/dashboards/filter_sets/update_api_tests.py create mode 100644 tests/integration_tests/dashboards/filter_sets/utils.py create mode 100644 tests/integration_tests/dashboards/filter_state/__init__.py create mode 100644 tests/integration_tests/dashboards/filter_state/api_tests.py create mode 100644 
tests/integration_tests/dashboards/permalink/__init__.py create mode 100644 tests/integration_tests/dashboards/permalink/api_tests.py create mode 100644 tests/integration_tests/dashboards/security/__init__.py create mode 100644 tests/integration_tests/dashboards/security/base_case.py create mode 100644 tests/integration_tests/dashboards/security/security_dataset_tests.py create mode 100644 tests/integration_tests/dashboards/security/security_rbac_tests.py create mode 100644 tests/integration_tests/dashboards/superset_factory_util.py create mode 100644 tests/integration_tests/databases/__init__.py create mode 100644 tests/integration_tests/databases/api_tests.py create mode 100644 tests/integration_tests/databases/commands_tests.py create mode 100644 tests/integration_tests/databases/ssh_tunnel/__init__.py create mode 100644 tests/integration_tests/databases/ssh_tunnel/commands/__init__.py create mode 100644 tests/integration_tests/databases/ssh_tunnel/commands/commands_tests.py create mode 100644 tests/integration_tests/datasets/__init__.py create mode 100644 tests/integration_tests/datasets/api_tests.py create mode 100644 tests/integration_tests/datasets/commands_tests.py create mode 100644 tests/integration_tests/datasource/__init__.py create mode 100644 tests/integration_tests/datasource/api_tests.py create mode 100644 tests/integration_tests/datasource_tests.py create mode 100644 tests/integration_tests/db_engine_specs/__init__.py create mode 100644 tests/integration_tests/db_engine_specs/ascend_tests.py create mode 100644 tests/integration_tests/db_engine_specs/base_engine_spec_tests.py create mode 100644 tests/integration_tests/db_engine_specs/base_tests.py create mode 100644 tests/integration_tests/db_engine_specs/bigquery_tests.py create mode 100644 tests/integration_tests/db_engine_specs/databricks_tests.py create mode 100644 tests/integration_tests/db_engine_specs/gsheets_tests.py create mode 100644 tests/integration_tests/db_engine_specs/hive_tests.py create mode 100644 tests/integration_tests/db_engine_specs/mysql_tests.py create mode 100644 tests/integration_tests/db_engine_specs/pinot_tests.py create mode 100644 tests/integration_tests/db_engine_specs/postgres_tests.py create mode 100644 tests/integration_tests/db_engine_specs/presto_tests.py create mode 100644 tests/integration_tests/db_engine_specs/redshift_tests.py create mode 100644 tests/integration_tests/dict_import_export_tests.py create mode 100644 tests/integration_tests/dynamic_plugins_tests.py create mode 100644 tests/integration_tests/email_tests.py create mode 100644 tests/integration_tests/embedded/__init__.py create mode 100644 tests/integration_tests/embedded/api_tests.py create mode 100644 tests/integration_tests/embedded/dao_tests.py create mode 100644 tests/integration_tests/embedded/test_view.py create mode 100644 tests/integration_tests/event_logger_tests.py create mode 100644 tests/integration_tests/explore/__init__.py create mode 100644 tests/integration_tests/explore/api_tests.py create mode 100644 tests/integration_tests/explore/form_data/__init__.py create mode 100644 tests/integration_tests/explore/form_data/api_tests.py create mode 100644 tests/integration_tests/explore/form_data/commands_tests.py create mode 100644 tests/integration_tests/explore/permalink/__init__.py create mode 100644 tests/integration_tests/explore/permalink/api_tests.py create mode 100644 tests/integration_tests/explore/permalink/commands_tests.py create mode 100644 tests/integration_tests/extensions/__init__.py create mode 
100644 tests/integration_tests/extensions/metastore_cache_test.py create mode 100644 tests/integration_tests/fixtures/__init__.py create mode 100644 tests/integration_tests/fixtures/birth_names_dashboard.py create mode 100644 tests/integration_tests/fixtures/certificates.py create mode 100644 tests/integration_tests/fixtures/client.py create mode 100644 tests/integration_tests/fixtures/database.py create mode 100644 tests/integration_tests/fixtures/datasource.py create mode 100644 tests/integration_tests/fixtures/deck_geojson_form_data.json create mode 100644 tests/integration_tests/fixtures/deck_path_form_data.json create mode 100644 tests/integration_tests/fixtures/energy_dashboard.py create mode 100644 tests/integration_tests/fixtures/importexport.py create mode 100644 tests/integration_tests/fixtures/public_role.py create mode 100644 tests/integration_tests/fixtures/pyodbcRow.py create mode 100644 tests/integration_tests/fixtures/query_context.py create mode 100644 tests/integration_tests/fixtures/sample.png create mode 100644 tests/integration_tests/fixtures/tabbed_dashboard.py create mode 100644 tests/integration_tests/fixtures/tags.py create mode 100644 tests/integration_tests/fixtures/trends.csv create mode 100644 tests/integration_tests/fixtures/unicode_dashboard.py create mode 100644 tests/integration_tests/fixtures/world_bank_dashboard.py create mode 100644 tests/integration_tests/form_tests.py create mode 100644 tests/integration_tests/import_export_tests.py create mode 100644 tests/integration_tests/importexport/__init__.py create mode 100644 tests/integration_tests/importexport/commands_tests.py create mode 100644 tests/integration_tests/insert_chart_mixin.py create mode 100644 tests/integration_tests/key_value/__init__.py create mode 100644 tests/integration_tests/key_value/commands/__init__.py create mode 100644 tests/integration_tests/key_value/commands/create_test.py create mode 100644 tests/integration_tests/key_value/commands/delete_test.py create mode 100644 tests/integration_tests/key_value/commands/fixtures.py create mode 100644 tests/integration_tests/key_value/commands/get_test.py create mode 100644 tests/integration_tests/key_value/commands/update_test.py create mode 100644 tests/integration_tests/key_value/commands/upsert_test.py create mode 100644 tests/integration_tests/log_api_tests.py create mode 100644 tests/integration_tests/log_model_view_tests.py create mode 100644 tests/integration_tests/logging_configurator_tests.py create mode 100644 tests/integration_tests/migrations/06e1e70058c7_migrate_legacy_area__tests.py create mode 100644 tests/integration_tests/migrations/__init__.py create mode 100644 tests/integration_tests/migrations/ad07e4fdbaba_rm_time_range_endpoints_from_qc_3__test.py create mode 100644 tests/integration_tests/migrations/c747c78868b6_migrating_legacy_treemap__tests.py create mode 100644 tests/integration_tests/migrations/f1410ed7ec95_migrate_native_filters_to_new_schema__tests.py create mode 100644 tests/integration_tests/migrations/fb13d49b72f9_better_filters__tests.py create mode 100644 tests/integration_tests/migrations/fc3a3a8ff221_migrate_filter_sets_to_new_format__tests.py create mode 100644 tests/integration_tests/model_tests.py create mode 100644 tests/integration_tests/queries/__init__.py create mode 100644 tests/integration_tests/queries/api_tests.py create mode 100644 tests/integration_tests/queries/saved_queries/__init__.py create mode 100644 tests/integration_tests/queries/saved_queries/api_tests.py create mode 100644 
tests/integration_tests/queries/saved_queries/commands_tests.py create mode 100644 tests/integration_tests/query_context_tests.py create mode 100644 tests/integration_tests/reports/__init__.py create mode 100644 tests/integration_tests/reports/alert_tests.py create mode 100644 tests/integration_tests/reports/api_tests.py create mode 100644 tests/integration_tests/reports/commands/create_dashboard_report_tests.py create mode 100644 tests/integration_tests/reports/commands/execute_dashboard_report_tests.py create mode 100644 tests/integration_tests/reports/commands_tests.py create mode 100644 tests/integration_tests/reports/scheduler_tests.py create mode 100644 tests/integration_tests/reports/utils.py create mode 100644 tests/integration_tests/result_set_tests.py create mode 100644 tests/integration_tests/security/__init__.py create mode 100644 tests/integration_tests/security/analytics_db_safety_tests.py create mode 100644 tests/integration_tests/security/api_tests.py create mode 100644 tests/integration_tests/security/guest_token_security_tests.py create mode 100644 tests/integration_tests/security/migrate_roles_tests.py create mode 100644 tests/integration_tests/security/row_level_security_tests.py create mode 100644 tests/integration_tests/security_tests.py create mode 100644 tests/integration_tests/sql_lab/__init__.py create mode 100644 tests/integration_tests/sql_lab/api_tests.py create mode 100644 tests/integration_tests/sql_lab/commands_tests.py create mode 100644 tests/integration_tests/sql_lab/conftest.py create mode 100644 tests/integration_tests/sql_lab/test_execute_sql_statements.py create mode 100644 tests/integration_tests/sql_validator_tests.py create mode 100644 tests/integration_tests/sqla_models_tests.py create mode 100644 tests/integration_tests/sqllab_tests.py create mode 100644 tests/integration_tests/stats_logger_tests.py create mode 100644 tests/integration_tests/strategy_tests.py create mode 100644 tests/integration_tests/superset_test_config.py create mode 100644 tests/integration_tests/superset_test_config_sqllab_backend_persist_off.py create mode 100644 tests/integration_tests/superset_test_config_thumbnails.py create mode 100644 tests/integration_tests/superset_test_custom_template_processors.py create mode 100644 tests/integration_tests/tagging_tests.py create mode 100644 tests/integration_tests/tasks/__init__.py create mode 100644 tests/integration_tests/tasks/async_queries_tests.py create mode 100644 tests/integration_tests/test_app.py create mode 100644 tests/integration_tests/test_jinja_context.py create mode 100644 tests/integration_tests/thumbnails_tests.py create mode 100644 tests/integration_tests/users/__init__.py create mode 100644 tests/integration_tests/users/api_tests.py create mode 100644 tests/integration_tests/utils/__init__.py create mode 100644 tests/integration_tests/utils/cache_manager_tests.py create mode 100644 tests/integration_tests/utils/core_tests.py create mode 100644 tests/integration_tests/utils/csv_tests.py create mode 100644 tests/integration_tests/utils/encrypt_tests.py create mode 100644 tests/integration_tests/utils/get_dashboards.py create mode 100644 tests/integration_tests/utils/hashing_tests.py create mode 100644 tests/integration_tests/utils/machine_auth_tests.py create mode 100644 tests/integration_tests/utils/public_interfaces_test.py create mode 100644 tests/integration_tests/utils_tests.py create mode 100644 tests/integration_tests/viz_tests.py create mode 100644 tests/unit_tests/__init__.py create mode 100644 
tests/unit_tests/advanced_data_type/__init__.py create mode 100644 tests/unit_tests/advanced_data_type/types_tests.py create mode 100644 tests/unit_tests/annotation_layers/fixtures.py create mode 100644 tests/unit_tests/annotation_layers/schema_tests.py create mode 100644 tests/unit_tests/charts/__init__.py create mode 100644 tests/unit_tests/charts/commands/__init__.py create mode 100644 tests/unit_tests/charts/commands/importers/__init__.py create mode 100644 tests/unit_tests/charts/commands/importers/v1/__init__.py create mode 100644 tests/unit_tests/charts/commands/importers/v1/import_test.py create mode 100644 tests/unit_tests/charts/dao/__init__.py create mode 100644 tests/unit_tests/charts/dao/dao_tests.py create mode 100644 tests/unit_tests/charts/test_post_processing.py create mode 100644 tests/unit_tests/columns/__init__.py create mode 100644 tests/unit_tests/columns/test_models.py create mode 100644 tests/unit_tests/commands/__init__.py create mode 100644 tests/unit_tests/commands/export_test.py create mode 100644 tests/unit_tests/commands/importers/__init__.py create mode 100644 tests/unit_tests/commands/importers/v1/__init__.py create mode 100644 tests/unit_tests/commands/importers/v1/assets_test.py create mode 100644 tests/unit_tests/common/__init__.py create mode 100644 tests/unit_tests/common/test_dataframe_utils.py create mode 100644 tests/unit_tests/common/test_query_object_factory.py create mode 100644 tests/unit_tests/common/test_time_range_utils.py create mode 100644 tests/unit_tests/config_test.py create mode 100644 tests/unit_tests/conftest.py create mode 100644 tests/unit_tests/core_tests.py create mode 100644 tests/unit_tests/dao/queries_test.py create mode 100644 tests/unit_tests/dashboards/__init__.py create mode 100644 tests/unit_tests/dashboards/commands/__init__.py create mode 100644 tests/unit_tests/dashboards/commands/importers/__init__.py create mode 100644 tests/unit_tests/dashboards/commands/importers/v1/__init__.py create mode 100644 tests/unit_tests/dashboards/commands/importers/v1/import_test.py create mode 100644 tests/unit_tests/dashboards/commands/importers/v1/utils_test.py create mode 100644 tests/unit_tests/databases/__init__.py create mode 100644 tests/unit_tests/databases/api_test.py create mode 100644 tests/unit_tests/databases/commands/__init__.py create mode 100644 tests/unit_tests/databases/commands/importers/__init__.py create mode 100644 tests/unit_tests/databases/commands/importers/v1/__init__.py create mode 100644 tests/unit_tests/databases/commands/importers/v1/import_test.py create mode 100644 tests/unit_tests/databases/commands/test_connection_test.py create mode 100644 tests/unit_tests/databases/dao/__init__.py create mode 100644 tests/unit_tests/databases/dao/dao_tests.py create mode 100644 tests/unit_tests/databases/schema_tests.py create mode 100644 tests/unit_tests/databases/ssh_tunnel/__init__.py create mode 100644 tests/unit_tests/databases/ssh_tunnel/commands/__init__.py create mode 100644 tests/unit_tests/databases/ssh_tunnel/commands/create_test.py create mode 100644 tests/unit_tests/databases/ssh_tunnel/commands/delete_test.py create mode 100644 tests/unit_tests/databases/ssh_tunnel/commands/update_test.py create mode 100644 tests/unit_tests/databases/ssh_tunnel/dao_tests.py create mode 100644 tests/unit_tests/databases/utils_test.py create mode 100644 tests/unit_tests/dataframe_test.py create mode 100644 tests/unit_tests/datasets/__init__.py create mode 100644 tests/unit_tests/datasets/commands/__init__.py create mode 
100644 tests/unit_tests/datasets/commands/export_test.py create mode 100644 tests/unit_tests/datasets/commands/importers/__init__.py create mode 100644 tests/unit_tests/datasets/commands/importers/v1/__init__.py create mode 100644 tests/unit_tests/datasets/commands/importers/v1/import_test.py create mode 100644 tests/unit_tests/datasets/conftest.py create mode 100644 tests/unit_tests/datasets/dao/__init__.py create mode 100644 tests/unit_tests/datasets/dao/dao_tests.py create mode 100644 tests/unit_tests/datasource/dao_tests.py create mode 100644 tests/unit_tests/db_engine_specs/test_athena.py create mode 100644 tests/unit_tests/db_engine_specs/test_base.py create mode 100644 tests/unit_tests/db_engine_specs/test_bigquery.py create mode 100644 tests/unit_tests/db_engine_specs/test_clickhouse.py create mode 100644 tests/unit_tests/db_engine_specs/test_crate.py create mode 100644 tests/unit_tests/db_engine_specs/test_databricks.py create mode 100644 tests/unit_tests/db_engine_specs/test_dremio.py create mode 100644 tests/unit_tests/db_engine_specs/test_drill.py create mode 100644 tests/unit_tests/db_engine_specs/test_druid.py create mode 100644 tests/unit_tests/db_engine_specs/test_duckdb.py create mode 100644 tests/unit_tests/db_engine_specs/test_dynamodb.py create mode 100644 tests/unit_tests/db_engine_specs/test_elasticsearch.py create mode 100644 tests/unit_tests/db_engine_specs/test_firebird.py create mode 100644 tests/unit_tests/db_engine_specs/test_firebolt.py create mode 100644 tests/unit_tests/db_engine_specs/test_gsheets.py create mode 100644 tests/unit_tests/db_engine_specs/test_hana.py create mode 100644 tests/unit_tests/db_engine_specs/test_hive.py create mode 100644 tests/unit_tests/db_engine_specs/test_impala.py create mode 100644 tests/unit_tests/db_engine_specs/test_init.py create mode 100644 tests/unit_tests/db_engine_specs/test_kusto.py create mode 100644 tests/unit_tests/db_engine_specs/test_kylin.py create mode 100644 tests/unit_tests/db_engine_specs/test_mssql.py create mode 100644 tests/unit_tests/db_engine_specs/test_mysql.py create mode 100644 tests/unit_tests/db_engine_specs/test_oracle.py create mode 100644 tests/unit_tests/db_engine_specs/test_postgres.py create mode 100644 tests/unit_tests/db_engine_specs/test_presto.py create mode 100644 tests/unit_tests/db_engine_specs/test_rockset.py create mode 100644 tests/unit_tests/db_engine_specs/test_snowflake.py create mode 100644 tests/unit_tests/db_engine_specs/test_sqlite.py create mode 100644 tests/unit_tests/db_engine_specs/test_teradata.py create mode 100644 tests/unit_tests/db_engine_specs/test_trino.py create mode 100644 tests/unit_tests/db_engine_specs/utils.py create mode 100644 tests/unit_tests/explore/__init__.py create mode 100644 tests/unit_tests/explore/api_test.py create mode 100644 tests/unit_tests/explore/utils_test.py create mode 100644 tests/unit_tests/extension_tests.py create mode 100644 tests/unit_tests/feature_flag_test.py create mode 100644 tests/unit_tests/fixtures/__init__.py create mode 100644 tests/unit_tests/fixtures/assets_configs.py create mode 100644 tests/unit_tests/fixtures/common.py create mode 100644 tests/unit_tests/fixtures/dataframes.py create mode 100644 tests/unit_tests/fixtures/datasets.py create mode 100644 tests/unit_tests/fixtures/static/assets/manifest.json create mode 100644 tests/unit_tests/importexport/__init__.py create mode 100644 tests/unit_tests/importexport/api_test.py create mode 100644 tests/unit_tests/jinja_context_test.py create mode 100644 
tests/unit_tests/key_value/__init__.py create mode 100644 tests/unit_tests/key_value/utils_test.py create mode 100644 tests/unit_tests/models/__init__.py create mode 100644 tests/unit_tests/models/core_test.py create mode 100644 tests/unit_tests/notifications/email_tests.py create mode 100644 tests/unit_tests/pandas_postprocessing/__init__.py create mode 100644 tests/unit_tests/pandas_postprocessing/test_aggregate.py create mode 100644 tests/unit_tests/pandas_postprocessing/test_boxplot.py create mode 100644 tests/unit_tests/pandas_postprocessing/test_compare.py create mode 100644 tests/unit_tests/pandas_postprocessing/test_contribution.py create mode 100644 tests/unit_tests/pandas_postprocessing/test_cum.py create mode 100644 tests/unit_tests/pandas_postprocessing/test_diff.py create mode 100644 tests/unit_tests/pandas_postprocessing/test_flatten.py create mode 100644 tests/unit_tests/pandas_postprocessing/test_geography.py create mode 100644 tests/unit_tests/pandas_postprocessing/test_pivot.py create mode 100644 tests/unit_tests/pandas_postprocessing/test_prophet.py create mode 100644 tests/unit_tests/pandas_postprocessing/test_rename.py create mode 100644 tests/unit_tests/pandas_postprocessing/test_resample.py create mode 100644 tests/unit_tests/pandas_postprocessing/test_rolling.py create mode 100644 tests/unit_tests/pandas_postprocessing/test_select.py create mode 100644 tests/unit_tests/pandas_postprocessing/test_sort.py create mode 100644 tests/unit_tests/pandas_postprocessing/test_utils.py create mode 100644 tests/unit_tests/pandas_postprocessing/utils.py create mode 100644 tests/unit_tests/result_set_test.py create mode 100644 tests/unit_tests/sql_lab_test.py create mode 100644 tests/unit_tests/sql_parse_tests.py create mode 100644 tests/unit_tests/tables/__init__.py create mode 100644 tests/unit_tests/tables/test_models.py create mode 100644 tests/unit_tests/tasks/__init__.py create mode 100644 tests/unit_tests/tasks/test_cron_util.py create mode 100644 tests/unit_tests/tasks/test_utils.py create mode 100644 tests/unit_tests/test_jinja_context.py create mode 100644 tests/unit_tests/thumbnails/__init__.py create mode 100644 tests/unit_tests/thumbnails/test_digest.py create mode 100644 tests/unit_tests/utils/cache_test.py create mode 100644 tests/unit_tests/utils/date_parser_tests.py create mode 100644 tests/unit_tests/utils/db.py create mode 100644 tests/unit_tests/utils/log_tests.py create mode 100644 tests/unit_tests/utils/test_core.py create mode 100644 tests/unit_tests/utils/test_decorators.py create mode 100644 tests/unit_tests/utils/test_file.py create mode 100644 tests/unit_tests/utils/urls_tests.py create mode 100644 tests/unit_tests/views/__init__.py diff --git a/.drone.yml b/.drone.yml index 98423531a5ae2..fc36d8e9a1443 100644 --- a/.drone.yml +++ b/.drone.yml @@ -22,4 +22,22 @@ steps: - make -C .terra/superset init - make -C .terra/superset validate - make -C .terra/insights init - - make -C .terra/insights validate \ No newline at end of file + - make -C .terra/insights validate + + - name: test + pull: if-not-exists + image: python:3.9 + environment: + SUPERSET_SECRET_KEY: asd123 + commands: + - mkdir ~/.ssh && echo "$SSH_PRIVATE_KEY" > ~/.ssh/id_ed25519 && chmod 0600 ~/.ssh/id_ed25519 + - ssh-keyscan -H github.com >> ~/.ssh/known_hosts + - apt-get update && apt-get install libsasl2-dev + - pip install -r requirements/testing.txt + - cp -R bi_superset/ superset/bi_superset/ + - cp bi_superset/superset_config.py superset/superset_config.py + - cp 
bi_superset/bi_cli/bi_cli.py superset/cli/bi_cli.py
+ - cp -R zf_integration/ superset/zf_integration/
+ - cp zf_utils/jwt.py superset/utils/jwt.py
+ - pip install coverage
+ - pytest ./tests/unit_tests/
\ No newline at end of file
diff --git a/superset-embedded-sdk/.gitignore b/superset-embedded-sdk/.gitignore
new file mode 100644
index 0000000000000..6675fa567cca0
--- /dev/null
+++ b/superset-embedded-sdk/.gitignore
@@ -0,0 +1,3 @@
+bundle
+dist
+lib
diff --git a/superset-embedded-sdk/CONTRIBUTING.md b/superset-embedded-sdk/CONTRIBUTING.md
new file mode 100644
index 0000000000000..9bf48e10a1384
--- /dev/null
+++ b/superset-embedded-sdk/CONTRIBUTING.md
@@ -0,0 +1,77 @@
+
+
+# Contributing to the Superset Embedded SDK
+
+The superset-embedded-sdk directory is a self-contained sub-project in the Superset codebase.
+
+This is because the SDK has different requirements from other parts of the Superset codebase:
+Namely, we need to export a lightweight frontend library that can be used in as many environments as possible.
+Having separate configs allows for better separation of concerns and allows the SDK code to remain simple.
+
+## Testing
+
+The functions used in the SDK so far are very closely tied to browser behavior,
+and therefore are not easily unit-testable. We have instead opted to test the SDK behavior using end-to-end tests.
+This way, the tests can assert that the SDK actually mounts the iframe and communicates with it correctly.
+
+At the time of writing, these tests have not been written yet, because we haven't yet put together the demo app that they will leverage.
+### Things to e2e test once we have a demo app:
+
+**happy path:**
+
+fetch a valid guest token and pass it to the SDK, verify that the dashboard shows up
+
+**security:**
+
+it should fail if you pass a fake guest token
+it should fail if your guest token doesn't have permission to access this resource
+it should apply rls filters correctly
+it should not apply rls filters to a dataset that isn't included
+
+**edge cases:**
+
+what happens if superset is offline
+what happens if the superset domain is invalid or incorrect
+what happens if the dashboard id doesn't exist
+
+## Publishing
+
+To publish a new version, first determine whether it will be a major/minor/patch version according to [semver rules](https://semver.org/).
+Run `npm version [major|minor|patch]`, and include the resulting version change in your PR.
+
+Building the package and publishing to npm will be handled by GitHub Actions automatically on merge to master,
+provided that the currently specified package version isn't already published.
+
+## Building
+
+Builds are handled by CI, so there is no need to run the build yourself unless you are curious about it.
+
+The library is built in two modes: one for consumption by package managers
+and subsequent build systems, and one for consumption directly by a web browser.
+
+Babel is used to build the SDK into a relatively modern js package in the `lib` directory.
+This is used by consumers who install the embedded SDK via npm, yarn, or another package manager.
+
+Webpack is used to bundle the `bundle` directory,
+for use directly in the browser with no build step, e.g. when importing via unpkg.
+
+TypeScript outputs type definition files to the `dist` directory.
+
+Which of these outputs is used by the library consumer is determined by our package.json's `main`, `module`, and `types` fields.
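As a rough illustration of that last point, the sketch below shows how those `package.json` entry points might be wired to the three build outputs described above. The exact file names are assumptions for illustration only; the authoritative values live in the `superset-embedded-sdk/package.json` added by this patch.

```json
{
  "main": "lib/index.js",
  "module": "lib/index.js",
  "types": "dist/index.d.ts"
}
```

With wiring along these lines, package managers resolve `main`/`module` to the Babel output in `lib`, TypeScript consumers pick up declarations from `dist` via `types`, and the self-contained Webpack `bundle` output is what a browser would load directly from a CDN.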
diff --git a/superset-embedded-sdk/README.md b/superset-embedded-sdk/README.md
new file mode 100644
index 0000000000000..7e05d94a6ce1d
--- /dev/null
+++ b/superset-embedded-sdk/README.md
@@ -0,0 +1,96 @@
+
+
+# Superset Embedded SDK
+
+The Embedded SDK allows you to embed dashboards from Superset into your own app,
+using your app's authentication.
+
+Embedding is done by inserting an iframe, containing a Superset page, into the host application.
+
+## Embedding a Dashboard
+
+Using npm:
+
+```sh
+npm install --save @superset-ui/embedded-sdk
+```
+
+```js
+import { embedDashboard } from "@superset-ui/embedded-sdk";
+
+embedDashboard({
+  id: "abc123", // given by the Superset embedding UI
+  supersetDomain: "https://superset.example.com",
+  mountPoint: document.getElementById("my-superset-container"), // any html element that can contain an iframe
+  fetchGuestToken: () => fetchGuestTokenFromBackend(),
+  dashboardUiConfig: { // dashboard UI config: hideTitle, hideTab, hideChartControls, filters.visible, filters.expanded (optional)
+    hideTitle: true,
+    filters: {
+      expanded: true,
+    }
+  },
+});
+```
+
+You can also load the Embedded SDK from a CDN. The SDK will be available as `supersetEmbeddedSdk` globally:
+
+```html
+
+
+
+```
+
+## Authentication/Authorization with Guest Tokens
+
+Embedded resources use a special auth token called a Guest Token to grant Superset access to your users,
+without requiring your users to log in to Superset directly. Your backend must create a Guest Token
+by requesting Superset's `POST /security/guest_token` endpoint, and pass that guest token to your frontend.
+
+The Embedded SDK takes the guest token and uses it to embed a dashboard.
+
+### Creating a Guest Token
+
+From the backend, HTTP `POST` to `/security/guest_token` with some parameters to define what the guest token will grant access to.
+Guest tokens can have Row Level Security rules which filter data for the user carrying the token.
+
+The agent making the `POST` request must be authenticated with the `can_grant_guest_token` permission.
+
+Example `POST /security/guest_token` payload:
+
+```json
+{
+  "user": {
+    "username": "stan_lee",
+    "first_name": "Stan",
+    "last_name": "Lee"
+  },
+  "resources": [{
+    "type": "dashboard",
+    "id": "abc123"
+  }],
+  "rls": [
+    { "clause": "publisher = 'Nintendo'" }
+  ]
+}
+```
diff --git a/superset-embedded-sdk/babel.config.js b/superset-embedded-sdk/babel.config.js
new file mode 100644
index 0000000000000..299fe1c6d9154
--- /dev/null
+++ b/superset-embedded-sdk/babel.config.js
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */ + +module.exports = { + presets: [ + "@babel/preset-typescript", + "@babel/preset-env" + ], + sourceMaps: true, +}; diff --git a/superset-embedded-sdk/package-lock.json b/superset-embedded-sdk/package-lock.json new file mode 100644 index 0000000000000..826fb282c9e28 --- /dev/null +++ b/superset-embedded-sdk/package-lock.json @@ -0,0 +1,14335 @@ +{ + "name": "@superset-ui/embedded-sdk", + "version": "0.1.0-alpha.8", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "name": "@superset-ui/embedded-sdk", + "version": "0.1.0-alpha.8", + "license": "Apache-2.0", + "dependencies": { + "@superset-ui/switchboard": "^0.18.26-0", + "jwt-decode": "^3.1.2" + }, + "devDependencies": { + "@babel/cli": "^7.16.8", + "@babel/core": "^7.16.12", + "@babel/preset-env": "^7.16.11", + "@babel/preset-typescript": "^7.16.7", + "@types/jest": "^27.4.1", + "axios": "^0.25.0", + "babel-loader": "^8.2.3", + "jest": "^27.5.1", + "typescript": "^4.5.5", + "webpack": "^5.67.0", + "webpack-cli": "^4.9.2" + } + }, + "node_modules/@babel/cli": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/cli/-/cli-7.16.8.tgz", + "integrity": "sha512-FTKBbxyk5TclXOGmwYyqelqP5IF6hMxaeJskd85jbR5jBfYlwqgwAbJwnixi1ZBbTqKfFuAA95mdmUFeSRwyJA==", + "dev": true, + "dependencies": { + "commander": "^4.0.1", + "convert-source-map": "^1.1.0", + "fs-readdir-recursive": "^1.1.0", + "glob": "^7.0.0", + "make-dir": "^2.1.0", + "slash": "^2.0.0", + "source-map": "^0.5.0" + }, + "bin": { + "babel": "bin/babel.js", + "babel-external-helpers": "bin/babel-external-helpers.js" + }, + "engines": { + "node": ">=6.9.0" + }, + "optionalDependencies": { + "@nicolo-ribaudo/chokidar-2": "2.1.8-no-fsevents.3", + "chokidar": "^3.4.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.16.7.tgz", + "integrity": "sha512-iAXqUn8IIeBTNd72xsFlgaXHkMBMt6y4HJp1tIaK465CWLT/fG1aqB7ykr95gHHmlBdGbFeWWfyB4NJJ0nmeIg==", + "dev": true, + "dependencies": { + "@babel/highlight": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.16.8.tgz", + "integrity": "sha512-m7OkX0IdKLKPpBlJtF561YJal5y/jyI5fNfWbPxh2D/nbzzGI4qRyrD8xO2jB24u7l+5I2a43scCG2IrfjC50Q==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.16.12", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.16.12.tgz", + "integrity": "sha512-dK5PtG1uiN2ikk++5OzSYsitZKny4wOCD0nrO4TqnW4BVBTQ2NGS3NgilvT/TEyxTST7LNyWV/T4tXDoD3fOgg==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.16.7", + "@babel/generator": "^7.16.8", + "@babel/helper-compilation-targets": "^7.16.7", + "@babel/helper-module-transforms": "^7.16.7", + "@babel/helpers": "^7.16.7", + "@babel/parser": "^7.16.12", + "@babel/template": "^7.16.7", + "@babel/traverse": "^7.16.10", + "@babel/types": "^7.16.8", + "convert-source-map": "^1.7.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.1.2", + "semver": "^6.3.0", + "source-map": "^0.5.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/generator": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.16.8.tgz", + "integrity": 
"sha512-1ojZwE9+lOXzcWdWmO6TbUzDfqLD39CmEhN8+2cX9XkDo5yW1OpgfejfliysR2AWLpMamTiOiAp/mtroaymhpw==", + "dev": true, + "dependencies": { + "@babel/types": "^7.16.8", + "jsesc": "^2.5.1", + "source-map": "^0.5.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-annotate-as-pure": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.16.7.tgz", + "integrity": "sha512-s6t2w/IPQVTAET1HitoowRGXooX8mCgtuP5195wD/QJPV6wYjpujCGF7JuMODVX2ZAJOf1GT6DT9MHEZvLOFSw==", + "dev": true, + "dependencies": { + "@babel/types": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-builder-binary-assignment-operator-visitor": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.16.7.tgz", + "integrity": "sha512-C6FdbRaxYjwVu/geKW4ZeQ0Q31AftgRcdSnZ5/jsH6BzCJbtvXvhpfkbkThYSuutZA7nCXpPR6AD9zd1dprMkA==", + "dev": true, + "dependencies": { + "@babel/helper-explode-assignable-expression": "^7.16.7", + "@babel/types": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.16.7.tgz", + "integrity": "sha512-mGojBwIWcwGD6rfqgRXVlVYmPAv7eOpIemUG3dGnDdCY4Pae70ROij3XmfrH6Fa1h1aiDylpglbZyktfzyo/hA==", + "dev": true, + "dependencies": { + "@babel/compat-data": "^7.16.4", + "@babel/helper-validator-option": "^7.16.7", + "browserslist": "^4.17.5", + "semver": "^6.3.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-create-class-features-plugin": { + "version": "7.16.10", + "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.16.10.tgz", + "integrity": "sha512-wDeej0pu3WN/ffTxMNCPW5UCiOav8IcLRxSIyp/9+IF2xJUM9h/OYjg0IJLHaL6F8oU8kqMz9nc1vryXhMsgXg==", + "dev": true, + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.16.7", + "@babel/helper-environment-visitor": "^7.16.7", + "@babel/helper-function-name": "^7.16.7", + "@babel/helper-member-expression-to-functions": "^7.16.7", + "@babel/helper-optimise-call-expression": "^7.16.7", + "@babel/helper-replace-supers": "^7.16.7", + "@babel/helper-split-export-declaration": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-create-regexp-features-plugin": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.16.7.tgz", + "integrity": "sha512-fk5A6ymfp+O5+p2yCkXAu5Kyj6v0xh0RBeNcAkYUMDvvAAoxvSKXn+Jb37t/yWFiQVDFK1ELpUTD8/aLhCPu+g==", + "dev": true, + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.16.7", + "regexpu-core": "^4.7.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-define-polyfill-provider": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.3.1.tgz", + "integrity": "sha512-J9hGMpJQmtWmj46B3kBHmL38UhJGhYX7eqkcq+2gsstyYt341HmPeWspihX43yVRA0mS+8GGk2Gckc7bY/HCmA==", + "dev": true, + "dependencies": { + 
"@babel/helper-compilation-targets": "^7.13.0", + "@babel/helper-module-imports": "^7.12.13", + "@babel/helper-plugin-utils": "^7.13.0", + "@babel/traverse": "^7.13.0", + "debug": "^4.1.1", + "lodash.debounce": "^4.0.8", + "resolve": "^1.14.2", + "semver": "^6.1.2" + }, + "peerDependencies": { + "@babel/core": "^7.4.0-0" + } + }, + "node_modules/@babel/helper-environment-visitor": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.16.7.tgz", + "integrity": "sha512-SLLb0AAn6PkUeAfKJCCOl9e1R53pQlGAfc4y4XuMRZfqeMYLE0dM1LMhqbGAlGQY0lfw5/ohoYWAe9V1yibRag==", + "dev": true, + "dependencies": { + "@babel/types": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-explode-assignable-expression": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.16.7.tgz", + "integrity": "sha512-KyUenhWMC8VrxzkGP0Jizjo4/Zx+1nNZhgocs+gLzyZyB8SHidhoq9KK/8Ato4anhwsivfkBLftky7gvzbZMtQ==", + "dev": true, + "dependencies": { + "@babel/types": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-function-name": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.16.7.tgz", + "integrity": "sha512-QfDfEnIUyyBSR3HtrtGECuZ6DAyCkYFp7GHl75vFtTnn6pjKeK0T1DB5lLkFvBea8MdaiUABx3osbgLyInoejA==", + "dev": true, + "dependencies": { + "@babel/helper-get-function-arity": "^7.16.7", + "@babel/template": "^7.16.7", + "@babel/types": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-get-function-arity": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.16.7.tgz", + "integrity": "sha512-flc+RLSOBXzNzVhcLu6ujeHUrD6tANAOU5ojrRx/as+tbzf8+stUCj7+IfRRoAbEZqj/ahXEMsjhOhgeZsrnTw==", + "dev": true, + "dependencies": { + "@babel/types": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-hoist-variables": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.16.7.tgz", + "integrity": "sha512-m04d/0Op34H5v7pbZw6pSKP7weA6lsMvfiIAMeIvkY/R4xQtBSMFEigu9QTZ2qB/9l22vsxtM8a+Q8CzD255fg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-member-expression-to-functions": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.16.7.tgz", + "integrity": "sha512-VtJ/65tYiU/6AbMTDwyoXGPKHgTsfRarivm+YbB5uAzKUyuPjgZSgAFeG87FCigc7KNHu2Pegh1XIT3lXjvz3Q==", + "dev": true, + "dependencies": { + "@babel/types": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.16.7.tgz", + "integrity": "sha512-LVtS6TqjJHFc+nYeITRo6VLXve70xmq7wPhWTqDJusJEgGmkAACWwMiTNrvfoQo6hEhFwAIixNkvB0jPXDL8Wg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.16.7", + "resolved": 
"https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.16.7.tgz", + "integrity": "sha512-gaqtLDxJEFCeQbYp9aLAefjhkKdjKcdh6DB7jniIGU3Pz52WAmP268zK0VgPz9hUNkMSYeH976K2/Y6yPadpng==", + "dev": true, + "dependencies": { + "@babel/helper-environment-visitor": "^7.16.7", + "@babel/helper-module-imports": "^7.16.7", + "@babel/helper-simple-access": "^7.16.7", + "@babel/helper-split-export-declaration": "^7.16.7", + "@babel/helper-validator-identifier": "^7.16.7", + "@babel/template": "^7.16.7", + "@babel/traverse": "^7.16.7", + "@babel/types": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-optimise-call-expression": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.16.7.tgz", + "integrity": "sha512-EtgBhg7rd/JcnpZFXpBy0ze1YRfdm7BnBX4uKMBd3ixa3RGAE002JZB66FJyNH7g0F38U05pXmA5P8cBh7z+1w==", + "dev": true, + "dependencies": { + "@babel/types": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.16.7.tgz", + "integrity": "sha512-Qg3Nk7ZxpgMrsox6HreY1ZNKdBq7K72tDSliA6dCl5f007jR4ne8iD5UzuNnCJH2xBf2BEEVGr+/OL6Gdp7RxA==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-remap-async-to-generator": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.16.8.tgz", + "integrity": "sha512-fm0gH7Flb8H51LqJHy3HJ3wnE1+qtYR2A99K06ahwrawLdOFsCEWjZOrYricXJHoPSudNKxrMBUPEIPxiIIvBw==", + "dev": true, + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.16.7", + "@babel/helper-wrap-function": "^7.16.8", + "@babel/types": "^7.16.8" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-replace-supers": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.16.7.tgz", + "integrity": "sha512-y9vsWilTNaVnVh6xiJfABzsNpgDPKev9HnAgz6Gb1p6UUwf9NepdlsV7VXGCftJM+jqD5f7JIEubcpLjZj5dBw==", + "dev": true, + "dependencies": { + "@babel/helper-environment-visitor": "^7.16.7", + "@babel/helper-member-expression-to-functions": "^7.16.7", + "@babel/helper-optimise-call-expression": "^7.16.7", + "@babel/traverse": "^7.16.7", + "@babel/types": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-simple-access": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.16.7.tgz", + "integrity": "sha512-ZIzHVyoeLMvXMN/vok/a4LWRy8G2v205mNP0XOuf9XRLyX5/u9CnVulUtDgUTama3lT+bf/UqucuZjqiGuTS1g==", + "dev": true, + "dependencies": { + "@babel/types": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-skip-transparent-expression-wrappers": { + "version": "7.16.0", + "resolved": "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.16.0.tgz", + "integrity": "sha512-+il1gTy0oHwUsBQZyJvukbB4vPMdcYBrFHa0Uc4AizLxbq6BOYC51Rv4tWocX9BLBDLZ4kc6qUFpQ6HRgL+3zw==", + "dev": true, + "dependencies": { + "@babel/types": "^7.16.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-split-export-declaration": { + "version": "7.16.7", + "resolved": 
"https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.16.7.tgz", + "integrity": "sha512-xbWoy/PFoxSWazIToT9Sif+jJTlrMcndIsaOKvTA6u7QEo7ilkRZpjew18/W3c7nm8fXdUDXh02VXTbZ0pGDNw==", + "dev": true, + "dependencies": { + "@babel/types": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.16.7.tgz", + "integrity": "sha512-hsEnFemeiW4D08A5gUAZxLBTXpZ39P+a+DGDsHw1yxqyQ/jzFEnxf5uTEGp+3bzAbNOxU1paTgYS4ECU/IgfDw==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.16.7.tgz", + "integrity": "sha512-TRtenOuRUVo9oIQGPC5G9DgK4743cdxvtOw0weQNpZXaS16SCBi5MNjZF8vba3ETURjZpTbVn7Vvcf2eAwFozQ==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-wrap-function": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.16.8.tgz", + "integrity": "sha512-8RpyRVIAW1RcDDGTA+GpPAwV22wXCfKOoM9bet6TLkGIFTkRQSkH1nMQ5Yet4MpoXe1ZwHPVtNasc2w0uZMqnw==", + "dev": true, + "dependencies": { + "@babel/helper-function-name": "^7.16.7", + "@babel/template": "^7.16.7", + "@babel/traverse": "^7.16.8", + "@babel/types": "^7.16.8" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.16.7.tgz", + "integrity": "sha512-9ZDoqtfY7AuEOt3cxchfii6C7GDyyMBffktR5B2jvWv8u2+efwvpnVKXMWzNehqy68tKgAfSwfdw/lWpthS2bw==", + "dev": true, + "dependencies": { + "@babel/template": "^7.16.7", + "@babel/traverse": "^7.16.7", + "@babel/types": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight": { + "version": "7.16.10", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.16.10.tgz", + "integrity": "sha512-5FnTQLSLswEj6IkgVw5KusNUUFY9ZGqe/TRFnP/BKYHYgfh7tc+C7mwiy95/yNP7Dh9x580Vv8r7u7ZfTBFxdw==", + "dev": true, + "dependencies": { + "@babel/helper-validator-identifier": "^7.16.7", + "chalk": "^2.0.0", + "js-tokens": "^4.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.16.12", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.16.12.tgz", + "integrity": "sha512-VfaV15po8RiZssrkPweyvbGVSe4x2y+aciFCgn0n0/SJMR22cwofRV1mtnJQYcSB1wUTaA/X1LnA3es66MCO5A==", + "dev": true, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.16.7.tgz", + "integrity": "sha512-anv/DObl7waiGEnC24O9zqL0pSuI9hljihqiDuFHC8d7/bjr/4RLGPWuc8rYOff/QPzbEPSkzG8wGG9aDuhHRg==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": { + "version": "7.16.7", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.16.7.tgz", + "integrity": "sha512-di8vUHRdf+4aJ7ltXhaDbPoszdkh59AQtJM5soLsuHpQJdFQZOA4uGj0V2u/CZ8bJ/u8ULDL5yq6FO/bCXnKHw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-skip-transparent-expression-wrappers": "^7.16.0", + "@babel/plugin-proposal-optional-chaining": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.13.0" + } + }, + "node_modules/@babel/plugin-proposal-async-generator-functions": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.16.8.tgz", + "integrity": "sha512-71YHIvMuiuqWJQkebWJtdhQTfd4Q4mF76q2IX37uZPkG9+olBxsX+rH1vkhFto4UeJZ9dPY2s+mDvhDm1u2BGQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-remap-async-to-generator": "^7.16.8", + "@babel/plugin-syntax-async-generators": "^7.8.4" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-proposal-class-properties": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.16.7.tgz", + "integrity": "sha512-IobU0Xme31ewjYOShSIqd/ZGM/r/cuOz2z0MDbNrhF5FW+ZVgi0f2lyeoj9KFPDOAqsYxmLWZte1WOwlvY9aww==", + "dev": true, + "dependencies": { + "@babel/helper-create-class-features-plugin": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-proposal-class-static-block": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-class-static-block/-/plugin-proposal-class-static-block-7.16.7.tgz", + "integrity": "sha512-dgqJJrcZoG/4CkMopzhPJjGxsIe9A8RlkQLnL/Vhhx8AA9ZuaRwGSlscSh42hazc7WSrya/IK7mTeoF0DP9tEw==", + "dev": true, + "dependencies": { + "@babel/helper-create-class-features-plugin": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/plugin-syntax-class-static-block": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.12.0" + } + }, + "node_modules/@babel/plugin-proposal-dynamic-import": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.16.7.tgz", + "integrity": "sha512-I8SW9Ho3/8DRSdmDdH3gORdyUuYnk1m4cMxUAdu5oy4n3OfN8flDEH+d60iG7dUfi0KkYwSvoalHzzdRzpWHTg==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/plugin-syntax-dynamic-import": "^7.8.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-proposal-export-namespace-from": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.16.7.tgz", + "integrity": "sha512-ZxdtqDXLRGBL64ocZcs7ovt71L3jhC1RGSyR996svrCi3PYqHNkb3SwPJCs8RIzD86s+WPpt2S73+EHCGO+NUA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/plugin-syntax-export-namespace-from": "^7.8.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" 
+ } + }, + "node_modules/@babel/plugin-proposal-json-strings": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.16.7.tgz", + "integrity": "sha512-lNZ3EEggsGY78JavgbHsK9u5P3pQaW7k4axlgFLYkMd7UBsiNahCITShLjNQschPyjtO6dADrL24757IdhBrsQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/plugin-syntax-json-strings": "^7.8.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-proposal-logical-assignment-operators": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.16.7.tgz", + "integrity": "sha512-K3XzyZJGQCr00+EtYtrDjmwX7o7PLK6U9bi1nCwkQioRFVUv6dJoxbQjtWVtP+bCPy82bONBKG8NPyQ4+i6yjg==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-proposal-nullish-coalescing-operator": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.16.7.tgz", + "integrity": "sha512-aUOrYU3EVtjf62jQrCj63pYZ7k6vns2h/DQvHPWGmsJRYzWXZ6/AsfgpiRy6XiuIDADhJzP2Q9MwSMKauBQ+UQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-proposal-numeric-separator": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.16.7.tgz", + "integrity": "sha512-vQgPMknOIgiuVqbokToyXbkY/OmmjAzr/0lhSIbG/KmnzXPGwW/AdhdKpi+O4X/VkWiWjnkKOBiqJrTaC98VKw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/plugin-syntax-numeric-separator": "^7.10.4" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-proposal-object-rest-spread": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.16.7.tgz", + "integrity": "sha512-3O0Y4+dw94HA86qSg9IHfyPktgR7q3gpNVAeiKQd+8jBKFaU5NQS1Yatgo4wY+UFNuLjvxcSmzcsHqrhgTyBUA==", + "dev": true, + "dependencies": { + "@babel/compat-data": "^7.16.4", + "@babel/helper-compilation-targets": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/plugin-syntax-object-rest-spread": "^7.8.3", + "@babel/plugin-transform-parameters": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-proposal-optional-catch-binding": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.16.7.tgz", + "integrity": "sha512-eMOH/L4OvWSZAE1VkHbr1vckLG1WUcHGJSLqqQwl2GaUqG6QjddvrOaTUMNYiv77H5IKPMZ9U9P7EaHwvAShfA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/plugin-syntax-optional-catch-binding": "^7.8.3" + }, + "engines": { + 
"node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-proposal-optional-chaining": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.16.7.tgz", + "integrity": "sha512-eC3xy+ZrUcBtP7x+sq62Q/HYd674pPTb/77XZMb5wbDPGWIdUbSr4Agr052+zaUPSb+gGRnjxXfKFvx5iMJ+DA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-skip-transparent-expression-wrappers": "^7.16.0", + "@babel/plugin-syntax-optional-chaining": "^7.8.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-proposal-private-methods": { + "version": "7.16.11", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.16.11.tgz", + "integrity": "sha512-F/2uAkPlXDr8+BHpZvo19w3hLFKge+k75XUprE6jaqKxjGkSYcK+4c+bup5PdW/7W/Rpjwql7FTVEDW+fRAQsw==", + "dev": true, + "dependencies": { + "@babel/helper-create-class-features-plugin": "^7.16.10", + "@babel/helper-plugin-utils": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-proposal-private-property-in-object": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.16.7.tgz", + "integrity": "sha512-rMQkjcOFbm+ufe3bTZLyOfsOUOxyvLXZJCTARhJr+8UMSoZmqTe1K1BgkFcrW37rAchWg57yI69ORxiWvUINuQ==", + "dev": true, + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.16.7", + "@babel/helper-create-class-features-plugin": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/plugin-syntax-private-property-in-object": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-proposal-unicode-property-regex": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.16.7.tgz", + "integrity": "sha512-QRK0YI/40VLhNVGIjRNAAQkEHws0cswSdFFjpFyt943YmJIU1da9uW63Iu6NFV6CxTZW5eTDCrwZUstBWgp/Rg==", + "dev": true, + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7" + }, + "engines": { + "node": ">=4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-async-generators": { + "version": "7.8.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz", + "integrity": "sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-bigint": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz", + "integrity": "sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-class-properties": { + "version": "7.12.13", 
+ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz", + "integrity": "sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.12.13" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-class-static-block": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz", + "integrity": "sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-dynamic-import": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz", + "integrity": "sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-export-namespace-from": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz", + "integrity": "sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.3" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-meta": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz", + "integrity": "sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-json-strings": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz", + "integrity": "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-logical-assignment-operators": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz", + "integrity": "sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-nullish-coalescing-operator": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz", + "integrity": 
"sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-numeric-separator": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz", + "integrity": "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-object-rest-spread": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz", + "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-optional-catch-binding": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz", + "integrity": "sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-optional-chaining": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz", + "integrity": "sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-private-property-in-object": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz", + "integrity": "sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-top-level-await": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz", + "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-typescript": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.16.7.tgz", + "integrity": "sha512-YhUIJHHGkqPgEcMYkPCKTyGUdoGKWtopIycQyjJH8OjvRgOYsXsaKehLVPScKJWAULPxMa4N1vCe6szREFlZ7A==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.16.7" + }, + "engines": { + 
"node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-arrow-functions": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.16.7.tgz", + "integrity": "sha512-9ffkFFMbvzTvv+7dTp/66xvZAWASuPD5Tl9LK3Z9vhOmANo6j94rik+5YMBt4CwHVMWLWpMsriIc2zsa3WW3xQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-async-to-generator": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.16.8.tgz", + "integrity": "sha512-MtmUmTJQHCnyJVrScNzNlofQJ3dLFuobYn3mwOTKHnSCMtbNsqvF71GQmJfFjdrXSsAA7iysFmYWw4bXZ20hOg==", + "dev": true, + "dependencies": { + "@babel/helper-module-imports": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-remap-async-to-generator": "^7.16.8" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-block-scoped-functions": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.16.7.tgz", + "integrity": "sha512-JUuzlzmF40Z9cXyytcbZEZKckgrQzChbQJw/5PuEHYeqzCsvebDx0K0jWnIIVcmmDOAVctCgnYs0pMcrYj2zJg==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-block-scoping": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.16.7.tgz", + "integrity": "sha512-ObZev2nxVAYA4bhyusELdo9hb3H+A56bxH3FZMbEImZFiEDYVHXQSJ1hQKFlDnlt8G9bBrCZ5ZpURZUrV4G5qQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-classes": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.16.7.tgz", + "integrity": "sha512-WY7og38SFAGYRe64BrjKf8OrE6ulEHtr5jEYaZMwox9KebgqPi67Zqz8K53EKk1fFEJgm96r32rkKZ3qA2nCWQ==", + "dev": true, + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.16.7", + "@babel/helper-environment-visitor": "^7.16.7", + "@babel/helper-function-name": "^7.16.7", + "@babel/helper-optimise-call-expression": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-replace-supers": "^7.16.7", + "@babel/helper-split-export-declaration": "^7.16.7", + "globals": "^11.1.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-computed-properties": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.16.7.tgz", + "integrity": "sha512-gN72G9bcmenVILj//sv1zLNaPyYcOzUho2lIJBMh/iakJ9ygCo/hEF9cpGb61SCMEDxbbyBoVQxrt+bWKu5KGw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + 
"@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-destructuring": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.16.7.tgz", + "integrity": "sha512-VqAwhTHBnu5xBVDCvrvqJbtLUa++qZaWC0Fgr2mqokBlulZARGyIvZDoqbPlPaKImQ9dKAcCzbv+ul//uqu70A==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-dotall-regex": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.16.7.tgz", + "integrity": "sha512-Lyttaao2SjZF6Pf4vk1dVKv8YypMpomAbygW+mU5cYP3S5cWTfCJjG8xV6CFdzGFlfWK81IjL9viiTvpb6G7gQ==", + "dev": true, + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-duplicate-keys": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.16.7.tgz", + "integrity": "sha512-03DvpbRfvWIXyK0/6QiR1KMTWeT6OcQ7tbhjrXyFS02kjuX/mu5Bvnh5SDSWHxyawit2g5aWhKwI86EE7GUnTw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-exponentiation-operator": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.16.7.tgz", + "integrity": "sha512-8UYLSlyLgRixQvlYH3J2ekXFHDFLQutdy7FfFAMm3CPZ6q9wHCwnUyiXpQCe3gVVnQlHc5nsuiEVziteRNTXEA==", + "dev": true, + "dependencies": { + "@babel/helper-builder-binary-assignment-operator-visitor": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-for-of": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.16.7.tgz", + "integrity": "sha512-/QZm9W92Ptpw7sjI9Nx1mbcsWz33+l8kuMIQnDwgQBG5s3fAfQvkRjQ7NqXhtNcKOnPkdICmUHyCaWW06HCsqg==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-function-name": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.16.7.tgz", + "integrity": "sha512-SU/C68YVwTRxqWj5kgsbKINakGag0KTgq9f2iZEXdStoAbOzLHEBRYzImmA6yFo8YZhJVflvXmIHUO7GWHmxxA==", + "dev": true, + "dependencies": { + "@babel/helper-compilation-targets": "^7.16.7", + "@babel/helper-function-name": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-literals": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.16.7.tgz", + "integrity": 
"sha512-6tH8RTpTWI0s2sV6uq3e/C9wPo4PTqqZps4uF0kzQ9/xPLFQtipynvmT1g/dOfEJ+0EQsHhkQ/zyRId8J2b8zQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-member-expression-literals": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.16.7.tgz", + "integrity": "sha512-mBruRMbktKQwbxaJof32LT9KLy2f3gH+27a5XSuXo6h7R3vqltl0PgZ80C8ZMKw98Bf8bqt6BEVi3svOh2PzMw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-modules-amd": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.16.7.tgz", + "integrity": "sha512-KaaEtgBL7FKYwjJ/teH63oAmE3lP34N3kshz8mm4VMAw7U3PxjVwwUmxEFksbgsNUaO3wId9R2AVQYSEGRa2+g==", + "dev": true, + "dependencies": { + "@babel/helper-module-transforms": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7", + "babel-plugin-dynamic-import-node": "^2.3.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-modules-commonjs": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.16.8.tgz", + "integrity": "sha512-oflKPvsLT2+uKQopesJt3ApiaIS2HW+hzHFcwRNtyDGieAeC/dIHZX8buJQ2J2X1rxGPy4eRcUijm3qcSPjYcA==", + "dev": true, + "dependencies": { + "@babel/helper-module-transforms": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-simple-access": "^7.16.7", + "babel-plugin-dynamic-import-node": "^2.3.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-modules-systemjs": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.16.7.tgz", + "integrity": "sha512-DuK5E3k+QQmnOqBR9UkusByy5WZWGRxfzV529s9nPra1GE7olmxfqO2FHobEOYSPIjPBTr4p66YDcjQnt8cBmw==", + "dev": true, + "dependencies": { + "@babel/helper-hoist-variables": "^7.16.7", + "@babel/helper-module-transforms": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-validator-identifier": "^7.16.7", + "babel-plugin-dynamic-import-node": "^2.3.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-modules-umd": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.16.7.tgz", + "integrity": "sha512-EMh7uolsC8O4xhudF2F6wedbSHm1HHZ0C6aJ7K67zcDNidMzVcxWdGr+htW9n21klm+bOn+Rx4CBsAntZd3rEQ==", + "dev": true, + "dependencies": { + "@babel/helper-module-transforms": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-named-capturing-groups-regex": { + "version": "7.16.8", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.16.8.tgz", + "integrity": "sha512-j3Jw+n5PvpmhRR+mrgIh04puSANCk/T/UA3m3P1MjJkhlK906+ApHhDIqBQDdOgL/r1UYpz4GNclTXxyZrYGSw==", + "dev": true, + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/plugin-transform-new-target": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.16.7.tgz", + "integrity": "sha512-xiLDzWNMfKoGOpc6t3U+etCE2yRnn3SM09BXqWPIZOBpL2gvVrBWUKnsJx0K/ADi5F5YC5f8APFfWrz25TdlGg==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-object-super": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.16.7.tgz", + "integrity": "sha512-14J1feiQVWaGvRxj2WjyMuXS2jsBkgB3MdSN5HuC2G5nRspa5RK9COcs82Pwy5BuGcjb+fYaUj94mYcOj7rCvw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-replace-supers": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-parameters": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.16.7.tgz", + "integrity": "sha512-AT3MufQ7zZEhU2hwOA11axBnExW0Lszu4RL/tAlUJBuNoRak+wehQW8h6KcXOcgjY42fHtDxswuMhMjFEuv/aw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-property-literals": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.16.7.tgz", + "integrity": "sha512-z4FGr9NMGdoIl1RqavCqGG+ZuYjfZ/hkCIeuH6Do7tXmSm0ls11nYVSJqFEUOSJbDab5wC6lRE/w6YjVcr6Hqw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-regenerator": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.16.7.tgz", + "integrity": "sha512-mF7jOgGYCkSJagJ6XCujSQg+6xC1M77/03K2oBmVJWoFGNUtnVJO4WHKJk3dnPC8HCcj4xBQP1Egm8DWh3Pb3Q==", + "dev": true, + "dependencies": { + "regenerator-transform": "^0.14.2" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-reserved-words": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.16.7.tgz", + "integrity": "sha512-KQzzDnZ9hWQBjwi5lpY5v9shmm6IVG0U9pB18zvMu2i4H90xpT4gmqwPYsn8rObiadYe2M0gmgsiOIF5A/2rtg==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + 
"node_modules/@babel/plugin-transform-shorthand-properties": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.16.7.tgz", + "integrity": "sha512-hah2+FEnoRoATdIb05IOXf+4GzXYTq75TVhIn1PewihbpyrNWUt2JbudKQOETWw6QpLe+AIUpJ5MVLYTQbeeUg==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-spread": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.16.7.tgz", + "integrity": "sha512-+pjJpgAngb53L0iaA5gU/1MLXJIfXcYepLgXB3esVRf4fqmj8f2cxM3/FKaHsZms08hFQJkFccEWuIpm429TXg==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-skip-transparent-expression-wrappers": "^7.16.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-sticky-regex": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.16.7.tgz", + "integrity": "sha512-NJa0Bd/87QV5NZZzTuZG5BPJjLYadeSZ9fO6oOUoL4iQx+9EEuw/eEM92SrsT19Yc2jgB1u1hsjqDtH02c3Drw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-template-literals": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.16.7.tgz", + "integrity": "sha512-VwbkDDUeenlIjmfNeDX/V0aWrQH2QiVyJtwymVQSzItFDTpxfyJh3EVaQiS0rIN/CqbLGr0VcGmuwyTdZtdIsA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-typeof-symbol": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.16.7.tgz", + "integrity": "sha512-p2rOixCKRJzpg9JB4gjnG4gjWkWa89ZoYUnl9snJ1cWIcTH/hvxZqfO+WjG6T8DRBpctEol5jw1O5rA8gkCokQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-typescript": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.16.8.tgz", + "integrity": "sha512-bHdQ9k7YpBDO2d0NVfkj51DpQcvwIzIusJ7mEUaMlbZq3Kt/U47j24inXZHQ5MDiYpCs+oZiwnXyKedE8+q7AQ==", + "dev": true, + "dependencies": { + "@babel/helper-create-class-features-plugin": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/plugin-syntax-typescript": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-unicode-escapes": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.16.7.tgz", + "integrity": "sha512-TAV5IGahIz3yZ9/Hfv35TV2xEm+kaBDaZQCn2S/hG9/CZ0DktxJv9eKfPc7yYCvOYR4JGx1h8C+jcSOvgaaI/Q==", + "dev": true, + 
"dependencies": { + "@babel/helper-plugin-utils": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-unicode-regex": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.16.7.tgz", + "integrity": "sha512-oC5tYYKw56HO75KZVLQ+R/Nl3Hro9kf8iG0hXoaHP7tjAyCpvqBiSNe6vGrZni1Z6MggmUOC6A7VP7AVmw225Q==", + "dev": true, + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/preset-env": { + "version": "7.16.11", + "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.16.11.tgz", + "integrity": "sha512-qcmWG8R7ZW6WBRPZK//y+E3Cli151B20W1Rv7ln27vuPaXU/8TKms6jFdiJtF7UDTxcrb7mZd88tAeK9LjdT8g==", + "dev": true, + "dependencies": { + "@babel/compat-data": "^7.16.8", + "@babel/helper-compilation-targets": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-validator-option": "^7.16.7", + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "^7.16.7", + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "^7.16.7", + "@babel/plugin-proposal-async-generator-functions": "^7.16.8", + "@babel/plugin-proposal-class-properties": "^7.16.7", + "@babel/plugin-proposal-class-static-block": "^7.16.7", + "@babel/plugin-proposal-dynamic-import": "^7.16.7", + "@babel/plugin-proposal-export-namespace-from": "^7.16.7", + "@babel/plugin-proposal-json-strings": "^7.16.7", + "@babel/plugin-proposal-logical-assignment-operators": "^7.16.7", + "@babel/plugin-proposal-nullish-coalescing-operator": "^7.16.7", + "@babel/plugin-proposal-numeric-separator": "^7.16.7", + "@babel/plugin-proposal-object-rest-spread": "^7.16.7", + "@babel/plugin-proposal-optional-catch-binding": "^7.16.7", + "@babel/plugin-proposal-optional-chaining": "^7.16.7", + "@babel/plugin-proposal-private-methods": "^7.16.11", + "@babel/plugin-proposal-private-property-in-object": "^7.16.7", + "@babel/plugin-proposal-unicode-property-regex": "^7.16.7", + "@babel/plugin-syntax-async-generators": "^7.8.4", + "@babel/plugin-syntax-class-properties": "^7.12.13", + "@babel/plugin-syntax-class-static-block": "^7.14.5", + "@babel/plugin-syntax-dynamic-import": "^7.8.3", + "@babel/plugin-syntax-export-namespace-from": "^7.8.3", + "@babel/plugin-syntax-json-strings": "^7.8.3", + "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4", + "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", + "@babel/plugin-syntax-numeric-separator": "^7.10.4", + "@babel/plugin-syntax-object-rest-spread": "^7.8.3", + "@babel/plugin-syntax-optional-catch-binding": "^7.8.3", + "@babel/plugin-syntax-optional-chaining": "^7.8.3", + "@babel/plugin-syntax-private-property-in-object": "^7.14.5", + "@babel/plugin-syntax-top-level-await": "^7.14.5", + "@babel/plugin-transform-arrow-functions": "^7.16.7", + "@babel/plugin-transform-async-to-generator": "^7.16.8", + "@babel/plugin-transform-block-scoped-functions": "^7.16.7", + "@babel/plugin-transform-block-scoping": "^7.16.7", + "@babel/plugin-transform-classes": "^7.16.7", + "@babel/plugin-transform-computed-properties": "^7.16.7", + "@babel/plugin-transform-destructuring": "^7.16.7", + "@babel/plugin-transform-dotall-regex": "^7.16.7", + 
"@babel/plugin-transform-duplicate-keys": "^7.16.7", + "@babel/plugin-transform-exponentiation-operator": "^7.16.7", + "@babel/plugin-transform-for-of": "^7.16.7", + "@babel/plugin-transform-function-name": "^7.16.7", + "@babel/plugin-transform-literals": "^7.16.7", + "@babel/plugin-transform-member-expression-literals": "^7.16.7", + "@babel/plugin-transform-modules-amd": "^7.16.7", + "@babel/plugin-transform-modules-commonjs": "^7.16.8", + "@babel/plugin-transform-modules-systemjs": "^7.16.7", + "@babel/plugin-transform-modules-umd": "^7.16.7", + "@babel/plugin-transform-named-capturing-groups-regex": "^7.16.8", + "@babel/plugin-transform-new-target": "^7.16.7", + "@babel/plugin-transform-object-super": "^7.16.7", + "@babel/plugin-transform-parameters": "^7.16.7", + "@babel/plugin-transform-property-literals": "^7.16.7", + "@babel/plugin-transform-regenerator": "^7.16.7", + "@babel/plugin-transform-reserved-words": "^7.16.7", + "@babel/plugin-transform-shorthand-properties": "^7.16.7", + "@babel/plugin-transform-spread": "^7.16.7", + "@babel/plugin-transform-sticky-regex": "^7.16.7", + "@babel/plugin-transform-template-literals": "^7.16.7", + "@babel/plugin-transform-typeof-symbol": "^7.16.7", + "@babel/plugin-transform-unicode-escapes": "^7.16.7", + "@babel/plugin-transform-unicode-regex": "^7.16.7", + "@babel/preset-modules": "^0.1.5", + "@babel/types": "^7.16.8", + "babel-plugin-polyfill-corejs2": "^0.3.0", + "babel-plugin-polyfill-corejs3": "^0.5.0", + "babel-plugin-polyfill-regenerator": "^0.3.0", + "core-js-compat": "^3.20.2", + "semver": "^6.3.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/preset-modules": { + "version": "0.1.5", + "resolved": "https://registry.npmjs.org/@babel/preset-modules/-/preset-modules-0.1.5.tgz", + "integrity": "sha512-A57th6YRG7oR3cq/yt/Y84MvGgE0eJG2F1JLhKuyG+jFxEgrd/HAMJatiFtmOiZurz+0DkrvbheCLaV5f2JfjA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.0.0", + "@babel/plugin-proposal-unicode-property-regex": "^7.4.4", + "@babel/plugin-transform-dotall-regex": "^7.4.4", + "@babel/types": "^7.4.4", + "esutils": "^2.0.2" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/preset-typescript": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/preset-typescript/-/preset-typescript-7.16.7.tgz", + "integrity": "sha512-WbVEmgXdIyvzB77AQjGBEyYPZx+8tTsO50XtfozQrkW8QB2rLJpH2lgx0TRw5EJrBxOZQ+wCcyPVQvS8tjEHpQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-validator-option": "^7.16.7", + "@babel/plugin-transform-typescript": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/runtime": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.16.7.tgz", + "integrity": "sha512-9E9FJowqAsytyOY6LG+1KuueckRL+aQW+mKvXRXnuFGyRAyepJPmEo9vgMfXUA6O9u3IeEdv9MAkppFcaQwogQ==", + "dev": true, + "dependencies": { + "regenerator-runtime": "^0.13.4" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/template": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.16.7.tgz", + "integrity": "sha512-I8j/x8kHUrbYRTUxXrrMbfCa7jxkE7tZre39x3kjr9hvI82cK1FfqLygotcWN5kdPGWcLdWMHpSBavse5tWw3w==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.16.7", + "@babel/parser": "^7.16.7", + 
"@babel/types": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.16.10", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.16.10.tgz", + "integrity": "sha512-yzuaYXoRJBGMlBhsMJoUW7G1UmSb/eXr/JHYM/MsOJgavJibLwASijW7oXBdw3NQ6T0bW7Ty5P/VarOs9cHmqw==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.16.7", + "@babel/generator": "^7.16.8", + "@babel/helper-environment-visitor": "^7.16.7", + "@babel/helper-function-name": "^7.16.7", + "@babel/helper-hoist-variables": "^7.16.7", + "@babel/helper-split-export-declaration": "^7.16.7", + "@babel/parser": "^7.16.10", + "@babel/types": "^7.16.8", + "debug": "^4.1.0", + "globals": "^11.1.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/types": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.16.8.tgz", + "integrity": "sha512-smN2DQc5s4M7fntyjGtyIPbRJv6wW4rU/94fmYJ7PKQuZkC0qGMHXJbg6sNGt12JmVr4k5YaptI/XtiLJBnmIg==", + "dev": true, + "dependencies": { + "@babel/helper-validator-identifier": "^7.16.7", + "to-fast-properties": "^2.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@bcoe/v8-coverage": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz", + "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", + "dev": true + }, + "node_modules/@discoveryjs/json-ext": { + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/@discoveryjs/json-ext/-/json-ext-0.5.6.tgz", + "integrity": "sha512-ws57AidsDvREKrZKYffXddNkyaF14iHNHm8VQnZH6t99E8gczjNN0GpvcGny0imC80yQ0tHz1xVUKk/KFQSUyA==", + "dev": true, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/@istanbuljs/load-nyc-config": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz", + "integrity": "sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==", + "dev": true, + "dependencies": { + "camelcase": "^5.3.1", + "find-up": "^4.1.0", + "get-package-type": "^0.1.0", + "js-yaml": "^3.13.1", + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/schema": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", + "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/console": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/console/-/console-27.5.1.tgz", + "integrity": "sha512-kZ/tNpS3NXn0mlXXXPNuDZnb4c0oZ20r4K5eemM2k30ZC3G0T02nXUvyhf5YdbXWHPEJLc9qGLxEZ216MdL+Zg==", + "dev": true, + "dependencies": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "jest-message-util": "^27.5.1", + "jest-util": "^27.5.1", + "slash": "^3.0.0" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/@jest/console/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": 
"https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@jest/console/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/@jest/console/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/@jest/console/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/@jest/console/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/console/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/console/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/core": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/core/-/core-27.5.1.tgz", + "integrity": "sha512-AK6/UTrvQD0Cd24NSqmIA6rKsu0tKIxfiCducZvqxYdmMisOYAsdItspT+fQDQYARPf8XgjAFZi0ogW2agH5nQ==", + "dev": true, + "dependencies": { + "@jest/console": "^27.5.1", + "@jest/reporters": "^27.5.1", + "@jest/test-result": "^27.5.1", + "@jest/transform": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/node": "*", + "ansi-escapes": "^4.2.1", + "chalk": "^4.0.0", + "emittery": "^0.8.1", + "exit": "^0.1.2", + "graceful-fs": "^4.2.9", + "jest-changed-files": "^27.5.1", + "jest-config": "^27.5.1", + "jest-haste-map": "^27.5.1", + "jest-message-util": "^27.5.1", + "jest-regex-util": "^27.5.1", + "jest-resolve": "^27.5.1", + "jest-resolve-dependencies": "^27.5.1", + "jest-runner": "^27.5.1", + "jest-runtime": "^27.5.1", + "jest-snapshot": "^27.5.1", + "jest-util": "^27.5.1", + "jest-validate": "^27.5.1", + "jest-watcher": "^27.5.1", + "micromatch": "^4.0.4", + "rimraf": "^3.0.0", + "slash": "^3.0.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + 
"node_modules/@jest/core/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@jest/core/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/@jest/core/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/@jest/core/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/@jest/core/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/core/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/core/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/environment": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-27.5.1.tgz", + "integrity": "sha512-/WQjhPJe3/ghaol/4Bq480JKXV/Rfw8nQdN7f41fM8VDHLcxKXou6QyXAh3EFr9/bVG3x74z1NWDkP87EiY8gA==", + "dev": true, + "dependencies": { + "@jest/fake-timers": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/node": "*", + "jest-mock": "^27.5.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/@jest/fake-timers": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-27.5.1.tgz", + "integrity": "sha512-/aPowoolwa07k7/oM3aASneNeBGCmGQsc3ugN4u6s4C/+s5M64MFo/+djTdiwcbQlRfFElGuDXWzaWj6QgKObQ==", + "dev": true, + "dependencies": { + "@jest/types": "^27.5.1", + "@sinonjs/fake-timers": "^8.0.1", + "@types/node": "*", + "jest-message-util": "^27.5.1", + "jest-mock": 
"^27.5.1", + "jest-util": "^27.5.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/@jest/globals": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/globals/-/globals-27.5.1.tgz", + "integrity": "sha512-ZEJNB41OBQQgGzgyInAv0UUfDDj3upmHydjieSxFvTRuZElrx7tXg/uVQ5hYVEwiXs3+aMsAeEc9X7xiSKCm4Q==", + "dev": true, + "dependencies": { + "@jest/environment": "^27.5.1", + "@jest/types": "^27.5.1", + "expect": "^27.5.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/@jest/reporters": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/reporters/-/reporters-27.5.1.tgz", + "integrity": "sha512-cPXh9hWIlVJMQkVk84aIvXuBB4uQQmFqZiacloFuGiP3ah1sbCxCosidXFDfqG8+6fO1oR2dTJTlsOy4VFmUfw==", + "dev": true, + "dependencies": { + "@bcoe/v8-coverage": "^0.2.3", + "@jest/console": "^27.5.1", + "@jest/test-result": "^27.5.1", + "@jest/transform": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "collect-v8-coverage": "^1.0.0", + "exit": "^0.1.2", + "glob": "^7.1.2", + "graceful-fs": "^4.2.9", + "istanbul-lib-coverage": "^3.0.0", + "istanbul-lib-instrument": "^5.1.0", + "istanbul-lib-report": "^3.0.0", + "istanbul-lib-source-maps": "^4.0.0", + "istanbul-reports": "^3.1.3", + "jest-haste-map": "^27.5.1", + "jest-resolve": "^27.5.1", + "jest-util": "^27.5.1", + "jest-worker": "^27.5.1", + "slash": "^3.0.0", + "source-map": "^0.6.0", + "string-length": "^4.0.1", + "terminal-link": "^2.0.0", + "v8-to-istanbul": "^8.1.0" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/@jest/reporters/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@jest/reporters/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/@jest/reporters/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/@jest/reporters/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/@jest/reporters/node_modules/has-flag": { + "version": "4.0.0", 
+ "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/reporters/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/reporters/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/@jest/reporters/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/source-map": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-27.5.1.tgz", + "integrity": "sha512-y9NIHUYF3PJRlHk98NdC/N1gl88BL08aQQgu4k4ZopQkCw9t9cV8mtl3TV8b/YCB8XaVTFrmUTAJvjsntDireg==", + "dev": true, + "dependencies": { + "callsites": "^3.0.0", + "graceful-fs": "^4.2.9", + "source-map": "^0.6.0" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/@jest/source-map/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/@jest/test-result": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-27.5.1.tgz", + "integrity": "sha512-EW35l2RYFUcUQxFJz5Cv5MTOxlJIQs4I7gxzi2zVU7PJhOwfYq1MdC5nhSmYjX1gmMmLPvB3sIaC+BkcHRBfag==", + "dev": true, + "dependencies": { + "@jest/console": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/istanbul-lib-coverage": "^2.0.0", + "collect-v8-coverage": "^1.0.0" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/@jest/test-sequencer": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-27.5.1.tgz", + "integrity": "sha512-LCheJF7WB2+9JuCS7VB/EmGIdQuhtqjRNI9A43idHv3E4KltCTsPsLxvdaubFHSYwY/fNjMWjl6vNRhDiN7vpQ==", + "dev": true, + "dependencies": { + "@jest/test-result": "^27.5.1", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^27.5.1", + "jest-runtime": "^27.5.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/@jest/transform": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-27.5.1.tgz", + "integrity": "sha512-ipON6WtYgl/1329g5AIJVbUuEh0wZVbdpGwC99Jw4LwuoBNS95MVphU6zOeD9pDkon+LLbFL7lOQRapbB8SCHw==", + "dev": true, + "dependencies": { + "@babel/core": "^7.1.0", + "@jest/types": "^27.5.1", + "babel-plugin-istanbul": "^6.1.1", + "chalk": "^4.0.0", + "convert-source-map": "^1.4.0", + 
"fast-json-stable-stringify": "^2.0.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^27.5.1", + "jest-regex-util": "^27.5.1", + "jest-util": "^27.5.1", + "micromatch": "^4.0.4", + "pirates": "^4.0.4", + "slash": "^3.0.0", + "source-map": "^0.6.1", + "write-file-atomic": "^3.0.0" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/@jest/transform/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@jest/transform/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/@jest/transform/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/@jest/transform/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/@jest/transform/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/transform/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/transform/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/@jest/transform/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/types": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-27.5.1.tgz", + "integrity": 
"sha512-Cx46iJ9QpwQTjIdq5VJu2QTMMs3QlEjI0x1QbBP5W1+nMzyc2XmimiRR/CbX9TO0cPTeUlxWMOu8mslYsJ8DEw==", + "dev": true, + "dependencies": { + "@types/istanbul-lib-coverage": "^2.0.0", + "@types/istanbul-reports": "^3.0.0", + "@types/node": "*", + "@types/yargs": "^16.0.0", + "chalk": "^4.0.0" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/@jest/types/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@jest/types/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/@jest/types/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/@jest/types/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/@jest/types/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/types/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz", + "integrity": "sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==", + "dev": true, + "dependencies": { + "@jridgewell/set-array": "^1.0.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.9" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz", + "integrity": "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==", + "dev": true, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/set-array": { + 
"version": "1.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz", + "integrity": "sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==", + "dev": true, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/source-map": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.2.tgz", + "integrity": "sha512-m7O9o2uR8k2ObDysZYzdfhb08VuEml5oWGiosa1VdaPZ/A6QyPkAJuwN0Q1lhULOf6B7MtQmHENS743hWtCrgw==", + "dev": true, + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.0", + "@jridgewell/trace-mapping": "^0.3.9" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.4.14", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz", + "integrity": "sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==", + "dev": true + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.14", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.14.tgz", + "integrity": "sha512-bJWEfQ9lPTvm3SneWwRFVLzrh6nhjwqw7TUFFBEMzwvg7t7PCDenf2lDwqo4NQXzdpgBXyFgDWnQA+2vkruksQ==", + "dev": true, + "dependencies": { + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": "^1.4.10" + } + }, + "node_modules/@nicolo-ribaudo/chokidar-2": { + "version": "2.1.8-no-fsevents.3", + "resolved": "https://registry.npmjs.org/@nicolo-ribaudo/chokidar-2/-/chokidar-2-2.1.8-no-fsevents.3.tgz", + "integrity": "sha512-s88O1aVtXftvp5bCPB7WnmXc5IwOZZ7YPuwNPt+GtOOXpPvad1LfbmjYv+qII7zP6RU2QGnqve27dnLycEnyEQ==", + "dev": true, + "optional": true + }, + "node_modules/@sinonjs/commons": { + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.8.3.tgz", + "integrity": "sha512-xkNcLAn/wZaX14RPlwizcKicDk9G3F8m2nU3L7Ukm5zBgTwiT0wsoFAHx9Jq56fJA1z/7uKGtCRu16sOUCLIHQ==", + "dev": true, + "dependencies": { + "type-detect": "4.0.8" + } + }, + "node_modules/@sinonjs/fake-timers": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-8.1.0.tgz", + "integrity": "sha512-OAPJUAtgeINhh/TAlUID4QTs53Njm7xzddaVlEs/SXwgtiD1tW22zAB/W1wdqfrpmikgaWQ9Fw6Ws+hsiRm5Vg==", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^1.7.0" + } + }, + "node_modules/@superset-ui/switchboard": { + "version": "0.18.26-0", + "resolved": "https://registry.npmjs.org/@superset-ui/switchboard/-/switchboard-0.18.26-0.tgz", + "integrity": "sha512-MYvigrspA0EgNU6tA9UrsXcrUYid9YktsbIPx/D4Xd5cWWrJrJl303imQ/SIZbC25faJCd2gL30ORll60Yz3Ww==" + }, + "node_modules/@tootallnate/once": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz", + "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/@types/babel__core": { + "version": "7.1.18", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.1.18.tgz", + "integrity": "sha512-S7unDjm/C7z2A2R9NzfKCK1I+BAALDtxEmsJBwlB3EzNfb929ykjL++1CK9LO++EIp2fQrC8O+BwjKvz6UeDyQ==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + "node_modules/@types/babel__generator": { + "version": "7.6.4", + "resolved": 
"https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.6.4.tgz", + "integrity": "sha512-tFkciB9j2K755yrTALxD44McOrk+gfpIpvC3sxHjRawj6PfnQxrse4Clq5y/Rq+G3mrBurMax/lG8Qn2t9mSsg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__template": { + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.1.tgz", + "integrity": "sha512-azBFKemX6kMg5Io+/rdGT0dkGreboUVR0Cdm3fz9QJWpaQGJRQXl7C+6hOTCZcMll7KFyEQpgbYI2lHdsS4U7g==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.14.2", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.14.2.tgz", + "integrity": "sha512-K2waXdXBi2302XUdcHcR1jCeU0LL4TD9HRs/gk0N2Xvrht+G/BfJa4QObBQZfhMdxiCpV3COl5Nfq4uKTeTnJA==", + "dev": true, + "dependencies": { + "@babel/types": "^7.3.0" + } + }, + "node_modules/@types/eslint": { + "version": "8.4.1", + "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.4.1.tgz", + "integrity": "sha512-GE44+DNEyxxh2Kc6ro/VkIj+9ma0pO0bwv9+uHSyBrikYOHr8zYcdPvnBOp1aw8s+CjRvuSx7CyWqRrNFQ59mA==", + "dev": true, + "dependencies": { + "@types/estree": "*", + "@types/json-schema": "*" + } + }, + "node_modules/@types/eslint-scope": { + "version": "3.7.3", + "resolved": "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.3.tgz", + "integrity": "sha512-PB3ldyrcnAicT35TWPs5IcwKD8S333HMaa2VVv4+wdvebJkjWuW/xESoB8IwRcog8HYVYamb1g/R31Qv5Bx03g==", + "dev": true, + "dependencies": { + "@types/eslint": "*", + "@types/estree": "*" + } + }, + "node_modules/@types/estree": { + "version": "0.0.50", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-0.0.50.tgz", + "integrity": "sha512-C6N5s2ZFtuZRj54k2/zyRhNDjJwwcViAM3Nbm8zjBpbqAdZ00mr0CFxvSKeO8Y/e03WVFLpQMdHYVfUd6SB+Hw==", + "dev": true + }, + "node_modules/@types/graceful-fs": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.5.tgz", + "integrity": "sha512-anKkLmZZ+xm4p8JWBf4hElkM4XR+EZeA2M9BAkkTldmcyDY4mbdIJnRghDJH3Ov5ooY7/UAoENtmdMSkaAd7Cw==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/istanbul-lib-coverage": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz", + "integrity": "sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g==", + "dev": true + }, + "node_modules/@types/istanbul-lib-report": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz", + "integrity": "sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg==", + "dev": true, + "dependencies": { + "@types/istanbul-lib-coverage": "*" + } + }, + "node_modules/@types/istanbul-reports": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz", + "integrity": "sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw==", + "dev": true, + "dependencies": { + "@types/istanbul-lib-report": "*" + } + }, + "node_modules/@types/jest": { + "version": "27.4.1", + "resolved": "https://registry.npmjs.org/@types/jest/-/jest-27.4.1.tgz", + "integrity": 
"sha512-23iPJADSmicDVrWk+HT58LMJtzLAnB2AgIzplQuq/bSrGaxCrlvRFjGbXmamnnk/mAmCdLStiGqggu28ocUyiw==", + "dev": true, + "dependencies": { + "jest-matcher-utils": "^27.0.0", + "pretty-format": "^27.0.0" + } + }, + "node_modules/@types/json-schema": { + "version": "7.0.9", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.9.tgz", + "integrity": "sha512-qcUXuemtEu+E5wZSJHNxUXeCZhAfXKQ41D+duX+VYPde7xyEVZci+/oXKJL13tnRs9lR2pr4fod59GT6/X1/yQ==", + "dev": true + }, + "node_modules/@types/node": { + "version": "17.0.13", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.13.tgz", + "integrity": "sha512-Y86MAxASe25hNzlDbsviXl8jQHb0RDvKt4c40ZJQ1Don0AAL0STLZSs4N+6gLEO55pedy7r2cLwS+ZDxPm/2Bw==", + "dev": true + }, + "node_modules/@types/prettier": { + "version": "2.4.4", + "resolved": "https://registry.npmjs.org/@types/prettier/-/prettier-2.4.4.tgz", + "integrity": "sha512-ReVR2rLTV1kvtlWFyuot+d1pkpG2Fw/XKE3PDAdj57rbM97ttSp9JZ2UsP+2EHTylra9cUf6JA7tGwW1INzUrA==", + "dev": true + }, + "node_modules/@types/stack-utils": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.1.tgz", + "integrity": "sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw==", + "dev": true + }, + "node_modules/@types/yargs": { + "version": "16.0.4", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-16.0.4.tgz", + "integrity": "sha512-T8Yc9wt/5LbJyCaLiHPReJa0kApcIgJ7Bn735GjItUfh08Z1pJvu8QZqb9s+mMvKV6WUQRV7K2R46YbjMXTTJw==", + "dev": true, + "dependencies": { + "@types/yargs-parser": "*" + } + }, + "node_modules/@types/yargs-parser": { + "version": "21.0.0", + "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.0.tgz", + "integrity": "sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA==", + "dev": true + }, + "node_modules/@webassemblyjs/ast": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.1.tgz", + "integrity": "sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw==", + "dev": true, + "dependencies": { + "@webassemblyjs/helper-numbers": "1.11.1", + "@webassemblyjs/helper-wasm-bytecode": "1.11.1" + } + }, + "node_modules/@webassemblyjs/floating-point-hex-parser": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.1.tgz", + "integrity": "sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ==", + "dev": true + }, + "node_modules/@webassemblyjs/helper-api-error": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.1.tgz", + "integrity": "sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg==", + "dev": true + }, + "node_modules/@webassemblyjs/helper-buffer": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.1.tgz", + "integrity": "sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA==", + "dev": true + }, + "node_modules/@webassemblyjs/helper-numbers": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.1.tgz", + "integrity": "sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ==", 
+ "dev": true, + "dependencies": { + "@webassemblyjs/floating-point-hex-parser": "1.11.1", + "@webassemblyjs/helper-api-error": "1.11.1", + "@xtuc/long": "4.2.2" + } + }, + "node_modules/@webassemblyjs/helper-wasm-bytecode": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.1.tgz", + "integrity": "sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q==", + "dev": true + }, + "node_modules/@webassemblyjs/helper-wasm-section": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.1.tgz", + "integrity": "sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg==", + "dev": true, + "dependencies": { + "@webassemblyjs/ast": "1.11.1", + "@webassemblyjs/helper-buffer": "1.11.1", + "@webassemblyjs/helper-wasm-bytecode": "1.11.1", + "@webassemblyjs/wasm-gen": "1.11.1" + } + }, + "node_modules/@webassemblyjs/ieee754": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.11.1.tgz", + "integrity": "sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ==", + "dev": true, + "dependencies": { + "@xtuc/ieee754": "^1.2.0" + } + }, + "node_modules/@webassemblyjs/leb128": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.11.1.tgz", + "integrity": "sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw==", + "dev": true, + "dependencies": { + "@xtuc/long": "4.2.2" + } + }, + "node_modules/@webassemblyjs/utf8": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.11.1.tgz", + "integrity": "sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ==", + "dev": true + }, + "node_modules/@webassemblyjs/wasm-edit": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.1.tgz", + "integrity": "sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA==", + "dev": true, + "dependencies": { + "@webassemblyjs/ast": "1.11.1", + "@webassemblyjs/helper-buffer": "1.11.1", + "@webassemblyjs/helper-wasm-bytecode": "1.11.1", + "@webassemblyjs/helper-wasm-section": "1.11.1", + "@webassemblyjs/wasm-gen": "1.11.1", + "@webassemblyjs/wasm-opt": "1.11.1", + "@webassemblyjs/wasm-parser": "1.11.1", + "@webassemblyjs/wast-printer": "1.11.1" + } + }, + "node_modules/@webassemblyjs/wasm-gen": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.1.tgz", + "integrity": "sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA==", + "dev": true, + "dependencies": { + "@webassemblyjs/ast": "1.11.1", + "@webassemblyjs/helper-wasm-bytecode": "1.11.1", + "@webassemblyjs/ieee754": "1.11.1", + "@webassemblyjs/leb128": "1.11.1", + "@webassemblyjs/utf8": "1.11.1" + } + }, + "node_modules/@webassemblyjs/wasm-opt": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.1.tgz", + "integrity": "sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw==", + "dev": true, + "dependencies": { + "@webassemblyjs/ast": "1.11.1", + "@webassemblyjs/helper-buffer": "1.11.1", + "@webassemblyjs/wasm-gen": "1.11.1", 
+ "@webassemblyjs/wasm-parser": "1.11.1" + } + }, + "node_modules/@webassemblyjs/wasm-parser": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.1.tgz", + "integrity": "sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA==", + "dev": true, + "dependencies": { + "@webassemblyjs/ast": "1.11.1", + "@webassemblyjs/helper-api-error": "1.11.1", + "@webassemblyjs/helper-wasm-bytecode": "1.11.1", + "@webassemblyjs/ieee754": "1.11.1", + "@webassemblyjs/leb128": "1.11.1", + "@webassemblyjs/utf8": "1.11.1" + } + }, + "node_modules/@webassemblyjs/wast-printer": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.11.1.tgz", + "integrity": "sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg==", + "dev": true, + "dependencies": { + "@webassemblyjs/ast": "1.11.1", + "@xtuc/long": "4.2.2" + } + }, + "node_modules/@webpack-cli/configtest": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@webpack-cli/configtest/-/configtest-1.1.1.tgz", + "integrity": "sha512-1FBc1f9G4P/AxMqIgfZgeOTuRnwZMten8E7zap5zgpPInnCrP8D4Q81+4CWIch8i/Nf7nXjP0v6CjjbHOrXhKg==", + "dev": true, + "peerDependencies": { + "webpack": "4.x.x || 5.x.x", + "webpack-cli": "4.x.x" + } + }, + "node_modules/@webpack-cli/info": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@webpack-cli/info/-/info-1.4.1.tgz", + "integrity": "sha512-PKVGmazEq3oAo46Q63tpMr4HipI3OPfP7LiNOEJg963RMgT0rqheag28NCML0o3GIzA3DmxP1ZIAv9oTX1CUIA==", + "dev": true, + "dependencies": { + "envinfo": "^7.7.3" + }, + "peerDependencies": { + "webpack-cli": "4.x.x" + } + }, + "node_modules/@webpack-cli/serve": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/@webpack-cli/serve/-/serve-1.6.1.tgz", + "integrity": "sha512-gNGTiTrjEVQ0OcVnzsRSqTxaBSr+dmTfm+qJsCDluky8uhdLWep7Gcr62QsAKHTMxjCS/8nEITsmFAhfIx+QSw==", + "dev": true, + "peerDependencies": { + "webpack-cli": "4.x.x" + }, + "peerDependenciesMeta": { + "webpack-dev-server": { + "optional": true + } + } + }, + "node_modules/@xtuc/ieee754": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@xtuc/ieee754/-/ieee754-1.2.0.tgz", + "integrity": "sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==", + "dev": true + }, + "node_modules/@xtuc/long": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@xtuc/long/-/long-4.2.2.tgz", + "integrity": "sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==", + "dev": true + }, + "node_modules/abab": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/abab/-/abab-2.0.5.tgz", + "integrity": "sha512-9IK9EadsbHo6jLWIpxpR6pL0sazTXV6+SQv25ZB+F7Bj9mJNaOc4nCRabwd5M/JwmUa8idz6Eci6eKfJryPs6Q==", + "dev": true + }, + "node_modules/acorn": { + "version": "8.7.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.0.tgz", + "integrity": "sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ==", + "dev": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-globals": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/acorn-globals/-/acorn-globals-6.0.0.tgz", + "integrity": "sha512-ZQl7LOWaF5ePqqcX4hLuv/bLXYQNfNWw2c0/yX/TsPRKamzHcTGQnlCjHT3TsmkOUVEPS3crCxiPfdzE/Trlhg==", + "dev": true, + "dependencies": { + 
"acorn": "^7.1.1", + "acorn-walk": "^7.1.1" + } + }, + "node_modules/acorn-globals/node_modules/acorn": { + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", + "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", + "dev": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-import-assertions": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.8.0.tgz", + "integrity": "sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw==", + "dev": true, + "peerDependencies": { + "acorn": "^8" + } + }, + "node_modules/acorn-walk": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-7.2.0.tgz", + "integrity": "sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA==", + "dev": true, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "dev": true, + "dependencies": { + "debug": "4" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ajv-keywords": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", + "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", + "dev": true, + "peerDependencies": { + "ajv": "^6.9.1" + } + }, + "node_modules/ansi-escapes": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", + "dev": true, + "dependencies": { + "type-fest": "^0.21.3" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/anymatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.2.tgz", + "integrity": "sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==", + 
"dev": true, + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=", + "dev": true + }, + "node_modules/axios": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/axios/-/axios-0.25.0.tgz", + "integrity": "sha512-cD8FOb0tRH3uuEe6+evtAbgJtfxr7ly3fQjYcMcuPlgkwVS9xboaVIpcDV+cYQe+yGykgwZCs1pzjntcGa6l5g==", + "dev": true, + "dependencies": { + "follow-redirects": "^1.14.7" + } + }, + "node_modules/babel-jest": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-27.5.1.tgz", + "integrity": "sha512-cdQ5dXjGRd0IBRATiQ4mZGlGlRE8kJpjPOixdNRdT+m3UcNqmYWN6rK6nvtXYfY3D76cb8s/O1Ss8ea24PIwcg==", + "dev": true, + "dependencies": { + "@jest/transform": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/babel__core": "^7.1.14", + "babel-plugin-istanbul": "^6.1.1", + "babel-preset-jest": "^27.5.1", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "slash": "^3.0.0" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.8.0" + } + }, + "node_modules/babel-jest/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/babel-jest/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/babel-jest/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/babel-jest/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/babel-jest/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + 
"node_modules/babel-jest/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/babel-jest/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/babel-loader": { + "version": "8.2.3", + "resolved": "https://registry.npmjs.org/babel-loader/-/babel-loader-8.2.3.tgz", + "integrity": "sha512-n4Zeta8NC3QAsuyiizu0GkmRcQ6clkV9WFUnUf1iXP//IeSKbWjofW3UHyZVwlOB4y039YQKefawyTn64Zwbuw==", + "dev": true, + "dependencies": { + "find-cache-dir": "^3.3.1", + "loader-utils": "^1.4.0", + "make-dir": "^3.1.0", + "schema-utils": "^2.6.5" + }, + "engines": { + "node": ">= 8.9" + }, + "peerDependencies": { + "@babel/core": "^7.0.0", + "webpack": ">=2" + } + }, + "node_modules/babel-loader/node_modules/make-dir": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "dev": true, + "dependencies": { + "semver": "^6.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/babel-loader/node_modules/schema-utils": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.1.tgz", + "integrity": "sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg==", + "dev": true, + "dependencies": { + "@types/json-schema": "^7.0.5", + "ajv": "^6.12.4", + "ajv-keywords": "^3.5.2" + }, + "engines": { + "node": ">= 8.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/babel-plugin-dynamic-import-node": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz", + "integrity": "sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ==", + "dev": true, + "dependencies": { + "object.assign": "^4.1.0" + } + }, + "node_modules/babel-plugin-istanbul": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz", + "integrity": "sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.0.0", + "@istanbuljs/load-nyc-config": "^1.0.0", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-instrument": "^5.0.4", + "test-exclude": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/babel-plugin-jest-hoist": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-27.5.1.tgz", + "integrity": "sha512-50wCwD5EMNW4aRpOwtqzyZHIewTYNxLA4nhB+09d8BIssfNfzBRhkBIHiaPv1Si226TQSvp8gxAJm2iY2qs2hQ==", + "dev": true, + "dependencies": { + "@babel/template": "^7.3.3", + "@babel/types": "^7.3.3", + "@types/babel__core": "^7.0.0", + 
"@types/babel__traverse": "^7.0.6" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/babel-plugin-polyfill-corejs2": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.3.1.tgz", + "integrity": "sha512-v7/T6EQcNfVLfcN2X8Lulb7DjprieyLWJK/zOWH5DUYcAgex9sP3h25Q+DLsX9TloXe3y1O8l2q2Jv9q8UVB9w==", + "dev": true, + "dependencies": { + "@babel/compat-data": "^7.13.11", + "@babel/helper-define-polyfill-provider": "^0.3.1", + "semver": "^6.1.1" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/babel-plugin-polyfill-corejs3": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.5.1.tgz", + "integrity": "sha512-TihqEe4sQcb/QcPJvxe94/9RZuLQuF1+To4WqQcRvc+3J3gLCPIPgDKzGLG6zmQLfH3nn25heRuDNkS2KR4I8A==", + "dev": true, + "dependencies": { + "@babel/helper-define-polyfill-provider": "^0.3.1", + "core-js-compat": "^3.20.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/babel-plugin-polyfill-regenerator": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.3.1.tgz", + "integrity": "sha512-Y2B06tvgHYt1x0yz17jGkGeeMr5FeKUu+ASJ+N6nB5lQ8Dapfg42i0OVrf8PNGJ3zKL4A23snMi1IRwrqqND7A==", + "dev": true, + "dependencies": { + "@babel/helper-define-polyfill-provider": "^0.3.1" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/babel-preset-current-node-syntax": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz", + "integrity": "sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ==", + "dev": true, + "dependencies": { + "@babel/plugin-syntax-async-generators": "^7.8.4", + "@babel/plugin-syntax-bigint": "^7.8.3", + "@babel/plugin-syntax-class-properties": "^7.8.3", + "@babel/plugin-syntax-import-meta": "^7.8.3", + "@babel/plugin-syntax-json-strings": "^7.8.3", + "@babel/plugin-syntax-logical-assignment-operators": "^7.8.3", + "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", + "@babel/plugin-syntax-numeric-separator": "^7.8.3", + "@babel/plugin-syntax-object-rest-spread": "^7.8.3", + "@babel/plugin-syntax-optional-catch-binding": "^7.8.3", + "@babel/plugin-syntax-optional-chaining": "^7.8.3", + "@babel/plugin-syntax-top-level-await": "^7.8.3" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/babel-preset-jest": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-27.5.1.tgz", + "integrity": "sha512-Nptf2FzlPCWYuJg41HBqXVT8ym6bXOevuCTbhxlUpjwtysGaIWFvDEjp4y+G7fl13FgOdjs7P/DmErqH7da0Ag==", + "dev": true, + "dependencies": { + "babel-plugin-jest-hoist": "^27.5.1", + "babel-preset-current-node-syntax": "^1.0.0" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "node_modules/big.js": { + "version": "5.2.2", + "resolved": 
"https://registry.npmjs.org/big.js/-/big.js-5.2.2.tgz", + "integrity": "sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/binary-extensions": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", + "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", + "dev": true, + "optional": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "dependencies": { + "fill-range": "^7.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browser-process-hrtime": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz", + "integrity": "sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow==", + "dev": true + }, + "node_modules/browserslist": { + "version": "4.19.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.19.1.tgz", + "integrity": "sha512-u2tbbG5PdKRTUoctO3NBD8FQ5HdPh1ZXPHzp1rwaa5jTc+RV9/+RlWiAIKmjRPQF+xbGM9Kklj5bZQFa2s/38A==", + "dev": true, + "dependencies": { + "caniuse-lite": "^1.0.30001286", + "electron-to-chromium": "^1.4.17", + "escalade": "^3.1.1", + "node-releases": "^2.0.1", + "picocolors": "^1.0.0" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + } + }, + "node_modules/bser": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/bser/-/bser-2.1.1.tgz", + "integrity": "sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==", + "dev": true, + "dependencies": { + "node-int64": "^0.4.0" + } + }, + "node_modules/buffer-from": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", + "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", + "dev": true + }, + "node_modules/call-bind": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", + "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/camelcase": { + "version": 
"5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001303", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001303.tgz", + "integrity": "sha512-/Mqc1oESndUNszJP0kx0UaQU9kEv9nNtJ7Kn8AdA0mNnH8eR1cj0kG+NbNuC1Wq/b21eA8prhKRA3bbkjONegQ==", + "dev": true, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + } + }, + "node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/char-regex": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/char-regex/-/char-regex-1.0.2.tgz", + "integrity": "sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/chokidar": { + "version": "3.5.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", + "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://paulmillr.com/funding/" + } + ], + "optional": true, + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/chrome-trace-event": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz", + "integrity": "sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg==", + "dev": true, + "engines": { + "node": ">=6.0" + } + }, + "node_modules/ci-info": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.3.0.tgz", + "integrity": "sha512-riT/3vI5YpVH6/qomlDnJow6TBee2PBKSEpx3O32EGPYbWGIRsIlGRms3Sm74wYE1JMo8RnO04Hb12+v1J5ICw==", + "dev": true + }, + "node_modules/cjs-module-lexer": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.2.2.tgz", + "integrity": "sha512-cOU9usZw8/dXIXKtwa8pM0OTJQuJkxMN6w30csNRUerHfeQ5R6U3kkU/FtJeIf3M202OHfY2U8ccInBG7/xogA==", + "dev": true + }, + "node_modules/cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "node_modules/clone-deep": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/clone-deep/-/clone-deep-4.0.1.tgz", + "integrity": "sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==", + "dev": true, + "dependencies": { + "is-plain-object": "^2.0.4", + "kind-of": 
"^6.0.2", + "shallow-clone": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/co": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", + "integrity": "sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ=", + "dev": true, + "engines": { + "iojs": ">= 1.0.0", + "node": ">= 0.12.0" + } + }, + "node_modules/collect-v8-coverage": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz", + "integrity": "sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg==", + "dev": true + }, + "node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", + "dev": true + }, + "node_modules/colorette": { + "version": "2.0.16", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.16.tgz", + "integrity": "sha512-hUewv7oMjCp+wkBv5Rm0v87eJhq4woh5rSR+42YSQJKecCqgIqNkZ6lAlQms/BwHPJA5NKMRlpxPRv0n8HQW6g==", + "dev": true + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dev": true, + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/commander": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", + "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/commondir": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", + "integrity": "sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs=", + "dev": true + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", + "dev": true + }, + "node_modules/convert-source-map": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.8.0.tgz", + "integrity": "sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA==", + "dev": true, + "dependencies": { + "safe-buffer": "~5.1.1" + } + }, + "node_modules/core-js-compat": { + "version": "3.20.3", + "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.20.3.tgz", + "integrity": "sha512-c8M5h0IkNZ+I92QhIpuSijOxGAcj3lgpsWdkCqmUTZNwidujF4r3pi6x1DCN+Vcs5qTS2XWWMfWSuCqyupX8gw==", + "dev": true, + "dependencies": { + "browserslist": "^4.19.1", + "semver": "7.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/core-js" + } + }, + "node_modules/core-js-compat/node_modules/semver": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.0.0.tgz", + "integrity": "sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==", + "dev": true, + "bin": { + 
"semver": "bin/semver.js" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/cssom": { + "version": "0.4.4", + "resolved": "https://registry.npmjs.org/cssom/-/cssom-0.4.4.tgz", + "integrity": "sha512-p3pvU7r1MyyqbTk+WbNJIgJjG2VmTIaB10rI93LzVPrmDJKkzKYMtxxyAvQXR/NS6otuzveI7+7BBq3SjBS2mw==", + "dev": true + }, + "node_modules/cssstyle": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-2.3.0.tgz", + "integrity": "sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A==", + "dev": true, + "dependencies": { + "cssom": "~0.3.6" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cssstyle/node_modules/cssom": { + "version": "0.3.8", + "resolved": "https://registry.npmjs.org/cssom/-/cssom-0.3.8.tgz", + "integrity": "sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg==", + "dev": true + }, + "node_modules/data-urls": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-2.0.0.tgz", + "integrity": "sha512-X5eWTSXO/BJmpdIKCRuKUgSCgAN0OwliVK3yPKbwIWU1Tdw5BRajxlzMidvh+gwko9AfQ9zIj52pzF91Q3YAvQ==", + "dev": true, + "dependencies": { + "abab": "^2.0.3", + "whatwg-mimetype": "^2.3.0", + "whatwg-url": "^8.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/debug": { + "version": "4.3.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.3.tgz", + "integrity": "sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/decimal.js": { + "version": "10.3.1", + "resolved": "https://registry.npmjs.org/decimal.js/-/decimal.js-10.3.1.tgz", + "integrity": "sha512-V0pfhfr8suzyPGOx3nmq4aHqabehUZn6Ch9kyFpV79TGDTWFmHqUqXdabR7QHqxzrYolF4+tVmJhUG4OURg5dQ==", + "dev": true + }, + "node_modules/dedent": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/dedent/-/dedent-0.7.0.tgz", + "integrity": "sha1-JJXduvbrh0q7Dhvp3yLS5aVEMmw=", + "dev": true + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true + }, + "node_modules/deepmerge": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.2.2.tgz", + "integrity": "sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/define-properties": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", + "integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==", + "dev": true, + "dependencies": { + "object-keys": "^1.0.12" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + 
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=", + "dev": true, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/detect-newline": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", + "integrity": "sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/diff-sequences": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-27.5.1.tgz", + "integrity": "sha512-k1gCAXAsNgLwEL+Y8Wvl+M6oEFj5bgazfZULpS5CneoPPXRaCCW7dm+q21Ky2VEE5X+VeRDBVg1Pcvvsr4TtNQ==", + "dev": true, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/domexception": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/domexception/-/domexception-2.0.1.tgz", + "integrity": "sha512-yxJ2mFy/sibVQlu5qHjOkf9J3K6zgmCxgJ94u2EdvDOV09H+32LtRswEcUsmUWN72pVLOEnTSRaIVVzVQgS0dg==", + "dev": true, + "dependencies": { + "webidl-conversions": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/domexception/node_modules/webidl-conversions": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-5.0.0.tgz", + "integrity": "sha512-VlZwKPCkYKxQgeSbH5EyngOmRp7Ww7I9rQLERETtf5ofd9pGeswWiOtogpEO850jziPRarreGxn5QIiTqpb2wA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/electron-to-chromium": { + "version": "1.4.57", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.57.tgz", + "integrity": "sha512-FNC+P5K1n6pF+M0zIK+gFCoXcJhhzDViL3DRIGy2Fv5PohuSES1JHR7T+GlwxSxlzx4yYbsuzCZvHxcBSRCIOw==", + "dev": true + }, + "node_modules/emittery": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/emittery/-/emittery-0.8.1.tgz", + "integrity": "sha512-uDfvUjVrfGJJhymx/kz6prltenw1u7WrCg1oa94zYY8xxVpLLUu045LAT0dhDZdXG58/EpPL/5kA180fQ/qudg==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/emittery?sponsor=1" + } + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/emojis-list": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/emojis-list/-/emojis-list-3.0.0.tgz", + "integrity": "sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/enhanced-resolve": { + "version": "5.8.3", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.8.3.tgz", + "integrity": "sha512-EGAbGvH7j7Xt2nc0E7D99La1OiEs8LnyimkRgwExpUMScN6O+3x9tIWs7PLQZVNx4YD+00skHXPXi1yQHpAmZA==", + "dev": true, + "dependencies": { + "graceful-fs": "^4.2.4", + "tapable": "^2.2.0" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/envinfo": { + "version": "7.8.1", + "resolved": "https://registry.npmjs.org/envinfo/-/envinfo-7.8.1.tgz", + "integrity": "sha512-/o+BXHmB7ocbHEAs6F2EnG0ogybVVUdkRunTT2glZU9XAaGmhqskrvKwqXuDfNjEO0LZKWdejEEpnq8aM0tOaw==", + "dev": true, + "bin": { + "envinfo": "dist/cli.js" + }, + "engines": { + "node": ">=4" + } 
+ }, + "node_modules/error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "dependencies": { + "is-arrayish": "^0.2.1" + } + }, + "node_modules/es-module-lexer": { + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-0.9.3.tgz", + "integrity": "sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ==", + "dev": true + }, + "node_modules/escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/escodegen": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-2.0.0.tgz", + "integrity": "sha512-mmHKys/C8BFUGI+MAWNcSYoORYLMdPzjrknd2Vc+bUsjN5bXcr8EhrNB+UTqfL1y3I9c4fw2ihgtMPQLBRiQxw==", + "dev": true, + "dependencies": { + "esprima": "^4.0.1", + "estraverse": "^5.2.0", + "esutils": "^2.0.2", + "optionator": "^0.8.1" + }, + "bin": { + "escodegen": "bin/escodegen.js", + "esgenerate": "bin/esgenerate.js" + }, + "engines": { + "node": ">=6.0" + }, + "optionalDependencies": { + "source-map": "~0.6.1" + } + }, + "node_modules/escodegen/node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/escodegen/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/eslint-scope": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", + "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", + "dev": true, + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^4.1.1" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true, + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + 
"node_modules/esrecurse/node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/events": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", + "dev": true, + "engines": { + "node": ">=0.8.x" + } + }, + "node_modules/execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/exit": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz", + "integrity": "sha1-BjJjj42HfMghB9MKD/8aF8uhzQw=", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/expect": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/expect/-/expect-27.5.1.tgz", + "integrity": "sha512-E1q5hSUG2AmYQwQJ041nvgpkODHQvB+RKlB4IYdru6uJsyFTRyZAP463M+1lINorwbqAmUggi6+WwkD8lCS/Dw==", + "dev": true, + "dependencies": { + "@jest/types": "^27.5.1", + "jest-get-type": "^27.5.1", + "jest-matcher-utils": "^27.5.1", + "jest-message-util": "^27.5.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", + "dev": true + }, + "node_modules/fastest-levenshtein": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/fastest-levenshtein/-/fastest-levenshtein-1.0.12.tgz", + "integrity": 
"sha512-On2N+BpYJ15xIC974QNVuYGMOlEVt4s0EOI3wwMqOmK1fdDY+FN/zltPV8vosq4ad4c/gJ1KHScUn/6AWIgiow==", + "dev": true + }, + "node_modules/fb-watchman": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.1.tgz", + "integrity": "sha512-DkPJKQeY6kKwmuMretBhr7G6Vodr7bFwDYTXIkfG1gjvNpaxBTQV3PbXg6bR1c1UP4jPOX0jHUbbHANL9vRjVg==", + "dev": true, + "dependencies": { + "bser": "2.1.1" + } + }, + "node_modules/fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-cache-dir": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.2.tgz", + "integrity": "sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig==", + "dev": true, + "dependencies": { + "commondir": "^1.0.1", + "make-dir": "^3.0.2", + "pkg-dir": "^4.1.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/avajs/find-cache-dir?sponsor=1" + } + }, + "node_modules/find-cache-dir/node_modules/make-dir": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "dev": true, + "dependencies": { + "semver": "^6.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/follow-redirects": { + "version": "1.14.8", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.8.tgz", + "integrity": "sha512-1x0S9UVJHsQprFcEC/qnNzBLcIxsjAV905f/UkQxbclCsoTWlacCNOpQa/anodLl2uaEKFhfWOvM2Qg77+15zA==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, + "node_modules/form-data": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz", + "integrity": "sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==", + "dev": true, + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fs-readdir-recursive": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/fs-readdir-recursive/-/fs-readdir-recursive-1.1.0.tgz", + "integrity": "sha512-GNanXlVr2pf02+sPN40XN8HG+ePaNcvM0q5mZBd668Obwb0yD5GiUbZOFgwn8kGMY6I3mdyDJzieUy3PTYyTRA==", + "dev": true + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "dev": true + }, + "node_modules/fsevents": { + "version": "2.3.2", 
+ "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-intrinsic": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.1.tgz", + "integrity": "sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-package-type": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz", + "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==", + "dev": true, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/glob": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", + "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "optional": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/glob-to-regexp": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz", + "integrity": 
"sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==", + "dev": true + }, + "node_modules/globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.9", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.9.tgz", + "integrity": "sha512-NtNxqUcXgpW2iMrfqSfR73Glt39K+BLwWsPs94yR63v45T0Wbej7eRmL5cWfwEgqXnmjQp3zaJTshdRW/qC2ZQ==", + "dev": true + }, + "node_modules/has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.1" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/has-symbols": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz", + "integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/html-encoding-sniffer": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz", + "integrity": "sha512-D5JbOMBIR/TVZkubHT+OyT2705QvogUW4IBn6nHd756OwieSF9aDYFj4dv6HHEVGYbHaLETa3WggZYWWMyy3ZQ==", + "dev": true, + "dependencies": { + "whatwg-encoding": "^1.0.5" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/html-escaper": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", + "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", + "dev": true + }, + "node_modules/http-proxy-agent": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz", + "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==", + "dev": true, + "dependencies": { + "@tootallnate/once": "1", + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/https-proxy-agent": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz", + "integrity": "sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA==", + "dev": true, + "dependencies": { + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "dev": true, + "engines": { + "node": ">=10.17.0" + } + }, + "node_modules/iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": 
"sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "dev": true, + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/import-local": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.1.0.tgz", + "integrity": "sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==", + "dev": true, + "dependencies": { + "pkg-dir": "^4.2.0", + "resolve-cwd": "^3.0.0" + }, + "bin": { + "import-local-fixture": "fixtures/cli.js" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", + "dev": true, + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dev": true, + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "node_modules/interpret": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/interpret/-/interpret-2.2.0.tgz", + "integrity": "sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==", + "dev": true, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", + "dev": true + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "optional": true, + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-core-module": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.8.1.tgz", + "integrity": "sha512-SdNCUs284hr40hFTFP6l0IfZ/RSrMXF3qgoRHd3/79unUTvrFO/JoXwkGm+5J/Oe3E/b5GsnG330uUNgRpu1PA==", + "dev": true, + "dependencies": { + "has": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", + "dev": true, + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-generator-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-generator-fn/-/is-generator-fn-2.1.0.tgz", + "integrity": 
"sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "optional": true, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-plain-object": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", + "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", + "dev": true, + "dependencies": { + "isobject": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-potential-custom-element-name": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz", + "integrity": "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==", + "dev": true + }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-typedarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=", + "dev": true + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", + "dev": true + }, + "node_modules/isobject": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", + "integrity": "sha1-TkMekrEalzFjaqH5yNHMvP2reN8=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/istanbul-lib-coverage": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz", + "integrity": "sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-instrument": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-5.1.0.tgz", + "integrity": "sha512-czwUz525rkOFDJxfKK6mYfIs9zBKILyrZQxjz3ABhjQXhbhFsSbo1HW/BFcsDnfJYJWA6thRR5/TUY2qs5W99Q==", + "dev": true, + "dependencies": { + "@babel/core": "^7.12.3", + "@babel/parser": "^7.14.7", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-coverage": "^3.2.0", + "semver": "^6.3.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-report": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz", + "integrity": 
"sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw==", + "dev": true, + "dependencies": { + "istanbul-lib-coverage": "^3.0.0", + "make-dir": "^3.0.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-report/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-report/node_modules/make-dir": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "dev": true, + "dependencies": { + "semver": "^6.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/istanbul-lib-report/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-source-maps": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz", + "integrity": "sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==", + "dev": true, + "dependencies": { + "debug": "^4.1.1", + "istanbul-lib-coverage": "^3.0.0", + "source-map": "^0.6.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-source-maps/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/istanbul-reports": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.4.tgz", + "integrity": "sha512-r1/DshN4KSE7xWEknZLLLLDn5CJybV3nw01VTkp6D5jzLuELlcbudfj/eSQFvrKsJuTVCGnePO7ho82Nw9zzfw==", + "dev": true, + "dependencies": { + "html-escaper": "^2.0.0", + "istanbul-lib-report": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest/-/jest-27.5.1.tgz", + "integrity": "sha512-Yn0mADZB89zTtjkPJEXwrac3LHudkQMR+Paqa8uxJHCBr9agxztUifWCyiYrjhMPBoUVBjyny0I7XH6ozDr7QQ==", + "dev": true, + "dependencies": { + "@jest/core": "^27.5.1", + "import-local": "^3.0.2", + "jest-cli": "^27.5.1" + }, + "bin": { + "jest": "bin/jest.js" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/jest-changed-files": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-27.5.1.tgz", + "integrity": "sha512-buBLMiByfWGCoMsLLzGUUSpAmIAGnbR2KJoMN10ziLhOLvP4e0SlypHnAel8iqQXTrcbmfEY9sSqae5sgUsTvw==", + "dev": true, + 
"dependencies": { + "@jest/types": "^27.5.1", + "execa": "^5.0.0", + "throat": "^6.0.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-circus": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-circus/-/jest-circus-27.5.1.tgz", + "integrity": "sha512-D95R7x5UtlMA5iBYsOHFFbMD/GVA4R/Kdq15f7xYWUfWHBto9NYRsOvnSauTgdF+ogCpJ4tyKOXhUifxS65gdw==", + "dev": true, + "dependencies": { + "@jest/environment": "^27.5.1", + "@jest/test-result": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "co": "^4.6.0", + "dedent": "^0.7.0", + "expect": "^27.5.1", + "is-generator-fn": "^2.0.0", + "jest-each": "^27.5.1", + "jest-matcher-utils": "^27.5.1", + "jest-message-util": "^27.5.1", + "jest-runtime": "^27.5.1", + "jest-snapshot": "^27.5.1", + "jest-util": "^27.5.1", + "pretty-format": "^27.5.1", + "slash": "^3.0.0", + "stack-utils": "^2.0.3", + "throat": "^6.0.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-circus/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jest-circus/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/jest-circus/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/jest-circus/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/jest-circus/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-circus/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-circus/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": 
"sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-cli": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-27.5.1.tgz", + "integrity": "sha512-Hc6HOOwYq4/74/c62dEE3r5elx8wjYqxY0r0G/nFrLDPMFRu6RA/u8qINOIkvhxG7mMQ5EJsOGfRpI8L6eFUVw==", + "dev": true, + "dependencies": { + "@jest/core": "^27.5.1", + "@jest/test-result": "^27.5.1", + "@jest/types": "^27.5.1", + "chalk": "^4.0.0", + "exit": "^0.1.2", + "graceful-fs": "^4.2.9", + "import-local": "^3.0.2", + "jest-config": "^27.5.1", + "jest-util": "^27.5.1", + "jest-validate": "^27.5.1", + "prompts": "^2.0.1", + "yargs": "^16.2.0" + }, + "bin": { + "jest": "bin/jest.js" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/jest-cli/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jest-cli/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/jest-cli/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/jest-cli/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/jest-cli/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-cli/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-config": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-config/-/jest-config-27.5.1.tgz", + "integrity": 
"sha512-5sAsjm6tGdsVbW9ahcChPAFCk4IlkQUknH5AvKjuLTSlcO/wCZKyFdn7Rg0EkC+OGgWODEy2hDpWB1PgzH0JNA==", + "dev": true, + "dependencies": { + "@babel/core": "^7.8.0", + "@jest/test-sequencer": "^27.5.1", + "@jest/types": "^27.5.1", + "babel-jest": "^27.5.1", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "deepmerge": "^4.2.2", + "glob": "^7.1.1", + "graceful-fs": "^4.2.9", + "jest-circus": "^27.5.1", + "jest-environment-jsdom": "^27.5.1", + "jest-environment-node": "^27.5.1", + "jest-get-type": "^27.5.1", + "jest-jasmine2": "^27.5.1", + "jest-regex-util": "^27.5.1", + "jest-resolve": "^27.5.1", + "jest-runner": "^27.5.1", + "jest-util": "^27.5.1", + "jest-validate": "^27.5.1", + "micromatch": "^4.0.4", + "parse-json": "^5.2.0", + "pretty-format": "^27.5.1", + "slash": "^3.0.0", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + }, + "peerDependencies": { + "ts-node": ">=9.0.0" + }, + "peerDependenciesMeta": { + "ts-node": { + "optional": true + } + } + }, + "node_modules/jest-config/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jest-config/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/jest-config/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/jest-config/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/jest-config/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-config/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-config/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + 
"dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-diff": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-27.5.1.tgz", + "integrity": "sha512-m0NvkX55LDt9T4mctTEgnZk3fmEg3NRYutvMPWM/0iPnkFj2wIeF45O1718cMSOFO1vINkqmxqD8vE37uTEbqw==", + "dev": true, + "dependencies": { + "chalk": "^4.0.0", + "diff-sequences": "^27.5.1", + "jest-get-type": "^27.5.1", + "pretty-format": "^27.5.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-diff/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jest-diff/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/jest-diff/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/jest-diff/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/jest-diff/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-diff/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-docblock": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-docblock/-/jest-docblock-27.5.1.tgz", + "integrity": "sha512-rl7hlABeTsRYxKiUfpHrQrG4e2obOiTQWfMEH3PxPjOtdsfLQO4ReWSZaQ7DETm4xu07rl4q/h4zcKXyU0/OzQ==", + "dev": true, + "dependencies": { + "detect-newline": "^3.0.0" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-each": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-each/-/jest-each-27.5.1.tgz", + "integrity": "sha512-1Ff6p+FbhT/bXQnEouYy00bkNSY7OUpfIcmdl8vZ31A1UUaurOLPA8a8BbJOF2RDUElwJhmeaV7LnagI+5UwNQ==", + "dev": true, + 
"dependencies": { + "@jest/types": "^27.5.1", + "chalk": "^4.0.0", + "jest-get-type": "^27.5.1", + "jest-util": "^27.5.1", + "pretty-format": "^27.5.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-each/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jest-each/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/jest-each/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/jest-each/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/jest-each/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-each/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-environment-jsdom": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-environment-jsdom/-/jest-environment-jsdom-27.5.1.tgz", + "integrity": "sha512-TFBvkTC1Hnnnrka/fUb56atfDtJ9VMZ94JkjTbggl1PEpwrYtUBKMezB3inLmWqQsXYLcMwNoDQwoBTAvFfsfw==", + "dev": true, + "dependencies": { + "@jest/environment": "^27.5.1", + "@jest/fake-timers": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/node": "*", + "jest-mock": "^27.5.1", + "jest-util": "^27.5.1", + "jsdom": "^16.6.0" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-environment-node": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-27.5.1.tgz", + "integrity": "sha512-Jt4ZUnxdOsTGwSRAfKEnE6BcwsSPNOijjwifq5sDFSA2kesnXTvNqKHYgM0hDq3549Uf/KzdXNYn4wMZJPlFLw==", + "dev": true, + "dependencies": { + "@jest/environment": "^27.5.1", + "@jest/fake-timers": "^27.5.1", + "@jest/types": "^27.5.1", + 
"@types/node": "*", + "jest-mock": "^27.5.1", + "jest-util": "^27.5.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-get-type": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-27.5.1.tgz", + "integrity": "sha512-2KY95ksYSaK7DMBWQn6dQz3kqAf3BB64y2udeG+hv4KfSOb9qwcYQstTJc1KCbsix+wLZWZYN8t7nwX3GOBLRw==", + "dev": true, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-haste-map": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-27.5.1.tgz", + "integrity": "sha512-7GgkZ4Fw4NFbMSDSpZwXeBiIbx+t/46nJ2QitkOjvwPYyZmqttu2TDSimMHP1EkPOi4xUZAN1doE5Vd25H4Jng==", + "dev": true, + "dependencies": { + "@jest/types": "^27.5.1", + "@types/graceful-fs": "^4.1.2", + "@types/node": "*", + "anymatch": "^3.0.3", + "fb-watchman": "^2.0.0", + "graceful-fs": "^4.2.9", + "jest-regex-util": "^27.5.1", + "jest-serializer": "^27.5.1", + "jest-util": "^27.5.1", + "jest-worker": "^27.5.1", + "micromatch": "^4.0.4", + "walker": "^1.0.7" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + }, + "optionalDependencies": { + "fsevents": "^2.3.2" + } + }, + "node_modules/jest-jasmine2": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-jasmine2/-/jest-jasmine2-27.5.1.tgz", + "integrity": "sha512-jtq7VVyG8SqAorDpApwiJJImd0V2wv1xzdheGHRGyuT7gZm6gG47QEskOlzsN1PG/6WNaCo5pmwMHDf3AkG2pQ==", + "dev": true, + "dependencies": { + "@jest/environment": "^27.5.1", + "@jest/source-map": "^27.5.1", + "@jest/test-result": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "co": "^4.6.0", + "expect": "^27.5.1", + "is-generator-fn": "^2.0.0", + "jest-each": "^27.5.1", + "jest-matcher-utils": "^27.5.1", + "jest-message-util": "^27.5.1", + "jest-runtime": "^27.5.1", + "jest-snapshot": "^27.5.1", + "jest-util": "^27.5.1", + "pretty-format": "^27.5.1", + "throat": "^6.0.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-jasmine2/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jest-jasmine2/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/jest-jasmine2/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/jest-jasmine2/node_modules/color-name": { + "version": "1.1.4", + "resolved": 
"https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/jest-jasmine2/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-jasmine2/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-leak-detector": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-27.5.1.tgz", + "integrity": "sha512-POXfWAMvfU6WMUXftV4HolnJfnPOGEu10fscNCA76KBpRRhcMN2c8d3iT2pxQS3HLbA+5X4sOUPzYO2NUyIlHQ==", + "dev": true, + "dependencies": { + "jest-get-type": "^27.5.1", + "pretty-format": "^27.5.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-matcher-utils": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-27.5.1.tgz", + "integrity": "sha512-z2uTx/T6LBaCoNWNFWwChLBKYxTMcGBRjAt+2SbP929/Fflb9aa5LGma654Rz8z9HLxsrUaYzxE9T/EFIL/PAw==", + "dev": true, + "dependencies": { + "chalk": "^4.0.0", + "jest-diff": "^27.5.1", + "jest-get-type": "^27.5.1", + "pretty-format": "^27.5.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-matcher-utils/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jest-matcher-utils/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/jest-matcher-utils/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/jest-matcher-utils/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + 
"node_modules/jest-matcher-utils/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-matcher-utils/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-message-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-27.5.1.tgz", + "integrity": "sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.12.13", + "@jest/types": "^27.5.1", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "micromatch": "^4.0.4", + "pretty-format": "^27.5.1", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-message-util/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jest-message-util/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/jest-message-util/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/jest-message-util/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/jest-message-util/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-message-util/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": 
"sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-message-util/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-mock": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-mock/-/jest-mock-27.5.1.tgz", + "integrity": "sha512-K4jKbY1d4ENhbrG2zuPWaQBvDly+iZ2yAW+T1fATN78hc0sInwn7wZB8XtlNnvHug5RMwV897Xm4LqmPM4e2Og==", + "dev": true, + "dependencies": { + "@jest/types": "^27.5.1", + "@types/node": "*" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-pnp-resolver": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/jest-pnp-resolver/-/jest-pnp-resolver-1.2.2.tgz", + "integrity": "sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w==", + "dev": true, + "engines": { + "node": ">=6" + }, + "peerDependencies": { + "jest-resolve": "*" + }, + "peerDependenciesMeta": { + "jest-resolve": { + "optional": true + } + } + }, + "node_modules/jest-regex-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-27.5.1.tgz", + "integrity": "sha512-4bfKq2zie+x16okqDXjXn9ql2B0dScQu+vcwe4TvFVhkVyuWLqpZrZtXxLLWoXYgn0E87I6r6GRYHF7wFZBUvg==", + "dev": true, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-resolve": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-resolve/-/jest-resolve-27.5.1.tgz", + "integrity": "sha512-FFDy8/9E6CV83IMbDpcjOhumAQPDyETnU2KZ1O98DwTnz8AOBsW/Xv3GySr1mOZdItLR+zDZ7I/UdTFbgSOVCw==", + "dev": true, + "dependencies": { + "@jest/types": "^27.5.1", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^27.5.1", + "jest-pnp-resolver": "^1.2.2", + "jest-util": "^27.5.1", + "jest-validate": "^27.5.1", + "resolve": "^1.20.0", + "resolve.exports": "^1.1.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-resolve-dependencies": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-27.5.1.tgz", + "integrity": "sha512-QQOOdY4PE39iawDn5rzbIePNigfe5B9Z91GDD1ae/xNDlu9kaat8QQ5EKnNmVWPV54hUdxCVwwj6YMgR2O7IOg==", + "dev": true, + "dependencies": { + "@jest/types": "^27.5.1", + "jest-regex-util": "^27.5.1", + "jest-snapshot": "^27.5.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-resolve/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jest-resolve/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": 
"sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/jest-resolve/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/jest-resolve/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/jest-resolve/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-resolve/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-resolve/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-runner": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-27.5.1.tgz", + "integrity": "sha512-g4NPsM4mFCOwFKXO4p/H/kWGdJp9V8kURY2lX8Me2drgXqG7rrZAx5kv+5H7wtt/cdFIjhqYx1HrlqWHaOvDaQ==", + "dev": true, + "dependencies": { + "@jest/console": "^27.5.1", + "@jest/environment": "^27.5.1", + "@jest/test-result": "^27.5.1", + "@jest/transform": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "emittery": "^0.8.1", + "graceful-fs": "^4.2.9", + "jest-docblock": "^27.5.1", + "jest-environment-jsdom": "^27.5.1", + "jest-environment-node": "^27.5.1", + "jest-haste-map": "^27.5.1", + "jest-leak-detector": "^27.5.1", + "jest-message-util": "^27.5.1", + "jest-resolve": "^27.5.1", + "jest-runtime": "^27.5.1", + "jest-util": "^27.5.1", + "jest-worker": "^27.5.1", + "source-map-support": "^0.5.6", + "throat": "^6.0.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-runner/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jest-runner/node_modules/chalk": { + "version": "4.1.2", + 
"resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/jest-runner/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/jest-runner/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/jest-runner/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-runner/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-runtime": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-27.5.1.tgz", + "integrity": "sha512-o7gxw3Gf+H2IGt8fv0RiyE1+r83FJBRruoA+FXrlHw6xEyBsU8ugA6IPfTdVyA0w8HClpbK+DGJxH59UrNMx8A==", + "dev": true, + "dependencies": { + "@jest/environment": "^27.5.1", + "@jest/fake-timers": "^27.5.1", + "@jest/globals": "^27.5.1", + "@jest/source-map": "^27.5.1", + "@jest/test-result": "^27.5.1", + "@jest/transform": "^27.5.1", + "@jest/types": "^27.5.1", + "chalk": "^4.0.0", + "cjs-module-lexer": "^1.0.0", + "collect-v8-coverage": "^1.0.0", + "execa": "^5.0.0", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^27.5.1", + "jest-message-util": "^27.5.1", + "jest-mock": "^27.5.1", + "jest-regex-util": "^27.5.1", + "jest-resolve": "^27.5.1", + "jest-snapshot": "^27.5.1", + "jest-util": "^27.5.1", + "slash": "^3.0.0", + "strip-bom": "^4.0.0" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-runtime/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jest-runtime/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + 
"ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/jest-runtime/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/jest-runtime/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/jest-runtime/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-runtime/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-runtime/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-serializer": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-serializer/-/jest-serializer-27.5.1.tgz", + "integrity": "sha512-jZCyo6iIxO1aqUxpuBlwTDMkzOAJS4a3eYz3YzgxxVQFwLeSA7Jfq5cbqCY+JLvTDrWirgusI/0KwxKMgrdf7w==", + "dev": true, + "dependencies": { + "@types/node": "*", + "graceful-fs": "^4.2.9" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-snapshot": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-27.5.1.tgz", + "integrity": "sha512-yYykXI5a0I31xX67mgeLw1DZ0bJB+gpq5IpSuCAoyDi0+BhgU/RIrL+RTzDmkNTchvDFWKP8lp+w/42Z3us5sA==", + "dev": true, + "dependencies": { + "@babel/core": "^7.7.2", + "@babel/generator": "^7.7.2", + "@babel/plugin-syntax-typescript": "^7.7.2", + "@babel/traverse": "^7.7.2", + "@babel/types": "^7.0.0", + "@jest/transform": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/babel__traverse": "^7.0.4", + "@types/prettier": "^2.1.5", + "babel-preset-current-node-syntax": "^1.0.0", + "chalk": "^4.0.0", + "expect": "^27.5.1", + "graceful-fs": "^4.2.9", + "jest-diff": "^27.5.1", + "jest-get-type": "^27.5.1", + "jest-haste-map": "^27.5.1", + "jest-matcher-utils": "^27.5.1", + "jest-message-util": "^27.5.1", + "jest-util": "^27.5.1", + "natural-compare": "^1.4.0", + "pretty-format": "^27.5.1", + "semver": "^7.3.2" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-snapshot/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": 
"sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jest-snapshot/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/jest-snapshot/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/jest-snapshot/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/jest-snapshot/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-snapshot/node_modules/semver": { + "version": "7.3.5", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", + "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/jest-snapshot/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "dependencies": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-util/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + 
"node_modules/jest-util/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/jest-util/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/jest-util/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/jest-util/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-util/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-validate": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-validate/-/jest-validate-27.5.1.tgz", + "integrity": "sha512-thkNli0LYTmOI1tDB3FI1S1RTp/Bqyd9pTarJwL87OIBFuqEb5Apv5EaApEudYg4g86e3CT6kM0RowkhtEnCBQ==", + "dev": true, + "dependencies": { + "@jest/types": "^27.5.1", + "camelcase": "^6.2.0", + "chalk": "^4.0.0", + "jest-get-type": "^27.5.1", + "leven": "^3.1.0", + "pretty-format": "^27.5.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-validate/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jest-validate/node_modules/camelcase": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/jest-validate/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": 
"^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/jest-validate/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/jest-validate/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/jest-validate/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-validate/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-watcher": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-watcher/-/jest-watcher-27.5.1.tgz", + "integrity": "sha512-z676SuD6Z8o8qbmEGhoEUFOM1+jfEiL3DXHK/xgEiG2EyNYfFG60jluWcupY6dATjfEsKQuibReS1djInQnoVw==", + "dev": true, + "dependencies": { + "@jest/test-result": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/node": "*", + "ansi-escapes": "^4.2.1", + "chalk": "^4.0.0", + "jest-util": "^27.5.1", + "string-length": "^4.0.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-watcher/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jest-watcher/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/jest-watcher/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/jest-watcher/node_modules/color-name": { + "version": "1.1.4", + "resolved": 
"https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/jest-watcher/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-watcher/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-worker": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-27.5.1.tgz", + "integrity": "sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==", + "dev": true, + "dependencies": { + "@types/node": "*", + "merge-stream": "^2.0.0", + "supports-color": "^8.0.0" + }, + "engines": { + "node": ">= 10.13.0" + } + }, + "node_modules/jest-worker/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-worker/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "node_modules/js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dev": true, + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/jsdom": { + "version": "16.7.0", + "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-16.7.0.tgz", + "integrity": "sha512-u9Smc2G1USStM+s/x1ru5Sxrl6mPYCbByG1U/hUmqaVsm4tbNyS7CicOSRyuGQYZhTu0h84qkZZQ/I+dzizSVw==", + "dev": true, + "dependencies": { + "abab": "^2.0.5", + "acorn": "^8.2.4", + "acorn-globals": "^6.0.0", + "cssom": "^0.4.4", + "cssstyle": "^2.3.0", + "data-urls": "^2.0.0", + "decimal.js": "^10.2.1", + "domexception": "^2.0.1", + "escodegen": "^2.0.0", + "form-data": "^3.0.0", + "html-encoding-sniffer": "^2.0.1", + "http-proxy-agent": "^4.0.1", + "https-proxy-agent": "^5.0.0", + "is-potential-custom-element-name": "^1.0.1", + "nwsapi": "^2.2.0", + "parse5": "6.0.1", + "saxes": "^5.0.1", + "symbol-tree": "^3.2.4", + "tough-cookie": "^4.0.0", + "w3c-hr-time": 
"^1.0.2", + "w3c-xmlserializer": "^2.0.0", + "webidl-conversions": "^6.1.0", + "whatwg-encoding": "^1.0.5", + "whatwg-mimetype": "^2.3.0", + "whatwg-url": "^8.5.0", + "ws": "^7.4.6", + "xml-name-validator": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "canvas": "^2.5.0" + }, + "peerDependenciesMeta": { + "canvas": { + "optional": true + } + } + }, + "node_modules/jsesc": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", + "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", + "dev": true, + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/json-parse-better-errors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz", + "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==", + "dev": true + }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/jwt-decode": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/jwt-decode/-/jwt-decode-3.1.2.tgz", + "integrity": "sha512-UfpWE/VZn0iP50d8cz9NrZLM9lSWhcJ+0Gt/nm4by88UL+J1SiKN8/5dkjMmbEzwL2CAe+67GsegCbIKtbp75A==" + }, + "node_modules/kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/kleur": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", + "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/leven": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", + "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/levn": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", + "integrity": "sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=", + "dev": true, + "dependencies": { + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": 
"sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true + }, + "node_modules/loader-runner": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/loader-runner/-/loader-runner-4.2.0.tgz", + "integrity": "sha512-92+huvxMvYlMzMt0iIOukcwYBFpkYJdpl2xsZ7LrlayO7E8SOv+JJUEK17B/dJIHAOLMfh2dZZ/Y18WgmGtYNw==", + "dev": true, + "engines": { + "node": ">=6.11.5" + } + }, + "node_modules/loader-utils": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-1.4.2.tgz", + "integrity": "sha512-I5d00Pd/jwMD2QCduo657+YM/6L3KZu++pmX9VFncxaxvHcru9jx1lBaFft+r4Mt2jK0Yhp41XlRAihzPxHNCg==", + "dev": true, + "dependencies": { + "big.js": "^5.2.2", + "emojis-list": "^3.0.0", + "json5": "^1.0.1" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/loader-utils/node_modules/json5": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz", + "integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==", + "dev": true, + "dependencies": { + "minimist": "^1.2.0" + }, + "bin": { + "json5": "lib/cli.js" + } + }, + "node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "dev": true + }, + "node_modules/lodash.debounce": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz", + "integrity": "sha1-gteb/zCmfEAF/9XiUVMArZyk168=", + "dev": true + }, + "node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/make-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", + "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", + "dev": true, + "dependencies": { + "pify": "^4.0.1", + "semver": "^5.6.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/make-dir/node_modules/semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true, + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/makeerror": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/makeerror/-/makeerror-1.0.12.tgz", + "integrity": "sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==", + "dev": true, + "dependencies": { + "tmpl": "1.0.5" + } + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": 
"sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true + }, + "node_modules/micromatch": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", + "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", + "dev": true, + "dependencies": { + "braces": "^3.0.1", + "picomatch": "^2.2.3" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mime-db": { + "version": "1.51.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.51.0.tgz", + "integrity": "sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g==", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.34", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.34.tgz", + "integrity": "sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A==", + "dev": true, + "dependencies": { + "mime-db": "1.51.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minimist": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", + "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==", + "dev": true + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=", + "dev": true + }, + "node_modules/neo-async": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", + "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==", + "dev": true + }, + "node_modules/node-int64": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz", + "integrity": "sha1-h6kGXNs1XTGC2PlM4RGIuCXGijs=", + "dev": true + }, + "node_modules/node-releases": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.1.tgz", + "integrity": "sha512-CqyzN6z7Q6aMeF/ktcMVTzhAHCEpf8SOarwpzpf8pNBY2k5/oM34UHldUwp8VKI7uxct2HxSRdJjBaZeESzcxA==", + "dev": true + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + 
"node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "dev": true, + "dependencies": { + "path-key": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/nwsapi": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.0.tgz", + "integrity": "sha512-h2AatdwYH+JHiZpv7pt/gSX1XoRGb7L/qSIeuqA6GwYoF9w1vP1cw42TO0aI2pNyshRK5893hNSl+1//vHK7hQ==", + "dev": true + }, + "node_modules/object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object.assign": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.2.tgz", + "integrity": "sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.0", + "define-properties": "^1.1.3", + "has-symbols": "^1.0.1", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dev": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/optionator": { + "version": "0.8.3", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", + "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", + "dev": true, + "dependencies": { + "deep-is": "~0.1.3", + "fast-levenshtein": "~2.0.6", + "levn": "~0.3.0", + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2", + "word-wrap": "~1.2.3" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": 
true, + "engines": { + "node": ">=6" + } + }, + "node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/parse5": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", + "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==", + "dev": true + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true + }, + "node_modules/picocolors": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", + "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==", + "dev": true + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pify": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/pirates": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.5.tgz", + "integrity": "sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/pkg-dir": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", + "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", + "dev": true, + "dependencies": { + "find-up": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/prelude-ls": { + 
"version": "1.1.2", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", + "integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/pretty-format": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-27.5.1.tgz", + "integrity": "sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1", + "ansi-styles": "^5.0.0", + "react-is": "^17.0.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/pretty-format/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/prompts": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz", + "integrity": "sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==", + "dev": true, + "dependencies": { + "kleur": "^3.0.3", + "sisteransi": "^1.0.5" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/psl": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz", + "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==", + "dev": true + }, + "node_modules/punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dev": true, + "dependencies": { + "safe-buffer": "^5.1.0" + } + }, + "node_modules/react-is": { + "version": "17.0.2", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz", + "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==", + "dev": true + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "optional": true, + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/rechoir": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.7.1.tgz", + "integrity": "sha512-/njmZ8s1wVeR6pjTZ+0nCnv8SpZNRMT2D1RLOJQESlYFDBvwpTA4KWJpZ+sBJ4+vhjILRcK7JIFdGCdxEAAitg==", + "dev": true, + "dependencies": { + "resolve": "^1.9.0" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/regenerate": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/regenerate/-/regenerate-1.4.2.tgz", + "integrity": "sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==", + "dev": true + }, + 
"node_modules/regenerate-unicode-properties": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-9.0.0.tgz", + "integrity": "sha512-3E12UeNSPfjrgwjkR81m5J7Aw/T55Tu7nUyZVQYCKEOs+2dkxEY+DpPtZzO4YruuiPb7NkYLVcyJC4+zCbk5pA==", + "dev": true, + "dependencies": { + "regenerate": "^1.4.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/regenerator-runtime": { + "version": "0.13.9", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz", + "integrity": "sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==", + "dev": true + }, + "node_modules/regenerator-transform": { + "version": "0.14.5", + "resolved": "https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.14.5.tgz", + "integrity": "sha512-eOf6vka5IO151Jfsw2NO9WpGX58W6wWmefK3I1zEGr0lOD0u8rwPaNqQL1aRxUaxLeKO3ArNh3VYg1KbaD+FFw==", + "dev": true, + "dependencies": { + "@babel/runtime": "^7.8.4" + } + }, + "node_modules/regexpu-core": { + "version": "4.8.0", + "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-4.8.0.tgz", + "integrity": "sha512-1F6bYsoYiz6is+oz70NWur2Vlh9KWtswuRuzJOfeYUrfPX2o8n74AnUVaOGDbUqVGO9fNHu48/pjJO4sNVwsOg==", + "dev": true, + "dependencies": { + "regenerate": "^1.4.2", + "regenerate-unicode-properties": "^9.0.0", + "regjsgen": "^0.5.2", + "regjsparser": "^0.7.0", + "unicode-match-property-ecmascript": "^2.0.0", + "unicode-match-property-value-ecmascript": "^2.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/regjsgen": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/regjsgen/-/regjsgen-0.5.2.tgz", + "integrity": "sha512-OFFT3MfrH90xIW8OOSyUrk6QHD5E9JOTeGodiJeBS3J6IwlgzJMNE/1bZklWz5oTg+9dCMyEetclvCVXOPoN3A==", + "dev": true + }, + "node_modules/regjsparser": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.7.0.tgz", + "integrity": "sha512-A4pcaORqmNMDVwUjWoTzuhwMGpP+NykpfqAsEgI1FSH/EzC7lrN5TMd+kN8YCovX+jMpu8eaqXgXPCa0g8FQNQ==", + "dev": true, + "dependencies": { + "jsesc": "~0.5.0" + }, + "bin": { + "regjsparser": "bin/parser" + } + }, + "node_modules/regjsparser/node_modules/jsesc": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz", + "integrity": "sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0=", + "dev": true, + "bin": { + "jsesc": "bin/jsesc" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve": { + "version": "1.22.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.0.tgz", + "integrity": "sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw==", + "dev": true, + "dependencies": { + "is-core-module": "^2.8.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-cwd": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz", + "integrity": "sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==", + "dev": true, + "dependencies": { + "resolve-from": "^5.0.0" + }, 
+ "engines": { + "node": ">=8" + } + }, + "node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve.exports": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/resolve.exports/-/resolve.exports-1.1.0.tgz", + "integrity": "sha512-J1l+Zxxp4XK3LUDZ9m60LRJF/mAe4z6a4xyabPHk7pvK5t35dACV32iIjJDFeWZFfZlO29w6SZ67knR0tHzJtQ==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "dev": true + }, + "node_modules/saxes": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/saxes/-/saxes-5.0.1.tgz", + "integrity": "sha512-5LBh1Tls8c9xgGjw3QrMwETmTMVk0oFgvrFSvWx62llR2hcEInrKNZ2GZCCuuy2lvWrdl5jhbpeqc5hRYKFOcw==", + "dev": true, + "dependencies": { + "xmlchars": "^2.2.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/schema-utils": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz", + "integrity": "sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==", + "dev": true, + "dependencies": { + "@types/json-schema": "^7.0.8", + "ajv": "^6.12.5", + "ajv-keywords": "^3.5.2" + }, + "engines": { + "node": ">= 10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/serialize-javascript": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz", + "integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==", + "dev": true, + "dependencies": { + "randombytes": "^2.1.0" + } + }, + "node_modules/shallow-clone": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz", + "integrity": "sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==", + "dev": true, + "dependencies": { + "kind-of": "^6.0.2" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/signal-exit": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.6.tgz", + "integrity": "sha512-sDl4qMFpijcGw22U5w63KmD3cZJfBuFlVNbVMKje2keoKML7X2UzWbc4XrmEbDwg0NXJc3yv4/ox7b+JWb57kQ==", + "dev": true + }, + "node_modules/sisteransi": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", + "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==", + "dev": true + }, + "node_modules/slash": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz", + "integrity": "sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/source-map": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", + "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/source-map-support": { + "version": "0.5.21", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", + "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", + "dev": true, + "dependencies": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "node_modules/source-map-support/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", + "dev": true + }, + "node_modules/stack-utils": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.5.tgz", + "integrity": "sha512-xrQcmYhOsn/1kX+Vraq+7j4oE2j/6BFscZ0etmYg81xuM8Gq0022Pxb8+IqgOFUIaxHs0KaSb7T1+OegiNrNFA==", + "dev": true, + "dependencies": { + "escape-string-regexp": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/stack-utils/node_modules/escape-string-regexp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/string-length": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/string-length/-/string-length-4.0.2.tgz", + "integrity": "sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==", + "dev": 
true, + "dependencies": { + "char-regex": "^1.0.2", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-bom": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", + "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/supports-hyperlinks": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/supports-hyperlinks/-/supports-hyperlinks-2.2.0.tgz", + "integrity": "sha512-6sXEzV5+I5j8Bmq9/vUphGRM/RJNT9SCURJLjwfOg51heRtguGWDzcaBlgAzKhQa0EVNpPEKzQuBwZ8S8WaCeQ==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0", + "supports-color": "^7.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-hyperlinks/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-hyperlinks/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/symbol-tree": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.4.tgz", + "integrity": "sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==", + "dev": true + }, + "node_modules/tapable": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz", + "integrity": "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/terminal-link": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/terminal-link/-/terminal-link-2.1.1.tgz", + "integrity": "sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ==", + "dev": true, + "dependencies": { + "ansi-escapes": "^4.2.1", + "supports-hyperlinks": "^2.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/terser": { + "version": "5.14.2", + "resolved": "https://registry.npmjs.org/terser/-/terser-5.14.2.tgz", + "integrity": "sha512-oL0rGeM/WFQCUd0y2QrWxYnq7tfSuKBiqTjRPWrRgB46WD/kiwHwF8T23z78H6Q6kGCuuHcPB+KULHRdxvVGQA==", + "dev": true, + "dependencies": { + "@jridgewell/source-map": "^0.3.2", + "acorn": "^8.5.0", + "commander": "^2.20.0", + "source-map-support": "~0.5.20" + }, + "bin": { + "terser": "bin/terser" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/terser-webpack-plugin": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.0.tgz", + "integrity": "sha512-LPIisi3Ol4chwAaPP8toUJ3L4qCM1G0wao7L3qNv57Drezxj6+VEyySpPw4B1HSO2Eg/hDY/MNF5XihCAoqnsQ==", + "dev": true, + "dependencies": { + "jest-worker": "^27.4.1", + "schema-utils": "^3.1.1", + "serialize-javascript": "^6.0.0", + "source-map": "^0.6.1", + "terser": "^5.7.2" + }, + "engines": { + "node": ">= 10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "webpack": "^5.1.0" + }, + "peerDependenciesMeta": { + "@swc/core": { + "optional": true + }, + "esbuild": { + "optional": true + }, + "uglify-js": { + "optional": true + } + } + }, + "node_modules/terser-webpack-plugin/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/terser/node_modules/commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", + "dev": true + }, + "node_modules/test-exclude": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", + "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==", + "dev": true, + 
"dependencies": { + "@istanbuljs/schema": "^0.1.2", + "glob": "^7.1.4", + "minimatch": "^3.0.4" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/throat": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/throat/-/throat-6.0.1.tgz", + "integrity": "sha512-8hmiGIJMDlwjg7dlJ4yKGLK8EsYqKgPWbG3b4wjJddKNwc7N7Dpn08Df4szr/sZdMVeOstrdYSsqzX6BYbcB+w==", + "dev": true + }, + "node_modules/tmpl": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz", + "integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==", + "dev": true + }, + "node_modules/to-fast-properties": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", + "integrity": "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/tough-cookie": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz", + "integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==", + "dev": true, + "dependencies": { + "psl": "^1.1.33", + "punycode": "^2.1.1", + "universalify": "^0.1.2" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/tr46": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-2.1.0.tgz", + "integrity": "sha512-15Ih7phfcdP5YxqiB+iDtLoaTz4Nd35+IiAv0kQ5FNKHzXgdWqPoTIqEDDJmXceQt4JZk6lVPT8lnDlPpGDppw==", + "dev": true, + "dependencies": { + "punycode": "^2.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/type-check": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", + "integrity": "sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=", + "dev": true, + "dependencies": { + "prelude-ls": "~1.1.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/type-fest": { + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/typedarray-to-buffer": { + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz", + "integrity": "sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==", + "dev": true, + "dependencies": { + "is-typedarray": "^1.0.0" + } + }, + "node_modules/typescript": { + "version": "4.5.5", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.5.5.tgz", + "integrity": 
"sha512-TCTIul70LyWe6IJWT8QSYeA54WQe8EjQFU4wY52Fasj5UKx88LNYKCgBEHcOMOrFF1rKGbD8v/xcNWVUq9SymA==", + "dev": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=4.2.0" + } + }, + "node_modules/unicode-canonical-property-names-ecmascript": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz", + "integrity": "sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/unicode-match-property-ecmascript": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz", + "integrity": "sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==", + "dev": true, + "dependencies": { + "unicode-canonical-property-names-ecmascript": "^2.0.0", + "unicode-property-aliases-ecmascript": "^2.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/unicode-match-property-value-ecmascript": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.0.0.tgz", + "integrity": "sha512-7Yhkc0Ye+t4PNYzOGKedDhXbYIBe1XEQYQxOPyhcXNMJ0WCABqqj6ckydd6pWRZTHV4GuCPKdBAUiMc60tsKVw==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/unicode-property-aliases-ecmascript": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.0.0.tgz", + "integrity": "sha512-5Zfuy9q/DFr4tfO7ZPeVXb1aPoeQSdeFMLpYuFebehDAhbuevLs5yxSZmIFN1tP5F9Wl4IpJrYojg85/zgyZHQ==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/universalify": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz", + "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==", + "dev": true, + "engines": { + "node": ">= 4.0.0" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/v8-to-istanbul": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-8.1.1.tgz", + "integrity": "sha512-FGtKtv3xIpR6BYhvgH8MI/y78oT7d8Au3ww4QIxymrCtZEh5b8gCw2siywE+puhEmuWKDtmfrvF5UlB298ut3w==", + "dev": true, + "dependencies": { + "@types/istanbul-lib-coverage": "^2.0.1", + "convert-source-map": "^1.6.0", + "source-map": "^0.7.3" + }, + "engines": { + "node": ">=10.12.0" + } + }, + "node_modules/v8-to-istanbul/node_modules/source-map": { + "version": "0.7.3", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz", + "integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/w3c-hr-time": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz", + "integrity": "sha512-z8P5DvDNjKDoFIHK7q8r8lackT6l+jo/Ye3HOle7l9nICP9lf1Ci25fy9vHd0JOWewkIFzXIEig3TdKT7JQ5fQ==", + "dev": true, + "dependencies": 
{ + "browser-process-hrtime": "^1.0.0" + } + }, + "node_modules/w3c-xmlserializer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-2.0.0.tgz", + "integrity": "sha512-4tzD0mF8iSiMiNs30BiLO3EpfGLZUT2MSX/G+o7ZywDzliWQ3OPtTZ0PTC3B3ca1UAf4cJMHB+2Bf56EriJuRA==", + "dev": true, + "dependencies": { + "xml-name-validator": "^3.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/walker": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/walker/-/walker-1.0.8.tgz", + "integrity": "sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==", + "dev": true, + "dependencies": { + "makeerror": "1.0.12" + } + }, + "node_modules/watchpack": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.3.1.tgz", + "integrity": "sha512-x0t0JuydIo8qCNctdDrn1OzH/qDzk2+rdCOC3YzumZ42fiMqmQ7T3xQurykYMhYfHaPHTp4ZxAx2NfUo1K6QaA==", + "dev": true, + "dependencies": { + "glob-to-regexp": "^0.4.1", + "graceful-fs": "^4.1.2" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/webidl-conversions": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-6.1.0.tgz", + "integrity": "sha512-qBIvFLGiBpLjfwmYAaHPXsn+ho5xZnGvyGvsarywGNc8VyQJUMHJ8OBKGGrPER0okBeMDaan4mNBlgBROxuI8w==", + "dev": true, + "engines": { + "node": ">=10.4" + } + }, + "node_modules/webpack": { + "version": "5.67.0", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.67.0.tgz", + "integrity": "sha512-LjFbfMh89xBDpUMgA1W9Ur6Rn/gnr2Cq1jjHFPo4v6a79/ypznSYbAyPgGhwsxBtMIaEmDD1oJoA7BEYw/Fbrw==", + "dev": true, + "dependencies": { + "@types/eslint-scope": "^3.7.0", + "@types/estree": "^0.0.50", + "@webassemblyjs/ast": "1.11.1", + "@webassemblyjs/wasm-edit": "1.11.1", + "@webassemblyjs/wasm-parser": "1.11.1", + "acorn": "^8.4.1", + "acorn-import-assertions": "^1.7.6", + "browserslist": "^4.14.5", + "chrome-trace-event": "^1.0.2", + "enhanced-resolve": "^5.8.3", + "es-module-lexer": "^0.9.0", + "eslint-scope": "5.1.1", + "events": "^3.2.0", + "glob-to-regexp": "^0.4.1", + "graceful-fs": "^4.2.9", + "json-parse-better-errors": "^1.0.2", + "loader-runner": "^4.2.0", + "mime-types": "^2.1.27", + "neo-async": "^2.6.2", + "schema-utils": "^3.1.0", + "tapable": "^2.1.1", + "terser-webpack-plugin": "^5.1.3", + "watchpack": "^2.3.1", + "webpack-sources": "^3.2.3" + }, + "bin": { + "webpack": "bin/webpack.js" + }, + "engines": { + "node": ">=10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependenciesMeta": { + "webpack-cli": { + "optional": true + } + } + }, + "node_modules/webpack-cli": { + "version": "4.9.2", + "resolved": "https://registry.npmjs.org/webpack-cli/-/webpack-cli-4.9.2.tgz", + "integrity": "sha512-m3/AACnBBzK/kMTcxWHcZFPrw/eQuY4Df1TxvIWfWM2x7mRqBQCqKEd96oCUa9jkapLBaFfRce33eGDb4Pr7YQ==", + "dev": true, + "dependencies": { + "@discoveryjs/json-ext": "^0.5.0", + "@webpack-cli/configtest": "^1.1.1", + "@webpack-cli/info": "^1.4.1", + "@webpack-cli/serve": "^1.6.1", + "colorette": "^2.0.14", + "commander": "^7.0.0", + "execa": "^5.0.0", + "fastest-levenshtein": "^1.0.12", + "import-local": "^3.0.2", + "interpret": "^2.2.0", + "rechoir": "^0.7.0", + "webpack-merge": "^5.7.3" + }, + "bin": { + "webpack-cli": "bin/cli.js" + }, + "engines": { + "node": ">=10.13.0" + }, + "peerDependencies": { + "webpack": "4.x.x || 5.x.x" + }, + "peerDependenciesMeta": { + 
"@webpack-cli/generators": { + "optional": true + }, + "@webpack-cli/migrate": { + "optional": true + }, + "webpack-bundle-analyzer": { + "optional": true + }, + "webpack-dev-server": { + "optional": true + } + } + }, + "node_modules/webpack-cli/node_modules/commander": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", + "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", + "dev": true, + "engines": { + "node": ">= 10" + } + }, + "node_modules/webpack-merge": { + "version": "5.8.0", + "resolved": "https://registry.npmjs.org/webpack-merge/-/webpack-merge-5.8.0.tgz", + "integrity": "sha512-/SaI7xY0831XwP6kzuwhKWVKDP9t1QY1h65lAFLbZqMPIuYcD9QAW4u9STIbU9kaJbPBB/geU/gLr1wDjOhQ+Q==", + "dev": true, + "dependencies": { + "clone-deep": "^4.0.1", + "wildcard": "^2.0.0" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/webpack-sources": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.3.tgz", + "integrity": "sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==", + "dev": true, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/whatwg-encoding": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz", + "integrity": "sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw==", + "dev": true, + "dependencies": { + "iconv-lite": "0.4.24" + } + }, + "node_modules/whatwg-mimetype": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz", + "integrity": "sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g==", + "dev": true + }, + "node_modules/whatwg-url": { + "version": "8.7.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-8.7.0.tgz", + "integrity": "sha512-gAojqb/m9Q8a5IV96E3fHJM70AzCkgt4uXYX2O7EmuyOnLrViCQlsEBmF9UQIu3/aeAIp2U17rtbpZWNntQqdg==", + "dev": true, + "dependencies": { + "lodash": "^4.7.0", + "tr46": "^2.1.0", + "webidl-conversions": "^6.1.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/wildcard": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/wildcard/-/wildcard-2.0.0.tgz", + "integrity": "sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw==", + "dev": true + }, + "node_modules/word-wrap": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", + "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": 
"^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/wrap-ansi/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/wrap-ansi/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + }, + "node_modules/write-file-atomic": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz", + "integrity": "sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==", + "dev": true, + "dependencies": { + "imurmurhash": "^0.1.4", + "is-typedarray": "^1.0.0", + "signal-exit": "^3.0.2", + "typedarray-to-buffer": "^3.1.5" + } + }, + "node_modules/ws": { + "version": "7.5.7", + "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.7.tgz", + "integrity": "sha512-KMvVuFzpKBuiIXW3E4u3mySRO2/mCHSyZDJQM5NQ9Q9KHWHWh0NHgfbRMLLrceUK5qAL4ytALJbpRMjixFZh8A==", + "dev": true, + "engines": { + "node": ">=8.3.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": "^5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, + "node_modules/xml-name-validator": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-3.0.0.tgz", + "integrity": "sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw==", + "dev": true + }, + "node_modules/xmlchars": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/xmlchars/-/xmlchars-2.2.0.tgz", + "integrity": "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==", + "dev": true + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/yargs": { + "version": "16.2.0", + "resolved": 
"https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + "dev": true, + "dependencies": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs-parser": { + "version": "20.2.9", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", + "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", + "dev": true, + "engines": { + "node": ">=10" + } + } + }, + "dependencies": { + "@babel/cli": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/cli/-/cli-7.16.8.tgz", + "integrity": "sha512-FTKBbxyk5TclXOGmwYyqelqP5IF6hMxaeJskd85jbR5jBfYlwqgwAbJwnixi1ZBbTqKfFuAA95mdmUFeSRwyJA==", + "dev": true, + "requires": { + "@nicolo-ribaudo/chokidar-2": "2.1.8-no-fsevents.3", + "chokidar": "^3.4.0", + "commander": "^4.0.1", + "convert-source-map": "^1.1.0", + "fs-readdir-recursive": "^1.1.0", + "glob": "^7.0.0", + "make-dir": "^2.1.0", + "slash": "^2.0.0", + "source-map": "^0.5.0" + } + }, + "@babel/code-frame": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.16.7.tgz", + "integrity": "sha512-iAXqUn8IIeBTNd72xsFlgaXHkMBMt6y4HJp1tIaK465CWLT/fG1aqB7ykr95gHHmlBdGbFeWWfyB4NJJ0nmeIg==", + "dev": true, + "requires": { + "@babel/highlight": "^7.16.7" + } + }, + "@babel/compat-data": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.16.8.tgz", + "integrity": "sha512-m7OkX0IdKLKPpBlJtF561YJal5y/jyI5fNfWbPxh2D/nbzzGI4qRyrD8xO2jB24u7l+5I2a43scCG2IrfjC50Q==", + "dev": true + }, + "@babel/core": { + "version": "7.16.12", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.16.12.tgz", + "integrity": "sha512-dK5PtG1uiN2ikk++5OzSYsitZKny4wOCD0nrO4TqnW4BVBTQ2NGS3NgilvT/TEyxTST7LNyWV/T4tXDoD3fOgg==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.16.7", + "@babel/generator": "^7.16.8", + "@babel/helper-compilation-targets": "^7.16.7", + "@babel/helper-module-transforms": "^7.16.7", + "@babel/helpers": "^7.16.7", + "@babel/parser": "^7.16.12", + "@babel/template": "^7.16.7", + "@babel/traverse": "^7.16.10", + "@babel/types": "^7.16.8", + "convert-source-map": "^1.7.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.1.2", + "semver": "^6.3.0", + "source-map": "^0.5.0" + } + }, + "@babel/generator": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.16.8.tgz", + "integrity": "sha512-1ojZwE9+lOXzcWdWmO6TbUzDfqLD39CmEhN8+2cX9XkDo5yW1OpgfejfliysR2AWLpMamTiOiAp/mtroaymhpw==", + "dev": true, + "requires": { + "@babel/types": "^7.16.8", + "jsesc": "^2.5.1", + "source-map": "^0.5.0" + } + }, + "@babel/helper-annotate-as-pure": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.16.7.tgz", + "integrity": "sha512-s6t2w/IPQVTAET1HitoowRGXooX8mCgtuP5195wD/QJPV6wYjpujCGF7JuMODVX2ZAJOf1GT6DT9MHEZvLOFSw==", + "dev": true, + "requires": { + "@babel/types": "^7.16.7" + } + }, + "@babel/helper-builder-binary-assignment-operator-visitor": { + "version": "7.16.7", + "resolved": 
"https://registry.npmjs.org/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.16.7.tgz", + "integrity": "sha512-C6FdbRaxYjwVu/geKW4ZeQ0Q31AftgRcdSnZ5/jsH6BzCJbtvXvhpfkbkThYSuutZA7nCXpPR6AD9zd1dprMkA==", + "dev": true, + "requires": { + "@babel/helper-explode-assignable-expression": "^7.16.7", + "@babel/types": "^7.16.7" + } + }, + "@babel/helper-compilation-targets": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.16.7.tgz", + "integrity": "sha512-mGojBwIWcwGD6rfqgRXVlVYmPAv7eOpIemUG3dGnDdCY4Pae70ROij3XmfrH6Fa1h1aiDylpglbZyktfzyo/hA==", + "dev": true, + "requires": { + "@babel/compat-data": "^7.16.4", + "@babel/helper-validator-option": "^7.16.7", + "browserslist": "^4.17.5", + "semver": "^6.3.0" + } + }, + "@babel/helper-create-class-features-plugin": { + "version": "7.16.10", + "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.16.10.tgz", + "integrity": "sha512-wDeej0pu3WN/ffTxMNCPW5UCiOav8IcLRxSIyp/9+IF2xJUM9h/OYjg0IJLHaL6F8oU8kqMz9nc1vryXhMsgXg==", + "dev": true, + "requires": { + "@babel/helper-annotate-as-pure": "^7.16.7", + "@babel/helper-environment-visitor": "^7.16.7", + "@babel/helper-function-name": "^7.16.7", + "@babel/helper-member-expression-to-functions": "^7.16.7", + "@babel/helper-optimise-call-expression": "^7.16.7", + "@babel/helper-replace-supers": "^7.16.7", + "@babel/helper-split-export-declaration": "^7.16.7" + } + }, + "@babel/helper-create-regexp-features-plugin": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.16.7.tgz", + "integrity": "sha512-fk5A6ymfp+O5+p2yCkXAu5Kyj6v0xh0RBeNcAkYUMDvvAAoxvSKXn+Jb37t/yWFiQVDFK1ELpUTD8/aLhCPu+g==", + "dev": true, + "requires": { + "@babel/helper-annotate-as-pure": "^7.16.7", + "regexpu-core": "^4.7.1" + } + }, + "@babel/helper-define-polyfill-provider": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.3.1.tgz", + "integrity": "sha512-J9hGMpJQmtWmj46B3kBHmL38UhJGhYX7eqkcq+2gsstyYt341HmPeWspihX43yVRA0mS+8GGk2Gckc7bY/HCmA==", + "dev": true, + "requires": { + "@babel/helper-compilation-targets": "^7.13.0", + "@babel/helper-module-imports": "^7.12.13", + "@babel/helper-plugin-utils": "^7.13.0", + "@babel/traverse": "^7.13.0", + "debug": "^4.1.1", + "lodash.debounce": "^4.0.8", + "resolve": "^1.14.2", + "semver": "^6.1.2" + } + }, + "@babel/helper-environment-visitor": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.16.7.tgz", + "integrity": "sha512-SLLb0AAn6PkUeAfKJCCOl9e1R53pQlGAfc4y4XuMRZfqeMYLE0dM1LMhqbGAlGQY0lfw5/ohoYWAe9V1yibRag==", + "dev": true, + "requires": { + "@babel/types": "^7.16.7" + } + }, + "@babel/helper-explode-assignable-expression": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.16.7.tgz", + "integrity": "sha512-KyUenhWMC8VrxzkGP0Jizjo4/Zx+1nNZhgocs+gLzyZyB8SHidhoq9KK/8Ato4anhwsivfkBLftky7gvzbZMtQ==", + "dev": true, + "requires": { + "@babel/types": "^7.16.7" + } + }, + "@babel/helper-function-name": { + "version": "7.16.7", + "resolved": 
"https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.16.7.tgz", + "integrity": "sha512-QfDfEnIUyyBSR3HtrtGECuZ6DAyCkYFp7GHl75vFtTnn6pjKeK0T1DB5lLkFvBea8MdaiUABx3osbgLyInoejA==", + "dev": true, + "requires": { + "@babel/helper-get-function-arity": "^7.16.7", + "@babel/template": "^7.16.7", + "@babel/types": "^7.16.7" + } + }, + "@babel/helper-get-function-arity": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.16.7.tgz", + "integrity": "sha512-flc+RLSOBXzNzVhcLu6ujeHUrD6tANAOU5ojrRx/as+tbzf8+stUCj7+IfRRoAbEZqj/ahXEMsjhOhgeZsrnTw==", + "dev": true, + "requires": { + "@babel/types": "^7.16.7" + } + }, + "@babel/helper-hoist-variables": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.16.7.tgz", + "integrity": "sha512-m04d/0Op34H5v7pbZw6pSKP7weA6lsMvfiIAMeIvkY/R4xQtBSMFEigu9QTZ2qB/9l22vsxtM8a+Q8CzD255fg==", + "dev": true, + "requires": { + "@babel/types": "^7.16.7" + } + }, + "@babel/helper-member-expression-to-functions": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.16.7.tgz", + "integrity": "sha512-VtJ/65tYiU/6AbMTDwyoXGPKHgTsfRarivm+YbB5uAzKUyuPjgZSgAFeG87FCigc7KNHu2Pegh1XIT3lXjvz3Q==", + "dev": true, + "requires": { + "@babel/types": "^7.16.7" + } + }, + "@babel/helper-module-imports": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.16.7.tgz", + "integrity": "sha512-LVtS6TqjJHFc+nYeITRo6VLXve70xmq7wPhWTqDJusJEgGmkAACWwMiTNrvfoQo6hEhFwAIixNkvB0jPXDL8Wg==", + "dev": true, + "requires": { + "@babel/types": "^7.16.7" + } + }, + "@babel/helper-module-transforms": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.16.7.tgz", + "integrity": "sha512-gaqtLDxJEFCeQbYp9aLAefjhkKdjKcdh6DB7jniIGU3Pz52WAmP268zK0VgPz9hUNkMSYeH976K2/Y6yPadpng==", + "dev": true, + "requires": { + "@babel/helper-environment-visitor": "^7.16.7", + "@babel/helper-module-imports": "^7.16.7", + "@babel/helper-simple-access": "^7.16.7", + "@babel/helper-split-export-declaration": "^7.16.7", + "@babel/helper-validator-identifier": "^7.16.7", + "@babel/template": "^7.16.7", + "@babel/traverse": "^7.16.7", + "@babel/types": "^7.16.7" + } + }, + "@babel/helper-optimise-call-expression": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.16.7.tgz", + "integrity": "sha512-EtgBhg7rd/JcnpZFXpBy0ze1YRfdm7BnBX4uKMBd3ixa3RGAE002JZB66FJyNH7g0F38U05pXmA5P8cBh7z+1w==", + "dev": true, + "requires": { + "@babel/types": "^7.16.7" + } + }, + "@babel/helper-plugin-utils": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.16.7.tgz", + "integrity": "sha512-Qg3Nk7ZxpgMrsox6HreY1ZNKdBq7K72tDSliA6dCl5f007jR4ne8iD5UzuNnCJH2xBf2BEEVGr+/OL6Gdp7RxA==", + "dev": true + }, + "@babel/helper-remap-async-to-generator": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.16.8.tgz", + "integrity": "sha512-fm0gH7Flb8H51LqJHy3HJ3wnE1+qtYR2A99K06ahwrawLdOFsCEWjZOrYricXJHoPSudNKxrMBUPEIPxiIIvBw==", + "dev": true, + "requires": { + "@babel/helper-annotate-as-pure": "^7.16.7", + 
"@babel/helper-wrap-function": "^7.16.8", + "@babel/types": "^7.16.8" + } + }, + "@babel/helper-replace-supers": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.16.7.tgz", + "integrity": "sha512-y9vsWilTNaVnVh6xiJfABzsNpgDPKev9HnAgz6Gb1p6UUwf9NepdlsV7VXGCftJM+jqD5f7JIEubcpLjZj5dBw==", + "dev": true, + "requires": { + "@babel/helper-environment-visitor": "^7.16.7", + "@babel/helper-member-expression-to-functions": "^7.16.7", + "@babel/helper-optimise-call-expression": "^7.16.7", + "@babel/traverse": "^7.16.7", + "@babel/types": "^7.16.7" + } + }, + "@babel/helper-simple-access": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.16.7.tgz", + "integrity": "sha512-ZIzHVyoeLMvXMN/vok/a4LWRy8G2v205mNP0XOuf9XRLyX5/u9CnVulUtDgUTama3lT+bf/UqucuZjqiGuTS1g==", + "dev": true, + "requires": { + "@babel/types": "^7.16.7" + } + }, + "@babel/helper-skip-transparent-expression-wrappers": { + "version": "7.16.0", + "resolved": "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.16.0.tgz", + "integrity": "sha512-+il1gTy0oHwUsBQZyJvukbB4vPMdcYBrFHa0Uc4AizLxbq6BOYC51Rv4tWocX9BLBDLZ4kc6qUFpQ6HRgL+3zw==", + "dev": true, + "requires": { + "@babel/types": "^7.16.0" + } + }, + "@babel/helper-split-export-declaration": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.16.7.tgz", + "integrity": "sha512-xbWoy/PFoxSWazIToT9Sif+jJTlrMcndIsaOKvTA6u7QEo7ilkRZpjew18/W3c7nm8fXdUDXh02VXTbZ0pGDNw==", + "dev": true, + "requires": { + "@babel/types": "^7.16.7" + } + }, + "@babel/helper-validator-identifier": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.16.7.tgz", + "integrity": "sha512-hsEnFemeiW4D08A5gUAZxLBTXpZ39P+a+DGDsHw1yxqyQ/jzFEnxf5uTEGp+3bzAbNOxU1paTgYS4ECU/IgfDw==", + "dev": true + }, + "@babel/helper-validator-option": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.16.7.tgz", + "integrity": "sha512-TRtenOuRUVo9oIQGPC5G9DgK4743cdxvtOw0weQNpZXaS16SCBi5MNjZF8vba3ETURjZpTbVn7Vvcf2eAwFozQ==", + "dev": true + }, + "@babel/helper-wrap-function": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.16.8.tgz", + "integrity": "sha512-8RpyRVIAW1RcDDGTA+GpPAwV22wXCfKOoM9bet6TLkGIFTkRQSkH1nMQ5Yet4MpoXe1ZwHPVtNasc2w0uZMqnw==", + "dev": true, + "requires": { + "@babel/helper-function-name": "^7.16.7", + "@babel/template": "^7.16.7", + "@babel/traverse": "^7.16.8", + "@babel/types": "^7.16.8" + } + }, + "@babel/helpers": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.16.7.tgz", + "integrity": "sha512-9ZDoqtfY7AuEOt3cxchfii6C7GDyyMBffktR5B2jvWv8u2+efwvpnVKXMWzNehqy68tKgAfSwfdw/lWpthS2bw==", + "dev": true, + "requires": { + "@babel/template": "^7.16.7", + "@babel/traverse": "^7.16.7", + "@babel/types": "^7.16.7" + } + }, + "@babel/highlight": { + "version": "7.16.10", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.16.10.tgz", + "integrity": "sha512-5FnTQLSLswEj6IkgVw5KusNUUFY9ZGqe/TRFnP/BKYHYgfh7tc+C7mwiy95/yNP7Dh9x580Vv8r7u7ZfTBFxdw==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.16.7", + "chalk": 
"^2.0.0", + "js-tokens": "^4.0.0" + } + }, + "@babel/parser": { + "version": "7.16.12", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.16.12.tgz", + "integrity": "sha512-VfaV15po8RiZssrkPweyvbGVSe4x2y+aciFCgn0n0/SJMR22cwofRV1mtnJQYcSB1wUTaA/X1LnA3es66MCO5A==", + "dev": true + }, + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.16.7.tgz", + "integrity": "sha512-anv/DObl7waiGEnC24O9zqL0pSuI9hljihqiDuFHC8d7/bjr/4RLGPWuc8rYOff/QPzbEPSkzG8wGG9aDuhHRg==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7" + } + }, + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.16.7.tgz", + "integrity": "sha512-di8vUHRdf+4aJ7ltXhaDbPoszdkh59AQtJM5soLsuHpQJdFQZOA4uGj0V2u/CZ8bJ/u8ULDL5yq6FO/bCXnKHw==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-skip-transparent-expression-wrappers": "^7.16.0", + "@babel/plugin-proposal-optional-chaining": "^7.16.7" + } + }, + "@babel/plugin-proposal-async-generator-functions": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.16.8.tgz", + "integrity": "sha512-71YHIvMuiuqWJQkebWJtdhQTfd4Q4mF76q2IX37uZPkG9+olBxsX+rH1vkhFto4UeJZ9dPY2s+mDvhDm1u2BGQ==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-remap-async-to-generator": "^7.16.8", + "@babel/plugin-syntax-async-generators": "^7.8.4" + } + }, + "@babel/plugin-proposal-class-properties": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.16.7.tgz", + "integrity": "sha512-IobU0Xme31ewjYOShSIqd/ZGM/r/cuOz2z0MDbNrhF5FW+ZVgi0f2lyeoj9KFPDOAqsYxmLWZte1WOwlvY9aww==", + "dev": true, + "requires": { + "@babel/helper-create-class-features-plugin": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7" + } + }, + "@babel/plugin-proposal-class-static-block": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-class-static-block/-/plugin-proposal-class-static-block-7.16.7.tgz", + "integrity": "sha512-dgqJJrcZoG/4CkMopzhPJjGxsIe9A8RlkQLnL/Vhhx8AA9ZuaRwGSlscSh42hazc7WSrya/IK7mTeoF0DP9tEw==", + "dev": true, + "requires": { + "@babel/helper-create-class-features-plugin": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/plugin-syntax-class-static-block": "^7.14.5" + } + }, + "@babel/plugin-proposal-dynamic-import": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.16.7.tgz", + "integrity": "sha512-I8SW9Ho3/8DRSdmDdH3gORdyUuYnk1m4cMxUAdu5oy4n3OfN8flDEH+d60iG7dUfi0KkYwSvoalHzzdRzpWHTg==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/plugin-syntax-dynamic-import": "^7.8.3" + } + }, + "@babel/plugin-proposal-export-namespace-from": { + "version": "7.16.7", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.16.7.tgz", + "integrity": "sha512-ZxdtqDXLRGBL64ocZcs7ovt71L3jhC1RGSyR996svrCi3PYqHNkb3SwPJCs8RIzD86s+WPpt2S73+EHCGO+NUA==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/plugin-syntax-export-namespace-from": "^7.8.3" + } + }, + "@babel/plugin-proposal-json-strings": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.16.7.tgz", + "integrity": "sha512-lNZ3EEggsGY78JavgbHsK9u5P3pQaW7k4axlgFLYkMd7UBsiNahCITShLjNQschPyjtO6dADrL24757IdhBrsQ==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/plugin-syntax-json-strings": "^7.8.3" + } + }, + "@babel/plugin-proposal-logical-assignment-operators": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.16.7.tgz", + "integrity": "sha512-K3XzyZJGQCr00+EtYtrDjmwX7o7PLK6U9bi1nCwkQioRFVUv6dJoxbQjtWVtP+bCPy82bONBKG8NPyQ4+i6yjg==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4" + } + }, + "@babel/plugin-proposal-nullish-coalescing-operator": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.16.7.tgz", + "integrity": "sha512-aUOrYU3EVtjf62jQrCj63pYZ7k6vns2h/DQvHPWGmsJRYzWXZ6/AsfgpiRy6XiuIDADhJzP2Q9MwSMKauBQ+UQ==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3" + } + }, + "@babel/plugin-proposal-numeric-separator": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.16.7.tgz", + "integrity": "sha512-vQgPMknOIgiuVqbokToyXbkY/OmmjAzr/0lhSIbG/KmnzXPGwW/AdhdKpi+O4X/VkWiWjnkKOBiqJrTaC98VKw==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/plugin-syntax-numeric-separator": "^7.10.4" + } + }, + "@babel/plugin-proposal-object-rest-spread": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.16.7.tgz", + "integrity": "sha512-3O0Y4+dw94HA86qSg9IHfyPktgR7q3gpNVAeiKQd+8jBKFaU5NQS1Yatgo4wY+UFNuLjvxcSmzcsHqrhgTyBUA==", + "dev": true, + "requires": { + "@babel/compat-data": "^7.16.4", + "@babel/helper-compilation-targets": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/plugin-syntax-object-rest-spread": "^7.8.3", + "@babel/plugin-transform-parameters": "^7.16.7" + } + }, + "@babel/plugin-proposal-optional-catch-binding": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.16.7.tgz", + "integrity": "sha512-eMOH/L4OvWSZAE1VkHbr1vckLG1WUcHGJSLqqQwl2GaUqG6QjddvrOaTUMNYiv77H5IKPMZ9U9P7EaHwvAShfA==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/plugin-syntax-optional-catch-binding": "^7.8.3" + } + }, + "@babel/plugin-proposal-optional-chaining": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.16.7.tgz", + "integrity": 
"sha512-eC3xy+ZrUcBtP7x+sq62Q/HYd674pPTb/77XZMb5wbDPGWIdUbSr4Agr052+zaUPSb+gGRnjxXfKFvx5iMJ+DA==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-skip-transparent-expression-wrappers": "^7.16.0", + "@babel/plugin-syntax-optional-chaining": "^7.8.3" + } + }, + "@babel/plugin-proposal-private-methods": { + "version": "7.16.11", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.16.11.tgz", + "integrity": "sha512-F/2uAkPlXDr8+BHpZvo19w3hLFKge+k75XUprE6jaqKxjGkSYcK+4c+bup5PdW/7W/Rpjwql7FTVEDW+fRAQsw==", + "dev": true, + "requires": { + "@babel/helper-create-class-features-plugin": "^7.16.10", + "@babel/helper-plugin-utils": "^7.16.7" + } + }, + "@babel/plugin-proposal-private-property-in-object": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.16.7.tgz", + "integrity": "sha512-rMQkjcOFbm+ufe3bTZLyOfsOUOxyvLXZJCTARhJr+8UMSoZmqTe1K1BgkFcrW37rAchWg57yI69ORxiWvUINuQ==", + "dev": true, + "requires": { + "@babel/helper-annotate-as-pure": "^7.16.7", + "@babel/helper-create-class-features-plugin": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/plugin-syntax-private-property-in-object": "^7.14.5" + } + }, + "@babel/plugin-proposal-unicode-property-regex": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.16.7.tgz", + "integrity": "sha512-QRK0YI/40VLhNVGIjRNAAQkEHws0cswSdFFjpFyt943YmJIU1da9uW63Iu6NFV6CxTZW5eTDCrwZUstBWgp/Rg==", + "dev": true, + "requires": { + "@babel/helper-create-regexp-features-plugin": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7" + } + }, + "@babel/plugin-syntax-async-generators": { + "version": "7.8.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz", + "integrity": "sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.0" + } + }, + "@babel/plugin-syntax-bigint": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz", + "integrity": "sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.0" + } + }, + "@babel/plugin-syntax-class-properties": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz", + "integrity": "sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.12.13" + } + }, + "@babel/plugin-syntax-class-static-block": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz", + "integrity": "sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.14.5" + } + }, + "@babel/plugin-syntax-dynamic-import": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz", + 
"integrity": "sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.0" + } + }, + "@babel/plugin-syntax-export-namespace-from": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz", + "integrity": "sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.3" + } + }, + "@babel/plugin-syntax-import-meta": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz", + "integrity": "sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.10.4" + } + }, + "@babel/plugin-syntax-json-strings": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz", + "integrity": "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.0" + } + }, + "@babel/plugin-syntax-logical-assignment-operators": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz", + "integrity": "sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.10.4" + } + }, + "@babel/plugin-syntax-nullish-coalescing-operator": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz", + "integrity": "sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.0" + } + }, + "@babel/plugin-syntax-numeric-separator": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz", + "integrity": "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.10.4" + } + }, + "@babel/plugin-syntax-object-rest-spread": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz", + "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.0" + } + }, + "@babel/plugin-syntax-optional-catch-binding": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz", + "integrity": "sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.0" + } + }, + "@babel/plugin-syntax-optional-chaining": { + "version": "7.8.3", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz", + "integrity": "sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.0" + } + }, + "@babel/plugin-syntax-private-property-in-object": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz", + "integrity": "sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.14.5" + } + }, + "@babel/plugin-syntax-top-level-await": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz", + "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.14.5" + } + }, + "@babel/plugin-syntax-typescript": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.16.7.tgz", + "integrity": "sha512-YhUIJHHGkqPgEcMYkPCKTyGUdoGKWtopIycQyjJH8OjvRgOYsXsaKehLVPScKJWAULPxMa4N1vCe6szREFlZ7A==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7" + } + }, + "@babel/plugin-transform-arrow-functions": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.16.7.tgz", + "integrity": "sha512-9ffkFFMbvzTvv+7dTp/66xvZAWASuPD5Tl9LK3Z9vhOmANo6j94rik+5YMBt4CwHVMWLWpMsriIc2zsa3WW3xQ==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7" + } + }, + "@babel/plugin-transform-async-to-generator": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.16.8.tgz", + "integrity": "sha512-MtmUmTJQHCnyJVrScNzNlofQJ3dLFuobYn3mwOTKHnSCMtbNsqvF71GQmJfFjdrXSsAA7iysFmYWw4bXZ20hOg==", + "dev": true, + "requires": { + "@babel/helper-module-imports": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-remap-async-to-generator": "^7.16.8" + } + }, + "@babel/plugin-transform-block-scoped-functions": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.16.7.tgz", + "integrity": "sha512-JUuzlzmF40Z9cXyytcbZEZKckgrQzChbQJw/5PuEHYeqzCsvebDx0K0jWnIIVcmmDOAVctCgnYs0pMcrYj2zJg==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7" + } + }, + "@babel/plugin-transform-block-scoping": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.16.7.tgz", + "integrity": "sha512-ObZev2nxVAYA4bhyusELdo9hb3H+A56bxH3FZMbEImZFiEDYVHXQSJ1hQKFlDnlt8G9bBrCZ5ZpURZUrV4G5qQ==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7" + } + }, + "@babel/plugin-transform-classes": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.16.7.tgz", + "integrity": "sha512-WY7og38SFAGYRe64BrjKf8OrE6ulEHtr5jEYaZMwox9KebgqPi67Zqz8K53EKk1fFEJgm96r32rkKZ3qA2nCWQ==", + "dev": true, + "requires": { + "@babel/helper-annotate-as-pure": 
"^7.16.7", + "@babel/helper-environment-visitor": "^7.16.7", + "@babel/helper-function-name": "^7.16.7", + "@babel/helper-optimise-call-expression": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-replace-supers": "^7.16.7", + "@babel/helper-split-export-declaration": "^7.16.7", + "globals": "^11.1.0" + } + }, + "@babel/plugin-transform-computed-properties": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.16.7.tgz", + "integrity": "sha512-gN72G9bcmenVILj//sv1zLNaPyYcOzUho2lIJBMh/iakJ9ygCo/hEF9cpGb61SCMEDxbbyBoVQxrt+bWKu5KGw==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7" + } + }, + "@babel/plugin-transform-destructuring": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.16.7.tgz", + "integrity": "sha512-VqAwhTHBnu5xBVDCvrvqJbtLUa++qZaWC0Fgr2mqokBlulZARGyIvZDoqbPlPaKImQ9dKAcCzbv+ul//uqu70A==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7" + } + }, + "@babel/plugin-transform-dotall-regex": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.16.7.tgz", + "integrity": "sha512-Lyttaao2SjZF6Pf4vk1dVKv8YypMpomAbygW+mU5cYP3S5cWTfCJjG8xV6CFdzGFlfWK81IjL9viiTvpb6G7gQ==", + "dev": true, + "requires": { + "@babel/helper-create-regexp-features-plugin": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7" + } + }, + "@babel/plugin-transform-duplicate-keys": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.16.7.tgz", + "integrity": "sha512-03DvpbRfvWIXyK0/6QiR1KMTWeT6OcQ7tbhjrXyFS02kjuX/mu5Bvnh5SDSWHxyawit2g5aWhKwI86EE7GUnTw==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7" + } + }, + "@babel/plugin-transform-exponentiation-operator": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.16.7.tgz", + "integrity": "sha512-8UYLSlyLgRixQvlYH3J2ekXFHDFLQutdy7FfFAMm3CPZ6q9wHCwnUyiXpQCe3gVVnQlHc5nsuiEVziteRNTXEA==", + "dev": true, + "requires": { + "@babel/helper-builder-binary-assignment-operator-visitor": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7" + } + }, + "@babel/plugin-transform-for-of": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.16.7.tgz", + "integrity": "sha512-/QZm9W92Ptpw7sjI9Nx1mbcsWz33+l8kuMIQnDwgQBG5s3fAfQvkRjQ7NqXhtNcKOnPkdICmUHyCaWW06HCsqg==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7" + } + }, + "@babel/plugin-transform-function-name": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.16.7.tgz", + "integrity": "sha512-SU/C68YVwTRxqWj5kgsbKINakGag0KTgq9f2iZEXdStoAbOzLHEBRYzImmA6yFo8YZhJVflvXmIHUO7GWHmxxA==", + "dev": true, + "requires": { + "@babel/helper-compilation-targets": "^7.16.7", + "@babel/helper-function-name": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7" + } + }, + "@babel/plugin-transform-literals": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.16.7.tgz", + "integrity": 
"sha512-6tH8RTpTWI0s2sV6uq3e/C9wPo4PTqqZps4uF0kzQ9/xPLFQtipynvmT1g/dOfEJ+0EQsHhkQ/zyRId8J2b8zQ==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7" + } + }, + "@babel/plugin-transform-member-expression-literals": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.16.7.tgz", + "integrity": "sha512-mBruRMbktKQwbxaJof32LT9KLy2f3gH+27a5XSuXo6h7R3vqltl0PgZ80C8ZMKw98Bf8bqt6BEVi3svOh2PzMw==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7" + } + }, + "@babel/plugin-transform-modules-amd": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.16.7.tgz", + "integrity": "sha512-KaaEtgBL7FKYwjJ/teH63oAmE3lP34N3kshz8mm4VMAw7U3PxjVwwUmxEFksbgsNUaO3wId9R2AVQYSEGRa2+g==", + "dev": true, + "requires": { + "@babel/helper-module-transforms": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7", + "babel-plugin-dynamic-import-node": "^2.3.3" + } + }, + "@babel/plugin-transform-modules-commonjs": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.16.8.tgz", + "integrity": "sha512-oflKPvsLT2+uKQopesJt3ApiaIS2HW+hzHFcwRNtyDGieAeC/dIHZX8buJQ2J2X1rxGPy4eRcUijm3qcSPjYcA==", + "dev": true, + "requires": { + "@babel/helper-module-transforms": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-simple-access": "^7.16.7", + "babel-plugin-dynamic-import-node": "^2.3.3" + } + }, + "@babel/plugin-transform-modules-systemjs": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.16.7.tgz", + "integrity": "sha512-DuK5E3k+QQmnOqBR9UkusByy5WZWGRxfzV529s9nPra1GE7olmxfqO2FHobEOYSPIjPBTr4p66YDcjQnt8cBmw==", + "dev": true, + "requires": { + "@babel/helper-hoist-variables": "^7.16.7", + "@babel/helper-module-transforms": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-validator-identifier": "^7.16.7", + "babel-plugin-dynamic-import-node": "^2.3.3" + } + }, + "@babel/plugin-transform-modules-umd": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.16.7.tgz", + "integrity": "sha512-EMh7uolsC8O4xhudF2F6wedbSHm1HHZ0C6aJ7K67zcDNidMzVcxWdGr+htW9n21klm+bOn+Rx4CBsAntZd3rEQ==", + "dev": true, + "requires": { + "@babel/helper-module-transforms": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7" + } + }, + "@babel/plugin-transform-named-capturing-groups-regex": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.16.8.tgz", + "integrity": "sha512-j3Jw+n5PvpmhRR+mrgIh04puSANCk/T/UA3m3P1MjJkhlK906+ApHhDIqBQDdOgL/r1UYpz4GNclTXxyZrYGSw==", + "dev": true, + "requires": { + "@babel/helper-create-regexp-features-plugin": "^7.16.7" + } + }, + "@babel/plugin-transform-new-target": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.16.7.tgz", + "integrity": "sha512-xiLDzWNMfKoGOpc6t3U+etCE2yRnn3SM09BXqWPIZOBpL2gvVrBWUKnsJx0K/ADi5F5YC5f8APFfWrz25TdlGg==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7" + } + }, + "@babel/plugin-transform-object-super": { + "version": "7.16.7", + 
"resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.16.7.tgz", + "integrity": "sha512-14J1feiQVWaGvRxj2WjyMuXS2jsBkgB3MdSN5HuC2G5nRspa5RK9COcs82Pwy5BuGcjb+fYaUj94mYcOj7rCvw==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-replace-supers": "^7.16.7" + } + }, + "@babel/plugin-transform-parameters": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.16.7.tgz", + "integrity": "sha512-AT3MufQ7zZEhU2hwOA11axBnExW0Lszu4RL/tAlUJBuNoRak+wehQW8h6KcXOcgjY42fHtDxswuMhMjFEuv/aw==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7" + } + }, + "@babel/plugin-transform-property-literals": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.16.7.tgz", + "integrity": "sha512-z4FGr9NMGdoIl1RqavCqGG+ZuYjfZ/hkCIeuH6Do7tXmSm0ls11nYVSJqFEUOSJbDab5wC6lRE/w6YjVcr6Hqw==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7" + } + }, + "@babel/plugin-transform-regenerator": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.16.7.tgz", + "integrity": "sha512-mF7jOgGYCkSJagJ6XCujSQg+6xC1M77/03K2oBmVJWoFGNUtnVJO4WHKJk3dnPC8HCcj4xBQP1Egm8DWh3Pb3Q==", + "dev": true, + "requires": { + "regenerator-transform": "^0.14.2" + } + }, + "@babel/plugin-transform-reserved-words": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.16.7.tgz", + "integrity": "sha512-KQzzDnZ9hWQBjwi5lpY5v9shmm6IVG0U9pB18zvMu2i4H90xpT4gmqwPYsn8rObiadYe2M0gmgsiOIF5A/2rtg==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7" + } + }, + "@babel/plugin-transform-shorthand-properties": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.16.7.tgz", + "integrity": "sha512-hah2+FEnoRoATdIb05IOXf+4GzXYTq75TVhIn1PewihbpyrNWUt2JbudKQOETWw6QpLe+AIUpJ5MVLYTQbeeUg==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7" + } + }, + "@babel/plugin-transform-spread": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.16.7.tgz", + "integrity": "sha512-+pjJpgAngb53L0iaA5gU/1MLXJIfXcYepLgXB3esVRf4fqmj8f2cxM3/FKaHsZms08hFQJkFccEWuIpm429TXg==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-skip-transparent-expression-wrappers": "^7.16.0" + } + }, + "@babel/plugin-transform-sticky-regex": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.16.7.tgz", + "integrity": "sha512-NJa0Bd/87QV5NZZzTuZG5BPJjLYadeSZ9fO6oOUoL4iQx+9EEuw/eEM92SrsT19Yc2jgB1u1hsjqDtH02c3Drw==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7" + } + }, + "@babel/plugin-transform-template-literals": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.16.7.tgz", + "integrity": "sha512-VwbkDDUeenlIjmfNeDX/V0aWrQH2QiVyJtwymVQSzItFDTpxfyJh3EVaQiS0rIN/CqbLGr0VcGmuwyTdZtdIsA==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7" + } + }, 
+ "@babel/plugin-transform-typeof-symbol": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.16.7.tgz", + "integrity": "sha512-p2rOixCKRJzpg9JB4gjnG4gjWkWa89ZoYUnl9snJ1cWIcTH/hvxZqfO+WjG6T8DRBpctEol5jw1O5rA8gkCokQ==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7" + } + }, + "@babel/plugin-transform-typescript": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.16.8.tgz", + "integrity": "sha512-bHdQ9k7YpBDO2d0NVfkj51DpQcvwIzIusJ7mEUaMlbZq3Kt/U47j24inXZHQ5MDiYpCs+oZiwnXyKedE8+q7AQ==", + "dev": true, + "requires": { + "@babel/helper-create-class-features-plugin": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/plugin-syntax-typescript": "^7.16.7" + } + }, + "@babel/plugin-transform-unicode-escapes": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.16.7.tgz", + "integrity": "sha512-TAV5IGahIz3yZ9/Hfv35TV2xEm+kaBDaZQCn2S/hG9/CZ0DktxJv9eKfPc7yYCvOYR4JGx1h8C+jcSOvgaaI/Q==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7" + } + }, + "@babel/plugin-transform-unicode-regex": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.16.7.tgz", + "integrity": "sha512-oC5tYYKw56HO75KZVLQ+R/Nl3Hro9kf8iG0hXoaHP7tjAyCpvqBiSNe6vGrZni1Z6MggmUOC6A7VP7AVmw225Q==", + "dev": true, + "requires": { + "@babel/helper-create-regexp-features-plugin": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7" + } + }, + "@babel/preset-env": { + "version": "7.16.11", + "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.16.11.tgz", + "integrity": "sha512-qcmWG8R7ZW6WBRPZK//y+E3Cli151B20W1Rv7ln27vuPaXU/8TKms6jFdiJtF7UDTxcrb7mZd88tAeK9LjdT8g==", + "dev": true, + "requires": { + "@babel/compat-data": "^7.16.8", + "@babel/helper-compilation-targets": "^7.16.7", + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-validator-option": "^7.16.7", + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "^7.16.7", + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "^7.16.7", + "@babel/plugin-proposal-async-generator-functions": "^7.16.8", + "@babel/plugin-proposal-class-properties": "^7.16.7", + "@babel/plugin-proposal-class-static-block": "^7.16.7", + "@babel/plugin-proposal-dynamic-import": "^7.16.7", + "@babel/plugin-proposal-export-namespace-from": "^7.16.7", + "@babel/plugin-proposal-json-strings": "^7.16.7", + "@babel/plugin-proposal-logical-assignment-operators": "^7.16.7", + "@babel/plugin-proposal-nullish-coalescing-operator": "^7.16.7", + "@babel/plugin-proposal-numeric-separator": "^7.16.7", + "@babel/plugin-proposal-object-rest-spread": "^7.16.7", + "@babel/plugin-proposal-optional-catch-binding": "^7.16.7", + "@babel/plugin-proposal-optional-chaining": "^7.16.7", + "@babel/plugin-proposal-private-methods": "^7.16.11", + "@babel/plugin-proposal-private-property-in-object": "^7.16.7", + "@babel/plugin-proposal-unicode-property-regex": "^7.16.7", + "@babel/plugin-syntax-async-generators": "^7.8.4", + "@babel/plugin-syntax-class-properties": "^7.12.13", + "@babel/plugin-syntax-class-static-block": "^7.14.5", + "@babel/plugin-syntax-dynamic-import": "^7.8.3", + "@babel/plugin-syntax-export-namespace-from": "^7.8.3", + 
"@babel/plugin-syntax-json-strings": "^7.8.3", + "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4", + "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", + "@babel/plugin-syntax-numeric-separator": "^7.10.4", + "@babel/plugin-syntax-object-rest-spread": "^7.8.3", + "@babel/plugin-syntax-optional-catch-binding": "^7.8.3", + "@babel/plugin-syntax-optional-chaining": "^7.8.3", + "@babel/plugin-syntax-private-property-in-object": "^7.14.5", + "@babel/plugin-syntax-top-level-await": "^7.14.5", + "@babel/plugin-transform-arrow-functions": "^7.16.7", + "@babel/plugin-transform-async-to-generator": "^7.16.8", + "@babel/plugin-transform-block-scoped-functions": "^7.16.7", + "@babel/plugin-transform-block-scoping": "^7.16.7", + "@babel/plugin-transform-classes": "^7.16.7", + "@babel/plugin-transform-computed-properties": "^7.16.7", + "@babel/plugin-transform-destructuring": "^7.16.7", + "@babel/plugin-transform-dotall-regex": "^7.16.7", + "@babel/plugin-transform-duplicate-keys": "^7.16.7", + "@babel/plugin-transform-exponentiation-operator": "^7.16.7", + "@babel/plugin-transform-for-of": "^7.16.7", + "@babel/plugin-transform-function-name": "^7.16.7", + "@babel/plugin-transform-literals": "^7.16.7", + "@babel/plugin-transform-member-expression-literals": "^7.16.7", + "@babel/plugin-transform-modules-amd": "^7.16.7", + "@babel/plugin-transform-modules-commonjs": "^7.16.8", + "@babel/plugin-transform-modules-systemjs": "^7.16.7", + "@babel/plugin-transform-modules-umd": "^7.16.7", + "@babel/plugin-transform-named-capturing-groups-regex": "^7.16.8", + "@babel/plugin-transform-new-target": "^7.16.7", + "@babel/plugin-transform-object-super": "^7.16.7", + "@babel/plugin-transform-parameters": "^7.16.7", + "@babel/plugin-transform-property-literals": "^7.16.7", + "@babel/plugin-transform-regenerator": "^7.16.7", + "@babel/plugin-transform-reserved-words": "^7.16.7", + "@babel/plugin-transform-shorthand-properties": "^7.16.7", + "@babel/plugin-transform-spread": "^7.16.7", + "@babel/plugin-transform-sticky-regex": "^7.16.7", + "@babel/plugin-transform-template-literals": "^7.16.7", + "@babel/plugin-transform-typeof-symbol": "^7.16.7", + "@babel/plugin-transform-unicode-escapes": "^7.16.7", + "@babel/plugin-transform-unicode-regex": "^7.16.7", + "@babel/preset-modules": "^0.1.5", + "@babel/types": "^7.16.8", + "babel-plugin-polyfill-corejs2": "^0.3.0", + "babel-plugin-polyfill-corejs3": "^0.5.0", + "babel-plugin-polyfill-regenerator": "^0.3.0", + "core-js-compat": "^3.20.2", + "semver": "^6.3.0" + } + }, + "@babel/preset-modules": { + "version": "0.1.5", + "resolved": "https://registry.npmjs.org/@babel/preset-modules/-/preset-modules-0.1.5.tgz", + "integrity": "sha512-A57th6YRG7oR3cq/yt/Y84MvGgE0eJG2F1JLhKuyG+jFxEgrd/HAMJatiFtmOiZurz+0DkrvbheCLaV5f2JfjA==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.0.0", + "@babel/plugin-proposal-unicode-property-regex": "^7.4.4", + "@babel/plugin-transform-dotall-regex": "^7.4.4", + "@babel/types": "^7.4.4", + "esutils": "^2.0.2" + } + }, + "@babel/preset-typescript": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/preset-typescript/-/preset-typescript-7.16.7.tgz", + "integrity": "sha512-WbVEmgXdIyvzB77AQjGBEyYPZx+8tTsO50XtfozQrkW8QB2rLJpH2lgx0TRw5EJrBxOZQ+wCcyPVQvS8tjEHpQ==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-validator-option": "^7.16.7", + "@babel/plugin-transform-typescript": "^7.16.7" + } + }, + "@babel/runtime": { + "version": "7.16.7", + 
"resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.16.7.tgz", + "integrity": "sha512-9E9FJowqAsytyOY6LG+1KuueckRL+aQW+mKvXRXnuFGyRAyepJPmEo9vgMfXUA6O9u3IeEdv9MAkppFcaQwogQ==", + "dev": true, + "requires": { + "regenerator-runtime": "^0.13.4" + } + }, + "@babel/template": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.16.7.tgz", + "integrity": "sha512-I8j/x8kHUrbYRTUxXrrMbfCa7jxkE7tZre39x3kjr9hvI82cK1FfqLygotcWN5kdPGWcLdWMHpSBavse5tWw3w==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.16.7", + "@babel/parser": "^7.16.7", + "@babel/types": "^7.16.7" + } + }, + "@babel/traverse": { + "version": "7.16.10", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.16.10.tgz", + "integrity": "sha512-yzuaYXoRJBGMlBhsMJoUW7G1UmSb/eXr/JHYM/MsOJgavJibLwASijW7oXBdw3NQ6T0bW7Ty5P/VarOs9cHmqw==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.16.7", + "@babel/generator": "^7.16.8", + "@babel/helper-environment-visitor": "^7.16.7", + "@babel/helper-function-name": "^7.16.7", + "@babel/helper-hoist-variables": "^7.16.7", + "@babel/helper-split-export-declaration": "^7.16.7", + "@babel/parser": "^7.16.10", + "@babel/types": "^7.16.8", + "debug": "^4.1.0", + "globals": "^11.1.0" + } + }, + "@babel/types": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.16.8.tgz", + "integrity": "sha512-smN2DQc5s4M7fntyjGtyIPbRJv6wW4rU/94fmYJ7PKQuZkC0qGMHXJbg6sNGt12JmVr4k5YaptI/XtiLJBnmIg==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.16.7", + "to-fast-properties": "^2.0.0" + } + }, + "@bcoe/v8-coverage": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz", + "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", + "dev": true + }, + "@discoveryjs/json-ext": { + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/@discoveryjs/json-ext/-/json-ext-0.5.6.tgz", + "integrity": "sha512-ws57AidsDvREKrZKYffXddNkyaF14iHNHm8VQnZH6t99E8gczjNN0GpvcGny0imC80yQ0tHz1xVUKk/KFQSUyA==", + "dev": true + }, + "@istanbuljs/load-nyc-config": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz", + "integrity": "sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==", + "dev": true, + "requires": { + "camelcase": "^5.3.1", + "find-up": "^4.1.0", + "get-package-type": "^0.1.0", + "js-yaml": "^3.13.1", + "resolve-from": "^5.0.0" + } + }, + "@istanbuljs/schema": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", + "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", + "dev": true + }, + "@jest/console": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/console/-/console-27.5.1.tgz", + "integrity": "sha512-kZ/tNpS3NXn0mlXXXPNuDZnb4c0oZ20r4K5eemM2k30ZC3G0T02nXUvyhf5YdbXWHPEJLc9qGLxEZ216MdL+Zg==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "jest-message-util": "^27.5.1", + "jest-util": "^27.5.1", + "slash": "^3.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": 
"sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "@jest/core": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/core/-/core-27.5.1.tgz", + "integrity": "sha512-AK6/UTrvQD0Cd24NSqmIA6rKsu0tKIxfiCducZvqxYdmMisOYAsdItspT+fQDQYARPf8XgjAFZi0ogW2agH5nQ==", + "dev": true, + "requires": { + "@jest/console": "^27.5.1", + "@jest/reporters": "^27.5.1", + "@jest/test-result": "^27.5.1", + "@jest/transform": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/node": "*", + "ansi-escapes": "^4.2.1", + "chalk": "^4.0.0", + "emittery": "^0.8.1", + "exit": "^0.1.2", + "graceful-fs": "^4.2.9", + "jest-changed-files": "^27.5.1", + "jest-config": "^27.5.1", + "jest-haste-map": "^27.5.1", + "jest-message-util": "^27.5.1", + "jest-regex-util": "^27.5.1", + "jest-resolve": "^27.5.1", + "jest-resolve-dependencies": "^27.5.1", + "jest-runner": "^27.5.1", + "jest-runtime": "^27.5.1", + "jest-snapshot": "^27.5.1", + "jest-util": "^27.5.1", + "jest-validate": "^27.5.1", + "jest-watcher": "^27.5.1", + "micromatch": "^4.0.4", + "rimraf": "^3.0.0", + "slash": "^3.0.0", + "strip-ansi": "^6.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + 
"version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "@jest/environment": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-27.5.1.tgz", + "integrity": "sha512-/WQjhPJe3/ghaol/4Bq480JKXV/Rfw8nQdN7f41fM8VDHLcxKXou6QyXAh3EFr9/bVG3x74z1NWDkP87EiY8gA==", + "dev": true, + "requires": { + "@jest/fake-timers": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/node": "*", + "jest-mock": "^27.5.1" + } + }, + "@jest/fake-timers": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-27.5.1.tgz", + "integrity": "sha512-/aPowoolwa07k7/oM3aASneNeBGCmGQsc3ugN4u6s4C/+s5M64MFo/+djTdiwcbQlRfFElGuDXWzaWj6QgKObQ==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + "@sinonjs/fake-timers": "^8.0.1", + "@types/node": "*", + "jest-message-util": "^27.5.1", + "jest-mock": "^27.5.1", + "jest-util": "^27.5.1" + } + }, + "@jest/globals": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/globals/-/globals-27.5.1.tgz", + "integrity": "sha512-ZEJNB41OBQQgGzgyInAv0UUfDDj3upmHydjieSxFvTRuZElrx7tXg/uVQ5hYVEwiXs3+aMsAeEc9X7xiSKCm4Q==", + "dev": true, + "requires": { + "@jest/environment": "^27.5.1", + "@jest/types": "^27.5.1", + "expect": "^27.5.1" + } + }, + "@jest/reporters": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/reporters/-/reporters-27.5.1.tgz", + "integrity": "sha512-cPXh9hWIlVJMQkVk84aIvXuBB4uQQmFqZiacloFuGiP3ah1sbCxCosidXFDfqG8+6fO1oR2dTJTlsOy4VFmUfw==", + "dev": true, + "requires": { + "@bcoe/v8-coverage": "^0.2.3", + "@jest/console": "^27.5.1", + "@jest/test-result": "^27.5.1", + "@jest/transform": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "collect-v8-coverage": "^1.0.0", + "exit": "^0.1.2", + "glob": "^7.1.2", + "graceful-fs": "^4.2.9", + "istanbul-lib-coverage": "^3.0.0", + "istanbul-lib-instrument": "^5.1.0", + "istanbul-lib-report": "^3.0.0", + "istanbul-lib-source-maps": "^4.0.0", + "istanbul-reports": "^3.1.3", + "jest-haste-map": "^27.5.1", + "jest-resolve": "^27.5.1", + "jest-util": "^27.5.1", + "jest-worker": "^27.5.1", + "slash": "^3.0.0", + "source-map": "^0.6.0", + "string-length": "^4.0.1", + "terminal-link": "^2.0.0", + "v8-to-istanbul": 
"^8.1.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "@jest/source-map": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-27.5.1.tgz", + "integrity": "sha512-y9NIHUYF3PJRlHk98NdC/N1gl88BL08aQQgu4k4ZopQkCw9t9cV8mtl3TV8b/YCB8XaVTFrmUTAJvjsntDireg==", + "dev": true, + "requires": { + "callsites": "^3.0.0", + "graceful-fs": "^4.2.9", + "source-map": "^0.6.0" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + } + } + }, + "@jest/test-result": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-27.5.1.tgz", + "integrity": "sha512-EW35l2RYFUcUQxFJz5Cv5MTOxlJIQs4I7gxzi2zVU7PJhOwfYq1MdC5nhSmYjX1gmMmLPvB3sIaC+BkcHRBfag==", + "dev": true, + "requires": { + "@jest/console": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/istanbul-lib-coverage": "^2.0.0", + "collect-v8-coverage": "^1.0.0" + } + }, + "@jest/test-sequencer": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-27.5.1.tgz", + "integrity": 
"sha512-LCheJF7WB2+9JuCS7VB/EmGIdQuhtqjRNI9A43idHv3E4KltCTsPsLxvdaubFHSYwY/fNjMWjl6vNRhDiN7vpQ==", + "dev": true, + "requires": { + "@jest/test-result": "^27.5.1", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^27.5.1", + "jest-runtime": "^27.5.1" + } + }, + "@jest/transform": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-27.5.1.tgz", + "integrity": "sha512-ipON6WtYgl/1329g5AIJVbUuEh0wZVbdpGwC99Jw4LwuoBNS95MVphU6zOeD9pDkon+LLbFL7lOQRapbB8SCHw==", + "dev": true, + "requires": { + "@babel/core": "^7.1.0", + "@jest/types": "^27.5.1", + "babel-plugin-istanbul": "^6.1.1", + "chalk": "^4.0.0", + "convert-source-map": "^1.4.0", + "fast-json-stable-stringify": "^2.0.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^27.5.1", + "jest-regex-util": "^27.5.1", + "jest-util": "^27.5.1", + "micromatch": "^4.0.4", + "pirates": "^4.0.4", + "slash": "^3.0.0", + "source-map": "^0.6.1", + "write-file-atomic": "^3.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "@jest/types": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-27.5.1.tgz", + "integrity": "sha512-Cx46iJ9QpwQTjIdq5VJu2QTMMs3QlEjI0x1QbBP5W1+nMzyc2XmimiRR/CbX9TO0cPTeUlxWMOu8mslYsJ8DEw==", + "dev": true, + "requires": { + "@types/istanbul-lib-coverage": "^2.0.0", + "@types/istanbul-reports": 
"^3.0.0", + "@types/node": "*", + "@types/yargs": "^16.0.0", + "chalk": "^4.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "@jridgewell/gen-mapping": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz", + "integrity": "sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==", + "dev": true, + "requires": { + "@jridgewell/set-array": "^1.0.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.9" + } + }, + "@jridgewell/resolve-uri": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz", + "integrity": "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==", + "dev": true + }, + "@jridgewell/set-array": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz", + "integrity": "sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==", + "dev": true + }, + "@jridgewell/source-map": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.2.tgz", + "integrity": "sha512-m7O9o2uR8k2ObDysZYzdfhb08VuEml5oWGiosa1VdaPZ/A6QyPkAJuwN0Q1lhULOf6B7MtQmHENS743hWtCrgw==", + "dev": true, + "requires": { + "@jridgewell/gen-mapping": "^0.3.0", + "@jridgewell/trace-mapping": "^0.3.9" + } + }, + "@jridgewell/sourcemap-codec": { + "version": "1.4.14", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz", + "integrity": "sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==", + "dev": true + }, + "@jridgewell/trace-mapping": { + "version": "0.3.14", + "resolved": 
"https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.14.tgz", + "integrity": "sha512-bJWEfQ9lPTvm3SneWwRFVLzrh6nhjwqw7TUFFBEMzwvg7t7PCDenf2lDwqo4NQXzdpgBXyFgDWnQA+2vkruksQ==", + "dev": true, + "requires": { + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": "^1.4.10" + } + }, + "@nicolo-ribaudo/chokidar-2": { + "version": "2.1.8-no-fsevents.3", + "resolved": "https://registry.npmjs.org/@nicolo-ribaudo/chokidar-2/-/chokidar-2-2.1.8-no-fsevents.3.tgz", + "integrity": "sha512-s88O1aVtXftvp5bCPB7WnmXc5IwOZZ7YPuwNPt+GtOOXpPvad1LfbmjYv+qII7zP6RU2QGnqve27dnLycEnyEQ==", + "dev": true, + "optional": true + }, + "@sinonjs/commons": { + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.8.3.tgz", + "integrity": "sha512-xkNcLAn/wZaX14RPlwizcKicDk9G3F8m2nU3L7Ukm5zBgTwiT0wsoFAHx9Jq56fJA1z/7uKGtCRu16sOUCLIHQ==", + "dev": true, + "requires": { + "type-detect": "4.0.8" + } + }, + "@sinonjs/fake-timers": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-8.1.0.tgz", + "integrity": "sha512-OAPJUAtgeINhh/TAlUID4QTs53Njm7xzddaVlEs/SXwgtiD1tW22zAB/W1wdqfrpmikgaWQ9Fw6Ws+hsiRm5Vg==", + "dev": true, + "requires": { + "@sinonjs/commons": "^1.7.0" + } + }, + "@superset-ui/switchboard": { + "version": "0.18.26-0", + "resolved": "https://registry.npmjs.org/@superset-ui/switchboard/-/switchboard-0.18.26-0.tgz", + "integrity": "sha512-MYvigrspA0EgNU6tA9UrsXcrUYid9YktsbIPx/D4Xd5cWWrJrJl303imQ/SIZbC25faJCd2gL30ORll60Yz3Ww==" + }, + "@tootallnate/once": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz", + "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==", + "dev": true + }, + "@types/babel__core": { + "version": "7.1.18", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.1.18.tgz", + "integrity": "sha512-S7unDjm/C7z2A2R9NzfKCK1I+BAALDtxEmsJBwlB3EzNfb929ykjL++1CK9LO++EIp2fQrC8O+BwjKvz6UeDyQ==", + "dev": true, + "requires": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + "@types/babel__generator": { + "version": "7.6.4", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.6.4.tgz", + "integrity": "sha512-tFkciB9j2K755yrTALxD44McOrk+gfpIpvC3sxHjRawj6PfnQxrse4Clq5y/Rq+G3mrBurMax/lG8Qn2t9mSsg==", + "dev": true, + "requires": { + "@babel/types": "^7.0.0" + } + }, + "@types/babel__template": { + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.1.tgz", + "integrity": "sha512-azBFKemX6kMg5Io+/rdGT0dkGreboUVR0Cdm3fz9QJWpaQGJRQXl7C+6hOTCZcMll7KFyEQpgbYI2lHdsS4U7g==", + "dev": true, + "requires": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "@types/babel__traverse": { + "version": "7.14.2", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.14.2.tgz", + "integrity": "sha512-K2waXdXBi2302XUdcHcR1jCeU0LL4TD9HRs/gk0N2Xvrht+G/BfJa4QObBQZfhMdxiCpV3COl5Nfq4uKTeTnJA==", + "dev": true, + "requires": { + "@babel/types": "^7.3.0" + } + }, + "@types/eslint": { + "version": "8.4.1", + "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.4.1.tgz", + "integrity": "sha512-GE44+DNEyxxh2Kc6ro/VkIj+9ma0pO0bwv9+uHSyBrikYOHr8zYcdPvnBOp1aw8s+CjRvuSx7CyWqRrNFQ59mA==", + "dev": true, + "requires": { + 
"@types/estree": "*", + "@types/json-schema": "*" + } + }, + "@types/eslint-scope": { + "version": "3.7.3", + "resolved": "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.3.tgz", + "integrity": "sha512-PB3ldyrcnAicT35TWPs5IcwKD8S333HMaa2VVv4+wdvebJkjWuW/xESoB8IwRcog8HYVYamb1g/R31Qv5Bx03g==", + "dev": true, + "requires": { + "@types/eslint": "*", + "@types/estree": "*" + } + }, + "@types/estree": { + "version": "0.0.50", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-0.0.50.tgz", + "integrity": "sha512-C6N5s2ZFtuZRj54k2/zyRhNDjJwwcViAM3Nbm8zjBpbqAdZ00mr0CFxvSKeO8Y/e03WVFLpQMdHYVfUd6SB+Hw==", + "dev": true + }, + "@types/graceful-fs": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.5.tgz", + "integrity": "sha512-anKkLmZZ+xm4p8JWBf4hElkM4XR+EZeA2M9BAkkTldmcyDY4mbdIJnRghDJH3Ov5ooY7/UAoENtmdMSkaAd7Cw==", + "dev": true, + "requires": { + "@types/node": "*" + } + }, + "@types/istanbul-lib-coverage": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz", + "integrity": "sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g==", + "dev": true + }, + "@types/istanbul-lib-report": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz", + "integrity": "sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg==", + "dev": true, + "requires": { + "@types/istanbul-lib-coverage": "*" + } + }, + "@types/istanbul-reports": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz", + "integrity": "sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw==", + "dev": true, + "requires": { + "@types/istanbul-lib-report": "*" + } + }, + "@types/jest": { + "version": "27.4.1", + "resolved": "https://registry.npmjs.org/@types/jest/-/jest-27.4.1.tgz", + "integrity": "sha512-23iPJADSmicDVrWk+HT58LMJtzLAnB2AgIzplQuq/bSrGaxCrlvRFjGbXmamnnk/mAmCdLStiGqggu28ocUyiw==", + "dev": true, + "requires": { + "jest-matcher-utils": "^27.0.0", + "pretty-format": "^27.0.0" + } + }, + "@types/json-schema": { + "version": "7.0.9", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.9.tgz", + "integrity": "sha512-qcUXuemtEu+E5wZSJHNxUXeCZhAfXKQ41D+duX+VYPde7xyEVZci+/oXKJL13tnRs9lR2pr4fod59GT6/X1/yQ==", + "dev": true + }, + "@types/node": { + "version": "17.0.13", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.13.tgz", + "integrity": "sha512-Y86MAxASe25hNzlDbsviXl8jQHb0RDvKt4c40ZJQ1Don0AAL0STLZSs4N+6gLEO55pedy7r2cLwS+ZDxPm/2Bw==", + "dev": true + }, + "@types/prettier": { + "version": "2.4.4", + "resolved": "https://registry.npmjs.org/@types/prettier/-/prettier-2.4.4.tgz", + "integrity": "sha512-ReVR2rLTV1kvtlWFyuot+d1pkpG2Fw/XKE3PDAdj57rbM97ttSp9JZ2UsP+2EHTylra9cUf6JA7tGwW1INzUrA==", + "dev": true + }, + "@types/stack-utils": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.1.tgz", + "integrity": "sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw==", + "dev": true + }, + "@types/yargs": { + "version": "16.0.4", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-16.0.4.tgz", + "integrity": 
"sha512-T8Yc9wt/5LbJyCaLiHPReJa0kApcIgJ7Bn735GjItUfh08Z1pJvu8QZqb9s+mMvKV6WUQRV7K2R46YbjMXTTJw==", + "dev": true, + "requires": { + "@types/yargs-parser": "*" + } + }, + "@types/yargs-parser": { + "version": "21.0.0", + "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.0.tgz", + "integrity": "sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA==", + "dev": true + }, + "@webassemblyjs/ast": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.1.tgz", + "integrity": "sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw==", + "dev": true, + "requires": { + "@webassemblyjs/helper-numbers": "1.11.1", + "@webassemblyjs/helper-wasm-bytecode": "1.11.1" + } + }, + "@webassemblyjs/floating-point-hex-parser": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.1.tgz", + "integrity": "sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ==", + "dev": true + }, + "@webassemblyjs/helper-api-error": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.1.tgz", + "integrity": "sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg==", + "dev": true + }, + "@webassemblyjs/helper-buffer": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.1.tgz", + "integrity": "sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA==", + "dev": true + }, + "@webassemblyjs/helper-numbers": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.1.tgz", + "integrity": "sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ==", + "dev": true, + "requires": { + "@webassemblyjs/floating-point-hex-parser": "1.11.1", + "@webassemblyjs/helper-api-error": "1.11.1", + "@xtuc/long": "4.2.2" + } + }, + "@webassemblyjs/helper-wasm-bytecode": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.1.tgz", + "integrity": "sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q==", + "dev": true + }, + "@webassemblyjs/helper-wasm-section": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.1.tgz", + "integrity": "sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg==", + "dev": true, + "requires": { + "@webassemblyjs/ast": "1.11.1", + "@webassemblyjs/helper-buffer": "1.11.1", + "@webassemblyjs/helper-wasm-bytecode": "1.11.1", + "@webassemblyjs/wasm-gen": "1.11.1" + } + }, + "@webassemblyjs/ieee754": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.11.1.tgz", + "integrity": "sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ==", + "dev": true, + "requires": { + "@xtuc/ieee754": "^1.2.0" + } + }, + "@webassemblyjs/leb128": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.11.1.tgz", + "integrity": 
"sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw==", + "dev": true, + "requires": { + "@xtuc/long": "4.2.2" + } + }, + "@webassemblyjs/utf8": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.11.1.tgz", + "integrity": "sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ==", + "dev": true + }, + "@webassemblyjs/wasm-edit": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.1.tgz", + "integrity": "sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA==", + "dev": true, + "requires": { + "@webassemblyjs/ast": "1.11.1", + "@webassemblyjs/helper-buffer": "1.11.1", + "@webassemblyjs/helper-wasm-bytecode": "1.11.1", + "@webassemblyjs/helper-wasm-section": "1.11.1", + "@webassemblyjs/wasm-gen": "1.11.1", + "@webassemblyjs/wasm-opt": "1.11.1", + "@webassemblyjs/wasm-parser": "1.11.1", + "@webassemblyjs/wast-printer": "1.11.1" + } + }, + "@webassemblyjs/wasm-gen": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.1.tgz", + "integrity": "sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA==", + "dev": true, + "requires": { + "@webassemblyjs/ast": "1.11.1", + "@webassemblyjs/helper-wasm-bytecode": "1.11.1", + "@webassemblyjs/ieee754": "1.11.1", + "@webassemblyjs/leb128": "1.11.1", + "@webassemblyjs/utf8": "1.11.1" + } + }, + "@webassemblyjs/wasm-opt": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.1.tgz", + "integrity": "sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw==", + "dev": true, + "requires": { + "@webassemblyjs/ast": "1.11.1", + "@webassemblyjs/helper-buffer": "1.11.1", + "@webassemblyjs/wasm-gen": "1.11.1", + "@webassemblyjs/wasm-parser": "1.11.1" + } + }, + "@webassemblyjs/wasm-parser": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.1.tgz", + "integrity": "sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA==", + "dev": true, + "requires": { + "@webassemblyjs/ast": "1.11.1", + "@webassemblyjs/helper-api-error": "1.11.1", + "@webassemblyjs/helper-wasm-bytecode": "1.11.1", + "@webassemblyjs/ieee754": "1.11.1", + "@webassemblyjs/leb128": "1.11.1", + "@webassemblyjs/utf8": "1.11.1" + } + }, + "@webassemblyjs/wast-printer": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.11.1.tgz", + "integrity": "sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg==", + "dev": true, + "requires": { + "@webassemblyjs/ast": "1.11.1", + "@xtuc/long": "4.2.2" + } + }, + "@webpack-cli/configtest": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@webpack-cli/configtest/-/configtest-1.1.1.tgz", + "integrity": "sha512-1FBc1f9G4P/AxMqIgfZgeOTuRnwZMten8E7zap5zgpPInnCrP8D4Q81+4CWIch8i/Nf7nXjP0v6CjjbHOrXhKg==", + "dev": true, + "requires": {} + }, + "@webpack-cli/info": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@webpack-cli/info/-/info-1.4.1.tgz", + "integrity": "sha512-PKVGmazEq3oAo46Q63tpMr4HipI3OPfP7LiNOEJg963RMgT0rqheag28NCML0o3GIzA3DmxP1ZIAv9oTX1CUIA==", + "dev": true, + "requires": { + "envinfo": "^7.7.3" + } + }, + "@webpack-cli/serve": 
{ + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/@webpack-cli/serve/-/serve-1.6.1.tgz", + "integrity": "sha512-gNGTiTrjEVQ0OcVnzsRSqTxaBSr+dmTfm+qJsCDluky8uhdLWep7Gcr62QsAKHTMxjCS/8nEITsmFAhfIx+QSw==", + "dev": true, + "requires": {} + }, + "@xtuc/ieee754": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@xtuc/ieee754/-/ieee754-1.2.0.tgz", + "integrity": "sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==", + "dev": true + }, + "@xtuc/long": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@xtuc/long/-/long-4.2.2.tgz", + "integrity": "sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==", + "dev": true + }, + "abab": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/abab/-/abab-2.0.5.tgz", + "integrity": "sha512-9IK9EadsbHo6jLWIpxpR6pL0sazTXV6+SQv25ZB+F7Bj9mJNaOc4nCRabwd5M/JwmUa8idz6Eci6eKfJryPs6Q==", + "dev": true + }, + "acorn": { + "version": "8.7.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.0.tgz", + "integrity": "sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ==", + "dev": true + }, + "acorn-globals": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/acorn-globals/-/acorn-globals-6.0.0.tgz", + "integrity": "sha512-ZQl7LOWaF5ePqqcX4hLuv/bLXYQNfNWw2c0/yX/TsPRKamzHcTGQnlCjHT3TsmkOUVEPS3crCxiPfdzE/Trlhg==", + "dev": true, + "requires": { + "acorn": "^7.1.1", + "acorn-walk": "^7.1.1" + }, + "dependencies": { + "acorn": { + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", + "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", + "dev": true + } + } + }, + "acorn-import-assertions": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.8.0.tgz", + "integrity": "sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw==", + "dev": true, + "requires": {} + }, + "acorn-walk": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-7.2.0.tgz", + "integrity": "sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA==", + "dev": true + }, + "agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "dev": true, + "requires": { + "debug": "4" + } + }, + "ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "requires": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + } + }, + "ajv-keywords": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", + "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", + "dev": true, + "requires": {} + }, + "ansi-escapes": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", + 
"dev": true, + "requires": { + "type-fest": "^0.21.3" + } + }, + "ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true + }, + "ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "requires": { + "color-convert": "^1.9.0" + } + }, + "anymatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.2.tgz", + "integrity": "sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==", + "dev": true, + "requires": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + } + }, + "argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "requires": { + "sprintf-js": "~1.0.2" + } + }, + "asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=", + "dev": true + }, + "axios": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/axios/-/axios-0.25.0.tgz", + "integrity": "sha512-cD8FOb0tRH3uuEe6+evtAbgJtfxr7ly3fQjYcMcuPlgkwVS9xboaVIpcDV+cYQe+yGykgwZCs1pzjntcGa6l5g==", + "dev": true, + "requires": { + "follow-redirects": "^1.14.7" + } + }, + "babel-jest": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-27.5.1.tgz", + "integrity": "sha512-cdQ5dXjGRd0IBRATiQ4mZGlGlRE8kJpjPOixdNRdT+m3UcNqmYWN6rK6nvtXYfY3D76cb8s/O1Ss8ea24PIwcg==", + "dev": true, + "requires": { + "@jest/transform": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/babel__core": "^7.1.14", + "babel-plugin-istanbul": "^6.1.1", + "babel-preset-jest": "^27.5.1", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "slash": "^3.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": 
"sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "babel-loader": { + "version": "8.2.3", + "resolved": "https://registry.npmjs.org/babel-loader/-/babel-loader-8.2.3.tgz", + "integrity": "sha512-n4Zeta8NC3QAsuyiizu0GkmRcQ6clkV9WFUnUf1iXP//IeSKbWjofW3UHyZVwlOB4y039YQKefawyTn64Zwbuw==", + "dev": true, + "requires": { + "find-cache-dir": "^3.3.1", + "loader-utils": "^1.4.0", + "make-dir": "^3.1.0", + "schema-utils": "^2.6.5" + }, + "dependencies": { + "make-dir": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "dev": true, + "requires": { + "semver": "^6.0.0" + } + }, + "schema-utils": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.1.tgz", + "integrity": "sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg==", + "dev": true, + "requires": { + "@types/json-schema": "^7.0.5", + "ajv": "^6.12.4", + "ajv-keywords": "^3.5.2" + } + } + } + }, + "babel-plugin-dynamic-import-node": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz", + "integrity": "sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ==", + "dev": true, + "requires": { + "object.assign": "^4.1.0" + } + }, + "babel-plugin-istanbul": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz", + "integrity": "sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.0.0", + "@istanbuljs/load-nyc-config": "^1.0.0", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-instrument": "^5.0.4", + "test-exclude": "^6.0.0" + } + }, + "babel-plugin-jest-hoist": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-27.5.1.tgz", + "integrity": "sha512-50wCwD5EMNW4aRpOwtqzyZHIewTYNxLA4nhB+09d8BIssfNfzBRhkBIHiaPv1Si226TQSvp8gxAJm2iY2qs2hQ==", + "dev": true, + "requires": { + "@babel/template": "^7.3.3", + "@babel/types": "^7.3.3", + "@types/babel__core": "^7.0.0", + "@types/babel__traverse": "^7.0.6" + } + }, + "babel-plugin-polyfill-corejs2": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.3.1.tgz", + "integrity": "sha512-v7/T6EQcNfVLfcN2X8Lulb7DjprieyLWJK/zOWH5DUYcAgex9sP3h25Q+DLsX9TloXe3y1O8l2q2Jv9q8UVB9w==", + "dev": true, + "requires": { + "@babel/compat-data": "^7.13.11", + "@babel/helper-define-polyfill-provider": "^0.3.1", + "semver": "^6.1.1" + } + }, + "babel-plugin-polyfill-corejs3": { + "version": "0.5.1", + "resolved": 
"https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.5.1.tgz", + "integrity": "sha512-TihqEe4sQcb/QcPJvxe94/9RZuLQuF1+To4WqQcRvc+3J3gLCPIPgDKzGLG6zmQLfH3nn25heRuDNkS2KR4I8A==", + "dev": true, + "requires": { + "@babel/helper-define-polyfill-provider": "^0.3.1", + "core-js-compat": "^3.20.0" + } + }, + "babel-plugin-polyfill-regenerator": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.3.1.tgz", + "integrity": "sha512-Y2B06tvgHYt1x0yz17jGkGeeMr5FeKUu+ASJ+N6nB5lQ8Dapfg42i0OVrf8PNGJ3zKL4A23snMi1IRwrqqND7A==", + "dev": true, + "requires": { + "@babel/helper-define-polyfill-provider": "^0.3.1" + } + }, + "babel-preset-current-node-syntax": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz", + "integrity": "sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ==", + "dev": true, + "requires": { + "@babel/plugin-syntax-async-generators": "^7.8.4", + "@babel/plugin-syntax-bigint": "^7.8.3", + "@babel/plugin-syntax-class-properties": "^7.8.3", + "@babel/plugin-syntax-import-meta": "^7.8.3", + "@babel/plugin-syntax-json-strings": "^7.8.3", + "@babel/plugin-syntax-logical-assignment-operators": "^7.8.3", + "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", + "@babel/plugin-syntax-numeric-separator": "^7.8.3", + "@babel/plugin-syntax-object-rest-spread": "^7.8.3", + "@babel/plugin-syntax-optional-catch-binding": "^7.8.3", + "@babel/plugin-syntax-optional-chaining": "^7.8.3", + "@babel/plugin-syntax-top-level-await": "^7.8.3" + } + }, + "babel-preset-jest": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-27.5.1.tgz", + "integrity": "sha512-Nptf2FzlPCWYuJg41HBqXVT8ym6bXOevuCTbhxlUpjwtysGaIWFvDEjp4y+G7fl13FgOdjs7P/DmErqH7da0Ag==", + "dev": true, + "requires": { + "babel-plugin-jest-hoist": "^27.5.1", + "babel-preset-current-node-syntax": "^1.0.0" + } + }, + "balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "big.js": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/big.js/-/big.js-5.2.2.tgz", + "integrity": "sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ==", + "dev": true + }, + "binary-extensions": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", + "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", + "dev": true, + "optional": true + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "requires": { + "fill-range": "^7.0.1" + } + }, + "browser-process-hrtime": { 
+ "version": "1.0.0", + "resolved": "https://registry.npmjs.org/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz", + "integrity": "sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow==", + "dev": true + }, + "browserslist": { + "version": "4.19.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.19.1.tgz", + "integrity": "sha512-u2tbbG5PdKRTUoctO3NBD8FQ5HdPh1ZXPHzp1rwaa5jTc+RV9/+RlWiAIKmjRPQF+xbGM9Kklj5bZQFa2s/38A==", + "dev": true, + "requires": { + "caniuse-lite": "^1.0.30001286", + "electron-to-chromium": "^1.4.17", + "escalade": "^3.1.1", + "node-releases": "^2.0.1", + "picocolors": "^1.0.0" + } + }, + "bser": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/bser/-/bser-2.1.1.tgz", + "integrity": "sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==", + "dev": true, + "requires": { + "node-int64": "^0.4.0" + } + }, + "buffer-from": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", + "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", + "dev": true + }, + "call-bind": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", + "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "dev": true, + "requires": { + "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.2" + } + }, + "callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true + }, + "camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true + }, + "caniuse-lite": { + "version": "1.0.30001303", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001303.tgz", + "integrity": "sha512-/Mqc1oESndUNszJP0kx0UaQU9kEv9nNtJ7Kn8AdA0mNnH8eR1cj0kG+NbNuC1Wq/b21eA8prhKRA3bbkjONegQ==", + "dev": true + }, + "chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } + }, + "char-regex": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/char-regex/-/char-regex-1.0.2.tgz", + "integrity": "sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==", + "dev": true + }, + "chokidar": { + "version": "3.5.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", + "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", + "dev": true, + "optional": true, + "requires": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "fsevents": "~2.3.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + } + }, + "chrome-trace-event": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz", + 
"integrity": "sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg==", + "dev": true + }, + "ci-info": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.3.0.tgz", + "integrity": "sha512-riT/3vI5YpVH6/qomlDnJow6TBee2PBKSEpx3O32EGPYbWGIRsIlGRms3Sm74wYE1JMo8RnO04Hb12+v1J5ICw==", + "dev": true + }, + "cjs-module-lexer": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.2.2.tgz", + "integrity": "sha512-cOU9usZw8/dXIXKtwa8pM0OTJQuJkxMN6w30csNRUerHfeQ5R6U3kkU/FtJeIf3M202OHfY2U8ccInBG7/xogA==", + "dev": true + }, + "cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dev": true, + "requires": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "clone-deep": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/clone-deep/-/clone-deep-4.0.1.tgz", + "integrity": "sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==", + "dev": true, + "requires": { + "is-plain-object": "^2.0.4", + "kind-of": "^6.0.2", + "shallow-clone": "^3.0.0" + } + }, + "co": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", + "integrity": "sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ=", + "dev": true + }, + "collect-v8-coverage": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz", + "integrity": "sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg==", + "dev": true + }, + "color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "requires": { + "color-name": "1.1.3" + } + }, + "color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", + "dev": true + }, + "colorette": { + "version": "2.0.16", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.16.tgz", + "integrity": "sha512-hUewv7oMjCp+wkBv5Rm0v87eJhq4woh5rSR+42YSQJKecCqgIqNkZ6lAlQms/BwHPJA5NKMRlpxPRv0n8HQW6g==", + "dev": true + }, + "combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dev": true, + "requires": { + "delayed-stream": "~1.0.0" + } + }, + "commander": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", + "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", + "dev": true + }, + "commondir": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", + "integrity": "sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs=", + "dev": true + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", + "dev": true + }, + "convert-source-map": { + "version": "1.8.0", + "resolved": 
"https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.8.0.tgz", + "integrity": "sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA==", + "dev": true, + "requires": { + "safe-buffer": "~5.1.1" + } + }, + "core-js-compat": { + "version": "3.20.3", + "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.20.3.tgz", + "integrity": "sha512-c8M5h0IkNZ+I92QhIpuSijOxGAcj3lgpsWdkCqmUTZNwidujF4r3pi6x1DCN+Vcs5qTS2XWWMfWSuCqyupX8gw==", + "dev": true, + "requires": { + "browserslist": "^4.19.1", + "semver": "7.0.0" + }, + "dependencies": { + "semver": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.0.0.tgz", + "integrity": "sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==", + "dev": true + } + } + }, + "cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "requires": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + } + }, + "cssom": { + "version": "0.4.4", + "resolved": "https://registry.npmjs.org/cssom/-/cssom-0.4.4.tgz", + "integrity": "sha512-p3pvU7r1MyyqbTk+WbNJIgJjG2VmTIaB10rI93LzVPrmDJKkzKYMtxxyAvQXR/NS6otuzveI7+7BBq3SjBS2mw==", + "dev": true + }, + "cssstyle": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-2.3.0.tgz", + "integrity": "sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A==", + "dev": true, + "requires": { + "cssom": "~0.3.6" + }, + "dependencies": { + "cssom": { + "version": "0.3.8", + "resolved": "https://registry.npmjs.org/cssom/-/cssom-0.3.8.tgz", + "integrity": "sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg==", + "dev": true + } + } + }, + "data-urls": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-2.0.0.tgz", + "integrity": "sha512-X5eWTSXO/BJmpdIKCRuKUgSCgAN0OwliVK3yPKbwIWU1Tdw5BRajxlzMidvh+gwko9AfQ9zIj52pzF91Q3YAvQ==", + "dev": true, + "requires": { + "abab": "^2.0.3", + "whatwg-mimetype": "^2.3.0", + "whatwg-url": "^8.0.0" + } + }, + "debug": { + "version": "4.3.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.3.tgz", + "integrity": "sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q==", + "dev": true, + "requires": { + "ms": "2.1.2" + } + }, + "decimal.js": { + "version": "10.3.1", + "resolved": "https://registry.npmjs.org/decimal.js/-/decimal.js-10.3.1.tgz", + "integrity": "sha512-V0pfhfr8suzyPGOx3nmq4aHqabehUZn6Ch9kyFpV79TGDTWFmHqUqXdabR7QHqxzrYolF4+tVmJhUG4OURg5dQ==", + "dev": true + }, + "dedent": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/dedent/-/dedent-0.7.0.tgz", + "integrity": "sha1-JJXduvbrh0q7Dhvp3yLS5aVEMmw=", + "dev": true + }, + "deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true + }, + "deepmerge": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.2.2.tgz", + "integrity": "sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==", + "dev": true + }, + "define-properties": { + 
"version": "1.1.3", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", + "integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==", + "dev": true, + "requires": { + "object-keys": "^1.0.12" + } + }, + "delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=", + "dev": true + }, + "detect-newline": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", + "integrity": "sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==", + "dev": true + }, + "diff-sequences": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-27.5.1.tgz", + "integrity": "sha512-k1gCAXAsNgLwEL+Y8Wvl+M6oEFj5bgazfZULpS5CneoPPXRaCCW7dm+q21Ky2VEE5X+VeRDBVg1Pcvvsr4TtNQ==", + "dev": true + }, + "domexception": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/domexception/-/domexception-2.0.1.tgz", + "integrity": "sha512-yxJ2mFy/sibVQlu5qHjOkf9J3K6zgmCxgJ94u2EdvDOV09H+32LtRswEcUsmUWN72pVLOEnTSRaIVVzVQgS0dg==", + "dev": true, + "requires": { + "webidl-conversions": "^5.0.0" + }, + "dependencies": { + "webidl-conversions": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-5.0.0.tgz", + "integrity": "sha512-VlZwKPCkYKxQgeSbH5EyngOmRp7Ww7I9rQLERETtf5ofd9pGeswWiOtogpEO850jziPRarreGxn5QIiTqpb2wA==", + "dev": true + } + } + }, + "electron-to-chromium": { + "version": "1.4.57", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.57.tgz", + "integrity": "sha512-FNC+P5K1n6pF+M0zIK+gFCoXcJhhzDViL3DRIGy2Fv5PohuSES1JHR7T+GlwxSxlzx4yYbsuzCZvHxcBSRCIOw==", + "dev": true + }, + "emittery": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/emittery/-/emittery-0.8.1.tgz", + "integrity": "sha512-uDfvUjVrfGJJhymx/kz6prltenw1u7WrCg1oa94zYY8xxVpLLUu045LAT0dhDZdXG58/EpPL/5kA180fQ/qudg==", + "dev": true + }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "emojis-list": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/emojis-list/-/emojis-list-3.0.0.tgz", + "integrity": "sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==", + "dev": true + }, + "enhanced-resolve": { + "version": "5.8.3", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.8.3.tgz", + "integrity": "sha512-EGAbGvH7j7Xt2nc0E7D99La1OiEs8LnyimkRgwExpUMScN6O+3x9tIWs7PLQZVNx4YD+00skHXPXi1yQHpAmZA==", + "dev": true, + "requires": { + "graceful-fs": "^4.2.4", + "tapable": "^2.2.0" + } + }, + "envinfo": { + "version": "7.8.1", + "resolved": "https://registry.npmjs.org/envinfo/-/envinfo-7.8.1.tgz", + "integrity": "sha512-/o+BXHmB7ocbHEAs6F2EnG0ogybVVUdkRunTT2glZU9XAaGmhqskrvKwqXuDfNjEO0LZKWdejEEpnq8aM0tOaw==", + "dev": true + }, + "error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "requires": { + "is-arrayish": "^0.2.1" + } + }, + 
"es-module-lexer": { + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-0.9.3.tgz", + "integrity": "sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ==", + "dev": true + }, + "escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "dev": true + }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", + "dev": true + }, + "escodegen": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-2.0.0.tgz", + "integrity": "sha512-mmHKys/C8BFUGI+MAWNcSYoORYLMdPzjrknd2Vc+bUsjN5bXcr8EhrNB+UTqfL1y3I9c4fw2ihgtMPQLBRiQxw==", + "dev": true, + "requires": { + "esprima": "^4.0.1", + "estraverse": "^5.2.0", + "esutils": "^2.0.2", + "optionator": "^0.8.1", + "source-map": "~0.6.1" + }, + "dependencies": { + "estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "optional": true + } + } + }, + "eslint-scope": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", + "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", + "dev": true, + "requires": { + "esrecurse": "^4.3.0", + "estraverse": "^4.1.1" + } + }, + "esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true + }, + "esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "requires": { + "estraverse": "^5.2.0" + }, + "dependencies": { + "estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true + } + } + }, + "estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "dev": true + }, + "esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true + }, + "events": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", + "dev": true + }, + "execa": { + 
"version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "dev": true, + "requires": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + } + }, + "exit": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz", + "integrity": "sha1-BjJjj42HfMghB9MKD/8aF8uhzQw=", + "dev": true + }, + "expect": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/expect/-/expect-27.5.1.tgz", + "integrity": "sha512-E1q5hSUG2AmYQwQJ041nvgpkODHQvB+RKlB4IYdru6uJsyFTRyZAP463M+1lINorwbqAmUggi6+WwkD8lCS/Dw==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + "jest-get-type": "^27.5.1", + "jest-matcher-utils": "^27.5.1", + "jest-message-util": "^27.5.1" + } + }, + "fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true + }, + "fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true + }, + "fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", + "dev": true + }, + "fastest-levenshtein": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/fastest-levenshtein/-/fastest-levenshtein-1.0.12.tgz", + "integrity": "sha512-On2N+BpYJ15xIC974QNVuYGMOlEVt4s0EOI3wwMqOmK1fdDY+FN/zltPV8vosq4ad4c/gJ1KHScUn/6AWIgiow==", + "dev": true + }, + "fb-watchman": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.1.tgz", + "integrity": "sha512-DkPJKQeY6kKwmuMretBhr7G6Vodr7bFwDYTXIkfG1gjvNpaxBTQV3PbXg6bR1c1UP4jPOX0jHUbbHANL9vRjVg==", + "dev": true, + "requires": { + "bser": "2.1.1" + } + }, + "fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "requires": { + "to-regex-range": "^5.0.1" + } + }, + "find-cache-dir": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.2.tgz", + "integrity": "sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig==", + "dev": true, + "requires": { + "commondir": "^1.0.1", + "make-dir": "^3.0.2", + "pkg-dir": "^4.1.0" + }, + "dependencies": { + "make-dir": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "dev": true, + "requires": { + "semver": "^6.0.0" + } + } + } + }, + "find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": 
"sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "requires": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + } + }, + "follow-redirects": { + "version": "1.14.8", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.8.tgz", + "integrity": "sha512-1x0S9UVJHsQprFcEC/qnNzBLcIxsjAV905f/UkQxbclCsoTWlacCNOpQa/anodLl2uaEKFhfWOvM2Qg77+15zA==", + "dev": true + }, + "form-data": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz", + "integrity": "sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==", + "dev": true, + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + } + }, + "fs-readdir-recursive": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/fs-readdir-recursive/-/fs-readdir-recursive-1.1.0.tgz", + "integrity": "sha512-GNanXlVr2pf02+sPN40XN8HG+ePaNcvM0q5mZBd668Obwb0yD5GiUbZOFgwn8kGMY6I3mdyDJzieUy3PTYyTRA==", + "dev": true + }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "dev": true + }, + "fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "optional": true + }, + "function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true + }, + "gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true + }, + "get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true + }, + "get-intrinsic": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.1.tgz", + "integrity": "sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==", + "dev": true, + "requires": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1" + } + }, + "get-package-type": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz", + "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==", + "dev": true + }, + "get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true + }, + "glob": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", + "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + 
"once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "optional": true, + "requires": { + "is-glob": "^4.0.1" + } + }, + "glob-to-regexp": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz", + "integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==", + "dev": true + }, + "globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "dev": true + }, + "graceful-fs": { + "version": "4.2.9", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.9.tgz", + "integrity": "sha512-NtNxqUcXgpW2iMrfqSfR73Glt39K+BLwWsPs94yR63v45T0Wbej7eRmL5cWfwEgqXnmjQp3zaJTshdRW/qC2ZQ==", + "dev": true + }, + "has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "requires": { + "function-bind": "^1.1.1" + } + }, + "has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "dev": true + }, + "has-symbols": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz", + "integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==", + "dev": true + }, + "html-encoding-sniffer": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz", + "integrity": "sha512-D5JbOMBIR/TVZkubHT+OyT2705QvogUW4IBn6nHd756OwieSF9aDYFj4dv6HHEVGYbHaLETa3WggZYWWMyy3ZQ==", + "dev": true, + "requires": { + "whatwg-encoding": "^1.0.5" + } + }, + "html-escaper": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", + "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", + "dev": true + }, + "http-proxy-agent": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz", + "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==", + "dev": true, + "requires": { + "@tootallnate/once": "1", + "agent-base": "6", + "debug": "4" + } + }, + "https-proxy-agent": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz", + "integrity": "sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA==", + "dev": true, + "requires": { + "agent-base": "6", + "debug": "4" + } + }, + "human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "dev": true + }, + "iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": 
"sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "dev": true, + "requires": { + "safer-buffer": ">= 2.1.2 < 3" + } + }, + "import-local": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.1.0.tgz", + "integrity": "sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==", + "dev": true, + "requires": { + "pkg-dir": "^4.2.0", + "resolve-cwd": "^3.0.0" + } + }, + "imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", + "dev": true + }, + "inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dev": true, + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "interpret": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/interpret/-/interpret-2.2.0.tgz", + "integrity": "sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==", + "dev": true + }, + "is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", + "dev": true + }, + "is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "optional": true, + "requires": { + "binary-extensions": "^2.0.0" + } + }, + "is-core-module": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.8.1.tgz", + "integrity": "sha512-SdNCUs284hr40hFTFP6l0IfZ/RSrMXF3qgoRHd3/79unUTvrFO/JoXwkGm+5J/Oe3E/b5GsnG330uUNgRpu1PA==", + "dev": true, + "requires": { + "has": "^1.0.3" + } + }, + "is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", + "dev": true, + "optional": true + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true + }, + "is-generator-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-generator-fn/-/is-generator-fn-2.1.0.tgz", + "integrity": "sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==", + "dev": true + }, + "is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "optional": true, + "requires": { + "is-extglob": "^2.1.1" + } + }, + "is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": 
"sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true + }, + "is-plain-object": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", + "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", + "dev": true, + "requires": { + "isobject": "^3.0.1" + } + }, + "is-potential-custom-element-name": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz", + "integrity": "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==", + "dev": true + }, + "is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true + }, + "is-typedarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=", + "dev": true + }, + "isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", + "dev": true + }, + "isobject": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", + "integrity": "sha1-TkMekrEalzFjaqH5yNHMvP2reN8=", + "dev": true + }, + "istanbul-lib-coverage": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz", + "integrity": "sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw==", + "dev": true + }, + "istanbul-lib-instrument": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-5.1.0.tgz", + "integrity": "sha512-czwUz525rkOFDJxfKK6mYfIs9zBKILyrZQxjz3ABhjQXhbhFsSbo1HW/BFcsDnfJYJWA6thRR5/TUY2qs5W99Q==", + "dev": true, + "requires": { + "@babel/core": "^7.12.3", + "@babel/parser": "^7.14.7", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-coverage": "^3.2.0", + "semver": "^6.3.0" + } + }, + "istanbul-lib-report": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz", + "integrity": "sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw==", + "dev": true, + "requires": { + "istanbul-lib-coverage": "^3.0.0", + "make-dir": "^3.0.0", + "supports-color": "^7.1.0" + }, + "dependencies": { + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "make-dir": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "dev": true, + "requires": { + "semver": "^6.0.0" + } + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + 
"istanbul-lib-source-maps": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz", + "integrity": "sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==", + "dev": true, + "requires": { + "debug": "^4.1.1", + "istanbul-lib-coverage": "^3.0.0", + "source-map": "^0.6.1" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + } + } + }, + "istanbul-reports": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.4.tgz", + "integrity": "sha512-r1/DshN4KSE7xWEknZLLLLDn5CJybV3nw01VTkp6D5jzLuELlcbudfj/eSQFvrKsJuTVCGnePO7ho82Nw9zzfw==", + "dev": true, + "requires": { + "html-escaper": "^2.0.0", + "istanbul-lib-report": "^3.0.0" + } + }, + "jest": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest/-/jest-27.5.1.tgz", + "integrity": "sha512-Yn0mADZB89zTtjkPJEXwrac3LHudkQMR+Paqa8uxJHCBr9agxztUifWCyiYrjhMPBoUVBjyny0I7XH6ozDr7QQ==", + "dev": true, + "requires": { + "@jest/core": "^27.5.1", + "import-local": "^3.0.2", + "jest-cli": "^27.5.1" + } + }, + "jest-changed-files": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-27.5.1.tgz", + "integrity": "sha512-buBLMiByfWGCoMsLLzGUUSpAmIAGnbR2KJoMN10ziLhOLvP4e0SlypHnAel8iqQXTrcbmfEY9sSqae5sgUsTvw==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + "execa": "^5.0.0", + "throat": "^6.0.1" + } + }, + "jest-circus": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-circus/-/jest-circus-27.5.1.tgz", + "integrity": "sha512-D95R7x5UtlMA5iBYsOHFFbMD/GVA4R/Kdq15f7xYWUfWHBto9NYRsOvnSauTgdF+ogCpJ4tyKOXhUifxS65gdw==", + "dev": true, + "requires": { + "@jest/environment": "^27.5.1", + "@jest/test-result": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "co": "^4.6.0", + "dedent": "^0.7.0", + "expect": "^27.5.1", + "is-generator-fn": "^2.0.0", + "jest-each": "^27.5.1", + "jest-matcher-utils": "^27.5.1", + "jest-message-util": "^27.5.1", + "jest-runtime": "^27.5.1", + "jest-snapshot": "^27.5.1", + "jest-util": "^27.5.1", + "pretty-format": "^27.5.1", + "slash": "^3.0.0", + "stack-utils": "^2.0.3", + "throat": "^6.0.1" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": 
"https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "jest-cli": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-27.5.1.tgz", + "integrity": "sha512-Hc6HOOwYq4/74/c62dEE3r5elx8wjYqxY0r0G/nFrLDPMFRu6RA/u8qINOIkvhxG7mMQ5EJsOGfRpI8L6eFUVw==", + "dev": true, + "requires": { + "@jest/core": "^27.5.1", + "@jest/test-result": "^27.5.1", + "@jest/types": "^27.5.1", + "chalk": "^4.0.0", + "exit": "^0.1.2", + "graceful-fs": "^4.2.9", + "import-local": "^3.0.2", + "jest-config": "^27.5.1", + "jest-util": "^27.5.1", + "jest-validate": "^27.5.1", + "prompts": "^2.0.1", + "yargs": "^16.2.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "jest-config": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-config/-/jest-config-27.5.1.tgz", + "integrity": 
"sha512-5sAsjm6tGdsVbW9ahcChPAFCk4IlkQUknH5AvKjuLTSlcO/wCZKyFdn7Rg0EkC+OGgWODEy2hDpWB1PgzH0JNA==", + "dev": true, + "requires": { + "@babel/core": "^7.8.0", + "@jest/test-sequencer": "^27.5.1", + "@jest/types": "^27.5.1", + "babel-jest": "^27.5.1", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "deepmerge": "^4.2.2", + "glob": "^7.1.1", + "graceful-fs": "^4.2.9", + "jest-circus": "^27.5.1", + "jest-environment-jsdom": "^27.5.1", + "jest-environment-node": "^27.5.1", + "jest-get-type": "^27.5.1", + "jest-jasmine2": "^27.5.1", + "jest-regex-util": "^27.5.1", + "jest-resolve": "^27.5.1", + "jest-runner": "^27.5.1", + "jest-util": "^27.5.1", + "jest-validate": "^27.5.1", + "micromatch": "^4.0.4", + "parse-json": "^5.2.0", + "pretty-format": "^27.5.1", + "slash": "^3.0.0", + "strip-json-comments": "^3.1.1" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "jest-diff": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-27.5.1.tgz", + "integrity": "sha512-m0NvkX55LDt9T4mctTEgnZk3fmEg3NRYutvMPWM/0iPnkFj2wIeF45O1718cMSOFO1vINkqmxqD8vE37uTEbqw==", + "dev": true, + "requires": { + "chalk": "^4.0.0", + "diff-sequences": "^27.5.1", + "jest-get-type": "^27.5.1", + "pretty-format": "^27.5.1" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.2", + 
"resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "jest-docblock": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-docblock/-/jest-docblock-27.5.1.tgz", + "integrity": "sha512-rl7hlABeTsRYxKiUfpHrQrG4e2obOiTQWfMEH3PxPjOtdsfLQO4ReWSZaQ7DETm4xu07rl4q/h4zcKXyU0/OzQ==", + "dev": true, + "requires": { + "detect-newline": "^3.0.0" + } + }, + "jest-each": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-each/-/jest-each-27.5.1.tgz", + "integrity": "sha512-1Ff6p+FbhT/bXQnEouYy00bkNSY7OUpfIcmdl8vZ31A1UUaurOLPA8a8BbJOF2RDUElwJhmeaV7LnagI+5UwNQ==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + "chalk": "^4.0.0", + "jest-get-type": "^27.5.1", + "jest-util": "^27.5.1", + "pretty-format": "^27.5.1" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + 
"supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "jest-environment-jsdom": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-environment-jsdom/-/jest-environment-jsdom-27.5.1.tgz", + "integrity": "sha512-TFBvkTC1Hnnnrka/fUb56atfDtJ9VMZ94JkjTbggl1PEpwrYtUBKMezB3inLmWqQsXYLcMwNoDQwoBTAvFfsfw==", + "dev": true, + "requires": { + "@jest/environment": "^27.5.1", + "@jest/fake-timers": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/node": "*", + "jest-mock": "^27.5.1", + "jest-util": "^27.5.1", + "jsdom": "^16.6.0" + } + }, + "jest-environment-node": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-27.5.1.tgz", + "integrity": "sha512-Jt4ZUnxdOsTGwSRAfKEnE6BcwsSPNOijjwifq5sDFSA2kesnXTvNqKHYgM0hDq3549Uf/KzdXNYn4wMZJPlFLw==", + "dev": true, + "requires": { + "@jest/environment": "^27.5.1", + "@jest/fake-timers": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/node": "*", + "jest-mock": "^27.5.1", + "jest-util": "^27.5.1" + } + }, + "jest-get-type": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-27.5.1.tgz", + "integrity": "sha512-2KY95ksYSaK7DMBWQn6dQz3kqAf3BB64y2udeG+hv4KfSOb9qwcYQstTJc1KCbsix+wLZWZYN8t7nwX3GOBLRw==", + "dev": true + }, + "jest-haste-map": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-27.5.1.tgz", + "integrity": "sha512-7GgkZ4Fw4NFbMSDSpZwXeBiIbx+t/46nJ2QitkOjvwPYyZmqttu2TDSimMHP1EkPOi4xUZAN1doE5Vd25H4Jng==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + "@types/graceful-fs": "^4.1.2", + "@types/node": "*", + "anymatch": "^3.0.3", + "fb-watchman": "^2.0.0", + "fsevents": "^2.3.2", + "graceful-fs": "^4.2.9", + "jest-regex-util": "^27.5.1", + "jest-serializer": "^27.5.1", + "jest-util": "^27.5.1", + "jest-worker": "^27.5.1", + "micromatch": "^4.0.4", + "walker": "^1.0.7" + } + }, + "jest-jasmine2": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-jasmine2/-/jest-jasmine2-27.5.1.tgz", + "integrity": "sha512-jtq7VVyG8SqAorDpApwiJJImd0V2wv1xzdheGHRGyuT7gZm6gG47QEskOlzsN1PG/6WNaCo5pmwMHDf3AkG2pQ==", + "dev": true, + "requires": { + "@jest/environment": "^27.5.1", + "@jest/source-map": "^27.5.1", + "@jest/test-result": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "co": "^4.6.0", + "expect": "^27.5.1", + "is-generator-fn": "^2.0.0", + "jest-each": "^27.5.1", + "jest-matcher-utils": "^27.5.1", + "jest-message-util": "^27.5.1", + "jest-runtime": "^27.5.1", + "jest-snapshot": "^27.5.1", + "jest-util": "^27.5.1", + "pretty-format": "^27.5.1", + "throat": "^6.0.1" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + 
"supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "jest-leak-detector": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-27.5.1.tgz", + "integrity": "sha512-POXfWAMvfU6WMUXftV4HolnJfnPOGEu10fscNCA76KBpRRhcMN2c8d3iT2pxQS3HLbA+5X4sOUPzYO2NUyIlHQ==", + "dev": true, + "requires": { + "jest-get-type": "^27.5.1", + "pretty-format": "^27.5.1" + } + }, + "jest-matcher-utils": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-27.5.1.tgz", + "integrity": "sha512-z2uTx/T6LBaCoNWNFWwChLBKYxTMcGBRjAt+2SbP929/Fflb9aa5LGma654Rz8z9HLxsrUaYzxE9T/EFIL/PAw==", + "dev": true, + "requires": { + "chalk": "^4.0.0", + "jest-diff": "^27.5.1", + "jest-get-type": "^27.5.1", + "pretty-format": "^27.5.1" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": 
"sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "jest-message-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-27.5.1.tgz", + "integrity": "sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.12.13", + "@jest/types": "^27.5.1", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "micromatch": "^4.0.4", + "pretty-format": "^27.5.1", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "jest-mock": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-mock/-/jest-mock-27.5.1.tgz", + "integrity": "sha512-K4jKbY1d4ENhbrG2zuPWaQBvDly+iZ2yAW+T1fATN78hc0sInwn7wZB8XtlNnvHug5RMwV897Xm4LqmPM4e2Og==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + "@types/node": "*" + } + }, + "jest-pnp-resolver": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/jest-pnp-resolver/-/jest-pnp-resolver-1.2.2.tgz", + "integrity": "sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w==", + "dev": true, + "requires": {} + }, + "jest-regex-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-27.5.1.tgz", + "integrity": 
"sha512-4bfKq2zie+x16okqDXjXn9ql2B0dScQu+vcwe4TvFVhkVyuWLqpZrZtXxLLWoXYgn0E87I6r6GRYHF7wFZBUvg==", + "dev": true + }, + "jest-resolve": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-resolve/-/jest-resolve-27.5.1.tgz", + "integrity": "sha512-FFDy8/9E6CV83IMbDpcjOhumAQPDyETnU2KZ1O98DwTnz8AOBsW/Xv3GySr1mOZdItLR+zDZ7I/UdTFbgSOVCw==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^27.5.1", + "jest-pnp-resolver": "^1.2.2", + "jest-util": "^27.5.1", + "jest-validate": "^27.5.1", + "resolve": "^1.20.0", + "resolve.exports": "^1.1.0", + "slash": "^3.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "jest-resolve-dependencies": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-27.5.1.tgz", + "integrity": "sha512-QQOOdY4PE39iawDn5rzbIePNigfe5B9Z91GDD1ae/xNDlu9kaat8QQ5EKnNmVWPV54hUdxCVwwj6YMgR2O7IOg==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + "jest-regex-util": "^27.5.1", + "jest-snapshot": "^27.5.1" + } + }, + "jest-runner": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-27.5.1.tgz", + "integrity": "sha512-g4NPsM4mFCOwFKXO4p/H/kWGdJp9V8kURY2lX8Me2drgXqG7rrZAx5kv+5H7wtt/cdFIjhqYx1HrlqWHaOvDaQ==", + "dev": true, + "requires": { + "@jest/console": "^27.5.1", + "@jest/environment": "^27.5.1", + "@jest/test-result": "^27.5.1", + "@jest/transform": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "emittery": "^0.8.1", 
+ "graceful-fs": "^4.2.9", + "jest-docblock": "^27.5.1", + "jest-environment-jsdom": "^27.5.1", + "jest-environment-node": "^27.5.1", + "jest-haste-map": "^27.5.1", + "jest-leak-detector": "^27.5.1", + "jest-message-util": "^27.5.1", + "jest-resolve": "^27.5.1", + "jest-runtime": "^27.5.1", + "jest-util": "^27.5.1", + "jest-worker": "^27.5.1", + "source-map-support": "^0.5.6", + "throat": "^6.0.1" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "jest-runtime": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-27.5.1.tgz", + "integrity": "sha512-o7gxw3Gf+H2IGt8fv0RiyE1+r83FJBRruoA+FXrlHw6xEyBsU8ugA6IPfTdVyA0w8HClpbK+DGJxH59UrNMx8A==", + "dev": true, + "requires": { + "@jest/environment": "^27.5.1", + "@jest/fake-timers": "^27.5.1", + "@jest/globals": "^27.5.1", + "@jest/source-map": "^27.5.1", + "@jest/test-result": "^27.5.1", + "@jest/transform": "^27.5.1", + "@jest/types": "^27.5.1", + "chalk": "^4.0.0", + "cjs-module-lexer": "^1.0.0", + "collect-v8-coverage": "^1.0.0", + "execa": "^5.0.0", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^27.5.1", + "jest-message-util": "^27.5.1", + "jest-mock": "^27.5.1", + "jest-regex-util": "^27.5.1", + "jest-resolve": "^27.5.1", + "jest-snapshot": "^27.5.1", + "jest-util": "^27.5.1", + "slash": "^3.0.0", + "strip-bom": "^4.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": 
"sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "jest-serializer": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-serializer/-/jest-serializer-27.5.1.tgz", + "integrity": "sha512-jZCyo6iIxO1aqUxpuBlwTDMkzOAJS4a3eYz3YzgxxVQFwLeSA7Jfq5cbqCY+JLvTDrWirgusI/0KwxKMgrdf7w==", + "dev": true, + "requires": { + "@types/node": "*", + "graceful-fs": "^4.2.9" + } + }, + "jest-snapshot": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-27.5.1.tgz", + "integrity": "sha512-yYykXI5a0I31xX67mgeLw1DZ0bJB+gpq5IpSuCAoyDi0+BhgU/RIrL+RTzDmkNTchvDFWKP8lp+w/42Z3us5sA==", + "dev": true, + "requires": { + "@babel/core": "^7.7.2", + "@babel/generator": "^7.7.2", + "@babel/plugin-syntax-typescript": "^7.7.2", + "@babel/traverse": "^7.7.2", + "@babel/types": "^7.0.0", + "@jest/transform": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/babel__traverse": "^7.0.4", + "@types/prettier": "^2.1.5", + "babel-preset-current-node-syntax": "^1.0.0", + "chalk": "^4.0.0", + "expect": "^27.5.1", + "graceful-fs": "^4.2.9", + "jest-diff": "^27.5.1", + "jest-get-type": "^27.5.1", + "jest-haste-map": "^27.5.1", + "jest-matcher-utils": "^27.5.1", + "jest-message-util": "^27.5.1", + "jest-util": "^27.5.1", + "natural-compare": "^1.4.0", + "pretty-format": "^27.5.1", + "semver": "^7.3.2" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "semver": { + "version": "7.3.5", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", + "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "dev": true, + "requires": { + "lru-cache": "^6.0.0" + } + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "jest-validate": { + "version": "27.5.1", + 
"resolved": "https://registry.npmjs.org/jest-validate/-/jest-validate-27.5.1.tgz", + "integrity": "sha512-thkNli0LYTmOI1tDB3FI1S1RTp/Bqyd9pTarJwL87OIBFuqEb5Apv5EaApEudYg4g86e3CT6kM0RowkhtEnCBQ==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + "camelcase": "^6.2.0", + "chalk": "^4.0.0", + "jest-get-type": "^27.5.1", + "leven": "^3.1.0", + "pretty-format": "^27.5.1" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "camelcase": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "dev": true + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "jest-watcher": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-watcher/-/jest-watcher-27.5.1.tgz", + "integrity": "sha512-z676SuD6Z8o8qbmEGhoEUFOM1+jfEiL3DXHK/xgEiG2EyNYfFG60jluWcupY6dATjfEsKQuibReS1djInQnoVw==", + "dev": true, + "requires": { + "@jest/test-result": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/node": "*", + "ansi-escapes": "^4.2.1", + "chalk": "^4.0.0", + "jest-util": "^27.5.1", + "string-length": "^4.0.1" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "jest-worker": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-27.5.1.tgz", + "integrity": "sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==", + "dev": true, + "requires": { + "@types/node": "*", + "merge-stream": "^2.0.0", + "supports-color": "^8.0.0" + }, + "dependencies": { + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dev": true, + "requires": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + } + }, + "jsdom": { + "version": "16.7.0", + "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-16.7.0.tgz", + "integrity": "sha512-u9Smc2G1USStM+s/x1ru5Sxrl6mPYCbByG1U/hUmqaVsm4tbNyS7CicOSRyuGQYZhTu0h84qkZZQ/I+dzizSVw==", + "dev": true, + "requires": { + "abab": "^2.0.5", + "acorn": "^8.2.4", + "acorn-globals": "^6.0.0", + "cssom": "^0.4.4", + "cssstyle": "^2.3.0", + "data-urls": "^2.0.0", + "decimal.js": "^10.2.1", + "domexception": "^2.0.1", + "escodegen": "^2.0.0", + "form-data": "^3.0.0", + "html-encoding-sniffer": "^2.0.1", + "http-proxy-agent": "^4.0.1", + "https-proxy-agent": "^5.0.0", + "is-potential-custom-element-name": "^1.0.1", + "nwsapi": "^2.2.0", + "parse5": "6.0.1", + "saxes": "^5.0.1", + "symbol-tree": "^3.2.4", + "tough-cookie": "^4.0.0", + "w3c-hr-time": "^1.0.2", + "w3c-xmlserializer": "^2.0.0", + "webidl-conversions": "^6.1.0", + "whatwg-encoding": "^1.0.5", + "whatwg-mimetype": "^2.3.0", + "whatwg-url": "^8.5.0", + "ws": "^7.4.6", + "xml-name-validator": "^3.0.0" + } + 
}, + "jsesc": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", + "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", + "dev": true + }, + "json-parse-better-errors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz", + "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==", + "dev": true + }, + "json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true + }, + "json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true + }, + "jwt-decode": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/jwt-decode/-/jwt-decode-3.1.2.tgz", + "integrity": "sha512-UfpWE/VZn0iP50d8cz9NrZLM9lSWhcJ+0Gt/nm4by88UL+J1SiKN8/5dkjMmbEzwL2CAe+67GsegCbIKtbp75A==" + }, + "kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "dev": true + }, + "kleur": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", + "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==", + "dev": true + }, + "leven": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", + "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", + "dev": true + }, + "levn": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", + "integrity": "sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=", + "dev": true, + "requires": { + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2" + } + }, + "lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true + }, + "loader-runner": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/loader-runner/-/loader-runner-4.2.0.tgz", + "integrity": "sha512-92+huvxMvYlMzMt0iIOukcwYBFpkYJdpl2xsZ7LrlayO7E8SOv+JJUEK17B/dJIHAOLMfh2dZZ/Y18WgmGtYNw==", + "dev": true + }, + "loader-utils": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-1.4.2.tgz", + "integrity": "sha512-I5d00Pd/jwMD2QCduo657+YM/6L3KZu++pmX9VFncxaxvHcru9jx1lBaFft+r4Mt2jK0Yhp41XlRAihzPxHNCg==", + "dev": true, + "requires": { + "big.js": "^5.2.2", + "emojis-list": "^3.0.0", + "json5": "^1.0.1" + }, + "dependencies": { + "json5": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz", + 
"integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==", + "dev": true, + "requires": { + "minimist": "^1.2.0" + } + } + } + }, + "locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "requires": { + "p-locate": "^4.1.0" + } + }, + "lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "dev": true + }, + "lodash.debounce": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz", + "integrity": "sha1-gteb/zCmfEAF/9XiUVMArZyk168=", + "dev": true + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "requires": { + "yallist": "^4.0.0" + } + }, + "make-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", + "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", + "dev": true, + "requires": { + "pify": "^4.0.1", + "semver": "^5.6.0" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + } + } + }, + "makeerror": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/makeerror/-/makeerror-1.0.12.tgz", + "integrity": "sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==", + "dev": true, + "requires": { + "tmpl": "1.0.5" + } + }, + "merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true + }, + "micromatch": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", + "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", + "dev": true, + "requires": { + "braces": "^3.0.1", + "picomatch": "^2.2.3" + } + }, + "mime-db": { + "version": "1.51.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.51.0.tgz", + "integrity": "sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g==", + "dev": true + }, + "mime-types": { + "version": "2.1.34", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.34.tgz", + "integrity": "sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A==", + "dev": true, + "requires": { + "mime-db": "1.51.0" + } + }, + "mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true + }, + "minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + 
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "minimist": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", + "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==", + "dev": true + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=", + "dev": true + }, + "neo-async": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", + "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==", + "dev": true + }, + "node-int64": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz", + "integrity": "sha1-h6kGXNs1XTGC2PlM4RGIuCXGijs=", + "dev": true + }, + "node-releases": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.1.tgz", + "integrity": "sha512-CqyzN6z7Q6aMeF/ktcMVTzhAHCEpf8SOarwpzpf8pNBY2k5/oM34UHldUwp8VKI7uxct2HxSRdJjBaZeESzcxA==", + "dev": true + }, + "normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true + }, + "npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "dev": true, + "requires": { + "path-key": "^3.0.0" + } + }, + "nwsapi": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.0.tgz", + "integrity": "sha512-h2AatdwYH+JHiZpv7pt/gSX1XoRGb7L/qSIeuqA6GwYoF9w1vP1cw42TO0aI2pNyshRK5893hNSl+1//vHK7hQ==", + "dev": true + }, + "object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true + }, + "object.assign": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.2.tgz", + "integrity": "sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ==", + "dev": true, + "requires": { + "call-bind": "^1.0.0", + "define-properties": "^1.1.3", + "has-symbols": "^1.0.1", + "object-keys": "^1.1.1" + } + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dev": true, + "requires": { + "wrappy": "1" + } + }, + "onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "requires": { + "mimic-fn": "^2.1.0" + } + }, + "optionator": { + "version": "0.8.3", + "resolved": 
"https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", + "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", + "dev": true, + "requires": { + "deep-is": "~0.1.3", + "fast-levenshtein": "~2.0.6", + "levn": "~0.3.0", + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2", + "word-wrap": "~1.2.3" + } + }, + "p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "requires": { + "p-limit": "^2.2.0" + } + }, + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true + }, + "parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + } + }, + "parse5": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", + "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==", + "dev": true + }, + "path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "dev": true + }, + "path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true + }, + "path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true + }, + "picocolors": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", + "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==", + "dev": true + }, + "picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true + }, + "pify": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", + "dev": true + }, + "pirates": { + "version": "4.0.5", + 
"resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.5.tgz", + "integrity": "sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ==", + "dev": true + }, + "pkg-dir": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", + "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", + "dev": true, + "requires": { + "find-up": "^4.0.0" + } + }, + "prelude-ls": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", + "integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=", + "dev": true + }, + "pretty-format": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-27.5.1.tgz", + "integrity": "sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.1", + "ansi-styles": "^5.0.0", + "react-is": "^17.0.1" + }, + "dependencies": { + "ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true + } + } + }, + "prompts": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz", + "integrity": "sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==", + "dev": true, + "requires": { + "kleur": "^3.0.3", + "sisteransi": "^1.0.5" + } + }, + "psl": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz", + "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==", + "dev": true + }, + "punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", + "dev": true + }, + "randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dev": true, + "requires": { + "safe-buffer": "^5.1.0" + } + }, + "react-is": { + "version": "17.0.2", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz", + "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==", + "dev": true + }, + "readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "optional": true, + "requires": { + "picomatch": "^2.2.1" + } + }, + "rechoir": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.7.1.tgz", + "integrity": "sha512-/njmZ8s1wVeR6pjTZ+0nCnv8SpZNRMT2D1RLOJQESlYFDBvwpTA4KWJpZ+sBJ4+vhjILRcK7JIFdGCdxEAAitg==", + "dev": true, + "requires": { + "resolve": "^1.9.0" + } + }, + "regenerate": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/regenerate/-/regenerate-1.4.2.tgz", + "integrity": "sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==", + "dev": true + }, + "regenerate-unicode-properties": { + "version": "9.0.0", + 
"resolved": "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-9.0.0.tgz", + "integrity": "sha512-3E12UeNSPfjrgwjkR81m5J7Aw/T55Tu7nUyZVQYCKEOs+2dkxEY+DpPtZzO4YruuiPb7NkYLVcyJC4+zCbk5pA==", + "dev": true, + "requires": { + "regenerate": "^1.4.2" + } + }, + "regenerator-runtime": { + "version": "0.13.9", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz", + "integrity": "sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==", + "dev": true + }, + "regenerator-transform": { + "version": "0.14.5", + "resolved": "https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.14.5.tgz", + "integrity": "sha512-eOf6vka5IO151Jfsw2NO9WpGX58W6wWmefK3I1zEGr0lOD0u8rwPaNqQL1aRxUaxLeKO3ArNh3VYg1KbaD+FFw==", + "dev": true, + "requires": { + "@babel/runtime": "^7.8.4" + } + }, + "regexpu-core": { + "version": "4.8.0", + "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-4.8.0.tgz", + "integrity": "sha512-1F6bYsoYiz6is+oz70NWur2Vlh9KWtswuRuzJOfeYUrfPX2o8n74AnUVaOGDbUqVGO9fNHu48/pjJO4sNVwsOg==", + "dev": true, + "requires": { + "regenerate": "^1.4.2", + "regenerate-unicode-properties": "^9.0.0", + "regjsgen": "^0.5.2", + "regjsparser": "^0.7.0", + "unicode-match-property-ecmascript": "^2.0.0", + "unicode-match-property-value-ecmascript": "^2.0.0" + } + }, + "regjsgen": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/regjsgen/-/regjsgen-0.5.2.tgz", + "integrity": "sha512-OFFT3MfrH90xIW8OOSyUrk6QHD5E9JOTeGodiJeBS3J6IwlgzJMNE/1bZklWz5oTg+9dCMyEetclvCVXOPoN3A==", + "dev": true + }, + "regjsparser": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.7.0.tgz", + "integrity": "sha512-A4pcaORqmNMDVwUjWoTzuhwMGpP+NykpfqAsEgI1FSH/EzC7lrN5TMd+kN8YCovX+jMpu8eaqXgXPCa0g8FQNQ==", + "dev": true, + "requires": { + "jsesc": "~0.5.0" + }, + "dependencies": { + "jsesc": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz", + "integrity": "sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0=", + "dev": true + } + } + }, + "require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", + "dev": true + }, + "resolve": { + "version": "1.22.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.0.tgz", + "integrity": "sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw==", + "dev": true, + "requires": { + "is-core-module": "^2.8.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + } + }, + "resolve-cwd": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz", + "integrity": "sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==", + "dev": true, + "requires": { + "resolve-from": "^5.0.0" + } + }, + "resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true + }, + "resolve.exports": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/resolve.exports/-/resolve.exports-1.1.0.tgz", + "integrity": "sha512-J1l+Zxxp4XK3LUDZ9m60LRJF/mAe4z6a4xyabPHk7pvK5t35dACV32iIjJDFeWZFfZlO29w6SZ67knR0tHzJtQ==", + "dev": true + }, + 
"rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dev": true, + "requires": { + "glob": "^7.1.3" + } + }, + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "dev": true + }, + "saxes": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/saxes/-/saxes-5.0.1.tgz", + "integrity": "sha512-5LBh1Tls8c9xgGjw3QrMwETmTMVk0oFgvrFSvWx62llR2hcEInrKNZ2GZCCuuy2lvWrdl5jhbpeqc5hRYKFOcw==", + "dev": true, + "requires": { + "xmlchars": "^2.2.0" + } + }, + "schema-utils": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz", + "integrity": "sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==", + "dev": true, + "requires": { + "@types/json-schema": "^7.0.8", + "ajv": "^6.12.5", + "ajv-keywords": "^3.5.2" + } + }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true + }, + "serialize-javascript": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz", + "integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==", + "dev": true, + "requires": { + "randombytes": "^2.1.0" + } + }, + "shallow-clone": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz", + "integrity": "sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==", + "dev": true, + "requires": { + "kind-of": "^6.0.2" + } + }, + "shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "requires": { + "shebang-regex": "^3.0.0" + } + }, + "shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true + }, + "signal-exit": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.6.tgz", + "integrity": "sha512-sDl4qMFpijcGw22U5w63KmD3cZJfBuFlVNbVMKje2keoKML7X2UzWbc4XrmEbDwg0NXJc3yv4/ox7b+JWb57kQ==", + "dev": true + }, + "sisteransi": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", + "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==", + "dev": true + }, + "slash": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz", + "integrity": 
"sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==", + "dev": true + }, + "source-map": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", + "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", + "dev": true + }, + "source-map-support": { + "version": "0.5.21", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", + "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", + "dev": true, + "requires": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + } + } + }, + "sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", + "dev": true + }, + "stack-utils": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.5.tgz", + "integrity": "sha512-xrQcmYhOsn/1kX+Vraq+7j4oE2j/6BFscZ0etmYg81xuM8Gq0022Pxb8+IqgOFUIaxHs0KaSb7T1+OegiNrNFA==", + "dev": true, + "requires": { + "escape-string-regexp": "^2.0.0" + }, + "dependencies": { + "escape-string-regexp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "dev": true + } + } + }, + "string-length": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/string-length/-/string-length-4.0.2.tgz", + "integrity": "sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==", + "dev": true, + "requires": { + "char-regex": "^1.0.2", + "strip-ansi": "^6.0.0" + } + }, + "string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + } + }, + "strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.1" + } + }, + "strip-bom": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", + "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==", + "dev": true + }, + "strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "dev": true + }, + "strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true + }, + 
"supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "requires": { + "has-flag": "^3.0.0" + } + }, + "supports-hyperlinks": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/supports-hyperlinks/-/supports-hyperlinks-2.2.0.tgz", + "integrity": "sha512-6sXEzV5+I5j8Bmq9/vUphGRM/RJNT9SCURJLjwfOg51heRtguGWDzcaBlgAzKhQa0EVNpPEKzQuBwZ8S8WaCeQ==", + "dev": true, + "requires": { + "has-flag": "^4.0.0", + "supports-color": "^7.0.0" + }, + "dependencies": { + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true + }, + "symbol-tree": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.4.tgz", + "integrity": "sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==", + "dev": true + }, + "tapable": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz", + "integrity": "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==", + "dev": true + }, + "terminal-link": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/terminal-link/-/terminal-link-2.1.1.tgz", + "integrity": "sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ==", + "dev": true, + "requires": { + "ansi-escapes": "^4.2.1", + "supports-hyperlinks": "^2.0.0" + } + }, + "terser": { + "version": "5.14.2", + "resolved": "https://registry.npmjs.org/terser/-/terser-5.14.2.tgz", + "integrity": "sha512-oL0rGeM/WFQCUd0y2QrWxYnq7tfSuKBiqTjRPWrRgB46WD/kiwHwF8T23z78H6Q6kGCuuHcPB+KULHRdxvVGQA==", + "dev": true, + "requires": { + "@jridgewell/source-map": "^0.3.2", + "acorn": "^8.5.0", + "commander": "^2.20.0", + "source-map-support": "~0.5.20" + }, + "dependencies": { + "commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", + "dev": true + } + } + }, + "terser-webpack-plugin": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.0.tgz", + "integrity": "sha512-LPIisi3Ol4chwAaPP8toUJ3L4qCM1G0wao7L3qNv57Drezxj6+VEyySpPw4B1HSO2Eg/hDY/MNF5XihCAoqnsQ==", + "dev": true, + "requires": { + "jest-worker": "^27.4.1", + "schema-utils": "^3.1.1", + "serialize-javascript": "^6.0.0", + "source-map": "^0.6.1", + "terser": "^5.7.2" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": 
"https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + } + } + }, + "test-exclude": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", + "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==", + "dev": true, + "requires": { + "@istanbuljs/schema": "^0.1.2", + "glob": "^7.1.4", + "minimatch": "^3.0.4" + } + }, + "throat": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/throat/-/throat-6.0.1.tgz", + "integrity": "sha512-8hmiGIJMDlwjg7dlJ4yKGLK8EsYqKgPWbG3b4wjJddKNwc7N7Dpn08Df4szr/sZdMVeOstrdYSsqzX6BYbcB+w==", + "dev": true + }, + "tmpl": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz", + "integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==", + "dev": true + }, + "to-fast-properties": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", + "integrity": "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=", + "dev": true + }, + "to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "requires": { + "is-number": "^7.0.0" + } + }, + "tough-cookie": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz", + "integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==", + "dev": true, + "requires": { + "psl": "^1.1.33", + "punycode": "^2.1.1", + "universalify": "^0.1.2" + } + }, + "tr46": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-2.1.0.tgz", + "integrity": "sha512-15Ih7phfcdP5YxqiB+iDtLoaTz4Nd35+IiAv0kQ5FNKHzXgdWqPoTIqEDDJmXceQt4JZk6lVPT8lnDlPpGDppw==", + "dev": true, + "requires": { + "punycode": "^2.1.1" + } + }, + "type-check": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", + "integrity": "sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=", + "dev": true, + "requires": { + "prelude-ls": "~1.1.2" + } + }, + "type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true + }, + "type-fest": { + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "dev": true + }, + "typedarray-to-buffer": { + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz", + "integrity": "sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==", + "dev": true, + "requires": { + "is-typedarray": "^1.0.0" + } + }, + "typescript": { + "version": "4.5.5", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.5.5.tgz", + "integrity": "sha512-TCTIul70LyWe6IJWT8QSYeA54WQe8EjQFU4wY52Fasj5UKx88LNYKCgBEHcOMOrFF1rKGbD8v/xcNWVUq9SymA==", + "dev": true + }, + "unicode-canonical-property-names-ecmascript": { + 
"version": "2.0.0", + "resolved": "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz", + "integrity": "sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ==", + "dev": true + }, + "unicode-match-property-ecmascript": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz", + "integrity": "sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==", + "dev": true, + "requires": { + "unicode-canonical-property-names-ecmascript": "^2.0.0", + "unicode-property-aliases-ecmascript": "^2.0.0" + } + }, + "unicode-match-property-value-ecmascript": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.0.0.tgz", + "integrity": "sha512-7Yhkc0Ye+t4PNYzOGKedDhXbYIBe1XEQYQxOPyhcXNMJ0WCABqqj6ckydd6pWRZTHV4GuCPKdBAUiMc60tsKVw==", + "dev": true + }, + "unicode-property-aliases-ecmascript": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.0.0.tgz", + "integrity": "sha512-5Zfuy9q/DFr4tfO7ZPeVXb1aPoeQSdeFMLpYuFebehDAhbuevLs5yxSZmIFN1tP5F9Wl4IpJrYojg85/zgyZHQ==", + "dev": true + }, + "universalify": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz", + "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==", + "dev": true + }, + "uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "requires": { + "punycode": "^2.1.0" + } + }, + "v8-to-istanbul": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-8.1.1.tgz", + "integrity": "sha512-FGtKtv3xIpR6BYhvgH8MI/y78oT7d8Au3ww4QIxymrCtZEh5b8gCw2siywE+puhEmuWKDtmfrvF5UlB298ut3w==", + "dev": true, + "requires": { + "@types/istanbul-lib-coverage": "^2.0.1", + "convert-source-map": "^1.6.0", + "source-map": "^0.7.3" + }, + "dependencies": { + "source-map": { + "version": "0.7.3", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz", + "integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==", + "dev": true + } + } + }, + "w3c-hr-time": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz", + "integrity": "sha512-z8P5DvDNjKDoFIHK7q8r8lackT6l+jo/Ye3HOle7l9nICP9lf1Ci25fy9vHd0JOWewkIFzXIEig3TdKT7JQ5fQ==", + "dev": true, + "requires": { + "browser-process-hrtime": "^1.0.0" + } + }, + "w3c-xmlserializer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-2.0.0.tgz", + "integrity": "sha512-4tzD0mF8iSiMiNs30BiLO3EpfGLZUT2MSX/G+o7ZywDzliWQ3OPtTZ0PTC3B3ca1UAf4cJMHB+2Bf56EriJuRA==", + "dev": true, + "requires": { + "xml-name-validator": "^3.0.0" + } + }, + "walker": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/walker/-/walker-1.0.8.tgz", + "integrity": "sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==", + "dev": true, + "requires": { + "makeerror": "1.0.12" + } + }, + "watchpack": { + 
"version": "2.3.1", + "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.3.1.tgz", + "integrity": "sha512-x0t0JuydIo8qCNctdDrn1OzH/qDzk2+rdCOC3YzumZ42fiMqmQ7T3xQurykYMhYfHaPHTp4ZxAx2NfUo1K6QaA==", + "dev": true, + "requires": { + "glob-to-regexp": "^0.4.1", + "graceful-fs": "^4.1.2" + } + }, + "webidl-conversions": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-6.1.0.tgz", + "integrity": "sha512-qBIvFLGiBpLjfwmYAaHPXsn+ho5xZnGvyGvsarywGNc8VyQJUMHJ8OBKGGrPER0okBeMDaan4mNBlgBROxuI8w==", + "dev": true + }, + "webpack": { + "version": "5.67.0", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.67.0.tgz", + "integrity": "sha512-LjFbfMh89xBDpUMgA1W9Ur6Rn/gnr2Cq1jjHFPo4v6a79/ypznSYbAyPgGhwsxBtMIaEmDD1oJoA7BEYw/Fbrw==", + "dev": true, + "requires": { + "@types/eslint-scope": "^3.7.0", + "@types/estree": "^0.0.50", + "@webassemblyjs/ast": "1.11.1", + "@webassemblyjs/wasm-edit": "1.11.1", + "@webassemblyjs/wasm-parser": "1.11.1", + "acorn": "^8.4.1", + "acorn-import-assertions": "^1.7.6", + "browserslist": "^4.14.5", + "chrome-trace-event": "^1.0.2", + "enhanced-resolve": "^5.8.3", + "es-module-lexer": "^0.9.0", + "eslint-scope": "5.1.1", + "events": "^3.2.0", + "glob-to-regexp": "^0.4.1", + "graceful-fs": "^4.2.9", + "json-parse-better-errors": "^1.0.2", + "loader-runner": "^4.2.0", + "mime-types": "^2.1.27", + "neo-async": "^2.6.2", + "schema-utils": "^3.1.0", + "tapable": "^2.1.1", + "terser-webpack-plugin": "^5.1.3", + "watchpack": "^2.3.1", + "webpack-sources": "^3.2.3" + } + }, + "webpack-cli": { + "version": "4.9.2", + "resolved": "https://registry.npmjs.org/webpack-cli/-/webpack-cli-4.9.2.tgz", + "integrity": "sha512-m3/AACnBBzK/kMTcxWHcZFPrw/eQuY4Df1TxvIWfWM2x7mRqBQCqKEd96oCUa9jkapLBaFfRce33eGDb4Pr7YQ==", + "dev": true, + "requires": { + "@discoveryjs/json-ext": "^0.5.0", + "@webpack-cli/configtest": "^1.1.1", + "@webpack-cli/info": "^1.4.1", + "@webpack-cli/serve": "^1.6.1", + "colorette": "^2.0.14", + "commander": "^7.0.0", + "execa": "^5.0.0", + "fastest-levenshtein": "^1.0.12", + "import-local": "^3.0.2", + "interpret": "^2.2.0", + "rechoir": "^0.7.0", + "webpack-merge": "^5.7.3" + }, + "dependencies": { + "commander": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", + "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", + "dev": true + } + } + }, + "webpack-merge": { + "version": "5.8.0", + "resolved": "https://registry.npmjs.org/webpack-merge/-/webpack-merge-5.8.0.tgz", + "integrity": "sha512-/SaI7xY0831XwP6kzuwhKWVKDP9t1QY1h65lAFLbZqMPIuYcD9QAW4u9STIbU9kaJbPBB/geU/gLr1wDjOhQ+Q==", + "dev": true, + "requires": { + "clone-deep": "^4.0.1", + "wildcard": "^2.0.0" + } + }, + "webpack-sources": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.3.tgz", + "integrity": "sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==", + "dev": true + }, + "whatwg-encoding": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz", + "integrity": "sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw==", + "dev": true, + "requires": { + "iconv-lite": "0.4.24" + } + }, + "whatwg-mimetype": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz", + "integrity": 
"sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g==", + "dev": true + }, + "whatwg-url": { + "version": "8.7.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-8.7.0.tgz", + "integrity": "sha512-gAojqb/m9Q8a5IV96E3fHJM70AzCkgt4uXYX2O7EmuyOnLrViCQlsEBmF9UQIu3/aeAIp2U17rtbpZWNntQqdg==", + "dev": true, + "requires": { + "lodash": "^4.7.0", + "tr46": "^2.1.0", + "webidl-conversions": "^6.1.0" + } + }, + "which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "requires": { + "isexe": "^2.0.0" + } + }, + "wildcard": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/wildcard/-/wildcard-2.0.0.tgz", + "integrity": "sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw==", + "dev": true + }, + "word-wrap": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", + "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "dev": true + }, + "wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + } + } + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + }, + "write-file-atomic": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz", + "integrity": "sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==", + "dev": true, + "requires": { + "imurmurhash": "^0.1.4", + "is-typedarray": "^1.0.0", + "signal-exit": "^3.0.2", + "typedarray-to-buffer": "^3.1.5" + } + }, + "ws": { + "version": "7.5.7", + "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.7.tgz", + "integrity": "sha512-KMvVuFzpKBuiIXW3E4u3mySRO2/mCHSyZDJQM5NQ9Q9KHWHWh0NHgfbRMLLrceUK5qAL4ytALJbpRMjixFZh8A==", + "dev": true, + "requires": {} + }, + "xml-name-validator": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-3.0.0.tgz", + "integrity": 
"sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw==", + "dev": true + }, + "xmlchars": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/xmlchars/-/xmlchars-2.2.0.tgz", + "integrity": "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==", + "dev": true + }, + "y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "yargs": { + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + "dev": true, + "requires": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" + } + }, + "yargs-parser": { + "version": "20.2.9", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", + "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", + "dev": true + } + } +} diff --git a/superset-embedded-sdk/package.json b/superset-embedded-sdk/package.json new file mode 100644 index 0000000000000..055f44191a7e8 --- /dev/null +++ b/superset-embedded-sdk/package.json @@ -0,0 +1,62 @@ +{ + "name": "@superset-ui/embedded-sdk", + "version": "0.1.0-alpha.8", + "description": "SDK for embedding resources from Superset into your own application", + "access": "public", + "keywords": [ + "superset", + "embed", + "embedded", + "sdk", + "iframe", + "dashboard", + "chart", + "analytics" + ], + "files": [ + "bundle", + "lib", + "dist" + ], + "main": "bundle/index.js", + "module": "lib/index.js", + "types": "dist/index.d.ts", + "scripts": { + "build": "tsc ; babel src --out-dir lib --extensions '.ts,.tsx' ; webpack --mode production", + "ci:release": "node ./release-if-necessary.js", + "test": "jest" + }, + "browserslist": [ + "last 3 chrome versions", + "last 3 firefox versions", + "last 3 safari versions", + "last 3 edge versions" + ], + "dependencies": { + "@superset-ui/switchboard": "^0.18.26-0", + "jwt-decode": "^3.1.2" + }, + "devDependencies": { + "@babel/cli": "^7.16.8", + "@babel/core": "^7.16.12", + "@babel/preset-env": "^7.16.11", + "@babel/preset-typescript": "^7.16.7", + "@types/jest": "^27.4.1", + "axios": "^0.25.0", + "babel-loader": "^8.2.3", + "jest": "^27.5.1", + "typescript": "^4.5.5", + "webpack": "^5.67.0", + "webpack-cli": "^4.9.2" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/apache/superset.git" + }, + "homepage": "https://github.com/apache/superset#readme", + "bugs": { + "url": "https://github.com/apache/superset/issues" + }, + "license": "Apache-2.0", + "author": "Superset" +} diff --git a/superset-embedded-sdk/release-if-necessary.js b/superset-embedded-sdk/release-if-necessary.js new file mode 100644 index 0000000000000..632f8cd4b873c --- /dev/null +++ b/superset-embedded-sdk/release-if-necessary.js @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under 
one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +const { execSync } = require('child_process'); +const axios = require('axios'); +const { name, version } = require('./package.json'); + +function log(...args) { + console.log('[embedded-sdk-release]', ...args); +} + +function logError(...args) { + console.error('[embedded-sdk-release]', ...args); +} + +(async () => { + log(`checking if ${name}@${version} needs releasing`); + + const packageUrl = `https://registry.npmjs.org/${name}/${version}`; + // npm commands output a bunch of garbage in the edge cases, + // and require sending semi-validated strings to the command line, + // so let's just use good old http. + const { status } = await axios.get(packageUrl, { + validateStatus: (status) => true // we literally just want the status so any status is valid + }); + + if (status === 200) { + log('version already exists on npm, exiting'); + } else if (status === 404) { + log('release required, building'); + try { + execSync('npm run build', { stdio: 'pipe' }); + log('build successful, publishing') + execSync('npm publish --access public', { stdio: 'pipe' }); + log(`published ${version} to npm`); + } catch (err) { + console.error(String(err.stdout)); + logError('Encountered an error, details should be above'); + process.exitCode = 1; + } + } else { + logError(`ERROR: Received unexpected http status code ${status} from GET ${packageUrl} +The embedded sdk release script might need to be fixed, or maybe you just need to try again later.`); + process.exitCode = 1; + } +})(); diff --git a/superset-embedded-sdk/src/const.ts b/superset-embedded-sdk/src/const.ts new file mode 100644 index 0000000000000..72eba8525d758 --- /dev/null +++ b/superset-embedded-sdk/src/const.ts @@ -0,0 +1,24 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +export const IFRAME_COMMS_MESSAGE_TYPE = "__embedded_comms__"; +export const DASHBOARD_UI_FILTER_CONFIG_URL_PARAM_KEY: { [index: string]: any } = { + visible: "show_filters", + expanded: "expand_filters", +} diff --git a/superset-embedded-sdk/src/guestTokenRefresh.test.ts b/superset-embedded-sdk/src/guestTokenRefresh.test.ts new file mode 100644 index 0000000000000..dd61401f1e601 --- /dev/null +++ b/superset-embedded-sdk/src/guestTokenRefresh.test.ts @@ -0,0 +1,96 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { + REFRESH_TIMING_BUFFER_MS, + getGuestTokenRefreshTiming, + MIN_REFRESH_WAIT_MS, + DEFAULT_TOKEN_EXP_MS, +} from "./guestTokenRefresh"; + +describe("guest token refresh", () => { + beforeAll(() => { + jest.useFakeTimers("modern"); // "modern" allows us to fake the system time + jest.setSystemTime(new Date("2022-03-03 01:00")); + jest.spyOn(global, "setTimeout"); + }); + + afterAll(() => { + jest.useRealTimers(); + }); + + function makeFakeJWT(claims: any) { + // not a valid jwt, but close enough for this code + const tokenifiedClaims = Buffer.from(JSON.stringify(claims)).toString( + "base64" + ); + return `abc.${tokenifiedClaims}.xyz`; + } + + it("schedules refresh with an epoch exp", () => { + // exp is in seconds + const ttl = 1300; + const exp = Date.now() / 1000 + ttl; + const fakeToken = makeFakeJWT({ exp }); + + const timing = getGuestTokenRefreshTiming(fakeToken); + + expect(timing).toBeGreaterThan(MIN_REFRESH_WAIT_MS); + expect(timing).toBe(ttl * 1000 - REFRESH_TIMING_BUFFER_MS); + }); + + it("schedules refresh with an epoch exp containing a decimal", () => { + const ttl = 1300.123; + const exp = Date.now() / 1000 + ttl; + const fakeToken = makeFakeJWT({ exp }); + + const timing = getGuestTokenRefreshTiming(fakeToken); + + expect(timing).toBeGreaterThan(MIN_REFRESH_WAIT_MS); + expect(timing).toBe(ttl * 1000 - REFRESH_TIMING_BUFFER_MS); + }); + + it("schedules refresh with iso exp", () => { + const exp = new Date("2022-03-03 01:09").toISOString(); + const fakeToken = makeFakeJWT({ exp }); + + const timing = getGuestTokenRefreshTiming(fakeToken); + const expectedTiming = 1000 * 60 * 9 - REFRESH_TIMING_BUFFER_MS; + + expect(timing).toBeGreaterThan(MIN_REFRESH_WAIT_MS); + expect(timing).toBe(expectedTiming); + }); + + it("avoids refresh spam", () => { + const fakeToken = makeFakeJWT({ exp: Date.now() / 1000 }); + + const timing = getGuestTokenRefreshTiming(fakeToken); + + expect(timing).toBe(MIN_REFRESH_WAIT_MS - REFRESH_TIMING_BUFFER_MS); + }); + + it("uses a default when it cannot parse the date", () => { + const fakeToken = makeFakeJWT({ exp: "invalid date" }); + + const timing = getGuestTokenRefreshTiming(fakeToken); + + expect(timing).toBeGreaterThan(MIN_REFRESH_WAIT_MS); + 
expect(timing).toBe(DEFAULT_TOKEN_EXP_MS - REFRESH_TIMING_BUFFER_MS); + }); +}); diff --git a/superset-embedded-sdk/src/guestTokenRefresh.ts b/superset-embedded-sdk/src/guestTokenRefresh.ts new file mode 100644 index 0000000000000..101c4d9e93930 --- /dev/null +++ b/superset-embedded-sdk/src/guestTokenRefresh.ts @@ -0,0 +1,33 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import jwt_decode from "jwt-decode"; + +export const REFRESH_TIMING_BUFFER_MS = 5000 // refresh guest token early to avoid failed superset requests +export const MIN_REFRESH_WAIT_MS = 10000 // avoid blasting requests as fast as the cpu can handle +export const DEFAULT_TOKEN_EXP_MS = 300000 // (5 min) used only when parsing guest token exp fails + +// when do we refresh the guest token? +export function getGuestTokenRefreshTiming(currentGuestToken: string) { + const parsedJwt = jwt_decode<Record<string, any>>(currentGuestToken); + // if exp is int, it is in seconds, but Date() takes milliseconds + const exp = new Date(/[^0-9\.]/g.test(parsedJwt.exp) ? parsedJwt.exp : parseFloat(parsedJwt.exp) * 1000); + const isValidDate = exp.toString() !== 'Invalid Date'; + const ttl = isValidDate ? Math.max(MIN_REFRESH_WAIT_MS, exp.getTime() - Date.now()) : DEFAULT_TOKEN_EXP_MS; + return ttl - REFRESH_TIMING_BUFFER_MS; +} diff --git a/superset-embedded-sdk/src/index.ts b/superset-embedded-sdk/src/index.ts new file mode 100644 index 0000000000000..56a07e5544c1d --- /dev/null +++ b/superset-embedded-sdk/src/index.ts @@ -0,0 +1,190 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { + DASHBOARD_UI_FILTER_CONFIG_URL_PARAM_KEY, + IFRAME_COMMS_MESSAGE_TYPE +} from './const'; + +// We can swap this out for the actual switchboard package once it gets published +import { Switchboard } from '@superset-ui/switchboard'; +import { getGuestTokenRefreshTiming } from './guestTokenRefresh'; + +/** + * The function to fetch a guest token from your Host App's backend server.
+ * The Host App backend must supply an API endpoint + * which returns a guest token with appropriate resource access. + */ +export type GuestTokenFetchFn = () => Promise<string>; + +export type UiConfigType = { + hideTitle?: boolean + hideTab?: boolean + hideChartControls?: boolean + filters?: { + [key: string]: boolean | undefined + visible?: boolean + expanded?: boolean + } +} + +export type EmbedDashboardParams = { + /** The id provided by the embed configuration UI in Superset */ + id: string + /** The domain where Superset can be located, with protocol, such as: https://superset.example.com */ + supersetDomain: string + /** The html element within which to mount the iframe */ + mountPoint: HTMLElement + /** A function to fetch a guest token from the Host App's backend server */ + fetchGuestToken: GuestTokenFetchFn + /** The dashboard UI config: hideTitle, hideTab, hideChartControls, filters.visible, filters.expanded **/ + dashboardUiConfig?: UiConfigType + /** Are we in debug mode? */ + debug?: boolean +} + +export type Size = { + width: number, height: number +} + +export type EmbeddedDashboard = { + getScrollSize: () => Promise<Size> + unmount: () => void + getDashboardPermalink: (anchor: string) => Promise<string> + getActiveTabs: () => Promise<string[]> +} + +/** + * Embeds a Superset dashboard into the page using an iframe. + */ +export async function embedDashboard({ + id, + supersetDomain, + mountPoint, + fetchGuestToken, + dashboardUiConfig, + debug = false +}: EmbedDashboardParams): Promise<EmbeddedDashboard> { + function log(...info: unknown[]) { + if (debug) { + console.debug(`[superset-embedded-sdk][dashboard ${id}]`, ...info); + } + } + + log('embedding'); + + function calculateConfig() { + let configNumber = 0 + if(dashboardUiConfig) { + if(dashboardUiConfig.hideTitle) { + configNumber += 1 + } + if(dashboardUiConfig.hideTab) { + configNumber += 2 + } + if(dashboardUiConfig.hideChartControls) { + configNumber += 8 + } + } + return configNumber + } + + async function mountIframe(): Promise<Switchboard> { + return new Promise(resolve => { + const iframe = document.createElement('iframe'); + const dashboardConfig = dashboardUiConfig ? `?uiConfig=${calculateConfig()}` : "" + const filterConfig = dashboardUiConfig?.filters || {} + const filterConfigKeys = Object.keys(filterConfig) + const filterConfigUrlParams = filterConfigKeys.length > 0 + ? 
"&" + + filterConfigKeys + .map(key => DASHBOARD_UI_FILTER_CONFIG_URL_PARAM_KEY[key] + '=' + filterConfig[key]).join('&') + : "" + + // set up the iframe's sandbox configuration + iframe.sandbox.add("allow-same-origin"); // needed for postMessage to work + iframe.sandbox.add("allow-scripts"); // obviously the iframe needs scripts + iframe.sandbox.add("allow-presentation"); // for fullscreen charts + iframe.sandbox.add("allow-downloads"); // for downloading charts as image + iframe.sandbox.add("allow-forms"); // for forms to submit + iframe.sandbox.add("allow-popups"); // for exporting charts as csv + // add these if it turns out we need them: + // iframe.sandbox.add("allow-top-navigation"); + + // add the event listener before setting src, to be 100% sure that we capture the load event + iframe.addEventListener('load', () => { + // MessageChannel allows us to send and receive messages smoothly between our window and the iframe + // See https://developer.mozilla.org/en-US/docs/Web/API/Channel_Messaging_API + const commsChannel = new MessageChannel(); + const ourPort = commsChannel.port1; + const theirPort = commsChannel.port2; + + // Send one of the message channel ports to the iframe to initialize embedded comms + // See https://developer.mozilla.org/en-US/docs/Web/API/Window/postMessage + // we know the content window isn't null because we are in the load event handler. + iframe.contentWindow!.postMessage( + { type: IFRAME_COMMS_MESSAGE_TYPE, handshake: "port transfer" }, + supersetDomain, + [theirPort], + ) + log('sent message channel to the iframe'); + + // return our port from the promise + resolve(new Switchboard({ port: ourPort, name: 'superset-embedded-sdk', debug })); + }); + + iframe.src = `${supersetDomain}/embedded/${id}${dashboardConfig}${filterConfigUrlParams}`; + mountPoint.replaceChildren(iframe); + log('placed the iframe') + }); + } + + const [guestToken, ourPort]: [string, Switchboard] = await Promise.all([ + fetchGuestToken(), + mountIframe(), + ]); + + ourPort.emit('guestToken', { guestToken }); + log('sent guest token'); + + async function refreshGuestToken() { + const newGuestToken = await fetchGuestToken(); + ourPort.emit('guestToken', { guestToken: newGuestToken }); + setTimeout(refreshGuestToken, getGuestTokenRefreshTiming(newGuestToken)); + } + + setTimeout(refreshGuestToken, getGuestTokenRefreshTiming(guestToken)); + + function unmount() { + log('unmounting'); + mountPoint.replaceChildren(); + } + + const getScrollSize = () => ourPort.get<Size>('getScrollSize'); + const getDashboardPermalink = (anchor: string) => + ourPort.get<string>('getDashboardPermalink', { anchor }); + const getActiveTabs = () => ourPort.get<string[]>('getActiveTabs') + + return { + getScrollSize, + unmount, + getDashboardPermalink, + getActiveTabs, + }; +} diff --git a/superset-embedded-sdk/tsconfig.json b/superset-embedded-sdk/tsconfig.json new file mode 100644 index 0000000000000..a9ee59739f47b --- /dev/null +++ b/superset-embedded-sdk/tsconfig.json @@ -0,0 +1,29 @@ +{ + "compilerOptions": { + // syntax rules + "strict": true, + + "moduleResolution": "node", + + // environment + "target": "es6", + "lib": ["DOM", "ESNext"], + "module": "esnext", + + // output + "outDir": "./dist", + "emitDeclarationOnly": true, + "declaration": true + }, + + "include": [ + "src/**/*" + ], + + "exclude": [ + "tests", + "dist", + "lib", + "node_modules" + ] +} diff --git a/superset-embedded-sdk/webpack.config.js b/superset-embedded-sdk/webpack.config.js new file mode 100644 index 0000000000000..f0bc69038da9e --- /dev/null +++ 
b/superset-embedded-sdk/webpack.config.js @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +const path = require('path'); + +module.exports = { + entry: './src/index.ts', + output: { + filename: 'index.js', + path: path.resolve(__dirname, 'bundle'), + + // this exposes the library's exports under a global variable + library: { + name: "supersetEmbeddedSdk", + type: "umd" + } + }, + devtool: "source-map", + module: { + rules: [ + { + test: /\.[tj]s$/, + // babel-loader is faster than ts-loader because it ignores types. + // We do type checking in a separate process, so that's fine. + use: 'babel-loader', + exclude: /node_modules/, + }, + ], + }, + resolve: { + extensions: ['.ts', '.js'], + }, +}; diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/common/__init__.py b/tests/common/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/common/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
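
For readers embedding a dashboard with the SDK introduced above, a minimal usage sketch follows; it is not part of this patch. The /guest-token endpoint path, the response shape, the embed id, the superset.example.com domain, and the element id are illustrative placeholders only; embedDashboard, its parameters, and the @superset-ui/embedded-sdk package name come from the sources and package.json added above.

import { embedDashboard } from "@superset-ui/embedded-sdk";

// Hypothetical host-app helper: ask the host backend for a Superset guest token.
// Endpoint and payload are placeholders; any endpoint returning a guest token string works.
async function fetchGuestToken(): Promise<string> {
  const response = await fetch("/guest-token", { method: "POST" });
  const { token } = await response.json(); // assumed response shape: { token: "<guest jwt>" }
  return token;
}

embedDashboard({
  id: "abc1234-example-embed-id",                   // placeholder: id from the Superset embed configuration UI
  supersetDomain: "https://superset.example.com",   // placeholder domain
  mountPoint: document.getElementById("dashboard-container")!, // placeholder element id
  fetchGuestToken,
  dashboardUiConfig: { hideTitle: true, filters: { expanded: false } },
}).then(dashboard => dashboard.getScrollSize().then(size => console.log(size)));

Once the first token is delivered, the refreshGuestToken loop in index.ts, paced by getGuestTokenRefreshTiming from guestTokenRefresh.ts, keeps re-fetching tokens before they expire.
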
diff --git a/tests/common/logger_utils.py b/tests/common/logger_utils.py new file mode 100644 index 0000000000000..61e05bc45cff5 --- /dev/null +++ b/tests/common/logger_utils.py @@ -0,0 +1,174 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +import logging +from functools import wraps +from inspect import ( + getcallargs, + getmembers, + getmodule, + isclass, + isfunction, + ismethod, + signature, + Signature, +) +from logging import Logger +from typing import Any, Callable, cast, Optional, Type, Union + +_DEFAULT_ENTER_MSG_PREFIX = "enter to " +_DEFAULT_ENTER_MSG_SUFFIX = "" +_DEFAULT_WITH_ARGUMENTS_MSG_PART = " with: " +_DEFAULT_EXIT_MSG_PREFIX = "exit from " +_DEFAULT_EXIT_MSG_SUFFIX = "" +_DEFAULT_RETURN_VALUE_MSG_PART = " with return value: " + +_CLS_PARAM = "cls" +_SELF_PARAM = "self" +_PRIVATE_PREFIX_SYMBOL = "_" +_FIXTURE_ATTRIBUTE = "_pytestfixturefunction" +_LOGGER_VAR_NAME = "logger" + +empty_and_none = {Signature.empty, "None"} + + +Function = Callable[..., Any] +Decorated = Union[Type[Any], Function] + + +def log( + decorated: Optional[Decorated] = None, + *, + prefix_enter_msg: str = _DEFAULT_ENTER_MSG_PREFIX, + suffix_enter_msg: str = _DEFAULT_ENTER_MSG_SUFFIX, + with_arguments_msg_part=_DEFAULT_WITH_ARGUMENTS_MSG_PART, + prefix_exit_msg: str = _DEFAULT_EXIT_MSG_PREFIX, + suffix_exit_msg: str = _DEFAULT_EXIT_MSG_SUFFIX, + return_value_msg_part=_DEFAULT_RETURN_VALUE_MSG_PART, +) -> Decorated: + + decorator: Decorated = _make_decorator( + prefix_enter_msg, + suffix_enter_msg, + with_arguments_msg_part, + prefix_exit_msg, + suffix_exit_msg, + return_value_msg_part, + ) + if decorated is None: + return decorator + return decorator(decorated) + + +def _make_decorator( + prefix_enter_msg: str, + suffix_enter_msg: str, + with_arguments_msg_part, + prefix_out_msg: str, + suffix_out_msg: str, + return_value_msg_part, +) -> Decorated: + def decorator(decorated: Decorated): + decorated_logger = _get_logger(decorated) + + def decorator_class(clazz: Type[Any]) -> Type[Any]: + _decorate_class_members_with_logs(clazz) + return clazz + + def _decorate_class_members_with_logs(clazz: Type[Any]) -> None: + members = getmembers( + clazz, predicate=lambda val: ismethod(val) or isfunction(val) + ) + for member_name, member in members: + setattr(clazz, member_name, decorator_func(member, f"{clazz.__name__}")) + + def decorator_func(func: Function, prefix_name: str = "") -> Function: + func_name = func.__name__ + func_signature: Signature = signature(func) + is_fixture = hasattr(func, _FIXTURE_ATTRIBUTE) + has_return_value = func_signature.return_annotation not in empty_and_none + is_private = func_name.startswith(_PRIVATE_PREFIX_SYMBOL) + full_func_name = f"{prefix_name}.{func_name}" + under_info = None + debug_enable = None 
+ + @wraps(func) + def _wrapper_func(*args, **kwargs) -> Any: + _log_enter_to_function(*args, **kwargs) + val = func(*args, **kwargs) + _log_exit_of_function(val) + return val + + def _log_enter_to_function(*args, **kwargs) -> None: + if _is_log_info(): + decorated_logger.info( + f"{prefix_enter_msg}'{full_func_name}'{suffix_enter_msg}" + ) + elif _is_debug_enable(): + _log_debug(*args, **kwargs) + + def _is_log_info() -> bool: + return not (_is_under_info() or is_private or is_fixture) + + def _is_under_info() -> bool: + nonlocal under_info + if under_info is None: + under_info = decorated_logger.getEffectiveLevel() < logging.INFO + return under_info + + def _is_debug_enable() -> bool: + nonlocal debug_enable + if debug_enable is None: + debug_enable = decorated_logger.isEnabledFor(logging.DEBUG) + return debug_enable + + def _log_debug(*args, **kwargs) -> None: + used_parameters = getcallargs(func, *args, **kwargs) + _SELF_PARAM in used_parameters and used_parameters.pop(_SELF_PARAM) + _CLS_PARAM in used_parameters and used_parameters.pop(_CLS_PARAM) + if used_parameters: + decorated_logger.debug( + f"{prefix_enter_msg}'{full_func_name}'{with_arguments_msg_part}" + f"{used_parameters}{suffix_enter_msg}" + ) + else: + decorated_logger.debug( + f"{prefix_enter_msg}'{full_func_name}'{suffix_enter_msg}" + ) + + def _log_exit_of_function(return_value: Any) -> None: + if _is_debug_enable() and has_return_value: + decorated_logger.debug( + f"{prefix_out_msg}'{full_func_name}'{return_value_msg_part}" + f"'{return_value}'{suffix_out_msg}" + ) + + return _wrapper_func + + if isclass(decorated): + return decorator_class(cast(Type[Any], decorated)) + return decorator_func(cast(Function, decorated)) + + return decorator + + +def _get_logger(decorated: Decorated) -> Logger: + module = getmodule(decorated) + return module.__dict__.get( + _LOGGER_VAR_NAME, logging.getLogger(module.__name__) # type: ignore + ) diff --git a/tests/common/query_context_generator.py b/tests/common/query_context_generator.py new file mode 100644 index 0000000000000..15b013dc845c2 --- /dev/null +++ b/tests/common/query_context_generator.py @@ -0,0 +1,272 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+import copy +import dataclasses +from typing import Any, Dict, List, Optional + +from superset.common.chart_data import ChartDataResultType +from superset.utils.core import AnnotationType, DTTM_ALIAS + +query_birth_names = { + "extras": {"where": "", "time_grain_sqla": "P1D", "having_druid": []}, + "columns": ["name"], + "metrics": [{"label": "sum__num"}], + "orderby": [("sum__num", False)], + "row_limit": 100, + "granularity": "ds", + "time_range": "100 years ago : now", + "timeseries_limit": 0, + "timeseries_limit_metric": None, + "order_desc": True, + "filters": [ + {"col": "gender", "op": "==", "val": "boy"}, + {"col": "num", "op": "IS NOT NULL"}, + {"col": "name", "op": "NOT IN", "val": ["", '"abc"']}, + ], + "having": "", + "having_filters": [], + "where": "", +} + +QUERY_OBJECTS: Dict[str, Dict[str, object]] = { + "birth_names": query_birth_names, + # `:suffix` are overrides only + "birth_names:include_time": { + "groupby": [DTTM_ALIAS, "name"], + }, + "birth_names:orderby_dup_alias": { + "metrics": [ + { + "expressionType": "SIMPLE", + "column": {"column_name": "num_girls", "type": "BIGINT(20)"}, + "aggregate": "SUM", + "label": "num_girls", + }, + { + "expressionType": "SIMPLE", + "column": {"column_name": "num_boys", "type": "BIGINT(20)"}, + "aggregate": "SUM", + "label": "num_boys", + }, + ], + "orderby": [ + [ + { + "expressionType": "SIMPLE", + "column": {"column_name": "num_girls", "type": "BIGINT(20)"}, + "aggregate": "SUM", + # the same underlying expression, but different label + "label": "SUM(num_girls)", + }, + False, + ], + # reference the ambiguous alias in SIMPLE metric + [ + { + "expressionType": "SIMPLE", + "column": {"column_name": "num_boys", "type": "BIGINT(20)"}, + "aggregate": "AVG", + "label": "AVG(num_boys)", + }, + False, + ], + # reference the ambiguous alias in CUSTOM SQL metric + [ + { + "expressionType": "SQL", + "sqlExpression": "MAX(CASE WHEN num_boys > 0 THEN 1 ELSE 0 END)", + "label": "MAX(CASE WHEN...", + }, + True, + ], + ], + }, + "birth_names:only_orderby_has_metric": { + "metrics": [], + }, +} + +ANNOTATION_LAYERS = { + AnnotationType.FORMULA: { + "annotationType": "FORMULA", + "color": "#ff7f44", + "hideLine": False, + "name": "my formula", + "opacity": "", + "overrides": {"time_range": None}, + "show": True, + "showMarkers": False, + "sourceType": "", + "style": "solid", + "value": "3+x", + "width": 5, + }, + AnnotationType.EVENT: { + "name": "my event", + "annotationType": "EVENT", + "sourceType": "NATIVE", + "color": "#e04355", + "opacity": "", + "style": "solid", + "width": 5, + "showMarkers": False, + "hideLine": False, + "value": 1, + "overrides": {"time_range": None}, + "show": True, + "titleColumn": "", + "descriptionColumns": [], + "timeColumn": "", + "intervalEndColumn": "", + }, + AnnotationType.INTERVAL: { + "name": "my interval", + "annotationType": "INTERVAL", + "sourceType": "NATIVE", + "color": "#e04355", + "opacity": "", + "style": "solid", + "width": 1, + "showMarkers": False, + "hideLine": False, + "value": 1, + "overrides": {"time_range": None}, + "show": True, + "titleColumn": "", + "descriptionColumns": [], + "timeColumn": "", + "intervalEndColumn": "", + }, + AnnotationType.TIME_SERIES: { + "annotationType": "TIME_SERIES", + "color": None, + "descriptionColumns": [], + "hideLine": False, + "intervalEndColumn": "", + "name": "my line", + "opacity": "", + "overrides": {"time_range": None}, + "show": True, + "showMarkers": False, + "sourceType": "line", + "style": "dashed", + "timeColumn": "", + "titleColumn": "", + 
"value": 837, + "width": 5, + }, +} + +POSTPROCESSING_OPERATIONS = { + "birth_names": [ + { + "operation": "aggregate", + "options": { + "groupby": ["name"], + "aggregates": { + "q1": { + "operator": "percentile", + "column": "sum__num", + # todo: rename "interpolation" to "method" when we updated + # numpy. + # https://numpy.org/doc/stable/reference/generated/numpy.percentile.html + "options": {"q": 25, "interpolation": "lower"}, + }, + "median": { + "operator": "median", + "column": "sum__num", + }, + }, + }, + }, + { + "operation": "sort", + "options": {"by": ["q1", "name"], "ascending": [False, True]}, + }, + ] +} + + +def get_query_object( + query_name: str, + add_postprocessing_operations: bool, + add_time_offsets: bool, +) -> Dict[str, Any]: + if query_name not in QUERY_OBJECTS: + raise Exception(f"QueryObject fixture not defined for datasource: {query_name}") + obj = QUERY_OBJECTS[query_name] + + # apply overrides + if ":" in query_name: + parent_query_name = query_name.split(":")[0] + obj = { + **QUERY_OBJECTS[parent_query_name], + **obj, + } + + query_object = copy.deepcopy(obj) + if add_postprocessing_operations: + query_object["post_processing"] = _get_postprocessing_operation(query_name) + if add_time_offsets: + query_object["time_offsets"] = ["1 year ago"] + + return query_object + + +def _get_postprocessing_operation(query_name: str) -> List[Dict[str, Any]]: + if query_name not in QUERY_OBJECTS: + raise Exception( + f"Post-processing fixture not defined for datasource: {query_name}" + ) + return copy.deepcopy(POSTPROCESSING_OPERATIONS[query_name]) + + +@dataclasses.dataclass +class Table: + id: int + type: str + name: str + + +class QueryContextGenerator: + def generate( + self, + query_name: str, + add_postprocessing_operations: bool = False, + add_time_offsets: bool = False, + table_id=1, + table_type="table", + form_data: Optional[Dict[str, Any]] = None, + ) -> Dict[str, Any]: + form_data = form_data or {} + table_name = query_name.split(":")[0] + table = self.get_table(table_name, table_id, table_type) + return { + "datasource": {"id": table.id, "type": table.type}, + "queries": [ + get_query_object( + query_name, + add_postprocessing_operations, + add_time_offsets, + ) + ], + "result_type": ChartDataResultType.FULL, + "form_data": form_data, + } + + def get_table(self, name, id_, type_): + return Table(id_, type_, name) diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000000000..9d13e581704e4 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,111 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from typing import Callable, TYPE_CHECKING +from unittest.mock import MagicMock, Mock, PropertyMock + +from flask import current_app, Flask +from flask.ctx import AppContext +from pytest import fixture + +from superset.app import create_app +from tests.example_data.data_loading.pandas.pandas_data_loader import PandasDataLoader +from tests.example_data.data_loading.pandas.pands_data_loading_conf import ( + PandasLoaderConfigurations, +) +from tests.example_data.data_loading.pandas.table_df_convertor import ( + TableToDfConvertorImpl, +) +from tests.integration_tests.test_app import app + +SUPPORT_DATETIME_TYPE = "support_datetime_type" + +if TYPE_CHECKING: + from sqlalchemy.engine import Engine + + from superset.connectors.sqla.models import Database + from tests.example_data.data_loading.base_data_loader import DataLoader + from tests.example_data.data_loading.pandas.pandas_data_loader import ( + TableToDfConvertor, + ) + +pytest_plugins = "tests.fixtures" + +PRESTO = "presto" +BACKEND_PROPERTY_VALUE = "sqlite" + + +@fixture(scope="session") +def example_db_provider() -> Callable[[], Database]: + def mock_provider() -> Mock: + mock = MagicMock() + type(mock).backend = PropertyMock(return_value=BACKEND_PROPERTY_VALUE) + return mock + + return mock_provider + + +@fixture(scope="session") +def example_db_engine(example_db_provider: Callable[[], Database]) -> Engine: + with app.app_context(): + with example_db_provider().get_sqla_engine_with_context() as engine: + return engine + + +@fixture(scope="session") +def pandas_loader_configuration( + support_datetime_type, +) -> PandasLoaderConfigurations: + return PandasLoaderConfigurations.make_from_dict( + {SUPPORT_DATETIME_TYPE: support_datetime_type} + ) + + +@fixture(scope="session") +def support_datetime_type(example_db_provider: Callable[[], Database]) -> bool: + return example_db_provider().backend != PRESTO + + +@fixture(scope="session") +def table_to_df_convertor( + pandas_loader_configuration: PandasLoaderConfigurations, +) -> TableToDfConvertor: + return TableToDfConvertorImpl( + not pandas_loader_configuration.support_datetime_type, + pandas_loader_configuration.strftime, + ) + + +@fixture(scope="session") +def data_loader( + example_db_engine: Engine, + pandas_loader_configuration: PandasLoaderConfigurations, + table_to_df_convertor: TableToDfConvertor, +) -> DataLoader: + return PandasDataLoader( + example_db_engine, pandas_loader_configuration, table_to_df_convertor + ) diff --git a/tests/consts/__init__.py b/tests/consts/__init__.py new file mode 100644 index 0000000000000..87f6e4d72b350 --- /dev/null +++ b/tests/consts/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/consts/birth_names.py b/tests/consts/birth_names.py new file mode 100644 index 0000000000000..540ce6f7de852 --- /dev/null +++ b/tests/consts/birth_names.py @@ -0,0 +1,26 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +TABLE_NAME = "birth_names" +NUM_GIRLS = "num_girls" +NUM_BOYS = "num_boys" +STATE = "state" +NUM = "num" +NAME = "name" +GENDER = "gender" +DS = "ds" +GIRL = "girl" +BOY = "boy" diff --git a/tests/consts/us_states.py b/tests/consts/us_states.py new file mode 100644 index 0000000000000..741ef816cac39 --- /dev/null +++ b/tests/consts/us_states.py @@ -0,0 +1,86 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+US_STATES = [ + "AL", + "AK", + "AZ", + "AR", + "CA", + "CO", + "CT", + "DE", + "FL", + "GA", + "HI", + "ID", + "IL", + "IN", + "IA", + "KS", + "KY", + "LA", + "ME", + "MD", + "MA", + "MI", + "MN", + "MS", + "MO", + "MT", + "NE", + "NV", + "NH", + "NJ", + "NM", + "NY", + "NC", + "ND", + "OH", + "OK", + "OR", + "PA", + "RI", + "SC", + "SD", + "TN", + "TX", + "UT", + "VT", + "VA", + "WA", + "WV", + "WI", + "WY", + "other", +] diff --git a/tests/example_data/__init__.py b/tests/example_data/__init__.py new file mode 100644 index 0000000000000..87f6e4d72b350 --- /dev/null +++ b/tests/example_data/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/example_data/data_generator/__init__.py b/tests/example_data/data_generator/__init__.py new file mode 100644 index 0000000000000..dc74f5a8fa252 --- /dev/null +++ b/tests/example_data/data_generator/__init__.py @@ -0,0 +1,25 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/example_data/data_generator/base_generator.py b/tests/example_data/data_generator/base_generator.py new file mode 100644 index 0000000000000..023b929091439 --- /dev/null +++ b/tests/example_data/data_generator/base_generator.py @@ -0,0 +1,24 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from abc import ABC, abstractmethod +from typing import Any, Dict, Iterable + + +class ExampleDataGenerator(ABC): + @abstractmethod + def generate(self) -> Iterable[Dict[Any, Any]]: + ... diff --git a/tests/example_data/data_generator/birth_names/__init__.py b/tests/example_data/data_generator/birth_names/__init__.py new file mode 100644 index 0000000000000..87f6e4d72b350 --- /dev/null +++ b/tests/example_data/data_generator/birth_names/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/example_data/data_generator/birth_names/birth_names_generator.py b/tests/example_data/data_generator/birth_names/birth_names_generator.py new file mode 100644 index 0000000000000..2b68abbd4f12b --- /dev/null +++ b/tests/example_data/data_generator/birth_names/birth_names_generator.py @@ -0,0 +1,81 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
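ExampleDataGenerator only asks subclasses to yield dict-shaped rows from generate(); a minimal illustrative subclass (hypothetical, not part of this patch) could look like:

from typing import Any, Dict, Iterable

from tests.example_data.data_generator.base_generator import ExampleDataGenerator


class CountUpGenerator(ExampleDataGenerator):
    """Illustrative generator yielding a two-column table of squares."""

    def __init__(self, rows: int) -> None:
        self._rows = rows

    def generate(self) -> Iterable[Dict[Any, Any]]:
        # each yielded dict becomes one row in the loaded table
        for i in range(self._rows):
            yield {"id": i, "value": i * i}
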
+from __future__ import annotations + +from datetime import datetime +from random import choice, randint +from typing import Any, Dict, Iterable, TYPE_CHECKING + +from tests.consts.birth_names import ( + BOY, + DS, + GENDER, + GIRL, + NAME, + NUM, + NUM_BOYS, + NUM_GIRLS, + STATE, +) +from tests.consts.us_states import US_STATES +from tests.example_data.data_generator.base_generator import ExampleDataGenerator + +if TYPE_CHECKING: + from tests.example_data.data_generator.string_generator import StringGenerator + + +class BirthNamesGenerator(ExampleDataGenerator): + _names_generator: StringGenerator + _start_year: int + _until_not_include_year: int + _rows_per_year: int + + def __init__( + self, + names_generator: StringGenerator, + start_year: int, + years_amount: int, + rows_per_year: int, + ) -> None: + assert start_year > -1 + assert years_amount > 0 + self._names_generator = names_generator + self._start_year = start_year + self._until_not_include_year = start_year + years_amount + self._rows_per_year = rows_per_year + + def generate(self) -> Iterable[Dict[Any, Any]]: + for year in range(self._start_year, self._until_not_include_year): + ds = self._make_year(year) + for _ in range(self._rows_per_year): + yield self.generate_row(ds) + + def _make_year(self, year: int): + return datetime(year, 1, 1, 0, 0, 0) + + def generate_row(self, dt: datetime) -> Dict[Any, Any]: + gender = choice([BOY, GIRL]) + num = randint(1, 100000) + return { + DS: dt, + GENDER: gender, + NAME: self._names_generator.generate(), + NUM: num, + STATE: choice(US_STATES), + NUM_BOYS: num if gender == BOY else 0, + NUM_GIRLS: num if gender == GIRL else 0, + } diff --git a/tests/example_data/data_generator/birth_names/birth_names_generator_factory.py b/tests/example_data/data_generator/birth_names/birth_names_generator_factory.py new file mode 100644 index 0000000000000..0eba3f1c322aa --- /dev/null +++ b/tests/example_data/data_generator/birth_names/birth_names_generator_factory.py @@ -0,0 +1,65 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from abc import ABC, abstractmethod + +from tests.example_data.data_generator.birth_names.birth_names_generator import ( + BirthNamesGenerator, +) +from tests.example_data.data_generator.string_generator_factory import ( + StringGeneratorFactory, +) + + +class BirthNamesGeneratorFactory(ABC): + __factory: BirthNamesGeneratorFactory + + @abstractmethod + def _make(self) -> BirthNamesGenerator: + ... 
+ + @classmethod + def make(cls) -> BirthNamesGenerator: + return cls._get_instance()._make() + + @classmethod + def set_instance(cls, factory: BirthNamesGeneratorFactory) -> None: + cls.__factory = factory + + @classmethod + def _get_instance(cls) -> BirthNamesGeneratorFactory: + if not hasattr(cls, "_BirthNamesGeneratorFactory__factory"): + cls.__factory = BirthNamesGeneratorFactoryImpl() + return cls.__factory + + +MIN_NAME_LEN = 3 +MAX_NAME_SIZE = 10 +START_YEAR = 1960 +YEARS_AMOUNT = 60 +ROW_PER_YEAR = 20 + + +class BirthNamesGeneratorFactoryImpl(BirthNamesGeneratorFactory): + def _make(self) -> BirthNamesGenerator: + string_generator = StringGeneratorFactory.make_lowercase_based( + MIN_NAME_LEN, MAX_NAME_SIZE + ) + return BirthNamesGenerator( + string_generator, START_YEAR, YEARS_AMOUNT, ROW_PER_YEAR + ) diff --git a/tests/example_data/data_generator/consts.py b/tests/example_data/data_generator/consts.py new file mode 100644 index 0000000000000..2432d60b6de49 --- /dev/null +++ b/tests/example_data/data_generator/consts.py @@ -0,0 +1,69 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +US_STATES = [ + "AL", + "AK", + "AZ", + "AR", + "CA", + "CO", + "CT", + "DE", + "FL", + "GA", + "HI", + "ID", + "IL", + "IN", + "IA", + "KS", + "KY", + "LA", + "ME", + "MD", + "MA", + "MI", + "MN", + "MS", + "MO", + "MT", + "NE", + "NV", + "NH", + "NJ", + "NM", + "NY", + "NC", + "ND", + "OH", + "OK", + "OR", + "PA", + "RI", + "SC", + "SD", + "TN", + "TX", + "UT", + "VT", + "VA", + "WA", + "WV", + "WI", + "WY", + "other", +] diff --git a/tests/example_data/data_generator/string_generator.py b/tests/example_data/data_generator/string_generator.py new file mode 100644 index 0000000000000..103ceefcb52e1 --- /dev/null +++ b/tests/example_data/data_generator/string_generator.py @@ -0,0 +1,33 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
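BirthNamesGeneratorFactory lazily creates one shared instance, and set_instance lets a suite swap in its own factory before any data is generated. A hedged sketch of that override path (the class name and the smaller row counts are assumptions for illustration):

from tests.example_data.data_generator.birth_names.birth_names_generator import (
    BirthNamesGenerator,
)
from tests.example_data.data_generator.birth_names.birth_names_generator_factory import (
    BirthNamesGeneratorFactory,
)
from tests.example_data.data_generator.string_generator_factory import (
    StringGeneratorFactory,
)


class TinyBirthNamesGeneratorFactory(BirthNamesGeneratorFactory):
    def _make(self) -> BirthNamesGenerator:
        names = StringGeneratorFactory.make_lowercase_based(3, 6)
        # 2 years x 5 rows keeps the example data small for fast local runs
        return BirthNamesGenerator(names, 2010, 2, 5)


BirthNamesGeneratorFactory.set_instance(TinyBirthNamesGeneratorFactory())
generator = BirthNamesGeneratorFactory.make()  # now built by the tiny factory
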
+from random import choices, randint + + +class StringGenerator: + _seed_letters: str + _min_length: int + _max_length: int + + def __init__(self, seed_letters: str, min_length: int, max_length: int): + self._seed_letters = seed_letters + self._min_length = min_length + self._max_length = max_length + + def generate(self) -> str: + rv_string_length = randint(self._min_length, self._max_length) + randomized_letters = choices(self._seed_letters, k=rv_string_length) + return "".join(randomized_letters) diff --git a/tests/example_data/data_generator/string_generator_factory.py b/tests/example_data/data_generator/string_generator_factory.py new file mode 100644 index 0000000000000..2cad2f785a226 --- /dev/null +++ b/tests/example_data/data_generator/string_generator_factory.py @@ -0,0 +1,46 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import string + +from tests.example_data.data_generator.string_generator import StringGenerator + + +class StringGeneratorFactory: + @classmethod + def make( + cls, seed_letters: str, min_length: int, max_length: int + ) -> StringGenerator: + cls.__validate_arguments(seed_letters, min_length, max_length) + return StringGenerator(seed_letters, min_length, max_length) + + @classmethod + def make_lowercase_based(cls, min_length: int, max_length: int) -> StringGenerator: + return cls.make(string.ascii_lowercase, min_length, max_length) + + @classmethod + def make_ascii_letters_based( + cls, min_length: int, max_length: int + ) -> StringGenerator: + return cls.make(string.ascii_letters, min_length, max_length) + + @staticmethod + def __validate_arguments( + seed_letters: str, min_length: int, max_length: int + ) -> None: + assert seed_letters, "seed_letters is empty" + assert min_length > -1, "min_length is negative" + assert max_length > min_length, "max_length is not bigger then min_length" diff --git a/tests/example_data/data_generator/tests/__init__.py b/tests/example_data/data_generator/tests/__init__.py new file mode 100644 index 0000000000000..dc74f5a8fa252 --- /dev/null +++ b/tests/example_data/data_generator/tests/__init__.py @@ -0,0 +1,25 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/example_data/data_generator/tests/test_string_generator.py b/tests/example_data/data_generator/tests/test_string_generator.py new file mode 100644 index 0000000000000..65fdd18ce6b3f --- /dev/null +++ b/tests/example_data/data_generator/tests/test_string_generator.py @@ -0,0 +1,35 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from unittest.mock import Mock, patch + +from tests.example_data.data_generator.string_generator import StringGenerator + + +@patch("tests.example_data.data_generator.string_generator.choices") +@patch("tests.example_data.data_generator.string_generator.randint") +def test_string_generator(randint_mock: Mock, choices_mock: Mock): + letters = "abcdets" + min_len = 3 + max_len = 5 + randomized_string_len = 4 + string_generator = StringGenerator(letters, min_len, max_len) + randint_mock.return_value = randomized_string_len + choices_mock.return_value = ["t", "e", "s", "t"] + + assert string_generator.generate() == "test" + randint_mock.assert_called_once_with(min_len, max_len) + choices_mock.assert_called_with(letters, k=randomized_string_len) diff --git a/tests/example_data/data_loading/__init__.py b/tests/example_data/data_loading/__init__.py new file mode 100644 index 0000000000000..87f6e4d72b350 --- /dev/null +++ b/tests/example_data/data_loading/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
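Outside the mocked unit test above, StringGenerator simply samples k letters from the seed alphabet, with k drawn uniformly between the two bounds. A small usage sketch (the concrete output is random, so only the invariants are asserted):

from tests.example_data.data_generator.string_generator_factory import (
    StringGeneratorFactory,
)

name_generator = StringGeneratorFactory.make_lowercase_based(3, 10)
sample = name_generator.generate()
assert 3 <= len(sample) <= 10   # length bounded by the factory arguments
assert sample.islower()         # only ascii_lowercase letters are sampled
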
diff --git a/tests/example_data/data_loading/base_data_loader.py b/tests/example_data/data_loading/base_data_loader.py new file mode 100644 index 0000000000000..770150c14fabd --- /dev/null +++ b/tests/example_data/data_loading/base_data_loader.py @@ -0,0 +1,33 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from abc import ABC, abstractmethod +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from tests.common.example_data.data_loading.data_definitions.types import Table + + +class DataLoader(ABC): + @abstractmethod + def load_table(self, table: Table) -> None: + ... + + @abstractmethod + def remove_table(self, table_name: str) -> None: + ... diff --git a/tests/example_data/data_loading/data_definitions/__init__.py b/tests/example_data/data_loading/data_definitions/__init__.py new file mode 100644 index 0000000000000..87f6e4d72b350 --- /dev/null +++ b/tests/example_data/data_loading/data_definitions/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/example_data/data_loading/data_definitions/birth_names.py b/tests/example_data/data_loading/data_definitions/birth_names.py new file mode 100644 index 0000000000000..64c1a50d49bd4 --- /dev/null +++ b/tests/example_data/data_loading/data_definitions/birth_names.py @@ -0,0 +1,64 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. +from sqlalchemy import DateTime, Integer, String + +from tests.consts.birth_names import ( + DS, + GENDER, + NAME, + NUM, + NUM_BOYS, + NUM_GIRLS, + STATE, + TABLE_NAME, +) +from tests.example_data.data_loading.data_definitions.types import ( + TableMetaData, + TableMetaDataFactory, +) + +BIRTH_NAMES_COLUMNS = { + DS: DateTime, + GENDER: String(16), + NAME: String(255), + NUM: Integer, + STATE: String(10), + NUM_BOYS: Integer, + NUM_GIRLS: Integer, +} + +BIRTH_NAMES_COLUMNS_WITHOUT_DATETIME = { + DS: String(255), + GENDER: String(16), + NAME: String(255), + NUM: Integer, + STATE: String(10), + NUM_BOYS: Integer, + NUM_GIRLS: Integer, +} + + +class BirthNamesMetaDataFactory(TableMetaDataFactory): + _datetime_type_support: bool + + def __init__(self, datetime_type_support: bool = True): + self._datetime_type_support = datetime_type_support + + def make(self) -> TableMetaData: + if self._datetime_type_support: + return TableMetaData(TABLE_NAME, BIRTH_NAMES_COLUMNS.copy()) + return TableMetaData(TABLE_NAME, BIRTH_NAMES_COLUMNS_WITHOUT_DATETIME.copy()) diff --git a/tests/example_data/data_loading/data_definitions/types.py b/tests/example_data/data_loading/data_definitions/types.py new file mode 100644 index 0000000000000..e393019e0192f --- /dev/null +++ b/tests/example_data/data_loading/data_definitions/types.py @@ -0,0 +1,53 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from abc import ABC, abstractmethod +from dataclasses import dataclass +from typing import Any, Dict, Iterable, Optional + +from sqlalchemy.types import TypeEngine + + +@dataclass +class TableMetaData: + table_name: str + types: Optional[Dict[str, TypeEngine]] + + +@dataclass +class Table: + table_name: str + table_metadata: TableMetaData + data: Iterable[Dict[Any, Any]] + + +class TableMetaDataFactory(ABC): + @abstractmethod + def make(self) -> TableMetaData: + ... 
+ + def make_table(self, data: Iterable[Dict[Any, Any]]) -> Table: + metadata = self.make() + return Table(metadata.table_name, metadata, data) diff --git a/tests/example_data/data_loading/pandas/__init__.py b/tests/example_data/data_loading/pandas/__init__.py new file mode 100644 index 0000000000000..87f6e4d72b350 --- /dev/null +++ b/tests/example_data/data_loading/pandas/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/example_data/data_loading/pandas/pandas_data_loader.py b/tests/example_data/data_loading/pandas/pandas_data_loader.py new file mode 100644 index 0000000000000..00f3f775cafb3 --- /dev/null +++ b/tests/example_data/data_loading/pandas/pandas_data_loader.py @@ -0,0 +1,84 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
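make_table pairs a factory's metadata with any iterable of row dicts, keeping table definitions decoupled from the data that fills them. A sketch wiring the birth-names pieces from this patch together by hand (the fixtures above normally do this):

from tests.example_data.data_generator.birth_names.birth_names_generator_factory import (
    BirthNamesGeneratorFactory,
)
from tests.example_data.data_loading.data_definitions.birth_names import (
    BirthNamesMetaDataFactory,
)
from tests.example_data.data_loading.data_definitions.types import Table

generator = BirthNamesGeneratorFactory.make()
table: Table = BirthNamesMetaDataFactory(datetime_type_support=True).make_table(
    data=generator.generate()
)
# table.table_metadata.types maps column names to SQLAlchemy types, while
# table.data is still a lazy iterable of generated row dicts.
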
+from __future__ import annotations + +from abc import ABC, abstractmethod +from typing import Dict, Optional, TYPE_CHECKING + +from pandas import DataFrame +from sqlalchemy.inspection import inspect + +from tests.common.logger_utils import log +from tests.example_data.data_loading.base_data_loader import DataLoader + +if TYPE_CHECKING: + from sqlalchemy.engine import Engine + + from tests.example_data.data_loading.data_definitions.types import Table + from tests.example_data.data_loading.pandas.pands_data_loading_conf import ( + PandasLoaderConfigurations, + ) + + +@log +class PandasDataLoader(DataLoader): + _db_engine: Engine + _configurations: PandasLoaderConfigurations + _table_to_df_convertor: TableToDfConvertor + + def __init__( + self, + db_engine: Engine, + config: PandasLoaderConfigurations, + table_to_df_convertor: TableToDfConvertor, + ) -> None: + self._db_engine = db_engine + self._configurations = config + self._table_to_df_convertor = table_to_df_convertor + + def load_table(self, table: Table) -> None: + df = self._table_to_df_convertor.convert(table) + df.to_sql( + table.table_name, + self._db_engine, + if_exists=self._configurations.if_exists, + chunksize=self._configurations.chunksize, + index=self._configurations.index, + dtype=self._take_data_types(table), + method=self._configurations.method, + schema=self._detect_schema_name(), + ) + + def _detect_schema_name(self) -> Optional[str]: + return inspect(self._db_engine).default_schema_name + + def _take_data_types(self, table: Table) -> Optional[Dict[str, str]]: + metadata_table = table.table_metadata + if metadata_table: + types = metadata_table.types + if types: + return types + return None + + def remove_table(self, table_name: str) -> None: + self._db_engine.execute(f"DROP TABLE IF EXISTS {table_name}") + + +class TableToDfConvertor(ABC): + @abstractmethod + def convert(self, table: Table) -> DataFrame: + ... diff --git a/tests/example_data/data_loading/pandas/pands_data_loading_conf.py b/tests/example_data/data_loading/pandas/pands_data_loading_conf.py new file mode 100644 index 0000000000000..1c43adc9316e9 --- /dev/null +++ b/tests/example_data/data_loading/pandas/pands_data_loading_conf.py @@ -0,0 +1,64 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
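PandasDataLoader delegates the writing to DataFrame.to_sql, taking if_exists, chunksize, index and method from the configuration object and the column dtypes from the table metadata. A sketch of assembling a loader by hand; the in-memory SQLite engine is an assumption for illustration:

from sqlalchemy import create_engine

from tests.example_data.data_loading.pandas.pandas_data_loader import PandasDataLoader
from tests.example_data.data_loading.pandas.pands_data_loading_conf import (
    PandasLoaderConfigurations,
)
from tests.example_data.data_loading.pandas.table_df_convertor import (
    TableToDfConvertorImpl,
)

config = PandasLoaderConfigurations.make_default()
convertor = TableToDfConvertorImpl(not config.support_datetime_type, config.strftime)
loader = PandasDataLoader(create_engine("sqlite://"), config, convertor)
# loader.load_table(table) would now issue df.to_sql(...) with the settings above.
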
+from __future__ import annotations + +from typing import Any, Dict + +default_pandas_data_loader_config = { + "if_exists": "replace", + "chunksize": 500, + "index": False, + "method": "multi", + "strftime": "%Y-%m-%d %H:%M:%S", + "support_datetime_type": False, +} + + +class PandasLoaderConfigurations: + if_exists: str + chunksize: int + index: bool + method: str + strftime: str + support_datetime_type: bool + + def __init__( + self, + *, + if_exists: str, + chunksize: int, + index: bool, + method: str, + strftime: str, + support_datetime_type: bool, + ): + self.if_exists = if_exists + self.chunksize = chunksize + self.index = index + self.method = method + self.strftime = strftime + self.support_datetime_type = support_datetime_type + + @classmethod + def make_from_dict(cls, _dict: Dict[str, Any]) -> PandasLoaderConfigurations: + copy_dict = default_pandas_data_loader_config.copy() + copy_dict.update(_dict) + return PandasLoaderConfigurations(**copy_dict) # type: ignore + + @classmethod + def make_default(cls) -> PandasLoaderConfigurations: + return cls.make_from_dict({}) diff --git a/tests/example_data/data_loading/pandas/table_df_convertor.py b/tests/example_data/data_loading/pandas/table_df_convertor.py new file mode 100644 index 0000000000000..e801c8464e9e8 --- /dev/null +++ b/tests/example_data/data_loading/pandas/table_df_convertor.py @@ -0,0 +1,48 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from typing import Optional, TYPE_CHECKING + +from pandas import DataFrame + +from tests.common.logger_utils import log +from tests.example_data.data_loading.pandas.pandas_data_loader import TableToDfConvertor + +if TYPE_CHECKING: + from tests.example_data.data_loading.data_definitions.types import Table + + +@log +class TableToDfConvertorImpl(TableToDfConvertor): + convert_datetime_to_str: bool + _time_format: Optional[str] + + def __init__( + self, convert_ds_to_datetime: bool, time_format: Optional[str] = None + ) -> None: + self.convert_datetime_to_str = convert_ds_to_datetime + self._time_format = time_format + + def convert(self, table: Table) -> DataFrame: + df_rv = DataFrame(table.data) + if self._should_convert_datetime_to_str(): + df_rv.ds = df_rv.ds.dt.strftime(self._time_format) + return df_rv + + def _should_convert_datetime_to_str(self) -> bool: + return self.convert_datetime_to_str and self._time_format is not None diff --git a/tests/fixtures/__init__.py b/tests/fixtures/__init__.py new file mode 100644 index 0000000000000..18d5e2f245a23 --- /dev/null +++ b/tests/fixtures/__init__.py @@ -0,0 +1,18 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +pytest_plugins = "tests.fixtures.birth_names" diff --git a/tests/fixtures/birth_names.py b/tests/fixtures/birth_names.py new file mode 100644 index 0000000000000..5a0135b456f25 --- /dev/null +++ b/tests/fixtures/birth_names.py @@ -0,0 +1,52 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from typing import Callable, TYPE_CHECKING + +from pytest import fixture + +from tests.example_data.data_generator.birth_names.birth_names_generator_factory import ( + BirthNamesGeneratorFactory, +) +from tests.example_data.data_loading.data_definitions.birth_names import ( + BirthNamesMetaDataFactory, +) + +if TYPE_CHECKING: + from tests.example_data.data_generator.birth_names.birth_names_generator import ( + BirthNamesGenerator, + ) + from tests.example_data.data_loading.data_definitions.types import Table + + +@fixture(scope="session") +def birth_names_data_generator() -> BirthNamesGenerator: + return BirthNamesGeneratorFactory.make() + + +@fixture(scope="session") +def birth_names_table_factory( + birth_names_data_generator: BirthNamesGenerator, + support_datetime_type: bool, +) -> Callable[[], Table]: + def _birth_names_table_factory() -> Table: + return BirthNamesMetaDataFactory(support_datetime_type).make_table( + data=birth_names_data_generator.generate() + ) + + return _birth_names_table_factory diff --git a/tests/integration_tests/__init__.py b/tests/integration_tests/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/access_tests.py b/tests/integration_tests/access_tests.py new file mode 100644 index 0000000000000..38fd10524019f --- /dev/null +++ b/tests/integration_tests/access_tests.py @@ -0,0 +1,598 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# isort:skip_file +"""Unit tests for Superset""" +import json +import unittest +from typing import Optional +from unittest import mock + +import pytest +from flask.ctx import AppContext +from pytest_mock import MockFixture +from sqlalchemy import inspect + +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) +from tests.integration_tests.fixtures.world_bank_dashboard import ( + load_world_bank_dashboard_with_slices, + load_world_bank_data, +) +from tests.integration_tests.fixtures.energy_dashboard import ( + load_energy_table_with_slice, + load_energy_table_data, +) +from tests.integration_tests.test_app import app # isort:skip +from superset import db, security_manager +from superset.connectors.sqla.models import SqlaTable +from superset.models import core as models +from superset.models.datasource_access_request import DatasourceAccessRequest +from superset.utils.core import get_user_id, get_username, override_user +from superset.utils.database import get_example_database + +from .base_tests import SupersetTestCase + +ROLE_TABLES_PERM_DATA = { + "role_name": "override_me", + "database": [ + { + "datasource_type": "table", + "name": "examples", + "schema": [{"name": "", "datasources": ["birth_names"]}], + } + ], +} + +ROLE_ALL_PERM_DATA = { + "role_name": "override_me", + "database": [ + { + "datasource_type": "table", + "name": "examples", + "schema": [{"name": "", "datasources": ["birth_names"]}], + }, + { + "datasource_type": "druid", + "name": "druid_test", + "schema": [{"name": "", "datasources": ["druid_ds_1", "druid_ds_2"]}], + }, + ], +} + +EXTEND_ROLE_REQUEST = ( + "/superset/approve?datasource_type={}&datasource_id={}&" + "created_by={}&role_to_extend={}" +) +GRANT_ROLE_REQUEST = ( + "/superset/approve?datasource_type={}&datasource_id={}&" + "created_by={}&role_to_grant={}" +) +TEST_ROLE_1 = "test_role1" +TEST_ROLE_2 = "test_role2" +DB_ACCESS_ROLE = "db_access_role" +SCHEMA_ACCESS_ROLE = "schema_access_role" + + +def create_access_request(session, ds_type, ds_name, role_name, 
username): + # TODO: generalize datasource names + if ds_type == "table": + ds = session.query(SqlaTable).filter(SqlaTable.table_name == ds_name).first() + else: + # This function will only work for ds_type == "table" + raise NotImplementedError() + ds_perm_view = security_manager.find_permission_view_menu( + "datasource_access", ds.perm + ) + security_manager.add_permission_role( + security_manager.find_role(role_name), ds_perm_view + ) + access_request = DatasourceAccessRequest( + datasource_id=ds.id, + datasource_type=ds_type, + created_by_fk=security_manager.find_user(username=username).id, + ) + session.add(access_request) + session.commit() + return access_request + + +class TestRequestAccess(SupersetTestCase): + @classmethod + def setUpClass(cls): + with app.app_context(): + security_manager.add_role("override_me") + security_manager.add_role(TEST_ROLE_1) + security_manager.add_role(TEST_ROLE_2) + security_manager.add_role(DB_ACCESS_ROLE) + security_manager.add_role(SCHEMA_ACCESS_ROLE) + db.session.commit() + + @classmethod + def tearDownClass(cls): + with app.app_context(): + override_me = security_manager.find_role("override_me") + db.session.delete(override_me) + db.session.delete(security_manager.find_role(TEST_ROLE_1)) + db.session.delete(security_manager.find_role(TEST_ROLE_2)) + db.session.delete(security_manager.find_role(DB_ACCESS_ROLE)) + db.session.delete(security_manager.find_role(SCHEMA_ACCESS_ROLE)) + db.session.commit() + + def setUp(self): + self.login("admin") + + def tearDown(self): + self.logout() + override_me = security_manager.find_role("override_me") + override_me.permissions = [] + db.session.commit() + db.session.close() + + def test_override_role_permissions_is_admin_only(self): + self.logout() + self.login("alpha") + response = self.client.post( + "/superset/override_role_permissions/", + data=json.dumps(ROLE_TABLES_PERM_DATA), + content_type="application/json", + follow_redirects=True, + ) + self.assertNotEqual(405, response.status_code) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_override_role_permissions_1_table(self): + database = get_example_database() + with database.get_sqla_engine_with_context() as engine: + schema = inspect(engine).default_schema_name + + perm_data = ROLE_TABLES_PERM_DATA.copy() + perm_data["database"][0]["schema"][0]["name"] = schema + + response = self.client.post( + "/superset/override_role_permissions/", + data=json.dumps(perm_data), + content_type="application/json", + ) + self.assertEqual(201, response.status_code) + + updated_override_me = security_manager.find_role("override_me") + self.assertEqual(1, len(updated_override_me.permissions)) + birth_names = self.get_table(name="birth_names") + self.assertEqual( + birth_names.perm, updated_override_me.permissions[0].view_menu.name + ) + self.assertEqual( + "datasource_access", updated_override_me.permissions[0].permission.name + ) + + @pytest.mark.usefixtures( + "load_energy_table_with_slice", "load_birth_names_dashboard_with_slices" + ) + def test_override_role_permissions_drops_absent_perms(self): + database = get_example_database() + with database.get_sqla_engine_with_context() as engine: + schema = inspect(engine).default_schema_name + + override_me = security_manager.find_role("override_me") + override_me.permissions.append( + security_manager.find_permission_view_menu( + view_menu_name=self.get_table(name="energy_usage").perm, + permission_name="datasource_access", + ) + ) + db.session.flush() + + perm_data = 
ROLE_TABLES_PERM_DATA.copy() + perm_data["database"][0]["schema"][0]["name"] = schema + + response = self.client.post( + "/superset/override_role_permissions/", + data=json.dumps(perm_data), + content_type="application/json", + ) + self.assertEqual(201, response.status_code) + updated_override_me = security_manager.find_role("override_me") + self.assertEqual(1, len(updated_override_me.permissions)) + birth_names = self.get_table(name="birth_names") + self.assertEqual( + birth_names.perm, updated_override_me.permissions[0].view_menu.name + ) + self.assertEqual( + "datasource_access", updated_override_me.permissions[0].permission.name + ) + + def test_clean_requests_after_role_extend(self): + session = db.session + + # Case 1. Gamma and gamma2 requested test_role1 on energy_usage access + # Gamma already has role test_role1 + # Extend test_role1 with energy_usage access for gamma2 + # Check if access request for gamma at energy_usage was deleted + + # gamma2 and gamma request table_role on energy usage + if app.config["ENABLE_ACCESS_REQUEST"]: + access_request1 = create_access_request( + session, "table", "random_time_series", TEST_ROLE_1, "gamma2" + ) + ds_1_id = access_request1.datasource_id + create_access_request( + session, "table", "random_time_series", TEST_ROLE_1, "gamma" + ) + access_requests = self.get_access_requests("gamma", "table", ds_1_id) + self.assertTrue(access_requests) + # gamma gets test_role1 + self.get_resp( + GRANT_ROLE_REQUEST.format("table", ds_1_id, "gamma", TEST_ROLE_1) + ) + # extend test_role1 with access on energy usage + self.client.get( + EXTEND_ROLE_REQUEST.format("table", ds_1_id, "gamma2", TEST_ROLE_1) + ) + access_requests = self.get_access_requests("gamma", "table", ds_1_id) + self.assertFalse(access_requests) + + gamma_user = security_manager.find_user(username="gamma") + gamma_user.roles.remove(security_manager.find_role("test_role1")) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_clean_requests_after_alpha_grant(self): + session = db.session + + # Case 2. Two access requests from gamma and gamma2 + # Gamma becomes alpha, gamma2 gets granted + # Check if request by gamma has been deleted + + access_request1 = create_access_request( + session, "table", "birth_names", TEST_ROLE_1, "gamma" + ) + create_access_request(session, "table", "birth_names", TEST_ROLE_2, "gamma2") + ds_1_id = access_request1.datasource_id + # gamma becomes alpha + alpha_role = security_manager.find_role("Alpha") + gamma_user = security_manager.find_user(username="gamma") + gamma_user.roles.append(alpha_role) + session.commit() + access_requests = self.get_access_requests("gamma", "table", ds_1_id) + self.assertTrue(access_requests) + self.client.post( + EXTEND_ROLE_REQUEST.format("table", ds_1_id, "gamma2", TEST_ROLE_2) + ) + access_requests = self.get_access_requests("gamma", "table", ds_1_id) + self.assertFalse(access_requests) + + gamma_user = security_manager.find_user(username="gamma") + gamma_user.roles.remove(security_manager.find_role("Alpha")) + session.commit() + + @pytest.mark.usefixtures("load_energy_table_with_slice") + def test_clean_requests_after_db_grant(self): + session = db.session + + # Case 3. 
Two access requests from gamma and gamma2 + # Gamma gets database access, gamma2 access request granted + # Check if request by gamma has been deleted + + gamma_user = security_manager.find_user(username="gamma") + access_request1 = create_access_request( + session, "table", "energy_usage", TEST_ROLE_1, "gamma" + ) + create_access_request(session, "table", "energy_usage", TEST_ROLE_2, "gamma2") + ds_1_id = access_request1.datasource_id + # gamma gets granted database access + database = session.query(models.Database).first() + + security_manager.add_permission_view_menu("database_access", database.perm) + ds_perm_view = security_manager.find_permission_view_menu( + "database_access", database.perm + ) + security_manager.add_permission_role( + security_manager.find_role(DB_ACCESS_ROLE), ds_perm_view + ) + gamma_user.roles.append(security_manager.find_role(DB_ACCESS_ROLE)) + session.commit() + access_requests = self.get_access_requests("gamma", "table", ds_1_id) + self.assertTrue(access_requests) + # gamma2 request gets fulfilled + self.client.post( + EXTEND_ROLE_REQUEST.format("table", ds_1_id, "gamma2", TEST_ROLE_2) + ) + access_requests = self.get_access_requests("gamma", "table", ds_1_id) + + self.assertFalse(access_requests) + gamma_user = security_manager.find_user(username="gamma") + gamma_user.roles.remove(security_manager.find_role(DB_ACCESS_ROLE)) + session.commit() + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def test_clean_requests_after_schema_grant(self): + session = db.session + + # Case 4. Two access requests from gamma and gamma2 + # Gamma gets schema access, gamma2 access request granted + # Check if request by gamma has been deleted + + gamma_user = security_manager.find_user(username="gamma") + access_request1 = create_access_request( + session, "table", "wb_health_population", TEST_ROLE_1, "gamma" + ) + create_access_request( + session, "table", "wb_health_population", TEST_ROLE_2, "gamma2" + ) + ds_1_id = access_request1.datasource_id + ds = ( + session.query(SqlaTable) + .filter_by(table_name="wb_health_population") + .first() + ) + original_schema = ds.schema + + ds.schema = "temp_schema" + security_manager.add_permission_view_menu("schema_access", ds.schema_perm) + schema_perm_view = security_manager.find_permission_view_menu( + "schema_access", ds.schema_perm + ) + security_manager.add_permission_role( + security_manager.find_role(SCHEMA_ACCESS_ROLE), schema_perm_view + ) + gamma_user.roles.append(security_manager.find_role(SCHEMA_ACCESS_ROLE)) + session.commit() + # gamma2 request gets fulfilled + self.client.post( + EXTEND_ROLE_REQUEST.format("table", ds_1_id, "gamma2", TEST_ROLE_2) + ) + access_requests = self.get_access_requests("gamma", "table", ds_1_id) + self.assertFalse(access_requests) + gamma_user = security_manager.find_user(username="gamma") + gamma_user.roles.remove(security_manager.find_role(SCHEMA_ACCESS_ROLE)) + + ds.schema = original_schema + session.commit() + + @mock.patch("superset.utils.core.send_mime_email") + def test_approve(self, mock_send_mime): + if app.config["ENABLE_ACCESS_REQUEST"]: + session = db.session + TEST_ROLE_NAME = "table_role" + security_manager.add_role(TEST_ROLE_NAME) + + # Case 1. Grant new role to the user. + + access_request1 = create_access_request( + session, "table", "unicode_test", TEST_ROLE_NAME, "gamma" + ) + ds_1_id = access_request1.datasource_id + self.get_resp( + GRANT_ROLE_REQUEST.format("table", ds_1_id, "gamma", TEST_ROLE_NAME) + ) + # Test email content. 
+ self.assertTrue(mock_send_mime.called) + call_args = mock_send_mime.call_args[0] + self.assertEqual( + [ + security_manager.find_user(username="gamma").email, + security_manager.find_user(username="admin").email, + ], + call_args[1], + ) + self.assertEqual( + "[Superset] Access to the datasource {} was granted".format( + self.get_table_by_id(ds_1_id).full_name + ), + call_args[2]["Subject"], + ) + self.assertIn(TEST_ROLE_NAME, call_args[2].as_string()) + self.assertIn("unicode_test", call_args[2].as_string()) + + access_requests = self.get_access_requests("gamma", "table", ds_1_id) + # request was removed + self.assertFalse(access_requests) + # user was granted table_role + user_roles = [r.name for r in security_manager.find_user("gamma").roles] + self.assertIn(TEST_ROLE_NAME, user_roles) + + # Case 2. Extend the role to have access to the table + + access_request2 = create_access_request( + session, "table", "energy_usage", TEST_ROLE_NAME, "gamma" + ) + ds_2_id = access_request2.datasource_id + energy_usage_perm = access_request2.datasource.perm + + self.client.get( + EXTEND_ROLE_REQUEST.format( + "table", access_request2.datasource_id, "gamma", TEST_ROLE_NAME + ) + ) + access_requests = self.get_access_requests("gamma", "table", ds_2_id) + + # Test email content. + self.assertTrue(mock_send_mime.called) + call_args = mock_send_mime.call_args[0] + self.assertEqual( + [ + security_manager.find_user(username="gamma").email, + security_manager.find_user(username="admin").email, + ], + call_args[1], + ) + self.assertEqual( + "[Superset] Access to the datasource {} was granted".format( + self.get_table_by_id(ds_2_id).full_name + ), + call_args[2]["Subject"], + ) + self.assertIn(TEST_ROLE_NAME, call_args[2].as_string()) + self.assertIn("energy_usage", call_args[2].as_string()) + + # request was removed + self.assertFalse(access_requests) + # table_role was extended to grant access to the energy_usage table/ + perm_view = security_manager.find_permission_view_menu( + "datasource_access", energy_usage_perm + ) + TEST_ROLE = security_manager.find_role(TEST_ROLE_NAME) + self.assertIn(perm_view, TEST_ROLE.permissions) + + def test_request_access(self): + if app.config["ENABLE_ACCESS_REQUEST"]: + session = db.session + self.logout() + self.login(username="gamma") + gamma_user = security_manager.find_user(username="gamma") + security_manager.add_role("dummy_role") + gamma_user.roles.append(security_manager.find_role("dummy_role")) + session.commit() + + ACCESS_REQUEST = ( + "/superset/request_access?" + "datasource_type={}&" + "datasource_id={}&" + "action={}&" + ) + ROLE_GRANT_LINK = ( + 'Grant {} Role' + ) + + # Request table access, there are no roles have this table. + + table1 = ( + session.query(SqlaTable) + .filter_by(table_name="random_time_series") + .first() + ) + table_1_id = table1.id + + # request access to the table + resp = self.get_resp(ACCESS_REQUEST.format("table", table_1_id, "go")) + assert "Access was requested" in resp + access_request1 = self.get_access_requests("gamma", "table", table_1_id) + assert access_request1 is not None + + # Request access, roles exist that contains the table. 
+ # add table to the existing roles + table3 = ( + session.query(SqlaTable).filter_by(table_name="energy_usage").first() + ) + table_3_id = table3.id + table3_perm = table3.perm + + security_manager.add_role("energy_usage_role") + alpha_role = security_manager.find_role("Alpha") + security_manager.add_permission_role( + alpha_role, + security_manager.find_permission_view_menu( + "datasource_access", table3_perm + ), + ) + security_manager.add_permission_role( + security_manager.find_role("energy_usage_role"), + security_manager.find_permission_view_menu( + "datasource_access", table3_perm + ), + ) + session.commit() + + self.get_resp(ACCESS_REQUEST.format("table", table_3_id, "go")) + access_request3 = self.get_access_requests("gamma", "table", table_3_id) + approve_link_3 = ROLE_GRANT_LINK.format( + "table", table_3_id, "gamma", "energy_usage_role", "energy_usage_role" + ) + self.assertEqual( + access_request3.roles_with_datasource, + "
<ul><li>{}</li></ul>
".format(approve_link_3), + ) + + # cleanup + gamma_user = security_manager.find_user(username="gamma") + gamma_user.roles.remove(security_manager.find_role("dummy_role")) + session.commit() + + +@pytest.mark.parametrize( + "username,user_id", + [ + (None, None), + ("alpha", 5), + ("gamma", 2), + ], +) +def test_get_user_id( + app_context: AppContext, + mocker: MockFixture, + username: Optional[str], + user_id: Optional[int], +) -> None: + mock_g = mocker.patch("superset.utils.core.g", spec={}) + mock_g.user = security_manager.find_user(username) + assert get_user_id() == user_id + + +@pytest.mark.parametrize( + "username", + [ + None, + "alpha", + "gamma", + ], +) +def test_get_username( + app_context: AppContext, + mocker: MockFixture, + username: Optional[str], +) -> None: + mock_g = mocker.patch("superset.utils.core.g", spec={}) + mock_g.user = security_manager.find_user(username) + assert get_username() == username + + +@pytest.mark.parametrize("username", [None, "alpha", "gamma"]) +@pytest.mark.parametrize("force", [False, True]) +def test_override_user( + app_context: AppContext, + mocker: MockFixture, + username: str, + force: bool, +) -> None: + mock_g = mocker.patch("superset.utils.core.g", spec={}) + admin = security_manager.find_user(username="admin") + user = security_manager.find_user(username) + + with override_user(user, force): + assert mock_g.user == user + + assert not hasattr(mock_g, "user") + + mock_g.user = None + + with override_user(user, force): + assert mock_g.user == user + + assert mock_g.user is None + + mock_g.user = admin + + with override_user(user, force): + assert mock_g.user == user if force else admin + + assert mock_g.user == admin + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/integration_tests/advanced_data_type/__init__.py b/tests/integration_tests/advanced_data_type/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/advanced_data_type/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/advanced_data_type/api_tests.py b/tests/integration_tests/advanced_data_type/api_tests.py new file mode 100644 index 0000000000000..5bfe308e1683b --- /dev/null +++ b/tests/integration_tests/advanced_data_type/api_tests.py @@ -0,0 +1,135 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# isort:skip_file +"""Unit tests for Superset""" +import json +import prison + +from superset.utils.core import get_example_default_schema + +from tests.integration_tests.utils.get_dashboards import get_dashboards_ids +from unittest import mock +from sqlalchemy import Column +from typing import Any, List +from superset.advanced_data_type.types import ( + AdvancedDataType, + AdvancedDataTypeRequest, + AdvancedDataTypeResponse, +) +from superset.utils.core import FilterOperator, FilterStringOperators + + +target_resp: AdvancedDataTypeResponse = { + "values": [], + "error_message": "", + "display_value": "", + "valid_filter_operators": [ + FilterStringOperators.EQUALS, + FilterStringOperators.GREATER_THAN_OR_EQUAL, + FilterStringOperators.GREATER_THAN, + FilterStringOperators.IN, + FilterStringOperators.LESS_THAN, + FilterStringOperators.LESS_THAN_OR_EQUAL, + ], +} + + +def translation_func(req: AdvancedDataTypeRequest) -> AdvancedDataTypeResponse: + return target_resp + + +def translate_filter_func(col: Column, op: FilterOperator, values: List[Any]): + pass + + +test_type: AdvancedDataType = AdvancedDataType( + verbose_name="type", + valid_data_types=["int"], + translate_type=translation_func, + description="", + translate_filter=translate_filter_func, +) + +CHART_DATA_URI = "api/v1/chart/advanced_data_type" +CHARTS_FIXTURE_COUNT = 10 + + +@mock.patch( + "superset.advanced_data_type.api.ADVANCED_DATA_TYPES", + {"type": 1}, +) +def test_types_type_request(test_client, login_as_admin): + """ + Advanced Data Type API: Test to see if the API call returns all the valid advanced data types + """ + uri = f"api/v1/advanced_data_type/types" + response_value = test_client.get(uri) + data = json.loads(response_value.data.decode("utf-8")) + assert response_value.status_code == 200 + assert data == {"result": ["type"]} + + +def test_types_convert_bad_request_no_vals(test_client, login_as_admin): + """ + Advanced Data Type API: Test request to see if it behaves as expected when no values are passed + """ + arguments = {"type": "type", "values": []} + uri = f"api/v1/advanced_data_type/convert?q={prison.dumps(arguments)}" + response_value = test_client.get(uri) + assert response_value.status_code == 400 + + +def test_types_convert_bad_request_no_type(test_client, login_as_admin): + """ + Advanced Data Type API: Test request to see if it behaves as expected when no type is passed + """ + arguments = {"type": "", "values": [1]} + uri = f"api/v1/advanced_data_type/convert?q={prison.dumps(arguments)}" + response_value = test_client.get(uri) + assert response_value.status_code == 400 + + +@mock.patch( + "superset.advanced_data_type.api.ADVANCED_DATA_TYPES", + {"type": 1}, +) +def test_types_convert_bad_request_type_not_found(test_client, login_as_admin): + """ + Advanced Data Type API: Test request to see if it behaves as expected when passed in type is + not found/not valid + """ + arguments = {"type": "not_found", "values": [1]} + uri = f"api/v1/advanced_data_type/convert?q={prison.dumps(arguments)}" + response_value = test_client.get(uri) + assert response_value.status_code == 400 + + +@mock.patch( + 
"superset.advanced_data_type.api.ADVANCED_DATA_TYPES", + {"type": test_type}, +) +def test_types_convert_request(test_client, login_as_admin): + """ + Advanced Data Type API: Test request to see if it behaves as expected when a valid type + and valid values are passed in + """ + arguments = {"type": "type", "values": [1]} + uri = f"api/v1/advanced_data_type/convert?q={prison.dumps(arguments)}" + response_value = test_client.get(uri) + assert response_value.status_code == 200 + data = json.loads(response_value.data.decode("utf-8")) + assert data == {"result": target_resp} diff --git a/tests/integration_tests/annotation_layers/__init__.py b/tests/integration_tests/annotation_layers/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/annotation_layers/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/annotation_layers/api_tests.py b/tests/integration_tests/annotation_layers/api_tests.py new file mode 100644 index 0000000000000..61f6b2ff67829 --- /dev/null +++ b/tests/integration_tests/annotation_layers/api_tests.py @@ -0,0 +1,749 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# isort:skip_file +"""Unit tests for Superset""" +import json + +import pytest +import prison +from sqlalchemy.sql import func + +import tests.integration_tests.test_app +from superset import db +from superset.models.annotations import Annotation, AnnotationLayer + +from tests.integration_tests.base_tests import SupersetTestCase +from tests.integration_tests.annotation_layers.fixtures import ( + create_annotation_layers, + get_end_dttm, + get_start_dttm, +) +from tests.unit_tests.annotation_layers.fixtures import ( + START_STR, + END_STR, +) + +ANNOTATION_LAYERS_COUNT = 10 +ANNOTATIONS_COUNT = 5 + + +class TestAnnotationLayerApi(SupersetTestCase): + @staticmethod + def get_layer_with_annotation() -> AnnotationLayer: + return ( + db.session.query(AnnotationLayer) + .filter(AnnotationLayer.name == "layer_with_annotations") + .one_or_none() + ) + + @pytest.mark.usefixtures("create_annotation_layers") + def test_get_annotation_layer(self): + """ + Annotation Api: Test get annotation layer + """ + annotation_layer = ( + db.session.query(AnnotationLayer) + .filter(AnnotationLayer.name == "name1") + .first() + ) + + self.login(username="admin") + uri = f"api/v1/annotation_layer/{annotation_layer.id}" + rv = self.get_assert_metric(uri, "get") + assert rv.status_code == 200 + + expected_result = { + "id": annotation_layer.id, + "name": "name1", + "descr": "descr1", + } + data = json.loads(rv.data.decode("utf-8")) + assert data["result"] == expected_result + + def test_info_annotation(self): + """ + Annotation API: Test info + """ + self.login(username="admin") + uri = "api/v1/annotation_layer/_info" + rv = self.get_assert_metric(uri, "info") + assert rv.status_code == 200 + + def test_info_security_query(self): + """ + Annotation API: Test info security + """ + self.login(username="admin") + params = {"keys": ["permissions"]} + uri = f"api/v1/annotation_layer/_info?q={prison.dumps(params)}" + rv = self.get_assert_metric(uri, "info") + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 200 + assert "can_read" in data["permissions"] + assert "can_write" in data["permissions"] + assert len(data["permissions"]) == 2 + + @pytest.mark.usefixtures("create_annotation_layers") + def test_get_annotation_layer_not_found(self): + """ + Annotation Api: Test get annotation layer not found + """ + max_id = db.session.query(func.max(AnnotationLayer.id)).scalar() + self.login(username="admin") + uri = f"api/v1/annotation_layer/{max_id + 1}" + rv = self.get_assert_metric(uri, "get") + assert rv.status_code == 404 + + @pytest.mark.usefixtures("create_annotation_layers") + def test_get_list_annotation_layer(self): + """ + Annotation Api: Test get list annotation layers + """ + self.login(username="admin") + uri = "api/v1/annotation_layer/" + rv = self.get_assert_metric(uri, "get_list") + + expected_fields = [ + "name", + "descr", + "created_by", + "created_on", + "changed_by", + "changed_on_delta_humanized", + "changed_on", + ] + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == ANNOTATION_LAYERS_COUNT + for expected_field in expected_fields: + assert expected_field in data["result"][0] + + @pytest.mark.usefixtures("create_annotation_layers") + def test_get_list_annotation_layer_sorting(self): + """ + Annotation Api: Test sorting on get list annotation layers + """ + self.login(username="admin") + uri = "api/v1/annotation_layer/" + + order_columns = [ + "name", + "descr", + "created_by.first_name", + "changed_by.first_name", + "changed_on", + 
"changed_on_delta_humanized", + "created_on", + ] + + for order_column in order_columns: + arguments = {"order_column": order_column, "order_direction": "asc"} + uri = f"api/v1/annotation_layer/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + assert rv.status_code == 200 + + @pytest.mark.usefixtures("create_annotation_layers") + def test_get_list_annotation_layer_filter(self): + """ + Annotation Api: Test filters on get list annotation layers + """ + self.login(username="admin") + arguments = { + "columns": ["name", "descr"], + "filters": [ + {"col": "name", "opr": "annotation_layer_all_text", "value": "2"} + ], + } + uri = f"api/v1/annotation_layer/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + + expected_result = { + "name": "name2", + "descr": "descr2", + } + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == 1 + assert data["result"][0] == expected_result + + arguments = { + "columns": ["name", "descr"], + "filters": [ + {"col": "name", "opr": "annotation_layer_all_text", "value": "descr3"} + ], + } + uri = f"api/v1/annotation_layer/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + + expected_result = { + "name": "name3", + "descr": "descr3", + } + + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == 1 + assert data["result"][0] == expected_result + + def test_create_annotation_layer(self): + """ + Annotation Api: Test create annotation layer + """ + self.login(username="admin") + annotation_layer_data = { + "name": "new3", + "descr": "description", + } + uri = "api/v1/annotation_layer/" + rv = self.client.post(uri, json=annotation_layer_data) + assert rv.status_code == 201 + data = json.loads(rv.data.decode("utf-8")) + created_model = db.session.query(AnnotationLayer).get(data.get("id")) + assert created_model is not None + assert created_model.name == annotation_layer_data["name"] + assert created_model.descr == annotation_layer_data["descr"] + + # Rollback changes + db.session.delete(created_model) + db.session.commit() + + def test_create_incorrect_annotation_layer(self): + """ + Annotation Api: Test create incorrect annotation layer + """ + self.login(username="admin") + annotation_layer_data = {} + uri = "api/v1/annotation_layer/" + rv = self.client.post(uri, json=annotation_layer_data) + assert rv.status_code == 400 + data = json.loads(rv.data.decode("utf-8")) + assert data == {"message": {"name": ["Missing data for required field."]}} + + @pytest.mark.usefixtures("create_annotation_layers") + def test_create_annotation_layer_uniqueness(self): + """ + Annotation Api: Test create annotation layer uniqueness + """ + self.login(username="admin") + annotation_layer_data = {"name": "name3", "descr": "description"} + uri = "api/v1/annotation_layer/" + rv = self.client.post(uri, json=annotation_layer_data) + assert rv.status_code == 422 + data = json.loads(rv.data.decode("utf-8")) + assert data == {"message": {"name": ["Name must be unique"]}} + + @pytest.mark.usefixtures("create_annotation_layers") + def test_update_annotation_layer(self): + """ + Annotation Api: Test update annotation layer + """ + annotation_layer = ( + db.session.query(AnnotationLayer) + .filter(AnnotationLayer.name == "name2") + .one_or_none() + ) + + self.login(username="admin") + annotation_layer_data = {"name": "changed_name"} + uri = f"api/v1/annotation_layer/{annotation_layer.id}" + rv = self.client.put(uri, 
json=annotation_layer_data) + assert rv.status_code == 200 + updated_model = db.session.query(AnnotationLayer).get(annotation_layer.id) + assert updated_model is not None + assert updated_model.name == annotation_layer_data["name"] + # make sure the descr hasn't updated + assert updated_model.descr == annotation_layer.descr + + @pytest.mark.usefixtures("create_annotation_layers") + def test_update_annotation_layer_uniqueness(self): + """ + Annotation Api: Test update annotation layer uniqueness + """ + annotation_layer = ( + db.session.query(AnnotationLayer) + .filter(AnnotationLayer.name == "name2") + .one_or_none() + ) + + self.login(username="admin") + annotation_layer_data = {"name": "name3", "descr": "changed_description"} + uri = f"api/v1/annotation_layer/{annotation_layer.id}" + rv = self.client.put(uri, json=annotation_layer_data) + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 422 + assert data == {"message": {"name": ["Name must be unique"]}} + + @pytest.mark.usefixtures("create_annotation_layers") + def test_update_annotation_layer_not_found(self): + """ + Annotation Api: Test update annotation layer not found + """ + max_id = db.session.query(func.max(AnnotationLayer.id)).scalar() + + self.login(username="admin") + annotation_layer_data = {"name": "changed_name", "descr": "changed_description"} + uri = f"api/v1/annotation_layer/{max_id + 1}" + rv = self.client.put(uri, json=annotation_layer_data) + assert rv.status_code == 404 + + @pytest.mark.usefixtures("create_annotation_layers") + def test_delete_annotation_layer(self): + """ + Annotation Api: Test update annotation layer + """ + annotation_layer = ( + db.session.query(AnnotationLayer) + .filter(AnnotationLayer.name == "name1") + .one_or_none() + ) + self.login(username="admin") + uri = f"api/v1/annotation_layer/{annotation_layer.id}" + rv = self.client.delete(uri) + assert rv.status_code == 200 + updated_model = db.session.query(AnnotationLayer).get(annotation_layer.id) + assert updated_model is None + + @pytest.mark.usefixtures("create_annotation_layers") + def test_delete_annotation_layer_not_found(self): + """ + Annotation Api: Test delete annotation layer not found + """ + max_id = db.session.query(func.max(AnnotationLayer.id)).scalar() + self.login(username="admin") + uri = f"api/v1/annotation_layer/{max_id + 1}" + rv = self.client.delete(uri) + assert rv.status_code == 404 + + @pytest.mark.usefixtures("create_annotation_layers") + def test_delete_annotation_layer_integrity(self): + """ + Annotation Api: Test delete annotation layer integrity error + """ + query_child_layer = db.session.query(AnnotationLayer).filter( + AnnotationLayer.name == "layer_with_annotations" + ) + child_layer = query_child_layer.one_or_none() + self.login(username="admin") + uri = f"api/v1/annotation_layer/{child_layer.id}" + rv = self.client.delete(uri) + assert rv.status_code == 422 + + @pytest.mark.usefixtures("create_annotation_layers") + def test_bulk_delete_annotation_layer(self): + """ + Annotation Api: Test bulk delete annotation layers + """ + query_no_child_layers = db.session.query(AnnotationLayer).filter( + AnnotationLayer.name.like("name%") + ) + + no_child_layers = query_no_child_layers.all() + no_child_layers_ids = [ + annotation_layer.id for annotation_layer in no_child_layers + ] + self.login(username="admin") + uri = f"api/v1/annotation_layer/?q={prison.dumps(no_child_layers_ids)}" + rv = self.client.delete(uri) + assert rv.status_code == 200 + deleted_annotation_layers = query_no_child_layers.all() + 
assert deleted_annotation_layers == [] + response = json.loads(rv.data.decode("utf-8")) + expected_response = { + "message": f"Deleted {len(no_child_layers_ids)} annotation layers" + } + assert response == expected_response + + @pytest.mark.usefixtures("create_annotation_layers") + def test_bulk_delete_annotation_layer_not_found(self): + """ + Annotation Api: Test bulk delete annotation layers not found + """ + all_annotation_layers = ( + db.session.query(AnnotationLayer) + .filter(AnnotationLayer.name.like("name%")) + .all() + ) + all_annotation_layers_ids = [ + annotation_layer.id for annotation_layer in all_annotation_layers + ] + max_id = db.session.query(func.max(AnnotationLayer.id)).scalar() + all_annotation_layers_ids.append(max_id + 1) + self.login(username="admin") + uri = f"api/v1/annotation_layer/?q={prison.dumps(all_annotation_layers_ids)}" + rv = self.client.delete(uri) + assert rv.status_code == 404 + + @pytest.mark.usefixtures("create_annotation_layers") + def test_get_annotation(self): + """ + Annotation API: Test get annotation + """ + annotation_id = 1 + annotation = ( + db.session.query(Annotation) + .filter(Annotation.short_descr == f"short_descr{annotation_id}") + .one_or_none() + ) + + self.login(username="admin") + uri = ( + f"api/v1/annotation_layer/{annotation.layer_id}/annotation/{annotation.id}" + ) + rv = self.get_assert_metric(uri, "get") + assert rv.status_code == 200 + + expected_result = { + "id": annotation.id, + "end_dttm": get_end_dttm(annotation_id).isoformat(), + "json_metadata": "", + "layer": {"id": annotation.layer_id, "name": "layer_with_annotations"}, + "long_descr": annotation.long_descr, + "short_descr": annotation.short_descr, + "start_dttm": get_start_dttm(annotation_id).isoformat(), + } + + data = json.loads(rv.data.decode("utf-8")) + assert data["result"] == expected_result + + @pytest.mark.usefixtures("create_annotation_layers") + def test_get_annotation_not_found(self): + """ + Annotation API: Test get annotation not found + """ + layer = self.get_layer_with_annotation() + max_id = db.session.query(func.max(Annotation.id)).scalar() + self.login(username="admin") + uri = f"api/v1/annotation_layer/{layer.id}/annotation/{max_id + 1}" + rv = self.get_assert_metric(uri, "get") + assert rv.status_code == 404 + + @pytest.mark.usefixtures("create_annotation_layers") + def test_get_list_annotation(self): + """ + Annotation Api: Test get list of annotations + """ + layer = self.get_layer_with_annotation() + self.login(username="admin") + uri = f"api/v1/annotation_layer/{layer.id}/annotation/" + rv = self.get_assert_metric(uri, "get_list") + + expected_fields = [ + "short_descr", + "created_by", + "changed_by", + "start_dttm", + "end_dttm", + ] + + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == ANNOTATIONS_COUNT + for expected_field in expected_fields: + assert expected_field in data["result"][0] + + @pytest.mark.usefixtures("create_annotation_layers") + def test_get_list_annotation_sorting(self): + """ + Annotation Api: Test sorting on get list of annotations + """ + layer = self.get_layer_with_annotation() + self.login(username="admin") + + order_columns = [ + "short_descr", + "created_by.first_name", + "changed_by.first_name", + "changed_on_delta_humanized", + "start_dttm", + "end_dttm", + ] + for order_column in order_columns: + arguments = {"order_column": order_column, "order_direction": "asc"} + uri = f"api/v1/annotation_layer/{layer.id}/annotation/?q={prison.dumps(arguments)}" + rv = 
self.get_assert_metric(uri, "get_list") + assert rv.status_code == 200 + + @pytest.mark.usefixtures("create_annotation_layers") + def test_get_list_annotation_filter(self): + """ + Annotation Api: Test filters on get list annotation layers + """ + layer = self.get_layer_with_annotation() + self.login(username="admin") + arguments = { + "filters": [ + {"col": "short_descr", "opr": "annotation_all_text", "value": "2"} + ] + } + uri = f"api/v1/annotation_layer/{layer.id}/annotation/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == 1 + + arguments = { + "filters": [ + {"col": "short_descr", "opr": "annotation_all_text", "value": "descr3"} + ] + } + uri = f"api/v1/annotation_layer/{layer.id}/annotation/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == 1 + + @pytest.mark.usefixtures("create_annotation_layers") + def test_create_annotation(self): + """ + Annotation Api: Test create annotation + """ + layer = self.get_layer_with_annotation() + + self.login(username="admin") + annotation_data = { + "short_descr": "new", + "long_descr": "description", + "start_dttm": START_STR, + "end_dttm": END_STR, + } + uri = f"api/v1/annotation_layer/{layer.id}/annotation/" + rv = self.client.post(uri, json=annotation_data) + assert rv.status_code == 201 + data = json.loads(rv.data.decode("utf-8")) + created_model: Annotation = db.session.query(Annotation).get(data.get("id")) + assert created_model is not None + assert created_model.short_descr == annotation_data["short_descr"] + assert created_model.long_descr == annotation_data["long_descr"] + + # Rollback changes + db.session.delete(created_model) + db.session.commit() + + @pytest.mark.usefixtures("create_annotation_layers") + def test_create_incorrect_annotation(self): + """ + Annotation Api: Test create incorrect annotation + """ + layer = self.get_layer_with_annotation() + + self.login(username="admin") + annotation_data = { + "long_descr": "description", + } + uri = f"api/v1/annotation_layer/{layer.id}/annotation/" + rv = self.client.post(uri, json=annotation_data) + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 400 + assert data == { + "message": { + "end_dttm": ["Missing data for required field."], + "short_descr": ["Missing data for required field."], + "start_dttm": ["Missing data for required field."], + } + } + + @pytest.mark.usefixtures("create_annotation_layers") + def test_create_annotation_uniqueness(self): + """ + Annotation Api: Test create annotation uniqueness + """ + layer = self.get_layer_with_annotation() + + self.login(username="admin") + annotation_data = { + "short_descr": "short_descr2", + "long_descr": "description", + "start_dttm": START_STR, + "end_dttm": END_STR, + } + uri = f"api/v1/annotation_layer/{layer.id}/annotation/" + rv = self.client.post(uri, json=annotation_data) + assert rv.status_code == 422 + data = json.loads(rv.data.decode("utf-8")) + assert data == { + "message": { + "short_descr": ["Short description must be unique for this layer"] + } + } + + @pytest.mark.usefixtures("create_annotation_layers") + def test_update_annotation(self): + """ + Annotation Api: Test update annotation + """ + layer = self.get_layer_with_annotation() + annotation = ( + db.session.query(Annotation) + .filter(Annotation.short_descr == "short_descr2") + 
.one_or_none() + ) + + self.login(username="admin") + annotation_data = { + "short_descr": "changed_name", + } + uri = f"api/v1/annotation_layer/{layer.id}/annotation/{annotation.id}" + rv = self.client.put(uri, json=annotation_data) + assert rv.status_code == 200 + updated_model: Annotation = db.session.query(Annotation).get(annotation.id) + assert updated_model is not None + assert updated_model.short_descr == annotation_data["short_descr"] + # make sure long_descr hasn't updated + assert updated_model.long_descr == annotation.long_descr + + @pytest.mark.usefixtures("create_annotation_layers") + def test_update_annotation_null_datetime(self): + """ + Annotation Api: Test update annotation null datetime + """ + layer = self.get_layer_with_annotation() + annotation = ( + db.session.query(Annotation) + .filter(Annotation.short_descr == "short_descr2") + .one_or_none() + ) + + self.login(username="admin") + annotation_data = {"start_dttm": None, "end_dttm": None} + uri = f"api/v1/annotation_layer/{layer.id}/annotation/{annotation.id}" + rv = self.client.put(uri, json=annotation_data) + assert rv.status_code == 400 + data = json.loads(rv.data.decode("utf-8")) + assert data == { + "message": { + "end_dttm": ["Field may not be null."], + "start_dttm": ["Field may not be null."], + } + } + + @pytest.mark.usefixtures("create_annotation_layers") + def test_update_annotation_uniqueness(self): + """ + Annotation Api: Test update annotation uniqueness + """ + layer = self.get_layer_with_annotation() + annotation = ( + db.session.query(Annotation) + .filter(Annotation.short_descr == "short_descr2") + .one_or_none() + ) + + self.login(username="admin") + annotation_layer_data = { + "short_descr": "short_descr3", + "long_descr": "changed_description", + } + uri = f"api/v1/annotation_layer/{layer.id}/annotation/{annotation.id}" + rv = self.client.put(uri, json=annotation_layer_data) + assert rv.status_code == 422 + data = json.loads(rv.data.decode("utf-8")) + assert data == { + "message": { + "short_descr": ["Short description must be unique for this layer"] + } + } + + @pytest.mark.usefixtures("create_annotation_layers") + def test_update_annotation_not_found(self): + """ + Annotation Api: Test update annotation not found + """ + layer = self.get_layer_with_annotation() + max_id = db.session.query(func.max(Annotation.id)).scalar() + + self.login(username="admin") + annotation_layer_data = { + "short_descr": "changed_name", + } + uri = f"api/v1/annotation_layer/{layer.id}/annotation/{max_id + 1}" + rv = self.client.put(uri, json=annotation_layer_data) + assert rv.status_code == 404 + + @pytest.mark.usefixtures("create_annotation_layers") + def test_delete_annotation(self): + """ + Annotation Api: Test update annotation + """ + layer = self.get_layer_with_annotation() + annotation = ( + db.session.query(Annotation) + .filter(Annotation.short_descr == "short_descr1") + .one_or_none() + ) + self.login(username="admin") + uri = f"api/v1/annotation_layer/{layer.id}/annotation/{annotation.id}" + rv = self.client.delete(uri) + assert rv.status_code == 200 + updated_model = db.session.query(Annotation).get(annotation.id) + assert updated_model is None + + @pytest.mark.usefixtures("create_annotation_layers") + def test_delete_annotation_not_found(self): + """ + Annotation Api: Test delete annotation not found + """ + layer = self.get_layer_with_annotation() + max_id = db.session.query(func.max(Annotation.id)).scalar() + self.login(username="admin") + uri = f"api/v1/annotation_layer/{layer.id}/annotation{max_id + 
1}" + rv = self.client.delete(uri) + assert rv.status_code == 404 + + @pytest.mark.usefixtures("create_annotation_layers") + def test_bulk_delete_annotation(self): + """ + Annotation Api: Test bulk delete annotation + """ + layer = self.get_layer_with_annotation() + query_annotations = db.session.query(Annotation).filter( + Annotation.layer == layer + ) + + annotations = query_annotations.all() + annotations_ids = [annotation.id for annotation in annotations] + self.login(username="admin") + uri = f"api/v1/annotation_layer/{layer.id}/annotation/?q={prison.dumps(annotations_ids)}" + rv = self.client.delete(uri) + assert rv.status_code == 200 + deleted_annotations = query_annotations.all() + assert deleted_annotations == [] + response = json.loads(rv.data.decode("utf-8")) + expected_response = {"message": f"Deleted {len(annotations_ids)} annotations"} + assert response == expected_response + + @pytest.mark.usefixtures("create_annotation_layers") + def test_bulk_delete_annotation_not_found(self): + """ + Annotation Api: Test bulk delete annotation not found + """ + layer = self.get_layer_with_annotation() + query_annotations = db.session.query(Annotation).filter( + Annotation.layer == layer + ) + + annotations = query_annotations.all() + annotations_ids = [annotation.id for annotation in annotations] + + max_id = db.session.query(func.max(Annotation.id)).scalar() + + annotations_ids.append(max_id + 1) + self.login(username="admin") + uri = f"api/v1/annotation_layer/{layer.id}/annotation/?q={prison.dumps(annotations_ids)}" + rv = self.client.delete(uri) + assert rv.status_code == 404 diff --git a/tests/integration_tests/annotation_layers/fixtures.py b/tests/integration_tests/annotation_layers/fixtures.py new file mode 100644 index 0000000000000..8243d7e474d57 --- /dev/null +++ b/tests/integration_tests/annotation_layers/fixtures.py @@ -0,0 +1,105 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# isort:skip_file +import pytest +from datetime import datetime +from typing import Optional + + +from superset import db +from superset.models.annotations import Annotation, AnnotationLayer + +from tests.integration_tests.test_app import app + + +ANNOTATION_LAYERS_COUNT = 10 +ANNOTATIONS_COUNT = 5 + + +def get_start_dttm(annotation_id: int) -> datetime: + return datetime(1990 + annotation_id, 1, 1) + + +def get_end_dttm(annotation_id: int) -> datetime: + return datetime(1990 + annotation_id, 7, 1) + + +def _insert_annotation_layer(name: str = "", descr: str = "") -> AnnotationLayer: + annotation_layer = AnnotationLayer( + name=name, + descr=descr, + ) + db.session.add(annotation_layer) + db.session.commit() + return annotation_layer + + +def _insert_annotation( + layer: AnnotationLayer, + short_descr: str, + long_descr: str, + json_metadata: Optional[str] = "", + start_dttm: Optional[datetime] = None, + end_dttm: Optional[datetime] = None, +) -> Annotation: + annotation = Annotation( + layer=layer, + short_descr=short_descr, + long_descr=long_descr, + json_metadata=json_metadata, + start_dttm=start_dttm, + end_dttm=end_dttm, + ) + db.session.add(annotation) + db.session.commit() + return annotation + + +@pytest.fixture() +def create_annotation_layers(): + """ + Creates ANNOTATION_LAYERS_COUNT-1 layers with no annotations + and a final one with ANNOTATION_COUNT children + :return: + """ + with app.app_context(): + annotation_layers = [] + annotations = [] + for cx in range(ANNOTATION_LAYERS_COUNT - 1): + annotation_layers.append( + _insert_annotation_layer(name=f"name{cx}", descr=f"descr{cx}") + ) + layer_with_annotations = _insert_annotation_layer("layer_with_annotations") + annotation_layers.append(layer_with_annotations) + for cx in range(ANNOTATIONS_COUNT): + annotations.append( + _insert_annotation( + layer_with_annotations, + short_descr=f"short_descr{cx}", + long_descr=f"long_descr{cx}", + start_dttm=get_start_dttm(cx), + end_dttm=get_end_dttm(cx), + ) + ) + yield annotation_layers + + # rollback changes + for annotation_layer in annotation_layers: + db.session.delete(annotation_layer) + for annotation in annotations: + db.session.delete(annotation) + db.session.commit() diff --git a/tests/integration_tests/async_events/__init__.py b/tests/integration_tests/async_events/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/async_events/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/tests/integration_tests/async_events/api_tests.py b/tests/integration_tests/async_events/api_tests.py new file mode 100644 index 0000000000000..a63f540dd0f84 --- /dev/null +++ b/tests/integration_tests/async_events/api_tests.py @@ -0,0 +1,120 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import json +from typing import Optional +from unittest import mock + +from superset.extensions import async_query_manager +from tests.integration_tests.base_tests import SupersetTestCase +from tests.integration_tests.test_app import app + + +class TestAsyncEventApi(SupersetTestCase): + UUID = "943c920-32a5-412a-977d-b8e47d36f5a4" + + def fetch_events(self, last_id: Optional[str] = None): + base_uri = "api/v1/async_event/" + uri = f"{base_uri}?last_id={last_id}" if last_id else base_uri + return self.client.get(uri) + + @mock.patch("uuid.uuid4", return_value=UUID) + def test_events(self, mock_uuid4): + async_query_manager.init_app(app) + self.login(username="admin") + with mock.patch.object(async_query_manager._redis, "xrange") as mock_xrange: + rv = self.fetch_events() + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 200 + channel_id = app.config["GLOBAL_ASYNC_QUERIES_REDIS_STREAM_PREFIX"] + self.UUID + mock_xrange.assert_called_with(channel_id, "-", "+", 100) + self.assertEqual(response, {"result": []}) + + @mock.patch("uuid.uuid4", return_value=UUID) + def test_events_last_id(self, mock_uuid4): + async_query_manager.init_app(app) + self.login(username="admin") + with mock.patch.object(async_query_manager._redis, "xrange") as mock_xrange: + rv = self.fetch_events("1607471525180-0") + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 200 + channel_id = app.config["GLOBAL_ASYNC_QUERIES_REDIS_STREAM_PREFIX"] + self.UUID + mock_xrange.assert_called_with(channel_id, "1607471525180-1", "+", 100) + self.assertEqual(response, {"result": []}) + + @mock.patch("uuid.uuid4", return_value=UUID) + def test_events_results(self, mock_uuid4): + async_query_manager.init_app(app) + self.login(username="admin") + with mock.patch.object(async_query_manager._redis, "xrange") as mock_xrange: + mock_xrange.return_value = [ + ( + "1607477697866-0", + { + "data": '{"channel_id": "1095c1c9-b6b1-444d-aa83-8e323b32831f", "job_id": "10a0bd9a-03c8-4737-9345-f4234ba86512", "user_id": "1", "status": "done", "errors": [], "result_url": "/api/v1/chart/data/qc-ecd766dd461f294e1bcdaa321e0e8463"}' + }, + ), + ( + "1607477697993-0", + { + "data": '{"channel_id": "1095c1c9-b6b1-444d-aa83-8e323b32831f", "job_id": "027cbe49-26ce-4813-bb5a-0b95a626b84c", "user_id": "1", "status": "done", "errors": [], "result_url": "/api/v1/chart/data/qc-1bbc3a240e7039ba4791aefb3a7ee80d"}' + }, + ), + ] + rv = self.fetch_events() + response = 
json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 200 + channel_id = app.config["GLOBAL_ASYNC_QUERIES_REDIS_STREAM_PREFIX"] + self.UUID + mock_xrange.assert_called_with(channel_id, "-", "+", 100) + expected = { + "result": [ + { + "channel_id": "1095c1c9-b6b1-444d-aa83-8e323b32831f", + "errors": [], + "id": "1607477697866-0", + "job_id": "10a0bd9a-03c8-4737-9345-f4234ba86512", + "result_url": "/api/v1/chart/data/qc-ecd766dd461f294e1bcdaa321e0e8463", + "status": "done", + "user_id": "1", + }, + { + "channel_id": "1095c1c9-b6b1-444d-aa83-8e323b32831f", + "errors": [], + "id": "1607477697993-0", + "job_id": "027cbe49-26ce-4813-bb5a-0b95a626b84c", + "result_url": "/api/v1/chart/data/qc-1bbc3a240e7039ba4791aefb3a7ee80d", + "status": "done", + "user_id": "1", + }, + ] + } + self.assertEqual(response, expected) + + def test_events_no_login(self): + async_query_manager.init_app(app) + rv = self.fetch_events() + assert rv.status_code == 401 + + def test_events_no_token(self): + self.login(username="admin") + self.client.set_cookie( + "localhost", app.config["GLOBAL_ASYNC_QUERIES_JWT_COOKIE_NAME"], "" + ) + rv = self.fetch_events() + assert rv.status_code == 401 diff --git a/tests/integration_tests/available_domains/__init__.py b/tests/integration_tests/available_domains/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/available_domains/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/available_domains/api_tests.py b/tests/integration_tests/available_domains/api_tests.py new file mode 100644 index 0000000000000..8838207d29094 --- /dev/null +++ b/tests/integration_tests/available_domains/api_tests.py @@ -0,0 +1,30 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+import json + +from tests.integration_tests.test_app import app + + +def test_get_available_domains(test_client, login_as_admin): + cached = app.config["SUPERSET_WEBSERVER_DOMAINS"] + app.config["SUPERSET_WEBSERVER_DOMAINS"] = ["a", "b"] + resp = test_client.get("api/v1/available_domains/") + assert resp.status_code == 200 + data = json.loads(resp.data.decode("utf-8")) + result = data.get("result") + assert result == {"domains": ["a", "b"]} + app.config["SUPERSET_WEBSERVER_DOMAINS"] = cached diff --git a/tests/integration_tests/base_api_tests.py b/tests/integration_tests/base_api_tests.py new file mode 100644 index 0000000000000..478fee0a0dca4 --- /dev/null +++ b/tests/integration_tests/base_api_tests.py @@ -0,0 +1,425 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# isort:skip_file +import json +from unittest.mock import patch + +from tests.integration_tests.fixtures.world_bank_dashboard import ( + load_world_bank_dashboard_with_slices, + load_world_bank_data, +) + +import pytest +from flask_appbuilder.models.sqla.interface import SQLAInterface +import prison + +import tests.integration_tests.test_app +from superset import db, security_manager +from superset.extensions import appbuilder +from superset.models.dashboard import Dashboard +from superset.views.base_api import BaseSupersetModelRestApi, requires_json + +from .base_tests import SupersetTestCase +from .conftest import with_config + + +class Model1Api(BaseSupersetModelRestApi): + datamodel = SQLAInterface(Dashboard) + allow_browser_login = True + class_permission_name = "Dashboard" + method_permission_name = { + "get_list": "read", + "get": "read", + "export": "read", + "post": "write", + "put": "write", + "delete": "write", + "bulk_delete": "write", + "info": "read", + "related": "read", + } + + +appbuilder.add_api(Model1Api) + + +class TestOpenApiSpec(SupersetTestCase): + def test_open_api_spec(self): + """ + API: Test validate OpenAPI spec + :return: + """ + from openapi_spec_validator import validate_spec + + self.login(username="admin") + uri = "api/v1/_openapi" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 200) + response = json.loads(rv.data.decode("utf-8")) + validate_spec(response) + + +class TestBaseModelRestApi(SupersetTestCase): + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def test_default_missing_declaration_get(self): + """ + API: Test default missing declaration on get + + We want to make sure that not declared list_columns will + not render all columns by default but just the model's pk + """ + # Check get list response + self.login(username="admin") + uri = "api/v1/model1api/" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 200) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(response["list_columns"], 
["id"]) + for result in response["result"]: + self.assertEqual(list(result.keys()), ["id"]) + + # Check get response + dashboard = db.session.query(Dashboard).first() + uri = f"api/v1/model1api/{dashboard.id}" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 200) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(response["show_columns"], ["id"]) + self.assertEqual(list(response["result"].keys()), ["id"]) + + def test_default_missing_declaration_put_spec(self): + """ + API: Test default missing declaration on put openapi spec + + We want to make sure that not declared edit_columns will + not render all columns by default but just the model's pk + """ + self.login(username="admin") + uri = "api/v1/_openapi" + rv = self.client.get(uri) + # dashboard model accepts all fields are null + self.assertEqual(rv.status_code, 200) + response = json.loads(rv.data.decode("utf-8")) + expected_mutation_spec = { + "properties": {"id": {"format": "int32", "type": "integer"}}, + "type": "object", + } + self.assertEqual( + response["components"]["schemas"]["Model1Api.post"], expected_mutation_spec + ) + self.assertEqual( + response["components"]["schemas"]["Model1Api.put"], expected_mutation_spec + ) + + def test_default_missing_declaration_post(self): + """ + API: Test default missing declaration on post + + We want to make sure that not declared add_columns will + not accept all columns by default + """ + dashboard_data = { + "dashboard_title": "title1", + "slug": "slug1", + "position_json": '{"a": "A"}', + "css": "css", + "json_metadata": '{"b": "B"}', + "published": True, + } + self.login(username="admin") + uri = "api/v1/model1api/" + rv = self.client.post(uri, json=dashboard_data) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(rv.status_code, 422) + expected_response = { + "message": { + "css": ["Unknown field."], + "dashboard_title": ["Unknown field."], + "json_metadata": ["Unknown field."], + "position_json": ["Unknown field."], + "published": ["Unknown field."], + "slug": ["Unknown field."], + } + } + self.assertEqual(response, expected_response) + + def test_refuse_invalid_format_request(self): + """ + API: Test invalid format of request + + We want to make sure that non-JSON request are refused + """ + self.login(username="admin") + uri = "api/v1/report/" # endpoint decorated with @requires_json + rv = self.client.post( + uri, data="a: value\nb: 1\n", content_type="application/yaml" + ) + self.assertEqual(rv.status_code, 400) + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def test_default_missing_declaration_put(self): + """ + API: Test default missing declaration on put + + We want to make sure that not declared edit_columns will + not accept all columns by default + """ + dashboard = db.session.query(Dashboard).first() + dashboard_data = {"dashboard_title": "CHANGED", "slug": "CHANGED"} + self.login(username="admin") + uri = f"api/v1/model1api/{dashboard.id}" + rv = self.client.put(uri, json=dashboard_data) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(rv.status_code, 422) + expected_response = { + "message": { + "dashboard_title": ["Unknown field."], + "slug": ["Unknown field."], + } + } + self.assertEqual(response, expected_response) + + +class ApiOwnersTestCaseMixin: + """ + Implements shared tests for owners related field + """ + + resource_name: str = "" + + def test_get_related_owners(self): + """ + API: Test get related owners + """ + self.login(username="admin") + uri = 
f"api/v1/{self.resource_name}/related/owners" + rv = self.client.get(uri) + assert rv.status_code == 200 + response = json.loads(rv.data.decode("utf-8")) + users = db.session.query(security_manager.user_model).all() + expected_users = [str(user) for user in users] + assert response["count"] == len(users) + # This needs to be implemented like this, because ordering varies between + # postgres and mysql + response_users = [result["text"] for result in response["result"]] + for expected_user in expected_users: + assert expected_user in response_users + + def test_get_related_owners_with_extra_filters(self): + """ + API: Test get related owners with extra related query filters + """ + self.login(username="admin") + + def _base_filter(query): + return query.filter_by(username="alpha") + + with patch.dict( + "superset.views.filters.current_app.config", + {"EXTRA_RELATED_QUERY_FILTERS": {"user": _base_filter}}, + ): + uri = f"api/v1/{self.resource_name}/related/owners" + rv = self.client.get(uri) + assert rv.status_code == 200 + response = json.loads(rv.data.decode("utf-8")) + response_users = [result["text"] for result in response["result"]] + assert response_users == ["alpha user"] + + def test_get_related_owners_paginated(self): + """ + API: Test get related owners with pagination + """ + self.login(username="admin") + page_size = 1 + argument = {"page_size": page_size} + uri = f"api/v1/{self.resource_name}/related/owners?q={prison.dumps(argument)}" + rv = self.client.get(uri) + assert rv.status_code == 200 + response = json.loads(rv.data.decode("utf-8")) + users = db.session.query(security_manager.user_model).all() + + # the count should correspond with the total number of users + assert response["count"] == len(users) + + # the length of the result should be at most equal to the page size + assert len(response["result"]) == min(page_size, len(users)) + + # make sure all received users are included in the full set of users + all_users = [str(user) for user in users] + for received_user in [result["text"] for result in response["result"]]: + assert received_user in all_users + + def test_get_ids_related_owners_paginated(self): + """ + API: Test get related owners with pagination returns 422 + """ + self.login(username="admin") + argument = {"page": 1, "page_size": 1, "include_ids": [2]} + uri = f"api/v1/{self.resource_name}/related/owners?q={prison.dumps(argument)}" + rv = self.client.get(uri) + assert rv.status_code == 422 + + def test_get_filter_related_owners(self): + """ + API: Test get filter related owners + """ + self.login(username="admin") + argument = {"filter": "gamma"} + uri = f"api/v1/{self.resource_name}/related/owners?q={prison.dumps(argument)}" + + rv = self.client.get(uri) + assert rv.status_code == 200 + response = json.loads(rv.data.decode("utf-8")) + assert 4 == response["count"] + sorted_results = sorted(response["result"], key=lambda value: value["text"]) + expected_results = [ + { + "extra": {"active": True, "email": "gamma@fab.org"}, + "text": "gamma user", + "value": 2, + }, + { + "extra": {"active": True, "email": "gamma2@fab.org"}, + "text": "gamma2 user", + "value": 3, + }, + { + "extra": {"active": True, "email": "gamma_no_csv@fab.org"}, + "text": "gamma_no_csv user", + "value": 6, + }, + { + "extra": {"active": True, "email": "gamma_sqllab@fab.org"}, + "text": "gamma_sqllab user", + "value": 4, + }, + ] + # TODO Check me + assert expected_results == sorted_results + + @with_config({"EXCLUDE_USERS_FROM_LISTS": ["gamma"]}) + def 
test_get_base_filter_related_owners(self): + """ + API: Test get base filter related owners + """ + self.login(username="admin") + uri = f"api/v1/{self.resource_name}/related/owners" + gamma_user = ( + db.session.query(security_manager.user_model) + .filter(security_manager.user_model.username == "gamma") + .one_or_none() + ) + assert gamma_user is not None + users = db.session.query(security_manager.user_model).all() + + rv = self.client.get(uri) + assert rv.status_code == 200 + response = json.loads(rv.data.decode("utf-8")) + assert response["count"] == len(users) - 1 + response_users = [result["text"] for result in response["result"]] + assert "gamma user" not in response_users + + @patch( + "superset.security.SupersetSecurityManager.get_exclude_users_from_lists", + return_value=["gamma"], + ) + def test_get_base_filter_related_owners_on_sm( + self, mock_get_exclude_users_from_list + ): + """ + API: Test get base filter related owners using security manager + """ + self.login(username="admin") + uri = f"api/v1/{self.resource_name}/related/owners" + gamma_user = ( + db.session.query(security_manager.user_model) + .filter(security_manager.user_model.username == "gamma") + .one_or_none() + ) + assert gamma_user is not None + users = db.session.query(security_manager.user_model).all() + + rv = self.client.get(uri) + assert rv.status_code == 200 + response = json.loads(rv.data.decode("utf-8")) + assert response["count"] == len(users) - 1 + response_users = [result["text"] for result in response["result"]] + assert "gamma user" not in response_users + + def test_get_ids_related_owners(self): + """ + API: Test get filter related owners + """ + self.login(username="admin") + argument = {"filter": "gamma_sqllab", "include_ids": [2]} + uri = f"api/v1/{self.resource_name}/related/owners?q={prison.dumps(argument)}" + + rv = self.client.get(uri) + response = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 200 + assert 2 == response["count"] + sorted_results = sorted(response["result"], key=lambda value: value["text"]) + expected_results = [ + { + "extra": {"active": True, "email": "gamma@fab.org"}, + "text": "gamma user", + "value": 2, + }, + { + "extra": {"active": True, "email": "gamma_sqllab@fab.org"}, + "text": "gamma_sqllab user", + "value": 4, + }, + ] + assert expected_results == sorted_results + + def test_get_repeated_ids_related_owners(self): + """ + API: Test get filter related owners + """ + self.login(username="admin") + argument = {"filter": "gamma_sqllab", "include_ids": [2, 4]} + uri = f"api/v1/{self.resource_name}/related/owners?q={prison.dumps(argument)}" + + rv = self.client.get(uri) + response = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 200 + assert 2 == response["count"] + sorted_results = sorted(response["result"], key=lambda value: value["text"]) + expected_results = [ + { + "extra": {"active": True, "email": "gamma@fab.org"}, + "text": "gamma user", + "value": 2, + }, + { + "extra": {"active": True, "email": "gamma_sqllab@fab.org"}, + "text": "gamma_sqllab user", + "value": 4, + }, + ] + assert expected_results == sorted_results + + def test_get_related_fail(self): + """ + API: Test get related fail + """ + self.login(username="admin") + uri = f"api/v1/{self.resource_name}/related/owner" + + rv = self.client.get(uri) + assert rv.status_code == 404 diff --git a/tests/integration_tests/base_tests.py b/tests/integration_tests/base_tests.py new file mode 100644 index 0000000000000..f70f0f63bde36 --- /dev/null +++ 
b/tests/integration_tests/base_tests.py @@ -0,0 +1,531 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# isort:skip_file +"""Unit tests for Superset""" +from datetime import datetime +import imp +import json +from contextlib import contextmanager +from typing import Any, Dict, Union, List, Optional +from unittest.mock import Mock, patch, MagicMock + +import pandas as pd +from flask import Response +from flask_appbuilder.security.sqla import models as ab_models +from flask_testing import TestCase +from sqlalchemy.engine.interfaces import Dialect +from sqlalchemy.ext.declarative import DeclarativeMeta +from sqlalchemy.orm import Session +from sqlalchemy.sql import func +from sqlalchemy.dialects.mysql import dialect + +from tests.integration_tests.test_app import app, login +from superset.sql_parse import CtasMethod +from superset import db, security_manager +from superset.connectors.base.models import BaseDatasource +from superset.connectors.sqla.models import SqlaTable +from superset.models import core as models +from superset.models.slice import Slice +from superset.models.core import Database +from superset.models.dashboard import Dashboard +from superset.models.datasource_access_request import DatasourceAccessRequest +from superset.utils.core import get_example_default_schema +from superset.utils.database import get_example_database +from superset.views.base_api import BaseSupersetModelRestApi + +FAKE_DB_NAME = "fake_db_100" +test_client = app.test_client() + + +def get_resp( + client: Any, + url: str, + data: Any = None, + follow_redirects: bool = True, + raise_on_error: bool = True, + json_: Optional[str] = None, +): + """Shortcut to get the parsed results while following redirects""" + if data: + resp = client.post(url, data=data, follow_redirects=follow_redirects) + elif json_: + resp = client.post(url, json=json_, follow_redirects=follow_redirects) + else: + resp = client.get(url, follow_redirects=follow_redirects) + if raise_on_error and resp.status_code > 400: + raise Exception("http request failed with code {}".format(resp.status_code)) + return resp.data.decode("utf-8") + + +def post_assert_metric( + client: Any, uri: str, data: Dict[str, Any], func_name: str +) -> Response: + """ + Simple client post with an extra assertion for statsd metrics + + :param client: test client for superset api requests + :param uri: The URI to use for the HTTP POST + :param data: The JSON data payload to be posted + :param func_name: The function name that the HTTP POST triggers + for the statsd metric assertion + :return: HTTP Response + """ + with patch.object( + BaseSupersetModelRestApi, "incr_stats", return_value=None + ) as mock_method: + rv = client.post(uri, json=data) + if 200 <= rv.status_code < 400: + mock_method.assert_called_once_with("success", 
func_name) + elif 400 <= rv.status_code < 500: + mock_method.assert_called_once_with("warning", func_name) + else: + mock_method.assert_called_once_with("error", func_name) + return rv + + +class SupersetTestCase(TestCase): + default_schema_backend_map = { + "sqlite": "main", + "mysql": "superset", + "postgresql": "public", + "presto": "default", + "hive": "default", + } + + maxDiff = -1 + + def create_app(self): + return app + + @staticmethod + def get_nonexistent_numeric_id(model): + return (db.session.query(func.max(model.id)).scalar() or 0) + 1 + + @staticmethod + def get_birth_names_dataset() -> SqlaTable: + return SupersetTestCase.get_table(name="birth_names") + + @staticmethod + def create_user_with_roles( + username: str, roles: List[str], should_create_roles: bool = False + ): + user_to_create = security_manager.find_user(username) + if not user_to_create: + security_manager.add_user( + username, + username, + username, + f"{username}@superset.com", + security_manager.find_role("Gamma"), # it needs a role + password="general", + ) + db.session.commit() + user_to_create = security_manager.find_user(username) + assert user_to_create + user_to_create.roles = [] + for chosen_user_role in roles: + if should_create_roles: + # copy role from gamma but without data permissions + security_manager.copy_role("Gamma", chosen_user_role, merge=False) + user_to_create.roles.append(security_manager.find_role(chosen_user_role)) + db.session.commit() + return user_to_create + + @staticmethod + def create_user( + username: str, + password: str, + role_name: str, + first_name: str = "admin", + last_name: str = "user", + email: str = "admin@fab.org", + ) -> Union[ab_models.User, bool]: + role_admin = security_manager.find_role(role_name) + return security_manager.add_user( + username, first_name, last_name, email, role_admin, password + ) + + @staticmethod + def get_user(username: str) -> ab_models.User: + user = ( + db.session.query(security_manager.user_model) + .filter_by(username=username) + .one_or_none() + ) + return user + + @staticmethod + def get_role(name: str) -> Optional[ab_models.User]: + user = ( + db.session.query(security_manager.role_model) + .filter_by(name=name) + .one_or_none() + ) + return user + + @staticmethod + def get_table_by_id(table_id: int) -> SqlaTable: + return db.session.query(SqlaTable).filter_by(id=table_id).one() + + @staticmethod + def is_module_installed(module_name): + try: + imp.find_module(module_name) + return True + except ImportError: + return False + + def get_or_create(self, cls, criteria, session, **kwargs): + obj = session.query(cls).filter_by(**criteria).first() + if not obj: + obj = cls(**criteria) + obj.__dict__.update(**kwargs) + session.add(obj) + session.commit() + return obj + + def login(self, username="admin", password="general"): + return login(self.client, username, password) + + def get_slice( + self, slice_name: str, session: Session, expunge_from_session: bool = True + ) -> Slice: + slc = session.query(Slice).filter_by(slice_name=slice_name).one() + if expunge_from_session: + session.expunge_all() + return slc + + @staticmethod + def get_table( + name: str, database_id: Optional[int] = None, schema: Optional[str] = None + ) -> SqlaTable: + schema = schema or get_example_default_schema() + + return ( + db.session.query(SqlaTable) + .filter_by( + database_id=database_id + or SupersetTestCase.get_database_by_name("examples").id, + schema=schema, + table_name=name, + ) + .one() + ) + + @staticmethod + def get_database_by_id(db_id: int) -> 
Database: + return db.session.query(Database).filter_by(id=db_id).one() + + @staticmethod + def get_database_by_name(database_name: str = "main") -> Database: + if database_name == "examples": + return get_example_database() + else: + raise ValueError("Database doesn't exist") + + @staticmethod + def get_datasource_mock() -> BaseDatasource: + datasource = MagicMock() + results = Mock() + results.query = Mock() + results.status = Mock() + results.error_message = None + results.df = pd.DataFrame() + datasource.type = "table" + datasource.query = Mock(return_value=results) + mock_dttm_col = Mock() + datasource.get_col = Mock(return_value=mock_dttm_col) + datasource.query = Mock(return_value=results) + datasource.database = Mock() + datasource.database.db_engine_spec = Mock() + datasource.database.db_engine_spec.mutate_expression_label = lambda x: x + datasource.owners = MagicMock() + return datasource + + def get_resp( + self, url, data=None, follow_redirects=True, raise_on_error=True, json_=None + ): + return get_resp(self.client, url, data, follow_redirects, raise_on_error, json_) + + def get_json_resp( + self, url, data=None, follow_redirects=True, raise_on_error=True, json_=None + ): + """Shortcut to get the parsed results while following redirects""" + resp = self.get_resp(url, data, follow_redirects, raise_on_error, json_) + return json.loads(resp) + + def get_access_requests(self, username, ds_type, ds_id): + DAR = DatasourceAccessRequest + return ( + db.session.query(DAR) + .filter( + DAR.created_by == security_manager.find_user(username=username), + DAR.datasource_type == ds_type, + DAR.datasource_id == ds_id, + ) + .first() + ) + + def logout(self): + self.client.get("/logout/", follow_redirects=True) + + def grant_public_access_to_table(self, table): + role_name = "Public" + self.grant_role_access_to_table(table, role_name) + + def grant_role_access_to_table(self, table, role_name): + role = security_manager.find_role(role_name) + perms = db.session.query(ab_models.PermissionView).all() + for perm in perms: + if ( + perm.permission.name == "datasource_access" + and perm.view_menu + and table.perm in perm.view_menu.name + ): + security_manager.add_permission_role(role, perm) + + def revoke_public_access_to_table(self, table): + role_name = "Public" + self.revoke_role_access_to_table(role_name, table) + + def revoke_role_access_to_table(self, role_name, table): + public_role = security_manager.find_role(role_name) + perms = db.session.query(ab_models.PermissionView).all() + for perm in perms: + if ( + perm.permission.name == "datasource_access" + and perm.view_menu + and table.perm in perm.view_menu.name + ): + security_manager.del_permission_role(public_role, perm) + + def run_sql( + self, + sql, + client_id=None, + username=None, + raise_on_error=False, + query_limit=None, + database_name="examples", + sql_editor_id=None, + select_as_cta=False, + tmp_table_name=None, + schema=None, + ctas_method=CtasMethod.TABLE, + template_params="{}", + ): + if username: + self.logout() + self.login(username=username) + dbid = SupersetTestCase.get_database_by_name(database_name).id + json_payload = { + "database_id": dbid, + "sql": sql, + "client_id": client_id, + "queryLimit": query_limit, + "sql_editor_id": sql_editor_id, + "ctas_method": ctas_method, + "templateParams": template_params, + } + if tmp_table_name: + json_payload["tmp_table_name"] = tmp_table_name + if select_as_cta: + json_payload["select_as_cta"] = select_as_cta + if schema: + json_payload["schema"] = schema + + resp = 
self.get_json_resp( + "/api/v1/sqllab/execute/", raise_on_error=False, json_=json_payload + ) + if raise_on_error and "error" in resp: + raise Exception("run_sql failed") + return resp + + def create_fake_db(self): + self.login(username="admin") + database_name = FAKE_DB_NAME + db_id = 100 + extra = """{ + "schemas_allowed_for_file_upload": + ["this_schema_is_allowed", "this_schema_is_allowed_too"] + }""" + + return self.get_or_create( + cls=models.Database, + criteria={"database_name": database_name}, + session=db.session, + sqlalchemy_uri="sqlite:///:memory:", + id=db_id, + extra=extra, + ) + + def delete_fake_db(self): + database = ( + db.session.query(Database) + .filter(Database.database_name == FAKE_DB_NAME) + .scalar() + ) + if database: + db.session.delete(database) + + def create_fake_db_for_macros(self): + self.login(username="admin") + database_name = "db_for_macros_testing" + db_id = 200 + database = self.get_or_create( + cls=models.Database, + criteria={"database_name": database_name}, + session=db.session, + sqlalchemy_uri="db_for_macros_testing://user@host:8080/hive", + id=db_id, + ) + + def mock_get_dialect() -> Dialect: + return dialect() + + database.get_dialect = mock_get_dialect + return database + + @staticmethod + def delete_fake_db_for_macros(): + database = ( + db.session.query(Database) + .filter(Database.database_name == "db_for_macros_testing") + .scalar() + ) + if database: + db.session.delete(database) + db.session.commit() + + def validate_sql( + self, + sql, + client_id=None, + username=None, + raise_on_error=False, + database_name="examples", + template_params=None, + ): + if username: + self.logout() + self.login(username=username) + dbid = SupersetTestCase.get_database_by_name(database_name).id + resp = self.get_json_resp( + "/superset/validate_sql_json/", + raise_on_error=False, + data=dict( + database_id=dbid, + sql=sql, + client_id=client_id, + templateParams=template_params, + ), + ) + if raise_on_error and "error" in resp: + raise Exception("validate_sql failed") + return resp + + def get_dash_by_slug(self, dash_slug): + sesh = db.session() + return sesh.query(Dashboard).filter_by(slug=dash_slug).first() + + def get_assert_metric(self, uri: str, func_name: str) -> Response: + """ + Simple client get with an extra assertion for statsd metrics + + :param uri: The URI to use for the HTTP GET + :param func_name: The function name that the HTTP GET triggers + for the statsd metric assertion + :return: HTTP Response + """ + with patch.object( + BaseSupersetModelRestApi, "incr_stats", return_value=None + ) as mock_method: + rv = self.client.get(uri) + if 200 <= rv.status_code < 400: + mock_method.assert_called_once_with("success", func_name) + elif 400 <= rv.status_code < 500: + mock_method.assert_called_once_with("warning", func_name) + else: + mock_method.assert_called_once_with("error", func_name) + return rv + + def delete_assert_metric(self, uri: str, func_name: str) -> Response: + """ + Simple client delete with an extra assertion for statsd metrics + + :param uri: The URI to use for the HTTP DELETE + :param func_name: The function name that the HTTP DELETE triggers + for the statsd metric assertion + :return: HTTP Response + """ + with patch.object( + BaseSupersetModelRestApi, "incr_stats", return_value=None + ) as mock_method: + rv = self.client.delete(uri) + if 200 <= rv.status_code < 400: + mock_method.assert_called_once_with("success", func_name) + elif 400 <= rv.status_code < 500: + mock_method.assert_called_once_with("warning", func_name) + 
else: + mock_method.assert_called_once_with("error", func_name) + return rv + + def post_assert_metric( + self, uri: str, data: Dict[str, Any], func_name: str + ) -> Response: + return post_assert_metric(self.client, uri, data, func_name) + + def put_assert_metric( + self, uri: str, data: Dict[str, Any], func_name: str + ) -> Response: + """ + Simple client put with an extra assertion for statsd metrics + + :param uri: The URI to use for the HTTP PUT + :param data: The JSON data payload to be posted + :param func_name: The function name that the HTTP PUT triggers + for the statsd metric assertion + :return: HTTP Response + """ + with patch.object( + BaseSupersetModelRestApi, "incr_stats", return_value=None + ) as mock_method: + rv = self.client.put(uri, json=data) + if 200 <= rv.status_code < 400: + mock_method.assert_called_once_with("success", func_name) + elif 400 <= rv.status_code < 500: + mock_method.assert_called_once_with("warning", func_name) + else: + mock_method.assert_called_once_with("error", func_name) + return rv + + @classmethod + def get_dttm(cls): + return datetime.strptime("2019-01-02 03:04:05.678900", "%Y-%m-%d %H:%M:%S.%f") + + +@contextmanager +def db_insert_temp_object(obj: DeclarativeMeta): + """Insert a temporary object in database; delete when done.""" + session = db.session + try: + session.add(obj) + session.commit() + yield obj + finally: + session.delete(obj) + session.commit() diff --git a/tests/integration_tests/cache_tests.py b/tests/integration_tests/cache_tests.py new file mode 100644 index 0000000000000..a7da8a50d2a59 --- /dev/null +++ b/tests/integration_tests/cache_tests.py @@ -0,0 +1,104 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
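The caching tests below exercise the SupersetTestCase helpers defined above (login, get_slice, get_json_resp) against the explore_json endpoint. A minimal sketch of that pattern for orientation only; the class name is illustrative, the "Girls" slice comes from the birth_names example data, and this test is not part of the patch:

import json

from superset import db
from tests.integration_tests.base_tests import SupersetTestCase


class ExampleCachedResponseTest(SupersetTestCase):
    def test_is_cached_flag(self):
        # authenticate with the test app's default admin user
        self.login(username="admin")
        # look up an existing slice by name; "Girls" is loaded with the birth_names examples
        slc = self.get_slice("Girls", db.session)
        endpoint = f"/superset/explore_json/{slc.datasource_type}/{slc.datasource_id}/"
        # get_json_resp POSTs the form data and returns the decoded JSON payload
        resp = self.get_json_resp(endpoint, {"form_data": json.dumps(slc.viz.form_data)})
        # explore_json responses carry an is_cached flag, as asserted in the tests below
        assert "is_cached" in resp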
+"""Unit tests for Superset with caching""" +import json + +import pytest + +from superset import app, db +from superset.common.db_query_status import QueryStatus +from superset.extensions import cache_manager +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) + +from .base_tests import SupersetTestCase + + +class TestCache(SupersetTestCase): + def setUp(self): + self.login(username="admin") + cache_manager.cache.clear() + cache_manager.data_cache.clear() + + def tearDown(self): + cache_manager.cache.clear() + cache_manager.data_cache.clear() + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_no_data_cache(self): + data_cache_config = app.config["DATA_CACHE_CONFIG"] + app.config["DATA_CACHE_CONFIG"] = {"CACHE_TYPE": "NullCache"} + cache_manager.init_app(app) + + slc = self.get_slice("Girls", db.session) + json_endpoint = "/superset/explore_json/{}/{}/".format( + slc.datasource_type, slc.datasource_id + ) + resp = self.get_json_resp( + json_endpoint, {"form_data": json.dumps(slc.viz.form_data)} + ) + resp_from_cache = self.get_json_resp( + json_endpoint, {"form_data": json.dumps(slc.viz.form_data)} + ) + # restore DATA_CACHE_CONFIG + app.config["DATA_CACHE_CONFIG"] = data_cache_config + self.assertFalse(resp["is_cached"]) + self.assertFalse(resp_from_cache["is_cached"]) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_slice_data_cache(self): + # Override cache config + data_cache_config = app.config["DATA_CACHE_CONFIG"] + cache_default_timeout = app.config["CACHE_DEFAULT_TIMEOUT"] + app.config["CACHE_DEFAULT_TIMEOUT"] = 100 + app.config["DATA_CACHE_CONFIG"] = { + "CACHE_TYPE": "SimpleCache", + "CACHE_DEFAULT_TIMEOUT": 10, + } + cache_manager.init_app(app) + + slc = self.get_slice("Boys", db.session) + json_endpoint = "/superset/explore_json/{}/{}/".format( + slc.datasource_type, slc.datasource_id + ) + resp = self.get_json_resp( + json_endpoint, {"form_data": json.dumps(slc.viz.form_data)} + ) + resp_from_cache = self.get_json_resp( + json_endpoint, {"form_data": json.dumps(slc.viz.form_data)} + ) + self.assertFalse(resp["is_cached"]) + self.assertTrue(resp_from_cache["is_cached"]) + # should fallback to default cache timeout + self.assertEqual(resp_from_cache["cache_timeout"], 10) + self.assertEqual(resp_from_cache["status"], QueryStatus.SUCCESS) + self.assertEqual(resp["data"], resp_from_cache["data"]) + self.assertEqual(resp["query"], resp_from_cache["query"]) + # should exists in `data_cache` + self.assertEqual( + cache_manager.data_cache.get(resp_from_cache["cache_key"])["query"], + resp_from_cache["query"], + ) + # should not exists in `cache` + self.assertIsNone(cache_manager.cache.get(resp_from_cache["cache_key"])) + + # reset cache config + app.config["DATA_CACHE_CONFIG"] = data_cache_config + app.config["CACHE_DEFAULT_TIMEOUT"] = cache_default_timeout + cache_manager.init_app(app) diff --git a/tests/integration_tests/cachekeys/__init__.py b/tests/integration_tests/cachekeys/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/cachekeys/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/cachekeys/api_tests.py b/tests/integration_tests/cachekeys/api_tests.py new file mode 100644 index 0000000000000..d3552bfc8df26 --- /dev/null +++ b/tests/integration_tests/cachekeys/api_tests.py @@ -0,0 +1,167 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# isort:skip_file +"""Unit tests for Superset""" +from typing import Dict, Any + +import pytest + +from superset.extensions import cache_manager, db +from superset.models.cache import CacheKey +from superset.utils.core import get_example_default_schema +from tests.integration_tests.base_tests import ( + SupersetTestCase, + post_assert_metric, +) + + +@pytest.fixture +def invalidate(test_client, login_as_admin): + def _invalidate(params: Dict[str, Any]): + return post_assert_metric( + test_client, "api/v1/cachekey/invalidate", params, "invalidate" + ) + + return _invalidate + + +def test_invalidate_cache(invalidate): + rv = invalidate({"datasource_uids": ["3__table"]}) + assert rv.status_code == 201 + + +def test_invalidate_existing_cache(invalidate): + db.session.add(CacheKey(cache_key="cache_key", datasource_uid="3__table")) + db.session.commit() + cache_manager.cache.set("cache_key", "value") + + rv = invalidate({"datasource_uids": ["3__table"]}) + + assert rv.status_code == 201 + assert cache_manager.cache.get("cache_key") == None + assert ( + not db.session.query(CacheKey).filter(CacheKey.cache_key == "cache_key").first() + ) + + +def test_invalidate_cache_empty_input(invalidate): + rv = invalidate({"datasource_uids": []}) + assert rv.status_code == 201 + + rv = invalidate({"datasources": []}) + assert rv.status_code == 201 + + rv = invalidate({"datasource_uids": [], "datasources": []}) + assert rv.status_code == 201 + + +def test_invalidate_cache_bad_request(invalidate): + rv = invalidate( + { + "datasource_uids": [], + "datasources": [{"datasource_name": "", "datasource_type": None}], + } + ) + assert rv.status_code == 400 + + rv = invalidate( + { + "datasource_uids": [], + "datasources": [{"datasource_name": "", "datasource_type": "bla"}], + } + ) + assert rv.status_code == 400 + + rv = invalidate( + { + "datasource_uids": "datasource", + "datasources": [{"datasource_name": "", 
"datasource_type": "bla"}], + } + ) + assert rv.status_code == 400 + + +def test_invalidate_existing_caches(invalidate): + schema = get_example_default_schema() or "" + bn = SupersetTestCase.get_birth_names_dataset() + + db.session.add(CacheKey(cache_key="cache_key1", datasource_uid="3__druid")) + db.session.add(CacheKey(cache_key="cache_key2", datasource_uid="3__druid")) + db.session.add(CacheKey(cache_key="cache_key4", datasource_uid=f"{bn.id}__table")) + db.session.add(CacheKey(cache_key="cache_keyX", datasource_uid="X__table")) + db.session.commit() + + cache_manager.cache.set("cache_key1", "value") + cache_manager.cache.set("cache_key2", "value") + cache_manager.cache.set("cache_key4", "value") + cache_manager.cache.set("cache_keyX", "value") + + rv = invalidate( + { + "datasource_uids": ["3__druid", "4__druid"], + "datasources": [ + { + "datasource_name": "birth_names", + "database_name": "examples", + "schema": schema, + "datasource_type": "table", + }, + { # table exists, no cache to invalidate + "datasource_name": "energy_usage", + "database_name": "examples", + "schema": schema, + "datasource_type": "table", + }, + { # table doesn't exist + "datasource_name": "does_not_exist", + "database_name": "examples", + "schema": schema, + "datasource_type": "table", + }, + { # database doesn't exist + "datasource_name": "birth_names", + "database_name": "does_not_exist", + "schema": schema, + "datasource_type": "table", + }, + { # database doesn't exist + "datasource_name": "birth_names", + "database_name": "examples", + "schema": "does_not_exist", + "datasource_type": "table", + }, + ], + } + ) + + assert rv.status_code == 201 + assert cache_manager.cache.get("cache_key1") is None + assert cache_manager.cache.get("cache_key2") is None + assert cache_manager.cache.get("cache_key4") is None + assert cache_manager.cache.get("cache_keyX") == "value" + assert ( + not db.session.query(CacheKey) + .filter(CacheKey.cache_key.in_({"cache_key1", "cache_key2", "cache_key4"})) + .first() + ) + assert ( + db.session.query(CacheKey) + .filter(CacheKey.cache_key == "cache_keyX") + .first() + .datasource_uid + == "X__table" + ) diff --git a/tests/integration_tests/celery_tests.py b/tests/integration_tests/celery_tests.py new file mode 100644 index 0000000000000..8693a888879d4 --- /dev/null +++ b/tests/integration_tests/celery_tests.py @@ -0,0 +1,502 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# isort:skip_file +"""Unit tests for Superset Celery worker""" +import datetime +import random +import string +import time +import unittest.mock as mock +from typing import Optional +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) + +import pytest + +import flask +from flask import current_app + +from superset import db, sql_lab +from superset.common.db_query_status import QueryStatus +from superset.result_set import SupersetResultSet +from superset.db_engine_specs.base import BaseEngineSpec +from superset.errors import ErrorLevel, SupersetErrorType +from superset.extensions import celery_app +from superset.models.sql_lab import Query +from superset.sql_parse import ParsedQuery, CtasMethod +from superset.utils.core import backend +from superset.utils.database import get_example_database +from tests.integration_tests.conftest import CTAS_SCHEMA_NAME +from tests.integration_tests.test_app import app + +CELERY_SLEEP_TIME = 6 +QUERY = "SELECT name FROM birth_names LIMIT 1" +TEST_SYNC = "test_sync" +TEST_ASYNC_LOWER_LIMIT = "test_async_lower_limit" +TEST_SYNC_CTA = "test_sync_cta" +TEST_ASYNC_CTA = "test_async_cta" +TEST_ASYNC_CTA_CONFIG = "test_async_cta_config" +TMP_TABLES = [ + TEST_SYNC, + TEST_SYNC_CTA, + TEST_ASYNC_CTA, + TEST_ASYNC_CTA_CONFIG, + TEST_ASYNC_LOWER_LIMIT, +] + + +def get_query_by_id(id: int): + db.session.commit() + query = db.session.query(Query).filter_by(id=id).first() + return query + + +@pytest.fixture(autouse=True, scope="module") +def setup_sqllab(): + yield + # clean up after all tests are done + # use a new app context + with app.app_context(): + db.session.query(Query).delete() + db.session.commit() + for tbl in TMP_TABLES: + drop_table_if_exists(f"{tbl}_{CtasMethod.TABLE.lower()}", CtasMethod.TABLE) + drop_table_if_exists(f"{tbl}_{CtasMethod.VIEW.lower()}", CtasMethod.VIEW) + drop_table_if_exists( + f"{CTAS_SCHEMA_NAME}.{tbl}_{CtasMethod.TABLE.lower()}", CtasMethod.TABLE + ) + drop_table_if_exists( + f"{CTAS_SCHEMA_NAME}.{tbl}_{CtasMethod.VIEW.lower()}", CtasMethod.VIEW + ) + + +def run_sql( + test_client, + sql, + cta=False, + ctas_method=CtasMethod.TABLE, + tmp_table="tmp", + async_=False, +): + db_id = get_example_database().id + return test_client.post( + "/api/v1/sqllab/execute/", + json=dict( + database_id=db_id, + sql=sql, + runAsync=async_, + select_as_cta=cta, + tmp_table_name=tmp_table, + client_id="".join(random.choice(string.ascii_lowercase) for i in range(5)), + ctas_method=ctas_method, + ), + ).json + + +def drop_table_if_exists(table_name: str, table_type: CtasMethod) -> None: + """Drop table if it exists, works on any DB""" + sql = f"DROP {table_type} IF EXISTS {table_name}" + database = get_example_database() + with database.get_sqla_engine_with_context() as engine: + engine.execute(sql) + + +def quote_f(value: Optional[str]): + if not value: + return value + return get_example_database().inspector.engine.dialect.identifier_preparer.quote_identifier( + value + ) + + +def cta_result(ctas_method: CtasMethod): + if backend() != "presto": + return [], [] + if ctas_method == CtasMethod.TABLE: + return [{"rows": 1}], [{"name": "rows", "type": "BIGINT", "is_dttm": False}] + return [{"result": True}], [{"name": "result", "type": "BOOLEAN", "is_dttm": False}] + + +# TODO(bkyryliuk): quote table and schema names for all databases +def get_select_star(table: str, limit: int, schema: Optional[str] = None): + if backend() in {"presto", "hive"}: + schema = 
quote_f(schema) + table = quote_f(table) + if schema: + return f"SELECT *\nFROM {schema}.{table}\nLIMIT {limit}" + return f"SELECT *\nFROM {table}\nLIMIT {limit}" + + +@pytest.mark.usefixtures("login_as_admin") +@pytest.mark.parametrize("ctas_method", [CtasMethod.TABLE, CtasMethod.VIEW]) +def test_run_sync_query_dont_exist(test_client, ctas_method): + examples_db = get_example_database() + engine_name = examples_db.db_engine_spec.engine_name + sql_dont_exist = "SELECT name FROM table_dont_exist" + result = run_sql(test_client, sql_dont_exist, cta=True, ctas_method=ctas_method) + if backend() == "sqlite" and ctas_method == CtasMethod.VIEW: + assert QueryStatus.SUCCESS == result["status"], result + elif backend() == "presto": + assert ( + result["errors"][0]["error_type"] + == SupersetErrorType.TABLE_DOES_NOT_EXIST_ERROR + ) + assert result["errors"][0]["level"] == ErrorLevel.ERROR + assert result["errors"][0]["extra"] == { + "engine_name": "Presto", + "issue_codes": [ + { + "code": 1003, + "message": "Issue 1003 - There is a syntax error in the SQL query. Perhaps there was a misspelling or a typo.", + }, + { + "code": 1005, + "message": "Issue 1005 - The table was deleted or renamed in the database.", + }, + ], + } + else: + assert ( + result["errors"][0]["error_type"] + == SupersetErrorType.GENERIC_DB_ENGINE_ERROR + ) + assert result["errors"][0]["level"] == ErrorLevel.ERROR + assert result["errors"][0]["extra"] == { + "issue_codes": [ + { + "code": 1002, + "message": "Issue 1002 - The database returned an unexpected error.", + } + ], + "engine_name": engine_name, + } + + +@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices", "login_as_admin") +@pytest.mark.parametrize("ctas_method", [CtasMethod.TABLE, CtasMethod.VIEW]) +def test_run_sync_query_cta(test_client, ctas_method): + tmp_table_name = f"{TEST_SYNC}_{ctas_method.lower()}" + result = run_sql( + test_client, QUERY, tmp_table=tmp_table_name, cta=True, ctas_method=ctas_method + ) + assert QueryStatus.SUCCESS == result["query"]["state"], result + assert cta_result(ctas_method) == (result["data"], result["columns"]) + + # Check the data in the tmp table. 
+ select_query = get_query_by_id(result["query"]["serverId"]) + results = run_sql(test_client, select_query.select_sql) + assert QueryStatus.SUCCESS == results["status"], results + assert len(results["data"]) > 0 + + delete_tmp_view_or_table(tmp_table_name, ctas_method) + + +@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices", "login_as_admin") +def test_run_sync_query_cta_no_data(test_client): + sql_empty_result = "SELECT * FROM birth_names WHERE name='random'" + result = run_sql(test_client, sql_empty_result) + assert QueryStatus.SUCCESS == result["query"]["state"] + assert ([], []) == (result["data"], result["columns"]) + + query = get_query_by_id(result["query"]["serverId"]) + assert QueryStatus.SUCCESS == query.status + + +@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices", "login_as_admin") +@pytest.mark.parametrize("ctas_method", [CtasMethod.TABLE, CtasMethod.VIEW]) +@mock.patch( + "superset.sqllab.sqllab_execution_context.get_cta_schema_name", + lambda d, u, s, sql: CTAS_SCHEMA_NAME, +) +def test_run_sync_query_cta_config(test_client, ctas_method): + if backend() == "sqlite": + # sqlite doesn't support schemas + return + tmp_table_name = f"{TEST_SYNC_CTA}_{ctas_method.lower()}" + result = run_sql( + test_client, QUERY, cta=True, ctas_method=ctas_method, tmp_table=tmp_table_name + ) + assert QueryStatus.SUCCESS == result["query"]["state"], result + assert cta_result(ctas_method) == (result["data"], result["columns"]) + + query = get_query_by_id(result["query"]["serverId"]) + assert ( + f"CREATE {ctas_method} {CTAS_SCHEMA_NAME}.{tmp_table_name} AS \n{QUERY}" + == query.executed_sql + ) + assert query.select_sql == get_select_star( + tmp_table_name, limit=query.limit, schema=CTAS_SCHEMA_NAME + ) + results = run_sql(test_client, query.select_sql) + assert QueryStatus.SUCCESS == results["status"], result + + delete_tmp_view_or_table(f"{CTAS_SCHEMA_NAME}.{tmp_table_name}", ctas_method) + + +@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices", "login_as_admin") +@pytest.mark.parametrize("ctas_method", [CtasMethod.TABLE, CtasMethod.VIEW]) +@mock.patch( + "superset.sqllab.sqllab_execution_context.get_cta_schema_name", + lambda d, u, s, sql: CTAS_SCHEMA_NAME, +) +def test_run_async_query_cta_config(test_client, ctas_method): + if backend() in {"sqlite", "mysql"}: + # sqlite doesn't support schemas, mysql is flaky + return + tmp_table_name = f"{TEST_ASYNC_CTA_CONFIG}_{ctas_method.lower()}" + result = run_sql( + test_client, + QUERY, + cta=True, + ctas_method=ctas_method, + async_=True, + tmp_table=tmp_table_name, + ) + + query = wait_for_success(result) + + assert QueryStatus.SUCCESS == query.status + assert ( + get_select_star(tmp_table_name, limit=query.limit, schema=CTAS_SCHEMA_NAME) + == query.select_sql + ) + assert ( + f"CREATE {ctas_method} {CTAS_SCHEMA_NAME}.{tmp_table_name} AS \n{QUERY}" + == query.executed_sql + ) + + delete_tmp_view_or_table(f"{CTAS_SCHEMA_NAME}.{tmp_table_name}", ctas_method) + + +@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices", "login_as_admin") +@pytest.mark.parametrize("ctas_method", [CtasMethod.TABLE, CtasMethod.VIEW]) +def test_run_async_cta_query(test_client, ctas_method): + if backend() == "mysql": + # failing + return + + table_name = f"{TEST_ASYNC_CTA}_{ctas_method.lower()}" + result = run_sql( + test_client, + QUERY, + cta=True, + ctas_method=ctas_method, + async_=True, + tmp_table=table_name, + ) + + query = wait_for_success(result) + + assert QueryStatus.SUCCESS == query.status + assert 
get_select_star(table_name, query.limit) in query.select_sql + + assert f"CREATE {ctas_method} {table_name} AS \n{QUERY}" == query.executed_sql + assert QUERY == query.sql + assert query.rows == (1 if backend() == "presto" else 0) + assert query.select_as_cta + assert query.select_as_cta_used + + delete_tmp_view_or_table(table_name, ctas_method) + + +@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices", "login_as_admin") +@pytest.mark.parametrize("ctas_method", [CtasMethod.TABLE, CtasMethod.VIEW]) +def test_run_async_cta_query_with_lower_limit(test_client, ctas_method): + if backend() == "mysql": + # failing + return + + tmp_table = f"{TEST_ASYNC_LOWER_LIMIT}_{ctas_method.lower()}" + result = run_sql( + test_client, + QUERY, + cta=True, + ctas_method=ctas_method, + async_=True, + tmp_table=tmp_table, + ) + query = wait_for_success(result) + assert QueryStatus.SUCCESS == query.status + + sqlite_select_sql = f"SELECT *\nFROM {tmp_table}\nLIMIT {query.limit}\nOFFSET 0" + assert query.select_sql == ( + sqlite_select_sql + if backend() == "sqlite" + else get_select_star(tmp_table, query.limit) + ) + + assert f"CREATE {ctas_method} {tmp_table} AS \n{QUERY}" == query.executed_sql + assert QUERY == query.sql + + assert query.rows == (1 if backend() == "presto" else 0) + assert query.limit == 10000 + assert query.select_as_cta + assert query.select_as_cta_used + + delete_tmp_view_or_table(tmp_table, ctas_method) + + +SERIALIZATION_DATA = [("a", 4, 4.0, datetime.datetime(2019, 8, 18, 16, 39, 16, 660000))] +CURSOR_DESCR = ( + ("a", "string"), + ("b", "int"), + ("c", "float"), + ("d", "datetime"), +) + + +def test_default_data_serialization(): + db_engine_spec = BaseEngineSpec() + results = SupersetResultSet(SERIALIZATION_DATA, CURSOR_DESCR, db_engine_spec) + + with mock.patch.object( + db_engine_spec, "expand_data", wraps=db_engine_spec.expand_data + ) as expand_data: + data = sql_lab._serialize_and_expand_data(results, db_engine_spec, False, True) + expand_data.assert_called_once() + assert isinstance(data[0], list) + + +def test_new_data_serialization(): + db_engine_spec = BaseEngineSpec() + results = SupersetResultSet(SERIALIZATION_DATA, CURSOR_DESCR, db_engine_spec) + + with mock.patch.object( + db_engine_spec, "expand_data", wraps=db_engine_spec.expand_data + ) as expand_data: + data = sql_lab._serialize_and_expand_data(results, db_engine_spec, True) + expand_data.assert_not_called() + assert isinstance(data[0], bytes) + + +@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") +def test_default_payload_serialization(): + use_new_deserialization = False + db_engine_spec = BaseEngineSpec() + results = SupersetResultSet(SERIALIZATION_DATA, CURSOR_DESCR, db_engine_spec) + query = { + "database_id": 1, + "sql": "SELECT * FROM birth_names LIMIT 100", + "status": QueryStatus.PENDING, + } + ( + serialized_data, + selected_columns, + all_columns, + expanded_columns, + ) = sql_lab._serialize_and_expand_data( + results, db_engine_spec, use_new_deserialization + ) + payload = { + "query_id": 1, + "status": QueryStatus.SUCCESS, + "state": QueryStatus.SUCCESS, + "data": serialized_data, + "columns": all_columns, + "selected_columns": selected_columns, + "expanded_columns": expanded_columns, + "query": query, + } + + serialized = sql_lab._serialize_payload(payload, use_new_deserialization) + assert isinstance(serialized, str) + + +@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") +def test_msgpack_payload_serialization(): + use_new_deserialization = True +
db_engine_spec = BaseEngineSpec() + results = SupersetResultSet(SERIALIZATION_DATA, CURSOR_DESCR, db_engine_spec) + query = { + "database_id": 1, + "sql": "SELECT * FROM birth_names LIMIT 100", + "status": QueryStatus.PENDING, + } + ( + serialized_data, + selected_columns, + all_columns, + expanded_columns, + ) = sql_lab._serialize_and_expand_data( + results, db_engine_spec, use_new_deserialization + ) + payload = { + "query_id": 1, + "status": QueryStatus.SUCCESS, + "state": QueryStatus.SUCCESS, + "data": serialized_data, + "columns": all_columns, + "selected_columns": selected_columns, + "expanded_columns": expanded_columns, + "query": query, + } + + serialized = sql_lab._serialize_payload(payload, use_new_deserialization) + assert isinstance(serialized, bytes) + + +def test_create_table_as(): + q = ParsedQuery("SELECT * FROM outer_space;") + + assert "CREATE TABLE tmp AS \nSELECT * FROM outer_space" == q.as_create_table("tmp") + assert ( + "DROP TABLE IF EXISTS tmp;\nCREATE TABLE tmp AS \nSELECT * FROM outer_space" + == q.as_create_table("tmp", overwrite=True) + ) + + # now without a semicolon + q = ParsedQuery("SELECT * FROM outer_space") + assert "CREATE TABLE tmp AS \nSELECT * FROM outer_space" == q.as_create_table("tmp") + + # now a multi-line query + multi_line_query = "SELECT * FROM planets WHERE\n" "Luke_Father = 'Darth Vader'" + q = ParsedQuery(multi_line_query) + assert ( + "CREATE TABLE tmp AS \nSELECT * FROM planets WHERE\nLuke_Father = 'Darth Vader'" + == q.as_create_table("tmp") + ) + + +def test_in_app_context(): + @celery_app.task() + def my_task(): + assert current_app + + # Make sure we can call tasks with an app already setup + my_task() + + # Make sure the app gets pushed onto the stack properly + try: + popped_app = flask._app_ctx_stack.pop() + my_task() + finally: + flask._app_ctx_stack.push(popped_app) + + +def delete_tmp_view_or_table(name: str, db_object_type: str): + db.get_engine().execute(f"DROP {db_object_type} IF EXISTS {name}") + + +def wait_for_success(result): + for _ in range(CELERY_SLEEP_TIME * 2): + time.sleep(0.5) + query = get_query_by_id(result["query"]["serverId"]) + if QueryStatus.SUCCESS == query.status: + break + return query diff --git a/tests/integration_tests/charts/__init__.py b/tests/integration_tests/charts/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/charts/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
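The async CTA tests above share one submit-then-poll shape: run the query with cta=True and async_=True, wait for the Celery worker to finish, then inspect the generated CREATE ... AS statement and drop the temporary object. A condensed sketch of that flow using the module-level helpers defined in this file (the function and table names are illustrative, not part of the patch):

def example_async_cta_flow(test_client):
    tmp_table = "example_async_cta"  # illustrative temporary table name
    result = run_sql(
        test_client,
        QUERY,  # "SELECT name FROM birth_names LIMIT 1"
        cta=True,
        ctas_method=CtasMethod.TABLE,
        async_=True,
        tmp_table=tmp_table,
    )
    # poll the Query row until the Celery worker marks it successful
    query = wait_for_success(result)
    assert query.status == QueryStatus.SUCCESS
    # the worker rewrites the statement into CREATE TABLE ... AS <original query>
    assert query.executed_sql.startswith(f"CREATE {CtasMethod.TABLE} {tmp_table} AS")
    # clean up the temporary table created by the CTA query
    delete_tmp_view_or_table(tmp_table, CtasMethod.TABLE)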
diff --git a/tests/integration_tests/charts/api_tests.py b/tests/integration_tests/charts/api_tests.py new file mode 100644 index 0000000000000..38fa1b7a6c9d3 --- /dev/null +++ b/tests/integration_tests/charts/api_tests.py @@ -0,0 +1,1533 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# isort:skip_file +"""Unit tests for Superset""" +import json +import logging +from io import BytesIO +from zipfile import is_zipfile, ZipFile + +import prison +import pytest +import yaml +from sqlalchemy import and_ +from sqlalchemy.sql import func + +from superset.connectors.sqla.models import SqlaTable +from superset.extensions import cache_manager, db +from superset.models.core import Database, FavStar, FavStarClassName +from superset.models.dashboard import Dashboard +from superset.reports.models import ReportSchedule, ReportScheduleType +from superset.models.slice import Slice +from superset.utils.core import get_example_default_schema + +from tests.integration_tests.base_api_tests import ApiOwnersTestCaseMixin +from tests.integration_tests.base_tests import SupersetTestCase +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) +from tests.integration_tests.fixtures.energy_dashboard import ( + load_energy_table_with_slice, + load_energy_table_data, +) +from tests.integration_tests.fixtures.importexport import ( + chart_config, + chart_metadata_config, + database_config, + dataset_config, + dataset_metadata_config, +) +from tests.integration_tests.fixtures.unicode_dashboard import ( + load_unicode_dashboard_with_slice, + load_unicode_data, +) +from tests.integration_tests.fixtures.world_bank_dashboard import ( + load_world_bank_dashboard_with_slices, + load_world_bank_data, +) +from tests.integration_tests.insert_chart_mixin import InsertChartMixin +from tests.integration_tests.test_app import app +from tests.integration_tests.utils.get_dashboards import get_dashboards_ids + +CHART_DATA_URI = "api/v1/chart/data" +CHARTS_FIXTURE_COUNT = 10 + + +class TestChartApi(SupersetTestCase, ApiOwnersTestCaseMixin, InsertChartMixin): + resource_name = "chart" + + @pytest.fixture(autouse=True) + def clear_data_cache(self): + with app.app_context(): + cache_manager.data_cache.clear() + yield + + @pytest.fixture() + def create_charts(self): + with self.create_app().app_context(): + charts = [] + admin = self.get_user("admin") + for cx in range(CHARTS_FIXTURE_COUNT - 1): + charts.append(self.insert_chart(f"name{cx}", [admin.id], 1)) + fav_charts = [] + for cx in range(round(CHARTS_FIXTURE_COUNT / 2)): + fav_star = FavStar( + user_id=admin.id, class_name="slice", obj_id=charts[cx].id + ) + db.session.add(fav_star) + db.session.commit() + fav_charts.append(fav_star) + yield charts + + # rollback 
changes + for chart in charts: + db.session.delete(chart) + for fav_chart in fav_charts: + db.session.delete(fav_chart) + db.session.commit() + + @pytest.fixture() + def create_charts_created_by_gamma(self): + with self.create_app().app_context(): + charts = [] + user = self.get_user("gamma") + for cx in range(CHARTS_FIXTURE_COUNT - 1): + charts.append(self.insert_chart(f"gamma{cx}", [user.id], 1)) + yield charts + # rollback changes + for chart in charts: + db.session.delete(chart) + db.session.commit() + + @pytest.fixture() + def create_certified_charts(self): + with self.create_app().app_context(): + certified_charts = [] + admin = self.get_user("admin") + for cx in range(CHARTS_FIXTURE_COUNT): + certified_charts.append( + self.insert_chart( + f"certified{cx}", + [admin.id], + 1, + certified_by="John Doe", + certification_details="Sample certification", + ) + ) + + yield certified_charts + + # rollback changes + for chart in certified_charts: + db.session.delete(chart) + db.session.commit() + + @pytest.fixture() + def create_chart_with_report(self): + with self.create_app().app_context(): + admin = self.get_user("admin") + chart = self.insert_chart(f"chart_report", [admin.id], 1) + report_schedule = ReportSchedule( + type=ReportScheduleType.REPORT, + name="report_with_chart", + crontab="* * * * *", + chart=chart, + ) + db.session.commit() + + yield chart + + # rollback changes + db.session.delete(report_schedule) + db.session.delete(chart) + db.session.commit() + + @pytest.fixture() + def add_dashboard_to_chart(self): + with self.create_app().app_context(): + admin = self.get_user("admin") + + self.chart = self.insert_chart("My chart", [admin.id], 1) + + self.original_dashboard = Dashboard() + self.original_dashboard.dashboard_title = "Original Dashboard" + self.original_dashboard.slug = "slug" + self.original_dashboard.owners = [admin] + self.original_dashboard.slices = [self.chart] + self.original_dashboard.published = False + db.session.add(self.original_dashboard) + + self.new_dashboard = Dashboard() + self.new_dashboard.dashboard_title = "New Dashboard" + self.new_dashboard.slug = "new_slug" + self.new_dashboard.owners = [admin] + self.new_dashboard.slices = [] + self.new_dashboard.published = False + db.session.add(self.new_dashboard) + + db.session.commit() + + yield self.chart + + db.session.delete(self.original_dashboard) + db.session.delete(self.new_dashboard) + db.session.delete(self.chart) + db.session.commit() + + def test_info_security_chart(self): + """ + Chart API: Test info security + """ + self.login(username="admin") + params = {"keys": ["permissions"]} + uri = f"api/v1/chart/_info?q={prison.dumps(params)}" + rv = self.get_assert_metric(uri, "info") + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 200 + assert set(data["permissions"]) == {"can_read", "can_write", "can_export"} + + def create_chart_import(self): + buf = BytesIO() + with ZipFile(buf, "w") as bundle: + with bundle.open("chart_export/metadata.yaml", "w") as fp: + fp.write(yaml.safe_dump(chart_metadata_config).encode()) + with bundle.open( + "chart_export/databases/imported_database.yaml", "w" + ) as fp: + fp.write(yaml.safe_dump(database_config).encode()) + with bundle.open("chart_export/datasets/imported_dataset.yaml", "w") as fp: + fp.write(yaml.safe_dump(dataset_config).encode()) + with bundle.open("chart_export/charts/imported_chart.yaml", "w") as fp: + fp.write(yaml.safe_dump(chart_config).encode()) + buf.seek(0) + return buf + + def test_delete_chart(self): + """ + Chart API: 
Test delete + """ + admin_id = self.get_user("admin").id + chart_id = self.insert_chart("name", [admin_id], 1).id + self.login(username="admin") + uri = f"api/v1/chart/{chart_id}" + rv = self.delete_assert_metric(uri, "delete") + self.assertEqual(rv.status_code, 200) + model = db.session.query(Slice).get(chart_id) + self.assertEqual(model, None) + + def test_delete_bulk_charts(self): + """ + Chart API: Test delete bulk + """ + admin = self.get_user("admin") + chart_count = 4 + chart_ids = list() + for chart_name_index in range(chart_count): + chart_ids.append( + self.insert_chart(f"title{chart_name_index}", [admin.id], 1, admin).id + ) + self.login(username="admin") + argument = chart_ids + uri = f"api/v1/chart/?q={prison.dumps(argument)}" + rv = self.delete_assert_metric(uri, "bulk_delete") + self.assertEqual(rv.status_code, 200) + response = json.loads(rv.data.decode("utf-8")) + expected_response = {"message": f"Deleted {chart_count} charts"} + self.assertEqual(response, expected_response) + for chart_id in chart_ids: + model = db.session.query(Slice).get(chart_id) + self.assertEqual(model, None) + + def test_delete_bulk_chart_bad_request(self): + """ + Chart API: Test delete bulk bad request + """ + chart_ids = [1, "a"] + self.login(username="admin") + argument = chart_ids + uri = f"api/v1/chart/?q={prison.dumps(argument)}" + rv = self.delete_assert_metric(uri, "bulk_delete") + self.assertEqual(rv.status_code, 400) + + def test_delete_not_found_chart(self): + """ + Chart API: Test not found delete + """ + self.login(username="admin") + chart_id = 1000 + uri = f"api/v1/chart/{chart_id}" + rv = self.delete_assert_metric(uri, "delete") + self.assertEqual(rv.status_code, 404) + + @pytest.mark.usefixtures("create_chart_with_report") + def test_delete_chart_with_report(self): + """ + Chart API: Test delete with associated report + """ + self.login(username="admin") + chart = ( + db.session.query(Slice) + .filter(Slice.slice_name == "chart_report") + .one_or_none() + ) + uri = f"api/v1/chart/{chart.id}" + rv = self.client.delete(uri) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(rv.status_code, 422) + expected_response = { + "message": "There are associated alerts or reports: report_with_chart" + } + self.assertEqual(response, expected_response) + + def test_delete_bulk_charts_not_found(self): + """ + Chart API: Test delete bulk not found + """ + max_id = db.session.query(func.max(Slice.id)).scalar() + chart_ids = [max_id + 1, max_id + 2] + self.login(username="admin") + uri = f"api/v1/chart/?q={prison.dumps(chart_ids)}" + rv = self.delete_assert_metric(uri, "bulk_delete") + self.assertEqual(rv.status_code, 404) + + @pytest.mark.usefixtures("create_chart_with_report", "create_charts") + def test_bulk_delete_chart_with_report(self): + """ + Chart API: Test bulk delete with associated report + """ + self.login(username="admin") + chart_with_report = ( + db.session.query(Slice.id) + .filter(Slice.slice_name == "chart_report") + .one_or_none() + ) + + charts = db.session.query(Slice.id).filter(Slice.slice_name.like("name%")).all() + chart_ids = [chart.id for chart in charts] + chart_ids.append(chart_with_report.id) + + uri = f"api/v1/chart/?q={prison.dumps(chart_ids)}" + rv = self.client.delete(uri) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(rv.status_code, 422) + expected_response = { + "message": "There are associated alerts or reports: report_with_chart" + } + self.assertEqual(response, expected_response) + + def 
test_delete_chart_admin_not_owned(self): + """ + Chart API: Test admin delete not owned + """ + gamma_id = self.get_user("gamma").id + chart_id = self.insert_chart("title", [gamma_id], 1).id + + self.login(username="admin") + uri = f"api/v1/chart/{chart_id}" + rv = self.delete_assert_metric(uri, "delete") + self.assertEqual(rv.status_code, 200) + model = db.session.query(Slice).get(chart_id) + self.assertEqual(model, None) + + def test_delete_bulk_chart_admin_not_owned(self): + """ + Chart API: Test admin delete bulk not owned + """ + gamma_id = self.get_user("gamma").id + chart_count = 4 + chart_ids = list() + for chart_name_index in range(chart_count): + chart_ids.append( + self.insert_chart(f"title{chart_name_index}", [gamma_id], 1).id + ) + + self.login(username="admin") + argument = chart_ids + uri = f"api/v1/chart/?q={prison.dumps(argument)}" + rv = self.delete_assert_metric(uri, "bulk_delete") + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(rv.status_code, 200) + expected_response = {"message": f"Deleted {chart_count} charts"} + self.assertEqual(response, expected_response) + + for chart_id in chart_ids: + model = db.session.query(Slice).get(chart_id) + self.assertEqual(model, None) + + def test_delete_chart_not_owned(self): + """ + Chart API: Test delete try not owned + """ + user_alpha1 = self.create_user( + "alpha1", "password", "Alpha", email="alpha1@superset.org" + ) + user_alpha2 = self.create_user( + "alpha2", "password", "Alpha", email="alpha2@superset.org" + ) + chart = self.insert_chart("title", [user_alpha1.id], 1) + self.login(username="alpha2", password="password") + uri = f"api/v1/chart/{chart.id}" + rv = self.delete_assert_metric(uri, "delete") + self.assertEqual(rv.status_code, 403) + db.session.delete(chart) + db.session.delete(user_alpha1) + db.session.delete(user_alpha2) + db.session.commit() + + def test_delete_bulk_chart_not_owned(self): + """ + Chart API: Test delete bulk try not owned + """ + user_alpha1 = self.create_user( + "alpha1", "password", "Alpha", email="alpha1@superset.org" + ) + user_alpha2 = self.create_user( + "alpha2", "password", "Alpha", email="alpha2@superset.org" + ) + + chart_count = 4 + charts = list() + for chart_name_index in range(chart_count): + charts.append( + self.insert_chart(f"title{chart_name_index}", [user_alpha1.id], 1) + ) + + owned_chart = self.insert_chart("title_owned", [user_alpha2.id], 1) + + self.login(username="alpha2", password="password") + + # verify we can't delete not owned charts + arguments = [chart.id for chart in charts] + uri = f"api/v1/chart/?q={prison.dumps(arguments)}" + rv = self.delete_assert_metric(uri, "bulk_delete") + self.assertEqual(rv.status_code, 403) + response = json.loads(rv.data.decode("utf-8")) + expected_response = {"message": "Forbidden"} + self.assertEqual(response, expected_response) + + # # nothing is deleted in bulk with a list of owned and not owned charts + arguments = [chart.id for chart in charts] + [owned_chart.id] + uri = f"api/v1/chart/?q={prison.dumps(arguments)}" + rv = self.delete_assert_metric(uri, "bulk_delete") + self.assertEqual(rv.status_code, 403) + response = json.loads(rv.data.decode("utf-8")) + expected_response = {"message": "Forbidden"} + self.assertEqual(response, expected_response) + + for chart in charts: + db.session.delete(chart) + db.session.delete(owned_chart) + db.session.delete(user_alpha1) + db.session.delete(user_alpha2) + db.session.commit() + + @pytest.mark.usefixtures( + "load_world_bank_dashboard_with_slices", + 
"load_birth_names_dashboard_with_slices", + ) + def test_create_chart(self): + """ + Chart API: Test create chart + """ + dashboards_ids = get_dashboards_ids(db, ["world_health", "births"]) + admin_id = self.get_user("admin").id + chart_data = { + "slice_name": "name1", + "description": "description1", + "owners": [admin_id], + "viz_type": "viz_type1", + "params": "1234", + "cache_timeout": 1000, + "datasource_id": 1, + "datasource_type": "table", + "dashboards": dashboards_ids, + "certified_by": "John Doe", + "certification_details": "Sample certification", + } + self.login(username="admin") + uri = f"api/v1/chart/" + rv = self.post_assert_metric(uri, chart_data, "post") + self.assertEqual(rv.status_code, 201) + data = json.loads(rv.data.decode("utf-8")) + model = db.session.query(Slice).get(data.get("id")) + db.session.delete(model) + db.session.commit() + + def test_create_simple_chart(self): + """ + Chart API: Test create simple chart + """ + chart_data = { + "slice_name": "title1", + "datasource_id": 1, + "datasource_type": "table", + } + self.login(username="admin") + uri = f"api/v1/chart/" + rv = self.post_assert_metric(uri, chart_data, "post") + self.assertEqual(rv.status_code, 201) + data = json.loads(rv.data.decode("utf-8")) + model = db.session.query(Slice).get(data.get("id")) + db.session.delete(model) + db.session.commit() + + def test_create_chart_validate_owners(self): + """ + Chart API: Test create validate owners + """ + chart_data = { + "slice_name": "title1", + "datasource_id": 1, + "datasource_type": "table", + "owners": [1000], + } + self.login(username="admin") + uri = f"api/v1/chart/" + rv = self.post_assert_metric(uri, chart_data, "post") + self.assertEqual(rv.status_code, 422) + response = json.loads(rv.data.decode("utf-8")) + expected_response = {"message": {"owners": ["Owners are invalid"]}} + self.assertEqual(response, expected_response) + + def test_create_chart_validate_params(self): + """ + Chart API: Test create validate params json + """ + chart_data = { + "slice_name": "title1", + "datasource_id": 1, + "datasource_type": "table", + "params": '{"A:"a"}', + } + self.login(username="admin") + uri = f"api/v1/chart/" + rv = self.post_assert_metric(uri, chart_data, "post") + self.assertEqual(rv.status_code, 400) + + def test_create_chart_validate_datasource(self): + """ + Chart API: Test create validate datasource + """ + self.login(username="admin") + chart_data = { + "slice_name": "title1", + "datasource_id": 1, + "datasource_type": "unknown", + } + rv = self.post_assert_metric("/api/v1/chart/", chart_data, "post") + self.assertEqual(rv.status_code, 400) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual( + response, + { + "message": { + "datasource_type": [ + "Must be one of: sl_table, table, dataset, query, saved_query, view." 
+ ] + } + }, + ) + chart_data = { + "slice_name": "title1", + "datasource_id": 0, + "datasource_type": "table", + } + rv = self.post_assert_metric("/api/v1/chart/", chart_data, "post") + self.assertEqual(rv.status_code, 422) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual( + response, {"message": {"datasource_id": ["Datasource does not exist"]}} + ) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_update_chart(self): + """ + Chart API: Test update + """ + schema = get_example_default_schema() + full_table_name = f"{schema}.birth_names" if schema else "birth_names" + + admin = self.get_user("admin") + gamma = self.get_user("gamma") + birth_names_table_id = SupersetTestCase.get_table(name="birth_names").id + chart_id = self.insert_chart( + "title", [admin.id], birth_names_table_id, admin + ).id + dash_id = db.session.query(Dashboard.id).filter_by(slug="births").first()[0] + chart_data = { + "slice_name": "title1_changed", + "description": "description1", + "owners": [gamma.id], + "viz_type": "viz_type1", + "params": """{"a": 1}""", + "cache_timeout": 1000, + "datasource_id": birth_names_table_id, + "datasource_type": "table", + "dashboards": [dash_id], + "certified_by": "Mario Rossi", + "certification_details": "Edited certification", + } + self.login(username="admin") + uri = f"api/v1/chart/{chart_id}" + rv = self.put_assert_metric(uri, chart_data, "put") + self.assertEqual(rv.status_code, 200) + model = db.session.query(Slice).get(chart_id) + related_dashboard = db.session.query(Dashboard).filter_by(slug="births").first() + self.assertEqual(model.created_by, admin) + self.assertEqual(model.slice_name, "title1_changed") + self.assertEqual(model.description, "description1") + self.assertNotIn(admin, model.owners) + self.assertIn(gamma, model.owners) + self.assertEqual(model.viz_type, "viz_type1") + self.assertEqual(model.params, """{"a": 1}""") + self.assertEqual(model.cache_timeout, 1000) + self.assertEqual(model.datasource_id, birth_names_table_id) + self.assertEqual(model.datasource_type, "table") + self.assertEqual(model.datasource_name, full_table_name) + self.assertEqual(model.certified_by, "Mario Rossi") + self.assertEqual(model.certification_details, "Edited certification") + self.assertIn(model.id, [slice.id for slice in related_dashboard.slices]) + db.session.delete(model) + db.session.commit() + + def test_update_chart_new_owner_not_admin(self): + """ + Chart API: Test update set new owner implicitly adds logged in owner + """ + gamma = self.get_user("gamma_no_csv") + alpha = self.get_user("alpha") + chart_id = self.insert_chart("title", [gamma.id], 1).id + chart_data = { + "slice_name": (new_name := "title1_changed"), + "owners": [alpha.id], + } + self.login(username=gamma.username) + uri = f"api/v1/chart/{chart_id}" + rv = self.put_assert_metric(uri, chart_data, "put") + assert rv.status_code == 200 + model = db.session.query(Slice).get(chart_id) + assert model.slice_name == new_name + assert alpha in model.owners + assert gamma in model.owners + db.session.delete(model) + db.session.commit() + + def test_update_chart_new_owner_admin(self): + """ + Chart API: Test update set new owner as admin to other than current user + """ + gamma = self.get_user("gamma") + admin = self.get_user("admin") + chart_id = self.insert_chart("title", [admin.id], 1).id + chart_data = {"slice_name": "title1_changed", "owners": [gamma.id]} + self.login(username="admin") + uri = f"api/v1/chart/{chart_id}" + rv = self.put_assert_metric(uri, chart_data, 
"put") + self.assertEqual(rv.status_code, 200) + model = db.session.query(Slice).get(chart_id) + self.assertNotIn(admin, model.owners) + self.assertIn(gamma, model.owners) + db.session.delete(model) + db.session.commit() + + @pytest.mark.usefixtures("add_dashboard_to_chart") + def test_update_chart_new_dashboards(self): + """ + Chart API: Test update set new owner to current user + """ + chart_data = { + "slice_name": "title1_changed", + "dashboards": [self.new_dashboard.id], + } + self.login(username="admin") + uri = f"api/v1/chart/{self.chart.id}" + rv = self.put_assert_metric(uri, chart_data, "put") + self.assertEqual(rv.status_code, 200) + self.assertIn(self.new_dashboard, self.chart.dashboards) + self.assertNotIn(self.original_dashboard, self.chart.dashboards) + + @pytest.mark.usefixtures("add_dashboard_to_chart") + def test_not_update_chart_none_dashboards(self): + """ + Chart API: Test update set new owner to current user + """ + chart_data = {"slice_name": "title1_changed_again"} + self.login(username="admin") + uri = f"api/v1/chart/{self.chart.id}" + rv = self.put_assert_metric(uri, chart_data, "put") + self.assertEqual(rv.status_code, 200) + self.assertIn(self.original_dashboard, self.chart.dashboards) + self.assertEqual(len(self.chart.dashboards), 1) + + def test_update_chart_not_owned(self): + """ + Chart API: Test update not owned + """ + user_alpha1 = self.create_user( + "alpha1", "password", "Alpha", email="alpha1@superset.org" + ) + user_alpha2 = self.create_user( + "alpha2", "password", "Alpha", email="alpha2@superset.org" + ) + chart = self.insert_chart("title", [user_alpha1.id], 1) + + self.login(username="alpha2", password="password") + chart_data = {"slice_name": "title1_changed"} + uri = f"api/v1/chart/{chart.id}" + rv = self.put_assert_metric(uri, chart_data, "put") + self.assertEqual(rv.status_code, 403) + db.session.delete(chart) + db.session.delete(user_alpha1) + db.session.delete(user_alpha2) + db.session.commit() + + def test_update_chart_linked_with_not_owned_dashboard(self): + """ + Chart API: Test update chart which is linked to not owned dashboard + """ + user_alpha1 = self.create_user( + "alpha1", "password", "Alpha", email="alpha1@superset.org" + ) + user_alpha2 = self.create_user( + "alpha2", "password", "Alpha", email="alpha2@superset.org" + ) + chart = self.insert_chart("title", [user_alpha1.id], 1) + + original_dashboard = Dashboard() + original_dashboard.dashboard_title = "Original Dashboard" + original_dashboard.slug = "slug" + original_dashboard.owners = [user_alpha1] + original_dashboard.slices = [chart] + original_dashboard.published = False + db.session.add(original_dashboard) + + new_dashboard = Dashboard() + new_dashboard.dashboard_title = "Cloned Dashboard" + new_dashboard.slug = "new_slug" + new_dashboard.owners = [user_alpha2] + new_dashboard.slices = [chart] + new_dashboard.published = False + db.session.add(new_dashboard) + + self.login(username="alpha1", password="password") + chart_data_with_invalid_dashboard = { + "slice_name": "title1_changed", + "dashboards": [original_dashboard.id, 0], + } + chart_data = { + "slice_name": "title1_changed", + "dashboards": [original_dashboard.id, new_dashboard.id], + } + uri = f"api/v1/chart/{chart.id}" + + rv = self.put_assert_metric(uri, chart_data_with_invalid_dashboard, "put") + self.assertEqual(rv.status_code, 422) + response = json.loads(rv.data.decode("utf-8")) + expected_response = {"message": {"dashboards": ["Dashboards do not exist"]}} + self.assertEqual(response, expected_response) + + rv = 
self.put_assert_metric(uri, chart_data, "put") + self.assertEqual(rv.status_code, 200) + + db.session.delete(chart) + db.session.delete(original_dashboard) + db.session.delete(new_dashboard) + db.session.delete(user_alpha1) + db.session.delete(user_alpha2) + db.session.commit() + + def test_update_chart_validate_datasource(self): + """ + Chart API: Test update validate datasource + """ + admin = self.get_user("admin") + chart = self.insert_chart("title", owners=[admin.id], datasource_id=1) + self.login(username="admin") + + chart_data = {"datasource_id": 1, "datasource_type": "unknown"} + rv = self.put_assert_metric(f"/api/v1/chart/{chart.id}", chart_data, "put") + self.assertEqual(rv.status_code, 400) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual( + response, + { + "message": { + "datasource_type": [ + "Must be one of: sl_table, table, dataset, query, saved_query, view." + ] + } + }, + ) + + chart_data = {"datasource_id": 0, "datasource_type": "table"} + rv = self.put_assert_metric(f"/api/v1/chart/{chart.id}", chart_data, "put") + self.assertEqual(rv.status_code, 422) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual( + response, {"message": {"datasource_id": ["Datasource does not exist"]}} + ) + + db.session.delete(chart) + db.session.commit() + + def test_update_chart_validate_owners(self): + """ + Chart API: Test update validate owners + """ + chart_data = { + "slice_name": "title1", + "datasource_id": 1, + "datasource_type": "table", + "owners": [1000], + } + self.login(username="admin") + uri = f"api/v1/chart/" + rv = self.client.post(uri, json=chart_data) + self.assertEqual(rv.status_code, 422) + response = json.loads(rv.data.decode("utf-8")) + expected_response = {"message": {"owners": ["Owners are invalid"]}} + self.assertEqual(response, expected_response) + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def test_get_chart(self): + """ + Chart API: Test get chart + """ + admin = self.get_user("admin") + chart = self.insert_chart("title", [admin.id], 1) + self.login(username="admin") + uri = f"api/v1/chart/{chart.id}" + rv = self.get_assert_metric(uri, "get") + self.assertEqual(rv.status_code, 200) + expected_result = { + "cache_timeout": None, + "certified_by": None, + "certification_details": None, + "dashboards": [], + "description": None, + "owners": [ + { + "id": 1, + "username": "admin", + "first_name": "admin", + "last_name": "user", + } + ], + "params": None, + "slice_name": "title", + "viz_type": None, + "query_context": None, + "is_managed_externally": False, + } + data = json.loads(rv.data.decode("utf-8")) + self.assertIn("changed_on_delta_humanized", data["result"]) + self.assertIn("id", data["result"]) + self.assertIn("thumbnail_url", data["result"]) + self.assertIn("url", data["result"]) + for key, value in data["result"].items(): + # We can't assert timestamp values or id/urls + if key not in ( + "changed_on_delta_humanized", + "id", + "thumbnail_url", + "url", + ): + self.assertEqual(value, expected_result[key]) + db.session.delete(chart) + db.session.commit() + + def test_get_chart_not_found(self): + """ + Chart API: Test get chart not found + """ + chart_id = 1000 + self.login(username="admin") + uri = f"api/v1/chart/{chart_id}" + rv = self.get_assert_metric(uri, "get") + self.assertEqual(rv.status_code, 404) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_get_chart_no_data_access(self): + """ + Chart API: Test get chart without data access + """ + 
self.login(username="gamma") + chart_no_access = ( + db.session.query(Slice) + .filter_by(slice_name="Girl Name Cloud") + .one_or_none() + ) + uri = f"api/v1/chart/{chart_no_access.id}" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 404) + + @pytest.mark.usefixtures( + "load_energy_table_with_slice", + "load_birth_names_dashboard_with_slices", + "load_unicode_dashboard_with_slice", + "load_world_bank_dashboard_with_slices", + ) + def test_get_charts(self): + """ + Chart API: Test get charts + """ + self.login(username="admin") + uri = f"api/v1/chart/" + rv = self.get_assert_metric(uri, "get_list") + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual(data["count"], 34) + + @pytest.mark.usefixtures("load_energy_table_with_slice", "add_dashboard_to_chart") + def test_get_charts_dashboards(self): + """ + Chart API: Test get charts with related dashboards + """ + self.login(username="admin") + arguments = { + "filters": [ + {"col": "slice_name", "opr": "eq", "value": self.chart.slice_name} + ] + } + uri = f"api/v1/chart/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + assert data["result"][0]["dashboards"] == [ + { + "id": self.original_dashboard.id, + "dashboard_title": self.original_dashboard.dashboard_title, + } + ] + + @pytest.mark.usefixtures("load_energy_table_with_slice", "add_dashboard_to_chart") + def test_get_charts_dashboard_filter(self): + """ + Chart API: Test get charts with dashboard filter + """ + self.login(username="admin") + arguments = { + "filters": [ + { + "col": "dashboards", + "opr": "rel_m_m", + "value": self.original_dashboard.id, + } + ] + } + uri = f"api/v1/chart/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + result = data["result"] + assert len(result) == 1 + assert result[0]["slice_name"] == self.chart.slice_name + + def test_get_charts_changed_on(self): + """ + Dashboard API: Test get charts changed on + """ + admin = self.get_user("admin") + chart = self.insert_chart("foo_a", [admin.id], 1, description="ZY_bar") + + self.login(username="admin") + + arguments = { + "order_column": "changed_on_delta_humanized", + "order_direction": "desc", + } + uri = f"api/v1/chart/?q={prison.dumps(arguments)}" + + rv = self.get_assert_metric(uri, "get_list") + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + assert data["result"][0]["changed_on_delta_humanized"] in ( + "now", + "a second ago", + ) + + # rollback changes + db.session.delete(chart) + db.session.commit() + + @pytest.mark.usefixtures( + "load_world_bank_dashboard_with_slices", + "load_birth_names_dashboard_with_slices", + ) + def test_get_charts_filter(self): + """ + Chart API: Test get charts filter + """ + self.login(username="admin") + arguments = {"filters": [{"col": "slice_name", "opr": "sw", "value": "G"}]} + uri = f"api/v1/chart/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual(data["count"], 5) + + @pytest.fixture() + def load_energy_charts(self): + with app.app_context(): + admin = self.get_user("admin") + energy_table = ( + db.session.query(SqlaTable) + .filter_by(table_name="energy_usage") + .one_or_none() + ) + energy_table_id = 1 + if 
energy_table: + energy_table_id = energy_table.id + chart1 = self.insert_chart( + "foo_a", [admin.id], energy_table_id, description="ZY_bar" + ) + chart2 = self.insert_chart( + "zy_foo", [admin.id], energy_table_id, description="desc1" + ) + chart3 = self.insert_chart( + "foo_b", [admin.id], energy_table_id, description="desc1zy_" + ) + chart4 = self.insert_chart( + "foo_c", [admin.id], energy_table_id, viz_type="viz_zy_" + ) + chart5 = self.insert_chart( + "bar", [admin.id], energy_table_id, description="foo" + ) + + yield + # rollback changes + db.session.delete(chart1) + db.session.delete(chart2) + db.session.delete(chart3) + db.session.delete(chart4) + db.session.delete(chart5) + db.session.commit() + + @pytest.mark.usefixtures("load_energy_charts") + def test_get_charts_custom_filter(self): + """ + Chart API: Test get charts custom filter + """ + + arguments = { + "filters": [{"col": "slice_name", "opr": "chart_all_text", "value": "zy_"}], + "order_column": "slice_name", + "order_direction": "asc", + "keys": ["none"], + "columns": ["slice_name", "description", "viz_type"], + } + self.login(username="admin") + uri = f"api/v1/chart/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual(data["count"], 4) + + expected_response = [ + {"description": "ZY_bar", "slice_name": "foo_a", "viz_type": None}, + {"description": "desc1zy_", "slice_name": "foo_b", "viz_type": None}, + {"description": None, "slice_name": "foo_c", "viz_type": "viz_zy_"}, + {"description": "desc1", "slice_name": "zy_foo", "viz_type": None}, + ] + for index, item in enumerate(data["result"]): + self.assertEqual( + item["description"], expected_response[index]["description"] + ) + self.assertEqual(item["slice_name"], expected_response[index]["slice_name"]) + self.assertEqual(item["viz_type"], expected_response[index]["viz_type"]) + + @pytest.mark.usefixtures("load_energy_table_with_slice", "load_energy_charts") + def test_admin_gets_filtered_energy_slices(self): + # test filtering on datasource_name + arguments = { + "filters": [ + { + "col": "slice_name", + "opr": "chart_all_text", + "value": "energy", + } + ], + "keys": ["none"], + "columns": ["slice_name", "description", "table.table_name"], + } + self.login(username="admin") + + uri = f"api/v1/chart/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + data = rv.json + assert rv.status_code == 200 + assert data["count"] > 0 + for chart in data["result"]: + print(chart) + assert ( + "energy" + in " ".join( + [ + chart["slice_name"] or "", + chart["description"] or "", + chart["table"]["table_name"] or "", + ] + ).lower() + ) + + @pytest.mark.usefixtures("create_certified_charts") + def test_gets_certified_charts_filter(self): + arguments = { + "filters": [ + { + "col": "id", + "opr": "chart_is_certified", + "value": True, + } + ], + "keys": ["none"], + "columns": ["slice_name"], + } + self.login(username="admin") + + uri = f"api/v1/chart/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual(data["count"], CHARTS_FIXTURE_COUNT) + + @pytest.mark.usefixtures("create_charts") + def test_gets_not_certified_charts_filter(self): + arguments = { + "filters": [ + { + "col": "id", + "opr": "chart_is_certified", + "value": False, + } + ], + "keys": ["none"], + "columns": ["slice_name"], + } + 
self.login(username="admin") + + uri = f"api/v1/chart/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual(data["count"], 17) + + @pytest.mark.usefixtures("load_energy_charts") + def test_user_gets_none_filtered_energy_slices(self): + # test filtering on datasource_name + arguments = { + "filters": [ + { + "col": "slice_name", + "opr": "chart_all_text", + "value": "energy", + } + ], + "keys": ["none"], + "columns": ["slice_name"], + } + + self.login(username="gamma") + uri = f"api/v1/chart/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual(data["count"], 0) + + @pytest.mark.usefixtures("create_charts") + def test_get_charts_favorite_filter(self): + """ + Chart API: Test get charts favorite filter + """ + admin = self.get_user("admin") + users_favorite_query = db.session.query(FavStar.obj_id).filter( + and_(FavStar.user_id == admin.id, FavStar.class_name == "slice") + ) + expected_models = ( + db.session.query(Slice) + .filter(and_(Slice.id.in_(users_favorite_query))) + .order_by(Slice.slice_name.asc()) + .all() + ) + + arguments = { + "filters": [{"col": "id", "opr": "chart_is_favorite", "value": True}], + "order_column": "slice_name", + "order_direction": "asc", + "keys": ["none"], + "columns": ["slice_name"], + } + self.login(username="admin") + uri = f"api/v1/chart/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 200 + assert len(expected_models) == data["count"] + + for i, expected_model in enumerate(expected_models): + assert expected_model.slice_name == data["result"][i]["slice_name"] + + # Test not favorite charts + expected_models = ( + db.session.query(Slice) + .filter(and_(~Slice.id.in_(users_favorite_query))) + .order_by(Slice.slice_name.asc()) + .all() + ) + arguments["filters"][0]["value"] = False + uri = f"api/v1/chart/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 200 + assert len(expected_models) == data["count"] + + @pytest.mark.usefixtures("create_charts_created_by_gamma") + def test_get_charts_created_by_me_filter(self): + """ + Chart API: Test get charts with created by me special filter + """ + gamma_user = self.get_user("gamma") + expected_models = ( + db.session.query(Slice).filter(Slice.created_by_fk == gamma_user.id).all() + ) + arguments = { + "filters": [ + {"col": "created_by", "opr": "chart_created_by_me", "value": "me"} + ], + "order_column": "slice_name", + "order_direction": "asc", + "keys": ["none"], + "columns": ["slice_name"], + } + self.login(username="gamma") + uri = f"api/v1/chart/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 200 + assert len(expected_models) == data["count"] + for i, expected_model in enumerate(expected_models): + assert expected_model.slice_name == data["result"][i]["slice_name"] + + @pytest.mark.usefixtures("create_charts") + def test_get_current_user_favorite_status(self): + """ + Dataset API: Test get current user favorite stars + """ + admin = self.get_user("admin") + users_favorite_ids = [ + star.obj_id + for star in db.session.query(FavStar.obj_id) + .filter( + and_( + FavStar.user_id == admin.id, + FavStar.class_name == 
FavStarClassName.CHART, + ) + ) + .all() + ] + + assert users_favorite_ids + arguments = [s.id for s in db.session.query(Slice.id).all()] + self.login(username="admin") + uri = f"api/v1/chart/favorite_status/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 200 + for res in data["result"]: + if res["id"] in users_favorite_ids: + assert res["value"] + + def test_get_time_range(self): + """ + Chart API: Test get actually time range from human readable string + """ + self.login(username="admin") + humanize_time_range = "100 years ago : now" + uri = f"api/v1/time_range/?q={prison.dumps(humanize_time_range)}" + rv = self.client.get(uri) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual(rv.status_code, 200) + self.assertEqual(len(data["result"]), 3) + + @pytest.mark.usefixtures( + "load_unicode_dashboard_with_slice", + "load_energy_table_with_slice", + "load_world_bank_dashboard_with_slices", + "load_birth_names_dashboard_with_slices", + ) + def test_get_charts_page(self): + """ + Chart API: Test get charts filter + """ + # Assuming we have 34 sample charts + self.login(username="admin") + arguments = {"page_size": 10, "page": 0} + uri = f"api/v1/chart/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual(len(data["result"]), 10) + + arguments = {"page_size": 10, "page": 3} + uri = f"api/v1/chart/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual(len(data["result"]), 4) + + def test_get_charts_no_data_access(self): + """ + Chart API: Test get charts no data access + """ + self.login(username="gamma") + uri = "api/v1/chart/" + rv = self.get_assert_metric(uri, "get_list") + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual(data["count"], 0) + + def test_export_chart(self): + """ + Chart API: Test export chart + """ + example_chart = db.session.query(Slice).all()[0] + argument = [example_chart.id] + uri = f"api/v1/chart/export/?q={prison.dumps(argument)}" + + self.login(username="admin") + rv = self.get_assert_metric(uri, "export") + + assert rv.status_code == 200 + + buf = BytesIO(rv.data) + assert is_zipfile(buf) + + def test_export_chart_not_found(self): + """ + Chart API: Test export chart not found + """ + # Just one does not exist and we get 404 + argument = [-1, 1] + uri = f"api/v1/chart/export/?q={prison.dumps(argument)}" + self.login(username="admin") + rv = self.get_assert_metric(uri, "export") + + assert rv.status_code == 404 + + def test_export_chart_gamma(self): + """ + Chart API: Test export chart has gamma + """ + example_chart = db.session.query(Slice).all()[0] + argument = [example_chart.id] + uri = f"api/v1/chart/export/?q={prison.dumps(argument)}" + + self.login(username="gamma") + rv = self.client.get(uri) + + assert rv.status_code == 404 + + def test_import_chart(self): + """ + Chart API: Test import chart + """ + self.login(username="admin") + uri = "api/v1/chart/import/" + + buf = self.create_chart_import() + form_data = { + "formData": (buf, "chart_export.zip"), + } + rv = self.client.post(uri, data=form_data, content_type="multipart/form-data") + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 200 + assert response == {"message": "OK"} + + database = ( + 
db.session.query(Database).filter_by(uuid=database_config["uuid"]).one() + ) + assert database.database_name == "imported_database" + + assert len(database.tables) == 1 + dataset = database.tables[0] + assert dataset.table_name == "imported_dataset" + assert str(dataset.uuid) == dataset_config["uuid"] + + chart = db.session.query(Slice).filter_by(uuid=chart_config["uuid"]).one() + assert chart.table == dataset + + chart.owners = [] + dataset.owners = [] + db.session.delete(chart) + db.session.commit() + db.session.delete(dataset) + db.session.commit() + db.session.delete(database) + db.session.commit() + + def test_import_chart_overwrite(self): + """ + Chart API: Test import existing chart + """ + self.login(username="admin") + uri = "api/v1/chart/import/" + + buf = self.create_chart_import() + form_data = { + "formData": (buf, "chart_export.zip"), + } + rv = self.client.post(uri, data=form_data, content_type="multipart/form-data") + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 200 + assert response == {"message": "OK"} + + # import again without overwrite flag + buf = self.create_chart_import() + form_data = { + "formData": (buf, "chart_export.zip"), + } + rv = self.client.post(uri, data=form_data, content_type="multipart/form-data") + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 422 + assert response == { + "errors": [ + { + "message": "Error importing chart", + "error_type": "GENERIC_COMMAND_ERROR", + "level": "warning", + "extra": { + "charts/imported_chart.yaml": "Chart already exists and `overwrite=true` was not passed", + "issue_codes": [ + { + "code": 1010, + "message": "Issue 1010 - Superset encountered an error while running a command.", + } + ], + }, + } + ] + } + + # import with overwrite flag + buf = self.create_chart_import() + form_data = { + "formData": (buf, "chart_export.zip"), + "overwrite": "true", + } + rv = self.client.post(uri, data=form_data, content_type="multipart/form-data") + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 200 + assert response == {"message": "OK"} + + # clean up + database = ( + db.session.query(Database).filter_by(uuid=database_config["uuid"]).one() + ) + dataset = database.tables[0] + chart = db.session.query(Slice).filter_by(uuid=chart_config["uuid"]).one() + + chart.owners = [] + dataset.owners = [] + db.session.delete(chart) + db.session.commit() + db.session.delete(dataset) + db.session.commit() + db.session.delete(database) + db.session.commit() + + def test_import_chart_invalid(self): + """ + Chart API: Test import invalid chart + """ + self.login(username="admin") + uri = "api/v1/chart/import/" + + buf = BytesIO() + with ZipFile(buf, "w") as bundle: + with bundle.open("chart_export/metadata.yaml", "w") as fp: + fp.write(yaml.safe_dump(dataset_metadata_config).encode()) + with bundle.open( + "chart_export/databases/imported_database.yaml", "w" + ) as fp: + fp.write(yaml.safe_dump(database_config).encode()) + with bundle.open("chart_export/datasets/imported_dataset.yaml", "w") as fp: + fp.write(yaml.safe_dump(dataset_config).encode()) + with bundle.open("chart_export/charts/imported_chart.yaml", "w") as fp: + fp.write(yaml.safe_dump(chart_config).encode()) + buf.seek(0) + + form_data = { + "formData": (buf, "chart_export.zip"), + } + rv = self.client.post(uri, data=form_data, content_type="multipart/form-data") + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 422 + assert response == { + "errors": [ + { + "message": 
"Error importing chart", + "error_type": "GENERIC_COMMAND_ERROR", + "level": "warning", + "extra": { + "metadata.yaml": {"type": ["Must be equal to Slice."]}, + "issue_codes": [ + { + "code": 1010, + "message": ( + "Issue 1010 - Superset encountered an " + "error while running a command." + ), + } + ], + }, + } + ] + } + + def test_gets_created_by_user_charts_filter(self): + arguments = { + "filters": [{"col": "id", "opr": "chart_has_created_by", "value": True}], + "keys": ["none"], + "columns": ["slice_name"], + } + self.login(username="admin") + + uri = f"api/v1/chart/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual(data["count"], 8) + + def test_gets_not_created_by_user_charts_filter(self): + arguments = { + "filters": [{"col": "id", "opr": "chart_has_created_by", "value": False}], + "keys": ["none"], + "columns": ["slice_name"], + } + self.login(username="admin") + + uri = f"api/v1/chart/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual(data["count"], 8) diff --git a/tests/integration_tests/charts/commands_tests.py b/tests/integration_tests/charts/commands_tests.py new file mode 100644 index 0000000000000..da9a7550acb44 --- /dev/null +++ b/tests/integration_tests/charts/commands_tests.py @@ -0,0 +1,397 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+import json +from unittest.mock import patch + +import pytest +import yaml +from flask import g + +from superset import db, security_manager +from superset.charts.commands.create import CreateChartCommand +from superset.charts.commands.exceptions import ChartNotFoundError +from superset.charts.commands.export import ExportChartsCommand +from superset.charts.commands.importers.v1 import ImportChartsCommand +from superset.charts.commands.update import UpdateChartCommand +from superset.commands.exceptions import CommandInvalidError +from superset.commands.importers.exceptions import IncorrectVersionError +from superset.connectors.sqla.models import SqlaTable +from superset.models.core import Database +from superset.models.slice import Slice +from tests.integration_tests.base_tests import SupersetTestCase +from tests.integration_tests.fixtures.energy_dashboard import ( + load_energy_table_data, + load_energy_table_with_slice, +) +from tests.integration_tests.fixtures.importexport import ( + chart_config, + chart_metadata_config, + database_config, + database_metadata_config, + dataset_config, +) + + +class TestExportChartsCommand(SupersetTestCase): + @patch("superset.security.manager.g") + @pytest.mark.usefixtures("load_energy_table_with_slice") + def test_export_chart_command(self, mock_g): + mock_g.user = security_manager.find_user("admin") + + example_chart = ( + db.session.query(Slice).filter_by(slice_name="Energy Sankey").one() + ) + command = ExportChartsCommand([example_chart.id]) + contents = dict(command.run()) + + expected = [ + "metadata.yaml", + f"charts/Energy_Sankey_{example_chart.id}.yaml", + "datasets/examples/energy_usage.yaml", + "databases/examples.yaml", + ] + assert expected == list(contents.keys()) + + metadata = yaml.safe_load( + contents[f"charts/Energy_Sankey_{example_chart.id}.yaml"] + ) + + assert metadata == { + "slice_name": "Energy Sankey", + "description": None, + "certified_by": None, + "certification_details": None, + "viz_type": "sankey", + "params": { + "collapsed_fieldsets": "", + "groupby": ["source", "target"], + "metric": "sum__value", + "row_limit": "5000", + "slice_name": "Energy Sankey", + "viz_type": "sankey", + }, + "cache_timeout": None, + "dataset_uuid": str(example_chart.table.uuid), + "uuid": str(example_chart.uuid), + "version": "1.0.0", + "query_context": None, + } + + @patch("superset.security.manager.g") + @pytest.mark.usefixtures("load_energy_table_with_slice") + def test_export_chart_command_no_access(self, mock_g): + """Test that users can't export datasets they don't have access to""" + mock_g.user = security_manager.find_user("gamma") + + example_chart = db.session.query(Slice).all()[0] + command = ExportChartsCommand([example_chart.id]) + contents = command.run() + with self.assertRaises(ChartNotFoundError): + next(contents) + + @patch("superset.security.manager.g") + def test_export_chart_command_invalid_dataset(self, mock_g): + """Test that an error is raised when exporting an invalid dataset""" + mock_g.user = security_manager.find_user("admin") + command = ExportChartsCommand([-1]) + contents = command.run() + with self.assertRaises(ChartNotFoundError): + next(contents) + + @patch("superset.security.manager.g") + @pytest.mark.usefixtures("load_energy_table_with_slice") + def test_export_chart_command_key_order(self, mock_g): + """Test that they keys in the YAML have the same order as export_fields""" + mock_g.user = security_manager.find_user("admin") + + example_chart = ( + db.session.query(Slice).filter_by(slice_name="Energy 
Sankey").one() + ) + command = ExportChartsCommand([example_chart.id]) + contents = dict(command.run()) + + metadata = yaml.safe_load( + contents[f"charts/Energy_Sankey_{example_chart.id}.yaml"] + ) + assert list(metadata.keys()) == [ + "slice_name", + "description", + "certified_by", + "certification_details", + "viz_type", + "params", + "query_context", + "cache_timeout", + "uuid", + "version", + "dataset_uuid", + ] + + @patch("superset.security.manager.g") + @pytest.mark.usefixtures("load_energy_table_with_slice") + def test_export_chart_command_no_related(self, mock_g): + """ + Test that only the chart is exported when export_related=False. + """ + mock_g.user = security_manager.find_user("admin") + + example_chart = ( + db.session.query(Slice).filter_by(slice_name="Energy Sankey").one() + ) + command = ExportChartsCommand([example_chart.id], export_related=False) + contents = dict(command.run()) + + expected = [ + "metadata.yaml", + f"charts/Energy_Sankey_{example_chart.id}.yaml", + ] + assert expected == list(contents.keys()) + + +class TestImportChartsCommand(SupersetTestCase): + @patch("superset.charts.commands.importers.v1.utils.g") + def test_import_v1_chart(self, mock_g): + """Test that we can import a chart""" + mock_g.user = security_manager.find_user("admin") + contents = { + "metadata.yaml": yaml.safe_dump(chart_metadata_config), + "databases/imported_database.yaml": yaml.safe_dump(database_config), + "datasets/imported_dataset.yaml": yaml.safe_dump(dataset_config), + "charts/imported_chart.yaml": yaml.safe_dump(chart_config), + } + command = ImportChartsCommand(contents) + command.run() + + chart: Slice = ( + db.session.query(Slice).filter_by(uuid=chart_config["uuid"]).one() + ) + dataset = chart.datasource + assert json.loads(chart.params) == { + "color_picker": {"a": 1, "b": 135, "g": 122, "r": 0}, + "datasource": dataset.uid, + "js_columns": ["color"], + "js_data_mutator": "data => data.map(d => ({\\n ...d,\\n color: colors.hexToRGB(d.extraProps.color)\\n}));", + "js_onclick_href": "", + "js_tooltip": "", + "line_column": "path_json", + "line_type": "json", + "line_width": 150, + "mapbox_style": "mapbox://styles/mapbox/light-v9", + "reverse_long_lat": False, + "row_limit": 5000, + "slice_id": 43, + "time_grain_sqla": None, + "time_range": " : ", + "viewport": { + "altitude": 1.5, + "bearing": 0, + "height": 1094, + "latitude": 37.73671752604488, + "longitude": -122.18885402582598, + "maxLatitude": 85.05113, + "maxPitch": 60, + "maxZoom": 20, + "minLatitude": -85.05113, + "minPitch": 0, + "minZoom": 0, + "pitch": 0, + "width": 669, + "zoom": 9.51847667620428, + }, + "viz_type": "deck_path", + } + + dataset = ( + db.session.query(SqlaTable).filter_by(uuid=dataset_config["uuid"]).one() + ) + assert dataset.table_name == "imported_dataset" + assert chart.table == dataset + + database = ( + db.session.query(Database).filter_by(uuid=database_config["uuid"]).one() + ) + assert database.database_name == "imported_database" + assert chart.table.database == database + + assert chart.owners == [mock_g.user] + + chart.owners = [] + dataset.owners = [] + database.owners = [] + db.session.delete(chart) + db.session.delete(dataset) + db.session.delete(database) + db.session.commit() + + def test_import_v1_chart_multiple(self): + """Test that a chart can be imported multiple times""" + contents = { + "metadata.yaml": yaml.safe_dump(chart_metadata_config), + "databases/imported_database.yaml": yaml.safe_dump(database_config), + "datasets/imported_dataset.yaml": 
yaml.safe_dump(dataset_config), + "charts/imported_chart.yaml": yaml.safe_dump(chart_config), + } + command = ImportChartsCommand(contents, overwrite=True) + command.run() + command.run() + + dataset = ( + db.session.query(SqlaTable).filter_by(uuid=dataset_config["uuid"]).one() + ) + charts = db.session.query(Slice).filter_by(datasource_id=dataset.id).all() + assert len(charts) == 1 + + database = dataset.database + + db.session.delete(charts[0]) + db.session.delete(dataset) + db.session.delete(database) + db.session.commit() + + def test_import_v1_chart_validation(self): + """Test different validations applied when importing a chart""" + # metadata.yaml must be present + contents = { + "databases/imported_database.yaml": yaml.safe_dump(database_config), + "datasets/imported_dataset.yaml": yaml.safe_dump(dataset_config), + "charts/imported_chart.yaml": yaml.safe_dump(chart_config), + } + command = ImportChartsCommand(contents) + with pytest.raises(IncorrectVersionError) as excinfo: + command.run() + assert str(excinfo.value) == "Missing metadata.yaml" + + # version should be 1.0.0 + contents["metadata.yaml"] = yaml.safe_dump( + { + "version": "2.0.0", + "type": "SqlaTable", + "timestamp": "2020-11-04T21:27:44.423819+00:00", + } + ) + command = ImportChartsCommand(contents) + with pytest.raises(IncorrectVersionError) as excinfo: + command.run() + assert str(excinfo.value) == "Must be equal to 1.0.0." + + # type should be Slice + contents["metadata.yaml"] = yaml.safe_dump(database_metadata_config) + command = ImportChartsCommand(contents) + with pytest.raises(CommandInvalidError) as excinfo: + command.run() + assert str(excinfo.value) == "Error importing chart" + assert excinfo.value.normalized_messages() == { + "metadata.yaml": {"type": ["Must be equal to Slice."]} + } + + # must also validate datasets and databases + broken_config = database_config.copy() + del broken_config["database_name"] + contents["metadata.yaml"] = yaml.safe_dump(chart_metadata_config) + contents["databases/imported_database.yaml"] = yaml.safe_dump(broken_config) + command = ImportChartsCommand(contents) + with pytest.raises(CommandInvalidError) as excinfo: + command.run() + assert str(excinfo.value) == "Error importing chart" + assert excinfo.value.normalized_messages() == { + "databases/imported_database.yaml": { + "database_name": ["Missing data for required field."], + } + } + + +class TestChartsCreateCommand(SupersetTestCase): + @patch("superset.utils.core.g") + @patch("superset.charts.commands.create.g") + @patch("superset.security.manager.g") + @pytest.mark.usefixtures("load_energy_table_with_slice") + def test_create_v1_response(self, mock_sm_g, mock_c_g, mock_u_g): + """Test that the create chart command creates a chart""" + user = security_manager.find_user(username="admin") + mock_u_g.user = mock_c_g.user = mock_sm_g.user = user + chart_data = { + "slice_name": "new chart", + "description": "new description", + "owners": [user.id], + "viz_type": "new_viz_type", + "params": json.dumps({"viz_type": "new_viz_type"}), + "cache_timeout": 1000, + "datasource_id": 1, + "datasource_type": "table", + } + command = CreateChartCommand(chart_data) + chart = command.run() + chart = db.session.query(Slice).get(chart.id) + assert chart.viz_type == "new_viz_type" + json_params = json.loads(chart.params) + assert json_params == {"viz_type": "new_viz_type"} + assert chart.slice_name == "new chart" + assert chart.owners == [user] + db.session.delete(chart) + db.session.commit() + + +class 
TestChartsUpdateCommand(SupersetTestCase): + @patch("superset.charts.commands.update.g") + @patch("superset.utils.core.g") + @patch("superset.security.manager.g") + @pytest.mark.usefixtures("load_energy_table_with_slice") + def test_update_v1_response(self, mock_sm_g, mock_c_g, mock_u_g): + """Test that a chart command updates properties""" + pk = db.session.query(Slice).all()[0].id + user = security_manager.find_user(username="admin") + mock_u_g.user = mock_c_g.user = mock_sm_g.user = user + model_id = pk + json_obj = { + "description": "test for update", + "cache_timeout": None, + "owners": [user.id], + } + command = UpdateChartCommand(model_id, json_obj) + last_saved_before = db.session.query(Slice).get(pk).last_saved_at + command.run() + chart = db.session.query(Slice).get(pk) + assert chart.last_saved_at != last_saved_before + assert chart.last_saved_by == user + + @patch("superset.utils.core.g") + @patch("superset.security.manager.g") + @pytest.mark.usefixtures("load_energy_table_with_slice") + def test_query_context_update_command(self, mock_sm_g, mock_g): + """ + Test that a user can generate the chart query context + payloadwithout affecting owners + """ + chart = db.session.query(Slice).all()[0] + pk = chart.id + admin = security_manager.find_user(username="admin") + chart.owners = [admin] + db.session.commit() + + user = security_manager.find_user(username="alpha") + mock_g.user = mock_sm_g.user = user + query_context = json.dumps({"foo": "bar"}) + json_obj = { + "query_context_generation": True, + "query_context": query_context, + } + command = UpdateChartCommand(pk, json_obj) + command.run() + chart = db.session.query(Slice).get(pk) + assert chart.query_context == query_context + assert len(chart.owners) == 1 + assert chart.owners[0] == admin diff --git a/tests/integration_tests/charts/data/__init__.py b/tests/integration_tests/charts/data/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/charts/data/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/charts/data/api_tests.py b/tests/integration_tests/charts/data/api_tests.py new file mode 100644 index 0000000000000..83fb7281fbc74 --- /dev/null +++ b/tests/integration_tests/charts/data/api_tests.py @@ -0,0 +1,1282 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# isort:skip_file +"""Unit tests for Superset""" +import json +import unittest +import copy +from datetime import datetime +from io import BytesIO +from typing import Any, Dict, Optional, List +from unittest import mock +from zipfile import ZipFile + +from flask import Response +from tests.integration_tests.conftest import with_feature_flags +from superset.models.sql_lab import Query +from tests.integration_tests.base_tests import ( + SupersetTestCase, + test_client, +) +from tests.integration_tests.annotation_layers.fixtures import create_annotation_layers +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) +from tests.integration_tests.test_app import app +from tests.integration_tests.fixtures.energy_dashboard import ( + load_energy_table_with_slice, + load_energy_table_data, +) +import pytest +from superset.models.slice import Slice + +from superset.charts.data.commands.get_data_command import ChartDataCommand +from superset.connectors.sqla.models import TableColumn, SqlaTable +from superset.errors import SupersetErrorType +from superset.extensions import async_query_manager, db +from superset.models.annotations import AnnotationLayer +from superset.models.slice import Slice +from superset.superset_typing import AdhocColumn +from superset.utils.core import ( + AnnotationType, + get_example_default_schema, + AdhocMetricExpressionType, + ExtraFiltersReasonType, +) +from superset.utils.database import get_example_database, get_main_database +from superset.common.chart_data import ChartDataResultFormat, ChartDataResultType + +from tests.common.query_context_generator import ANNOTATION_LAYERS +from tests.integration_tests.fixtures.query_context import get_query_context + + +CHART_DATA_URI = "api/v1/chart/data" +CHARTS_FIXTURE_COUNT = 10 +ADHOC_COLUMN_FIXTURE: AdhocColumn = { + "hasCustomLabel": True, + "label": "male_or_female", + "sqlExpression": "case when gender = 'boy' then 'male' " + "when gender = 'girl' then 'female' else 'other' end", +} + +INCOMPATIBLE_ADHOC_COLUMN_FIXTURE: AdhocColumn = { + "hasCustomLabel": True, + "label": "exciting_or_boring", + "sqlExpression": "case when genre = 'Action' then 'Exciting' else 'Boring' end", +} + + +class BaseTestChartDataApi(SupersetTestCase): + query_context_payload_template = None + + def setUp(self) -> None: + self.login("admin") + if self.query_context_payload_template is None: + BaseTestChartDataApi.query_context_payload_template = get_query_context( + "birth_names" + ) + self.query_context_payload = copy.deepcopy(self.query_context_payload_template) + + def get_expected_row_count(self, client_id: str) -> int: + start_date = datetime.now() + start_date = start_date.replace( + year=start_date.year - 100, hour=0, minute=0, second=0 + ) + + quoted_table_name = self.quote_name("birth_names") + sql = f""" + SELECT COUNT(*) AS rows_count FROM ( + SELECT name AS name, SUM(num) AS sum__num + FROM {quoted_table_name} + WHERE ds >= '{start_date.strftime("%Y-%m-%d %H:%M:%S")}' + AND gender = 'boy' + GROUP BY name + ORDER BY sum__num DESC + LIMIT 100) AS 
inner__query + """ + resp = self.run_sql(sql, client_id, raise_on_error=True) + db.session.query(Query).delete() + db.session.commit() + return resp["data"][0]["rows_count"] + + def quote_name(self, name: str): + if get_main_database().backend in {"presto", "hive"}: + return get_example_database().inspector.engine.dialect.identifier_preparer.quote_identifier( + name + ) + return name + + +@pytest.mark.chart_data_flow +class TestPostChartDataApi(BaseTestChartDataApi): + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_with_valid_qc__data_is_returned(self): + # arrange + expected_row_count = self.get_expected_row_count("client_id_1") + # act + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + # assert + assert rv.status_code == 200 + self.assert_row_count(rv, expected_row_count) + + @staticmethod + def assert_row_count(rv: Response, expected_row_count: int): + assert rv.json["result"][0]["rowcount"] == expected_row_count + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + @mock.patch( + "superset.common.query_context_factory.config", + {**app.config, "ROW_LIMIT": 7}, + ) + def test_without_row_limit__row_count_as_default_row_limit(self): + # arrange + expected_row_count = 7 + del self.query_context_payload["queries"][0]["row_limit"] + # act + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + # assert + self.assert_row_count(rv, expected_row_count) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + @mock.patch( + "superset.common.query_context_factory.config", + {**app.config, "SAMPLES_ROW_LIMIT": 5}, + ) + def test_as_samples_without_row_limit__row_count_as_default_samples_row_limit(self): + # arrange + expected_row_count = 5 + app.config["SAMPLES_ROW_LIMIT"] = expected_row_count + self.query_context_payload["result_type"] = ChartDataResultType.SAMPLES + del self.query_context_payload["queries"][0]["row_limit"] + + # act + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + + # assert + self.assert_row_count(rv, expected_row_count) + assert "GROUP BY" not in rv.json["result"][0]["query"] + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + @mock.patch( + "superset.utils.core.current_app.config", + {**app.config, "SQL_MAX_ROW": 10}, + ) + def test_with_row_limit_bigger_then_sql_max_row__rowcount_as_sql_max_row(self): + # arrange + expected_row_count = 10 + self.query_context_payload["queries"][0]["row_limit"] = 10000000 + + # act + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + + # assert + self.assert_row_count(rv, expected_row_count) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + @mock.patch( + "superset.utils.core.current_app.config", + {**app.config, "SQL_MAX_ROW": 5}, + ) + def test_as_samples_with_row_limit_bigger_then_sql_max_row_rowcount_as_sql_max_row( + self, + ): + expected_row_count = app.config["SQL_MAX_ROW"] + self.query_context_payload["result_type"] = ChartDataResultType.SAMPLES + self.query_context_payload["queries"][0]["row_limit"] = 10000000 + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + + # assert + self.assert_row_count(rv, expected_row_count) + assert "GROUP BY" not in rv.json["result"][0]["query"] + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + @mock.patch( + "superset.common.query_actions.config", + {**app.config, "SAMPLES_ROW_LIMIT": 5, "SQL_MAX_ROW": 15}, + ) 
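+    # With the config mocked above (SAMPLES_ROW_LIMIT=5, SQL_MAX_ROW=15), the test
+    # below expects an explicitly requested row_limit of 10 to take precedence over
+    # the samples default while still staying under SQL_MAX_ROW.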
+ def test_with_row_limit_as_samples__rowcount_as_row_limit(self): + + expected_row_count = 10 + self.query_context_payload["result_type"] = ChartDataResultType.SAMPLES + self.query_context_payload["queries"][0]["row_limit"] = expected_row_count + + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + + # assert + self.assert_row_count(rv, expected_row_count) + assert "GROUP BY" not in rv.json["result"][0]["query"] + + def test_with_incorrect_result_type__400(self): + self.query_context_payload["result_type"] = "qwerty" + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + + assert rv.status_code == 400 + + def test_with_incorrect_result_format__400(self): + self.query_context_payload["result_format"] = "qwerty" + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + assert rv.status_code == 400 + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_with_invalid_payload__400(self): + + invalid_query_context = {"form_data": "NOT VALID JSON"} + + rv = self.client.post( + CHART_DATA_URI, + data=invalid_query_context, + content_type="multipart/form-data", + ) + + assert rv.status_code == 400 + assert rv.json["message"] == "Request is not JSON" + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_with_query_result_type__200(self): + self.query_context_payload["result_type"] = ChartDataResultType.QUERY + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + assert rv.status_code == 200 + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_empty_request_with_csv_result_format(self): + """ + Chart data API: Test empty chart data with CSV result format + """ + self.query_context_payload["result_format"] = "csv" + self.query_context_payload["queries"] = [] + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + assert rv.status_code == 400 + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_empty_request_with_excel_result_format(self): + """ + Chart data API: Test empty chart data with Excel result format + """ + self.query_context_payload["result_format"] = "xlsx" + self.query_context_payload["queries"] = [] + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + assert rv.status_code == 400 + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_with_csv_result_format(self): + """ + Chart data API: Test chart data with CSV result format + """ + self.query_context_payload["result_format"] = "csv" + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + assert rv.status_code == 200 + assert rv.mimetype == "text/csv" + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_with_excel_result_format(self): + """ + Chart data API: Test chart data with Excel result format + """ + self.query_context_payload["result_format"] = "xlsx" + mimetype = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + assert rv.status_code == 200 + assert rv.mimetype == mimetype + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_with_multi_query_csv_result_format(self): + """ + Chart data API: Test chart data with multi-query CSV result format + """ + self.query_context_payload["result_format"] = "csv" + 
self.query_context_payload["queries"].append( + self.query_context_payload["queries"][0] + ) + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + assert rv.status_code == 200 + assert rv.mimetype == "application/zip" + zipfile = ZipFile(BytesIO(rv.data), "r") + assert zipfile.namelist() == ["query_1.csv", "query_2.csv"] + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_with_multi_query_excel_result_format(self): + """ + Chart data API: Test chart data with multi-query Excel result format + """ + self.query_context_payload["result_format"] = "xlsx" + self.query_context_payload["queries"].append( + self.query_context_payload["queries"][0] + ) + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + assert rv.status_code == 200 + assert rv.mimetype == "application/zip" + zipfile = ZipFile(BytesIO(rv.data), "r") + assert zipfile.namelist() == ["query_1.xlsx", "query_2.xlsx"] + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_with_csv_result_format_when_actor_not_permitted_for_csv__403(self): + """ + Chart data API: Test chart data with CSV result format + """ + self.logout() + self.login(username="gamma_no_csv") + self.query_context_payload["result_format"] = "csv" + + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + assert rv.status_code == 403 + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_with_excel_result_format_when_actor_not_permitted_for_excel__403(self): + """ + Chart data API: Test chart data with Excel result format + """ + self.logout() + self.login(username="gamma_no_csv") + self.query_context_payload["result_format"] = "xlsx" + + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + assert rv.status_code == 403 + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_with_row_limit_and_offset__row_limit_and_offset_were_applied(self): + """ + Chart data API: Test chart data query with limit and offset + """ + self.query_context_payload["queries"][0]["row_limit"] = 5 + self.query_context_payload["queries"][0]["row_offset"] = 0 + self.query_context_payload["queries"][0]["orderby"] = [["name", True]] + + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + self.assert_row_count(rv, 5) + result = rv.json["result"][0] + + # TODO: fix offset for presto DB + if get_example_database().backend == "presto": + return + + # ensure that offset works properly + offset = 2 + expected_name = result["data"][offset]["name"] + self.query_context_payload["queries"][0]["row_offset"] = offset + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + result = rv.json["result"][0] + assert result["rowcount"] == 5 + assert result["data"][0]["name"] == expected_name + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_chart_data_applied_time_extras(self): + """ + Chart data API: Test chart data query with applied time extras + """ + self.query_context_payload["queries"][0]["applied_time_extras"] = { + "__time_range": "100 years ago : now", + "__time_origin": "now", + } + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual( + data["result"][0]["applied_filters"], + [ + {"column": "gender"}, + {"column": "num"}, + {"column": "name"}, + {"column": 
"__time_range"}, + ], + ) + expected_row_count = self.get_expected_row_count("client_id_2") + self.assertEqual(data["result"][0]["rowcount"], expected_row_count) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_with_in_op_filter__data_is_returned(self): + """ + Chart data API: Ensure mixed case filter operator generates valid result + """ + expected_row_count = 10 + self.query_context_payload["queries"][0]["filters"][0]["op"] = "In" + self.query_context_payload["queries"][0]["row_limit"] = expected_row_count + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + + self.assert_row_count(rv, expected_row_count) + + @unittest.skip("Failing due to timezone difference") + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_chart_data_dttm_filter(self): + """ + Chart data API: Ensure temporal column filter converts epoch to dttm expression + """ + table = self.get_birth_names_dataset() + if table.database.backend == "presto": + # TODO: date handling on Presto not fully in line with other engine specs + return + + self.query_context_payload["queries"][0]["time_range"] = "" + dttm = self.get_dttm() + ms_epoch = dttm.timestamp() * 1000 + self.query_context_payload["queries"][0]["filters"][0] = { + "col": "ds", + "op": "!=", + "val": ms_epoch, + } + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + response_payload = json.loads(rv.data.decode("utf-8")) + result = response_payload["result"][0] + + # assert that unconverted timestamp is not present in query + assert str(ms_epoch) not in result["query"] + + # assert that converted timestamp is present in query where supported + dttm_col: Optional[TableColumn] = None + for col in table.columns: + if col.column_name == table.main_dttm_col: + dttm_col = col + if dttm_col: + dttm_expression = table.database.db_engine_spec.convert_dttm( + dttm_col.type, + dttm, + ) + self.assertIn(dttm_expression, result["query"]) + else: + raise Exception("ds column not found") + + def test_chart_data_prophet(self): + """ + Chart data API: Ensure prophet post transformation works + """ + pytest.importorskip("prophet") + time_grain = "P1Y" + self.query_context_payload["queries"][0]["is_timeseries"] = True + self.query_context_payload["queries"][0]["groupby"] = [] + self.query_context_payload["queries"][0]["extras"] = { + "time_grain_sqla": time_grain + } + self.query_context_payload["queries"][0]["granularity"] = "ds" + self.query_context_payload["queries"][0]["post_processing"] = [ + { + "operation": "prophet", + "options": { + "time_grain": time_grain, + "periods": 3, + "confidence_interval": 0.9, + }, + } + ] + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + self.assertEqual(rv.status_code, 200) + response_payload = json.loads(rv.data.decode("utf-8")) + result = response_payload["result"][0] + row = result["data"][0] + self.assertIn("__timestamp", row) + self.assertIn("sum__num", row) + self.assertIn("sum__num__yhat", row) + self.assertIn("sum__num__yhat_upper", row) + self.assertIn("sum__num__yhat_lower", row) + self.assertEqual(result["rowcount"], 47) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_with_query_result_type_and_non_existent_filter__filter_omitted(self): + self.query_context_payload["queries"][0]["filters"] = [ + {"col": "non_existent_filter", "op": "==", "val": "foo"}, + ] + self.query_context_payload["result_type"] = ChartDataResultType.QUERY + rv = 
self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + assert rv.status_code == 200 + assert "non_existent_filter" not in rv.json["result"][0]["query"] + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_with_filter_suppose_to_return_empty_data__no_data_returned(self): + self.query_context_payload["queries"][0]["filters"] = [ + {"col": "gender", "op": "==", "val": "foo"} + ] + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + + assert rv.status_code == 200 + assert rv.json["result"][0]["data"] == [] + self.assert_row_count(rv, 0) + + def test_with_invalid_where_parameter__400(self): + self.query_context_payload["queries"][0]["filters"] = [] + # erroneous WHERE-clause + self.query_context_payload["queries"][0]["extras"]["where"] = "(gender abc def)" + + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + + assert rv.status_code == 400 + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_with_invalid_where_parameter_closing_unclosed__400(self): + self.query_context_payload["queries"][0]["filters"] = [] + self.query_context_payload["queries"][0]["extras"][ + "where" + ] = "state = 'CA') OR (state = 'NY'" + + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + + assert rv.status_code == 400 + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_with_where_parameter_including_comment___200(self): + self.query_context_payload["queries"][0]["filters"] = [] + self.query_context_payload["queries"][0]["extras"]["where"] = "1 = 1 -- abc" + + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + + assert rv.status_code == 200 + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_with_orderby_parameter_with_second_query__400(self): + self.query_context_payload["queries"][0]["filters"] = [] + self.query_context_payload["queries"][0]["orderby"] = [ + [ + { + "expressionType": "SQL", + "sqlExpression": "sum__num; select 1, 1", + }, + True, + ], + ] + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + + assert rv.status_code == 400 + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_with_invalid_having_parameter_closing_and_comment__400(self): + self.query_context_payload["queries"][0]["filters"] = [] + self.query_context_payload["queries"][0]["extras"][ + "having" + ] = "COUNT(1) = 0) UNION ALL SELECT 'abc', 1--comment" + + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + + assert rv.status_code == 400 + + def test_with_invalid_datasource__400(self): + self.query_context_payload["datasource"] = "abc" + + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + + assert rv.status_code == 400 + + def test_with_not_permitted_actor__403(self): + """ + Chart data API: Test chart data query not allowed + """ + self.logout() + self.login(username="gamma") + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + + assert rv.status_code == 403 + assert ( + rv.json["errors"][0]["error_type"] + == SupersetErrorType.DATASOURCE_SECURITY_ACCESS_ERROR + ) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_when_where_parameter_is_template_and_query_result_type__query_is_templated( + self, + ): + + self.query_context_payload["result_type"] = ChartDataResultType.QUERY + 
self.query_context_payload["queries"][0]["filters"] = [ + {"col": "gender", "op": "==", "val": "boy"} + ] + self.query_context_payload["queries"][0]["extras"][ + "where" + ] = "('boy' = '{{ filter_values('gender', 'xyz' )[0] }}')" + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + result = rv.json["result"][0]["query"] + if get_example_database().backend != "presto": + assert "('boy' = 'boy')" in result + + @with_feature_flags(GLOBAL_ASYNC_QUERIES=True) + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_chart_data_async(self): + self.logout() + async_query_manager.init_app(app) + self.login("admin") + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + self.assertEqual(rv.status_code, 202) + data = json.loads(rv.data.decode("utf-8")) + keys = list(data.keys()) + self.assertCountEqual( + keys, ["channel_id", "job_id", "user_id", "status", "errors", "result_url"] + ) + + @with_feature_flags(GLOBAL_ASYNC_QUERIES=True) + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_chart_data_async_cached_sync_response(self): + """ + Chart data API: Test chart data query returns results synchronously + when results are already cached. + """ + async_query_manager.init_app(app) + + class QueryContext: + result_format = ChartDataResultFormat.JSON + result_type = ChartDataResultType.FULL + + cmd_run_val = { + "query_context": QueryContext(), + "queries": [{"query": "select * from foo"}], + } + + with mock.patch.object( + ChartDataCommand, "run", return_value=cmd_run_val + ) as patched_run: + self.query_context_payload["result_type"] = ChartDataResultType.FULL + rv = self.post_assert_metric( + CHART_DATA_URI, self.query_context_payload, "data" + ) + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + patched_run.assert_called_once_with(force_cached=True) + self.assertEqual(data, {"result": [{"query": "select * from foo"}]}) + + @with_feature_flags(GLOBAL_ASYNC_QUERIES=True) + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_chart_data_async_results_type(self): + """ + Chart data API: Test chart data query non-JSON format (async) + """ + async_query_manager.init_app(app) + self.query_context_payload["result_type"] = "results" + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + self.assertEqual(rv.status_code, 200) + + @with_feature_flags(GLOBAL_ASYNC_QUERIES=True) + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_chart_data_async_invalid_token(self): + """ + Chart data API: Test chart data query (async) + """ + async_query_manager.init_app(app) + test_client.set_cookie( + "localhost", app.config["GLOBAL_ASYNC_QUERIES_JWT_COOKIE_NAME"], "foo" + ) + rv = test_client.post(CHART_DATA_URI, json=self.query_context_payload) + self.assertEqual(rv.status_code, 401) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_chart_data_rowcount(self): + """ + Chart data API: Query total rows + """ + expected_row_count = self.get_expected_row_count("client_id_4") + self.query_context_payload["queries"][0]["is_rowcount"] = True + self.query_context_payload["queries"][0]["groupby"] = ["name"] + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + + assert rv.json["result"][0]["data"][0]["rowcount"] == expected_row_count + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def 
test_with_timegrains_and_columns_result_types(self): + """ + Chart data API: Query timegrains and columns + """ + self.query_context_payload["queries"] = [ + {"result_type": ChartDataResultType.TIMEGRAINS}, + {"result_type": ChartDataResultType.COLUMNS}, + ] + result = self.post_assert_metric( + CHART_DATA_URI, self.query_context_payload, "data" + ).json["result"] + + timegrain_data_keys = result[0]["data"][0].keys() + column_data_keys = result[1]["data"][0].keys() + assert list(timegrain_data_keys) == [ + "name", + "function", + "duration", + ] + assert list(column_data_keys) == [ + "column_name", + "verbose_name", + "dtype", + ] + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_with_series_limit(self): + SERIES_LIMIT = 5 + self.query_context_payload["queries"][0]["columns"] = ["state", "name"] + self.query_context_payload["queries"][0]["series_columns"] = ["name"] + self.query_context_payload["queries"][0]["series_limit"] = SERIES_LIMIT + + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + + data = rv.json["result"][0]["data"] + + unique_names = set(row["name"] for row in data) + self.maxDiff = None + self.assertEqual(len(unique_names), SERIES_LIMIT) + self.assertEqual( + set(column for column in data[0].keys()), {"state", "name", "sum__num"} + ) + + @pytest.mark.usefixtures( + "create_annotation_layers", "load_birth_names_dashboard_with_slices" + ) + def test_with_annotations_layers__annotations_data_returned(self): + """ + Chart data API: Test chart data query + """ + + annotation_layers = [] + self.query_context_payload["queries"][0][ + "annotation_layers" + ] = annotation_layers + + # formula + annotation_layers.append(ANNOTATION_LAYERS[AnnotationType.FORMULA]) + + # interval + interval_layer = ( + db.session.query(AnnotationLayer) + .filter(AnnotationLayer.name == "name1") + .one() + ) + interval = ANNOTATION_LAYERS[AnnotationType.INTERVAL] + interval["value"] = interval_layer.id + annotation_layers.append(interval) + + # event + event_layer = ( + db.session.query(AnnotationLayer) + .filter(AnnotationLayer.name == "name2") + .one() + ) + event = ANNOTATION_LAYERS[AnnotationType.EVENT] + event["value"] = event_layer.id + annotation_layers.append(event) + + rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + # response should only contain interval and event data, not formula + self.assertEqual(len(data["result"][0]["annotation_data"]), 2) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_with_virtual_table_with_colons_as_datasource(self): + """ + Chart data API: test query with literal colon characters in query, metrics, + where clause and filters + """ + owner = self.get_user("admin") + table = SqlaTable( + table_name="virtual_table_1", + schema=get_example_default_schema(), + owners=[owner], + database=get_example_database(), + sql="select ':foo' as foo, ':bar:' as bar, state, num from birth_names", + ) + db.session.add(table) + db.session.commit() + table.fetch_metadata() + + request_payload = self.query_context_payload + request_payload["datasource"] = { + "type": "table", + "id": table.id, + } + request_payload["queries"][0]["columns"] = ["foo", "bar", "state"] + request_payload["queries"][0]["where"] = "':abc' != ':xyz:qwerty'" + request_payload["queries"][0]["orderby"] = None + request_payload["queries"][0]["metrics"] = [ + { + "expressionType": 
AdhocMetricExpressionType.SQL, + "sqlExpression": "sum(case when state = ':asdf' then 0 else 1 end)", + "label": "count", + } + ] + request_payload["queries"][0]["filters"] = [ + { + "col": "foo", + "op": "!=", + "val": ":qwerty:", + } + ] + + rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data") + db.session.delete(table) + db.session.commit() + assert rv.status_code == 200 + result = rv.json["result"][0] + data = result["data"] + assert {col for col in data[0].keys()} == {"foo", "bar", "state", "count"} + # make sure results and query parameters are unescaped + assert {row["foo"] for row in data} == {":foo"} + assert {row["bar"] for row in data} == {":bar:"} + assert "':asdf'" in result["query"] + assert "':xyz:qwerty'" in result["query"] + assert "':qwerty:'" in result["query"] + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_with_table_columns_without_metrics(self): + request_payload = self.query_context_payload + request_payload["queries"][0]["columns"] = ["name", "gender"] + request_payload["queries"][0]["metrics"] = None + request_payload["queries"][0]["orderby"] = [] + + rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data") + result = rv.json["result"][0] + + assert rv.status_code == 200 + assert "name" in result["colnames"] + assert "gender" in result["colnames"] + assert "name" in result["query"] + assert "gender" in result["query"] + assert list(result["data"][0].keys()) == ["name", "gender"] + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_with_adhoc_column_without_metrics(self): + request_payload = self.query_context_payload + request_payload["queries"][0]["columns"] = [ + "name", + { + "label": "num divide by 10", + "sqlExpression": "num/10", + "expressionType": "SQL", + }, + ] + request_payload["queries"][0]["metrics"] = None + request_payload["queries"][0]["orderby"] = [] + + rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data") + result = rv.json["result"][0] + + assert rv.status_code == 200 + assert "num divide by 10" in result["colnames"] + assert "name" in result["colnames"] + assert "num divide by 10" in result["query"] + assert "name" in result["query"] + assert list(result["data"][0].keys()) == ["name", "num divide by 10"] + + +@pytest.mark.chart_data_flow +class TestGetChartDataApi(BaseTestChartDataApi): + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_get_data_when_query_context_is_null(self): + """ + Chart data API: Test GET endpoint when query context is null + """ + chart = db.session.query(Slice).filter_by(slice_name="Genders").one() + rv = self.get_assert_metric(f"api/v1/chart/{chart.id}/data/", "get_data") + data = json.loads(rv.data.decode("utf-8")) + assert data == { + "message": "Chart has no query context saved. Please save the chart again." 
+ } + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_chart_data_get(self): + """ + Chart data API: Test GET endpoint + """ + chart = db.session.query(Slice).filter_by(slice_name="Genders").one() + chart.query_context = json.dumps( + { + "datasource": {"id": chart.table.id, "type": "table"}, + "force": False, + "queries": [ + { + "time_range": "1900-01-01T00:00:00 : 2000-01-01T00:00:00", + "granularity": "ds", + "filters": [], + "extras": { + "having": "", + "where": "", + }, + "applied_time_extras": {}, + "columns": ["gender"], + "metrics": ["sum__num"], + "orderby": [["sum__num", False]], + "annotation_layers": [], + "row_limit": 50000, + "timeseries_limit": 0, + "order_desc": True, + "url_params": {}, + "custom_params": {}, + "custom_form_data": {}, + } + ], + "result_format": "json", + "result_type": "full", + } + ) + rv = self.get_assert_metric(f"api/v1/chart/{chart.id}/data/", "get_data") + assert rv.mimetype == "application/json" + data = json.loads(rv.data.decode("utf-8")) + assert data["result"][0]["status"] == "success" + assert data["result"][0]["rowcount"] == 2 + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_chart_data_get_forced(self): + """ + Chart data API: Test GET endpoint with force cache parameter + """ + chart = db.session.query(Slice).filter_by(slice_name="Genders").one() + chart.query_context = json.dumps( + { + "datasource": {"id": chart.table.id, "type": "table"}, + "force": False, + "queries": [ + { + "time_range": "1900-01-01T00:00:00 : 2000-01-01T00:00:00", + "granularity": "ds", + "filters": [], + "extras": { + "having": "", + "having_druid": [], + "where": "", + }, + "applied_time_extras": {}, + "columns": ["gender"], + "metrics": ["sum__num"], + "orderby": [["sum__num", False]], + "annotation_layers": [], + "row_limit": 50000, + "timeseries_limit": 0, + "order_desc": True, + "url_params": {}, + "custom_params": {}, + "custom_form_data": {}, + } + ], + "result_format": "json", + "result_type": "full", + } + ) + + self.get_assert_metric(f"api/v1/chart/{chart.id}/data/?force=true", "get_data") + + # should burst cache + rv = self.get_assert_metric( + f"api/v1/chart/{chart.id}/data/?force=true", "get_data" + ) + assert rv.json["result"][0]["is_cached"] is None + + # should get response from the cache + rv = self.get_assert_metric(f"api/v1/chart/{chart.id}/data/", "get_data") + assert rv.json["result"][0]["is_cached"] + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + @with_feature_flags(GLOBAL_ASYNC_QUERIES=True) + @mock.patch("superset.charts.data.api.QueryContextCacheLoader") + def test_chart_data_cache(self, cache_loader): + """ + Chart data cache API: Test chart data async cache request + """ + async_query_manager.init_app(app) + cache_loader.load.return_value = self.query_context_payload + orig_run = ChartDataCommand.run + + def mock_run(self, **kwargs): + assert kwargs["force_cached"] == True + # override force_cached to get result from DB + return orig_run(self, force_cached=False) + + with mock.patch.object(ChartDataCommand, "run", new=mock_run): + rv = self.get_assert_metric( + f"{CHART_DATA_URI}/test-cache-key", "data_from_cache" + ) + data = json.loads(rv.data.decode("utf-8")) + + expected_row_count = self.get_expected_row_count("client_id_3") + self.assertEqual(rv.status_code, 200) + self.assertEqual(data["result"][0]["rowcount"], expected_row_count) + + @with_feature_flags(GLOBAL_ASYNC_QUERIES=True) + @mock.patch("superset.charts.data.api.QueryContextCacheLoader") 
+ @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_chart_data_cache_run_failed(self, cache_loader): + """ + Chart data cache API: Test chart data async cache request with run failure + """ + async_query_manager.init_app(app) + cache_loader.load.return_value = self.query_context_payload + rv = self.get_assert_metric( + f"{CHART_DATA_URI}/test-cache-key", "data_from_cache" + ) + data = json.loads(rv.data.decode("utf-8")) + + self.assertEqual(rv.status_code, 422) + self.assertEqual(data["message"], "Error loading data from cache") + + @with_feature_flags(GLOBAL_ASYNC_QUERIES=True) + @mock.patch("superset.charts.data.api.QueryContextCacheLoader") + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_chart_data_cache_no_login(self, cache_loader): + """ + Chart data cache API: Test chart data async cache request (no login) + """ + self.logout() + async_query_manager.init_app(app) + cache_loader.load.return_value = self.query_context_payload + orig_run = ChartDataCommand.run + + def mock_run(self, **kwargs): + assert kwargs["force_cached"] == True + # override force_cached to get result from DB + return orig_run(self, force_cached=False) + + with mock.patch.object(ChartDataCommand, "run", new=mock_run): + rv = self.client.get( + f"{CHART_DATA_URI}/test-cache-key", + ) + + self.assertEqual(rv.status_code, 401) + + @with_feature_flags(GLOBAL_ASYNC_QUERIES=True) + def test_chart_data_cache_key_error(self): + """ + Chart data cache API: Test chart data async cache request with invalid cache key + """ + async_query_manager.init_app(app) + rv = self.get_assert_metric( + f"{CHART_DATA_URI}/test-cache-key", "data_from_cache" + ) + + self.assertEqual(rv.status_code, 404) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_chart_data_with_adhoc_column(self): + """ + Chart data API: Test query with adhoc column in both select and where clause + """ + self.login(username="admin") + request_payload = get_query_context("birth_names") + request_payload["queries"][0]["columns"] = [ADHOC_COLUMN_FIXTURE] + request_payload["queries"][0]["filters"] = [ + {"col": ADHOC_COLUMN_FIXTURE, "op": "IN", "val": ["male", "female"]} + ] + rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data") + response_payload = json.loads(rv.data.decode("utf-8")) + result = response_payload["result"][0] + data = result["data"] + assert {column for column in data[0].keys()} == {"male_or_female", "sum__num"} + unique_genders = {row["male_or_female"] for row in data} + assert unique_genders == {"male", "female"} + assert result["applied_filters"] == [{"column": "male_or_female"}] + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_chart_data_with_incompatible_adhoc_column(self): + """ + Chart data API: Test query with adhoc column that fails to run on this dataset + """ + self.login(username="admin") + request_payload = get_query_context("birth_names") + request_payload["queries"][0]["columns"] = [ADHOC_COLUMN_FIXTURE] + request_payload["queries"][0]["filters"] = [ + {"col": INCOMPATIBLE_ADHOC_COLUMN_FIXTURE, "op": "IN", "val": ["Exciting"]}, + {"col": ADHOC_COLUMN_FIXTURE, "op": "IN", "val": ["male", "female"]}, + ] + rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data") + response_payload = json.loads(rv.data.decode("utf-8")) + result = response_payload["result"][0] + data = result["data"] + assert {column for column in data[0].keys()} == {"male_or_female", "sum__num"} + unique_genders = 
{row["male_or_female"] for row in data} + assert unique_genders == {"male", "female"} + assert result["applied_filters"] == [{"column": "male_or_female"}] + assert result["rejected_filters"] == [ + { + "column": "exciting_or_boring", + "reason": ExtraFiltersReasonType.COL_NOT_IN_DATASOURCE, + } + ] + + +@pytest.fixture() +def physical_query_context(physical_dataset) -> Dict[str, Any]: + return { + "datasource": { + "type": physical_dataset.type, + "id": physical_dataset.id, + }, + "queries": [ + { + "columns": ["col1"], + "metrics": ["count"], + "orderby": [["col1", True]], + } + ], + "result_type": ChartDataResultType.FULL, + "force": True, + } + + +@mock.patch( + "superset.common.query_context_processor.config", + { + **app.config, + "CACHE_DEFAULT_TIMEOUT": 1234, + "DATA_CACHE_CONFIG": { + **app.config["DATA_CACHE_CONFIG"], + "CACHE_DEFAULT_TIMEOUT": None, + }, + }, +) +def test_cache_default_timeout(test_client, login_as_admin, physical_query_context): + rv = test_client.post(CHART_DATA_URI, json=physical_query_context) + assert rv.json["result"][0]["cache_timeout"] == 1234 + + +def test_custom_cache_timeout(test_client, login_as_admin, physical_query_context): + physical_query_context["custom_cache_timeout"] = 5678 + rv = test_client.post(CHART_DATA_URI, json=physical_query_context) + assert rv.json["result"][0]["cache_timeout"] == 5678 + + +@mock.patch( + "superset.common.query_context_processor.config", + { + **app.config, + "CACHE_DEFAULT_TIMEOUT": 100000, + "DATA_CACHE_CONFIG": { + **app.config["DATA_CACHE_CONFIG"], + "CACHE_DEFAULT_TIMEOUT": 3456, + }, + }, +) +def test_data_cache_default_timeout( + test_client, + login_as_admin, + physical_query_context, +): + rv = test_client.post(CHART_DATA_URI, json=physical_query_context) + assert rv.json["result"][0]["cache_timeout"] == 3456 + + +def test_chart_cache_timeout( + test_client, + login_as_admin, + physical_query_context, + load_energy_table_with_slice: List[Slice], +): + # should override datasource cache timeout + + slice_with_cache_timeout = load_energy_table_with_slice[0] + slice_with_cache_timeout.cache_timeout = 20 + db.session.merge(slice_with_cache_timeout) + + datasource: SqlaTable = ( + db.session.query(SqlaTable) + .filter(SqlaTable.id == physical_query_context["datasource"]["id"]) + .first() + ) + datasource.cache_timeout = 1254 + db.session.merge(datasource) + + db.session.commit() + + physical_query_context["form_data"] = {"slice_id": slice_with_cache_timeout.id} + + rv = test_client.post(CHART_DATA_URI, json=physical_query_context) + assert rv.json["result"][0]["cache_timeout"] == 20 + + +@mock.patch( + "superset.common.query_context_processor.config", + { + **app.config, + "DATA_CACHE_CONFIG": { + **app.config["DATA_CACHE_CONFIG"], + "CACHE_DEFAULT_TIMEOUT": 1010, + }, + }, +) +def test_chart_cache_timeout_not_present( + test_client, login_as_admin, physical_query_context +): + # should use datasource cache, if it's present + + datasource: SqlaTable = ( + db.session.query(SqlaTable) + .filter(SqlaTable.id == physical_query_context["datasource"]["id"]) + .first() + ) + datasource.cache_timeout = 1980 + db.session.merge(datasource) + db.session.commit() + + rv = test_client.post(CHART_DATA_URI, json=physical_query_context) + assert rv.json["result"][0]["cache_timeout"] == 1980 + + +@mock.patch( + "superset.common.query_context_processor.config", + { + **app.config, + "DATA_CACHE_CONFIG": { + **app.config["DATA_CACHE_CONFIG"], + "CACHE_DEFAULT_TIMEOUT": 1010, + }, + }, +) +def 
test_chart_cache_timeout_chart_not_found( + test_client, login_as_admin, physical_query_context +): + # should use default timeout + + physical_query_context["form_data"] = {"slice_id": 0} + + rv = test_client.post(CHART_DATA_URI, json=physical_query_context) + assert rv.json["result"][0]["cache_timeout"] == 1010 + + +@pytest.mark.parametrize( + "status_code,extras", + [ + (200, {"where": "1 = 1"}), + (200, {"having": "count(*) > 0"}), + (400, {"where": "col1 in (select distinct col1 from physical_dataset)"}), + (400, {"having": "count(*) > (select count(*) from physical_dataset)"}), + ], +) +@with_feature_flags(ALLOW_ADHOC_SUBQUERY=False) +@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") +def test_chart_data_subquery_not_allowed( + test_client, + login_as_admin, + physical_dataset, + physical_query_context, + status_code, + extras, +): + physical_query_context["queries"][0]["extras"] = extras + rv = test_client.post(CHART_DATA_URI, json=physical_query_context) + + assert rv.status_code == status_code + + +@pytest.mark.parametrize( + "status_code,extras", + [ + (200, {"where": "1 = 1"}), + (200, {"having": "count(*) > 0"}), + (200, {"where": "col1 in (select distinct col1 from physical_dataset)"}), + (200, {"having": "count(*) > (select count(*) from physical_dataset)"}), + ], +) +@with_feature_flags(ALLOW_ADHOC_SUBQUERY=True) +@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") +def test_chart_data_subquery_allowed( + test_client, + login_as_admin, + physical_dataset, + physical_query_context, + status_code, + extras, +): + physical_query_context["queries"][0]["extras"] = extras + rv = test_client.post(CHART_DATA_URI, json=physical_query_context) + + assert rv.status_code == status_code diff --git a/tests/integration_tests/charts/schema_tests.py b/tests/integration_tests/charts/schema_tests.py new file mode 100644 index 0000000000000..c28699f3302e4 --- /dev/null +++ b/tests/integration_tests/charts/schema_tests.py @@ -0,0 +1,79 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# isort:skip_file +"""Unit tests for Superset""" +from unittest import mock + +import pytest + +from marshmallow import ValidationError +from tests.integration_tests.test_app import app +from superset.charts.schemas import ChartDataQueryContextSchema +from tests.integration_tests.base_tests import SupersetTestCase +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) +from tests.integration_tests.fixtures.query_context import get_query_context + + +class TestSchema(SupersetTestCase): + @mock.patch( + "superset.common.query_context_factory.config", + {**app.config, "ROW_LIMIT": 5000}, + ) + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_query_context_limit_and_offset(self): + self.login(username="admin") + payload = get_query_context("birth_names") + + # too low limit and offset + payload["queries"][0]["row_limit"] = -1 + payload["queries"][0]["row_offset"] = -1 + with self.assertRaises(ValidationError) as context: + _ = ChartDataQueryContextSchema().load(payload) + self.assertIn("row_limit", context.exception.messages["queries"][0]) + self.assertIn("row_offset", context.exception.messages["queries"][0]) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_query_context_null_timegrain(self): + self.login(username="admin") + payload = get_query_context("birth_names") + payload["queries"][0]["extras"]["time_grain_sqla"] = None + _ = ChartDataQueryContextSchema().load(payload) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_query_context_series_limit(self): + self.login(username="admin") + payload = get_query_context("birth_names") + + payload["queries"][0]["timeseries_limit"] = 2 + payload["queries"][0]["timeseries_limit_metric"] = { + "expressionType": "SIMPLE", + "column": { + "id": 334, + "column_name": "gender", + "filterable": True, + "groupby": True, + "is_dttm": False, + "type": "VARCHAR(16)", + "optionName": "_col_gender", + }, + "aggregate": "COUNT_DISTINCT", + "label": "COUNT_DISTINCT(gender)", + } + _ = ChartDataQueryContextSchema().load(payload) diff --git a/tests/integration_tests/cli_tests.py b/tests/integration_tests/cli_tests.py new file mode 100644 index 0000000000000..aaa682bee0f3e --- /dev/null +++ b/tests/integration_tests/cli_tests.py @@ -0,0 +1,519 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+
+import importlib
+import json
+import logging
+from pathlib import Path
+from unittest import mock
+from zipfile import is_zipfile, ZipFile
+
+import pytest
+import yaml
+from freezegun import freeze_time
+
+import superset.cli.importexport
+import superset.cli.thumbnails
+from superset import app, db
+from superset.models.dashboard import Dashboard
+from tests.integration_tests.fixtures.birth_names_dashboard import (
+    load_birth_names_dashboard_with_slices,
+    load_birth_names_data,
+)
+
+logger = logging.getLogger(__name__)
+
+
+def assert_cli_fails_properly(response, caplog):
+    """
+    Ensure that a CLI command fails according to a predefined behaviour.
+    """
+    # don't exit successfully
+    assert response.exit_code != 0
+
+    # the last log record should be an error
+    assert caplog.records[-1].levelname == "ERROR"
+
+
+@mock.patch.dict(
+    "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": False}, clear=True
+)
+@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
+def test_export_dashboards_original(app_context, fs):
+    """
+    Test that a JSON file is exported.
+    """
+    # pylint: disable=reimported, redefined-outer-name
+    import superset.cli.importexport  # noqa: F811
+
+    # reload to define export_dashboards correctly based on the
+    # feature flags
+    importlib.reload(superset.cli.importexport)
+
+    runner = app.test_cli_runner()
+    response = runner.invoke(
+        superset.cli.importexport.export_dashboards, ("-f", "dashboards.json")
+    )
+
+    assert response.exit_code == 0
+    assert Path("dashboards.json").exists()
+
+    # check that the file is valid JSON
+    with open("dashboards.json") as fp:
+        contents = fp.read()
+        json.loads(contents)
+
+
+@mock.patch.dict(
+    "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": False}, clear=True
+)
+@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
+def test_export_datasources_original(app_context, fs):
+    """
+    Test that a YAML file is exported.
+    """
+    # pylint: disable=reimported, redefined-outer-name
+    import superset.cli.importexport  # noqa: F811
+
+    # reload to define export_datasources correctly based on the
+    # feature flags
+    importlib.reload(superset.cli.importexport)
+
+    runner = app.test_cli_runner()
+    response = runner.invoke(
+        superset.cli.importexport.export_datasources, ("-f", "datasources.yaml")
+    )
+
+    assert response.exit_code == 0
+
+    assert Path("datasources.yaml").exists()
+
+    # check that the file is valid YAML
+    with open("datasources.yaml") as fp:
+        contents = fp.read()
+        yaml.safe_load(contents)
+
+
+@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
+@mock.patch.dict(
+    "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True
+)
+def test_export_dashboards_versioned_export(app_context, fs):
+    """
+    Test that a ZIP file is exported.
+ """ + # pylint: disable=reimported, redefined-outer-name + import superset.cli.importexport # noqa: F811 + + # reload to define export_dashboards correctly based on the + # feature flags + importlib.reload(superset.cli.importexport) + + runner = app.test_cli_runner() + with freeze_time("2021-01-01T00:00:00Z"): + response = runner.invoke(superset.cli.importexport.export_dashboards, ()) + + assert response.exit_code == 0 + assert Path("dashboard_export_20210101T000000.zip").exists() + + assert is_zipfile("dashboard_export_20210101T000000.zip") + + +@mock.patch.dict( + "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True +) +@mock.patch( + "superset.dashboards.commands.export.ExportDashboardsCommand.run", + side_effect=Exception(), +) +def test_failing_export_dashboards_versioned_export( + export_dashboards_command, app_context, fs, caplog +): + """ + Test that failing to export ZIP file is done elegantly. + """ + caplog.set_level(logging.DEBUG) + + # pylint: disable=reimported, redefined-outer-name + import superset.cli.importexport # noqa: F811 + + # reload to define export_dashboards correctly based on the + # feature flags + importlib.reload(superset.cli.importexport) + + runner = app.test_cli_runner() + with freeze_time("2021-01-01T00:00:00Z"): + response = runner.invoke(superset.cli.importexport.export_dashboards, ()) + + assert_cli_fails_properly(response, caplog) + + +@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") +@mock.patch.dict( + "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True +) +def test_export_datasources_versioned_export(app_context, fs): + """ + Test that a ZIP file is exported. + """ + # pylint: disable=reimported, redefined-outer-name + import superset.cli.importexport # noqa: F811 + + # reload to define export_dashboards correctly based on the + # feature flags + importlib.reload(superset.cli.importexport) + + runner = app.test_cli_runner() + with freeze_time("2021-01-01T00:00:00Z"): + response = runner.invoke(superset.cli.importexport.export_datasources, ()) + + assert response.exit_code == 0 + assert Path("dataset_export_20210101T000000.zip").exists() + + assert is_zipfile("dataset_export_20210101T000000.zip") + + +@mock.patch.dict( + "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True +) +@mock.patch( + "superset.dashboards.commands.export.ExportDatasetsCommand.run", + side_effect=Exception(), +) +def test_failing_export_datasources_versioned_export( + export_dashboards_command, app_context, fs, caplog +): + """ + Test that failing to export ZIP file is done elegantly. + """ + # pylint: disable=reimported, redefined-outer-name + import superset.cli.importexport # noqa: F811 + + # reload to define export_dashboards correctly based on the + # feature flags + importlib.reload(superset.cli.importexport) + + runner = app.test_cli_runner() + with freeze_time("2021-01-01T00:00:00Z"): + response = runner.invoke(superset.cli.importexport.export_datasources, ()) + + assert_cli_fails_properly(response, caplog) + + +@mock.patch.dict( + "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True +) +@mock.patch("superset.dashboards.commands.importers.dispatcher.ImportDashboardsCommand") +def test_import_dashboards_versioned_export(import_dashboards_command, app_context, fs): + """ + Test that both ZIP and JSON can be imported. 
+ """ + # pylint: disable=reimported, redefined-outer-name + import superset.cli.importexport # noqa: F811 + + # reload to define export_dashboards correctly based on the + # feature flags + importlib.reload(superset.cli.importexport) + + # write JSON file + with open("dashboards.json", "w") as fp: + fp.write('{"hello": "world"}') + + runner = app.test_cli_runner() + response = runner.invoke( + superset.cli.importexport.import_dashboards, ("-p", "dashboards.json") + ) + + assert response.exit_code == 0 + expected_contents = {"dashboards.json": '{"hello": "world"}'} + import_dashboards_command.assert_called_with(expected_contents, overwrite=True) + + # write ZIP file + with ZipFile("dashboards.zip", "w") as bundle: + with bundle.open("dashboards/dashboard.yaml", "w") as fp: + fp.write(b"hello: world") + + runner = app.test_cli_runner() + response = runner.invoke( + superset.cli.importexport.import_dashboards, ("-p", "dashboards.zip") + ) + + assert response.exit_code == 0 + expected_contents = {"dashboard.yaml": "hello: world"} + import_dashboards_command.assert_called_with(expected_contents, overwrite=True) + + +@mock.patch.dict( + "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True +) +@mock.patch( + "superset.dashboards.commands.importers.dispatcher.ImportDashboardsCommand.run", + side_effect=Exception(), +) +def test_failing_import_dashboards_versioned_export( + import_dashboards_command, app_context, fs, caplog +): + """ + Test that failing to import either ZIP and JSON is done elegantly. + """ + # pylint: disable=reimported, redefined-outer-name + import superset.cli.importexport # noqa: F811 + + # reload to define export_dashboards correctly based on the + # feature flags + importlib.reload(superset.cli.importexport) + + # write JSON file + with open("dashboards.json", "w") as fp: + fp.write('{"hello": "world"}') + + runner = app.test_cli_runner() + response = runner.invoke( + superset.cli.importexport.import_dashboards, ("-p", "dashboards.json") + ) + + assert_cli_fails_properly(response, caplog) + + # write ZIP file + with ZipFile("dashboards.zip", "w") as bundle: + with bundle.open("dashboards/dashboard.yaml", "w") as fp: + fp.write(b"hello: world") + + runner = app.test_cli_runner() + response = runner.invoke( + superset.cli.importexport.import_dashboards, ("-p", "dashboards.zip") + ) + + assert_cli_fails_properly(response, caplog) + + +@mock.patch.dict( + "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True +) +@mock.patch("superset.datasets.commands.importers.dispatcher.ImportDatasetsCommand") +def test_import_datasets_versioned_export(import_datasets_command, app_context, fs): + """ + Test that both ZIP and YAML can be imported. 
+ """ + # pylint: disable=reimported, redefined-outer-name + import superset.cli.importexport # noqa: F811 + + # reload to define export_datasets correctly based on the + # feature flags + importlib.reload(superset.cli.importexport) + + # write YAML file + with open("datasets.yaml", "w") as fp: + fp.write("hello: world") + + runner = app.test_cli_runner() + response = runner.invoke( + superset.cli.importexport.import_datasources, ("-p", "datasets.yaml") + ) + + assert response.exit_code == 0 + expected_contents = {"datasets.yaml": "hello: world"} + import_datasets_command.assert_called_with(expected_contents, overwrite=True) + + # write ZIP file + with ZipFile("datasets.zip", "w") as bundle: + with bundle.open("datasets/dataset.yaml", "w") as fp: + fp.write(b"hello: world") + + runner = app.test_cli_runner() + response = runner.invoke( + superset.cli.importexport.import_datasources, ("-p", "datasets.zip") + ) + + assert response.exit_code == 0 + expected_contents = {"dataset.yaml": "hello: world"} + import_datasets_command.assert_called_with(expected_contents, overwrite=True) + + +@mock.patch.dict( + "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": False}, clear=True +) +@mock.patch("superset.datasets.commands.importers.v0.ImportDatasetsCommand") +def test_import_datasets_sync_argument_columns_metrics( + import_datasets_command, app_context, fs +): + """ + Test that the --sync command line argument syncs dataset in superset + with YAML file. Using both columns and metrics with the --sync flag + """ + # pylint: disable=reimported, redefined-outer-name + import superset.cli.importexport # noqa: F811 + + # reload to define export_datasets correctly based on the + # feature flags + importlib.reload(superset.cli.importexport) + + # write YAML file + with open("dataset.yaml", "w") as fp: + fp.write("hello: world") + + runner = app.test_cli_runner() + response = runner.invoke( + superset.cli.importexport.import_datasources, + ["-p", "dataset.yaml", "-s", "metrics,columns"], + ) + + assert response.exit_code == 0 + expected_contents = {"dataset.yaml": "hello: world"} + import_datasets_command.assert_called_with( + expected_contents, + sync_columns=True, + sync_metrics=True, + ) + + +@mock.patch.dict( + "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": False}, clear=True +) +@mock.patch("superset.datasets.commands.importers.v0.ImportDatasetsCommand") +def test_import_datasets_sync_argument_columns( + import_datasets_command, app_context, fs +): + """ + Test that the --sync command line argument syncs dataset in superset + with YAML file. 
Using only columns with the --sync flag + """ + # pylint: disable=reimported, redefined-outer-name + import superset.cli.importexport # noqa: F811 + + # reload to define export_datasets correctly based on the + # feature flags + importlib.reload(superset.cli.importexport) + + # write YAML file + with open("dataset.yaml", "w") as fp: + fp.write("hello: world") + + runner = app.test_cli_runner() + response = runner.invoke( + superset.cli.importexport.import_datasources, + ["-p", "dataset.yaml", "-s", "columns"], + ) + + assert response.exit_code == 0 + expected_contents = {"dataset.yaml": "hello: world"} + import_datasets_command.assert_called_with( + expected_contents, + sync_columns=True, + sync_metrics=False, + ) + + +@mock.patch.dict( + "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": False}, clear=True +) +@mock.patch("superset.datasets.commands.importers.v0.ImportDatasetsCommand") +def test_import_datasets_sync_argument_metrics( + import_datasets_command, app_context, fs +): + """ + Test that the --sync command line argument syncs dataset in superset + with YAML file. Using only metrics with the --sync flag + """ + # pylint: disable=reimported, redefined-outer-name + import superset.cli.importexport # noqa: F811 + + # reload to define export_datasets correctly based on the + # feature flags + importlib.reload(superset.cli.importexport) + + # write YAML file + with open("dataset.yaml", "w") as fp: + fp.write("hello: world") + + runner = app.test_cli_runner() + response = runner.invoke( + superset.cli.importexport.import_datasources, + ["-p", "dataset.yaml", "-s", "metrics"], + ) + + assert response.exit_code == 0 + expected_contents = {"dataset.yaml": "hello: world"} + import_datasets_command.assert_called_with( + expected_contents, + sync_columns=False, + sync_metrics=True, + ) + + +@mock.patch.dict( + "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True +) +@mock.patch( + "superset.datasets.commands.importers.dispatcher.ImportDatasetsCommand.run", + side_effect=Exception(), +) +def test_failing_import_datasets_versioned_export( + import_datasets_command, app_context, fs, caplog +): + """ + Test that failing to import either ZIP or YAML is done elegantly. 
+ """ + # pylint: disable=reimported, redefined-outer-name + import superset.cli.importexport # noqa: F811 + + # reload to define export_datasets correctly based on the + # feature flags + importlib.reload(superset.cli.importexport) + + # write YAML file + with open("datasets.yaml", "w") as fp: + fp.write("hello: world") + + runner = app.test_cli_runner() + response = runner.invoke( + superset.cli.importexport.import_datasources, ("-p", "datasets.yaml") + ) + + assert_cli_fails_properly(response, caplog) + + # write ZIP file + with ZipFile("datasets.zip", "w") as bundle: + with bundle.open("datasets/dataset.yaml", "w") as fp: + fp.write(b"hello: world") + + runner = app.test_cli_runner() + response = runner.invoke( + superset.cli.importexport.import_datasources, ("-p", "datasets.zip") + ) + + assert_cli_fails_properly(response, caplog) + + +@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") +@mock.patch("superset.tasks.thumbnails.cache_dashboard_thumbnail") +def test_compute_thumbnails(thumbnail_mock, app_context, fs): + + thumbnail_mock.return_value = None + runner = app.test_cli_runner() + dashboard = db.session.query(Dashboard).filter_by(slug="births").first() + response = runner.invoke( + superset.cli.thumbnails.compute_thumbnails, + ["-d", "-i", dashboard.id], + ) + + thumbnail_mock.assert_called_with( + None, + dashboard.id, + force=False, + ) + assert response.exit_code == 0 diff --git a/tests/integration_tests/commands_test.py b/tests/integration_tests/commands_test.py new file mode 100644 index 0000000000000..77fbad05f3a39 --- /dev/null +++ b/tests/integration_tests/commands_test.py @@ -0,0 +1,202 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+import copy +import json + +import yaml +from flask import g + +from superset import db +from superset.commands.exceptions import CommandInvalidError +from superset.commands.importers.v1.assets import ImportAssetsCommand +from superset.commands.importers.v1.utils import is_valid_config +from superset.models.dashboard import Dashboard +from superset.models.slice import Slice +from tests.integration_tests.base_tests import SupersetTestCase +from tests.integration_tests.fixtures.importexport import ( + chart_config, + dashboard_config, + database_config, + dataset_config, +) + +metadata_config = { + "version": "1.0.0", + "type": "assets", + "timestamp": "2020-11-04T21:27:44.423819+00:00", +} + + +class TestCommandsExceptions(SupersetTestCase): + def test_command_invalid_error(self): + exception = CommandInvalidError("A test") + assert str(exception) == "A test" + + +class TestImportersV1Utils(SupersetTestCase): + def test_is_valid_config(self): + assert is_valid_config("metadata.yaml") + assert is_valid_config("databases/examples.yaml") + assert not is_valid_config(".DS_Store") + assert not is_valid_config( + "__MACOSX/chart_export_20210111T145253/databases/._examples.yaml" + ) + + +class TestImportAssetsCommand(SupersetTestCase): + def setUp(self): + user = self.get_user("admin") + self.user = user + setattr(g, "user", user) + + def test_import_assets(self): + """Test that we can import multiple assets""" + contents = { + "metadata.yaml": yaml.safe_dump(metadata_config), + "databases/imported_database.yaml": yaml.safe_dump(database_config), + "datasets/imported_dataset.yaml": yaml.safe_dump(dataset_config), + "charts/imported_chart.yaml": yaml.safe_dump(chart_config), + "dashboards/imported_dashboard.yaml": yaml.safe_dump(dashboard_config), + } + command = ImportAssetsCommand(contents) + command.run() + + dashboard = ( + db.session.query(Dashboard).filter_by(uuid=dashboard_config["uuid"]).one() + ) + + assert len(dashboard.slices) == 1 + chart = dashboard.slices[0] + assert str(chart.uuid) == chart_config["uuid"] + new_chart_id = chart.id + + assert dashboard.dashboard_title == "Test dash" + assert dashboard.description is None + assert dashboard.css == "" + assert dashboard.slug is None + assert json.loads(dashboard.position_json) == { + "CHART-SVAlICPOSJ": { + "children": [], + "id": "CHART-SVAlICPOSJ", + "meta": { + "chartId": new_chart_id, + "height": 50, + "sliceName": "Number of California Births", + "uuid": "0c23747a-6528-4629-97bf-e4b78d3b9df1", + "width": 4, + }, + "parents": ["ROOT_ID", "GRID_ID", "ROW-dP_CHaK2q"], + "type": "CHART", + }, + "DASHBOARD_VERSION_KEY": "v2", + "GRID_ID": { + "children": ["ROW-dP_CHaK2q"], + "id": "GRID_ID", + "parents": ["ROOT_ID"], + "type": "GRID", + }, + "HEADER_ID": { + "id": "HEADER_ID", + "meta": {"text": "Test dash"}, + "type": "HEADER", + }, + "ROOT_ID": {"children": ["GRID_ID"], "id": "ROOT_ID", "type": "ROOT"}, + "ROW-dP_CHaK2q": { + "children": ["CHART-SVAlICPOSJ"], + "id": "ROW-dP_CHaK2q", + "meta": {"0": "ROOT_ID", "background": "BACKGROUND_TRANSPARENT"}, + "parents": ["ROOT_ID", "GRID_ID"], + "type": "ROW", + }, + } + assert json.loads(dashboard.json_metadata) == { + "color_scheme": None, + "default_filters": "{}", + "expanded_slices": {str(new_chart_id): True}, + "filter_scopes": { + str(new_chart_id): { + "region": {"scope": ["ROOT_ID"], "immune": [new_chart_id]} + }, + }, + "import_time": 1604342885, + "refresh_frequency": 0, + "remote_id": 7, + "timed_refresh_immune_slices": [new_chart_id], + } + + dataset = chart.table + assert 
str(dataset.uuid) == dataset_config["uuid"] + + database = dataset.database + assert str(database.uuid) == database_config["uuid"] + + assert dashboard.owners == [self.user] + + dashboard.owners = [] + chart.owners = [] + dataset.owners = [] + database.owners = [] + db.session.delete(dashboard) + db.session.delete(chart) + db.session.delete(dataset) + db.session.delete(database) + db.session.commit() + + def test_import_v1_dashboard_overwrite(self): + """Test that assets can be overwritten""" + contents = { + "metadata.yaml": yaml.safe_dump(metadata_config), + "databases/imported_database.yaml": yaml.safe_dump(database_config), + "datasets/imported_dataset.yaml": yaml.safe_dump(dataset_config), + "charts/imported_chart.yaml": yaml.safe_dump(chart_config), + "dashboards/imported_dashboard.yaml": yaml.safe_dump(dashboard_config), + } + command = ImportAssetsCommand(contents) + command.run() + chart = db.session.query(Slice).filter_by(uuid=chart_config["uuid"]).one() + assert chart.cache_timeout is None + + modified_chart_config = copy.deepcopy(chart_config) + modified_chart_config["cache_timeout"] = 3600 + contents = { + "metadata.yaml": yaml.safe_dump(metadata_config), + "databases/imported_database.yaml": yaml.safe_dump(database_config), + "datasets/imported_dataset.yaml": yaml.safe_dump(dataset_config), + "charts/imported_chart.yaml": yaml.safe_dump(modified_chart_config), + "dashboards/imported_dashboard.yaml": yaml.safe_dump(dashboard_config), + } + command = ImportAssetsCommand(contents) + command.run() + chart = db.session.query(Slice).filter_by(uuid=chart_config["uuid"]).one() + assert chart.cache_timeout == 3600 + + dashboard = ( + db.session.query(Dashboard).filter_by(uuid=dashboard_config["uuid"]).one() + ) + chart = dashboard.slices[0] + dataset = chart.table + database = dataset.database + dashboard.owners = [] + + chart.owners = [] + dataset.owners = [] + database.owners = [] + db.session.delete(dashboard) + db.session.delete(chart) + db.session.delete(dataset) + db.session.delete(database) + db.session.commit() diff --git a/tests/integration_tests/conftest.py b/tests/integration_tests/conftest.py new file mode 100644 index 0000000000000..5c132381b1930 --- /dev/null +++ b/tests/integration_tests/conftest.py @@ -0,0 +1,449 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations + +import contextlib +import functools +import os +from typing import Any, Callable, Dict, Optional, TYPE_CHECKING +from unittest.mock import patch + +import pytest +from flask.ctx import AppContext +from flask_appbuilder.security.sqla import models as ab_models +from sqlalchemy.engine import Engine + +from superset import db, security_manager +from superset.extensions import feature_flag_manager +from superset.utils.core import json_dumps_w_dates +from superset.utils.database import get_example_database, remove_database +from tests.integration_tests.test_app import app, login + +if TYPE_CHECKING: + from flask.testing import FlaskClient + + from superset.connectors.sqla.models import Database + +CTAS_SCHEMA_NAME = "sqllab_test_db" +ADMIN_SCHEMA_NAME = "admin_database" + + +@pytest.fixture +def app_context(): + with app.app_context() as ctx: + yield ctx + + +@pytest.fixture +def test_client(app_context: AppContext): + with app.test_client() as client: + yield client + + +@pytest.fixture +def login_as(test_client: "FlaskClient[Any]"): + """Fixture with app context and logged in admin user.""" + + def _login_as(username: str, password: str = "general"): + login(test_client, username=username, password=password) + + yield _login_as + # no need to log out as both app_context and test_client are + # function level fixtures anyway + + +@pytest.fixture +def login_as_admin(login_as: Callable[..., None]): + yield login_as("admin") + + +@pytest.fixture +def create_user(app_context: AppContext): + def _create_user(username: str, role: str = "Admin", password: str = "general"): + security_manager.add_user( + username, + "firstname", + "lastname", + "email@exaple.com", + security_manager.find_role(role), + password, + ) + return security_manager.find_user(username) + + return _create_user + + +@pytest.fixture +def get_user(app_context: AppContext): + def _get_user(username: str) -> ab_models.User: + return ( + db.session.query(security_manager.user_model) + .filter_by(username=username) + .one_or_none() + ) + + return _get_user + + +@pytest.fixture +def get_or_create_user(get_user, create_user) -> ab_models.User: + @contextlib.contextmanager + def _get_user(username: str) -> ab_models.User: + user = get_user(username) + if not user: + # if user is created by test, remove it after done + user = create_user(username) + yield user + db.session.delete(user) + else: + yield user + + return _get_user + + +@pytest.fixture(autouse=True, scope="session") +def setup_sample_data() -> Any: + # TODO(john-bodley): Determine a cleaner way of setting up the sample data without + # relying on `tests.integration_tests.test_app.app` leveraging an `app` fixture which is purposely + # scoped to the function level to ensure tests remain idempotent. 
+    with app.app_context():
+        setup_presto_if_needed()
+
+        from superset.cli.test import load_test_users_run
+
+        load_test_users_run()
+
+        from superset.examples.css_templates import load_css_templates
+
+        load_css_templates()
+
+    yield
+
+    with app.app_context():
+        # drop SQLAlchemy tables
+
+        db.session.commit()
+        from sqlalchemy.ext import declarative
+
+        sqla_base = declarative.declarative_base()
+        # use sorted_tables to drop in the proper order without violating foreign key constraints
+        for table in sqla_base.metadata.sorted_tables:
+            table.__table__.drop()
+        db.session.commit()
+
+
+def drop_from_schema(engine: Engine, schema_name: str):
+    schemas = engine.execute("SHOW SCHEMAS").fetchall()
+    if schema_name not in [s[0] for s in schemas]:
+        # schema doesn't exist
+        return
+    tables_or_views = engine.execute(f"SHOW TABLES in {schema_name}").fetchall()
+    for tv in tables_or_views:
+        engine.execute(f"DROP TABLE IF EXISTS {schema_name}.{tv[0]}")
+        engine.execute(f"DROP VIEW IF EXISTS {schema_name}.{tv[0]}")
+
+
+@pytest.fixture(scope="session")
+def example_db_provider() -> Callable[[], Database]:  # type: ignore
+    class _example_db_provider:
+        _db: Optional[Database] = None
+
+        def __call__(self) -> Database:
+            with app.app_context():
+                if self._db is None:
+                    self._db = get_example_database()
+                    self._load_lazy_data_to_decouple_from_session()
+
+                return self._db
+
+        def _load_lazy_data_to_decouple_from_session(self) -> None:
+            self._db._get_sqla_engine()  # type: ignore
+            self._db.backend  # type: ignore
+
+        def remove(self) -> None:
+            if self._db:
+                with app.app_context():
+                    remove_database(self._db)
+
+    _instance = _example_db_provider()
+
+    yield _instance
+
+    # TODO - cannot be used until the referenced objects have been deleted.
+    # _instance.remove()
+
+
+def setup_presto_if_needed():
+    db_uri = (
+        app.config.get("SQLALCHEMY_EXAMPLES_URI")
+        or app.config["SQLALCHEMY_DATABASE_URI"]
+    )
+    backend = db_uri.split("://")[0]
+    database = get_example_database()
+    extra = database.get_extra()
+
+    if backend == "presto":
+        # decrease poll interval for tests
+        extra = {
+            **extra,
+            "engine_params": {
+                "connect_args": {"poll_interval": app.config["PRESTO_POLL_INTERVAL"]}
+            },
+        }
+    else:
+        # remove `poll_interval` from databases that do not support it
+        extra = {**extra, "engine_params": {}}
+    database.extra = json_dumps_w_dates(extra)
+    db.session.commit()
+
+    if backend in {"presto", "hive"}:
+        database = get_example_database()
+        with database.get_sqla_engine_with_context() as engine:
+            drop_from_schema(engine, CTAS_SCHEMA_NAME)
+            engine.execute(f"DROP SCHEMA IF EXISTS {CTAS_SCHEMA_NAME}")
+            engine.execute(f"CREATE SCHEMA {CTAS_SCHEMA_NAME}")
+
+            drop_from_schema(engine, ADMIN_SCHEMA_NAME)
+            engine.execute(f"DROP SCHEMA IF EXISTS {ADMIN_SCHEMA_NAME}")
+            engine.execute(f"CREATE SCHEMA {ADMIN_SCHEMA_NAME}")
+
+
+def with_feature_flags(**mock_feature_flags):
+    """
+    Use this decorator to mock feature flags in integration tests.
+
+    Usage:
+
+    class TestYourFeature(SupersetTestCase):
+
+        @with_feature_flags(YOUR_FEATURE=True)
+        def test_your_feature_enabled(self):
+            self.assertEqual(is_feature_enabled("YOUR_FEATURE"), True)
+
+        @with_feature_flags(YOUR_FEATURE=False)
+        def test_your_feature_disabled(self):
+            self.assertEqual(is_feature_enabled("YOUR_FEATURE"), False)
+    """
+
+    def mock_get_feature_flags():
+        feature_flags = feature_flag_manager._feature_flags or {}
+        return {**feature_flags, **mock_feature_flags}
+
+    def decorate(test_fn):
+        def wrapper(*args, **kwargs):
+            with patch.object(
+                feature_flag_manager,
+                "get_feature_flags",
+                side_effect=mock_get_feature_flags,
+            ):
+                test_fn(*args, **kwargs)
+
+        return functools.update_wrapper(wrapper, test_fn)
+
+    return decorate
+
+
+def with_config(override_config: Dict[str, Any]):
+    """
+    Use this decorator to mock specific config keys.
+
+    Usage:
+
+    class TestYourFeature(SupersetTestCase):
+
+        @with_config({"SOME_CONFIG": True})
+        def test_your_config(self):
+            self.assertEqual(current_app.config["SOME_CONFIG"], True)
+
+    """
+
+    def decorate(test_fn):
+        config_backup = {}
+
+        def wrapper(*args, **kwargs):
+            from flask import current_app
+
+            for key, value in override_config.items():
+                config_backup[key] = current_app.config[key]
+                current_app.config[key] = value
+            test_fn(*args, **kwargs)
+            for key, value in config_backup.items():
+                current_app.config[key] = value
+
+        return functools.update_wrapper(wrapper, test_fn)
+
+    return decorate
+
+
+@pytest.fixture
+def virtual_dataset():
+    from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn
+
+    dataset = SqlaTable(
+        table_name="virtual_dataset",
+        sql=(
+            "SELECT 0 as col1, 'a' as col2, 1.0 as col3, NULL as col4, '2000-01-01 00:00:00' as col5 "
+            "UNION ALL "
+            "SELECT 1, 'b', 1.1, NULL, '2000-01-02 00:00:00' "
+            "UNION ALL "
+            "SELECT 2 as col1, 'c' as col2, 1.2, NULL, '2000-01-03 00:00:00' "
+            "UNION ALL "
+            "SELECT 3 as col1, 'd' as col2, 1.3, NULL, '2000-01-04 00:00:00' "
+            "UNION ALL "
+            "SELECT 4 as col1, 'e' as col2, 1.4, NULL, '2000-01-05 00:00:00' "
+            "UNION ALL "
+            "SELECT 5 as col1, 'f' as col2, 1.5, NULL, '2000-01-06 00:00:00' "
+            "UNION ALL "
+            "SELECT 6 as col1, 'g' as col2, 1.6, NULL, '2000-01-07 00:00:00' "
+            "UNION ALL "
+            "SELECT 7 as col1, 'h' as col2, 1.7, NULL, '2000-01-08 00:00:00' "
+            "UNION ALL "
+            "SELECT 8 as col1, 'i' as col2, 1.8, NULL, '2000-01-09 00:00:00' "
+            "UNION ALL "
+            "SELECT 9 as col1, 'j' as col2, 1.9, NULL, '2000-01-10 00:00:00' "
+        ),
+        database=get_example_database(),
+    )
+    TableColumn(column_name="col1", type="INTEGER", table=dataset)
+    TableColumn(column_name="col2", type="VARCHAR(255)", table=dataset)
+    TableColumn(column_name="col3", type="DECIMAL(4,2)", table=dataset)
+    TableColumn(column_name="col4", type="VARCHAR(255)", table=dataset)
+    # Datetime types are inconsistent across database dialects, so use VARCHAR for now
+    TableColumn(column_name="col5", type="VARCHAR(255)", table=dataset)
+
+    SqlMetric(metric_name="count", expression="count(*)", table=dataset)
+    db.session.merge(dataset)
+
+    yield dataset
+
+    db.session.delete(dataset)
+    db.session.commit()
+
+
+@pytest.fixture
+def physical_dataset():
+    from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn
+    from superset.connectors.sqla.utils import get_identifier_quoter
+
+    example_database = get_example_database()
+
+    with example_database.get_sqla_engine_with_context() as engine:
+        quoter = get_identifier_quoter(engine.name)
+        # sqlite can only execute one
statement at a time + engine.execute( + f""" + CREATE TABLE IF NOT EXISTS physical_dataset( + col1 INTEGER, + col2 VARCHAR(255), + col3 DECIMAL(4,2), + col4 VARCHAR(255), + col5 TIMESTAMP DEFAULT '1970-01-01 00:00:01', + col6 TIMESTAMP DEFAULT '1970-01-01 00:00:01', + {quoter('time column with spaces')} TIMESTAMP DEFAULT '1970-01-01 00:00:01' + ); + """ + ) + engine.execute( + """ + INSERT INTO physical_dataset values + (0, 'a', 1.0, NULL, '2000-01-01 00:00:00', '2002-01-03 00:00:00', '2002-01-03 00:00:00'), + (1, 'b', 1.1, NULL, '2000-01-02 00:00:00', '2002-02-04 00:00:00', '2002-02-04 00:00:00'), + (2, 'c', 1.2, NULL, '2000-01-03 00:00:00', '2002-03-07 00:00:00', '2002-03-07 00:00:00'), + (3, 'd', 1.3, NULL, '2000-01-04 00:00:00', '2002-04-12 00:00:00', '2002-04-12 00:00:00'), + (4, 'e', 1.4, NULL, '2000-01-05 00:00:00', '2002-05-11 00:00:00', '2002-05-11 00:00:00'), + (5, 'f', 1.5, NULL, '2000-01-06 00:00:00', '2002-06-13 00:00:00', '2002-06-13 00:00:00'), + (6, 'g', 1.6, NULL, '2000-01-07 00:00:00', '2002-07-15 00:00:00', '2002-07-15 00:00:00'), + (7, 'h', 1.7, NULL, '2000-01-08 00:00:00', '2002-08-18 00:00:00', '2002-08-18 00:00:00'), + (8, 'i', 1.8, NULL, '2000-01-09 00:00:00', '2002-09-20 00:00:00', '2002-09-20 00:00:00'), + (9, 'j', 1.9, NULL, '2000-01-10 00:00:00', '2002-10-22 00:00:00', '2002-10-22 00:00:00'); + """ + ) + + dataset = SqlaTable( + table_name="physical_dataset", + database=example_database, + ) + TableColumn(column_name="col1", type="INTEGER", table=dataset) + TableColumn(column_name="col2", type="VARCHAR(255)", table=dataset) + TableColumn(column_name="col3", type="DECIMAL(4,2)", table=dataset) + TableColumn(column_name="col4", type="VARCHAR(255)", table=dataset) + TableColumn(column_name="col5", type="TIMESTAMP", is_dttm=True, table=dataset) + TableColumn(column_name="col6", type="TIMESTAMP", is_dttm=True, table=dataset) + TableColumn( + column_name="time column with spaces", + type="TIMESTAMP", + is_dttm=True, + table=dataset, + ) + SqlMetric(metric_name="count", expression="count(*)", table=dataset) + db.session.merge(dataset) + db.session.commit() + + yield dataset + + engine.execute( + """ + DROP TABLE physical_dataset; + """ + ) + dataset = db.session.query(SqlaTable).filter_by(table_name="physical_dataset").all() + for ds in dataset: + db.session.delete(ds) + db.session.commit() + + +@pytest.fixture +def virtual_dataset_comma_in_column_value(): + from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn + + dataset = SqlaTable( + table_name="virtual_dataset", + sql=( + "SELECT 'col1,row1' as col1, 'col2, row1' as col2 " + "UNION ALL " + "SELECT 'col1,row2' as col1, 'col2, row2' as col2 " + "UNION ALL " + "SELECT 'col1,row3' as col1, 'col2, row3' as col2 " + ), + database=get_example_database(), + ) + TableColumn(column_name="col1", type="VARCHAR(255)", table=dataset) + TableColumn(column_name="col2", type="VARCHAR(255)", table=dataset) + + SqlMetric(metric_name="count", expression="count(*)", table=dataset) + db.session.merge(dataset) + + yield dataset + + db.session.delete(dataset) + db.session.commit() + + +only_postgresql = pytest.mark.skipif( + "postgresql" not in os.environ.get("SUPERSET__SQLALCHEMY_DATABASE_URI", ""), + reason="Only run test case in Postgresql", +) + +only_sqlite = pytest.mark.skipif( + "sqlite" not in os.environ.get("SUPERSET__SQLALCHEMY_DATABASE_URI", ""), + reason="Only run test case in SQLite", +) + +only_mysql = pytest.mark.skipif( + "mysql" not in os.environ.get("SUPERSET__SQLALCHEMY_DATABASE_URI", ""), + 
reason="Only run test case in MySQL", +) diff --git a/tests/integration_tests/core_tests.py b/tests/integration_tests/core_tests.py new file mode 100644 index 0000000000000..f036f18bf6aa3 --- /dev/null +++ b/tests/integration_tests/core_tests.py @@ -0,0 +1,1685 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# isort:skip_file +"""Unit tests for Superset""" +import csv +import datetime +import doctest +import html +import io +import json +import logging +from typing import Dict, List +from urllib.parse import quote + +import superset.utils.database +from superset.utils.core import backend +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) +from sqlalchemy import Table + +import pytest +import pytz +import random +import re +import unittest +from unittest import mock + +import pandas as pd +import sqlalchemy as sqla +from sqlalchemy.exc import SQLAlchemyError +from superset.models.cache import CacheKey +from superset.utils.database import get_example_database +from tests.integration_tests.conftest import with_feature_flags +from tests.integration_tests.fixtures.energy_dashboard import ( + load_energy_table_with_slice, + load_energy_table_data, +) +from tests.integration_tests.test_app import app +import superset.views.utils +from superset import ( + dataframe, + db, + security_manager, + sql_lab, +) +from superset.common.db_query_status import QueryStatus +from superset.connectors.sqla.models import SqlaTable +from superset.db_engine_specs.base import BaseEngineSpec +from superset.db_engine_specs.mssql import MssqlEngineSpec +from superset.exceptions import SupersetException +from superset.extensions import async_query_manager, cache_manager +from superset.models import core as models +from superset.models.annotations import Annotation, AnnotationLayer +from superset.models.dashboard import Dashboard +from superset.models.datasource_access_request import DatasourceAccessRequest +from superset.models.slice import Slice +from superset.models.sql_lab import Query +from superset.result_set import SupersetResultSet +from superset.utils import core as utils +from superset.views import core as views +from superset.views.database.views import DatabaseView + +from .base_tests import SupersetTestCase +from tests.integration_tests.fixtures.world_bank_dashboard import ( + load_world_bank_dashboard_with_slices, + load_world_bank_data, +) +from tests.integration_tests.conftest import CTAS_SCHEMA_NAME + +logger = logging.getLogger(__name__) + + +@pytest.fixture(scope="module") +def cleanup(): + db.session.query(Query).delete() + db.session.query(DatasourceAccessRequest).delete() + db.session.query(models.Log).delete() + db.session.commit() + yield + + +class 
TestCore(SupersetTestCase): + def setUp(self): + self.table_ids = { + tbl.table_name: tbl.id for tbl in (db.session.query(SqlaTable).all()) + } + self.original_unsafe_db_setting = app.config["PREVENT_UNSAFE_DB_CONNECTIONS"] + + def tearDown(self): + db.session.query(Query).delete() + app.config["PREVENT_UNSAFE_DB_CONNECTIONS"] = self.original_unsafe_db_setting + + def test_login(self): + resp = self.get_resp("/login/", data=dict(username="admin", password="general")) + self.assertNotIn("User confirmation needed", resp) + + resp = self.get_resp("/logout/", follow_redirects=True) + self.assertIn("User confirmation needed", resp) + + resp = self.get_resp( + "/login/", data=dict(username="admin", password="wrongPassword") + ) + self.assertIn("User confirmation needed", resp) + + def test_dashboard_endpoint(self): + self.login() + resp = self.client.get("/superset/dashboard/-1/") + assert resp.status_code == 404 + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_slice_endpoint(self): + self.login(username="admin") + resp = self.client.get("/superset/slice/-1/") + assert resp.status_code == 404 + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_viz_cache_key(self): + self.login(username="admin") + slc = self.get_slice("Girls", db.session) + + viz = slc.viz + qobj = viz.query_obj() + cache_key = viz.cache_key(qobj) + + qobj["groupby"] = [] + cache_key_with_groupby = viz.cache_key(qobj) + self.assertNotEqual(cache_key, cache_key_with_groupby) + + self.assertNotEqual( + viz.cache_key(qobj), viz.cache_key(qobj, time_compare="12 weeks") + ) + + self.assertNotEqual( + viz.cache_key(qobj, time_compare="28 days"), + viz.cache_key(qobj, time_compare="12 weeks"), + ) + + qobj["inner_from_dttm"] = datetime.datetime(1901, 1, 1) + + self.assertEqual(cache_key_with_groupby, viz.cache_key(qobj)) + + def test_get_superset_tables_not_allowed(self): + example_db = superset.utils.database.get_example_database() + schema_name = self.default_schema_backend_map[example_db.backend] + self.login(username="gamma") + uri = f"superset/tables/{example_db.id}/{schema_name}/" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 404) + + @pytest.mark.usefixtures("load_energy_table_with_slice") + def test_get_superset_tables_allowed(self): + session = db.session + table_name = "energy_usage" + role_name = "dummy_role" + self.logout() + self.login(username="gamma") + gamma_user = security_manager.find_user(username="gamma") + security_manager.add_role(role_name) + dummy_role = security_manager.find_role(role_name) + gamma_user.roles.append(dummy_role) + + tbl_id = self.table_ids.get(table_name) + table = db.session.query(SqlaTable).filter(SqlaTable.id == tbl_id).first() + table_perm = table.perm + + security_manager.add_permission_role( + dummy_role, + security_manager.find_permission_view_menu("datasource_access", table_perm), + ) + + session.commit() + + example_db = utils.get_example_database() + schema_name = self.default_schema_backend_map[example_db.backend] + uri = f"superset/tables/{example_db.id}/{schema_name}/" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 200) + + # cleanup + gamma_user = security_manager.find_user(username="gamma") + gamma_user.roles.remove(security_manager.find_role(role_name)) + session.commit() + + @pytest.mark.usefixtures("load_energy_table_with_slice") + def test_get_superset_tables_not_allowed_with_out_permissions(self): + session = db.session + role_name = "dummy_role_no_table_access" + self.logout() + 
self.login(username="gamma") + gamma_user = security_manager.find_user(username="gamma") + security_manager.add_role(role_name) + dummy_role = security_manager.find_role(role_name) + gamma_user.roles.append(dummy_role) + + session.commit() + + example_db = utils.get_example_database() + schema_name = self.default_schema_backend_map[example_db.backend] + uri = f"superset/tables/{example_db.id}/{schema_name}/" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 404) + + # cleanup + gamma_user = security_manager.find_user(username="gamma") + gamma_user.roles.remove(security_manager.find_role(role_name)) + session.commit() + + def test_get_superset_tables_database_not_found(self): + self.login(username="admin") + uri = f"superset/tables/invalid/public/" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 404) + + def test_get_superset_tables_schema_undefined(self): + example_db = superset.utils.database.get_example_database() + self.login(username="gamma") + uri = f"superset/tables/{example_db.id}/undefined/" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 422) + + def test_admin_only_permissions(self): + def assert_admin_permission_in(role_name, assert_func): + role = security_manager.find_role(role_name) + permissions = [p.permission.name for p in role.permissions] + assert_func("can_approve", permissions) + + assert_admin_permission_in("Admin", self.assertIn) + assert_admin_permission_in("Alpha", self.assertNotIn) + assert_admin_permission_in("Gamma", self.assertNotIn) + + def test_admin_only_menu_views(self): + def assert_admin_view_menus_in(role_name, assert_func): + role = security_manager.find_role(role_name) + view_menus = [p.view_menu.name for p in role.permissions] + assert_func("ResetPasswordView", view_menus) + assert_func("RoleModelView", view_menus) + assert_func("Security", view_menus) + assert_func("SQL Lab", view_menus) + + assert_admin_view_menus_in("Admin", self.assertIn) + assert_admin_view_menus_in("Alpha", self.assertNotIn) + assert_admin_view_menus_in("Gamma", self.assertNotIn) + + @pytest.mark.usefixtures("load_energy_table_with_slice") + def test_save_slice(self): + self.login(username="admin") + slice_name = f"Energy Sankey" + slice_id = self.get_slice(slice_name, db.session).id + copy_name_prefix = "Test Sankey" + copy_name = f"{copy_name_prefix}[save]{random.random()}" + tbl_id = self.table_ids.get("energy_usage") + new_slice_name = f"{copy_name_prefix}[overwrite]{random.random()}" + + url = ( + "/superset/explore/table/{}/?slice_name={}&" + "action={}&datasource_name=energy_usage" + ) + + form_data = { + "adhoc_filters": [], + "viz_type": "sankey", + "groupby": ["target"], + "metric": "sum__value", + "row_limit": 5000, + "slice_id": slice_id, + } + # Changing name and save as a new slice + resp = self.client.post( + url.format(tbl_id, copy_name, "saveas"), + data={"form_data": json.dumps(form_data)}, + ) + db.session.expunge_all() + new_slice_id = resp.json["form_data"]["slice_id"] + slc = db.session.query(Slice).filter_by(id=new_slice_id).one() + + self.assertEqual(slc.slice_name, copy_name) + form_data.pop("slice_id") # We don't save the slice id when saving as + self.assertEqual(slc.viz.form_data, form_data) + + form_data = { + "adhoc_filters": [], + "viz_type": "sankey", + "groupby": ["source"], + "metric": "sum__value", + "row_limit": 5000, + "slice_id": new_slice_id, + "time_range": "now", + } + # Setting the name back to its original name by overwriting new slice + self.client.post( + url.format(tbl_id, new_slice_name, 
"overwrite"), + data={"form_data": json.dumps(form_data)}, + ) + db.session.expunge_all() + slc = db.session.query(Slice).filter_by(id=new_slice_id).one() + self.assertEqual(slc.slice_name, new_slice_name) + self.assertEqual(slc.viz.form_data, form_data) + + # Cleanup + slices = ( + db.session.query(Slice) + .filter(Slice.slice_name.like(copy_name_prefix + "%")) + .all() + ) + for slc in slices: + db.session.delete(slc) + db.session.commit() + + @pytest.mark.usefixtures("load_energy_table_with_slice") + def test_filter_endpoint(self): + self.login(username="admin") + tbl_id = self.table_ids.get("energy_usage") + table = db.session.query(SqlaTable).filter(SqlaTable.id == tbl_id) + table.filter_select_enabled = True + url = "/superset/filter/table/{}/target/" + + # Changing name + resp = self.get_resp(url.format(tbl_id)) + assert len(resp) > 0 + assert "energy_target0" in resp + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_slice_data(self): + # slice data should have some required attributes + self.login(username="admin") + slc = self.get_slice( + slice_name="Girls", session=db.session, expunge_from_session=False + ) + slc_data_attributes = slc.data.keys() + assert "changed_on" in slc_data_attributes + assert "modified" in slc_data_attributes + assert "owners" in slc_data_attributes + + @pytest.mark.usefixtures("load_energy_table_with_slice") + def test_slices(self): + # Testing by hitting the two supported end points for all slices + self.login(username="admin") + Slc = Slice + urls = [] + for slc in db.session.query(Slc).all(): + urls += [ + (slc.slice_name, "explore", slc.slice_url), + ] + for name, method, url in urls: + logger.info(f"[{name}]/[{method}]: {url}") + print(f"[{name}]/[{method}]: {url}") + resp = self.client.get(url) + self.assertEqual(resp.status_code, 200) + + def test_add_slice(self): + self.login(username="admin") + # assert that /chart/add responds with 200 + url = "/chart/add" + resp = self.client.get(url) + self.assertEqual(resp.status_code, 200) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_get_user_slices_for_owners(self): + self.login(username="alpha") + user = security_manager.find_user("alpha") + slice_name = "Girls" + + # ensure user is not owner of any slices + url = f"/superset/user_slices/{user.id}/" + resp = self.client.get(url) + data = json.loads(resp.data) + self.assertEqual(data, []) + + # make user owner of slice and verify that endpoint returns said slice + slc = self.get_slice( + slice_name=slice_name, session=db.session, expunge_from_session=False + ) + slc.owners = [user] + db.session.merge(slc) + db.session.commit() + url = f"/superset/user_slices/{user.id}/" + resp = self.client.get(url) + data = json.loads(resp.data) + self.assertEqual(len(data), 1) + self.assertEqual(data[0]["title"], slice_name) + + # remove ownership and ensure user no longer gets slice + slc = self.get_slice( + slice_name=slice_name, session=db.session, expunge_from_session=False + ) + slc.owners = [] + db.session.merge(slc) + db.session.commit() + url = f"/superset/user_slices/{user.id}/" + resp = self.client.get(url) + data = json.loads(resp.data) + self.assertEqual(data, []) + + def test_get_user_slices(self): + self.login(username="admin") + userid = security_manager.find_user("admin").id + url = f"/sliceasync/api/read?_flt_0_created_by={userid}" + resp = self.client.get(url) + self.assertEqual(resp.status_code, 200) + + @pytest.mark.usefixtures("load_energy_table_with_slice") + def 
test_slices_V2(self): + # Add explore-v2-beta role to admin user + # Test all slice urls as user with explore-v2-beta role + security_manager.add_role("explore-v2-beta") + + security_manager.add_user( + "explore_beta", + "explore_beta", + " user", + "explore_beta@airbnb.com", + security_manager.find_role("explore-v2-beta"), + password="general", + ) + self.login(username="explore_beta", password="general") + + Slc = Slice + urls = [] + for slc in db.session.query(Slc).all(): + urls += [(slc.slice_name, "slice_url", slc.slice_url)] + for name, method, url in urls: + print(f"[{name}]/[{method}]: {url}") + self.client.get(url) + + def test_doctests(self): + modules = [utils, models, sql_lab] + for mod in modules: + failed, tests = doctest.testmod(mod) + if failed: + raise Exception("Failed a doctest") + + def test_misc(self): + assert self.get_resp("/health") == "OK" + assert self.get_resp("/healthcheck") == "OK" + assert self.get_resp("/ping") == "OK" + + def test_testconn(self, username="admin"): + # need to temporarily allow sqlite dbs, teardown will undo this + app.config["PREVENT_UNSAFE_DB_CONNECTIONS"] = False + self.login(username=username) + database = superset.utils.database.get_example_database() + # validate that the endpoint works with the password-masked sqlalchemy uri + data = json.dumps( + { + "uri": database.safe_sqlalchemy_uri(), + "name": "examples", + "impersonate_user": False, + } + ) + response = self.client.post( + "/superset/testconn", data=data, content_type="application/json" + ) + assert response.status_code == 200 + assert response.headers["Content-Type"] == "application/json" + + # validate that the endpoint works with the decrypted sqlalchemy uri + data = json.dumps( + { + "uri": database.sqlalchemy_uri_decrypted, + "name": "examples", + "impersonate_user": False, + } + ) + response = self.client.post( + "/superset/testconn", data=data, content_type="application/json" + ) + assert response.status_code == 200 + assert response.headers["Content-Type"] == "application/json" + + def test_testconn_failed_conn(self, username="admin"): + self.login(username=username) + + data = json.dumps( + {"uri": "broken://url", "name": "examples", "impersonate_user": False} + ) + response = self.client.post( + "/superset/testconn", data=data, content_type="application/json" + ) + assert response.status_code == 400 + assert response.headers["Content-Type"] == "application/json" + response_body = json.loads(response.data.decode("utf-8")) + expected_body = {"error": "Could not load database driver: broken"} + assert response_body == expected_body, "%s != %s" % ( + response_body, + expected_body, + ) + + data = json.dumps( + { + "uri": "mssql+pymssql://url", + "name": "examples", + "impersonate_user": False, + } + ) + response = self.client.post( + "/superset/testconn", data=data, content_type="application/json" + ) + assert response.status_code == 400 + assert response.headers["Content-Type"] == "application/json" + response_body = json.loads(response.data.decode("utf-8")) + expected_body = {"error": "Could not load database driver: mssql+pymssql"} + assert response_body == expected_body, "%s != %s" % ( + response_body, + expected_body, + ) + + def test_testconn_unsafe_uri(self, username="admin"): + self.login(username=username) + app.config["PREVENT_UNSAFE_DB_CONNECTIONS"] = True + + response = self.client.post( + "/superset/testconn", + data=json.dumps( + { + "uri": "sqlite:///home/superset/unsafe.db", + "name": "unsafe", + "impersonate_user": False, + } + ), + 
content_type="application/json", + ) + self.assertEqual(400, response.status_code) + response_body = json.loads(response.data.decode("utf-8")) + expected_body = { + "error": "SQLiteDialect_pysqlite cannot be used as a data source for security reasons." + } + self.assertEqual(expected_body, response_body) + + def test_custom_password_store(self): + database = superset.utils.database.get_example_database() + conn_pre = sqla.engine.url.make_url(database.sqlalchemy_uri_decrypted) + + def custom_password_store(uri): + return "password_store_test" + + models.custom_password_store = custom_password_store + conn = sqla.engine.url.make_url(database.sqlalchemy_uri_decrypted) + if conn_pre.password: + assert conn.password == "password_store_test" + assert conn.password != conn_pre.password + # Disable for password store for later tests + models.custom_password_store = None + + def test_databaseview_edit(self, username="admin"): + # validate that sending a password-masked uri does not over-write the decrypted + # uri + self.login(username=username) + database = superset.utils.database.get_example_database() + sqlalchemy_uri_decrypted = database.sqlalchemy_uri_decrypted + url = "databaseview/edit/{}".format(database.id) + data = {k: database.__getattribute__(k) for k in DatabaseView.add_columns} + data["sqlalchemy_uri"] = database.safe_sqlalchemy_uri() + self.client.post(url, data=data) + database = superset.utils.database.get_example_database() + self.assertEqual(sqlalchemy_uri_decrypted, database.sqlalchemy_uri_decrypted) + + # Need to clean up after ourselves + database.impersonate_user = False + database.allow_dml = False + database.allow_run_async = False + db.session.commit() + + @pytest.mark.usefixtures( + "load_energy_table_with_slice", "load_birth_names_dashboard_with_slices" + ) + def test_warm_up_cache(self): + self.login() + slc = self.get_slice("Girls", db.session) + data = self.get_json_resp("/superset/warm_up_cache?slice_id={}".format(slc.id)) + self.assertEqual( + data, [{"slice_id": slc.id, "viz_error": None, "viz_status": "success"}] + ) + + data = self.get_json_resp( + "/superset/warm_up_cache?table_name=energy_usage&db_name=main" + ) + assert len(data) > 0 + + dashboard = self.get_dash_by_slug("births") + + assert self.get_json_resp( + f"/superset/warm_up_cache?dashboard_id={dashboard.id}&slice_id={slc.id}" + ) == [{"slice_id": slc.id, "viz_error": None, "viz_status": "success"}] + + assert self.get_json_resp( + f"/superset/warm_up_cache?dashboard_id={dashboard.id}&slice_id={slc.id}&extra_filters=" + + quote(json.dumps([{"col": "name", "op": "in", "val": ["Jennifer"]}])) + ) == [{"slice_id": slc.id, "viz_error": None, "viz_status": "success"}] + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_cache_logging(self): + self.login("admin") + store_cache_keys = app.config["STORE_CACHE_KEYS_IN_METADATA_DB"] + app.config["STORE_CACHE_KEYS_IN_METADATA_DB"] = True + girls_slice = self.get_slice("Girls", db.session) + self.get_json_resp("/superset/warm_up_cache?slice_id={}".format(girls_slice.id)) + ck = db.session.query(CacheKey).order_by(CacheKey.id.desc()).first() + assert ck.datasource_uid == f"{girls_slice.table.id}__table" + app.config["STORE_CACHE_KEYS_IN_METADATA_DB"] = store_cache_keys + + def test_redirect_invalid(self): + model_url = models.Url(url="hhttp://invalid.com") + db.session.add(model_url) + db.session.commit() + + self.login(username="admin") + response = self.client.get(f"/r/{model_url.id}") + assert response.headers["Location"] == "/" + 
db.session.delete(model_url) + db.session.commit() + + @with_feature_flags(KV_STORE=False) + def test_kv_disabled(self): + self.login(username="admin") + + resp = self.client.get("/kv/10001/") + self.assertEqual(404, resp.status_code) + + value = json.dumps({"data": "this is a test"}) + resp = self.client.post("/kv/store/", data=dict(data=value)) + self.assertEqual(resp.status_code, 404) + + @with_feature_flags(KV_STORE=True) + def test_kv_enabled(self): + self.login(username="admin") + + resp = self.client.get("/kv/10001/") + self.assertEqual(404, resp.status_code) + + value = json.dumps({"data": "this is a test"}) + resp = self.client.post("/kv/store/", data=dict(data=value)) + self.assertEqual(resp.status_code, 200) + kv = db.session.query(models.KeyValue).first() + kv_value = kv.value + self.assertEqual(json.loads(value), json.loads(kv_value)) + + resp = self.client.get("/kv/{}/".format(kv.id)) + self.assertEqual(resp.status_code, 200) + self.assertEqual(json.loads(value), json.loads(resp.data.decode("utf-8"))) + + def test_gamma(self): + self.login(username="gamma") + assert "Charts" in self.get_resp("/chart/list/") + assert "Dashboards" in self.get_resp("/dashboard/list/") + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_csv_endpoint(self): + self.login() + client_id = "{}".format(random.getrandbits(64))[:10] + get_name_sql = """ + SELECT name + FROM birth_names + LIMIT 1 + """ + resp = self.run_sql(get_name_sql, client_id, raise_on_error=True) + name = resp["data"][0]["name"] + sql = f""" + SELECT name + FROM birth_names + WHERE name = '{name}' + LIMIT 1 + """ + client_id = "{}".format(random.getrandbits(64))[:10] + self.run_sql(sql, client_id, raise_on_error=True) + + resp = self.get_resp("/superset/csv/{}".format(client_id)) + data = csv.reader(io.StringIO(resp)) + expected_data = csv.reader(io.StringIO(f"name\n{name}\n")) + + client_id = "{}".format(random.getrandbits(64))[:10] + self.run_sql(sql, client_id, raise_on_error=True) + + resp = self.get_resp("/superset/csv/{}".format(client_id)) + data = csv.reader(io.StringIO(resp)) + expected_data = csv.reader(io.StringIO(f"name\n{name}\n")) + + self.assertEqual(list(expected_data), list(data)) + self.logout() + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_extra_table_metadata(self): + self.login() + example_db = superset.utils.database.get_example_database() + schema = "default" if example_db.backend in {"presto", "hive"} else "superset" + self.get_json_resp( + f"/superset/extra_table_metadata/{example_db.id}/birth_names/{schema}/" + ) + + def test_required_params_in_sql_json(self): + self.login() + client_id = "{}".format(random.getrandbits(64))[:10] + + data = {"client_id": client_id} + rv = self.client.post( + "/superset/sql_json/", + json=data, + ) + failed_resp = { + "sql": ["Missing data for required field."], + "database_id": ["Missing data for required field."], + } + resp_data = json.loads(rv.data.decode("utf-8")) + self.assertDictEqual(resp_data, failed_resp) + self.assertEqual(rv.status_code, 400) + + data = {"sql": "SELECT 1", "client_id": client_id} + rv = self.client.post( + "/superset/sql_json/", + json=data, + ) + failed_resp = {"database_id": ["Missing data for required field."]} + resp_data = json.loads(rv.data.decode("utf-8")) + self.assertDictEqual(resp_data, failed_resp) + self.assertEqual(rv.status_code, 400) + + data = {"database_id": 1, "client_id": client_id} + rv = self.client.post( + "/superset/sql_json/", + json=data, + ) + failed_resp = 
{"sql": ["Missing data for required field."]} + resp_data = json.loads(rv.data.decode("utf-8")) + self.assertDictEqual(resp_data, failed_resp) + self.assertEqual(rv.status_code, 400) + + data = {"sql": "SELECT 1", "database_id": 1, "client_id": client_id} + rv = self.client.post( + "/superset/sql_json/", + json=data, + ) + resp_data = json.loads(rv.data.decode("utf-8")) + self.assertEqual(resp_data.get("status"), "success") + self.assertEqual(rv.status_code, 200) + + def test_templated_sql_json(self): + if superset.utils.database.get_example_database().backend == "presto": + # TODO: make it work for presto + return + self.login() + sql = "SELECT '{{ 1+1 }}' as test" + data = self.run_sql(sql, "fdaklj3ws") + self.assertEqual(data["data"][0]["test"], "2") + + @mock.patch( + "tests.integration_tests.superset_test_custom_template_processors.datetime" + ) + @mock.patch("superset.views.core.get_sql_results") + def test_custom_templated_sql_json(self, sql_lab_mock, mock_dt) -> None: + """Test sqllab receives macros expanded query.""" + mock_dt.utcnow = mock.Mock(return_value=datetime.datetime(1970, 1, 1)) + self.login() + sql = "SELECT '$DATE()' as test" + resp = { + "status": QueryStatus.SUCCESS, + "query": {"rows": 1}, + "data": [{"test": "'1970-01-01'"}], + } + sql_lab_mock.return_value = resp + + dbobj = self.create_fake_db_for_macros() + json_payload = dict(database_id=dbobj.id, sql=sql) + self.get_json_resp( + "/superset/sql_json/", raise_on_error=False, json_=json_payload + ) + assert sql_lab_mock.called + self.assertEqual(sql_lab_mock.call_args[0][1], "SELECT '1970-01-01' as test") + + self.delete_fake_db_for_macros() + + def test_fetch_datasource_metadata(self): + self.login(username="admin") + url = "/superset/fetch_datasource_metadata?" "datasourceKey=1__table" + resp = self.get_json_resp(url) + keys = [ + "name", + "type", + "order_by_choices", + "granularity_sqla", + "time_grain_sqla", + "id", + ] + for k in keys: + self.assertIn(k, resp.keys()) + + @staticmethod + def _get_user_activity_endpoints(user: str): + userid = security_manager.find_user(user).id + return ( + f"/superset/recent_activity/{userid}/", + f"/superset/created_slices/{userid}/", + f"/superset/created_dashboards/{userid}/", + f"/superset/fave_slices/{userid}/", + f"/superset/fave_dashboards/{userid}/", + f"/superset/user_slices/{userid}/", + f"/superset/fave_dashboards_by_username/{user}/", + ) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_user_profile(self, username="admin"): + self.login(username=username) + slc = self.get_slice("Girls", db.session) + + # Setting some faves + url = f"/superset/favstar/Slice/{slc.id}/select/" + resp = self.get_json_resp(url) + self.assertEqual(resp["count"], 1) + + dash = db.session.query(Dashboard).filter_by(slug="births").first() + url = f"/superset/favstar/Dashboard/{dash.id}/select/" + resp = self.get_json_resp(url) + self.assertEqual(resp["count"], 1) + + resp = self.get_resp(f"/superset/profile/{username}/") + self.assertIn('"app"', resp) + + for endpoint in self._get_user_activity_endpoints(username): + data = self.get_json_resp(endpoint) + self.assertNotIn("message", data) + + def test_user_profile_optional_access(self): + self.login(username="gamma") + resp = self.client.get(f"/superset/profile/admin/") + self.assertEqual(resp.status_code, 200) + + app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] = False + resp = self.client.get(f"/superset/profile/admin/") + self.assertEqual(resp.status_code, 403) + + # Restore config + 
app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] = True + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_user_activity_access(self, username="gamma"): + self.login(username=username) + + # accessing own and other users' activity is allowed by default + for user in ("admin", "gamma"): + for endpoint in self._get_user_activity_endpoints(user): + resp = self.client.get(endpoint) + assert resp.status_code == 200 + + # disabling flag will block access to other users' activity data + access_flag = app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] + app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] = False + for user in ("admin", "gamma"): + for endpoint in self._get_user_activity_endpoints(user): + resp = self.client.get(endpoint) + expected_status_code = 200 if user == username else 403 + assert resp.status_code == expected_status_code + + # restore flag + app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] = access_flag + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_slice_id_is_always_logged_correctly_on_web_request(self): + # explore case + self.login("admin") + slc = db.session.query(Slice).filter_by(slice_name="Girls").one() + qry = db.session.query(models.Log).filter_by(slice_id=slc.id) + self.get_resp(slc.slice_url) + self.assertEqual(1, qry.count()) + + def create_sample_csvfile(self, filename: str, content: List[str]) -> None: + with open(filename, "w+") as test_file: + for l in content: + test_file.write(f"{l}\n") + + def create_sample_excelfile(self, filename: str, content: Dict[str, str]) -> None: + pd.DataFrame(content).to_excel(filename) + + def enable_csv_upload(self, database: models.Database) -> None: + """Enables csv upload in the given database.""" + database.allow_file_upload = True + db.session.commit() + add_datasource_page = self.get_resp("/databaseview/list/") + self.assertIn("Upload a CSV", add_datasource_page) + + form_get = self.get_resp("/csvtodatabaseview/form") + self.assertIn("CSV to Database configuration", form_get) + + def test_dataframe_timezone(self): + tz = pytz.FixedOffset(60) + data = [ + (datetime.datetime(2017, 11, 18, 21, 53, 0, 219225, tzinfo=tz),), + (datetime.datetime(2017, 11, 18, 22, 6, 30, tzinfo=tz),), + ] + results = SupersetResultSet(list(data), [["data"]], BaseEngineSpec) + df = results.to_pandas_df() + data = dataframe.df_to_records(df) + json_str = json.dumps(data, default=utils.pessimistic_json_iso_dttm_ser) + self.assertDictEqual( + data[0], {"data": pd.Timestamp("2017-11-18 21:53:00.219225+0100", tz=tz)} + ) + self.assertDictEqual( + data[1], {"data": pd.Timestamp("2017-11-18 22:06:30+0100", tz=tz)} + ) + self.assertEqual( + json_str, + '[{"data": "2017-11-18T21:53:00.219225+01:00"}, {"data": "2017-11-18T22:06:30+01:00"}]', + ) + + def test_mssql_engine_spec_pymssql(self): + # Test for case when tuple is returned (pymssql) + data = [ + (1, 1, datetime.datetime(2017, 10, 19, 23, 39, 16, 660000)), + (2, 2, datetime.datetime(2018, 10, 19, 23, 39, 16, 660000)), + ] + results = SupersetResultSet( + list(data), [["col1"], ["col2"], ["col3"]], MssqlEngineSpec + ) + df = results.to_pandas_df() + data = dataframe.df_to_records(df) + self.assertEqual(len(data), 2) + self.assertEqual( + data[0], + {"col1": 1, "col2": 1, "col3": pd.Timestamp("2017-10-19 23:39:16.660000")}, + ) + + def test_comments_in_sqlatable_query(self): + clean_query = "SELECT '/* val 1 */' as c1, '-- val 2' as c2 FROM tbl" + commented_query = "/* comment 1 */" + clean_query + "-- comment 2" + table = SqlaTable( + 
table_name="test_comments_in_sqlatable_query_table", + sql=commented_query, + database=get_example_database(), + ) + rendered_query = str(table.get_from_clause()[0]) + self.assertEqual(clean_query, rendered_query) + + def test_slice_payload_no_datasource(self): + self.login(username="admin") + data = self.get_json_resp("/superset/explore_json/", raise_on_error=False) + + self.assertEqual( + data["errors"][0]["message"], + "The dataset associated with this chart no longer exists", + ) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_explore_json(self): + tbl_id = self.table_ids.get("birth_names") + form_data = { + "datasource": f"{tbl_id}__table", + "viz_type": "dist_bar", + "granularity_sqla": "ds", + "time_range": "No filter", + "metrics": ["count"], + "adhoc_filters": [], + "groupby": ["gender"], + "row_limit": 100, + } + self.login(username="admin") + rv = self.client.post( + "/superset/explore_json/", + data={"form_data": json.dumps(form_data)}, + ) + data = json.loads(rv.data.decode("utf-8")) + + self.assertEqual(rv.status_code, 200) + self.assertEqual(data["rowcount"], 2) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_explore_json_dist_bar_order(self): + tbl_id = self.table_ids.get("birth_names") + form_data = { + "datasource": f"{tbl_id}__table", + "viz_type": "dist_bar", + "url_params": {}, + "granularity_sqla": "ds", + "time_range": 'DATEADD(DATETIME("2021-01-22T00:00:00"), -100, year) : 2021-01-22T00:00:00', + "metrics": [ + { + "expressionType": "SIMPLE", + "column": { + "id": 334, + "column_name": "name", + "verbose_name": "null", + "description": "null", + "expression": "", + "filterable": True, + "groupby": True, + "is_dttm": False, + "type": "VARCHAR(255)", + "python_date_format": "null", + }, + "aggregate": "COUNT", + "sqlExpression": "null", + "isNew": False, + "hasCustomLabel": False, + "label": "COUNT(name)", + "optionName": "metric_xdzsijn42f9_khi4h3v3vci", + }, + { + "expressionType": "SIMPLE", + "column": { + "id": 332, + "column_name": "ds", + "verbose_name": "null", + "description": "null", + "expression": "", + "filterable": True, + "groupby": True, + "is_dttm": True, + "type": "TIMESTAMP WITHOUT TIME ZONE", + "python_date_format": "null", + }, + "aggregate": "COUNT", + "sqlExpression": "null", + "isNew": False, + "hasCustomLabel": False, + "label": "COUNT(ds)", + "optionName": "metric_80g1qb9b6o7_ci5vquydcbe", + }, + ], + "order_desc": True, + "adhoc_filters": [], + "groupby": ["name"], + "columns": [], + "row_limit": 10, + "color_scheme": "supersetColors", + "label_colors": {}, + "show_legend": True, + "y_axis_format": "SMART_NUMBER", + "bottom_margin": "auto", + "x_ticks_layout": "auto", + } + + self.login(username="admin") + rv = self.client.post( + "/superset/explore_json/", + data={"form_data": json.dumps(form_data)}, + ) + data = json.loads(rv.data.decode("utf-8")) + + resp = self.run_sql( + """ + SELECT count(name) AS count_name, count(ds) AS count_ds + FROM birth_names + WHERE ds >= '1921-01-22 00:00:00.000000' AND ds < '2021-01-22 00:00:00.000000' + GROUP BY name + ORDER BY count_name DESC + LIMIT 10; + """, + client_id="client_id_1", + username="admin", + ) + count_ds = [] + count_name = [] + for series in data["data"]: + if series["key"] == "COUNT(ds)": + count_ds = series["values"] + if series["key"] == "COUNT(name)": + count_name = series["values"] + for expected, actual_ds, actual_name in zip(resp["data"], count_ds, count_name): + assert expected["count_name"] == actual_name["y"] + 
assert expected["count_ds"] == actual_ds["y"] + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + @mock.patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + GLOBAL_ASYNC_QUERIES=True, + ) + def test_explore_json_async(self): + tbl_id = self.table_ids.get("birth_names") + form_data = { + "datasource": f"{tbl_id}__table", + "viz_type": "dist_bar", + "granularity_sqla": "ds", + "time_range": "No filter", + "metrics": ["count"], + "adhoc_filters": [], + "groupby": ["gender"], + "row_limit": 100, + } + async_query_manager.init_app(app) + self.login(username="admin") + rv = self.client.post( + "/superset/explore_json/", + data={"form_data": json.dumps(form_data)}, + ) + data = json.loads(rv.data.decode("utf-8")) + keys = list(data.keys()) + + self.assertEqual(rv.status_code, 202) + self.assertCountEqual( + keys, ["channel_id", "job_id", "user_id", "status", "errors", "result_url"] + ) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + @mock.patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + GLOBAL_ASYNC_QUERIES=True, + ) + def test_explore_json_async_results_format(self): + tbl_id = self.table_ids.get("birth_names") + form_data = { + "datasource": f"{tbl_id}__table", + "viz_type": "dist_bar", + "granularity_sqla": "ds", + "time_range": "No filter", + "metrics": ["count"], + "adhoc_filters": [], + "groupby": ["gender"], + "row_limit": 100, + } + async_query_manager.init_app(app) + self.login(username="admin") + rv = self.client.post( + "/superset/explore_json/?results=true", + data={"form_data": json.dumps(form_data)}, + ) + self.assertEqual(rv.status_code, 200) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + @mock.patch( + "superset.utils.cache_manager.CacheManager.cache", + new_callable=mock.PropertyMock, + ) + @mock.patch("superset.viz.BaseViz.force_cached", new_callable=mock.PropertyMock) + def test_explore_json_data(self, mock_force_cached, mock_cache): + tbl_id = self.table_ids.get("birth_names") + form_data = dict( + { + "form_data": { + "datasource": f"{tbl_id}__table", + "viz_type": "dist_bar", + "granularity_sqla": "ds", + "time_range": "No filter", + "metrics": ["count"], + "adhoc_filters": [], + "groupby": ["gender"], + "row_limit": 100, + } + } + ) + + class MockCache: + def get(self, key): + return form_data + + def set(self): + return None + + mock_cache.return_value = MockCache() + mock_force_cached.return_value = False + + self.login(username="admin") + rv = self.client.get("/superset/explore_json/data/valid-cache-key") + data = json.loads(rv.data.decode("utf-8")) + + self.assertEqual(rv.status_code, 200) + self.assertEqual(data["rowcount"], 2) + + @mock.patch( + "superset.utils.cache_manager.CacheManager.cache", + new_callable=mock.PropertyMock, + ) + def test_explore_json_data_no_login(self, mock_cache): + tbl_id = self.table_ids.get("birth_names") + form_data = dict( + { + "form_data": { + "datasource": f"{tbl_id}__table", + "viz_type": "dist_bar", + "granularity_sqla": "ds", + "time_range": "No filter", + "metrics": ["count"], + "adhoc_filters": [], + "groupby": ["gender"], + "row_limit": 100, + } + } + ) + + class MockCache: + def get(self, key): + return form_data + + def set(self): + return None + + mock_cache.return_value = MockCache() + + rv = self.client.get("/superset/explore_json/data/valid-cache-key") + self.assertEqual(rv.status_code, 401) + + def test_explore_json_data_invalid_cache_key(self): + self.login(username="admin") + cache_key = 
"invalid-cache-key" + rv = self.client.get(f"/superset/explore_json/data/{cache_key}") + data = json.loads(rv.data.decode("utf-8")) + + self.assertEqual(rv.status_code, 404) + self.assertEqual(data["error"], "Cached data not found") + + @mock.patch( + "superset.security.SupersetSecurityManager.get_schemas_accessible_by_user" + ) + @mock.patch("superset.security.SupersetSecurityManager.can_access_database") + @mock.patch("superset.security.SupersetSecurityManager.can_access_all_datasources") + def test_schemas_access_for_csv_upload_endpoint( + self, + mock_can_access_all_datasources, + mock_can_access_database, + mock_schemas_accessible, + ): + self.login(username="admin") + dbobj = self.create_fake_db() + mock_can_access_all_datasources.return_value = False + mock_can_access_database.return_value = False + mock_schemas_accessible.return_value = ["this_schema_is_allowed_too"] + data = self.get_json_resp( + url="/superset/schemas_access_for_file_upload?db_id={db_id}".format( + db_id=dbobj.id + ) + ) + assert data == ["this_schema_is_allowed_too"] + self.delete_fake_db() + + @mock.patch("superset.views.core.results_backend_use_msgpack", False) + def test_display_limit(self): + from superset.views import core + + core.results_backend = mock.Mock() + self.login() + + data = [{"col_0": i} for i in range(100)] + payload = { + "status": QueryStatus.SUCCESS, + "query": {"rows": 100}, + "data": data, + } + # limit results to 1 + expected_key = {"status": "success", "query": {"rows": 100}, "data": data} + limited_data = data[:1] + expected_limited = { + "status": "success", + "query": {"rows": 100}, + "data": limited_data, + "displayLimitReached": True, + } + + query_mock = mock.Mock() + query_mock.sql = "SELECT *" + query_mock.database = 1 + query_mock.schema = "superset" + + # do not apply msgpack serialization + use_msgpack = app.config["RESULTS_BACKEND_USE_MSGPACK"] + app.config["RESULTS_BACKEND_USE_MSGPACK"] = False + serialized_payload = sql_lab._serialize_payload(payload, False) + compressed = utils.zlib_compress(serialized_payload) + core.results_backend.get.return_value = compressed + + with mock.patch("superset.views.core.db") as mock_superset_db: + mock_superset_db.session.query().filter_by().one_or_none.return_value = ( + query_mock + ) + # get all results + result_key = json.loads(self.get_resp("/superset/results/key/")) + result_limited = json.loads(self.get_resp("/superset/results/key/?rows=1")) + + self.assertEqual(result_key, expected_key) + self.assertEqual(result_limited, expected_limited) + + app.config["RESULTS_BACKEND_USE_MSGPACK"] = use_msgpack + + def test_results_default_deserialization(self): + use_new_deserialization = False + data = [("a", 4, 4.0, "2019-08-18T16:39:16.660000")] + cursor_descr = ( + ("a", "string"), + ("b", "int"), + ("c", "float"), + ("d", "datetime"), + ) + db_engine_spec = BaseEngineSpec() + results = SupersetResultSet(data, cursor_descr, db_engine_spec) + query = { + "database_id": 1, + "sql": "SELECT * FROM birth_names LIMIT 100", + "status": QueryStatus.PENDING, + } + ( + serialized_data, + selected_columns, + all_columns, + expanded_columns, + ) = sql_lab._serialize_and_expand_data( + results, db_engine_spec, use_new_deserialization + ) + payload = { + "query_id": 1, + "status": QueryStatus.SUCCESS, + "state": QueryStatus.SUCCESS, + "data": serialized_data, + "columns": all_columns, + "selected_columns": selected_columns, + "expanded_columns": expanded_columns, + "query": query, + } + + serialized_payload = sql_lab._serialize_payload( + payload, 
use_new_deserialization + ) + self.assertIsInstance(serialized_payload, str) + + query_mock = mock.Mock() + deserialized_payload = superset.views.utils._deserialize_results_payload( + serialized_payload, query_mock, use_new_deserialization + ) + + self.assertDictEqual(deserialized_payload, payload) + query_mock.assert_not_called() + + def test_results_msgpack_deserialization(self): + use_new_deserialization = True + data = [("a", 4, 4.0, "2019-08-18T16:39:16.660000")] + cursor_descr = ( + ("a", "string"), + ("b", "int"), + ("c", "float"), + ("d", "datetime"), + ) + db_engine_spec = BaseEngineSpec() + results = SupersetResultSet(data, cursor_descr, db_engine_spec) + query = { + "database_id": 1, + "sql": "SELECT * FROM birth_names LIMIT 100", + "status": QueryStatus.PENDING, + } + ( + serialized_data, + selected_columns, + all_columns, + expanded_columns, + ) = sql_lab._serialize_and_expand_data( + results, db_engine_spec, use_new_deserialization + ) + payload = { + "query_id": 1, + "status": QueryStatus.SUCCESS, + "state": QueryStatus.SUCCESS, + "data": serialized_data, + "columns": all_columns, + "selected_columns": selected_columns, + "expanded_columns": expanded_columns, + "query": query, + } + + serialized_payload = sql_lab._serialize_payload( + payload, use_new_deserialization + ) + self.assertIsInstance(serialized_payload, bytes) + + with mock.patch.object( + db_engine_spec, "expand_data", wraps=db_engine_spec.expand_data + ) as expand_data: + query_mock = mock.Mock() + query_mock.database.db_engine_spec.expand_data = expand_data + + deserialized_payload = superset.views.utils._deserialize_results_payload( + serialized_payload, query_mock, use_new_deserialization + ) + df = results.to_pandas_df() + payload["data"] = dataframe.df_to_records(df) + + self.assertDictEqual(deserialized_payload, payload) + expand_data.assert_called_once() + + @mock.patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + {"FOO": lambda x: 1}, + clear=True, + ) + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def test_feature_flag_serialization(self): + """ + Functions in feature flags don't break bootstrap data serialization. 
+        """
+        # feature flags are cached
+        cache_manager.cache.clear()
+        self.login()
+
+        encoded = json.dumps(
+            {"FOO": lambda x: 1, "super": "set"},
+            default=utils.pessimistic_json_iso_dttm_ser,
+        )
+        html_string = (
+            html.escape(encoded, quote=False)
+            .replace("'", "&#39;")
+            .replace('"', "&#34;")
+        )
+        dash_id = db.session.query(Dashboard.id).first()[0]
+        tbl_id = self.table_ids.get("wb_health_population")
+        urls = [
+            "/superset/sqllab",
+            "/superset/welcome",
+            f"/superset/dashboard/{dash_id}/",
+            "/superset/profile/admin/",
+            f"/explore/?datasource_type=table&datasource_id={tbl_id}",
+        ]
+        for url in urls:
+            data = self.get_resp(url)
+            self.assertTrue(html_string in data)
+
+    @mock.patch.dict(
+        "superset.extensions.feature_flag_manager._feature_flags",
+        {"SQLLAB_BACKEND_PERSISTENCE": True},
+        clear=True,
+    )
+    def test_sqllab_backend_persistence_payload(self):
+        username = "admin"
+        self.login(username)
+        user_id = security_manager.find_user(username).id
+
+        # create a tab
+        data = {
+            "queryEditor": json.dumps(
+                {
+                    "title": "Untitled Query 1",
+                    "dbId": 1,
+                    "schema": None,
+                    "autorun": False,
+                    "sql": "SELECT ...",
+                    "queryLimit": 1000,
+                }
+            )
+        }
+        resp = self.get_json_resp("/tabstateview/", data=data)
+        tab_state_id = resp["id"]
+
+        # run a query in the created tab
+        self.run_sql(
+            "SELECT name FROM birth_names",
+            "client_id_1",
+            username=username,
+            raise_on_error=True,
+            sql_editor_id=str(tab_state_id),
+        )
+        # run an orphan query (no tab)
+        self.run_sql(
+            "SELECT name FROM birth_names",
+            "client_id_2",
+            username=username,
+            raise_on_error=True,
+        )
+
+        # we should have only 1 query returned, since the second one is not
+        # associated with any tabs
+        payload = views.Superset._get_sqllab_tabs(user_id=user_id)
+        self.assertEqual(len(payload["queries"]), 1)
+
+    @mock.patch.dict(
+        "superset.extensions.feature_flag_manager._feature_flags",
+        {"SQLLAB_BACKEND_PERSISTENCE": True},
+        clear=True,
+    )
+    def test_tabstate_with_name(self):
+        """
+        The tabstateview GET endpoint should accept either name or title
+        for backward compatibility.
+        """
+        username = "admin"
+        self.login(username)
+
+        # create a tab
+        data = {
+            "queryEditor": json.dumps(
+                {
+                    "name": "Untitled Query foo",
+                    "dbId": 1,
+                    "schema": None,
+                    "autorun": False,
+                    "sql": "SELECT ...",
+                    "queryLimit": 1000,
+                }
+            )
+        }
+        resp = self.get_json_resp("/tabstateview/", data=data)
+        tab_state_id = resp["id"]
+        payload = self.get_json_resp(f"/tabstateview/{tab_state_id}")
+
+        self.assertEqual(payload["label"], "Untitled Query foo")
+
+    def test_virtual_table_explore_visibility(self):
+        # test that default visibility is set to True
+        database = superset.utils.database.get_example_database()
+        self.assertEqual(database.allows_virtual_table_explore, True)
+
+        # test that visibility is disabled when extra is set to False
+        extra = database.get_extra()
+        extra["allows_virtual_table_explore"] = False
+        database.extra = json.dumps(extra)
+        self.assertEqual(database.allows_virtual_table_explore, False)
+
+        # test that visibility is enabled when extra is set to True
+        extra = database.get_extra()
+        extra["allows_virtual_table_explore"] = True
+        database.extra = json.dumps(extra)
+        self.assertEqual(database.allows_virtual_table_explore, True)
+
+        # test that visibility is not broken with bad values
+        extra = database.get_extra()
+        extra["allows_virtual_table_explore"] = "trash value"
+        database.extra = json.dumps(extra)
+        self.assertEqual(database.allows_virtual_table_explore, True)
+
+    def test_data_preview_visibility(self):
+        # 
test that default visibility is allowed + database = utils.get_example_database() + self.assertEqual(database.disable_data_preview, False) + + # test that visibility is disabled when extra is set to true + extra = database.get_extra() + extra["disable_data_preview"] = True + database.extra = json.dumps(extra) + self.assertEqual(database.disable_data_preview, True) + + # test that visibility is enabled when extra is set to false + extra = database.get_extra() + extra["disable_data_preview"] = False + database.extra = json.dumps(extra) + self.assertEqual(database.disable_data_preview, False) + + # test that visibility is not broken with bad values + extra = database.get_extra() + extra["disable_data_preview"] = "trash value" + database.extra = json.dumps(extra) + self.assertEqual(database.disable_data_preview, False) + + def test_explore_database_id(self): + database = superset.utils.database.get_example_database() + explore_database = superset.utils.database.get_example_database() + + # test that explore_database_id is the regular database + # id if none is set in the extra + self.assertEqual(database.explore_database_id, database.id) + + # test that explore_database_id is correct if the extra is set + extra = database.get_extra() + extra["explore_database_id"] = explore_database.id + database.extra = json.dumps(extra) + self.assertEqual(database.explore_database_id, explore_database.id) + + def test_get_column_names_from_metric(self): + simple_metric = { + "expressionType": utils.AdhocMetricExpressionType.SIMPLE.value, + "column": {"column_name": "my_col"}, + "aggregate": "SUM", + "label": "My Simple Label", + } + assert utils.get_column_name_from_metric(simple_metric) == "my_col" + + sql_metric = { + "expressionType": utils.AdhocMetricExpressionType.SQL.value, + "sqlExpression": "SUM(my_label)", + "label": "My SQL Label", + } + assert utils.get_column_name_from_metric(sql_metric) is None + assert utils.get_column_names_from_metrics([simple_metric, sql_metric]) == [ + "my_col" + ] + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + @mock.patch("superset.models.core.DB_CONNECTION_MUTATOR") + def test_explore_injected_exceptions(self, mock_db_connection_mutator): + """ + Handle injected exceptions from the db mutator + """ + # Assert we can handle a custom exception at the mutator level + exception = SupersetException("Error message") + mock_db_connection_mutator.side_effect = exception + slice = db.session.query(Slice).first() + url = f"/explore/?form_data=%7B%22slice_id%22%3A%20{slice.id}%7D" + + self.login() + data = self.get_resp(url) + self.assertIn("Error message", data) + + # Assert we can handle a driver exception at the mutator level + exception = SQLAlchemyError("Error message") + mock_db_connection_mutator.side_effect = exception + slice = db.session.query(Slice).first() + url = f"/explore/?form_data=%7B%22slice_id%22%3A%20{slice.id}%7D" + + self.login() + data = self.get_resp(url) + self.assertIn("Error message", data) + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + @mock.patch("superset.models.core.DB_CONNECTION_MUTATOR") + def test_dashboard_injected_exceptions(self, mock_db_connection_mutator): + """ + Handle injected exceptions from the db mutator + """ + + # Assert we can handle a custom exception at the mutator level + exception = SupersetException("Error message") + mock_db_connection_mutator.side_effect = exception + dash = db.session.query(Dashboard).first() + url = f"/superset/dashboard/{dash.id}/" + + self.login() + data = 
self.get_resp(url) + self.assertIn("Error message", data) + + # Assert we can handle a driver exception at the mutator level + exception = SQLAlchemyError("Error message") + mock_db_connection_mutator.side_effect = exception + dash = db.session.query(Dashboard).first() + url = f"/superset/dashboard/{dash.id}/" + + self.login() + data = self.get_resp(url) + self.assertIn("Error message", data) + + @mock.patch("superset.sql_lab.cancel_query") + @mock.patch("superset.views.core.db.session") + def test_stop_query_not_implemented( + self, mock_superset_db_session, mock_sql_lab_cancel_query + ): + """ + Handles stop query when the DB engine spec does not + have a cancel query method. + """ + form_data = {"client_id": "foo"} + query_mock = mock.Mock() + query_mock.client_id = "foo" + query_mock.status = QueryStatus.RUNNING + self.login(username="admin") + mock_superset_db_session.query().filter_by().one().return_value = query_mock + mock_sql_lab_cancel_query.return_value = False + rv = self.client.post( + "/superset/stop_query/", + data={"form_data": json.dumps(form_data)}, + ) + + assert rv.status_code == 422 + + @pytest.mark.usefixtures("load_energy_table_with_slice") + @mock.patch("superset.explore.form_data.commands.create.CreateFormDataCommand.run") + def test_explore_redirect(self, mock_command: mock.Mock): + self.login(username="admin") + random_key = "random_key" + mock_command.return_value = random_key + slice_name = f"Energy Sankey" + slice_id = self.get_slice(slice_name, db.session).id + form_data = {"slice_id": slice_id, "viz_type": "line", "datasource": "1__table"} + rv = self.client.get( + f"/superset/explore/?form_data={quote(json.dumps(form_data))}" + ) + self.assertEqual( + rv.headers["Location"], f"/explore/?form_data_key={random_key}" + ) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_has_table_by_name(self): + if backend() in ("sqlite", "mysql"): + return + example_db = superset.utils.database.get_example_database() + assert ( + example_db.has_table_by_name(table_name="birth_names", schema="public") + is True + ) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/integration_tests/css_templates/__init__.py b/tests/integration_tests/css_templates/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/css_templates/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/tests/integration_tests/css_templates/api_tests.py b/tests/integration_tests/css_templates/api_tests.py new file mode 100644 index 0000000000000..b28cca955ca8d --- /dev/null +++ b/tests/integration_tests/css_templates/api_tests.py @@ -0,0 +1,382 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# isort:skip_file +"""Unit tests for Superset""" +import json +import pytest +import prison +from sqlalchemy.sql import func + +import tests.integration_tests.test_app +from superset import db +from superset.models.core import CssTemplate +from superset.utils.database import get_example_database + +from tests.integration_tests.base_tests import SupersetTestCase + + +CSS_TEMPLATES_FIXTURE_COUNT = 5 + + +class TestCssTemplateApi(SupersetTestCase): + def insert_css_template( + self, + template_name: str, + css: str, + created_by_username: str = "admin", + ) -> CssTemplate: + admin = self.get_user(created_by_username) + css_template = CssTemplate( + template_name=template_name, css=css, created_by=admin, changed_by=admin + ) + db.session.add(css_template) + db.session.commit() + return css_template + + @pytest.fixture() + def create_css_templates(self): + with self.create_app().app_context(): + css_templates = [] + for cx in range(CSS_TEMPLATES_FIXTURE_COUNT): + css_templates.append( + self.insert_css_template( + template_name=f"template_name{cx}", css=f"css{cx}" + ) + ) + yield css_templates + + # rollback changes + for css_template in css_templates: + db.session.delete(css_template) + db.session.commit() + + @pytest.mark.usefixtures("create_css_templates") + def test_get_list_css_template(self): + """ + CSS Template API: Test get list css template + """ + css_templates = db.session.query(CssTemplate).all() + + self.login(username="admin") + uri = f"api/v1/css_template/" + rv = self.get_assert_metric(uri, "get_list") + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == len(css_templates) + expected_columns = [ + "changed_by", + "changed_on_delta_humanized", + "created_by", + "created_on", + "css", + "id", + "template_name", + ] + result_columns = list(data["result"][0].keys()) + result_columns.sort() + assert expected_columns == result_columns + created_by_columns = list(data["result"][0]["created_by"].keys()) + created_by_columns.sort() + assert ["first_name", "id", "last_name"] == created_by_columns + changed_by_columns = list(data["result"][0]["changed_by"].keys()) + changed_by_columns.sort() + assert ["first_name", "id", "last_name"] == changed_by_columns + + @pytest.mark.usefixtures("create_css_templates") + def test_get_list_sort_css_template(self): + """ + CSS Template API: Test get list and sort CSS Template + """ + css_templates = ( + db.session.query(CssTemplate) + 
.order_by(CssTemplate.template_name.asc()) + .all() + ) + self.login(username="admin") + query_string = {"order_column": "template_name", "order_direction": "asc"} + uri = f"api/v1/css_template/?q={prison.dumps(query_string)}" + rv = self.get_assert_metric(uri, "get_list") + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == len(css_templates) + for i, query in enumerate(css_templates): + assert query.template_name == data["result"][i]["template_name"] + + @pytest.mark.usefixtures("create_css_templates") + def test_get_list_custom_filter_css_template(self): + """ + CSS Template API: Test get list and custom filter + """ + self.login(username="admin") + + all_css_templates = ( + db.session.query(CssTemplate).filter(CssTemplate.css.ilike("%css2%")).all() + ) + query_string = { + "filters": [ + { + "col": "template_name", + "opr": "css_template_all_text", + "value": "css2", + } + ], + } + uri = f"api/v1/css_template/?q={prison.dumps(query_string)}" + rv = self.get_assert_metric(uri, "get_list") + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == len(all_css_templates) + + all_css_templates = ( + db.session.query(CssTemplate) + .filter(CssTemplate.template_name.ilike("%template_name3%")) + .all() + ) + query_string = { + "filters": [ + { + "col": "template_name", + "opr": "css_template_all_text", + "value": "template_name3", + } + ], + } + uri = f"api/v1/css_template/?q={prison.dumps(query_string)}" + rv = self.get_assert_metric(uri, "get_list") + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == len(all_css_templates) + + def test_info_css_template(self): + """ + CssTemplate API: Test info + """ + self.login(username="admin") + uri = f"api/v1/css_template/_info" + rv = self.get_assert_metric(uri, "info") + assert rv.status_code == 200 + + def test_info_security_css_template(self): + """ + CssTemplate API: Test info security + """ + self.login(username="admin") + params = {"keys": ["permissions"]} + uri = f"api/v1/css_template/_info?q={prison.dumps(params)}" + rv = self.get_assert_metric(uri, "info") + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 200 + assert "can_read" in data["permissions"] + assert "can_write" in data["permissions"] + assert len(data["permissions"]) == 2 + + @pytest.mark.usefixtures("create_css_templates") + def test_get_css_template(self): + """ + CSS Template API: Test get CSS Template + """ + css_template = ( + db.session.query(CssTemplate) + .filter(CssTemplate.template_name == "template_name1") + .one_or_none() + ) + self.login(username="admin") + uri = f"api/v1/css_template/{css_template.id}" + rv = self.get_assert_metric(uri, "get") + assert rv.status_code == 200 + + expected_result = { + "id": css_template.id, + "template_name": "template_name1", + "css": "css1", + "created_by": { + "first_name": css_template.created_by.first_name, + "id": css_template.created_by.id, + "last_name": css_template.created_by.last_name, + }, + } + data = json.loads(rv.data.decode("utf-8")) + for key, value in data["result"].items(): + assert value == expected_result[key] + + @pytest.mark.usefixtures("create_css_templates") + def test_get_css_template_not_found(self): + """ + CSS Template API: Test get CSS Template not found + """ + max_id = db.session.query(func.max(CssTemplate.id)).scalar() + self.login(username="admin") + uri = f"api/v1/css_template/{max_id + 1}" + rv = self.get_assert_metric(uri, "get") + assert 
rv.status_code == 404 + + def test_create_css_template(self): + """ + CSS Template API: Test create + """ + post_data = { + "template_name": "template_name_create", + "css": "css_create", + } + + self.login(username="admin") + uri = f"api/v1/css_template/" + rv = self.post_assert_metric(uri, post_data, "post") + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 201 + + css_template_id = data.get("id") + model = db.session.query(CssTemplate).get(css_template_id) + for key in post_data: + assert getattr(model, key) == data["result"][key] + + # Rollback changes + db.session.delete(model) + db.session.commit() + + @pytest.mark.usefixtures("create_css_templates") + def test_update_css_template(self): + """ + CSS Template API: Test update + """ + css_template = ( + db.session.query(CssTemplate) + .filter(CssTemplate.template_name == "template_name1") + .all()[0] + ) + + put_data = { + "template_name": "template_name_changed", + "css": "css_changed", + } + + self.login(username="admin") + uri = f"api/v1/css_template/{css_template.id}" + rv = self.put_assert_metric(uri, put_data, "put") + assert rv.status_code == 200 + + model = db.session.query(CssTemplate).get(css_template.id) + assert model.template_name == "template_name_changed" + assert model.css == "css_changed" + + @pytest.mark.usefixtures("create_css_templates") + def test_update_css_template_not_found(self): + """ + CSS Template API: Test update not found + """ + max_id = db.session.query(func.max(CssTemplate.id)).scalar() + self.login(username="admin") + + put_data = { + "template_name": "template_name_changed", + "css": "css_changed", + } + + uri = f"api/v1/css_template/{max_id + 1}" + rv = self.put_assert_metric(uri, put_data, "put") + assert rv.status_code == 404 + + @pytest.mark.usefixtures("create_css_templates") + def test_delete_css_template(self): + """ + CSS Template API: Test delete + """ + css_template = ( + db.session.query(CssTemplate) + .filter(CssTemplate.template_name == "template_name1") + .one_or_none() + ) + + self.login(username="admin") + uri = f"api/v1/css_template/{css_template.id}" + rv = self.delete_assert_metric(uri, "delete") + assert rv.status_code == 200 + + model = db.session.query(CssTemplate).get(css_template.id) + assert model is None + + @pytest.mark.usefixtures("create_css_templates") + def test_delete_css_template_not_found(self): + """ + CSS Template API: Test delete not found + """ + max_id = db.session.query(func.max(CssTemplate.id)).scalar() + self.login(username="admin") + uri = f"api/v1/css_template/{max_id + 1}" + rv = self.delete_assert_metric(uri, "delete") + assert rv.status_code == 404 + + @pytest.mark.usefixtures("create_css_templates") + def test_delete_bulk_css_templates(self): + """ + CSS Template API: Test delete bulk + """ + css_templates = db.session.query(CssTemplate).all() + css_template_ids = [css_template.id for css_template in css_templates] + + self.login(username="admin") + uri = f"api/v1/css_template/?q={prison.dumps(css_template_ids)}" + rv = self.delete_assert_metric(uri, "bulk_delete") + assert rv.status_code == 200 + response = json.loads(rv.data.decode("utf-8")) + expected_response = { + "message": f"Deleted {len(css_template_ids)} css templates" + } + assert response == expected_response + css_templates = db.session.query(CssTemplate).all() + assert css_templates == [] + + @pytest.mark.usefixtures("create_css_templates") + def test_delete_one_bulk_css_templates(self): + """ + CSS Template API: Test delete one in bulk + """ + css_template = 
db.session.query(CssTemplate).first() + css_template_ids = [css_template.id] + + self.login(username="admin") + uri = f"api/v1/css_template/?q={prison.dumps(css_template_ids)}" + rv = self.delete_assert_metric(uri, "bulk_delete") + assert rv.status_code == 200 + response = json.loads(rv.data.decode("utf-8")) + expected_response = {"message": f"Deleted {len(css_template_ids)} css template"} + assert response == expected_response + css_template_ = db.session.query(CssTemplate).get(css_template_ids[0]) + assert css_template_ is None + + def test_delete_bulk_css_template_bad_request(self): + """ + CSS Template API: Test delete bulk bad request + """ + css_template_ids = [1, "a"] + self.login(username="admin") + uri = f"api/v1/css_template/?q={prison.dumps(css_template_ids)}" + rv = self.delete_assert_metric(uri, "bulk_delete") + assert rv.status_code == 400 + + @pytest.mark.usefixtures("create_css_templates") + def test_delete_bulk_css_template_not_found(self): + """ + CSS Template API: Test delete bulk not found + """ + max_id = db.session.query(func.max(CssTemplate.id)).scalar() + + css_template_ids = [max_id + 1, max_id + 2] + self.login(username="admin") + uri = f"api/v1/css_template/?q={prison.dumps(css_template_ids)}" + rv = self.delete_assert_metric(uri, "bulk_delete") + assert rv.status_code == 404 diff --git a/tests/integration_tests/csv_upload_tests.py b/tests/integration_tests/csv_upload_tests.py new file mode 100644 index 0000000000000..3e0200d18a26b --- /dev/null +++ b/tests/integration_tests/csv_upload_tests.py @@ -0,0 +1,516 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# isort:skip_file +"""Unit tests for Superset CSV upload""" +import json +import logging +import os +import shutil +from typing import Dict, Optional + +from unittest import mock + +import pandas as pd +import pytest + +import superset.utils.database +from superset.sql_parse import Table +from tests.integration_tests.conftest import ADMIN_SCHEMA_NAME +from superset import db +from superset import security_manager +from superset.models.core import Database +from superset.utils import core as utils +from tests.integration_tests.test_app import app, login +from tests.integration_tests.base_tests import get_resp, SupersetTestCase + +logger = logging.getLogger(__name__) + + +test_client = app.test_client() + +CSV_UPLOAD_DATABASE = "csv_explore_db" +CSV_FILENAME1 = "testCSV1.csv" +CSV_FILENAME2 = "testCSV2.csv" +EXCEL_FILENAME = "testExcel.xlsx" +PARQUET_FILENAME1 = "testZip/testParquet1.parquet" +PARQUET_FILENAME2 = "testZip/testParquet2.parquet" +ZIP_DIRNAME = "testZip" +ZIP_FILENAME = "testZip.zip" + +EXCEL_UPLOAD_TABLE = "excel_upload" +CSV_UPLOAD_TABLE = "csv_upload" +PARQUET_UPLOAD_TABLE = "parquet_upload" +CSV_UPLOAD_TABLE_W_SCHEMA = "csv_upload_w_schema" +CSV_UPLOAD_TABLE_W_EXPLORE = "csv_upload_w_explore" + + +def _setup_csv_upload(): + upload_db = superset.utils.database.get_or_create_db( + CSV_UPLOAD_DATABASE, app.config["SQLALCHEMY_EXAMPLES_URI"] + ) + extra = upload_db.get_extra() + extra["explore_database_id"] = superset.utils.database.get_example_database().id + upload_db.extra = json.dumps(extra) + upload_db.allow_file_upload = True + db.session.commit() + + yield + + upload_db = get_upload_db() + with upload_db.get_sqla_engine_with_context() as engine: + engine.execute(f"DROP TABLE IF EXISTS {EXCEL_UPLOAD_TABLE}") + engine.execute(f"DROP TABLE IF EXISTS {CSV_UPLOAD_TABLE}") + engine.execute(f"DROP TABLE IF EXISTS {PARQUET_UPLOAD_TABLE}") + engine.execute(f"DROP TABLE IF EXISTS {CSV_UPLOAD_TABLE_W_SCHEMA}") + engine.execute(f"DROP TABLE IF EXISTS {CSV_UPLOAD_TABLE_W_EXPLORE}") + db.session.delete(upload_db) + db.session.commit() + + +@pytest.fixture(scope="module") +def setup_csv_upload(login_as_admin): + yield from _setup_csv_upload() + + +@pytest.fixture(scope="module") +def setup_csv_upload_with_context(): + with app.app_context(): + login(test_client, username="admin") + yield from _setup_csv_upload() + + +@pytest.fixture(scope="module") +def create_csv_files(): + with open(CSV_FILENAME1, "w+") as test_file: + for line in ["a,b", "john,1", "paul,2"]: + test_file.write(f"{line}\n") + + with open(CSV_FILENAME2, "w+") as test_file: + for line in ["b,c,d", "john,1,x", "paul,2,"]: + test_file.write(f"{line}\n") + yield + os.remove(CSV_FILENAME1) + os.remove(CSV_FILENAME2) + + +@pytest.fixture() +def create_excel_files(): + pd.DataFrame({"a": ["john", "paul"], "b": [1, 2]}).to_excel(EXCEL_FILENAME) + yield + os.remove(EXCEL_FILENAME) + + +@pytest.fixture() +def create_columnar_files(): + os.mkdir(ZIP_DIRNAME) + pd.DataFrame({"a": ["john", "paul"], "b": [1, 2]}).to_parquet(PARQUET_FILENAME1) + pd.DataFrame({"a": ["max", "bob"], "b": [3, 4]}).to_parquet(PARQUET_FILENAME2) + shutil.make_archive(ZIP_DIRNAME, "zip", ZIP_DIRNAME) + yield + os.remove(ZIP_FILENAME) + shutil.rmtree(ZIP_DIRNAME) + + +def get_upload_db(): + return db.session.query(Database).filter_by(database_name=CSV_UPLOAD_DATABASE).one() + + +def upload_csv(filename: str, table_name: str, extra: Optional[Dict[str, str]] = None): + csv_upload_db_id = get_upload_db().id + schema = utils.get_example_default_schema() + 
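# the example default schema is only included in the upload form when the backend defines one (see the "if schema" check below) +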
form_data = { + "csv_file": open(filename, "rb"), + "delimiter": ",", + "table_name": table_name, + "database": csv_upload_db_id, + "if_exists": "fail", + "index_label": "test_label", + "overwrite_duplicate": False, + } + if schema: + form_data["schema"] = schema + if extra: + form_data.update(extra) + return get_resp(test_client, "/csvtodatabaseview/form", data=form_data) + + +def upload_excel( + filename: str, table_name: str, extra: Optional[Dict[str, str]] = None +): + excel_upload_db_id = get_upload_db().id + schema = utils.get_example_default_schema() + form_data = { + "excel_file": open(filename, "rb"), + "name": table_name, + "database": excel_upload_db_id, + "sheet_name": "Sheet1", + "if_exists": "fail", + "index_label": "test_label", + "mangle_dupe_cols": False, + } + if schema: + form_data["schema"] = schema + if extra: + form_data.update(extra) + return get_resp(test_client, "/exceltodatabaseview/form", data=form_data) + + +def upload_columnar( + filename: str, table_name: str, extra: Optional[Dict[str, str]] = None +): + columnar_upload_db_id = get_upload_db().id + schema = utils.get_example_default_schema() + form_data = { + "columnar_file": open(filename, "rb"), + "name": table_name, + "database": columnar_upload_db_id, + "if_exists": "fail", + "index_label": "test_label", + } + if schema: + form_data["schema"] = schema + if extra: + form_data.update(extra) + return get_resp(test_client, "/columnartodatabaseview/form", data=form_data) + + +def mock_upload_to_s3(filename: str, upload_prefix: str, table: Table) -> str: + """ + HDFS is used instead of S3 for the integration tests. + + :param filename: The file to upload + :param upload_prefix: The S3 prefix + :param table: The table that will be created + :returns: The HDFS path to the directory with external table files + """ + # only needed for the hive tests + import docker + + client = docker.from_env() + container = client.containers.get("namenode") + # docker mounted volume that contains csv uploads + src = os.path.join("/tmp/superset_uploads", os.path.basename(filename)) + # hdfs destination for the external tables + dest_dir = os.path.join("/tmp/external/superset_uploads/", str(table)) + container.exec_run(f"hdfs dfs -mkdir -p {dest_dir}") + dest = os.path.join(dest_dir, os.path.basename(filename)) + container.exec_run(f"hdfs dfs -put {src} {dest}") + # hive external table expects a directory for the location + return dest_dir + + +def escaped_double_quotes(text): + return f"\"{text}\"" + + +def escaped_parquet(text): + return escaped_double_quotes(f"['{text}']") + + +@pytest.mark.usefixtures("setup_csv_upload_with_context") +@pytest.mark.usefixtures("create_csv_files") +@mock.patch( + "superset.models.core.config", + {**app.config, "ALLOWED_USER_CSV_SCHEMA_FUNC": lambda d, u: ["admin_database"]}, +) +@mock.patch("superset.db_engine_specs.hive.upload_to_s3", mock_upload_to_s3) +@mock.patch("superset.views.database.views.event_logger.log_with_context") +def test_import_csv_enforced_schema(mock_event_logger): + if utils.backend() == "sqlite": + pytest.skip("Sqlite doesn't support schema / database creation") + + full_table_name = f"admin_database.{CSV_UPLOAD_TABLE_W_SCHEMA}" + + # Invalid table name + resp = upload_csv(CSV_FILENAME1, full_table_name) + assert "Table name cannot contain a schema" in resp + + # no schema specified, fail upload + resp = upload_csv(CSV_FILENAME1, CSV_UPLOAD_TABLE_W_SCHEMA, extra={"schema": None}) + assert ( + f"Database {escaped_double_quotes(CSV_UPLOAD_DATABASE)} schema" + f" 
{escaped_double_quotes('None')} is not allowed for csv uploads" in resp + ) + + success_msg = f"CSV file {escaped_double_quotes(CSV_FILENAME1)} uploaded to table {escaped_double_quotes(full_table_name)}" + + resp = upload_csv( + CSV_FILENAME1, + CSV_UPLOAD_TABLE_W_SCHEMA, + extra={"schema": "admin_database", "if_exists": "replace"}, + ) + + assert success_msg in resp + mock_event_logger.assert_called_with( + action="successful_csv_upload", + database=get_upload_db().name, + schema="admin_database", + table=CSV_UPLOAD_TABLE_W_SCHEMA, + ) + + with get_upload_db().get_sqla_engine_with_context() as engine: + data = engine.execute( + f"SELECT * from {ADMIN_SCHEMA_NAME}.{CSV_UPLOAD_TABLE_W_SCHEMA}" + ).fetchall() + assert data == [("john", 1), ("paul", 2)] + + # user specified schema doesn't match, fail + resp = upload_csv( + CSV_FILENAME1, CSV_UPLOAD_TABLE_W_SCHEMA, extra={"schema": "gold"} + ) + assert ( + f'Database {escaped_double_quotes(CSV_UPLOAD_DATABASE)} schema {escaped_double_quotes("gold")} is not allowed for csv uploads' + in resp + ) + + # user specified schema matches the expected schema, append + if utils.backend() == "hive": + pytest.skip("Hive database doesn't support append csv uploads.") + resp = upload_csv( + CSV_FILENAME1, + CSV_UPLOAD_TABLE_W_SCHEMA, + extra={"schema": "admin_database", "if_exists": "append"}, + ) + assert success_msg in resp + + # Clean up + with get_upload_db().get_sqla_engine_with_context() as engine: + engine.execute(f"DROP TABLE {full_table_name}") + + +@mock.patch("superset.db_engine_specs.hive.upload_to_s3", mock_upload_to_s3) +def test_import_csv_explore_database(setup_csv_upload_with_context, create_csv_files): + schema = utils.get_example_default_schema() + full_table_name = ( + f"{schema}.{CSV_UPLOAD_TABLE_W_EXPLORE}" + if schema + else CSV_UPLOAD_TABLE_W_EXPLORE + ) + + if utils.backend() == "sqlite": + pytest.skip("Sqlite doesn't support schema / database creation") + + resp = upload_csv(CSV_FILENAME1, CSV_UPLOAD_TABLE_W_EXPLORE) + assert ( + f"CSV file {escaped_double_quotes(CSV_FILENAME1)} uploaded to table {escaped_double_quotes(full_table_name)}" + in resp + ) + table = SupersetTestCase.get_table(name=CSV_UPLOAD_TABLE_W_EXPLORE) + assert table.database_id == superset.utils.database.get_example_database().id + + +@pytest.mark.usefixtures("setup_csv_upload_with_context") +@pytest.mark.usefixtures("create_csv_files") +@mock.patch("superset.db_engine_specs.hive.upload_to_s3", mock_upload_to_s3) +@mock.patch("superset.views.database.views.event_logger.log_with_context") +def test_import_csv(mock_event_logger): + schema = utils.get_example_default_schema() + full_table_name = f"{schema}.{CSV_UPLOAD_TABLE}" if schema else CSV_UPLOAD_TABLE + success_msg_f1 = f"CSV file {escaped_double_quotes(CSV_FILENAME1)} uploaded to table {escaped_double_quotes(full_table_name)}" + + test_db = get_upload_db() + + # initial upload with fail mode + resp = upload_csv(CSV_FILENAME1, CSV_UPLOAD_TABLE) + assert success_msg_f1 in resp + + # upload again with fail mode; should fail + fail_msg = f"Unable to upload CSV file {escaped_double_quotes(CSV_FILENAME1)} to table {escaped_double_quotes(CSV_UPLOAD_TABLE)}" + resp = upload_csv(CSV_FILENAME1, CSV_UPLOAD_TABLE) + assert fail_msg in resp + + if utils.backend() != "hive": + # upload again with append mode + resp = upload_csv( + CSV_FILENAME1, CSV_UPLOAD_TABLE, extra={"if_exists": "append"} + ) + assert success_msg_f1 in resp + mock_event_logger.assert_called_with( + action="successful_csv_upload", + 
database=test_db.name, + schema=schema, + table=CSV_UPLOAD_TABLE, + ) + + # upload again with replace mode + resp = upload_csv(CSV_FILENAME1, CSV_UPLOAD_TABLE, extra={"if_exists": "replace"}) + assert success_msg_f1 in resp + + # try to append to table from file with different schema + resp = upload_csv(CSV_FILENAME2, CSV_UPLOAD_TABLE, extra={"if_exists": "append"}) + fail_msg_f2 = f"Unable to upload CSV file {escaped_double_quotes(CSV_FILENAME2)} to table {escaped_double_quotes(CSV_UPLOAD_TABLE)}" + assert fail_msg_f2 in resp + + # replace table from file with different schema + resp = upload_csv(CSV_FILENAME2, CSV_UPLOAD_TABLE, extra={"if_exists": "replace"}) + success_msg_f2 = f"CSV file {escaped_double_quotes(CSV_FILENAME2)} uploaded to table {escaped_double_quotes(full_table_name)}" + assert success_msg_f2 in resp + + table = SupersetTestCase.get_table(name=CSV_UPLOAD_TABLE) + # make sure the new column name is reflected in the table metadata + assert "d" in table.column_names + + # ensure user is assigned as an owner + assert security_manager.find_user("admin") in table.owners + + # null values are set + upload_csv( + CSV_FILENAME2, + CSV_UPLOAD_TABLE, + extra={"null_values": '["", "john"]', "if_exists": "replace"}, + ) + # make sure that john and empty string are replaced with None + with test_db.get_sqla_engine_with_context() as engine: + data = engine.execute(f"SELECT * from {CSV_UPLOAD_TABLE}").fetchall() + assert data == [(None, 1, "x"), ("paul", 2, None)] + # default null values + upload_csv(CSV_FILENAME2, CSV_UPLOAD_TABLE, extra={"if_exists": "replace"}) + # make sure that only the empty field is replaced with None by default + data = engine.execute(f"SELECT * from {CSV_UPLOAD_TABLE}").fetchall() + assert data == [("john", 1, "x"), ("paul", 2, None)] + + +@pytest.mark.usefixtures("setup_csv_upload_with_context") +@pytest.mark.usefixtures("create_excel_files") +@mock.patch("superset.db_engine_specs.hive.upload_to_s3", mock_upload_to_s3) +@mock.patch("superset.views.database.views.event_logger.log_with_context") +def test_import_excel(mock_event_logger): + if utils.backend() == "hive": + pytest.skip("Hive doesn't support excel upload.") + + schema = utils.get_example_default_schema() + full_table_name = f"{schema}.{EXCEL_UPLOAD_TABLE}" if schema else EXCEL_UPLOAD_TABLE + test_db = get_upload_db() + + success_msg = f"Excel file {escaped_double_quotes(EXCEL_FILENAME)} uploaded to table {escaped_double_quotes(full_table_name)}" + + # initial upload with fail mode + resp = upload_excel(EXCEL_FILENAME, EXCEL_UPLOAD_TABLE) + assert success_msg in resp + mock_event_logger.assert_called_with( + action="successful_excel_upload", + database=test_db.name, + schema=schema, + table=EXCEL_UPLOAD_TABLE, + ) + + # ensure user is assigned as an owner + table = SupersetTestCase.get_table(name=EXCEL_UPLOAD_TABLE) + assert security_manager.find_user("admin") in table.owners + + # upload again with fail mode; should fail + fail_msg = f"Unable to upload Excel file {escaped_double_quotes(EXCEL_FILENAME)} to table {escaped_double_quotes(EXCEL_UPLOAD_TABLE)}" + resp = upload_excel(EXCEL_FILENAME, EXCEL_UPLOAD_TABLE) + assert fail_msg in resp + + if utils.backend() != "hive": + # upload again with append mode + resp = upload_excel( + EXCEL_FILENAME, EXCEL_UPLOAD_TABLE, extra={"if_exists": "append"} + ) + assert success_msg in resp + + # upload again with replace mode + resp = upload_excel( + EXCEL_FILENAME, EXCEL_UPLOAD_TABLE, extra={"if_exists": "replace"} + ) + assert success_msg in resp + 
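# the replace upload should be the most recent call recorded by the mocked event logger +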
mock_event_logger.assert_called_with( + action="successful_excel_upload", + database=test_db.name, + schema=schema, + table=EXCEL_UPLOAD_TABLE, + ) + + with test_db.get_sqla_engine_with_context() as engine: + data = engine.execute(f"SELECT * from {EXCEL_UPLOAD_TABLE}").fetchall() + assert data == [(0, "john", 1), (1, "paul", 2)] + + +@pytest.mark.usefixtures("setup_csv_upload_with_context") +@pytest.mark.usefixtures("create_columnar_files") +@mock.patch("superset.db_engine_specs.hive.upload_to_s3", mock_upload_to_s3) +@mock.patch("superset.views.database.views.event_logger.log_with_context") +def test_import_parquet(mock_event_logger): + if utils.backend() == "hive": + pytest.skip("Hive doesn't allow parquet upload.") + + schema = utils.get_example_default_schema() + full_table_name = ( + f"{schema}.{PARQUET_UPLOAD_TABLE}" if schema else PARQUET_UPLOAD_TABLE + ) + test_db = get_upload_db() + + success_msg_f1 = f"Columnar file {escaped_parquet(PARQUET_FILENAME1)} uploaded to table {escaped_double_quotes(full_table_name)}" + + # initial upload with fail mode + resp = upload_columnar(PARQUET_FILENAME1, PARQUET_UPLOAD_TABLE) + assert success_msg_f1 in resp + + # upload again with fail mode; should fail + fail_msg = f"Unable to upload Columnar file {escaped_parquet(PARQUET_FILENAME1)} to table {escaped_double_quotes(PARQUET_UPLOAD_TABLE)}" + resp = upload_columnar(PARQUET_FILENAME1, PARQUET_UPLOAD_TABLE) + assert fail_msg in resp + + if utils.backend() != "hive": + # upload again with append mode + resp = upload_columnar( + PARQUET_FILENAME1, PARQUET_UPLOAD_TABLE, extra={"if_exists": "append"} + ) + assert success_msg_f1 in resp + mock_event_logger.assert_called_with( + action="successful_columnar_upload", + database=test_db.name, + schema=schema, + table=PARQUET_UPLOAD_TABLE, + ) + + # upload again with replace mode and specific columns + resp = upload_columnar( + PARQUET_FILENAME1, + PARQUET_UPLOAD_TABLE, + extra={"if_exists": "replace", "usecols": '["a"]'}, + ) + assert success_msg_f1 in resp + + table = SupersetTestCase.get_table(name=PARQUET_UPLOAD_TABLE, schema=None) + # make sure only specified column name was read + assert "b" not in table.column_names + + # ensure user is assigned as an owner + assert security_manager.find_user("admin") in table.owners + + # upload again with replace mode + resp = upload_columnar( + PARQUET_FILENAME1, PARQUET_UPLOAD_TABLE, extra={"if_exists": "replace"} + ) + assert success_msg_f1 in resp + + with test_db.get_sqla_engine_with_context() as engine: + data = engine.execute(f"SELECT * from {PARQUET_UPLOAD_TABLE}").fetchall() + assert data == [("john", 1), ("paul", 2)] + + # replace table with zip file + resp = upload_columnar( + ZIP_FILENAME, PARQUET_UPLOAD_TABLE, extra={"if_exists": "replace"} + ) + success_msg_f2 = f"Columnar file {escaped_parquet(ZIP_FILENAME)} uploaded to table {escaped_double_quotes(full_table_name)}" + assert success_msg_f2 in resp + + with test_db.get_sqla_engine_with_context() as engine: + data = engine.execute(f"SELECT * from {PARQUET_UPLOAD_TABLE}").fetchall() + assert data == [("john", 1), ("paul", 2), ("max", 3), ("bob", 4)] diff --git a/tests/integration_tests/dashboard_tests.py b/tests/integration_tests/dashboard_tests.py new file mode 100644 index 0000000000000..d54151db83c2d --- /dev/null +++ b/tests/integration_tests/dashboard_tests.py @@ -0,0 +1,569 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# isort:skip_file +"""Unit tests for Superset""" +from datetime import datetime +import json +import re +import unittest +from random import random + +import pytest +from flask import Response, escape, url_for +from sqlalchemy import func + +from tests.integration_tests.test_app import app +from superset import db, security_manager +from superset.connectors.sqla.models import SqlaTable +from superset.models import core as models +from superset.models.dashboard import Dashboard +from superset.models.slice import Slice +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) +from tests.integration_tests.fixtures.energy_dashboard import ( + load_energy_table_with_slice, + load_energy_table_data, +) +from tests.integration_tests.fixtures.public_role import public_role_like_gamma +from tests.integration_tests.fixtures.unicode_dashboard import ( + load_unicode_dashboard_with_position, + load_unicode_data, +) +from tests.integration_tests.fixtures.world_bank_dashboard import ( + load_world_bank_dashboard_with_slices, + load_world_bank_data, +) + +from .base_tests import SupersetTestCase + + +class TestDashboard(SupersetTestCase): + @pytest.fixture + def cleanup_copied_dash(self): + with app.app_context(): + original_dashboard = ( + db.session.query(Dashboard).filter_by(slug="births").first() + ) + original_dashboard_id = original_dashboard.id + yield + copied_dashboard = ( + db.session.query(Dashboard) + .filter( + Dashboard.dashboard_title == "Copy Of Births", + Dashboard.id != original_dashboard_id, + ) + .first() + ) + + db.session.merge(original_dashboard) + if copied_dashboard: + db.session.delete(copied_dashboard) + db.session.commit() + + @pytest.fixture + def load_dashboard(self): + with app.app_context(): + table = ( + db.session.query(SqlaTable).filter_by(table_name="energy_usage").one() + ) + # get a slice from the allowed table + slice = db.session.query(Slice).filter_by(slice_name="Energy Sankey").one() + + self.grant_public_access_to_table(table) + + pytest.hidden_dash_slug = f"hidden_dash_{random()}" + pytest.published_dash_slug = f"published_dash_{random()}" + + # Create a published and hidden dashboard and add them to the database + published_dash = Dashboard() + published_dash.dashboard_title = "Published Dashboard" + published_dash.slug = pytest.published_dash_slug + published_dash.slices = [slice] + published_dash.published = True + + hidden_dash = Dashboard() + hidden_dash.dashboard_title = "Hidden Dashboard" + hidden_dash.slug = pytest.hidden_dash_slug + hidden_dash.slices = [slice] + hidden_dash.published = False + + db.session.merge(published_dash) + db.session.merge(hidden_dash) + yield db.session.commit() + + self.revoke_public_access_to_table(table) + db.session.delete(published_dash) + 
db.session.delete(hidden_dash) + db.session.commit() + + def get_mock_positions(self, dash): + positions = {"DASHBOARD_VERSION_KEY": "v2"} + for i, slc in enumerate(dash.slices): + id = "DASHBOARD_CHART_TYPE-{}".format(i) + d = { + "type": "CHART", + "id": id, + "children": [], + "meta": {"width": 4, "height": 50, "chartId": slc.id}, + } + positions[id] = d + return positions + + def test_get_dashboard(self): + self.login(username="admin") + for dash in db.session.query(Dashboard): + assert escape(dash.dashboard_title) in self.client.get(dash.url).get_data( + as_text=True + ) + + def test_superset_dashboard_url(self): + url_for("Superset.dashboard", dashboard_id_or_slug=1) + + def test_new_dashboard(self): + self.login(username="admin") + dash_count_before = db.session.query(func.count(Dashboard.id)).first()[0] + url = "/dashboard/new/" + response = self.client.get(url, follow_redirects=False) + dash_count_after = db.session.query(func.count(Dashboard.id)).first()[0] + self.assertEqual(dash_count_before + 1, dash_count_after) + group = re.match( + r"\/superset\/dashboard\/([0-9]*)\/\?edit=true", + response.headers["Location"], + ) + assert group is not None + + # Cleanup + created_dashboard_id = int(group[1]) + created_dashboard = db.session.query(Dashboard).get(created_dashboard_id) + db.session.delete(created_dashboard) + db.session.commit() + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_save_dash(self, username="admin"): + self.login(username=username) + dash = db.session.query(Dashboard).filter_by(slug="births").first() + positions = self.get_mock_positions(dash) + data = { + "css": "", + "expanded_slices": {}, + "positions": positions, + "dashboard_title": dash.dashboard_title, + # set a further modified_time for unit test + "last_modified_time": datetime.now().timestamp() + 1000, + } + url = "/superset/save_dash/{}/".format(dash.id) + resp = self.get_resp(url, data=dict(data=json.dumps(data))) + self.assertIn("SUCCESS", resp) + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def test_save_dash_with_filter(self, username="admin"): + self.login(username=username) + dash = db.session.query(Dashboard).filter_by(slug="world_health").first() + + positions = self.get_mock_positions(dash) + filters = {str(dash.slices[0].id): {"region": ["North America"]}} + default_filters = json.dumps(filters) + data = { + "css": "", + "expanded_slices": {}, + "positions": positions, + "dashboard_title": dash.dashboard_title, + "default_filters": default_filters, + # set a further modified_time for unit test + "last_modified_time": datetime.now().timestamp() + 1000, + } + + url = "/superset/save_dash/{}/".format(dash.id) + resp = self.get_resp(url, data=dict(data=json.dumps(data))) + self.assertIn("SUCCESS", resp) + + updatedDash = db.session.query(Dashboard).filter_by(slug="world_health").first() + new_url = updatedDash.url + self.assertIn("world_health", new_url) + self.assertNotIn("preselect_filters", new_url) + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def test_save_dash_with_invalid_filters(self, username="admin"): + self.login(username=username) + dash = db.session.query(Dashboard).filter_by(slug="world_health").first() + + # add an invalid filter slice + positions = self.get_mock_positions(dash) + filters = {str(99999): {"region": ["North America"]}} + default_filters = json.dumps(filters) + data = { + "css": "", + "expanded_slices": {}, + "positions": positions, + "dashboard_title": dash.dashboard_title, + 
"default_filters": default_filters, + # set a further modified_time for unit test + "last_modified_time": datetime.now().timestamp() + 1000, + } + + url = "/superset/save_dash/{}/".format(dash.id) + resp = self.get_resp(url, data=dict(data=json.dumps(data))) + self.assertIn("SUCCESS", resp) + + updatedDash = db.session.query(Dashboard).filter_by(slug="world_health").first() + new_url = updatedDash.url + self.assertNotIn("region", new_url) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_save_dash_with_dashboard_title(self, username="admin"): + self.login(username=username) + dash = db.session.query(Dashboard).filter_by(slug="births").first() + origin_title = dash.dashboard_title + positions = self.get_mock_positions(dash) + data = { + "css": "", + "expanded_slices": {}, + "positions": positions, + "dashboard_title": "new title", + # set a further modified_time for unit test + "last_modified_time": datetime.now().timestamp() + 1000, + } + url = "/superset/save_dash/{}/".format(dash.id) + self.get_resp(url, data=dict(data=json.dumps(data))) + updatedDash = db.session.query(Dashboard).filter_by(slug="births").first() + self.assertEqual(updatedDash.dashboard_title, "new title") + # bring back dashboard original title + data["dashboard_title"] = origin_title + self.get_resp(url, data=dict(data=json.dumps(data))) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_save_dash_with_colors(self, username="admin"): + self.login(username=username) + dash = db.session.query(Dashboard).filter_by(slug="births").first() + positions = self.get_mock_positions(dash) + new_label_colors = {"data value": "random color"} + data = { + "css": "", + "expanded_slices": {}, + "positions": positions, + "dashboard_title": dash.dashboard_title, + "color_namespace": "Color Namespace Test", + "color_scheme": "Color Scheme Test", + "label_colors": new_label_colors, + # set a further modified_time for unit test + "last_modified_time": datetime.now().timestamp() + 1000, + } + url = "/superset/save_dash/{}/".format(dash.id) + self.get_resp(url, data=dict(data=json.dumps(data))) + updatedDash = db.session.query(Dashboard).filter_by(slug="births").first() + self.assertIn("color_namespace", updatedDash.json_metadata) + self.assertIn("color_scheme", updatedDash.json_metadata) + self.assertIn("label_colors", updatedDash.json_metadata) + # bring back original dashboard + del data["color_namespace"] + del data["color_scheme"] + del data["label_colors"] + self.get_resp(url, data=dict(data=json.dumps(data))) + + @pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", + "cleanup_copied_dash", + "load_unicode_dashboard_with_position", + ) + def test_copy_dash(self, username="admin"): + self.login(username=username) + dash = db.session.query(Dashboard).filter_by(slug="births").first() + positions = self.get_mock_positions(dash) + new_label_colors = {"data value": "random color"} + data = { + "css": "", + "duplicate_slices": False, + "expanded_slices": {}, + "positions": positions, + "dashboard_title": "Copy Of Births", + "color_namespace": "Color Namespace Test", + "color_scheme": "Color Scheme Test", + "label_colors": new_label_colors, + # set a further modified_time for unit test + "last_modified_time": datetime.now().timestamp() + 1000, + } + + # Save changes to Births dashboard and retrieve updated dash + dash_id = dash.id + url = "/superset/save_dash/{}/".format(dash_id) + self.client.post(url, data=dict(data=json.dumps(data))) + dash = 
db.session.query(Dashboard).filter_by(id=dash_id).first() + orig_json_data = dash.data + + # Verify that copy matches original + url = "/superset/copy_dash/{}/".format(dash_id) + resp = self.get_json_resp(url, data=dict(data=json.dumps(data))) + self.assertEqual(resp["dashboard_title"], "Copy Of Births") + self.assertEqual(resp["position_json"], orig_json_data["position_json"]) + self.assertEqual(resp["metadata"], orig_json_data["metadata"]) + # check every attribute in each dashboard's slices list, + # exclude modified and changed_on attribute + for index, slc in enumerate(orig_json_data["slices"]): + for key in slc: + if key not in ["modified", "changed_on", "changed_on_humanized"]: + self.assertEqual(slc[key], resp["slices"][index][key]) + + @pytest.mark.usefixtures( + "load_energy_table_with_slice", "load_birth_names_dashboard_with_slices" + ) + def test_add_slices(self, username="admin"): + self.login(username=username) + dash = db.session.query(Dashboard).filter_by(slug="births").first() + new_slice = ( + db.session.query(Slice).filter_by(slice_name="Energy Force Layout").first() + ) + existing_slice = ( + db.session.query(Slice).filter_by(slice_name="Girl Name Cloud").first() + ) + data = { + "slice_ids": [new_slice.data["slice_id"], existing_slice.data["slice_id"]] + } + url = "/superset/add_slices/{}/".format(dash.id) + resp = self.client.post(url, data=dict(data=json.dumps(data))) + assert "SLICES ADDED" in resp.data.decode("utf-8") + + dash = db.session.query(Dashboard).filter_by(slug="births").first() + new_slice = ( + db.session.query(Slice).filter_by(slice_name="Energy Force Layout").first() + ) + assert new_slice in dash.slices + assert len(set(dash.slices)) == len(dash.slices) + + # cleaning up + dash = db.session.query(Dashboard).filter_by(slug="births").first() + dash.slices = [o for o in dash.slices if o.slice_name != "Energy Force Layout"] + db.session.commit() + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_remove_slices(self, username="admin"): + self.login(username=username) + dash = db.session.query(Dashboard).filter_by(slug="births").first() + origin_slices_length = len(dash.slices) + + positions = self.get_mock_positions(dash) + # remove one chart + chart_keys = [] + for key in positions.keys(): + if key.startswith("DASHBOARD_CHART_TYPE"): + chart_keys.append(key) + positions.pop(chart_keys[0]) + + data = { + "css": "", + "expanded_slices": {}, + "positions": positions, + "dashboard_title": dash.dashboard_title, + # set a further modified_time for unit test + "last_modified_time": datetime.now().timestamp() + 1000, + } + + # save dash + dash_id = dash.id + url = "/superset/save_dash/{}/".format(dash_id) + self.client.post(url, data=dict(data=json.dumps(data))) + dash = db.session.query(Dashboard).filter_by(id=dash_id).first() + + # verify slices data + data = dash.data + self.assertEqual(len(data["slices"]), origin_slices_length - 1) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + @pytest.mark.usefixtures("public_role_like_gamma") + def test_public_user_dashboard_access(self): + table = db.session.query(SqlaTable).filter_by(table_name="birth_names").one() + + # Make the births dash published so it can be seen + births_dash = db.session.query(Dashboard).filter_by(slug="births").one() + births_dash.published = True + + db.session.merge(births_dash) + db.session.commit() + + # Try access before adding appropriate permissions. 
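+ # revoke first so the public role starts with no access to the birth_names dataset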
+ self.revoke_public_access_to_table(table) + self.logout() + + resp = self.get_resp("/api/v1/chart/") + self.assertNotIn("birth_names", resp) + + resp = self.get_resp("/api/v1/dashboard/") + self.assertNotIn("/superset/dashboard/births/", resp) + + self.grant_public_access_to_table(table) + + # Try access after adding appropriate permissions. + self.assertIn("birth_names", self.get_resp("/api/v1/chart/")) + + resp = self.get_resp("/api/v1/dashboard/") + self.assertIn("/superset/dashboard/births/", resp) + + # Confirm that public doesn't have access to other datasets. + resp = self.get_resp("/api/v1/chart/") + self.assertNotIn("wb_health_population", resp) + + resp = self.get_resp("/api/v1/dashboard/") + self.assertNotIn("/superset/dashboard/world_health/", resp) + + # Cleanup + self.revoke_public_access_to_table(table) + + @pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "public_role_like_gamma" + ) + def test_dashboard_with_created_by_can_be_accessed_by_public_users(self): + self.logout() + table = db.session.query(SqlaTable).filter_by(table_name="birth_names").one() + self.grant_public_access_to_table(table) + + dash = db.session.query(Dashboard).filter_by(slug="births").first() + dash.owners = [security_manager.find_user("admin")] + dash.created_by = security_manager.find_user("admin") + db.session.merge(dash) + db.session.commit() + + res: Response = self.client.get("/superset/dashboard/births/") + assert res.status_code == 200 + + # Cleanup + self.revoke_public_access_to_table(table) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_only_owners_can_save(self): + dash = db.session.query(Dashboard).filter_by(slug="births").first() + dash.owners = [] + db.session.merge(dash) + db.session.commit() + self.test_save_dash("admin") + + self.logout() + self.assertRaises(Exception, self.test_save_dash, "alpha") + + alpha = security_manager.find_user("alpha") + + dash = db.session.query(Dashboard).filter_by(slug="births").first() + dash.owners = [alpha] + db.session.merge(dash) + db.session.commit() + self.test_save_dash("alpha") + + @pytest.mark.usefixtures("load_energy_table_with_slice", "load_dashboard") + def test_users_can_list_published_dashboard(self): + self.login("alpha") + resp = self.get_resp("/api/v1/dashboard/") + assert f"/superset/dashboard/{pytest.hidden_dash_slug}/" not in resp + assert f"/superset/dashboard/{pytest.published_dash_slug}/" in resp + + def test_users_can_view_own_dashboard(self): + user = security_manager.find_user("gamma") + my_dash_slug = f"my_dash_{random()}" + not_my_dash_slug = f"not_my_dash_{random()}" + + # Create one dashboard I own and another that I don't + dash = Dashboard() + dash.dashboard_title = "My Dashboard" + dash.slug = my_dash_slug + dash.owners = [user] + dash.slices = [] + + hidden_dash = Dashboard() + hidden_dash.dashboard_title = "Not My Dashboard" + hidden_dash.slug = not_my_dash_slug + hidden_dash.slices = [] + hidden_dash.owners = [] + + db.session.add(dash) + db.session.add(hidden_dash) + db.session.commit() + + self.login(user.username) + + resp = self.get_resp("/api/v1/dashboard/") + + db.session.delete(dash) + db.session.delete(hidden_dash) + db.session.commit() + + self.assertIn(f"/superset/dashboard/{my_dash_slug}/", resp) + self.assertNotIn(f"/superset/dashboard/{not_my_dash_slug}/", resp) + + def test_users_can_view_favorited_dashboards(self): + user = security_manager.find_user("gamma") + fav_dash_slug = f"my_favorite_dash_{random()}" + regular_dash_slug = 
f"regular_dash_{random()}" + + favorite_dash = Dashboard() + favorite_dash.dashboard_title = "My Favorite Dashboard" + favorite_dash.slug = fav_dash_slug + + regular_dash = Dashboard() + regular_dash.dashboard_title = "A Plain Ol Dashboard" + regular_dash.slug = regular_dash_slug + + db.session.add(favorite_dash) + db.session.add(regular_dash) + db.session.commit() + + dash = db.session.query(Dashboard).filter_by(slug=fav_dash_slug).first() + + favorites = models.FavStar() + favorites.obj_id = dash.id + favorites.class_name = "Dashboard" + favorites.user_id = user.id + + db.session.add(favorites) + db.session.commit() + + self.login(user.username) + + resp = self.get_resp("/api/v1/dashboard/") + + db.session.delete(favorites) + db.session.delete(regular_dash) + db.session.delete(favorite_dash) + db.session.commit() + + self.assertIn(f"/superset/dashboard/{fav_dash_slug}/", resp) + + def test_user_can_not_view_unpublished_dash(self): + admin_user = security_manager.find_user("admin") + gamma_user = security_manager.find_user("gamma") + slug = f"admin_owned_unpublished_dash_{random()}" + + # Create a dashboard owned by admin and unpublished + dash = Dashboard() + dash.dashboard_title = "My Dashboard" + dash.slug = slug + dash.owners = [admin_user] + dash.slices = [] + dash.published = False + db.session.add(dash) + db.session.commit() + + # list dashboards as a gamma user + self.login(gamma_user.username) + resp = self.get_resp("/api/v1/dashboard/") + + db.session.delete(dash) + db.session.commit() + + self.assertNotIn(f"/superset/dashboard/{slug}/", resp) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/integration_tests/dashboard_utils.py b/tests/integration_tests/dashboard_utils.py new file mode 100644 index 0000000000000..bea724dafc95d --- /dev/null +++ b/tests/integration_tests/dashboard_utils.py @@ -0,0 +1,97 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+"""Utils to provide dashboards for tests""" + +import json +from typing import Any, Dict, List, Optional + +from pandas import DataFrame + +from superset import db +from superset.connectors.sqla.models import SqlaTable +from superset.models.core import Database +from superset.models.dashboard import Dashboard +from superset.models.slice import Slice +from superset.utils.core import DatasourceType, get_example_default_schema + + +def get_table( + table_name: str, + database: Database, + schema: Optional[str] = None, +): + schema = schema or get_example_default_schema() + return ( + db.session.query(SqlaTable) + .filter_by(database_id=database.id, schema=schema, table_name=table_name) + .one_or_none() + ) + + +def create_table_metadata( + table_name: str, + database: Database, + table_description: str = "", + fetch_values_predicate: Optional[str] = None, + schema: Optional[str] = None, +) -> SqlaTable: + schema = schema or get_example_default_schema() + + table = get_table(table_name, database, schema) + if not table: + table = SqlaTable(schema=schema, table_name=table_name) + if fetch_values_predicate: + table.fetch_values_predicate = fetch_values_predicate + table.database = database + table.description = table_description + db.session.merge(table) + db.session.commit() + + return table + + +def create_slice( + title: str, viz_type: str, table: SqlaTable, slices_dict: Dict[str, str] +) -> Slice: + return Slice( + slice_name=title, + viz_type=viz_type, + datasource_type=DatasourceType.TABLE, + datasource_id=table.id, + params=json.dumps(slices_dict, indent=4, sort_keys=True), + ) + + +def create_dashboard( + slug: str, title: str, position: str, slices: List[Slice] +) -> Dashboard: + dash = db.session.query(Dashboard).filter_by(slug=slug).one_or_none() + if dash: + return dash + dash = Dashboard() + dash.dashboard_title = title + if position is not None: + js = position + pos = json.loads(js) + dash.position_json = json.dumps(pos, indent=4) + dash.slug = slug + if slices is not None: + dash.slices = slices + db.session.add(dash) + db.session.commit() + + return dash diff --git a/tests/integration_tests/dashboards/__init__.py b/tests/integration_tests/dashboards/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/dashboards/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/dashboards/api_tests.py b/tests/integration_tests/dashboards/api_tests.py new file mode 100644 index 0000000000000..725811ce5f68d --- /dev/null +++ b/tests/integration_tests/dashboards/api_tests.py @@ -0,0 +1,2066 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# isort:skip_file +"""Unit tests for Superset""" +import json +from io import BytesIO +from time import sleep +from typing import List, Optional +from unittest.mock import patch +from zipfile import is_zipfile, ZipFile + +from tests.integration_tests.insert_chart_mixin import InsertChartMixin + +import pytest +import prison +import yaml + +from freezegun import freeze_time +from sqlalchemy import and_ +from superset import db, security_manager +from superset.models.dashboard import Dashboard +from superset.models.core import FavStar, FavStarClassName +from superset.reports.models import ReportSchedule, ReportScheduleType +from superset.models.slice import Slice +from superset.utils.core import backend +from superset.views.base import generate_download_headers + +from tests.integration_tests.base_api_tests import ApiOwnersTestCaseMixin +from tests.integration_tests.base_tests import SupersetTestCase +from tests.integration_tests.fixtures.importexport import ( + chart_config, + database_config, + dashboard_config, + dashboard_export, + dashboard_metadata_config, + dataset_config, + dataset_metadata_config, +) +from tests.integration_tests.utils.get_dashboards import get_dashboards_ids +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) +from tests.integration_tests.fixtures.world_bank_dashboard import ( + load_world_bank_dashboard_with_slices, + load_world_bank_data, +) + +DASHBOARDS_FIXTURE_COUNT = 10 + + +class TestDashboardApi(SupersetTestCase, ApiOwnersTestCaseMixin, InsertChartMixin): + resource_name = "dashboard" + + dashboards: List[Dashboard] = [] + dashboard_data = { + "dashboard_title": "title1_changed", + "slug": "slug1_changed", + "position_json": '{"b": "B"}', + "css": "css_changed", + "json_metadata": '{"refresh_frequency": 30, "timed_refresh_immune_slices": [], "expanded_slices": {}, "color_scheme": "", "label_colors": {}, "shared_label_colors": {}, "color_scheme_domain": [], "cross_filters_enabled": false}', + "published": False, + } + + def insert_dashboard( + self, + dashboard_title: str, + slug: Optional[str], + owners: List[int], + roles: List[int] = [], + created_by=None, + slices: Optional[List[Slice]] = None, + position_json: str = "", + css: str = "", + json_metadata: str = "", + published: bool = False, + certified_by: Optional[str] = None, + certification_details: Optional[str] = None, + ) -> Dashboard: + obj_owners = list() + obj_roles = list() + slices = slices or [] + for owner in owners: + user = db.session.query(security_manager.user_model).get(owner) + obj_owners.append(user) + for role in roles: + role_obj = db.session.query(security_manager.role_model).get(role) + obj_roles.append(role_obj) + dashboard = Dashboard( + dashboard_title=dashboard_title, + slug=slug, + owners=obj_owners, + 
roles=obj_roles, + position_json=position_json, + css=css, + json_metadata=json_metadata, + slices=slices, + published=published, + created_by=created_by, + certified_by=certified_by, + certification_details=certification_details, + ) + db.session.add(dashboard) + db.session.commit() + return dashboard + + @pytest.fixture() + def create_dashboards(self): + with self.create_app().app_context(): + dashboards = [] + admin = self.get_user("admin") + charts = [] + half_dash_count = round(DASHBOARDS_FIXTURE_COUNT / 2) + for cx in range(DASHBOARDS_FIXTURE_COUNT): + dashboard = self.insert_dashboard( + f"title{cx}", + f"slug{cx}", + [admin.id], + slices=charts if cx < half_dash_count else [], + certified_by="John Doe", + certification_details="Sample certification", + ) + if cx < half_dash_count: + chart = self.insert_chart(f"slice{cx}", [admin.id], 1, params="{}") + charts.append(chart) + dashboard.slices = [chart] + db.session.add(dashboard) + dashboards.append(dashboard) + fav_dashboards = [] + for cx in range(half_dash_count): + fav_star = FavStar( + user_id=admin.id, class_name="Dashboard", obj_id=dashboards[cx].id + ) + db.session.add(fav_star) + db.session.commit() + fav_dashboards.append(fav_star) + self.dashboards = dashboards + yield dashboards + + # rollback changes + for chart in charts: + db.session.delete(chart) + for dashboard in dashboards: + db.session.delete(dashboard) + for fav_dashboard in fav_dashboards: + db.session.delete(fav_dashboard) + db.session.commit() + + @pytest.fixture() + def create_created_by_gamma_dashboards(self): + with self.create_app().app_context(): + dashboards = [] + gamma = self.get_user("gamma") + for cx in range(2): + dashboard = self.insert_dashboard( + f"create_title{cx}", + f"create_slug{cx}", + [gamma.id], + created_by=gamma, + ) + sleep(1) + dashboards.append(dashboard) + + yield dashboards + + for dashboard in dashboards: + db.session.delete(dashboard) + db.session.commit() + + @pytest.fixture() + def create_dashboard_with_report(self): + with self.create_app().app_context(): + admin = self.get_user("admin") + dashboard = self.insert_dashboard( + f"dashboard_report", "dashboard_report", [admin.id] + ) + report_schedule = ReportSchedule( + type=ReportScheduleType.REPORT, + name="report_with_dashboard", + crontab="* * * * *", + dashboard=dashboard, + ) + db.session.commit() + + yield dashboard + + # rollback changes + db.session.delete(report_schedule) + db.session.delete(dashboard) + db.session.commit() + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def test_get_dashboard_datasets(self): + self.login(username="admin") + uri = "api/v1/dashboard/world_health/datasets" + response = self.get_assert_metric(uri, "get_datasets") + self.assertEqual(response.status_code, 200) + data = json.loads(response.data.decode("utf-8")) + dashboard = Dashboard.get("world_health") + expected_dataset_ids = set([s.datasource_id for s in dashboard.slices]) + result = data["result"] + actual_dataset_ids = set([dataset["id"] for dataset in result]) + self.assertEqual(actual_dataset_ids, expected_dataset_ids) + expected_values = [0, 1] if backend() == "presto" else [0, 1, 2] + self.assertEqual(result[0]["column_types"], expected_values) + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def test_get_dashboard_datasets_not_found(self): + self.login(username="alpha") + uri = "api/v1/dashboard/not_found/datasets" + response = self.get_assert_metric(uri, "get_datasets") + self.assertEqual(response.status_code, 404) + + 
@pytest.mark.usefixtures("create_dashboards") + def test_get_gamma_dashboard_datasets(self): + """ + Check that a gamma user with data access can access dashboard/datasets + """ + from superset.connectors.sqla.models import SqlaTable + + # Set correct role permissions + gamma_role = security_manager.find_role("Gamma") + fixture_dataset = db.session.query(SqlaTable).get(1) + data_access_pvm = security_manager.add_permission_view_menu( + "datasource_access", fixture_dataset.perm + ) + gamma_role.permissions.append(data_access_pvm) + db.session.commit() + + self.login(username="gamma") + dashboard = self.dashboards[0] + dashboard.published = True + db.session.commit() + + uri = f"api/v1/dashboard/{dashboard.id}/datasets" + response = self.get_assert_metric(uri, "get_datasets") + assert response.status_code == 200 + + # rollback permission change + data_access_pvm = security_manager.find_permission_view_menu( + "datasource_access", fixture_dataset.perm + ) + security_manager.del_permission_role(gamma_role, data_access_pvm) + + @pytest.mark.usefixtures("create_dashboards") + def get_dashboard_by_slug(self): + self.login(username="admin") + dashboard = self.dashboards[0] + uri = f"api/v1/dashboard/{dashboard.slug}" + response = self.get_assert_metric(uri, "get") + self.assertEqual(response.status_code, 200) + data = json.loads(response.data.decode("utf-8")) + self.assertEqual(data["id"], dashboard.id) + + @pytest.mark.usefixtures("create_dashboards") + def get_dashboard_by_bad_slug(self): + self.login(username="admin") + dashboard = self.dashboards[0] + uri = f"api/v1/dashboard/{dashboard.slug}-bad-slug" + response = self.get_assert_metric(uri, "get") + self.assertEqual(response.status_code, 404) + + @pytest.mark.usefixtures("create_dashboards") + def get_draft_dashboard_by_slug(self): + """ + All users should have access to dashboards without roles + """ + self.login(username="gamma") + dashboard = self.dashboards[0] + uri = f"api/v1/dashboard/{dashboard.slug}" + response = self.get_assert_metric(uri, "get") + self.assertEqual(response.status_code, 200) + + @pytest.mark.usefixtures("create_dashboards") + def test_get_dashboard_charts(self): + """ + Dashboard API: Test getting charts belonging to a dashboard + """ + self.login(username="admin") + dashboard = self.dashboards[0] + uri = f"api/v1/dashboard/{dashboard.id}/charts" + response = self.get_assert_metric(uri, "get_charts") + self.assertEqual(response.status_code, 200) + data = json.loads(response.data.decode("utf-8")) + assert len(data["result"]) == 1 + result = data["result"][0] + assert set(result.keys()) == { + "cache_timeout", + "certification_details", + "certified_by", + "changed_on", + "description", + "description_markeddown", + "form_data", + "id", + "slice_name", + "slice_url", + } + assert result["id"] == dashboard.slices[0].id + assert result["slice_name"] == dashboard.slices[0].slice_name + + @pytest.mark.usefixtures("create_dashboards") + def test_get_dashboard_charts_by_slug(self): + """ + Dashboard API: Test getting charts belonging to a dashboard + """ + self.login(username="admin") + dashboard = self.dashboards[0] + uri = f"api/v1/dashboard/{dashboard.slug}/charts" + response = self.get_assert_metric(uri, "get_charts") + self.assertEqual(response.status_code, 200) + data = json.loads(response.data.decode("utf-8")) + self.assertEqual(len(data["result"]), 1) + self.assertEqual( + data["result"][0]["slice_name"], dashboard.slices[0].slice_name + ) + + @pytest.mark.usefixtures("create_dashboards") + def 
test_get_dashboard_charts_not_found(self): + """ + Dashboard API: Test getting charts belonging to a dashboard that does not exist + """ + self.login(username="admin") + bad_id = self.get_nonexistent_numeric_id(Dashboard) + uri = f"api/v1/dashboard/{bad_id}/charts" + response = self.get_assert_metric(uri, "get_charts") + self.assertEqual(response.status_code, 404) + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def test_get_dashboard_datasets_not_allowed(self): + self.login(username="gamma") + uri = "api/v1/dashboard/world_health/datasets" + response = self.get_assert_metric(uri, "get_datasets") + self.assertEqual(response.status_code, 404) + + @pytest.mark.usefixtures("create_dashboards") + def test_get_gamma_dashboard_charts(self): + """ + Check that a gamma user with data access can access dashboard/charts + """ + from superset.connectors.sqla.models import SqlaTable + + # Set correct role permissions + gamma_role = security_manager.find_role("Gamma") + fixture_dataset = db.session.query(SqlaTable).get(1) + data_access_pvm = security_manager.add_permission_view_menu( + "datasource_access", fixture_dataset.perm + ) + gamma_role.permissions.append(data_access_pvm) + db.session.commit() + + self.login(username="gamma") + + dashboard = self.dashboards[0] + dashboard.published = True + db.session.commit() + + uri = f"api/v1/dashboard/{dashboard.id}/charts" + response = self.get_assert_metric(uri, "get_charts") + assert response.status_code == 200 + + # rollback permission change + data_access_pvm = security_manager.find_permission_view_menu( + "datasource_access", fixture_dataset.perm + ) + security_manager.del_permission_role(gamma_role, data_access_pvm) + + @pytest.mark.usefixtures("create_dashboards") + def test_get_dashboard_charts_empty(self): + """ + Dashboard API: Test getting charts belonging to a dashboard without any charts + """ + self.login(username="admin") + # the fixture setup assigns no charts to the second half of dashboards + uri = f"api/v1/dashboard/{self.dashboards[-1].id}/charts" + response = self.get_assert_metric(uri, "get_charts") + self.assertEqual(response.status_code, 200) + data = json.loads(response.data.decode("utf-8")) + self.assertEqual(data["result"], []) + + def test_get_dashboard(self): + """ + Dashboard API: Test get dashboard + """ + admin = self.get_user("admin") + dashboard = self.insert_dashboard( + "title", "slug1", [admin.id], created_by=admin + ) + self.login(username="admin") + uri = f"api/v1/dashboard/{dashboard.id}" + rv = self.get_assert_metric(uri, "get") + self.assertEqual(rv.status_code, 200) + expected_result = { + "certified_by": None, + "certification_details": None, + "changed_by": None, + "changed_by_name": "", + "changed_by_url": "", + "charts": [], + "created_by": { + "id": 1, + "first_name": "admin", + "last_name": "user", + }, + "id": dashboard.id, + "css": "", + "dashboard_title": "title", + "datasources": [], + "json_metadata": "", + "owners": [ + { + "id": 1, + "username": "admin", + "first_name": "admin", + "last_name": "user", + } + ], + "roles": [], + "position_json": "", + "published": False, + "url": "/superset/dashboard/slug1/", + "slug": "slug1", + "thumbnail_url": dashboard.thumbnail_url, + "is_managed_externally": False, + } + data = json.loads(rv.data.decode("utf-8")) + self.assertIn("changed_on", data["result"]) + self.assertIn("changed_on_delta_humanized", data["result"]) + for key, value in data["result"].items(): + # We can't assert timestamp values + if key not in ( + "changed_on", + 
"changed_on_delta_humanized", + ): + self.assertEqual(value, expected_result[key]) + # rollback changes + db.session.delete(dashboard) + db.session.commit() + + def test_info_dashboard(self): + """ + Dashboard API: Test info + """ + self.login(username="admin") + uri = "api/v1/dashboard/_info" + rv = self.get_assert_metric(uri, "info") + self.assertEqual(rv.status_code, 200) + + def test_info_security_database(self): + """ + Dashboard API: Test info security + """ + self.login(username="admin") + params = {"keys": ["permissions"]} + uri = f"api/v1/dashboard/_info?q={prison.dumps(params)}" + rv = self.get_assert_metric(uri, "info") + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 200 + assert set(data["permissions"]) == { + "can_read", + "can_write", + "can_export", + "can_get_embedded", + "can_delete_embedded", + "can_set_embedded", + } + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def test_get_dashboard_not_found(self): + """ + Dashboard API: Test get dashboard not found + """ + bad_id = self.get_nonexistent_numeric_id(Dashboard) + self.login(username="admin") + uri = f"api/v1/dashboard/{bad_id}" + rv = self.get_assert_metric(uri, "get") + self.assertEqual(rv.status_code, 404) + + def test_get_dashboard_no_data_access(self): + """ + Dashboard API: Test get dashboard without data access + """ + admin = self.get_user("admin") + dashboard = self.insert_dashboard("title", "slug1", [admin.id]) + + self.login(username="gamma") + uri = f"api/v1/dashboard/{dashboard.id}" + rv = self.client.get(uri) + assert rv.status_code == 404 + # rollback changes + db.session.delete(dashboard) + db.session.commit() + + def test_get_dashboards_changed_on(self): + """ + Dashboard API: Test get dashboards changed on + """ + from datetime import datetime + import humanize + + with freeze_time("2020-01-01T00:00:00Z"): + admin = self.get_user("admin") + dashboard = self.insert_dashboard("title", "slug1", [admin.id]) + + self.login(username="admin") + + arguments = { + "order_column": "changed_on_delta_humanized", + "order_direction": "desc", + } + uri = f"api/v1/dashboard/?q={prison.dumps(arguments)}" + + rv = self.get_assert_metric(uri, "get_list") + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual( + data["result"][0]["changed_on_delta_humanized"], + humanize.naturaltime(datetime.now()), + ) + + # rollback changes + db.session.delete(dashboard) + db.session.commit() + + def test_get_dashboards_filter(self): + """ + Dashboard API: Test get dashboards filter + """ + admin = self.get_user("admin") + gamma = self.get_user("gamma") + dashboard = self.insert_dashboard("title", "slug1", [admin.id, gamma.id]) + + self.login(username="admin") + + arguments = { + "filters": [{"col": "dashboard_title", "opr": "sw", "value": "ti"}] + } + uri = f"api/v1/dashboard/?q={prison.dumps(arguments)}" + + rv = self.get_assert_metric(uri, "get_list") + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual(data["count"], 1) + + arguments = { + "filters": [ + {"col": "owners", "opr": "rel_m_m", "value": [admin.id, gamma.id]} + ] + } + uri = f"api/v1/dashboard/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual(data["count"], 1) + + # rollback changes + db.session.delete(dashboard) + db.session.commit() + + @pytest.mark.usefixtures("create_dashboards") + def 
test_get_dashboards_title_or_slug_filter(self): + """ + Dashboard API: Test get dashboards title or slug filter + """ + # Test title filter with ilike + arguments = { + "filters": [ + {"col": "dashboard_title", "opr": "title_or_slug", "value": "title1"} + ], + "order_column": "dashboard_title", + "order_direction": "asc", + "keys": ["none"], + "columns": ["dashboard_title", "slug"], + } + self.login(username="admin") + uri = f"api/v1/dashboard/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual(data["count"], 1) + + expected_response = [ + {"slug": "slug1", "dashboard_title": "title1"}, + ] + assert data["result"] == expected_response + + # Test slug filter with ilike + arguments["filters"][0]["value"] = "slug2" + uri = f"api/v1/dashboard/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual(data["count"], 1) + + expected_response = [ + {"slug": "slug2", "dashboard_title": "title2"}, + ] + assert data["result"] == expected_response + + self.logout() + self.login(username="gamma") + uri = f"api/v1/dashboard/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual(data["count"], 0) + + @pytest.mark.usefixtures("create_dashboards") + def test_get_dashboards_favorite_filter(self): + """ + Dashboard API: Test get dashboards favorite filter + """ + admin = self.get_user("admin") + users_favorite_query = db.session.query(FavStar.obj_id).filter( + and_(FavStar.user_id == admin.id, FavStar.class_name == "Dashboard") + ) + expected_models = ( + db.session.query(Dashboard) + .filter(and_(Dashboard.id.in_(users_favorite_query))) + .order_by(Dashboard.dashboard_title.asc()) + .all() + ) + + arguments = { + "filters": [{"col": "id", "opr": "dashboard_is_favorite", "value": True}], + "order_column": "dashboard_title", + "order_direction": "asc", + "keys": ["none"], + "columns": ["dashboard_title"], + } + self.login(username="admin") + uri = f"api/v1/dashboard/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert len(expected_models) == data["count"] + + for i, expected_model in enumerate(expected_models): + assert ( + expected_model.dashboard_title == data["result"][i]["dashboard_title"] + ) + + @pytest.mark.usefixtures("create_dashboards") + def test_get_current_user_favorite_status(self): + """ + Dataset API: Test get current user favorite stars + """ + admin = self.get_user("admin") + users_favorite_ids = [ + star.obj_id + for star in db.session.query(FavStar.obj_id) + .filter( + and_( + FavStar.user_id == admin.id, + FavStar.class_name == FavStarClassName.DASHBOARD, + ) + ) + .all() + ] + + assert users_favorite_ids + arguments = [dash.id for dash in db.session.query(Dashboard.id).all()] + self.login(username="admin") + uri = f"api/v1/dashboard/favorite_status/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 200 + for res in data["result"]: + if res["id"] in users_favorite_ids: + assert res["value"] + + @pytest.mark.usefixtures("create_dashboards") + def test_get_dashboards_not_favorite_filter(self): + """ + Dashboard API: Test get dashboards not favorite filter + """ + admin = self.get_user("admin") + 
users_favorite_query = db.session.query(FavStar.obj_id).filter( + and_(FavStar.user_id == admin.id, FavStar.class_name == "Dashboard") + ) + expected_models = ( + db.session.query(Dashboard) + .filter(and_(~Dashboard.id.in_(users_favorite_query))) + .order_by(Dashboard.dashboard_title.asc()) + .all() + ) + arguments = { + "filters": [{"col": "id", "opr": "dashboard_is_favorite", "value": False}], + "order_column": "dashboard_title", + "order_direction": "asc", + "keys": ["none"], + "columns": ["dashboard_title"], + } + uri = f"api/v1/dashboard/?q={prison.dumps(arguments)}" + self.login(username="admin") + rv = self.client.get(uri) + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 200 + assert len(expected_models) == data["count"] + for i, expected_model in enumerate(expected_models): + assert ( + expected_model.dashboard_title == data["result"][i]["dashboard_title"] + ) + + @pytest.mark.usefixtures("create_dashboards") + def test_gets_certified_dashboards_filter(self): + arguments = { + "filters": [ + { + "col": "id", + "opr": "dashboard_is_certified", + "value": True, + } + ], + "keys": ["none"], + "columns": ["dashboard_title"], + } + self.login(username="admin") + + uri = f"api/v1/dashboard/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual(data["count"], DASHBOARDS_FIXTURE_COUNT) + + @pytest.mark.usefixtures("create_dashboards") + def test_gets_not_certified_dashboards_filter(self): + arguments = { + "filters": [ + { + "col": "id", + "opr": "dashboard_is_certified", + "value": False, + } + ], + "keys": ["none"], + "columns": ["dashboard_title"], + } + self.login(username="admin") + + uri = f"api/v1/dashboard/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual(data["count"], 0) + + @pytest.mark.usefixtures("create_created_by_gamma_dashboards") + def test_get_dashboards_created_by_me(self): + """ + Dashboard API: Test get dashboards created by current user + """ + query = { + "columns": ["created_on_delta_humanized", "dashboard_title", "url"], + "filters": [ + {"col": "created_by", "opr": "dashboard_created_by_me", "value": "me"} + ], + "order_column": "changed_on", + "order_direction": "desc", + "page": 0, + "page_size": 100, + } + uri = f"api/v1/dashboard/?q={prison.dumps(query)}" + self.login(username="gamma") + rv = self.client.get(uri) + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 200 + assert len(data["result"]) == 2 + assert list(data["result"][0].keys()) == query["columns"] + expected_results = [ + { + "dashboard_title": "create_title1", + "url": "/superset/dashboard/create_slug1/", + }, + { + "dashboard_title": "create_title0", + "url": "/superset/dashboard/create_slug0/", + }, + ] + for idx, response_item in enumerate(data["result"]): + for key, value in expected_results[idx].items(): + assert response_item[key] == value + + def create_dashboard_import(self): + buf = BytesIO() + with ZipFile(buf, "w") as bundle: + with bundle.open("dashboard_export/metadata.yaml", "w") as fp: + fp.write(yaml.safe_dump(dashboard_metadata_config).encode()) + with bundle.open( + "dashboard_export/databases/imported_database.yaml", "w" + ) as fp: + fp.write(yaml.safe_dump(database_config).encode()) + with bundle.open( + "dashboard_export/datasets/imported_dataset.yaml", "w" + ) as fp: + 
fp.write(yaml.safe_dump(dataset_config).encode()) + with bundle.open("dashboard_export/charts/imported_chart.yaml", "w") as fp: + fp.write(yaml.safe_dump(chart_config).encode()) + with bundle.open( + "dashboard_export/dashboards/imported_dashboard.yaml", "w" + ) as fp: + fp.write(yaml.safe_dump(dashboard_config).encode()) + buf.seek(0) + return buf + + def create_invalid_dashboard_import(self): + buf = BytesIO() + with ZipFile(buf, "w") as bundle: + with bundle.open("sql/dump.sql", "w") as fp: + fp.write("CREATE TABLE foo (bar INT)".encode()) + buf.seek(0) + return buf + + def test_delete_dashboard(self): + """ + Dashboard API: Test delete + """ + admin_id = self.get_user("admin").id + dashboard_id = self.insert_dashboard("title", "slug1", [admin_id]).id + self.login(username="admin") + uri = f"api/v1/dashboard/{dashboard_id}" + rv = self.delete_assert_metric(uri, "delete") + self.assertEqual(rv.status_code, 200) + model = db.session.query(Dashboard).get(dashboard_id) + self.assertEqual(model, None) + + def test_delete_bulk_dashboards(self): + """ + Dashboard API: Test delete bulk + """ + admin_id = self.get_user("admin").id + dashboard_count = 4 + dashboard_ids = list() + for dashboard_name_index in range(dashboard_count): + dashboard_ids.append( + self.insert_dashboard( + f"title{dashboard_name_index}", + f"slug{dashboard_name_index}", + [admin_id], + ).id + ) + self.login(username="admin") + argument = dashboard_ids + uri = f"api/v1/dashboard/?q={prison.dumps(argument)}" + rv = self.delete_assert_metric(uri, "bulk_delete") + self.assertEqual(rv.status_code, 200) + response = json.loads(rv.data.decode("utf-8")) + expected_response = {"message": f"Deleted {dashboard_count} dashboards"} + self.assertEqual(response, expected_response) + for dashboard_id in dashboard_ids: + model = db.session.query(Dashboard).get(dashboard_id) + self.assertEqual(model, None) + + def test_delete_bulk_embedded_dashboards(self): + """ + Dashboard API: Test delete bulk embedded + """ + user = self.get_user("admin") + dashboard_count = 4 + dashboard_ids = list() + for dashboard_name_index in range(dashboard_count): + dashboard_ids.append( + self.insert_dashboard( + f"title{dashboard_name_index}", + None, + [user.id], + ).id + ) + self.login(username=user.username) + for dashboard_id in dashboard_ids: + # post succeeds and returns value + allowed_domains = ["test.example", "embedded.example"] + resp = self.post_assert_metric( + f"api/v1/dashboard/{dashboard_id}/embedded", + {"allowed_domains": allowed_domains}, + "set_embedded", + ) + self.assertEqual(resp.status_code, 200) + result = json.loads(resp.data.decode("utf-8"))["result"] + self.assertIsNotNone(result["uuid"]) + self.assertNotEqual(result["uuid"], "") + self.assertEqual(result["allowed_domains"], allowed_domains) + argument = dashboard_ids + uri = f"api/v1/dashboard/?q={prison.dumps(argument)}" + rv = self.delete_assert_metric(uri, "bulk_delete") + self.assertEqual(rv.status_code, 200) + response = json.loads(rv.data.decode("utf-8")) + expected_response = {"message": f"Deleted {dashboard_count} dashboards"} + self.assertEqual(response, expected_response) + for dashboard_id in dashboard_ids: + model = db.session.query(Dashboard).get(dashboard_id) + self.assertEqual(model, None) + + def test_delete_bulk_dashboards_bad_request(self): + """ + Dashboard API: Test delete bulk bad request + """ + dashboard_ids = [1, "a"] + self.login(username="admin") + argument = dashboard_ids + uri = f"api/v1/dashboard/?q={prison.dumps(argument)}" + rv = 
self.client.delete(uri) + self.assertEqual(rv.status_code, 400) + + def test_delete_not_found_dashboard(self): + """ + Dashboard API: Test not found delete + """ + self.login(username="admin") + dashboard_id = 1000 + uri = f"api/v1/dashboard/{dashboard_id}" + rv = self.client.delete(uri) + self.assertEqual(rv.status_code, 404) + + @pytest.mark.usefixtures("create_dashboard_with_report") + def test_delete_dashboard_with_report(self): + """ + Dashboard API: Test delete with associated report + """ + self.login(username="admin") + dashboard = ( + db.session.query(Dashboard.id) + .filter(Dashboard.dashboard_title == "dashboard_report") + .one_or_none() + ) + uri = f"api/v1/dashboard/{dashboard.id}" + rv = self.client.delete(uri) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(rv.status_code, 422) + expected_response = { + "message": "There are associated alerts or reports: report_with_dashboard" + } + self.assertEqual(response, expected_response) + + def test_delete_bulk_dashboards_not_found(self): + """ + Dashboard API: Test delete bulk not found + """ + dashboard_ids = [1001, 1002] + self.login(username="admin") + argument = dashboard_ids + uri = f"api/v1/dashboard/?q={prison.dumps(argument)}" + rv = self.client.delete(uri) + self.assertEqual(rv.status_code, 404) + + @pytest.mark.usefixtures("create_dashboard_with_report", "create_dashboards") + def test_delete_bulk_dashboard_with_report(self): + """ + Dashboard API: Test bulk delete with associated report + """ + self.login(username="admin") + dashboard_with_report = ( + db.session.query(Dashboard.id) + .filter(Dashboard.dashboard_title == "dashboard_report") + .one_or_none() + ) + dashboards = ( + db.session.query(Dashboard) + .filter(Dashboard.dashboard_title.like("title%")) + .all() + ) + + dashboard_ids = [dashboard.id for dashboard in dashboards] + dashboard_ids.append(dashboard_with_report.id) + uri = f"api/v1/dashboard/?q={prison.dumps(dashboard_ids)}" + rv = self.client.delete(uri) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(rv.status_code, 422) + expected_response = { + "message": "There are associated alerts or reports: report_with_dashboard" + } + self.assertEqual(response, expected_response) + + def test_delete_dashboard_admin_not_owned(self): + """ + Dashboard API: Test admin delete not owned + """ + gamma_id = self.get_user("gamma").id + dashboard_id = self.insert_dashboard("title", "slug1", [gamma_id]).id + + self.login(username="admin") + uri = f"api/v1/dashboard/{dashboard_id}" + rv = self.client.delete(uri) + self.assertEqual(rv.status_code, 200) + model = db.session.query(Dashboard).get(dashboard_id) + self.assertEqual(model, None) + + def test_delete_bulk_dashboard_admin_not_owned(self): + """ + Dashboard API: Test admin delete bulk not owned + """ + gamma_id = self.get_user("gamma").id + dashboard_count = 4 + dashboard_ids = list() + for dashboard_name_index in range(dashboard_count): + dashboard_ids.append( + self.insert_dashboard( + f"title{dashboard_name_index}", + f"slug{dashboard_name_index}", + [gamma_id], + ).id + ) + + self.login(username="admin") + argument = dashboard_ids + uri = f"api/v1/dashboard/?q={prison.dumps(argument)}" + rv = self.client.delete(uri) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(rv.status_code, 200) + expected_response = {"message": f"Deleted {dashboard_count} dashboards"} + self.assertEqual(response, expected_response) + + for dashboard_id in dashboard_ids: + model = db.session.query(Dashboard).get(dashboard_id) + 
self.assertEqual(model, None) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_delete_dashboard_not_owned(self): + """ + Dashboard API: Test delete try not owned + """ + user_alpha1 = self.create_user( + "alpha1", "password", "Alpha", email="alpha1@superset.org" + ) + user_alpha2 = self.create_user( + "alpha2", "password", "Alpha", email="alpha2@superset.org" + ) + existing_slice = ( + db.session.query(Slice).filter_by(slice_name="Girl Name Cloud").first() + ) + dashboard = self.insert_dashboard( + "title", "slug1", [user_alpha1.id], slices=[existing_slice], published=True + ) + self.login(username="alpha2", password="password") + uri = f"api/v1/dashboard/{dashboard.id}" + rv = self.client.delete(uri) + self.assertEqual(rv.status_code, 403) + db.session.delete(dashboard) + db.session.delete(user_alpha1) + db.session.delete(user_alpha2) + db.session.commit() + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_delete_bulk_dashboard_not_owned(self): + """ + Dashboard API: Test delete bulk try not owned + """ + user_alpha1 = self.create_user( + "alpha1", "password", "Alpha", email="alpha1@superset.org" + ) + user_alpha2 = self.create_user( + "alpha2", "password", "Alpha", email="alpha2@superset.org" + ) + existing_slice = ( + db.session.query(Slice).filter_by(slice_name="Girl Name Cloud").first() + ) + + dashboard_count = 4 + dashboards = list() + for dashboard_name_index in range(dashboard_count): + dashboards.append( + self.insert_dashboard( + f"title{dashboard_name_index}", + f"slug{dashboard_name_index}", + [user_alpha1.id], + slices=[existing_slice], + published=True, + ) + ) + + owned_dashboard = self.insert_dashboard( + "title_owned", + "slug_owned", + [user_alpha2.id], + slices=[existing_slice], + published=True, + ) + + self.login(username="alpha2", password="password") + + # verify we can't delete not owned dashboards + arguments = [dashboard.id for dashboard in dashboards] + uri = f"api/v1/dashboard/?q={prison.dumps(arguments)}" + rv = self.client.delete(uri) + self.assertEqual(rv.status_code, 403) + response = json.loads(rv.data.decode("utf-8")) + expected_response = {"message": "Forbidden"} + self.assertEqual(response, expected_response) + + # nothing is deleted in bulk with a list of owned and not owned dashboards + arguments = [dashboard.id for dashboard in dashboards] + [owned_dashboard.id] + uri = f"api/v1/dashboard/?q={prison.dumps(arguments)}" + rv = self.client.delete(uri) + self.assertEqual(rv.status_code, 403) + response = json.loads(rv.data.decode("utf-8")) + expected_response = {"message": "Forbidden"} + self.assertEqual(response, expected_response) + + for dashboard in dashboards: + db.session.delete(dashboard) + db.session.delete(owned_dashboard) + db.session.delete(user_alpha1) + db.session.delete(user_alpha2) + db.session.commit() + + def test_create_dashboard(self): + """ + Dashboard API: Test create dashboard + """ + admin_id = self.get_user("admin").id + dashboard_data = { + "dashboard_title": "title1", + "slug": "slug1", + "owners": [admin_id], + "position_json": '{"a": "A"}', + "css": "css", + "json_metadata": '{"refresh_frequency": 30}', + "published": True, + } + self.login(username="admin") + uri = "api/v1/dashboard/" + rv = self.post_assert_metric(uri, dashboard_data, "post") + self.assertEqual(rv.status_code, 201) + data = json.loads(rv.data.decode("utf-8")) + model = db.session.query(Dashboard).get(data.get("id")) + db.session.delete(model) + db.session.commit() + + def 
test_create_simple_dashboard(self): + """ + Dashboard API: Test create simple dashboard + """ + dashboard_data = {"dashboard_title": "title1"} + self.login(username="admin") + uri = "api/v1/dashboard/" + rv = self.client.post(uri, json=dashboard_data) + self.assertEqual(rv.status_code, 201) + data = json.loads(rv.data.decode("utf-8")) + model = db.session.query(Dashboard).get(data.get("id")) + db.session.delete(model) + db.session.commit() + + def test_create_dashboard_empty(self): + """ + Dashboard API: Test create empty + """ + dashboard_data = {} + self.login(username="admin") + uri = "api/v1/dashboard/" + rv = self.client.post(uri, json=dashboard_data) + self.assertEqual(rv.status_code, 201) + data = json.loads(rv.data.decode("utf-8")) + model = db.session.query(Dashboard).get(data.get("id")) + db.session.delete(model) + db.session.commit() + + dashboard_data = {"dashboard_title": ""} + self.login(username="admin") + uri = "api/v1/dashboard/" + rv = self.client.post(uri, json=dashboard_data) + self.assertEqual(rv.status_code, 201) + data = json.loads(rv.data.decode("utf-8")) + model = db.session.query(Dashboard).get(data.get("id")) + db.session.delete(model) + db.session.commit() + + def test_create_dashboard_validate_title(self): + """ + Dashboard API: Test create dashboard validate title + """ + dashboard_data = {"dashboard_title": "a" * 600} + self.login(username="admin") + uri = "api/v1/dashboard/" + rv = self.post_assert_metric(uri, dashboard_data, "post") + self.assertEqual(rv.status_code, 400) + response = json.loads(rv.data.decode("utf-8")) + expected_response = { + "message": {"dashboard_title": ["Length must be between 0 and 500."]} + } + self.assertEqual(response, expected_response) + + def test_create_dashboard_validate_slug(self): + """ + Dashboard API: Test create validate slug + """ + admin_id = self.get_user("admin").id + dashboard = self.insert_dashboard("title1", "slug1", [admin_id]) + self.login(username="admin") + + # Check for slug uniqueness + dashboard_data = {"dashboard_title": "title2", "slug": "slug1"} + uri = "api/v1/dashboard/" + rv = self.client.post(uri, json=dashboard_data) + self.assertEqual(rv.status_code, 422) + response = json.loads(rv.data.decode("utf-8")) + expected_response = {"message": {"slug": ["Must be unique"]}} + self.assertEqual(response, expected_response) + + # Check for slug max size + dashboard_data = {"dashboard_title": "title2", "slug": "a" * 256} + uri = "api/v1/dashboard/" + rv = self.client.post(uri, json=dashboard_data) + self.assertEqual(rv.status_code, 400) + response = json.loads(rv.data.decode("utf-8")) + expected_response = {"message": {"slug": ["Length must be between 1 and 255."]}} + self.assertEqual(response, expected_response) + + db.session.delete(dashboard) + db.session.commit() + + def test_create_dashboard_validate_owners(self): + """ + Dashboard API: Test create validate owners + """ + dashboard_data = {"dashboard_title": "title1", "owners": [1000]} + self.login(username="admin") + uri = "api/v1/dashboard/" + rv = self.client.post(uri, json=dashboard_data) + self.assertEqual(rv.status_code, 422) + response = json.loads(rv.data.decode("utf-8")) + expected_response = {"message": {"owners": ["Owners are invalid"]}} + self.assertEqual(response, expected_response) + + def test_create_dashboard_validate_roles(self): + """ + Dashboard API: Test create validate roles + """ + dashboard_data = {"dashboard_title": "title1", "roles": [1000]} + self.login(username="admin") + uri = "api/v1/dashboard/" + rv = self.client.post(uri, 
json=dashboard_data) + self.assertEqual(rv.status_code, 422) + response = json.loads(rv.data.decode("utf-8")) + expected_response = {"message": {"roles": ["Some roles do not exist"]}} + self.assertEqual(response, expected_response) + + def test_create_dashboard_validate_json(self): + """ + Dashboard API: Test create validate json + """ + dashboard_data = {"dashboard_title": "title1", "position_json": '{"A:"a"}'} + self.login(username="admin") + uri = "api/v1/dashboard/" + rv = self.client.post(uri, json=dashboard_data) + self.assertEqual(rv.status_code, 400) + + dashboard_data = {"dashboard_title": "title1", "json_metadata": '{"A:"a"}'} + self.login(username="admin") + uri = "api/v1/dashboard/" + rv = self.client.post(uri, json=dashboard_data) + self.assertEqual(rv.status_code, 400) + + dashboard_data = { + "dashboard_title": "title1", + "json_metadata": '{"refresh_frequency": "A"}', + } + self.login(username="admin") + uri = "api/v1/dashboard/" + rv = self.client.post(uri, json=dashboard_data) + self.assertEqual(rv.status_code, 400) + + def test_update_dashboard(self): + """ + Dashboard API: Test update + """ + admin = self.get_user("admin") + admin_role = self.get_role("Admin") + dashboard_id = self.insert_dashboard( + "title1", "slug1", [admin.id], roles=[admin_role.id] + ).id + self.login(username="admin") + uri = f"api/v1/dashboard/{dashboard_id}" + rv = self.put_assert_metric(uri, self.dashboard_data, "put") + self.assertEqual(rv.status_code, 200) + model = db.session.query(Dashboard).get(dashboard_id) + self.assertEqual(model.dashboard_title, self.dashboard_data["dashboard_title"]) + self.assertEqual(model.slug, self.dashboard_data["slug"]) + self.assertEqual(model.position_json, self.dashboard_data["position_json"]) + self.assertEqual(model.css, self.dashboard_data["css"]) + self.assertEqual(model.json_metadata, self.dashboard_data["json_metadata"]) + self.assertEqual(model.published, self.dashboard_data["published"]) + self.assertEqual(model.owners, [admin]) + self.assertEqual(model.roles, [admin_role]) + + db.session.delete(model) + db.session.commit() + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_update_dashboard_chart_owners(self): + """ + Dashboard API: Test update chart owners + """ + user_alpha1 = self.create_user( + "alpha1", "password", "Alpha", email="alpha1@superset.org" + ) + user_alpha2 = self.create_user( + "alpha2", "password", "Alpha", email="alpha2@superset.org" + ) + admin = self.get_user("admin") + slices = [] + slices.append( + db.session.query(Slice).filter_by(slice_name="Girl Name Cloud").first() + ) + slices.append(db.session.query(Slice).filter_by(slice_name="Trends").first()) + slices.append(db.session.query(Slice).filter_by(slice_name="Boys").first()) + + dashboard = self.insert_dashboard( + "title1", + "slug1", + [admin.id], + slices=slices, + ) + self.login(username="admin") + uri = f"api/v1/dashboard/{dashboard.id}" + dashboard_data = {"owners": [user_alpha1.id, user_alpha2.id]} + rv = self.client.put(uri, json=dashboard_data) + self.assertEqual(rv.status_code, 200) + + # verify slices owners include alpha1 and alpha2 users + slices_ids = [slice.id for slice in slices] + # Refetch Slices + slices = db.session.query(Slice).filter(Slice.id.in_(slices_ids)).all() + for slice in slices: + self.assertIn(user_alpha1, slice.owners) + self.assertIn(user_alpha2, slice.owners) + self.assertNotIn(admin, slice.owners) + # Revert owners on slice + slice.owners = [] + db.session.commit() + + # Rollback changes + 
db.session.delete(dashboard) + db.session.delete(user_alpha1) + db.session.delete(user_alpha2) + db.session.commit() + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_update_dashboard_chart_owners_propagation(self): + """ + Dashboard API: Test update chart owners propagation + """ + user_alpha1 = self.create_user( + "alpha1", + "password", + "Alpha", + email="alpha1@superset.org", + first_name="alpha1", + ) + admin = self.get_user("admin") + slices = [] + slices.append(db.session.query(Slice).filter_by(slice_name="Trends").one()) + slices.append(db.session.query(Slice).filter_by(slice_name="Boys").one()) + + # Insert dashboard with admin as owner + dashboard = self.insert_dashboard( + "title1", + "slug1", + [admin.id], + slices=slices, + ) + + # Updates dashboard without Boys in json_metadata positions + # and user_alpha1 as owner + dashboard_data = { + "owners": [user_alpha1.id], + "json_metadata": json.dumps( + { + "positions": { + f"{slices[0].id}": { + "type": "CHART", + "meta": {"chartId": slices[0].id}, + }, + } + } + ), + } + self.login(username="admin") + uri = f"api/v1/dashboard/{dashboard.id}" + rv = self.client.put(uri, json=dashboard_data) + self.assertEqual(rv.status_code, 200) + + # Check that chart named Boys does not contain alpha 1 in its owners + boys = db.session.query(Slice).filter_by(slice_name="Boys").one() + self.assertNotIn(user_alpha1, boys.owners) + + # Revert owners on slice + for slice in slices: + slice.owners = [] + db.session.commit() + + # Rollback changes + db.session.delete(dashboard) + db.session.delete(user_alpha1) + db.session.commit() + + def test_update_partial_dashboard(self): + """ + Dashboard API: Test update partial + """ + admin_id = self.get_user("admin").id + dashboard_id = self.insert_dashboard("title1", "slug1", [admin_id]).id + self.login(username="admin") + uri = f"api/v1/dashboard/{dashboard_id}" + rv = self.client.put( + uri, json={"json_metadata": self.dashboard_data["json_metadata"]} + ) + self.assertEqual(rv.status_code, 200) + + rv = self.client.put( + uri, json={"dashboard_title": self.dashboard_data["dashboard_title"]} + ) + self.assertEqual(rv.status_code, 200) + + rv = self.client.put(uri, json={"slug": self.dashboard_data["slug"]}) + self.assertEqual(rv.status_code, 200) + + model = db.session.query(Dashboard).get(dashboard_id) + self.assertEqual(model.json_metadata, self.dashboard_data["json_metadata"]) + self.assertEqual(model.dashboard_title, self.dashboard_data["dashboard_title"]) + self.assertEqual(model.slug, self.dashboard_data["slug"]) + + db.session.delete(model) + db.session.commit() + + def test_update_dashboard_new_owner_not_admin(self): + """ + Dashboard API: Test update set new owner implicitly adds logged in owner + """ + gamma = self.get_user("gamma") + alpha = self.get_user("alpha") + dashboard_id = self.insert_dashboard("title1", "slug1", [alpha.id]).id + dashboard_data = {"dashboard_title": "title1_changed", "owners": [gamma.id]} + self.login(username="alpha") + uri = f"api/v1/dashboard/{dashboard_id}" + rv = self.client.put(uri, json=dashboard_data) + self.assertEqual(rv.status_code, 200) + model = db.session.query(Dashboard).get(dashboard_id) + self.assertIn(gamma, model.owners) + self.assertIn(alpha, model.owners) + for slc in model.slices: + self.assertIn(gamma, slc.owners) + self.assertIn(alpha, slc.owners) + db.session.delete(model) + db.session.commit() + + def test_update_dashboard_new_owner_admin(self): + """ + Dashboard API: Test update set new owner as admin to other 
than current user + """ + gamma = self.get_user("gamma") + admin = self.get_user("admin") + dashboard_id = self.insert_dashboard("title1", "slug1", [admin.id]).id + dashboard_data = {"dashboard_title": "title1_changed", "owners": [gamma.id]} + self.login(username="admin") + uri = f"api/v1/dashboard/{dashboard_id}" + rv = self.client.put(uri, json=dashboard_data) + self.assertEqual(rv.status_code, 200) + model = db.session.query(Dashboard).get(dashboard_id) + self.assertIn(gamma, model.owners) + self.assertNotIn(admin, model.owners) + for slc in model.slices: + self.assertIn(gamma, slc.owners) + self.assertNotIn(admin, slc.owners) + db.session.delete(model) + db.session.commit() + + def test_update_dashboard_slug_formatting(self): + """ + Dashboard API: Test update slug formatting + """ + admin_id = self.get_user("admin").id + dashboard_id = self.insert_dashboard("title1", "slug1", [admin_id]).id + dashboard_data = {"dashboard_title": "title1_changed", "slug": "slug1 changed"} + self.login(username="admin") + uri = f"api/v1/dashboard/{dashboard_id}" + rv = self.client.put(uri, json=dashboard_data) + self.assertEqual(rv.status_code, 200) + model = db.session.query(Dashboard).get(dashboard_id) + self.assertEqual(model.dashboard_title, "title1_changed") + self.assertEqual(model.slug, "slug1-changed") + db.session.delete(model) + db.session.commit() + + def test_update_dashboard_validate_slug(self): + """ + Dashboard API: Test update validate slug + """ + admin_id = self.get_user("admin").id + dashboard1 = self.insert_dashboard("title1", "slug-1", [admin_id]) + dashboard2 = self.insert_dashboard("title2", "slug-2", [admin_id]) + + self.login(username="admin") + # Check for slug uniqueness + dashboard_data = {"dashboard_title": "title2", "slug": "slug 1"} + uri = f"api/v1/dashboard/{dashboard2.id}" + rv = self.client.put(uri, json=dashboard_data) + self.assertEqual(rv.status_code, 422) + response = json.loads(rv.data.decode("utf-8")) + expected_response = {"message": {"slug": ["Must be unique"]}} + self.assertEqual(response, expected_response) + + db.session.delete(dashboard1) + db.session.delete(dashboard2) + db.session.commit() + + dashboard1 = self.insert_dashboard("title1", None, [admin_id]) + dashboard2 = self.insert_dashboard("title2", None, [admin_id]) + self.login(username="admin") + # Accept empty slugs and don't validate them has unique + dashboard_data = {"dashboard_title": "title2_changed", "slug": ""} + uri = f"api/v1/dashboard/{dashboard2.id}" + rv = self.client.put(uri, json=dashboard_data) + self.assertEqual(rv.status_code, 200) + + db.session.delete(dashboard1) + db.session.delete(dashboard2) + db.session.commit() + + def test_update_published(self): + """ + Dashboard API: Test update published patch + """ + admin = self.get_user("admin") + gamma = self.get_user("gamma") + + dashboard = self.insert_dashboard("title1", "slug1", [admin.id, gamma.id]) + dashboard_data = {"published": True} + self.login(username="admin") + uri = f"api/v1/dashboard/{dashboard.id}" + rv = self.client.put(uri, json=dashboard_data) + self.assertEqual(rv.status_code, 200) + + model = db.session.query(Dashboard).get(dashboard.id) + self.assertEqual(model.published, True) + self.assertEqual(model.slug, "slug1") + self.assertIn(admin, model.owners) + self.assertIn(gamma, model.owners) + db.session.delete(model) + db.session.commit() + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_update_dashboard_not_owned(self): + """ + Dashboard API: Test update dashboard not owned + """ 
+ user_alpha1 = self.create_user( + "alpha1", "password", "Alpha", email="alpha1@superset.org" + ) + user_alpha2 = self.create_user( + "alpha2", "password", "Alpha", email="alpha2@superset.org" + ) + existing_slice = ( + db.session.query(Slice).filter_by(slice_name="Girl Name Cloud").first() + ) + dashboard = self.insert_dashboard( + "title", "slug1", [user_alpha1.id], slices=[existing_slice], published=True + ) + self.login(username="alpha2", password="password") + dashboard_data = {"dashboard_title": "title1_changed", "slug": "slug1 changed"} + uri = f"api/v1/dashboard/{dashboard.id}" + rv = self.put_assert_metric(uri, dashboard_data, "put") + self.assertEqual(rv.status_code, 403) + db.session.delete(dashboard) + db.session.delete(user_alpha1) + db.session.delete(user_alpha2) + db.session.commit() + + @patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + {"VERSIONED_EXPORT": False}, + clear=True, + ) + @pytest.mark.usefixtures( + "load_world_bank_dashboard_with_slices", + "load_birth_names_dashboard_with_slices", + ) + @freeze_time("2022-01-01") + def test_export(self): + """ + Dashboard API: Test dashboard export + """ + self.login(username="admin") + dashboards_ids = get_dashboards_ids(db, ["world_health", "births"]) + uri = f"api/v1/dashboard/export/?q={prison.dumps(dashboards_ids)}" + + rv = self.get_assert_metric(uri, "export") + headers = generate_download_headers("json")["Content-Disposition"] + + assert rv.status_code == 200 + assert rv.headers["Content-Disposition"] == headers + + def test_export_not_found(self): + """ + Dashboard API: Test dashboard export not found + """ + self.login(username="admin") + argument = [1000] + uri = f"api/v1/dashboard/export/?q={prison.dumps(argument)}" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 404) + + def test_export_not_allowed(self): + """ + Dashboard API: Test dashboard export not allowed + """ + admin_id = self.get_user("admin").id + dashboard = self.insert_dashboard("title", "slug1", [admin_id], published=False) + + self.login(username="gamma") + argument = [dashboard.id] + uri = f"api/v1/dashboard/export/?q={prison.dumps(argument)}" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 404) + db.session.delete(dashboard) + db.session.commit() + + def test_export_bundle(self): + """ + Dashboard API: Test dashboard export + """ + dashboards_ids = get_dashboards_ids(db, ["world_health", "births"]) + uri = f"api/v1/dashboard/export/?q={prison.dumps(dashboards_ids)}" + + self.login(username="admin") + rv = self.client.get(uri) + + assert rv.status_code == 200 + + buf = BytesIO(rv.data) + assert is_zipfile(buf) + + def test_export_bundle_not_found(self): + """ + Dashboard API: Test dashboard export not found + """ + self.login(username="admin") + argument = [1000] + uri = f"api/v1/dashboard/export/?q={prison.dumps(argument)}" + rv = self.client.get(uri) + assert rv.status_code == 404 + + def test_export_bundle_not_allowed(self): + """ + Dashboard API: Test dashboard export not allowed + """ + admin_id = self.get_user("admin").id + dashboard = self.insert_dashboard("title", "slug1", [admin_id], published=False) + + self.login(username="gamma") + argument = [dashboard.id] + uri = f"api/v1/dashboard/export/?q={prison.dumps(argument)}" + rv = self.client.get(uri) + assert rv.status_code == 404 + + db.session.delete(dashboard) + db.session.commit() + + def test_import_dashboard(self): + """ + Dashboard API: Test import dashboard + """ + self.login(username="admin") + uri = "api/v1/dashboard/import/" 
+ + buf = self.create_dashboard_import() + form_data = { + "formData": (buf, "dashboard_export.zip"), + } + rv = self.client.post(uri, data=form_data, content_type="multipart/form-data") + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 200 + assert response == {"message": "OK"} + + dashboard = ( + db.session.query(Dashboard).filter_by(uuid=dashboard_config["uuid"]).one() + ) + assert dashboard.dashboard_title == "Test dash" + + assert len(dashboard.slices) == 1 + chart = dashboard.slices[0] + assert str(chart.uuid) == chart_config["uuid"] + + dataset = chart.table + assert str(dataset.uuid) == dataset_config["uuid"] + + database = dataset.database + assert str(database.uuid) == database_config["uuid"] + + db.session.delete(dashboard) + db.session.delete(chart) + db.session.delete(dataset) + db.session.delete(database) + db.session.commit() + + def test_import_dashboard_invalid_file(self): + """ + Dashboard API: Test import invalid dashboard file + """ + self.login(username="admin") + uri = "api/v1/dashboard/import/" + + buf = self.create_invalid_dashboard_import() + form_data = { + "formData": (buf, "dashboard_export.zip"), + } + rv = self.client.post(uri, data=form_data, content_type="multipart/form-data") + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 400 + assert response == { + "errors": [ + { + "message": "No valid import files were found", + "error_type": "GENERIC_COMMAND_ERROR", + "level": "warning", + "extra": { + "issue_codes": [ + { + "code": 1010, + "message": ( + "Issue 1010 - Superset encountered an " + "error while running a command." + ), + } + ] + }, + } + ] + } + + def test_import_dashboard_v0_export(self): + num_dashboards = db.session.query(Dashboard).count() + + self.login(username="admin") + uri = "api/v1/dashboard/import/" + + buf = BytesIO() + buf.write(json.dumps(dashboard_export).encode()) + buf.seek(0) + form_data = { + "formData": (buf, "20201119_181105.json"), + } + rv = self.client.post(uri, data=form_data, content_type="multipart/form-data") + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 200 + assert response == {"message": "OK"} + assert db.session.query(Dashboard).count() == num_dashboards + 1 + + dashboard = ( + db.session.query(Dashboard).filter_by(dashboard_title="Births 2").one() + ) + chart = dashboard.slices[0] + dataset = chart.table + + db.session.delete(dashboard) + db.session.delete(chart) + db.session.delete(dataset) + db.session.commit() + + def test_import_dashboard_overwrite(self): + """ + Dashboard API: Test import existing dashboard + """ + self.login(username="admin") + uri = "api/v1/dashboard/import/" + + buf = self.create_dashboard_import() + form_data = { + "formData": (buf, "dashboard_export.zip"), + } + rv = self.client.post(uri, data=form_data, content_type="multipart/form-data") + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 200 + assert response == {"message": "OK"} + + # import again without overwrite flag + buf = self.create_dashboard_import() + form_data = { + "formData": (buf, "dashboard_export.zip"), + } + rv = self.client.post(uri, data=form_data, content_type="multipart/form-data") + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 422 + assert response == { + "errors": [ + { + "message": "Error importing dashboard", + "error_type": "GENERIC_COMMAND_ERROR", + "level": "warning", + "extra": { + "dashboards/imported_dashboard.yaml": "Dashboard already exists and `overwrite=true` was 
not passed", + "issue_codes": [ + { + "code": 1010, + "message": ( + "Issue 1010 - Superset encountered an " + "error while running a command." + ), + } + ], + }, + } + ] + } + + # import with overwrite flag + buf = self.create_dashboard_import() + form_data = { + "formData": (buf, "dashboard_export.zip"), + "overwrite": "true", + } + rv = self.client.post(uri, data=form_data, content_type="multipart/form-data") + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 200 + assert response == {"message": "OK"} + + # cleanup + dashboard = ( + db.session.query(Dashboard).filter_by(uuid=dashboard_config["uuid"]).one() + ) + chart = dashboard.slices[0] + dataset = chart.table + database = dataset.database + + db.session.delete(dashboard) + db.session.delete(chart) + db.session.delete(dataset) + db.session.delete(database) + db.session.commit() + + def test_import_dashboard_invalid(self): + """ + Dashboard API: Test import invalid dashboard + """ + self.login(username="admin") + uri = "api/v1/dashboard/import/" + + buf = BytesIO() + with ZipFile(buf, "w") as bundle: + with bundle.open("dashboard_export/metadata.yaml", "w") as fp: + fp.write(yaml.safe_dump(dataset_metadata_config).encode()) + with bundle.open( + "dashboard_export/databases/imported_database.yaml", "w" + ) as fp: + fp.write(yaml.safe_dump(database_config).encode()) + with bundle.open( + "dashboard_export/datasets/imported_dataset.yaml", "w" + ) as fp: + fp.write(yaml.safe_dump(dataset_config).encode()) + with bundle.open("dashboard_export/charts/imported_chart.yaml", "w") as fp: + fp.write(yaml.safe_dump(chart_config).encode()) + with bundle.open( + "dashboard_export/dashboards/imported_dashboard.yaml", "w" + ) as fp: + fp.write(yaml.safe_dump(dashboard_config).encode()) + buf.seek(0) + + form_data = { + "formData": (buf, "dashboard_export.zip"), + } + rv = self.client.post(uri, data=form_data, content_type="multipart/form-data") + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 422 + assert response == { + "errors": [ + { + "message": "Error importing dashboard", + "error_type": "GENERIC_COMMAND_ERROR", + "level": "warning", + "extra": { + "metadata.yaml": {"type": ["Must be equal to Dashboard."]}, + "issue_codes": [ + { + "code": 1010, + "message": ( + "Issue 1010 - Superset encountered " + "an error while running a command." 
+ ), + } + ], + }, + } + ] + } + + def test_get_all_related_roles(self): + """ + API: Test get all related roles + """ + self.login(username="admin") + uri = "api/v1/dashboard/related/roles" + + rv = self.client.get(uri) + assert rv.status_code == 200 + response = json.loads(rv.data.decode("utf-8")) + roles = db.session.query(security_manager.role_model).all() + expected_roles = [str(role) for role in roles] + assert response["count"] == len(roles) + + response_roles = [result["text"] for result in response["result"]] + for expected_role in expected_roles: + assert expected_role in response_roles + + def test_get_filter_related_roles(self): + """ + API: Test get filter related roles + """ + self.login(username="admin") + argument = {"filter": "alpha"} + uri = f"api/v1/dashboard/related/roles?q={prison.dumps(argument)}" + + rv = self.client.get(uri) + assert rv.status_code == 200 + response = json.loads(rv.data.decode("utf-8")) + assert response["count"] == 1 + + response_roles = [result["text"] for result in response["result"]] + assert "Alpha" in response_roles + + def test_get_all_related_roles_with_extra_filters(self): + """ + API: Test get all related roles with extra related query filters + """ + self.login(username="admin") + + def _base_filter(query): + return query.filter_by(name="Alpha") + + with patch.dict( + "superset.views.filters.current_app.config", + {"EXTRA_RELATED_QUERY_FILTERS": {"role": _base_filter}}, + ): + uri = "api/v1/dashboard/related/roles" + rv = self.client.get(uri) + assert rv.status_code == 200 + response = json.loads(rv.data.decode("utf-8")) + response_roles = [result["text"] for result in response["result"]] + assert response_roles == ["Alpha"] + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def test_embedded_dashboards(self): + self.login(username="admin") + uri = "api/v1/dashboard/world_health/embedded" + + # initial get should return 404 + resp = self.get_assert_metric(uri, "get_embedded") + self.assertEqual(resp.status_code, 404) + + # post succeeds and returns value + allowed_domains = ["test.example", "embedded.example"] + resp = self.post_assert_metric( + uri, + {"allowed_domains": allowed_domains}, + "set_embedded", + ) + self.assertEqual(resp.status_code, 200) + result = json.loads(resp.data.decode("utf-8"))["result"] + self.assertIsNotNone(result["uuid"]) + self.assertNotEqual(result["uuid"], "") + self.assertEqual(result["allowed_domains"], allowed_domains) + + db.session.expire_all() + + # get returns value + resp = self.get_assert_metric(uri, "get_embedded") + self.assertEqual(resp.status_code, 200) + result = json.loads(resp.data.decode("utf-8"))["result"] + self.assertIsNotNone(result["uuid"]) + self.assertNotEqual(result["uuid"], "") + self.assertEqual(result["allowed_domains"], allowed_domains) + + # save uuid for later + original_uuid = result["uuid"] + + # posting again overwrites the value and returns it + resp = self.post_assert_metric(uri, {"allowed_domains": []}, "set_embedded") + self.assertEqual(resp.status_code, 200) + result = json.loads(resp.data.decode("utf-8"))["result"] + self.assertIsNotNone(result["uuid"]) + self.assertNotEqual(result["uuid"], "") + self.assertEqual(result["allowed_domains"], []) + + db.session.expire_all() + + # get returns changed value + resp = self.get_assert_metric(uri, "get_embedded") + self.assertEqual(resp.status_code, 200) + result = json.loads(resp.data.decode("utf-8"))["result"] + self.assertEqual(result["uuid"], original_uuid) +
self.assertEqual(result["allowed_domains"], []) + + # delete succeeds + resp = self.delete_assert_metric(uri, "delete_embedded") + self.assertEqual(resp.status_code, 200) + + db.session.expire_all() + + # get returns 404 + resp = self.get_assert_metric(uri, "get_embedded") + self.assertEqual(resp.status_code, 404) + + @pytest.mark.usefixtures("create_created_by_gamma_dashboards") + def test_gets_created_by_user_dashboards_filter(self): + expected_models = ( + db.session.query(Dashboard) + .filter(Dashboard.created_by_fk.isnot(None)) + .all() + ) + + arguments = { + "filters": [ + {"col": "created_by", "opr": "dashboard_has_created_by", "value": True} + ], + "keys": ["none"], + "columns": ["dashboard_title"], + } + self.login(username="admin") + + uri = f"api/v1/dashboard/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual(data["count"], len(expected_models)) + + def test_gets_not_created_by_user_dashboards_filter(self): + dashboard = self.insert_dashboard(f"title", f"slug", []) + expected_models = ( + db.session.query(Dashboard).filter(Dashboard.created_by_fk.is_(None)).all() + ) + + arguments = { + "filters": [ + {"col": "created_by", "opr": "dashboard_has_created_by", "value": False} + ], + "keys": ["none"], + "columns": ["dashboard_title"], + } + self.login(username="admin") + + uri = f"api/v1/dashboard/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual(data["count"], len(expected_models)) + db.session.delete(dashboard) + db.session.commit() diff --git a/tests/integration_tests/dashboards/base_case.py b/tests/integration_tests/dashboards/base_case.py new file mode 100644 index 0000000000000..a0a1ff630f08d --- /dev/null +++ b/tests/integration_tests/dashboards/base_case.py @@ -0,0 +1,118 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+import json +from typing import Any, Dict, Union + +import prison +from flask import Response + +from superset import app, security_manager +from tests.integration_tests.base_tests import SupersetTestCase +from tests.integration_tests.dashboards.consts import * +from tests.integration_tests.dashboards.dashboard_test_utils import ( + build_save_dash_parts, +) +from tests.integration_tests.dashboards.superset_factory_util import ( + delete_all_inserted_objects, +) + + +class DashboardTestCase(SupersetTestCase): + def get_dashboard_via_api_by_id(self, dashboard_id: int) -> Response: + uri = DASHBOARD_API_URL_FORMAT.format(dashboard_id) + return self.get_assert_metric(uri, "get") + + def get_dashboard_view_response(self, dashboard_to_access) -> Response: + return self.client.get(dashboard_to_access.url) + + def get_dashboard_api_response(self, dashboard_to_access) -> Response: + return self.client.get(DASHBOARD_API_URL_FORMAT.format(dashboard_to_access.id)) + + def get_dashboards_list_response(self) -> Response: + return self.client.get(GET_DASHBOARDS_LIST_VIEW) + + def get_dashboards_api_response(self) -> Response: + return self.client.get(DASHBOARDS_API_URL) + + def save_dashboard_via_view( + self, dashboard_id: Union[str, int], dashboard_data: Dict[str, Any] + ) -> Response: + save_dash_url = SAVE_DASHBOARD_URL_FORMAT.format(dashboard_id) + return self.get_resp(save_dash_url, data=dict(data=json.dumps(dashboard_data))) + + def save_dashboard( + self, dashboard_id: Union[str, int], dashboard_data: Dict[str, Any] + ) -> Response: + return self.save_dashboard_via_view(dashboard_id, dashboard_data) + + def delete_dashboard_via_view(self, dashboard_id: int) -> Response: + delete_dashboard_url = DELETE_DASHBOARD_VIEW_URL_FORMAT.format(dashboard_id) + return self.get_resp(delete_dashboard_url, {}) + + def delete_dashboard_via_api(self, dashboard_id): + uri = DASHBOARD_API_URL_FORMAT.format(dashboard_id) + return self.delete_assert_metric(uri, "delete") + + def bulk_delete_dashboard_via_api(self, dashboard_ids): + uri = DASHBOARDS_API_URL_WITH_QUERY_FORMAT.format(prison.dumps(dashboard_ids)) + return self.delete_assert_metric(uri, "bulk_delete") + + def delete_dashboard(self, dashboard_id: int) -> Response: + return self.delete_dashboard_via_view(dashboard_id) + + def assert_permission_was_created(self, dashboard): + view_menu = security_manager.find_view_menu(dashboard.view_name) + self.assertIsNotNone(view_menu) + self.assertEqual(len(security_manager.find_permissions_view_menu(view_menu)), 1) + + def assert_permission_kept_and_changed(self, updated_dashboard, excepted_view_id): + view_menu_after_title_changed = security_manager.find_view_menu( + updated_dashboard.view_name + ) + self.assertIsNotNone(view_menu_after_title_changed) + self.assertEqual(view_menu_after_title_changed.id, excepted_view_id) + + def assert_permissions_were_deleted(self, deleted_dashboard): + view_menu = security_manager.find_view_menu(deleted_dashboard.view_name) + self.assertIsNone(view_menu) + + def save_dash_basic_case(self, username=ADMIN_USERNAME): + # arrange + self.login(username=username) + ( + dashboard_to_save, + data_before_change, + data_after_change, + ) = build_save_dash_parts() + + # act + save_dash_response = self.save_dashboard_via_view( + dashboard_to_save.id, data_after_change + ) + + # assert + self.assertIn("SUCCESS", save_dash_response) + + # post test + self.save_dashboard(dashboard_to_save.id, data_before_change) + + def clean_created_objects(self): + with app.test_request_context(): + 
self.logout() + self.login("admin") + delete_all_inserted_objects() + self.logout() diff --git a/tests/integration_tests/dashboards/commands_tests.py b/tests/integration_tests/dashboards/commands_tests.py new file mode 100644 index 0000000000000..d382a5f50d1b2 --- /dev/null +++ b/tests/integration_tests/dashboards/commands_tests.py @@ -0,0 +1,663 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import itertools +import json +from unittest.mock import MagicMock, patch + +import pytest +import yaml +from werkzeug.utils import secure_filename + +from superset import db, security_manager +from superset.commands.exceptions import CommandInvalidError +from superset.commands.importers.exceptions import IncorrectVersionError +from superset.connectors.sqla.models import SqlaTable +from superset.dashboards.commands.exceptions import DashboardNotFoundError +from superset.dashboards.commands.export import ( + append_charts, + ExportDashboardsCommand, + get_default_position, +) +from superset.dashboards.commands.importers import v0, v1 +from superset.models.core import Database +from superset.models.dashboard import Dashboard +from superset.models.slice import Slice +from tests.integration_tests.base_tests import SupersetTestCase +from tests.integration_tests.fixtures.importexport import ( + chart_config, + dashboard_config, + dashboard_export, + dashboard_metadata_config, + database_config, + dataset_config, + dataset_metadata_config, +) +from tests.integration_tests.fixtures.world_bank_dashboard import ( + load_world_bank_dashboard_with_slices, + load_world_bank_data, +) + + +class TestExportDashboardsCommand(SupersetTestCase): + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + @patch("superset.security.manager.g") + @patch("superset.views.base.g") + def test_export_dashboard_command(self, mock_g1, mock_g2): + mock_g1.user = security_manager.find_user("admin") + mock_g2.user = security_manager.find_user("admin") + + example_dashboard = ( + db.session.query(Dashboard).filter_by(slug="world_health").one() + ) + command = ExportDashboardsCommand([example_dashboard.id]) + contents = dict(command.run()) + + expected_paths = { + "metadata.yaml", + f"dashboards/World_Banks_Data_{example_dashboard.id}.yaml", + "datasets/examples/wb_health_population.yaml", + "databases/examples.yaml", + } + for chart in example_dashboard.slices: + chart_slug = secure_filename(chart.slice_name) + expected_paths.add(f"charts/{chart_slug}_{chart.id}.yaml") + assert expected_paths == set(contents.keys()) + + metadata = yaml.safe_load( + contents[f"dashboards/World_Banks_Data_{example_dashboard.id}.yaml"] + ) + + # remove chart UUIDs from metadata so we can compare + for chart_info in metadata["position"].values(): + if isinstance(chart_info, dict) and "uuid" in 
chart_info.get("meta", {}): + del chart_info["meta"]["chartId"] + del chart_info["meta"]["uuid"] + + assert metadata == { + "dashboard_title": "World Bank's Data", + "description": None, + "css": None, + "slug": "world_health", + "uuid": str(example_dashboard.uuid), + "position": { + "CHART-36bfc934": { + "children": [], + "id": "CHART-36bfc934", + "meta": {"height": 25, "sliceName": "Region Filter", "width": 2}, + "type": "CHART", + }, + "CHART-37982887": { + "children": [], + "id": "CHART-37982887", + "meta": { + "height": 25, + "sliceName": "World's Population", + "width": 2, + }, + "type": "CHART", + }, + "CHART-17e0f8d8": { + "children": [], + "id": "CHART-17e0f8d8", + "meta": { + "height": 92, + "sliceName": "Most Populated Countries", + "width": 3, + }, + "type": "CHART", + }, + "CHART-2ee52f30": { + "children": [], + "id": "CHART-2ee52f30", + "meta": {"height": 38, "sliceName": "Growth Rate", "width": 6}, + "type": "CHART", + }, + "CHART-2d5b6871": { + "children": [], + "id": "CHART-2d5b6871", + "meta": {"height": 52, "sliceName": "% Rural", "width": 7}, + "type": "CHART", + }, + "CHART-0fd0d252": { + "children": [], + "id": "CHART-0fd0d252", + "meta": { + "height": 50, + "sliceName": "Life Expectancy VS Rural %", + "width": 8, + }, + "type": "CHART", + }, + "CHART-97f4cb48": { + "children": [], + "id": "CHART-97f4cb48", + "meta": {"height": 38, "sliceName": "Rural Breakdown", "width": 3}, + "type": "CHART", + }, + "CHART-b5e05d6f": { + "children": [], + "id": "CHART-b5e05d6f", + "meta": { + "height": 50, + "sliceName": "World's Pop Growth", + "width": 4, + }, + "type": "CHART", + }, + "CHART-e76e9f5f": { + "children": [], + "id": "CHART-e76e9f5f", + "meta": {"height": 50, "sliceName": "Box plot", "width": 4}, + "type": "CHART", + }, + "CHART-a4808bba": { + "children": [], + "id": "CHART-a4808bba", + "meta": {"height": 50, "sliceName": "Treemap", "width": 8}, + "type": "CHART", + }, + "COLUMN-071bbbad": { + "children": ["ROW-1e064e3c", "ROW-afdefba9"], + "id": "COLUMN-071bbbad", + "meta": {"background": "BACKGROUND_TRANSPARENT", "width": 9}, + "type": "COLUMN", + }, + "COLUMN-fe3914b8": { + "children": ["CHART-36bfc934", "CHART-37982887"], + "id": "COLUMN-fe3914b8", + "meta": {"background": "BACKGROUND_TRANSPARENT", "width": 2}, + "type": "COLUMN", + }, + "GRID_ID": { + "children": ["ROW-46632bc2", "ROW-3fa26c5d", "ROW-812b3f13"], + "id": "GRID_ID", + "type": "GRID", + }, + "HEADER_ID": { + "id": "HEADER_ID", + "meta": {"text": "World's Bank Data"}, + "type": "HEADER", + }, + "ROOT_ID": {"children": ["GRID_ID"], "id": "ROOT_ID", "type": "ROOT"}, + "ROW-1e064e3c": { + "children": ["COLUMN-fe3914b8", "CHART-2d5b6871"], + "id": "ROW-1e064e3c", + "meta": {"background": "BACKGROUND_TRANSPARENT"}, + "type": "ROW", + }, + "ROW-3fa26c5d": { + "children": ["CHART-b5e05d6f", "CHART-0fd0d252"], + "id": "ROW-3fa26c5d", + "meta": {"background": "BACKGROUND_TRANSPARENT"}, + "type": "ROW", + }, + "ROW-46632bc2": { + "children": ["COLUMN-071bbbad", "CHART-17e0f8d8"], + "id": "ROW-46632bc2", + "meta": {"background": "BACKGROUND_TRANSPARENT"}, + "type": "ROW", + }, + "ROW-812b3f13": { + "children": ["CHART-a4808bba", "CHART-e76e9f5f"], + "id": "ROW-812b3f13", + "meta": {"background": "BACKGROUND_TRANSPARENT"}, + "type": "ROW", + }, + "ROW-afdefba9": { + "children": ["CHART-2ee52f30", "CHART-97f4cb48"], + "id": "ROW-afdefba9", + "meta": {"background": "BACKGROUND_TRANSPARENT"}, + "type": "ROW", + }, + "DASHBOARD_VERSION_KEY": "v2", + }, + "metadata": {"mock_key": "mock_value"}, + "version": "1.0.0", 
+ } + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + @patch("superset.security.manager.g") + @patch("superset.views.base.g") + def test_export_dashboard_command_no_access(self, mock_g1, mock_g2): + """Test that users can't export dashboards they don't have access to""" + mock_g1.user = security_manager.find_user("gamma") + mock_g2.user = security_manager.find_user("gamma") + + example_dashboard = ( + db.session.query(Dashboard).filter_by(slug="world_health").one() + ) + command = ExportDashboardsCommand([example_dashboard.id]) + contents = command.run() + with self.assertRaises(DashboardNotFoundError): + next(contents) + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + @patch("superset.security.manager.g") + @patch("superset.views.base.g") + def test_export_dashboard_command_invalid_dashboard(self, mock_g1, mock_g2): + """Test that an error is raised when exporting an invalid dashboard""" + mock_g1.user = security_manager.find_user("admin") + mock_g2.user = security_manager.find_user("admin") + command = ExportDashboardsCommand([-1]) + contents = command.run() + with self.assertRaises(DashboardNotFoundError): + next(contents) + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + @patch("superset.security.manager.g") + @patch("superset.views.base.g") + def test_export_dashboard_command_key_order(self, mock_g1, mock_g2): + """Test that the keys in the YAML have the same order as export_fields""" + mock_g1.user = security_manager.find_user("admin") + mock_g2.user = security_manager.find_user("admin") + + example_dashboard = ( + db.session.query(Dashboard).filter_by(slug="world_health").one() + ) + command = ExportDashboardsCommand([example_dashboard.id]) + contents = dict(command.run()) + + metadata = yaml.safe_load( + contents[f"dashboards/World_Banks_Data_{example_dashboard.id}.yaml"] + ) + assert list(metadata.keys()) == [ + "dashboard_title", + "description", + "css", + "slug", + "uuid", + "position", + "metadata", + "version", + ] + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + @patch("superset.dashboards.commands.export.suffix") + def test_append_charts(self, mock_suffix): + """Test that orphaned charts are added to the dashboard position""" + # return deterministic IDs + mock_suffix.side_effect = (str(i) for i in itertools.count(1)) + + position = get_default_position("example") + chart_1 = db.session.query(Slice).filter_by(slice_name="Region Filter").one() + new_position = append_charts(position, {chart_1}) + assert new_position == { + "DASHBOARD_VERSION_KEY": "v2", + "ROOT_ID": {"children": ["GRID_ID"], "id": "ROOT_ID", "type": "ROOT"}, + "GRID_ID": { + "children": ["ROW-N-2"], + "id": "GRID_ID", + "parents": ["ROOT_ID"], + "type": "GRID", + }, + "HEADER_ID": { + "id": "HEADER_ID", + "meta": {"text": "example"}, + "type": "HEADER", + }, + "ROW-N-2": { + "children": ["CHART-1"], + "id": "ROW-N-2", + "meta": {"0": "ROOT_ID", "background": "BACKGROUND_TRANSPARENT"}, + "type": "ROW", + "parents": ["ROOT_ID", "GRID_ID"], + }, + "CHART-1": { + "children": [], + "id": "CHART-1", + "meta": { + "chartId": chart_1.id, + "height": 50, + "sliceName": "Region Filter", + "uuid": str(chart_1.uuid), + "width": 4, + }, + "type": "CHART", + "parents": ["ROOT_ID", "GRID_ID", "ROW-N-2"], + }, + } + + chart_2 = ( + db.session.query(Slice).filter_by(slice_name="World's Population").one() + ) + new_position = append_charts(new_position, {chart_2}) + assert new_position == { + "DASHBOARD_VERSION_KEY": "v2", + "ROOT_ID":
{"children": ["GRID_ID"], "id": "ROOT_ID", "type": "ROOT"}, + "GRID_ID": { + "children": ["ROW-N-2", "ROW-N-4"], + "id": "GRID_ID", + "parents": ["ROOT_ID"], + "type": "GRID", + }, + "HEADER_ID": { + "id": "HEADER_ID", + "meta": {"text": "example"}, + "type": "HEADER", + }, + "ROW-N-2": { + "children": ["CHART-1"], + "id": "ROW-N-2", + "meta": {"0": "ROOT_ID", "background": "BACKGROUND_TRANSPARENT"}, + "type": "ROW", + "parents": ["ROOT_ID", "GRID_ID"], + }, + "ROW-N-4": { + "children": ["CHART-3"], + "id": "ROW-N-4", + "meta": {"0": "ROOT_ID", "background": "BACKGROUND_TRANSPARENT"}, + "type": "ROW", + "parents": ["ROOT_ID", "GRID_ID"], + }, + "CHART-1": { + "children": [], + "id": "CHART-1", + "meta": { + "chartId": chart_1.id, + "height": 50, + "sliceName": "Region Filter", + "uuid": str(chart_1.uuid), + "width": 4, + }, + "type": "CHART", + "parents": ["ROOT_ID", "GRID_ID", "ROW-N-2"], + }, + "CHART-3": { + "children": [], + "id": "CHART-3", + "meta": { + "chartId": chart_2.id, + "height": 50, + "sliceName": "World's Population", + "uuid": str(chart_2.uuid), + "width": 4, + }, + "type": "CHART", + "parents": ["ROOT_ID", "GRID_ID", "ROW-N-4"], + }, + } + + position = {"DASHBOARD_VERSION_KEY": "v2"} + new_position = append_charts(position, [chart_1, chart_2]) + assert new_position == { + "CHART-5": { + "children": [], + "id": "CHART-5", + "meta": { + "chartId": chart_1.id, + "height": 50, + "sliceName": "Region Filter", + "uuid": str(chart_1.uuid), + "width": 4, + }, + "type": "CHART", + }, + "CHART-6": { + "children": [], + "id": "CHART-6", + "meta": { + "chartId": chart_2.id, + "height": 50, + "sliceName": "World's Population", + "uuid": str(chart_2.uuid), + "width": 4, + }, + "type": "CHART", + }, + "DASHBOARD_VERSION_KEY": "v2", + } + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + @patch("superset.security.manager.g") + @patch("superset.views.base.g") + def test_export_dashboard_command_no_related(self, mock_g1, mock_g2): + """ + Test that only the dashboard is exported when export_related=False. 
+ """ + mock_g1.user = security_manager.find_user("admin") + mock_g2.user = security_manager.find_user("admin") + + example_dashboard = ( + db.session.query(Dashboard).filter_by(slug="world_health").one() + ) + command = ExportDashboardsCommand([example_dashboard.id], export_related=False) + contents = dict(command.run()) + + expected_paths = { + "metadata.yaml", + f"dashboards/World_Banks_Data_{example_dashboard.id}.yaml", + } + assert expected_paths == set(contents.keys()) + + +class TestImportDashboardsCommand(SupersetTestCase): + def test_import_v0_dashboard_cli_export(self): + num_dashboards = db.session.query(Dashboard).count() + num_charts = db.session.query(Slice).count() + num_datasets = db.session.query(SqlaTable).count() + num_databases = db.session.query(Database).count() + + contents = { + "20201119_181105.json": json.dumps(dashboard_export), + } + command = v0.ImportDashboardsCommand(contents) + command.run() + + new_num_dashboards = db.session.query(Dashboard).count() + new_num_charts = db.session.query(Slice).count() + new_num_datasets = db.session.query(SqlaTable).count() + new_num_databases = db.session.query(Database).count() + assert new_num_dashboards == num_dashboards + 1 + assert new_num_charts == num_charts + 1 + assert new_num_datasets == num_datasets + 1 + assert new_num_databases == num_databases + + dashboard = ( + db.session.query(Dashboard).filter_by(dashboard_title="Births 2").one() + ) + assert len(dashboard.slices) == 1 + chart = dashboard.slices[0] + assert chart.slice_name == "Number of California Births" + + dataset = chart.table + assert dataset.table_name == "birth_names_2" + + database = dataset.database + assert database.database_name == "examples" + + db.session.delete(dashboard) + db.session.delete(chart) + db.session.delete(dataset) + db.session.commit() + + @patch("superset.dashboards.commands.importers.v1.utils.g") + def test_import_v1_dashboard(self, mock_g): + """Test that we can import a dashboard""" + mock_g.user = security_manager.find_user("admin") + contents = { + "metadata.yaml": yaml.safe_dump(dashboard_metadata_config), + "databases/imported_database.yaml": yaml.safe_dump(database_config), + "datasets/imported_dataset.yaml": yaml.safe_dump(dataset_config), + "charts/imported_chart.yaml": yaml.safe_dump(chart_config), + "dashboards/imported_dashboard.yaml": yaml.safe_dump(dashboard_config), + } + command = v1.ImportDashboardsCommand(contents) + command.run() + + dashboard = ( + db.session.query(Dashboard).filter_by(uuid=dashboard_config["uuid"]).one() + ) + + assert len(dashboard.slices) == 1 + chart = dashboard.slices[0] + assert str(chart.uuid) == chart_config["uuid"] + new_chart_id = chart.id + + assert dashboard.dashboard_title == "Test dash" + assert dashboard.description is None + assert dashboard.css == "" + assert dashboard.slug is None + assert json.loads(dashboard.position_json) == { + "CHART-SVAlICPOSJ": { + "children": [], + "id": "CHART-SVAlICPOSJ", + "meta": { + "chartId": new_chart_id, + "height": 50, + "sliceName": "Number of California Births", + "uuid": "0c23747a-6528-4629-97bf-e4b78d3b9df1", + "width": 4, + }, + "parents": ["ROOT_ID", "GRID_ID", "ROW-dP_CHaK2q"], + "type": "CHART", + }, + "DASHBOARD_VERSION_KEY": "v2", + "GRID_ID": { + "children": ["ROW-dP_CHaK2q"], + "id": "GRID_ID", + "parents": ["ROOT_ID"], + "type": "GRID", + }, + "HEADER_ID": { + "id": "HEADER_ID", + "meta": {"text": "Test dash"}, + "type": "HEADER", + }, + "ROOT_ID": {"children": ["GRID_ID"], "id": "ROOT_ID", "type": "ROOT"}, + "ROW-dP_CHaK2q": 
{ + "children": ["CHART-SVAlICPOSJ"], + "id": "ROW-dP_CHaK2q", + "meta": {"0": "ROOT_ID", "background": "BACKGROUND_TRANSPARENT"}, + "parents": ["ROOT_ID", "GRID_ID"], + "type": "ROW", + }, + } + assert json.loads(dashboard.json_metadata) == { + "color_scheme": None, + "default_filters": "{}", + "expanded_slices": {str(new_chart_id): True}, + "filter_scopes": { + str(new_chart_id): { + "region": {"scope": ["ROOT_ID"], "immune": [new_chart_id]} + }, + }, + "import_time": 1604342885, + "refresh_frequency": 0, + "remote_id": 7, + "timed_refresh_immune_slices": [new_chart_id], + } + + dataset = chart.table + assert str(dataset.uuid) == dataset_config["uuid"] + + database = dataset.database + assert str(database.uuid) == database_config["uuid"] + + assert dashboard.owners == [mock_g.user] + + dashboard.owners = [] + chart.owners = [] + dataset.owners = [] + database.owners = [] + db.session.delete(dashboard) + db.session.delete(chart) + db.session.delete(dataset) + db.session.delete(database) + db.session.commit() + + def test_import_v1_dashboard_multiple(self): + """Test that a dashboard can be imported multiple times""" + num_dashboards = db.session.query(Dashboard).count() + + contents = { + "metadata.yaml": yaml.safe_dump(dashboard_metadata_config), + "databases/imported_database.yaml": yaml.safe_dump(database_config), + "datasets/imported_dataset.yaml": yaml.safe_dump(dataset_config), + "charts/imported_chart.yaml": yaml.safe_dump(chart_config), + "dashboards/imported_dashboard.yaml": yaml.safe_dump(dashboard_config), + } + command = v1.ImportDashboardsCommand(contents, overwrite=True) + command.run() + command.run() + + new_num_dashboards = db.session.query(Dashboard).count() + assert new_num_dashboards == num_dashboards + 1 + + dashboard = ( + db.session.query(Dashboard).filter_by(uuid=dashboard_config["uuid"]).one() + ) + chart = dashboard.slices[0] + dataset = chart.table + database = dataset.database + + db.session.delete(dashboard) + db.session.delete(chart) + db.session.delete(dataset) + db.session.delete(database) + db.session.commit() + + def test_import_v1_dashboard_validation(self): + """Test different validations applied when importing a dashboard""" + # metadata.yaml must be present + contents = { + "databases/imported_database.yaml": yaml.safe_dump(database_config), + "datasets/imported_dataset.yaml": yaml.safe_dump(dataset_config), + "charts/imported_chart.yaml": yaml.safe_dump(chart_config), + "dashboards/imported_dashboard.yaml": yaml.safe_dump(dashboard_config), + } + command = v1.ImportDashboardsCommand(contents) + with pytest.raises(IncorrectVersionError) as excinfo: + command.run() + assert str(excinfo.value) == "Missing metadata.yaml" + + # version should be 1.0.0 + contents["metadata.yaml"] = yaml.safe_dump( + { + "version": "2.0.0", + "type": "Database", + "timestamp": "2020-11-04T21:27:44.423819+00:00", + } + ) + command = v1.ImportDashboardsCommand(contents) + with pytest.raises(IncorrectVersionError) as excinfo: + command.run() + assert str(excinfo.value) == "Must be equal to 1.0.0." 
+ + # type should be Dashboard + contents["metadata.yaml"] = yaml.safe_dump(dataset_metadata_config) + command = v1.ImportDashboardsCommand(contents) + with pytest.raises(CommandInvalidError) as excinfo: + command.run() + assert str(excinfo.value) == "Error importing dashboard" + assert excinfo.value.normalized_messages() == { + "metadata.yaml": {"type": ["Must be equal to Dashboard."]} + } + + # must also validate datasets + broken_config = dataset_config.copy() + del broken_config["table_name"] + contents["metadata.yaml"] = yaml.safe_dump(dashboard_metadata_config) + contents["datasets/imported_dataset.yaml"] = yaml.safe_dump(broken_config) + command = v1.ImportDashboardsCommand(contents) + with pytest.raises(CommandInvalidError) as excinfo: + command.run() + assert str(excinfo.value) == "Error importing dashboard" + assert excinfo.value.normalized_messages() == { + "datasets/imported_dataset.yaml": { + "table_name": ["Missing data for required field."], + } + } diff --git a/tests/integration_tests/dashboards/consts.py b/tests/integration_tests/dashboards/consts.py new file mode 100644 index 0000000000000..a6e36839be9ed --- /dev/null +++ b/tests/integration_tests/dashboards/consts.py @@ -0,0 +1,43 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +QUERY_FORMAT = "?q={}" + +DASHBOARDS_API_URL = "api/v1/dashboard/" +DASHBOARDS_API_URL_WITH_QUERY_FORMAT = DASHBOARDS_API_URL + QUERY_FORMAT +DASHBOARD_API_URL_FORMAT = DASHBOARDS_API_URL + "{}" +EXPORT_DASHBOARDS_API_URL = DASHBOARDS_API_URL + "export/" +EXPORT_DASHBOARDS_API_URL_WITH_QUERY_FORMAT = EXPORT_DASHBOARDS_API_URL + QUERY_FORMAT + +GET_DASHBOARD_VIEW_URL_FORMAT = "/superset/dashboard/{}/" +SAVE_DASHBOARD_URL_FORMAT = "/superset/save_dash/{}/" +COPY_DASHBOARD_URL_FORMAT = "/superset/copy_dash/{}/" +ADD_SLICES_URL_FORMAT = "/superset/add_slices/{}/" + +DELETE_DASHBOARD_VIEW_URL_FORMAT = "/dashboard/delete/{}" +GET_DASHBOARDS_LIST_VIEW = "/dashboard/list/" +NEW_DASHBOARD_URL = "/dashboard/new/" +GET_CHARTS_API_URL = "/api/v1/chart/" + +GAMMA_ROLE_NAME = "Gamma" + +ADMIN_USERNAME = "admin" +GAMMA_USERNAME = "gamma" + +DASHBOARD_SLUG_OF_ACCESSIBLE_TABLE = "births" +DEFAULT_DASHBOARD_SLUG_TO_TEST = "births" +WORLD_HEALTH_SLUG = "world_health" diff --git a/tests/integration_tests/dashboards/dao_tests.py b/tests/integration_tests/dashboards/dao_tests.py new file mode 100644 index 0000000000000..672e930364f2a --- /dev/null +++ b/tests/integration_tests/dashboards/dao_tests.py @@ -0,0 +1,129 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership.
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# isort:skip_file +import copy +import json +import time +from unittest.mock import patch +import pytest + +import tests.integration_tests.test_app # pylint: disable=unused-import +from superset import db, security_manager +from superset.dashboards.dao import DashboardDAO +from superset.models.dashboard import Dashboard +from tests.integration_tests.base_tests import SupersetTestCase +from tests.integration_tests.fixtures.world_bank_dashboard import ( + load_world_bank_dashboard_with_slices, + load_world_bank_data, +) + + +class TestDashboardDAO(SupersetTestCase): + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def test_set_dash_metadata(self): + dash: Dashboard = ( + db.session.query(Dashboard).filter_by(slug="world_health").first() + ) + data = dash.data + positions = data["position_json"] + data.update({"positions": positions}) + original_data = copy.deepcopy(data) + + # add filter scopes + filter_slice = next(slc for slc in dash.slices if slc.viz_type == "filter_box") + immune_slices = [slc for slc in dash.slices if slc != filter_slice] + filter_scopes = { + str(filter_slice.id): { + "region": { + "scope": ["ROOT_ID"], + "immune": [slc.id for slc in immune_slices], + } + } + } + data.update({"filter_scopes": json.dumps(filter_scopes)}) + DashboardDAO.set_dash_metadata(dash, data) + updated_metadata = json.loads(dash.json_metadata) + self.assertEqual(updated_metadata["filter_scopes"], filter_scopes) + + # remove a slice and change slice ids (as copy slices) + removed_slice = immune_slices.pop() + removed_components = [ + key + for (key, value) in positions.items() + if isinstance(value, dict) + and value.get("type") == "CHART" + and value["meta"]["chartId"] == removed_slice.id + ] + for component_id in removed_components: + del positions[component_id] + + data.update({"positions": positions}) + DashboardDAO.set_dash_metadata(dash, data) + updated_metadata = json.loads(dash.json_metadata) + expected_filter_scopes = { + str(filter_slice.id): { + "region": { + "scope": ["ROOT_ID"], + "immune": [slc.id for slc in immune_slices], + } + } + } + self.assertEqual(updated_metadata["filter_scopes"], expected_filter_scopes) + + # reset dash to original data + DashboardDAO.set_dash_metadata(dash, original_data) + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + @patch("superset.utils.core.g") + @patch("superset.security.manager.g") + def test_get_dashboard_changed_on(self, mock_sm_g, mock_g): + mock_g.user = mock_sm_g.user = security_manager.find_user("admin") + with self.client.application.test_request_context(): + self.login(username="admin") + dashboard = ( + db.session.query(Dashboard).filter_by(slug="world_health").first() + ) + + changed_on = dashboard.changed_on.replace(microsecond=0) + assert changed_on == DashboardDAO.get_dashboard_changed_on(dashboard) + assert changed_on == DashboardDAO.get_dashboard_changed_on("world_health") + + old_changed_on = dashboard.changed_on + + # 
freezegun doesn't work for some reason, so we need to sleep here :( + time.sleep(1) + data = dashboard.data + positions = data["position_json"] + data.update({"positions": positions}) + original_data = copy.deepcopy(data) + + data.update({"foo": "bar"}) + DashboardDAO.set_dash_metadata(dashboard, data) + db.session.merge(dashboard) + db.session.commit() + new_changed_on = DashboardDAO.get_dashboard_changed_on(dashboard) + assert old_changed_on.replace(microsecond=0) < new_changed_on + assert new_changed_on == DashboardDAO.get_dashboard_and_datasets_changed_on( + dashboard + ) + assert new_changed_on == DashboardDAO.get_dashboard_and_slices_changed_on( + dashboard + ) + + DashboardDAO.set_dash_metadata(dashboard, original_data) + db.session.merge(dashboard) + db.session.commit() diff --git a/tests/integration_tests/dashboards/dashboard_test_utils.py b/tests/integration_tests/dashboards/dashboard_test_utils.py new file mode 100644 index 0000000000000..df2687fba939f --- /dev/null +++ b/tests/integration_tests/dashboards/dashboard_test_utils.py @@ -0,0 +1,121 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
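+# Utility helpers for the dashboard integration tests: mock position payloads and save-dash request bodies, lookups by slug and name, random title/slug generators, and grant/revoke helpers that attach or remove a role on a dashboard.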
+import logging +import random +import string +from typing import Any, Dict, List, Optional, Tuple + +from sqlalchemy import func + +from superset import appbuilder, db, security_manager +from superset.connectors.sqla.models import SqlaTable +from superset.models.dashboard import Dashboard +from superset.models.slice import Slice +from tests.integration_tests.dashboards.consts import DEFAULT_DASHBOARD_SLUG_TO_TEST + +logger = logging.getLogger(__name__) + +session = appbuilder.get_session + + +def get_mock_positions(dashboard: Dashboard) -> Dict[str, Any]: + positions = {"DASHBOARD_VERSION_KEY": "v2"} + for i, slc in enumerate(dashboard.slices): + id_ = "DASHBOARD_CHART_TYPE-{}".format(i) + position_data: Any = { + "type": "CHART", + "id": id_, + "children": [], + "meta": {"width": 4, "height": 50, "chartId": slc.id}, + } + positions[id_] = position_data + return positions + + +def build_save_dash_parts( + dashboard_slug: Optional[str] = None, dashboard_to_edit: Optional[Dashboard] = None +) -> Tuple[Dashboard, Dict[str, Any], Dict[str, Any]]: + if not dashboard_to_edit: + dashboard_slug = ( + dashboard_slug if dashboard_slug else DEFAULT_DASHBOARD_SLUG_TO_TEST + ) + dashboard_to_edit = get_dashboard_by_slug(dashboard_slug) + + data_before_change = { + "positions": dashboard_to_edit.position, + "dashboard_title": dashboard_to_edit.dashboard_title, + } + data_after_change = { + "css": "", + "expanded_slices": {}, + "positions": get_mock_positions(dashboard_to_edit), + "dashboard_title": dashboard_to_edit.dashboard_title, + } + return dashboard_to_edit, data_before_change, data_after_change + + +def get_all_dashboards() -> List[Dashboard]: + return db.session.query(Dashboard).all() + + +def get_dashboard_by_slug(dashboard_slug: str) -> Dashboard: + return db.session.query(Dashboard).filter_by(slug=dashboard_slug).first() + + +def get_slice_by_name(slice_name: str) -> Slice: + return db.session.query(Slice).filter_by(slice_name=slice_name).first() + + +def get_sql_table_by_name(table_name: str): + return db.session.query(SqlaTable).filter_by(table_name=table_name).one() + + +def count_dashboards() -> int: + return db.session.query(func.count(Dashboard.id)).first()[0] + + +def random_title(): + return f"title{random_str()}" + + +def random_slug(): + return f"slug{random_str()}" + + +def get_random_string(length): + letters = string.ascii_lowercase + result_str = "".join(random.choice(letters) for i in range(length)) + print("Random string of length", length, "is:", result_str) + return result_str + + +def random_str(): + return get_random_string(8) + + +def grant_access_to_dashboard(dashboard, role_name): + role = security_manager.find_role(role_name) + dashboard.roles.append(role) + db.session.merge(dashboard) + db.session.commit() + + +def revoke_access_to_dashboard(dashboard, role_name): + role = security_manager.find_role(role_name) + dashboard.roles.remove(role) + db.session.merge(dashboard) + db.session.commit() diff --git a/tests/integration_tests/dashboards/filter_sets/__init__.py b/tests/integration_tests/dashboards/filter_sets/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/dashboards/filter_sets/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/dashboards/filter_sets/conftest.py b/tests/integration_tests/dashboards/filter_sets/conftest.py new file mode 100644 index 0000000000000..b7a28273b0a7e --- /dev/null +++ b/tests/integration_tests/dashboards/filter_sets/conftest.py @@ -0,0 +1,285 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +import json +from typing import Any, Dict, Generator, List, TYPE_CHECKING + +import pytest + +from superset import db, security_manager as sm +from superset.dashboards.filter_sets.consts import ( + DESCRIPTION_FIELD, + JSON_METADATA_FIELD, + NAME_FIELD, + OWNER_ID_FIELD, + OWNER_TYPE_FIELD, + USER_OWNER_TYPE, +) +from superset.models.dashboard import Dashboard +from superset.models.filter_set import FilterSet +from tests.integration_tests.dashboards.filter_sets.consts import ( + ADMIN_USERNAME_FOR_TEST, + DASHBOARD_OWNER_USERNAME, + FILTER_SET_OWNER_USERNAME, + REGULAR_USER, +) +from tests.integration_tests.dashboards.superset_factory_util import ( + create_dashboard, + create_database, + create_datasource_table, + create_slice, +) +from tests.integration_tests.test_app import app + +if TYPE_CHECKING: + from flask.ctx import AppContext + from flask.testing import FlaskClient + from flask_appbuilder.security.manager import BaseSecurityManager + from flask_appbuilder.security.sqla.models import ( + PermissionView, + Role, + User, + ViewMenu, + ) + from sqlalchemy.orm import Session + + from superset.connectors.sqla.models import SqlaTable + from superset.models.core import Database + from superset.models.slice import Slice + + +security_manager: BaseSecurityManager = sm + + +@pytest.fixture(autouse=True, scope="module") +def test_users() -> Generator[Dict[str, int], None, None]: + usernames = [ + ADMIN_USERNAME_FOR_TEST, + DASHBOARD_OWNER_USERNAME, + FILTER_SET_OWNER_USERNAME, + REGULAR_USER, + ] + with app.app_context(): + filter_set_role = build_filter_set_role() + admin_role: Role = security_manager.find_role("Admin") + usernames_to_ids = create_test_users(admin_role, filter_set_role, usernames) + yield usernames_to_ids + delete_users(usernames_to_ids) + + +def delete_users(usernames_to_ids: Dict[str, int]) -> None: + for 
username in usernames_to_ids.keys(): + db.session.delete(security_manager.find_user(username)) + db.session.commit() + + +def create_test_users( + admin_role: Role, filter_set_role: Role, usernames: List[str] +) -> Dict[str, int]: + users: List[User] = [] + for username in usernames: + user = build_user(username, filter_set_role, admin_role) + users.append(user) + return {user.username: user.id for user in users} + + +def build_user(username: str, filter_set_role: Role, admin_role: Role) -> User: + roles_to_add = ( + [admin_role] if username == ADMIN_USERNAME_FOR_TEST else [filter_set_role] + ) + user: User = security_manager.add_user( + username, "test", "test", username, roles_to_add, password="general" + ) + if not user: + user = security_manager.find_user(username) + if user is None: + raise Exception("Failed to build the user {}".format(username)) + return user + + +def build_filter_set_role() -> Role: + filter_set_role: Role = security_manager.add_role("filter_set_role") + filterset_view_name: ViewMenu = security_manager.find_view_menu("FilterSets") + all_datasource_view_name: ViewMenu = security_manager.find_view_menu( + "all_datasource_access" + ) + pvms: List[PermissionView] = security_manager.find_permissions_view_menu( + filterset_view_name + ) + security_manager.find_permissions_view_menu(all_datasource_view_name) + for pvm in pvms: + security_manager.add_permission_role(filter_set_role, pvm) + return filter_set_role + + +@pytest.fixture +def client() -> Generator[FlaskClient[Any], None, None]: + with app.test_client() as client: + yield client + + +@pytest.fixture +def dashboard(app_context) -> Generator[Dashboard, None, None]: + dashboard_owner_user = security_manager.find_user(DASHBOARD_OWNER_USERNAME) + database = create_database("test_database_filter_sets") + datasource = create_datasource_table( + name="test_datasource", database=database, owners=[dashboard_owner_user] + ) + slice_ = create_slice( + datasource=datasource, name="test_slice", owners=[dashboard_owner_user] + ) + dashboard = create_dashboard( + dashboard_title="test_dashboard", + published=True, + slices=[slice_], + owners=[dashboard_owner_user], + ) + db.session.add(dashboard) + db.session.commit() + + yield dashboard + + db.session.delete(dashboard) + db.session.delete(slice_) + db.session.delete(datasource) + db.session.delete(database) + db.session.commit() + + +@pytest.fixture +def dashboard_id(dashboard: Dashboard) -> Generator[int, None, None]: + yield dashboard.id + + +@pytest.fixture +def filtersets( + dashboard_id: int, test_users: Dict[str, int], dumped_valid_json_metadata: str +) -> Generator[Dict[str, List[FilterSet]], None, None]: + first_filter_set = FilterSet( + name="filter_set_1_of_" + str(dashboard_id), + dashboard_id=dashboard_id, + json_metadata=dumped_valid_json_metadata, + owner_id=dashboard_id, + owner_type="Dashboard", + ) + second_filter_set = FilterSet( + name="filter_set_2_of_" + str(dashboard_id), + json_metadata=dumped_valid_json_metadata, + dashboard_id=dashboard_id, + owner_id=dashboard_id, + owner_type="Dashboard", + ) + third_filter_set = FilterSet( + name="filter_set_3_of_" + str(dashboard_id), + json_metadata=dumped_valid_json_metadata, + dashboard_id=dashboard_id, + owner_id=test_users[FILTER_SET_OWNER_USERNAME], + owner_type="User", + ) + fourth_filter_set = FilterSet( + name="filter_set_4_of_" + str(dashboard_id), + json_metadata=dumped_valid_json_metadata, + dashboard_id=dashboard_id, + owner_id=test_users[FILTER_SET_OWNER_USERNAME], + owner_type="User", + ) + 
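+ # persist all four filter sets (two dashboard-owned, two user-owned) before yielding them to the tests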
db.session.add(first_filter_set) + db.session.add(second_filter_set) + db.session.add(third_filter_set) + db.session.add(fourth_filter_set) + db.session.commit() + + yield { + "Dashboard": [first_filter_set, second_filter_set], + FILTER_SET_OWNER_USERNAME: [third_filter_set, fourth_filter_set], + } + + db.session.delete(first_filter_set) + db.session.delete(second_filter_set) + db.session.delete(third_filter_set) + db.session.delete(fourth_filter_set) + db.session.commit() + + +@pytest.fixture +def filterset_id(filtersets: Dict[str, List[FilterSet]]) -> int: + return filtersets["Dashboard"][0].id + + +@pytest.fixture +def valid_json_metadata() -> Dict[str, Any]: + return {"nativeFilters": {}} + + +@pytest.fixture +def dumped_valid_json_metadata(valid_json_metadata: Dict[str, Any]) -> str: + return json.dumps(valid_json_metadata) + + +@pytest.fixture +def exists_user_id() -> int: + return 1 + + +@pytest.fixture +def valid_filter_set_data_for_create( + dashboard_id: int, dumped_valid_json_metadata: str, exists_user_id: int +) -> Dict[str, Any]: + name = "test_filter_set_of_dashboard_" + str(dashboard_id) + return { + NAME_FIELD: name, + DESCRIPTION_FIELD: "description of " + name, + JSON_METADATA_FIELD: dumped_valid_json_metadata, + OWNER_TYPE_FIELD: USER_OWNER_TYPE, + OWNER_ID_FIELD: exists_user_id, + } + + +@pytest.fixture +def valid_filter_set_data_for_update( + dashboard_id: int, dumped_valid_json_metadata: str, exists_user_id: int +) -> Dict[str, Any]: + name = "name_changed_test_filter_set_of_dashboard_" + str(dashboard_id) + return { + NAME_FIELD: name, + DESCRIPTION_FIELD: "changed description of " + name, + JSON_METADATA_FIELD: dumped_valid_json_metadata, + } + + +@pytest.fixture +def not_exists_dashboard_id(dashboard_id: int) -> Generator[int, None, None]: + yield dashboard_id + 1 + + +@pytest.fixture +def not_exists_user_id() -> int: + return 99999 + + +@pytest.fixture() +def dashboard_based_filter_set_dict( + filtersets: Dict[str, List[FilterSet]] +) -> Dict[str, Any]: + return filtersets["Dashboard"][0].to_dict() + + +@pytest.fixture() +def user_based_filter_set_dict( + filtersets: Dict[str, List[FilterSet]] +) -> Dict[str, Any]: + return filtersets[FILTER_SET_OWNER_USERNAME][0].to_dict() diff --git a/tests/integration_tests/dashboards/filter_sets/consts.py b/tests/integration_tests/dashboards/filter_sets/consts.py new file mode 100644 index 0000000000000..f54f00fea8b75 --- /dev/null +++ b/tests/integration_tests/dashboards/filter_sets/consts.py @@ -0,0 +1,22 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
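+# Constants used by the filter-set API tests: the endpoint template plus the usernames of the users created by the module-scoped fixtures in conftest.py.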
+FILTER_SET_URI = "api/v1/dashboard/{dashboard_id}/filtersets" + +ADMIN_USERNAME_FOR_TEST = "admin@filterset.com" +DASHBOARD_OWNER_USERNAME = "dash_owner_user@filterset.com" +FILTER_SET_OWNER_USERNAME = "fs_owner_user@filterset.com" +REGULAR_USER = "regular_user@filterset.com" diff --git a/tests/integration_tests/dashboards/filter_sets/create_api_tests.py b/tests/integration_tests/dashboards/filter_sets/create_api_tests.py new file mode 100644 index 0000000000000..b5d1919dd430a --- /dev/null +++ b/tests/integration_tests/dashboards/filter_sets/create_api_tests.py @@ -0,0 +1,629 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from typing import Any, Dict + +from flask.testing import FlaskClient + +from superset.dashboards.filter_sets.consts import ( + DASHBOARD_OWNER_TYPE, + DESCRIPTION_FIELD, + JSON_METADATA_FIELD, + NAME_FIELD, + OWNER_ID_FIELD, + OWNER_TYPE_FIELD, + USER_OWNER_TYPE, +) +from tests.integration_tests.dashboards.filter_sets.consts import ( + ADMIN_USERNAME_FOR_TEST, + DASHBOARD_OWNER_USERNAME, + FILTER_SET_OWNER_USERNAME, +) +from tests.integration_tests.dashboards.filter_sets.utils import ( + call_create_filter_set, + get_filter_set_by_dashboard_id, + get_filter_set_by_name, +) +from tests.integration_tests.test_app import login + + +def assert_filterset_was_not_created(filter_set_data: Dict[str, Any]) -> None: + assert get_filter_set_by_name(str(filter_set_data["name"])) is None + + +def assert_filterset_was_created(filter_set_data: Dict[str, Any]) -> None: + assert get_filter_set_by_name(filter_set_data["name"]) is not None + + +class TestCreateFilterSetsApi: + def test_with_extra_field__400( + self, + dashboard_id: int, + valid_filter_set_data_for_create: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + valid_filter_set_data_for_create["extra"] = "val" + + # act + response = call_create_filter_set( + client, dashboard_id, valid_filter_set_data_for_create + ) + + # assert + assert response.status_code == 400 + assert response.json["message"]["extra"][0] == "Unknown field." + assert_filterset_was_not_created(valid_filter_set_data_for_create) + + def test_with_id_field__400( + self, + dashboard_id: int, + valid_filter_set_data_for_create: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + valid_filter_set_data_for_create["id"] = 1 + + # act + response = call_create_filter_set( + client, dashboard_id, valid_filter_set_data_for_create + ) + + # assert + assert response.status_code == 400 + assert response.json["message"]["id"][0] == "Unknown field." 
+ assert_filterset_was_not_created(valid_filter_set_data_for_create) + + def test_with_dashboard_not_exists__404( + self, + not_exists_dashboard_id: int, + valid_filter_set_data_for_create: Dict[str, Any], + client: FlaskClient[Any], + ): + # act + login(client, "admin") + response = call_create_filter_set( + client, not_exists_dashboard_id, valid_filter_set_data_for_create + ) + + # assert + assert response.status_code == 404 + assert_filterset_was_not_created(valid_filter_set_data_for_create) + + def test_without_name__400( + self, + dashboard_id: int, + valid_filter_set_data_for_create: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + valid_filter_set_data_for_create.pop(NAME_FIELD, None) + + # act + response = call_create_filter_set( + client, dashboard_id, valid_filter_set_data_for_create + ) + + # assert + assert response.status_code == 400 + assert get_filter_set_by_dashboard_id(dashboard_id) == [] + + def test_with_none_name__400( + self, + dashboard_id: int, + valid_filter_set_data_for_create: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + valid_filter_set_data_for_create[NAME_FIELD] = None + + # act + response = call_create_filter_set( + client, dashboard_id, valid_filter_set_data_for_create + ) + + # assert + assert response.status_code == 400 + assert_filterset_was_not_created(valid_filter_set_data_for_create) + + def test_with_int_as_name__400( + self, + dashboard_id: int, + valid_filter_set_data_for_create: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + valid_filter_set_data_for_create[NAME_FIELD] = 4 + + # act + response = call_create_filter_set( + client, dashboard_id, valid_filter_set_data_for_create + ) + + # assert + assert response.status_code == 400 + assert_filterset_was_not_created(valid_filter_set_data_for_create) + + def test_without_description__201( + self, + dashboard_id: int, + valid_filter_set_data_for_create: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + valid_filter_set_data_for_create.pop(DESCRIPTION_FIELD, None) + + # act + response = call_create_filter_set( + client, dashboard_id, valid_filter_set_data_for_create + ) + + # assert + assert response.status_code == 201 + assert_filterset_was_created(valid_filter_set_data_for_create) + + def test_with_none_description__201( + self, + dashboard_id: int, + valid_filter_set_data_for_create: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + valid_filter_set_data_for_create[DESCRIPTION_FIELD] = None + + # act + response = call_create_filter_set( + client, dashboard_id, valid_filter_set_data_for_create + ) + + # assert + assert response.status_code == 201 + assert_filterset_was_created(valid_filter_set_data_for_create) + + def test_with_int_as_description__400( + self, + dashboard_id: int, + valid_filter_set_data_for_create: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + valid_filter_set_data_for_create[DESCRIPTION_FIELD] = 1 + + # act + response = call_create_filter_set( + client, dashboard_id, valid_filter_set_data_for_create + ) + + # assert + assert response.status_code == 400 + assert_filterset_was_not_created(valid_filter_set_data_for_create) + + def test_without_json_metadata__400( + self, + dashboard_id: int, + valid_filter_set_data_for_create: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + 
valid_filter_set_data_for_create.pop(JSON_METADATA_FIELD, None) + + # act + response = call_create_filter_set( + client, dashboard_id, valid_filter_set_data_for_create + ) + + # assert + assert response.status_code == 400 + assert_filterset_was_not_created(valid_filter_set_data_for_create) + + def test_with_invalid_json_metadata__400( + self, + dashboard_id: int, + valid_filter_set_data_for_create: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + valid_filter_set_data_for_create[JSON_METADATA_FIELD] = {} + + # act + response = call_create_filter_set( + client, dashboard_id, valid_filter_set_data_for_create + ) + + # assert + assert response.status_code == 400 + assert_filterset_was_not_created(valid_filter_set_data_for_create) + + def test_without_owner_type__400( + self, + dashboard_id: int, + valid_filter_set_data_for_create: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + valid_filter_set_data_for_create.pop(OWNER_TYPE_FIELD, None) + + # act + response = call_create_filter_set( + client, dashboard_id, valid_filter_set_data_for_create + ) + + # assert + assert response.status_code == 400 + assert_filterset_was_not_created(valid_filter_set_data_for_create) + + def test_with_invalid_owner_type__400( + self, + dashboard_id: int, + valid_filter_set_data_for_create: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + valid_filter_set_data_for_create[OWNER_TYPE_FIELD] = "OTHER_TYPE" + + # act + response = call_create_filter_set( + client, dashboard_id, valid_filter_set_data_for_create + ) + + # assert + assert response.status_code == 400 + assert_filterset_was_not_created(valid_filter_set_data_for_create) + + def test_without_owner_id_when_owner_type_is_user__400( + self, + dashboard_id: int, + valid_filter_set_data_for_create: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + valid_filter_set_data_for_create[OWNER_TYPE_FIELD] = USER_OWNER_TYPE + valid_filter_set_data_for_create.pop(OWNER_ID_FIELD, None) + + # act + response = call_create_filter_set( + client, dashboard_id, valid_filter_set_data_for_create + ) + + # assert + assert response.status_code == 400 + assert_filterset_was_not_created(valid_filter_set_data_for_create) + + def test_without_owner_id_when_owner_type_is_dashboard__201( + self, + dashboard_id: int, + valid_filter_set_data_for_create: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + valid_filter_set_data_for_create[OWNER_TYPE_FIELD] = DASHBOARD_OWNER_TYPE + valid_filter_set_data_for_create.pop(OWNER_ID_FIELD, None) + + # act + response = call_create_filter_set( + client, dashboard_id, valid_filter_set_data_for_create + ) + + # assert + assert response.status_code == 201 + assert_filterset_was_created(valid_filter_set_data_for_create) + + def test_with_not_exists_owner__400( + self, + dashboard_id: int, + valid_filter_set_data_for_create: Dict[str, Any], + not_exists_user_id: int, + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + valid_filter_set_data_for_create[OWNER_TYPE_FIELD] = USER_OWNER_TYPE + valid_filter_set_data_for_create[OWNER_ID_FIELD] = not_exists_user_id + + # act + response = call_create_filter_set( + client, dashboard_id, valid_filter_set_data_for_create + ) + + # assert + assert response.status_code == 400 + assert_filterset_was_not_created(valid_filter_set_data_for_create) + + def test_when_caller_is_admin_and_owner_is_admin__201( + self, +
dashboard_id: int, + test_users: Dict[str, int], + valid_filter_set_data_for_create: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + valid_filter_set_data_for_create[OWNER_TYPE_FIELD] = USER_OWNER_TYPE + valid_filter_set_data_for_create[OWNER_ID_FIELD] = test_users[ + ADMIN_USERNAME_FOR_TEST + ] + + # act + response = call_create_filter_set( + client, dashboard_id, valid_filter_set_data_for_create + ) + + # assert + assert response.status_code == 201 + assert_filterset_was_created(valid_filter_set_data_for_create) + + def test_when_caller_is_admin_and_owner_is_dashboard_owner__201( + self, + dashboard_id: int, + test_users: Dict[str, int], + valid_filter_set_data_for_create: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + valid_filter_set_data_for_create[OWNER_TYPE_FIELD] = USER_OWNER_TYPE + valid_filter_set_data_for_create[OWNER_ID_FIELD] = test_users[ + DASHBOARD_OWNER_USERNAME + ] + + # act + response = call_create_filter_set( + client, dashboard_id, valid_filter_set_data_for_create + ) + + # assert + assert response.status_code == 201 + assert_filterset_was_created(valid_filter_set_data_for_create) + + def test_when_caller_is_admin_and_owner_is_regular_user__201( + self, + dashboard_id: int, + test_users: Dict[str, int], + valid_filter_set_data_for_create: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + valid_filter_set_data_for_create[OWNER_TYPE_FIELD] = USER_OWNER_TYPE + valid_filter_set_data_for_create[OWNER_ID_FIELD] = test_users[ + FILTER_SET_OWNER_USERNAME + ] + + # act + response = call_create_filter_set( + client, dashboard_id, valid_filter_set_data_for_create + ) + + # assert + assert response.status_code == 201 + assert_filterset_was_created(valid_filter_set_data_for_create) + + def test_when_caller_is_admin_and_owner_type_is_dashboard__201( + self, + dashboard_id: int, + test_users: Dict[str, int], + valid_filter_set_data_for_create: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + valid_filter_set_data_for_create[OWNER_TYPE_FIELD] = DASHBOARD_OWNER_TYPE + valid_filter_set_data_for_create[OWNER_ID_FIELD] = dashboard_id + + # act + response = call_create_filter_set( + client, dashboard_id, valid_filter_set_data_for_create + ) + + # assert + assert response.status_code == 201 + assert_filterset_was_created(valid_filter_set_data_for_create) + + def test_when_caller_is_dashboard_owner_and_owner_is_admin__201( + self, + dashboard_id: int, + test_users: Dict[str, int], + valid_filter_set_data_for_create: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, DASHBOARD_OWNER_USERNAME) + valid_filter_set_data_for_create[OWNER_TYPE_FIELD] = USER_OWNER_TYPE + valid_filter_set_data_for_create[OWNER_ID_FIELD] = test_users[ + ADMIN_USERNAME_FOR_TEST + ] + + # act + response = call_create_filter_set( + client, dashboard_id, valid_filter_set_data_for_create + ) + + # assert + assert response.status_code == 201 + assert_filterset_was_created(valid_filter_set_data_for_create) + + def test_when_caller_is_dashboard_owner_and_owner_is_dashboard_owner__201( + self, + dashboard_id: int, + test_users: Dict[str, int], + valid_filter_set_data_for_create: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, DASHBOARD_OWNER_USERNAME) + valid_filter_set_data_for_create[OWNER_TYPE_FIELD] = USER_OWNER_TYPE + valid_filter_set_data_for_create[OWNER_ID_FIELD] = test_users[ + DASHBOARD_OWNER_USERNAME + ] + 
+ # act + response = call_create_filter_set( + client, dashboard_id, valid_filter_set_data_for_create + ) + + # assert + assert response.status_code == 201 + assert_filterset_was_created(valid_filter_set_data_for_create) + + def test_when_caller_is_dashboard_owner_and_owner_is_regular_user__201( + self, + dashboard_id: int, + test_users: Dict[str, int], + valid_filter_set_data_for_create: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, DASHBOARD_OWNER_USERNAME) + valid_filter_set_data_for_create[OWNER_TYPE_FIELD] = USER_OWNER_TYPE + valid_filter_set_data_for_create[OWNER_ID_FIELD] = test_users[ + FILTER_SET_OWNER_USERNAME + ] + + # act + response = call_create_filter_set( + client, dashboard_id, valid_filter_set_data_for_create + ) + + # assert + assert response.status_code == 201 + assert_filterset_was_created(valid_filter_set_data_for_create) + + def test_when_caller_is_dashboard_owner_and_owner_type_is_dashboard__201( + self, + dashboard_id: int, + test_users: Dict[str, int], + valid_filter_set_data_for_create: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, DASHBOARD_OWNER_USERNAME) + valid_filter_set_data_for_create[OWNER_TYPE_FIELD] = DASHBOARD_OWNER_TYPE + valid_filter_set_data_for_create[OWNER_ID_FIELD] = dashboard_id + + # act + response = call_create_filter_set( + client, dashboard_id, valid_filter_set_data_for_create + ) + + # assert + assert response.status_code == 201 + assert_filterset_was_created(valid_filter_set_data_for_create) + + def test_when_caller_is_regular_user_and_owner_is_admin__201( + self, + dashboard_id: int, + test_users: Dict[str, int], + valid_filter_set_data_for_create: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, FILTER_SET_OWNER_USERNAME) + valid_filter_set_data_for_create[OWNER_TYPE_FIELD] = USER_OWNER_TYPE + valid_filter_set_data_for_create[OWNER_ID_FIELD] = test_users[ + ADMIN_USERNAME_FOR_TEST + ] + + # act + response = call_create_filter_set( + client, dashboard_id, valid_filter_set_data_for_create + ) + + # assert + assert response.status_code == 201 + assert_filterset_was_created(valid_filter_set_data_for_create) + + def test_when_caller_is_regular_user_and_owner_is_dashboard_owner__201( + self, + dashboard_id: int, + test_users: Dict[str, int], + valid_filter_set_data_for_create: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, FILTER_SET_OWNER_USERNAME) + valid_filter_set_data_for_create[OWNER_TYPE_FIELD] = USER_OWNER_TYPE + valid_filter_set_data_for_create[OWNER_ID_FIELD] = test_users[ + DASHBOARD_OWNER_USERNAME + ] + + # act + response = call_create_filter_set( + client, dashboard_id, valid_filter_set_data_for_create + ) + + # assert + assert response.status_code == 201 + assert_filterset_was_created(valid_filter_set_data_for_create) + + def test_when_caller_is_regular_user_and_owner_is_regular_user__201( + self, + dashboard_id: int, + test_users: Dict[str, int], + valid_filter_set_data_for_create: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, FILTER_SET_OWNER_USERNAME) + valid_filter_set_data_for_create[OWNER_TYPE_FIELD] = USER_OWNER_TYPE + valid_filter_set_data_for_create[OWNER_ID_FIELD] = test_users[ + FILTER_SET_OWNER_USERNAME + ] + + # act + response = call_create_filter_set( + client, dashboard_id, valid_filter_set_data_for_create + ) + + # assert + assert response.status_code == 201 + assert_filterset_was_created(valid_filter_set_data_for_create) + + def 
test_when_caller_is_regular_user_and_owner_type_is_dashboard__403( + self, + dashboard_id: int, + test_users: Dict[str, int], + valid_filter_set_data_for_create: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, FILTER_SET_OWNER_USERNAME) + valid_filter_set_data_for_create[OWNER_TYPE_FIELD] = DASHBOARD_OWNER_TYPE + valid_filter_set_data_for_create[OWNER_ID_FIELD] = dashboard_id + + # act + response = call_create_filter_set( + client, dashboard_id, valid_filter_set_data_for_create + ) + + # assert + assert response.status_code == 403 + assert_filterset_was_not_created(valid_filter_set_data_for_create) diff --git a/tests/integration_tests/dashboards/filter_sets/delete_api_tests.py b/tests/integration_tests/dashboards/filter_sets/delete_api_tests.py new file mode 100644 index 0000000000000..7011cb5781282 --- /dev/null +++ b/tests/integration_tests/dashboards/filter_sets/delete_api_tests.py @@ -0,0 +1,210 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from typing import Any, Dict, List, TYPE_CHECKING + +from tests.integration_tests.dashboards.filter_sets.consts import ( + DASHBOARD_OWNER_USERNAME, + FILTER_SET_OWNER_USERNAME, + REGULAR_USER, +) +from tests.integration_tests.dashboards.filter_sets.utils import ( + call_delete_filter_set, + collect_all_ids, + get_filter_set_by_name, +) +from tests.integration_tests.test_app import login + +if TYPE_CHECKING: + from flask.testing import FlaskClient + + from superset.models.filter_set import FilterSet + + +def assert_filterset_was_not_deleted(filter_set_dict: Dict[str, Any]) -> None: + assert get_filter_set_by_name(filter_set_dict["name"]) is not None + + +def assert_filterset_deleted(filter_set_dict: Dict[str, Any]) -> None: + assert get_filter_set_by_name(filter_set_dict["name"]) is None + + +class TestDeleteFilterSet: + def test_with_dashboard_exists_filterset_not_exists__200( + self, + dashboard_id: int, + filtersets: Dict[str, List[FilterSet]], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + filter_set_id = max(collect_all_ids(filtersets)) + 1 + + response = call_delete_filter_set(client, {"id": filter_set_id}, dashboard_id) + # assert + assert response.status_code == 200 + + def test_with_dashboard_not_exists_filterset_not_exists__404( + self, + not_exists_dashboard_id: int, + filtersets: Dict[str, List[FilterSet]], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + filter_set_id = max(collect_all_ids(filtersets)) + 1 + + response = call_delete_filter_set( + client, {"id": filter_set_id}, not_exists_dashboard_id + ) + # assert + assert response.status_code == 404 + + def test_with_dashboard_not_exists_filterset_exists__404( + self, + not_exists_dashboard_id: int, + dashboard_based_filter_set_dict: 
Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + + # act + response = call_delete_filter_set( + client, dashboard_based_filter_set_dict, not_exists_dashboard_id + ) + # assert + assert response.status_code == 404 + assert_filterset_was_not_deleted(dashboard_based_filter_set_dict) + + def test_when_caller_is_admin_and_owner_type_is_user__200( + self, + test_users: Dict[str, int], + user_based_filter_set_dict: Dict[str, Any], + valid_filter_set_data_for_update: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + # act + response = call_delete_filter_set(client, user_based_filter_set_dict) + + # assert + assert response.status_code == 200 + assert_filterset_deleted(user_based_filter_set_dict) + + def test_when_caller_is_admin_and_owner_type_is_dashboard__200( + self, + test_users: Dict[str, int], + dashboard_based_filter_set_dict: Dict[str, Any], + valid_filter_set_data_for_update: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + # act + response = call_delete_filter_set(client, dashboard_based_filter_set_dict) + + # assert + assert response.status_code == 200 + assert_filterset_deleted(dashboard_based_filter_set_dict) + + def test_when_caller_is_dashboard_owner_and_owner_is_other_user_403( + self, + test_users: Dict[str, int], + user_based_filter_set_dict: Dict[str, Any], + valid_filter_set_data_for_update: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, DASHBOARD_OWNER_USERNAME) + + # act + response = call_delete_filter_set(client, user_based_filter_set_dict) + + # assert + assert response.status_code == 403 + assert_filterset_was_not_deleted(user_based_filter_set_dict) + + def test_when_caller_is_dashboard_owner_and_owner_type_is_dashboard__200( + self, + test_users: Dict[str, int], + dashboard_based_filter_set_dict: Dict[str, Any], + valid_filter_set_data_for_update: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, DASHBOARD_OWNER_USERNAME) + + # act + response = call_delete_filter_set(client, dashboard_based_filter_set_dict) + + # assert + assert response.status_code == 200 + assert_filterset_deleted(dashboard_based_filter_set_dict) + + def test_when_caller_is_filterset_owner__200( + self, + test_users: Dict[str, int], + user_based_filter_set_dict: Dict[str, Any], + valid_filter_set_data_for_update: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, FILTER_SET_OWNER_USERNAME) + + # act + response = call_delete_filter_set(client, user_based_filter_set_dict) + + # assert + assert response.status_code == 200 + assert_filterset_deleted(user_based_filter_set_dict) + + def test_when_caller_is_regular_user_and_owner_type_is_user__403( + self, + test_users: Dict[str, int], + user_based_filter_set_dict: Dict[str, Any], + valid_filter_set_data_for_update: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, REGULAR_USER) + + # act + response = call_delete_filter_set(client, user_based_filter_set_dict) + + # assert + assert response.status_code == 403 + assert_filterset_was_not_deleted(user_based_filter_set_dict) + + def test_when_caller_is_regular_user_and_owner_type_is_dashboard__403( + self, + test_users: Dict[str, int], + dashboard_based_filter_set_dict: Dict[str, Any], + valid_filter_set_data_for_update: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, REGULAR_USER) + + # act + response = call_delete_filter_set(client, 
dashboard_based_filter_set_dict) + + # assert + assert response.status_code == 403 + assert_filterset_was_not_deleted(dashboard_based_filter_set_dict) diff --git a/tests/integration_tests/dashboards/filter_sets/get_api_tests.py b/tests/integration_tests/dashboards/filter_sets/get_api_tests.py new file mode 100644 index 0000000000000..ad40d0e33c859 --- /dev/null +++ b/tests/integration_tests/dashboards/filter_sets/get_api_tests.py @@ -0,0 +1,132 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from typing import Any, Dict, List, Set, TYPE_CHECKING + +from tests.integration_tests.dashboards.filter_sets.consts import ( + DASHBOARD_OWNER_USERNAME, + FILTER_SET_OWNER_USERNAME, + REGULAR_USER, +) +from tests.integration_tests.dashboards.filter_sets.utils import ( + call_get_filter_sets, + collect_all_ids, +) +from tests.integration_tests.test_app import login + +if TYPE_CHECKING: + from flask.testing import FlaskClient + + from superset.models.filter_set import FilterSet + + +class TestGetFilterSetsApi: + def test_with_dashboard_not_exists__404( + self, + not_exists_dashboard_id: int, + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + + # act + response = call_get_filter_sets(client, not_exists_dashboard_id) + + # assert + assert response.status_code == 404 + + def test_dashboards_without_filtersets__200( + self, dashboard_id: int, client: FlaskClient[Any] + ): + # arrange + login(client, "admin") + + # act + response = call_get_filter_sets(client, dashboard_id) + + # assert + assert response.status_code == 200 + assert response.is_json and response.json["count"] == 0 + + def test_when_caller_admin__200( + self, + dashboard_id: int, + filtersets: Dict[str, List[FilterSet]], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + expected_ids: Set[int] = collect_all_ids(filtersets) + + # act + response = call_get_filter_sets(client, dashboard_id) + + # assert + assert response.status_code == 200 + assert response.is_json and set(response.json["ids"]) == expected_ids + + def test_when_caller_dashboard_owner__200( + self, + dashboard_id: int, + filtersets: Dict[str, List[FilterSet]], + client: FlaskClient[Any], + ): + # arrange + login(client, DASHBOARD_OWNER_USERNAME) + expected_ids = collect_all_ids(filtersets["Dashboard"]) + + # act + response = call_get_filter_sets(client, dashboard_id) + + # assert + assert response.status_code == 200 + assert response.is_json and set(response.json["ids"]) == expected_ids + + def test_when_caller_filterset_owner__200( + self, + dashboard_id: int, + filtersets: Dict[str, List[FilterSet]], + client: FlaskClient[Any], + ): + # arrange + login(client, FILTER_SET_OWNER_USERNAME) + expected_ids = collect_all_ids(filtersets[FILTER_SET_OWNER_USERNAME]) + + # 
act + response = call_get_filter_sets(client, dashboard_id) + + # assert + assert response.status_code == 200 + assert response.is_json and set(response.json["ids"]) == expected_ids + + def test_when_caller_regular_user__200( + self, + dashboard_id: int, + filtersets: Dict[str, List[int]], + client: FlaskClient[Any], + ): + # arrange + login(client, REGULAR_USER) + expected_ids: Set[int] = set() + + # act + response = call_get_filter_sets(client, dashboard_id) + + # assert + assert response.status_code == 200 + assert response.is_json and set(response.json["ids"]) == expected_ids diff --git a/tests/integration_tests/dashboards/filter_sets/update_api_tests.py b/tests/integration_tests/dashboards/filter_sets/update_api_tests.py new file mode 100644 index 0000000000000..07db98f617815 --- /dev/null +++ b/tests/integration_tests/dashboards/filter_sets/update_api_tests.py @@ -0,0 +1,520 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +import json +from typing import Any, Dict, List, TYPE_CHECKING + +from superset.dashboards.filter_sets.consts import ( + DESCRIPTION_FIELD, + JSON_METADATA_FIELD, + NAME_FIELD, + OWNER_TYPE_FIELD, + PARAMS_PROPERTY, +) +from tests.integration_tests.dashboards.filter_sets.consts import ( + DASHBOARD_OWNER_USERNAME, + FILTER_SET_OWNER_USERNAME, + REGULAR_USER, +) +from tests.integration_tests.dashboards.filter_sets.utils import ( + call_update_filter_set, + collect_all_ids, + get_filter_set_by_name, +) +from tests.integration_tests.test_app import login + +if TYPE_CHECKING: + from flask.testing import FlaskClient + + from superset.models.filter_set import FilterSet + + +def merge_two_filter_set_dict( + first: Dict[Any, Any], second: Dict[Any, Any] +) -> Dict[Any, Any]: + for d in [first, second]: + if JSON_METADATA_FIELD in d: + if PARAMS_PROPERTY not in d: + d.setdefault(PARAMS_PROPERTY, json.loads(d[JSON_METADATA_FIELD])) + d.pop(JSON_METADATA_FIELD) + return {**first, **second} + + +def assert_filterset_was_not_updated(filter_set_dict: Dict[str, Any]) -> None: + assert filter_set_dict == get_filter_set_by_name(filter_set_dict["name"]).to_dict() + + +def assert_filterset_updated( + filter_set_dict_before: Dict[str, Any], data_updated: Dict[str, Any] +) -> None: + expected_data = merge_two_filter_set_dict(filter_set_dict_before, data_updated) + assert expected_data == get_filter_set_by_name(expected_data["name"]).to_dict() + + +class TestUpdateFilterSet: + def test_with_dashboard_exists_filterset_not_exists__404( + self, + dashboard_id: int, + filtersets: Dict[str, List[FilterSet]], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + filter_set_id = max(collect_all_ids(filtersets)) + 1 + + response = call_update_filter_set( + client, {"id": filter_set_id}, {}, dashboard_id 
+ ) + # assert + assert response.status_code == 404 + + def test_with_dashboard_not_exists_filterset_not_exists__404( + self, + not_exists_dashboard_id: int, + filtersets: Dict[str, List[FilterSet]], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + filter_set_id = max(collect_all_ids(filtersets)) + 1 + + response = call_update_filter_set( + client, {"id": filter_set_id}, {}, not_exists_dashboard_id + ) + # assert + assert response.status_code == 404 + + def test_with_dashboard_not_exists_filterset_exists__404( + self, + not_exists_dashboard_id: int, + dashboard_based_filter_set_dict: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + + # act + response = call_update_filter_set( + client, dashboard_based_filter_set_dict, {}, not_exists_dashboard_id + ) + # assert + assert response.status_code == 404 + assert_filterset_was_not_updated(dashboard_based_filter_set_dict) + + def test_with_extra_field__400( + self, + dashboard_based_filter_set_dict: Dict[str, Any], + valid_filter_set_data_for_update: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + valid_filter_set_data_for_update["extra"] = "val" + + # act + response = call_update_filter_set( + client, dashboard_based_filter_set_dict, valid_filter_set_data_for_update + ) + + # assert + assert response.status_code == 400 + assert response.json["message"]["extra"][0] == "Unknown field." + assert_filterset_was_not_updated(dashboard_based_filter_set_dict) + + def test_with_id_field__400( + self, + dashboard_based_filter_set_dict: Dict[str, Any], + valid_filter_set_data_for_update: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + valid_filter_set_data_for_update["id"] = 1 + + # act + response = call_update_filter_set( + client, dashboard_based_filter_set_dict, valid_filter_set_data_for_update + ) + + # assert + assert response.status_code == 400 + assert response.json["message"]["id"][0] == "Unknown field." 
+ assert_filterset_was_not_updated(dashboard_based_filter_set_dict) + + def test_with_none_name__400( + self, + dashboard_based_filter_set_dict: Dict[str, Any], + valid_filter_set_data_for_update: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + valid_filter_set_data_for_update[NAME_FIELD] = None + + # act + response = call_update_filter_set( + client, dashboard_based_filter_set_dict, valid_filter_set_data_for_update + ) + + # assert + assert response.status_code == 400 + assert_filterset_was_not_updated(dashboard_based_filter_set_dict) + + def test_with_int_as_name__400( + self, + dashboard_based_filter_set_dict: Dict[str, Any], + valid_filter_set_data_for_update: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + valid_filter_set_data_for_update[NAME_FIELD] = 4 + + # act + response = call_update_filter_set( + client, dashboard_based_filter_set_dict, valid_filter_set_data_for_update + ) + + # assert + assert response.status_code == 400 + assert_filterset_was_not_updated(dashboard_based_filter_set_dict) + + def test_without_name__200( + self, + dashboard_based_filter_set_dict: Dict[str, Any], + valid_filter_set_data_for_update: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + valid_filter_set_data_for_update.pop(NAME_FIELD, None) + + # act + response = call_update_filter_set( + client, dashboard_based_filter_set_dict, valid_filter_set_data_for_update + ) + + # assert + assert response.status_code == 200 + assert_filterset_updated( + dashboard_based_filter_set_dict, valid_filter_set_data_for_update + ) + + def test_with_none_description__400( + self, + dashboard_based_filter_set_dict: Dict[str, Any], + valid_filter_set_data_for_update: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + valid_filter_set_data_for_update[DESCRIPTION_FIELD] = None + + # act + response = call_update_filter_set( + client, dashboard_based_filter_set_dict, valid_filter_set_data_for_update + ) + + # assert + assert response.status_code == 400 + assert_filterset_was_not_updated(dashboard_based_filter_set_dict) + + def test_with_int_as_description__400( + self, + dashboard_based_filter_set_dict: Dict[str, Any], + valid_filter_set_data_for_update: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + valid_filter_set_data_for_update[DESCRIPTION_FIELD] = 1 + + # act + response = call_update_filter_set( + client, dashboard_based_filter_set_dict, valid_filter_set_data_for_update + ) + + # assert + assert response.status_code == 400 + assert_filterset_was_not_updated(dashboard_based_filter_set_dict) + + def test_without_description__200( + self, + dashboard_based_filter_set_dict: Dict[str, Any], + valid_filter_set_data_for_update: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + valid_filter_set_data_for_update.pop(DESCRIPTION_FIELD, None) + + # act + response = call_update_filter_set( + client, dashboard_based_filter_set_dict, valid_filter_set_data_for_update + ) + + # assert + assert response.status_code == 200 + assert_filterset_updated( + dashboard_based_filter_set_dict, valid_filter_set_data_for_update + ) + + def test_with_invalid_json_metadata__400( + self, + dashboard_based_filter_set_dict: Dict[str, Any], + valid_filter_set_data_for_update: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + valid_filter_set_data_for_update[JSON_METADATA_FIELD] = {} + + # act 
+ response = call_update_filter_set( + client, dashboard_based_filter_set_dict, valid_filter_set_data_for_update + ) + + # assert + assert response.status_code == 400 + assert_filterset_was_not_updated(dashboard_based_filter_set_dict) + + def test_with_json_metadata__200( + self, + dashboard_based_filter_set_dict: Dict[str, Any], + valid_filter_set_data_for_update: Dict[str, Any], + valid_json_metadata: Dict[Any, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + valid_json_metadata["nativeFilters"] = {"changed": "changed"} + valid_filter_set_data_for_update[JSON_METADATA_FIELD] = json.dumps( + valid_json_metadata + ) + + # act + response = call_update_filter_set( + client, dashboard_based_filter_set_dict, valid_filter_set_data_for_update + ) + + # assert + assert response.status_code == 200 + assert_filterset_updated( + dashboard_based_filter_set_dict, valid_filter_set_data_for_update + ) + + def test_with_invalid_owner_type__400( + self, + dashboard_based_filter_set_dict: Dict[str, Any], + valid_filter_set_data_for_update: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + valid_filter_set_data_for_update[OWNER_TYPE_FIELD] = "OTHER_TYPE" + + # act + response = call_update_filter_set( + client, dashboard_based_filter_set_dict, valid_filter_set_data_for_update + ) + + # assert + assert response.status_code == 400 + assert_filterset_was_not_updated(dashboard_based_filter_set_dict) + + def test_with_user_owner_type__400( + self, + dashboard_based_filter_set_dict: Dict[str, Any], + valid_filter_set_data_for_update: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + valid_filter_set_data_for_update[OWNER_TYPE_FIELD] = "User" + + # act + response = call_update_filter_set( + client, dashboard_based_filter_set_dict, valid_filter_set_data_for_update + ) + + # assert + assert response.status_code == 400 + assert_filterset_was_not_updated(dashboard_based_filter_set_dict) + + def test_with_dashboard_owner_type__200( + self, + user_based_filter_set_dict: Dict[str, Any], + valid_filter_set_data_for_update: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + valid_filter_set_data_for_update[OWNER_TYPE_FIELD] = "Dashboard" + + # act + response = call_update_filter_set( + client, user_based_filter_set_dict, valid_filter_set_data_for_update + ) + + # assert + assert response.status_code == 200 + user_based_filter_set_dict["owner_id"] = user_based_filter_set_dict[ + "dashboard_id" + ] + assert_filterset_updated( + user_based_filter_set_dict, valid_filter_set_data_for_update + ) + + def test_when_caller_is_admin_and_owner_type_is_user__200( + self, + test_users: Dict[str, int], + user_based_filter_set_dict: Dict[str, Any], + valid_filter_set_data_for_update: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + # act + response = call_update_filter_set( + client, user_based_filter_set_dict, valid_filter_set_data_for_update + ) + + # assert + assert response.status_code == 200 + assert_filterset_updated( + user_based_filter_set_dict, valid_filter_set_data_for_update + ) + + def test_when_caller_is_admin_and_owner_type_is_dashboard__200( + self, + test_users: Dict[str, int], + dashboard_based_filter_set_dict: Dict[str, Any], + valid_filter_set_data_for_update: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, "admin") + # act + response = call_update_filter_set( + client, dashboard_based_filter_set_dict, 
valid_filter_set_data_for_update + ) + + # assert + assert response.status_code == 200 + assert_filterset_updated( + dashboard_based_filter_set_dict, valid_filter_set_data_for_update + ) + + def test_when_caller_is_dashboard_owner_and_owner_is_other_user_403( + self, + test_users: Dict[str, int], + user_based_filter_set_dict: Dict[str, Any], + valid_filter_set_data_for_update: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, DASHBOARD_OWNER_USERNAME) + + # act + response = call_update_filter_set( + client, user_based_filter_set_dict, valid_filter_set_data_for_update + ) + + # assert + assert response.status_code == 403 + assert_filterset_was_not_updated(user_based_filter_set_dict) + + def test_when_caller_is_dashboard_owner_and_owner_type_is_dashboard__200( + self, + test_users: Dict[str, int], + dashboard_based_filter_set_dict: Dict[str, Any], + valid_filter_set_data_for_update: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, DASHBOARD_OWNER_USERNAME) + + # act + response = call_update_filter_set( + client, dashboard_based_filter_set_dict, valid_filter_set_data_for_update + ) + + # assert + assert response.status_code == 200 + assert_filterset_updated( + dashboard_based_filter_set_dict, valid_filter_set_data_for_update + ) + + def test_when_caller_is_filterset_owner__200( + self, + test_users: Dict[str, int], + user_based_filter_set_dict: Dict[str, Any], + valid_filter_set_data_for_update: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, FILTER_SET_OWNER_USERNAME) + + # act + response = call_update_filter_set( + client, user_based_filter_set_dict, valid_filter_set_data_for_update + ) + + # assert + assert response.status_code == 200 + assert_filterset_updated( + user_based_filter_set_dict, valid_filter_set_data_for_update + ) + + def test_when_caller_is_regular_user_and_owner_type_is_user__403( + self, + test_users: Dict[str, int], + user_based_filter_set_dict: Dict[str, Any], + valid_filter_set_data_for_update: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, REGULAR_USER) + + # act + response = call_update_filter_set( + client, user_based_filter_set_dict, valid_filter_set_data_for_update + ) + + # assert + assert response.status_code == 403 + assert_filterset_was_not_updated(user_based_filter_set_dict) + + def test_when_caller_is_regular_user_and_owner_type_is_dashboard__403( + self, + test_users: Dict[str, int], + dashboard_based_filter_set_dict: Dict[str, Any], + valid_filter_set_data_for_update: Dict[str, Any], + client: FlaskClient[Any], + ): + # arrange + login(client, REGULAR_USER) + + # act + response = call_update_filter_set( + client, dashboard_based_filter_set_dict, valid_filter_set_data_for_update + ) + + # assert + assert response.status_code == 403 + assert_filterset_was_not_updated(dashboard_based_filter_set_dict) diff --git a/tests/integration_tests/dashboards/filter_sets/utils.py b/tests/integration_tests/dashboards/filter_sets/utils.py new file mode 100644 index 0000000000000..a63e4164d8959 --- /dev/null +++ b/tests/integration_tests/dashboards/filter_sets/utils.py @@ -0,0 +1,102 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from typing import Any, Dict, List, Optional, Set, TYPE_CHECKING, Union + +from superset.models.filter_set import FilterSet +from tests.integration_tests.dashboards.filter_sets.consts import FILTER_SET_URI +from tests.integration_tests.test_app import app + +if TYPE_CHECKING: + from flask import Response + from flask.testing import FlaskClient + + +def call_create_filter_set( + client: FlaskClient[Any], dashboard_id: int, data: Dict[str, Any] +) -> Response: + uri = FILTER_SET_URI.format(dashboard_id=dashboard_id) + return client.post(uri, json=data) + + +def call_get_filter_sets(client: FlaskClient[Any], dashboard_id: int) -> Response: + uri = FILTER_SET_URI.format(dashboard_id=dashboard_id) + return client.get(uri) + + +def call_delete_filter_set( + client: FlaskClient[Any], + filter_set_dict_to_update: Dict[str, Any], + dashboard_id: Optional[int] = None, +) -> Response: + dashboard_id = ( + dashboard_id + if dashboard_id is not None + else filter_set_dict_to_update["dashboard_id"] + ) + uri = "{}/{}".format( + FILTER_SET_URI.format(dashboard_id=dashboard_id), + filter_set_dict_to_update["id"], + ) + return client.delete(uri) + + +def call_update_filter_set( + client: FlaskClient[Any], + filter_set_dict_to_update: Dict[str, Any], + data: Dict[str, Any], + dashboard_id: Optional[int] = None, +) -> Response: + dashboard_id = ( + dashboard_id + if dashboard_id is not None + else filter_set_dict_to_update["dashboard_id"] + ) + uri = "{}/{}".format( + FILTER_SET_URI.format(dashboard_id=dashboard_id), + filter_set_dict_to_update["id"], + ) + return client.put(uri, json=data) + + +def get_filter_set_by_name(name: str) -> FilterSet: + with app.app_context(): + return FilterSet.get_by_name(name) + + +def get_filter_set_by_id(id_: int) -> FilterSet: + with app.app_context(): + return FilterSet.get(id_) + + +def get_filter_set_by_dashboard_id(dashboard_id: int) -> FilterSet: + with app.app_context(): + return FilterSet.get_by_dashboard_id(dashboard_id) + + +def collect_all_ids( + filtersets: Union[Dict[str, List[FilterSet]], List[FilterSet]] +) -> Set[int]: + if isinstance(filtersets, dict): + filtersets_lists: List[List[FilterSet]] = list(filtersets.values()) + ids: Set[int] = set() + lst: List[FilterSet] + for lst in filtersets_lists: + ids.update(set(map(lambda fs: fs.id, lst))) + return ids + return set(map(lambda fs: fs.id, filtersets)) diff --git a/tests/integration_tests/dashboards/filter_state/__init__.py b/tests/integration_tests/dashboards/filter_state/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/dashboards/filter_state/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/dashboards/filter_state/api_tests.py b/tests/integration_tests/dashboards/filter_state/api_tests.py new file mode 100644 index 0000000000000..15b479686a4ec --- /dev/null +++ b/tests/integration_tests/dashboards/filter_state/api_tests.py @@ -0,0 +1,294 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import json +from unittest.mock import patch + +import pytest +from flask.ctx import AppContext +from flask_appbuilder.security.sqla.models import User +from sqlalchemy.orm import Session + +from superset.dashboards.commands.exceptions import DashboardAccessDeniedError +from superset.extensions import cache_manager +from superset.models.dashboard import Dashboard +from superset.temporary_cache.commands.entry import Entry +from superset.temporary_cache.utils import cache_key +from tests.integration_tests.fixtures.world_bank_dashboard import ( + load_world_bank_dashboard_with_slices, + load_world_bank_data, +) +from tests.integration_tests.test_app import app + +KEY = "test-key" +INITIAL_VALUE = json.dumps({"test": "initial value"}) +UPDATED_VALUE = json.dumps({"test": "updated value"}) + + +@pytest.fixture +def dashboard_id(app_context: AppContext, load_world_bank_dashboard_with_slices) -> int: + session: Session = app_context.app.appbuilder.get_session + dashboard = session.query(Dashboard).filter_by(slug="world_health").one() + return dashboard.id + + +@pytest.fixture +def admin_id(app_context: AppContext) -> int: + session: Session = app_context.app.appbuilder.get_session + admin = session.query(User).filter_by(username="admin").one_or_none() + return admin.id + + +@pytest.fixture(autouse=True) +def cache(dashboard_id, admin_id): + entry: Entry = {"owner": admin_id, "value": INITIAL_VALUE} + cache_manager.filter_state_cache.set(cache_key(dashboard_id, KEY), entry) + + +def test_post(test_client, login_as_admin, dashboard_id: int): + resp = test_client.post( + f"api/v1/dashboard/{dashboard_id}/filter_state", + json={ + "value": INITIAL_VALUE, + }, + ) + assert resp.status_code == 201 + + +def test_post_bad_request_non_string(test_client, login_as_admin, dashboard_id: int): + resp = test_client.post( + f"api/v1/dashboard/{dashboard_id}/filter_state", + json={ + "value": 1234, + }, + ) + assert resp.status_code == 400 + + +def test_post_bad_request_non_json_string( + test_client, login_as_admin, dashboard_id: int 
+): + payload = { + "value": "foo", + } + resp = test_client.post( + f"api/v1/dashboard/{dashboard_id}/filter_state", json=payload + ) + assert resp.status_code == 400 + + +def test_post_access_denied(test_client, login_as, dashboard_id: int): + login_as("gamma") + payload = { + "value": INITIAL_VALUE, + } + resp = test_client.post( + f"api/v1/dashboard/{dashboard_id}/filter_state", json=payload + ) + assert resp.status_code == 404 + + +def test_post_same_key_for_same_tab_id(test_client, login_as_admin, dashboard_id: int): + payload = { + "value": INITIAL_VALUE, + } + resp = test_client.post( + f"api/v1/dashboard/{dashboard_id}/filter_state?tab_id=1", json=payload + ) + data = json.loads(resp.data.decode("utf-8")) + first_key = data.get("key") + resp = test_client.post( + f"api/v1/dashboard/{dashboard_id}/filter_state?tab_id=1", json=payload + ) + data = json.loads(resp.data.decode("utf-8")) + second_key = data.get("key") + assert first_key == second_key + + +def test_post_different_key_for_different_tab_id( + test_client, login_as_admin, dashboard_id: int +): + payload = { + "value": INITIAL_VALUE, + } + resp = test_client.post( + f"api/v1/dashboard/{dashboard_id}/filter_state?tab_id=1", json=payload + ) + data = json.loads(resp.data.decode("utf-8")) + first_key = data.get("key") + resp = test_client.post( + f"api/v1/dashboard/{dashboard_id}/filter_state?tab_id=2", json=payload + ) + data = json.loads(resp.data.decode("utf-8")) + second_key = data.get("key") + assert first_key != second_key + + +def test_post_different_key_for_no_tab_id( + test_client, login_as_admin, dashboard_id: int +): + payload = { + "value": INITIAL_VALUE, + } + resp = test_client.post( + f"api/v1/dashboard/{dashboard_id}/filter_state", json=payload + ) + data = json.loads(resp.data.decode("utf-8")) + first_key = data.get("key") + resp = test_client.post( + f"api/v1/dashboard/{dashboard_id}/filter_state", json=payload + ) + data = json.loads(resp.data.decode("utf-8")) + second_key = data.get("key") + assert first_key != second_key + + +def test_put(test_client, login_as_admin, dashboard_id: int): + resp = test_client.put( + f"api/v1/dashboard/{dashboard_id}/filter_state/{KEY}", + json={ + "value": UPDATED_VALUE, + }, + ) + assert resp.status_code == 200 + + +def test_put_same_key_for_same_tab_id(test_client, login_as_admin, dashboard_id: int): + payload = { + "value": INITIAL_VALUE, + } + resp = test_client.put( + f"api/v1/dashboard/{dashboard_id}/filter_state/{KEY}?tab_id=1", json=payload + ) + data = json.loads(resp.data.decode("utf-8")) + first_key = data.get("key") + resp = test_client.put( + f"api/v1/dashboard/{dashboard_id}/filter_state/{KEY}?tab_id=1", json=payload + ) + data = json.loads(resp.data.decode("utf-8")) + second_key = data.get("key") + assert first_key == second_key + + +def test_put_different_key_for_different_tab_id( + test_client, login_as_admin, dashboard_id: int +): + payload = { + "value": INITIAL_VALUE, + } + resp = test_client.put( + f"api/v1/dashboard/{dashboard_id}/filter_state/{KEY}?tab_id=1", json=payload + ) + data = json.loads(resp.data.decode("utf-8")) + first_key = data.get("key") + resp = test_client.put( + f"api/v1/dashboard/{dashboard_id}/filter_state/{KEY}?tab_id=2", json=payload + ) + data = json.loads(resp.data.decode("utf-8")) + second_key = data.get("key") + assert first_key != second_key + + +def test_put_different_key_for_no_tab_id( + test_client, login_as_admin, dashboard_id: int +): + payload = { + "value": INITIAL_VALUE, + } + resp = test_client.put( + 
f"api/v1/dashboard/{dashboard_id}/filter_state/{KEY}", json=payload + ) + data = json.loads(resp.data.decode("utf-8")) + first_key = data.get("key") + resp = test_client.put( + f"api/v1/dashboard/{dashboard_id}/filter_state/{KEY}", json=payload + ) + data = json.loads(resp.data.decode("utf-8")) + second_key = data.get("key") + assert first_key != second_key + + +def test_put_bad_request_non_string(test_client, login_as_admin, dashboard_id: int): + resp = test_client.put( + f"api/v1/dashboard/{dashboard_id}/filter_state/{KEY}", + json={ + "value": 1234, + }, + ) + assert resp.status_code == 400 + + +def test_put_bad_request_non_json_string( + test_client, login_as_admin, dashboard_id: int +): + resp = test_client.put( + f"api/v1/dashboard/{dashboard_id}/filter_state/{KEY}", + json={ + "value": "foo", + }, + ) + assert resp.status_code == 400 + + +def test_put_access_denied(test_client, login_as, dashboard_id: int): + login_as("gamma") + resp = test_client.put( + f"api/v1/dashboard/{dashboard_id}/filter_state/{KEY}", + json={ + "value": UPDATED_VALUE, + }, + ) + assert resp.status_code == 404 + + +def test_get_key_not_found(test_client, login_as_admin, dashboard_id: int): + resp = test_client.get(f"api/v1/dashboard/{dashboard_id}/filter_state/unknown-key/") + assert resp.status_code == 404 + + +def test_get_dashboard_not_found(test_client, login_as_admin): + resp = test_client.get(f"api/v1/dashboard/{-1}/filter_state/{KEY}") + assert resp.status_code == 404 + + +def test_get_dashboard_filter_state(test_client, login_as_admin, dashboard_id: int): + resp = test_client.get(f"api/v1/dashboard/{dashboard_id}/filter_state/{KEY}") + assert resp.status_code == 200 + data = json.loads(resp.data.decode("utf-8")) + assert INITIAL_VALUE == data.get("value") + + +def test_get_access_denied(test_client, login_as, dashboard_id): + login_as("gamma") + resp = test_client.get(f"api/v1/dashboard/{dashboard_id}/filter_state/{KEY}") + assert resp.status_code == 404 + + +def test_delete(test_client, login_as_admin, dashboard_id: int): + resp = test_client.delete(f"api/v1/dashboard/{dashboard_id}/filter_state/{KEY}") + assert resp.status_code == 200 + + +def test_delete_access_denied(test_client, login_as, dashboard_id: int): + login_as("gamma") + resp = test_client.delete(f"api/v1/dashboard/{dashboard_id}/filter_state/{KEY}") + assert resp.status_code == 404 + + +def test_delete_not_owner(test_client, login_as, dashboard_id: int): + login_as("gamma") + resp = test_client.delete(f"api/v1/dashboard/{dashboard_id}/filter_state/{KEY}") + assert resp.status_code == 404 diff --git a/tests/integration_tests/dashboards/permalink/__init__.py b/tests/integration_tests/dashboards/permalink/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/dashboards/permalink/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/dashboards/permalink/api_tests.py b/tests/integration_tests/dashboards/permalink/api_tests.py new file mode 100644 index 0000000000000..40a312ef855a1 --- /dev/null +++ b/tests/integration_tests/dashboards/permalink/api_tests.py @@ -0,0 +1,114 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import json +from typing import Iterator +from unittest.mock import patch +from uuid import uuid3 + +import pytest +from flask_appbuilder.security.sqla.models import User +from sqlalchemy.orm import Session + +from superset import db +from superset.dashboards.commands.exceptions import DashboardAccessDeniedError +from superset.key_value.models import KeyValueEntry +from superset.key_value.types import KeyValueResource +from superset.key_value.utils import decode_permalink_id +from superset.models.dashboard import Dashboard +from tests.integration_tests.fixtures.world_bank_dashboard import ( + load_world_bank_dashboard_with_slices, + load_world_bank_data, +) +from tests.integration_tests.test_app import app + +STATE = { + "dataMask": {"FILTER_1": "foo"}, + "activeTabs": ["my-anchor"], +} + + +@pytest.fixture +def dashboard_id(load_world_bank_dashboard_with_slices) -> int: + with app.app_context() as ctx: + session: Session = ctx.app.appbuilder.get_session + dashboard = session.query(Dashboard).filter_by(slug="world_health").one() + return dashboard.id + + +@pytest.fixture +def permalink_salt() -> Iterator[str]: + from superset.key_value.shared_entries import get_permalink_salt, get_uuid_namespace + from superset.key_value.types import SharedKey + + key = SharedKey.DASHBOARD_PERMALINK_SALT + salt = get_permalink_salt(key) + yield salt + namespace = get_uuid_namespace(salt) + db.session.query(KeyValueEntry).filter_by( + resource=KeyValueResource.APP, + uuid=uuid3(namespace, key), + ).delete() + db.session.commit() + + +def test_post( + test_client, login_as_admin, dashboard_id: int, permalink_salt: str +) -> None: + resp = test_client.post(f"api/v1/dashboard/{dashboard_id}/permalink", json=STATE) + assert resp.status_code == 201 + data = resp.json + key = data["key"] + url = data["url"] + assert key in url + id_ = decode_permalink_id(key, permalink_salt) + + assert ( + data + == test_client.post( + f"api/v1/dashboard/{dashboard_id}/permalink", json=STATE + ).json + ), "Should always return the same permalink key for the same payload" +
+ db.session.query(KeyValueEntry).filter_by(id=id_).delete() + db.session.commit() + + +def test_post_access_denied(test_client, login_as, dashboard_id: int): + login_as("gamma") + resp = test_client.post(f"api/v1/dashboard/{dashboard_id}/permalink", json=STATE) + assert resp.status_code == 404 + + +def test_post_invalid_schema(test_client, login_as_admin, dashboard_id: int): + resp = test_client.post( + f"api/v1/dashboard/{dashboard_id}/permalink", json={"foo": "bar"} + ) + assert resp.status_code == 400 + + +def test_get(test_client, login_as_admin, dashboard_id: int, permalink_salt: str): + key = test_client.post( + f"api/v1/dashboard/{dashboard_id}/permalink", json=STATE + ).json["key"] + resp = test_client.get(f"api/v1/dashboard/permalink/{key}") + assert resp.status_code == 200 + result = resp.json + assert result["dashboardId"] == str(dashboard_id) + assert result["state"] == STATE + id_ = decode_permalink_id(key, permalink_salt) + db.session.query(KeyValueEntry).filter_by(id=id_).delete() + db.session.commit() diff --git a/tests/integration_tests/dashboards/security/__init__.py b/tests/integration_tests/dashboards/security/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/dashboards/security/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/dashboards/security/base_case.py b/tests/integration_tests/dashboards/security/base_case.py new file mode 100644 index 0000000000000..bbb5fad831166 --- /dev/null +++ b/tests/integration_tests/dashboards/security/base_case.py @@ -0,0 +1,54 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from typing import List, Optional
+
+import pytest
+from flask import escape, Response
+
+from superset.models.dashboard import Dashboard
+from tests.integration_tests.dashboards.base_case import DashboardTestCase
+
+
+class BaseTestDashboardSecurity(DashboardTestCase):
+    def tearDown(self) -> None:
+        self.clean_created_objects()
+
+    def assert_dashboard_api_response(
+        self, response: Response, dashboard_to_access: Dashboard
+    ) -> None:
+        self.assert200(response)
+        assert response.json["id"] == dashboard_to_access.id
+
+    def assert_dashboards_api_response(
+        self,
+        response: Response,
+        expected_counts: int,
+        expected_dashboards: Optional[List[Dashboard]] = None,
+        not_expected_dashboards: Optional[List[Dashboard]] = None,
+    ) -> None:
+        self.assert200(response)
+        response_data = response.json
+        assert response_data["count"] == expected_counts
+        response_dashboards_url = set(
+            map(lambda dash: dash["url"], response_data["result"])
+        )
+        expected_dashboards = expected_dashboards or []
+        for dashboard in expected_dashboards:
+            assert dashboard.url in response_dashboards_url
+        not_expected_dashboards = not_expected_dashboards or []
+        for dashboard in not_expected_dashboards:
+            assert dashboard.url not in response_dashboards_url
diff --git a/tests/integration_tests/dashboards/security/security_dataset_tests.py b/tests/integration_tests/dashboards/security/security_dataset_tests.py
new file mode 100644
index 0000000000000..2eafc4b53e0cd
--- /dev/null
+++ b/tests/integration_tests/dashboards/security/security_dataset_tests.py
@@ -0,0 +1,236 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Unit tests for Superset""" +import json + +import prison +import pytest +from flask import escape + +from superset import app +from superset.models import core as models +from tests.integration_tests.dashboards.base_case import DashboardTestCase +from tests.integration_tests.dashboards.consts import * +from tests.integration_tests.dashboards.dashboard_test_utils import * +from tests.integration_tests.dashboards.superset_factory_util import * +from tests.integration_tests.fixtures.energy_dashboard import ( + load_energy_table_data, + load_energy_table_with_slice, +) + + +class TestDashboardDatasetSecurity(DashboardTestCase): + @pytest.fixture + def load_dashboard(self): + with app.app_context(): + table = ( + db.session.query(SqlaTable).filter_by(table_name="energy_usage").one() + ) + # get a slice from the allowed table + slice = db.session.query(Slice).filter_by(slice_name="Energy Sankey").one() + + self.grant_public_access_to_table(table) + + pytest.hidden_dash_slug = f"hidden_dash_{random_slug()}" + pytest.published_dash_slug = f"published_dash_{random_slug()}" + + # Create a published and hidden dashboard and add them to the database + published_dash = Dashboard() + published_dash.dashboard_title = "Published Dashboard" + published_dash.slug = pytest.published_dash_slug + published_dash.slices = [slice] + published_dash.published = True + + hidden_dash = Dashboard() + hidden_dash.dashboard_title = "Hidden Dashboard" + hidden_dash.slug = pytest.hidden_dash_slug + hidden_dash.slices = [slice] + hidden_dash.published = False + + db.session.merge(published_dash) + db.session.merge(hidden_dash) + yield db.session.commit() + + self.revoke_public_access_to_table(table) + db.session.delete(published_dash) + db.session.delete(hidden_dash) + db.session.commit() + + def test_dashboard_access__admin_can_access_all(self): + # arrange + self.login(username=ADMIN_USERNAME) + dashboard_title_by_url = { + dash.url: dash.dashboard_title for dash in get_all_dashboards() + } + + # act + responses_by_url = { + url: self.client.get(url) for url in dashboard_title_by_url.keys() + } + + # assert + for dashboard_url, get_dashboard_response in responses_by_url.items(): + self.assert200(get_dashboard_response) + + def test_get_dashboards__users_are_dashboards_owners(self): + # arrange + username = "gamma" + user = security_manager.find_user(username) + my_owned_dashboard = create_dashboard_to_db( + dashboard_title="My Dashboard", + published=False, + owners=[user], + ) + + not_my_owned_dashboard = create_dashboard_to_db( + dashboard_title="Not My Dashboard", + published=False, + ) + + self.login(user.username) + + # act + get_dashboards_response = self.get_resp(DASHBOARDS_API_URL) + + # assert + self.assertIn(my_owned_dashboard.url, get_dashboards_response) + self.assertNotIn(not_my_owned_dashboard.url, get_dashboards_response) + + def test_get_dashboards__owners_can_view_empty_dashboard(self): + # arrange + dash = create_dashboard_to_db("Empty Dashboard", slug="empty_dashboard") + dashboard_url = dash.url + gamma_user = security_manager.find_user("gamma") + self.login(gamma_user.username) + + # act + get_dashboards_response = self.get_resp(DASHBOARDS_API_URL) + + # assert + self.assertNotIn(dashboard_url, get_dashboards_response) + + def test_get_dashboards__users_can_view_favorites_dashboards(self): + # arrange + user = security_manager.find_user("gamma") + fav_dash_slug = f"my_favorite_dash_{random_slug()}" + regular_dash_slug = f"regular_dash_{random_slug()}" + + favorite_dash = Dashboard() + 
favorite_dash.dashboard_title = "My Favorite Dashboard" + favorite_dash.slug = fav_dash_slug + + regular_dash = Dashboard() + regular_dash.dashboard_title = "A Plain Ol Dashboard" + regular_dash.slug = regular_dash_slug + + db.session.add(favorite_dash) + db.session.add(regular_dash) + db.session.commit() + + dash = db.session.query(Dashboard).filter_by(slug=fav_dash_slug).first() + + favorites = models.FavStar() + favorites.obj_id = dash.id + favorites.class_name = "Dashboard" + favorites.user_id = user.id + + db.session.add(favorites) + db.session.commit() + + self.login(user.username) + + # act + get_dashboards_response = self.get_resp(DASHBOARDS_API_URL) + + # cleanup + db.session.delete(favorites) + db.session.delete(favorite_dash) + db.session.delete(regular_dash) + db.session.commit() + + # assert + self.assertIn(f"/superset/dashboard/{fav_dash_slug}/", get_dashboards_response) + + def test_get_dashboards__user_can_not_view_unpublished_dash(self): + # arrange + admin_user = security_manager.find_user(ADMIN_USERNAME) + gamma_user = security_manager.find_user(GAMMA_USERNAME) + admin_and_draft_dashboard = create_dashboard_to_db( + dashboard_title="admin_owned_unpublished_dash", owners=[admin_user] + ) + + self.login(gamma_user.username) + + # act - list dashboards as a gamma user + get_dashboards_response_as_gamma = self.get_resp(DASHBOARDS_API_URL) + + # assert + self.assertNotIn( + admin_and_draft_dashboard.url, get_dashboards_response_as_gamma + ) + + @pytest.mark.usefixtures("load_energy_table_with_slice", "load_dashboard") + def test_get_dashboards__users_can_view_permitted_dashboard(self): + # arrange + username = random_str() + new_role = f"role_{random_str()}" + self.create_user_with_roles(username, [new_role], should_create_roles=True) + accessed_table = get_sql_table_by_name("energy_usage") + self.grant_role_access_to_table(accessed_table, new_role) + # get a slice from the allowed table + slice_to_add_to_dashboards = get_slice_by_name("Energy Sankey") + # Create a published and hidden dashboard and add them to the database + first_dash = create_dashboard_to_db( + dashboard_title="Published Dashboard", + published=True, + slices=[slice_to_add_to_dashboards], + ) + + second_dash = create_dashboard_to_db( + dashboard_title="Hidden Dashboard", + published=True, + slices=[slice_to_add_to_dashboards], + ) + + try: + self.login(username) + # act + get_dashboards_response = self.get_resp(DASHBOARDS_API_URL) + + # assert + self.assertIn(second_dash.url, get_dashboards_response) + self.assertIn(first_dash.url, get_dashboards_response) + finally: + self.revoke_public_access_to_table(accessed_table) + + def test_get_dashboards_api_no_data_access(self): + """ + Dashboard API: Test get dashboards no data access + """ + admin = self.get_user("admin") + title = f"title{random_str()}" + create_dashboard_to_db(title, "slug1", owners=[admin]) + + self.login(username="gamma") + arguments = { + "filters": [{"col": "dashboard_title", "opr": "sw", "value": title[0:8]}] + } + uri = DASHBOARDS_API_URL_WITH_QUERY_FORMAT.format(prison.dumps(arguments)) + rv = self.client.get(uri) + self.assert200(rv) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual(0, data["count"]) diff --git a/tests/integration_tests/dashboards/security/security_rbac_tests.py b/tests/integration_tests/dashboards/security/security_rbac_tests.py new file mode 100644 index 0000000000000..d425c0e71118f --- /dev/null +++ b/tests/integration_tests/dashboards/security/security_rbac_tests.py @@ -0,0 +1,352 @@ +# Licensed to 
the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +"""Unit tests for Superset""" +from unittest import mock + +import pytest + +from superset.utils.core import backend +from tests.integration_tests.dashboards.dashboard_test_utils import * +from tests.integration_tests.dashboards.security.base_case import ( + BaseTestDashboardSecurity, +) +from tests.integration_tests.dashboards.superset_factory_util import ( + create_dashboard_to_db, + create_database_to_db, + create_datasource_table_to_db, + create_slice_to_db, +) +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) +from tests.integration_tests.fixtures.public_role import public_role_like_gamma +from tests.integration_tests.fixtures.query_context import get_query_context + +CHART_DATA_URI = "api/v1/chart/data" + + +@mock.patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + DASHBOARD_RBAC=True, +) +class TestDashboardRoleBasedSecurity(BaseTestDashboardSecurity): + def test_get_dashboard_view__admin_can_access(self): + # arrange + dashboard_to_access = create_dashboard_to_db( + owners=[], slices=[create_slice_to_db()], published=False + ) + self.login("admin") + + # act + response = self.get_dashboard_view_response(dashboard_to_access) + + # assert + self.assert200(response) + + def test_get_dashboard_view__owner_can_access(self): + # arrange + username = random_str() + new_role = f"role_{random_str()}" + owner = self.create_user_with_roles( + username, [new_role], should_create_roles=True + ) + dashboard_to_access = create_dashboard_to_db( + owners=[owner], slices=[create_slice_to_db()], published=False + ) + self.login(username) + + # act + response = self.get_dashboard_view_response(dashboard_to_access) + + # assert + self.assert200(response) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_get_dashboard_view__user_can_not_access_without_permission(self): + username = random_str() + new_role = f"role_{random_str()}" + self.create_user_with_roles(username, [new_role], should_create_roles=True) + slice = ( + db.session.query(Slice) + .filter_by(slice_name="Girl Name Cloud") + .one_or_none() + ) + dashboard_to_access = create_dashboard_to_db(published=True, slices=[slice]) + self.login(username) + + # act + response = self.get_dashboard_view_response(dashboard_to_access) + + request_payload = get_query_context("birth_names") + rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data") + self.assertEqual(rv.status_code, 403) + + # assert + self.assert403(response) + + def test_get_dashboard_view__user_with_dashboard_permission_can_not_access_draft( + self, + ): + # arrange + dashboard_to_access = create_dashboard_to_db(published=False) + username = random_str() + new_role = 
f"role_{random_str()}" + self.create_user_with_roles(username, [new_role], should_create_roles=True) + grant_access_to_dashboard(dashboard_to_access, new_role) + self.login(username) + + # act + response = self.get_dashboard_view_response(dashboard_to_access) + + # assert + self.assert403(response) + + # post + revoke_access_to_dashboard(dashboard_to_access, new_role) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_get_dashboard_view__user_access_with_dashboard_permission(self): + if backend() == "hive": + return + + # arrange + + username = random_str() + new_role = f"role_{random_str()}" + self.create_user_with_roles(username, [new_role], should_create_roles=True) + + slice = ( + db.session.query(Slice) + .filter_by(slice_name="Girl Name Cloud") + .one_or_none() + ) + dashboard_to_access = create_dashboard_to_db(published=True, slices=[slice]) + self.login(username) + grant_access_to_dashboard(dashboard_to_access, new_role) + + # act + response = self.get_dashboard_view_response(dashboard_to_access) + + # assert + self.assert200(response) + + request_payload = get_query_context("birth_names") + rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data") + self.assertEqual(rv.status_code, 200) + + # post + revoke_access_to_dashboard(dashboard_to_access, new_role) + + @pytest.mark.usefixtures("public_role_like_gamma") + def test_get_dashboard_view__public_user_can_not_access_without_permission(self): + dashboard_to_access = create_dashboard_to_db(published=True) + self.logout() + + # act + response = self.get_dashboard_view_response(dashboard_to_access) + + # assert + self.assert403(response) + + @pytest.mark.usefixtures("public_role_like_gamma") + def test_get_dashboard_view__public_user_with_dashboard_permission_can_not_access_draft( + self, + ): + # arrange + dashboard_to_access = create_dashboard_to_db(published=False) + grant_access_to_dashboard(dashboard_to_access, "Public") + self.logout() + # act + response = self.get_dashboard_view_response(dashboard_to_access) + + # assert + self.assert403(response) + + # post + revoke_access_to_dashboard(dashboard_to_access, "Public") + + @pytest.mark.usefixtures("public_role_like_gamma") + def test_get_dashboard_view__public_user_access_with_dashboard_permission(self): + # arrange + dashboard_to_access = create_dashboard_to_db( + published=True, slices=[create_slice_to_db()] + ) + grant_access_to_dashboard(dashboard_to_access, "Public") + + self.logout() + + # act + response = self.get_dashboard_view_response(dashboard_to_access) + + # assert + self.assert200(response) + + # post + revoke_access_to_dashboard(dashboard_to_access, "Public") + + def _create_sample_dashboards_with_owner_access(self): + username = random_str() + new_role = f"role_{random_str()}" + owner = self.create_user_with_roles( + username, [new_role], should_create_roles=True + ) + database = create_database_to_db() + table = create_datasource_table_to_db(db_id=database.id, owners=[owner]) + first_dash = create_dashboard_to_db( + owners=[owner], slices=[create_slice_to_db(datasource_id=table.id)] + ) + second_dash = create_dashboard_to_db( + owners=[owner], slices=[create_slice_to_db(datasource_id=table.id)] + ) + owned_dashboards = [first_dash, second_dash] + not_owned_dashboards = [ + create_dashboard_to_db( + slices=[create_slice_to_db(datasource_id=table.id)], published=True + ) + ] + self.login(username) + return not_owned_dashboards, owned_dashboards + + def _create_sample_only_published_dashboard_with_roles(self): + username = 
random_str() + new_role = f"role_{random_str()}" + self.create_user_with_roles(username, [new_role], should_create_roles=True) + published_dashboards = [ + create_dashboard_to_db(published=True), + create_dashboard_to_db(published=True), + ] + draft_dashboards = [ + create_dashboard_to_db(published=False), + create_dashboard_to_db(published=False), + ] + for dash in published_dashboards + draft_dashboards: + grant_access_to_dashboard(dash, new_role) + self.login(username) + return new_role, draft_dashboards, published_dashboards + + def test_get_dashboards_api__admin_get_all_dashboards(self): + # arrange + create_dashboard_to_db( + owners=[], slices=[create_slice_to_db()], published=False + ) + dashboard_counts = count_dashboards() + + self.login("admin") + + # act + response = self.get_dashboards_api_response() + + # assert + self.assert_dashboards_api_response(response, dashboard_counts) + + def test_get_dashboards_api__owner_get_all_owned_dashboards(self): + # arrange + ( + not_owned_dashboards, + owned_dashboards, + ) = self._create_sample_dashboards_with_owner_access() + + # act + response = self.get_dashboards_api_response() + + # assert + self.assert_dashboards_api_response( + response, 2, owned_dashboards, not_owned_dashboards + ) + + def test_get_dashboards_api__user_without_any_permissions_get_empty_list(self): + username = random_str() + new_role = f"role_{random_str()}" + self.create_user_with_roles(username, [new_role], should_create_roles=True) + create_dashboard_to_db(published=True) + self.login(username) + + # act + response = self.get_dashboards_api_response() + + # assert + self.assert_dashboards_api_response(response, 0) + + def test_get_dashboards_api__user_get_only_published_permitted_dashboards(self): + ( + new_role, + draft_dashboards, + published_dashboards, + ) = self._create_sample_only_published_dashboard_with_roles() + + # act + response = self.get_dashboards_api_response() + + # assert + self.assert_dashboards_api_response( + response, + len(published_dashboards), + published_dashboards, + draft_dashboards, + ) + + # post + for dash in published_dashboards + draft_dashboards: + revoke_access_to_dashboard(dash, new_role) + + @pytest.mark.usefixtures("public_role_like_gamma") + def test_get_dashboards_api__public_user_without_any_permissions_get_empty_list( + self, + ): + create_dashboard_to_db(published=True) + self.logout() + + # act + response = self.get_dashboards_api_response() + + # assert + self.assert_dashboards_api_response(response, 0) + + @pytest.mark.usefixtures("public_role_like_gamma") + def test_get_dashboards_api__public_user_get_only_published_permitted_dashboards( + self, + ): + # arrange + published_dashboards = [ + create_dashboard_to_db(published=True), + create_dashboard_to_db(published=True), + ] + draft_dashboards = [ + create_dashboard_to_db(published=False), + create_dashboard_to_db(published=False), + ] + + for dash in published_dashboards + draft_dashboards: + grant_access_to_dashboard(dash, "Public") + + self.logout() + + # act + response = self.get_dashboards_api_response() + + # assert + self.assert_dashboards_api_response( + response, + len(published_dashboards), + published_dashboards, + draft_dashboards, + ) + + # post + for dash in published_dashboards + draft_dashboards: + revoke_access_to_dashboard(dash, "Public") diff --git a/tests/integration_tests/dashboards/superset_factory_util.py b/tests/integration_tests/dashboards/superset_factory_util.py new file mode 100644 index 0000000000000..b160a56a33fbf --- /dev/null +++ 
b/tests/integration_tests/dashboards/superset_factory_util.py @@ -0,0 +1,333 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import logging +from typing import List, Optional + +from flask_appbuilder import Model +from flask_appbuilder.security.sqla.models import User + +from superset import db +from superset.connectors.sqla.models import SqlaTable, sqlatable_user +from superset.models.core import Database +from superset.models.dashboard import ( + Dashboard, + dashboard_slices, + dashboard_user, + DashboardRoles, +) +from superset.models.slice import Slice, slice_user +from tests.integration_tests.dashboards.dashboard_test_utils import ( + random_slug, + random_str, + random_title, +) + +logger = logging.getLogger(__name__) + +session = db.session + +inserted_dashboards_ids = [] +inserted_databases_ids = [] +inserted_sqltables_ids = [] +inserted_slices_ids = [] + + +def create_dashboard_to_db( + dashboard_title: Optional[str] = None, + slug: Optional[str] = None, + published: bool = False, + owners: Optional[List[User]] = None, + slices: Optional[List[Slice]] = None, + css: str = "", + json_metadata: str = "", + position_json: str = "", +) -> Dashboard: + dashboard = create_dashboard( + dashboard_title, + slug, + published, + owners, + slices, + css, + json_metadata, + position_json, + ) + + insert_model(dashboard) + inserted_dashboards_ids.append(dashboard.id) + return dashboard + + +def create_dashboard( + dashboard_title: Optional[str] = None, + slug: Optional[str] = None, + published: bool = False, + owners: Optional[List[User]] = None, + slices: Optional[List[Slice]] = None, + css: str = "", + json_metadata: str = "", + position_json: str = "", +) -> Dashboard: + dashboard_title = dashboard_title if dashboard_title is not None else random_title() + slug = slug if slug is not None else random_slug() + owners = owners if owners is not None else [] + slices = slices if slices is not None else [] + return Dashboard( + dashboard_title=dashboard_title, + slug=slug, + published=published, + owners=owners, + css=css, + position_json=position_json, + json_metadata=json_metadata, + slices=slices, + ) + + +def insert_model(dashboard: Model) -> None: + session.add(dashboard) + session.commit() + session.refresh(dashboard) + + +def create_slice_to_db( + name: Optional[str] = None, + datasource_id: Optional[int] = None, + owners: Optional[List[User]] = None, +) -> Slice: + slice_ = create_slice(datasource_id, name=name, owners=owners) + insert_model(slice_) + inserted_slices_ids.append(slice_.id) + return slice_ + + +def create_slice( + datasource_id: Optional[int] = None, + datasource: Optional[SqlaTable] = None, + name: Optional[str] = None, + owners: Optional[List[User]] = None, +) -> Slice: + name = name if name is not None else random_str() + owners = 
owners if owners is not None else [] + datasource_type = "table" + if datasource: + return Slice( + slice_name=name, + table=datasource, + owners=owners, + datasource_type=datasource_type, + ) + + datasource_id = ( + datasource_id + if datasource_id is not None + else create_datasource_table_to_db(name=name + "_table").id + ) + + return Slice( + slice_name=name, + datasource_id=datasource_id, + owners=owners, + datasource_type=datasource_type, + ) + + +def create_datasource_table_to_db( + name: Optional[str] = None, + db_id: Optional[int] = None, + owners: Optional[List[User]] = None, +) -> SqlaTable: + sqltable = create_datasource_table(name, db_id, owners=owners) + insert_model(sqltable) + inserted_sqltables_ids.append(sqltable.id) + return sqltable + + +def create_datasource_table( + name: Optional[str] = None, + db_id: Optional[int] = None, + database: Optional[Database] = None, + owners: Optional[List[User]] = None, +) -> SqlaTable: + name = name if name is not None else random_str() + owners = owners if owners is not None else [] + if database: + return SqlaTable(table_name=name, database=database, owners=owners) + db_id = db_id if db_id is not None else create_database_to_db(name=name + "_db").id + return SqlaTable(table_name=name, database_id=db_id, owners=owners) + + +def create_database_to_db(name: Optional[str] = None) -> Database: + database = create_database(name) + insert_model(database) + inserted_databases_ids.append(database.id) + return database + + +def create_database(name: Optional[str] = None) -> Database: + name = name if name is not None else random_str() + return Database(database_name=name, sqlalchemy_uri="sqlite:///:memory:") + + +def delete_all_inserted_objects() -> None: + delete_all_inserted_dashboards() + delete_all_inserted_slices() + delete_all_inserted_tables() + delete_all_inserted_dbs() + + +def delete_all_inserted_dashboards(): + try: + dashboards_to_delete: List[Dashboard] = ( + session.query(Dashboard) + .filter(Dashboard.id.in_(inserted_dashboards_ids)) + .all() + ) + for dashboard in dashboards_to_delete: + try: + delete_dashboard(dashboard, False) + except Exception as ex: + logger.error(f"failed to delete {dashboard.id}", exc_info=True) + raise ex + if len(inserted_dashboards_ids) > 0: + session.commit() + inserted_dashboards_ids.clear() + except Exception as ex2: + logger.error("delete_all_inserted_dashboards failed", exc_info=True) + raise ex2 + + +def delete_dashboard(dashboard: Dashboard, do_commit: bool = False) -> None: + logger.info(f"deleting dashboard{dashboard.id}") + delete_dashboard_roles_associations(dashboard) + delete_dashboard_users_associations(dashboard) + delete_dashboard_slices_associations(dashboard) + session.delete(dashboard) + if do_commit: + session.commit() + + +def delete_dashboard_users_associations(dashboard: Dashboard) -> None: + session.execute( + dashboard_user.delete().where(dashboard_user.c.dashboard_id == dashboard.id) + ) + + +def delete_dashboard_roles_associations(dashboard: Dashboard) -> None: + session.execute( + DashboardRoles.delete().where(DashboardRoles.c.dashboard_id == dashboard.id) + ) + + +def delete_dashboard_slices_associations(dashboard: Dashboard) -> None: + session.execute( + dashboard_slices.delete().where(dashboard_slices.c.dashboard_id == dashboard.id) + ) + + +def delete_all_inserted_slices(): + try: + slices_to_delete: List[Slice] = ( + session.query(Slice).filter(Slice.id.in_(inserted_slices_ids)).all() + ) + for slice in slices_to_delete: + try: + delete_slice(slice, False) + except 
Exception as ex: + logger.error(f"failed to delete {slice.id}", exc_info=True) + raise ex + if len(inserted_slices_ids) > 0: + session.commit() + inserted_slices_ids.clear() + except Exception as ex2: + logger.error("delete_all_inserted_slices failed", exc_info=True) + raise ex2 + + +def delete_slice(slice_: Slice, do_commit: bool = False) -> None: + logger.info(f"deleting slice{slice_.id}") + delete_slice_users_associations(slice_) + session.delete(slice_) + if do_commit: + session.commit() + + +def delete_slice_users_associations(slice_: Slice) -> None: + session.execute(slice_user.delete().where(slice_user.c.slice_id == slice_.id)) + + +def delete_all_inserted_tables(): + try: + tables_to_delete: List[SqlaTable] = ( + session.query(SqlaTable) + .filter(SqlaTable.id.in_(inserted_sqltables_ids)) + .all() + ) + for table in tables_to_delete: + try: + delete_sqltable(table, False) + except Exception as ex: + logger.error(f"failed to delete {table.id}", exc_info=True) + raise ex + if len(inserted_sqltables_ids) > 0: + session.commit() + inserted_sqltables_ids.clear() + except Exception as ex2: + logger.error("delete_all_inserted_tables failed", exc_info=True) + raise ex2 + + +def delete_sqltable(table: SqlaTable, do_commit: bool = False) -> None: + logger.info(f"deleting table{table.id}") + delete_table_users_associations(table) + session.delete(table) + if do_commit: + session.commit() + + +def delete_table_users_associations(table: SqlaTable) -> None: + session.execute( + sqlatable_user.delete().where(sqlatable_user.c.table_id == table.id) + ) + + +def delete_all_inserted_dbs(): + try: + dbs_to_delete: List[Database] = ( + session.query(Database) + .filter(Database.id.in_(inserted_databases_ids)) + .all() + ) + for db in dbs_to_delete: + try: + delete_database(db, False) + except Exception as ex: + logger.error(f"failed to delete {db.id}", exc_info=True) + raise ex + if len(inserted_databases_ids) > 0: + session.commit() + inserted_databases_ids.clear() + except Exception as ex2: + logger.error("delete_all_inserted_databases failed", exc_info=True) + raise ex2 + + +def delete_database(database: Database, do_commit: bool = False) -> None: + logger.info(f"deleting database{database.id}") + session.delete(database) + if do_commit: + session.commit() diff --git a/tests/integration_tests/databases/__init__.py b/tests/integration_tests/databases/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/databases/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/tests/integration_tests/databases/api_tests.py b/tests/integration_tests/databases/api_tests.py new file mode 100644 index 0000000000000..f4968edae9e7f --- /dev/null +++ b/tests/integration_tests/databases/api_tests.py @@ -0,0 +1,3173 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# isort:skip_file +"""Unit tests for Superset""" +import dataclasses +import json +from collections import defaultdict +from io import BytesIO +from unittest import mock +from unittest.mock import patch, MagicMock +from zipfile import is_zipfile, ZipFile +from operator import itemgetter + +import prison +import pytest +import yaml + +from sqlalchemy.engine.url import make_url +from sqlalchemy.exc import DBAPIError +from sqlalchemy.sql import func + +from superset import db, security_manager +from superset.connectors.sqla.models import SqlaTable +from superset.databases.ssh_tunnel.models import SSHTunnel +from superset.databases.utils import make_url_safe +from superset.db_engine_specs.mysql import MySQLEngineSpec +from superset.db_engine_specs.postgres import PostgresEngineSpec +from superset.db_engine_specs.redshift import RedshiftEngineSpec +from superset.db_engine_specs.bigquery import BigQueryEngineSpec +from superset.db_engine_specs.gsheets import GSheetsEngineSpec +from superset.db_engine_specs.hana import HanaEngineSpec +from superset.errors import SupersetError +from superset.models.core import Database, ConfigurationMethod +from superset.reports.models import ReportSchedule, ReportScheduleType +from superset.utils.database import get_example_database, get_main_database +from tests.integration_tests.base_tests import SupersetTestCase +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) +from tests.integration_tests.fixtures.certificates import ssl_certificate +from tests.integration_tests.fixtures.energy_dashboard import ( + load_energy_table_with_slice, + load_energy_table_data, +) +from tests.integration_tests.fixtures.world_bank_dashboard import ( + load_world_bank_dashboard_with_slices, + load_world_bank_data, +) +from tests.integration_tests.fixtures.importexport import ( + database_config, + dataset_config, + database_metadata_config, + dataset_metadata_config, +) +from tests.integration_tests.fixtures.unicode_dashboard import ( + load_unicode_dashboard_with_position, + load_unicode_data, +) +from tests.integration_tests.test_app import app + + +SQL_VALIDATORS_BY_ENGINE = { + "presto": "PrestoDBSQLValidator", + "postgresql": "PostgreSQLValidator", +} + +PRESTO_SQL_VALIDATORS_BY_ENGINE = { + "presto": "PrestoDBSQLValidator", + "sqlite": "PrestoDBSQLValidator", + "postgresql": "PrestoDBSQLValidator", + "mysql": "PrestoDBSQLValidator", +} + + +class 
TestDatabaseApi(SupersetTestCase): + def insert_database( + self, + database_name: str, + sqlalchemy_uri: str, + extra: str = "", + encrypted_extra: str = "", + server_cert: str = "", + expose_in_sqllab: bool = False, + allow_file_upload: bool = False, + ) -> Database: + database = Database( + database_name=database_name, + sqlalchemy_uri=sqlalchemy_uri, + extra=extra, + encrypted_extra=encrypted_extra, + server_cert=server_cert, + expose_in_sqllab=expose_in_sqllab, + allow_file_upload=allow_file_upload, + ) + db.session.add(database) + db.session.commit() + return database + + @pytest.fixture() + def create_database_with_report(self): + with self.create_app().app_context(): + example_db = get_example_database() + database = self.insert_database( + "database_with_report", + example_db.sqlalchemy_uri_decrypted, + expose_in_sqllab=True, + ) + report_schedule = ReportSchedule( + type=ReportScheduleType.ALERT, + name="report_with_database", + crontab="* * * * *", + database=database, + ) + db.session.add(report_schedule) + db.session.commit() + yield database + + # rollback changes + db.session.delete(report_schedule) + db.session.delete(database) + db.session.commit() + + @pytest.fixture() + def create_database_with_dataset(self): + with self.create_app().app_context(): + example_db = get_example_database() + self._database = self.insert_database( + "database_with_dataset", + example_db.sqlalchemy_uri_decrypted, + expose_in_sqllab=True, + ) + table = SqlaTable( + schema="main", table_name="ab_permission", database=self._database + ) + db.session.add(table) + db.session.commit() + yield self._database + + # rollback changes + db.session.delete(table) + db.session.delete(self._database) + db.session.commit() + self._database = None + + def create_database_import(self): + buf = BytesIO() + with ZipFile(buf, "w") as bundle: + with bundle.open("database_export/metadata.yaml", "w") as fp: + fp.write(yaml.safe_dump(database_metadata_config).encode()) + with bundle.open( + "database_export/databases/imported_database.yaml", "w" + ) as fp: + fp.write(yaml.safe_dump(database_config).encode()) + with bundle.open( + "database_export/datasets/imported_dataset.yaml", "w" + ) as fp: + fp.write(yaml.safe_dump(dataset_config).encode()) + buf.seek(0) + return buf + + def test_get_items(self): + """ + Database API: Test get items + """ + self.login(username="admin") + uri = "api/v1/database/" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 200) + response = json.loads(rv.data.decode("utf-8")) + expected_columns = [ + "allow_ctas", + "allow_cvas", + "allow_dml", + "allow_file_upload", + "allow_run_async", + "allows_cost_estimate", + "allows_subquery", + "allows_virtual_table_explore", + "backend", + "changed_on", + "changed_on_delta_humanized", + "created_by", + "database_name", + "disable_data_preview", + "engine_information", + "explore_database_id", + "expose_in_sqllab", + "extra", + "force_ctas_schema", + "id", + "uuid", + ] + + self.assertGreater(response["count"], 0) + self.assertEqual(list(response["result"][0].keys()), expected_columns) + + def test_get_items_filter(self): + """ + Database API: Test get items with filter + """ + example_db = get_example_database() + test_database = self.insert_database( + "test-database", example_db.sqlalchemy_uri_decrypted, expose_in_sqllab=True + ) + dbs = db.session.query(Database).filter_by(expose_in_sqllab=True).all() + + self.login(username="admin") + arguments = { + "keys": ["none"], + "filters": [{"col": "expose_in_sqllab", "opr": "eq", "value": 
True}], + "order_columns": "database_name", + "order_direction": "asc", + "page": 0, + "page_size": -1, + } + uri = f"api/v1/database/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(rv.status_code, 200) + self.assertEqual(response["count"], len(dbs)) + + # Cleanup + db.session.delete(test_database) + db.session.commit() + + def test_get_items_not_allowed(self): + """ + Database API: Test get items not allowed + """ + self.login(username="gamma") + uri = "api/v1/database/" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 200) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(response["count"], 0) + + def test_create_database(self): + """ + Database API: Test create + """ + extra = { + "metadata_params": {}, + "engine_params": {}, + "metadata_cache_timeout": {}, + "schemas_allowed_for_file_upload": [], + } + + self.login(username="admin") + example_db = get_example_database() + if example_db.backend == "sqlite": + return + database_data = { + "database_name": "test-create-database", + "sqlalchemy_uri": example_db.sqlalchemy_uri_decrypted, + "configuration_method": ConfigurationMethod.SQLALCHEMY_FORM, + "server_cert": None, + "extra": json.dumps(extra), + } + + uri = "api/v1/database/" + rv = self.client.post(uri, json=database_data) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(rv.status_code, 201) + # Cleanup + model = db.session.query(Database).get(response.get("id")) + assert model.configuration_method == ConfigurationMethod.SQLALCHEMY_FORM + db.session.delete(model) + db.session.commit() + + @mock.patch( + "superset.databases.commands.test_connection.TestConnectionDatabaseCommand.run", + ) + @mock.patch("superset.databases.commands.create.is_feature_enabled") + @mock.patch( + "superset.models.core.Database.get_all_schema_names", + ) + def test_create_database_with_ssh_tunnel( + self, + mock_test_connection_database_command_run, + mock_create_is_feature_enabled, + mock_get_all_schema_names, + ): + """ + Database API: Test create with SSH Tunnel + """ + mock_create_is_feature_enabled.return_value = True + self.login(username="admin") + example_db = get_example_database() + if example_db.backend == "sqlite": + return + ssh_tunnel_properties = { + "server_address": "123.132.123.1", + "server_port": 8080, + "username": "foo", + "password": "bar", + } + database_data = { + "database_name": "test-db-with-ssh-tunnel", + "sqlalchemy_uri": example_db.sqlalchemy_uri_decrypted, + "ssh_tunnel": ssh_tunnel_properties, + } + uri = "api/v1/database/" + rv = self.client.post(uri, json=database_data) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(rv.status_code, 201) + model_ssh_tunnel = ( + db.session.query(SSHTunnel) + .filter(SSHTunnel.database_id == response.get("id")) + .one() + ) + self.assertEqual(response.get("result")["ssh_tunnel"]["password"], "XXXXXXXXXX") + self.assertEqual(model_ssh_tunnel.database_id, response.get("id")) + # Cleanup + model = db.session.query(Database).get(response.get("id")) + db.session.delete(model) + db.session.commit() + + @mock.patch( + "superset.databases.commands.test_connection.TestConnectionDatabaseCommand.run", + ) + @mock.patch("superset.databases.commands.create.is_feature_enabled") + @mock.patch("superset.databases.commands.update.is_feature_enabled") + @mock.patch( + "superset.models.core.Database.get_all_schema_names", + ) + def test_update_database_with_ssh_tunnel( + self, + 
mock_test_connection_database_command_run, + mock_create_is_feature_enabled, + mock_update_is_feature_enabled, + mock_get_all_schema_names, + ): + """ + Database API: Test update Database with SSH Tunnel + """ + mock_create_is_feature_enabled.return_value = True + mock_update_is_feature_enabled.return_value = True + self.login(username="admin") + example_db = get_example_database() + if example_db.backend == "sqlite": + return + ssh_tunnel_properties = { + "server_address": "123.132.123.1", + "server_port": 8080, + "username": "foo", + "password": "bar", + } + database_data = { + "database_name": "test-db-with-ssh-tunnel", + "sqlalchemy_uri": example_db.sqlalchemy_uri_decrypted, + } + database_data_with_ssh_tunnel = { + "database_name": "test-db-with-ssh-tunnel", + "sqlalchemy_uri": example_db.sqlalchemy_uri_decrypted, + "ssh_tunnel": ssh_tunnel_properties, + } + + uri = "api/v1/database/" + rv = self.client.post(uri, json=database_data) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(rv.status_code, 201) + + uri = "api/v1/database/{}".format(response.get("id")) + rv = self.client.put(uri, json=database_data_with_ssh_tunnel) + response_update = json.loads(rv.data.decode("utf-8")) + self.assertEqual(rv.status_code, 200) + + model_ssh_tunnel = ( + db.session.query(SSHTunnel) + .filter(SSHTunnel.database_id == response_update.get("id")) + .one() + ) + self.assertEqual(model_ssh_tunnel.database_id, response_update.get("id")) + # Cleanup + model = db.session.query(Database).get(response.get("id")) + db.session.delete(model) + db.session.commit() + + @mock.patch( + "superset.databases.commands.test_connection.TestConnectionDatabaseCommand.run", + ) + @mock.patch("superset.databases.commands.create.is_feature_enabled") + @mock.patch("superset.databases.commands.update.is_feature_enabled") + @mock.patch( + "superset.models.core.Database.get_all_schema_names", + ) + def test_update_ssh_tunnel_via_database_api( + self, + mock_test_connection_database_command_run, + mock_create_is_feature_enabled, + mock_update_is_feature_enabled, + mock_get_all_schema_names, + ): + """ + Database API: Test update SSH Tunnel via Database API + """ + mock_create_is_feature_enabled.return_value = True + mock_update_is_feature_enabled.return_value = True + self.login(username="admin") + example_db = get_example_database() + + if example_db.backend == "sqlite": + return + initial_ssh_tunnel_properties = { + "server_address": "123.132.123.1", + "server_port": 8080, + "username": "foo", + "password": "bar", + } + updated_ssh_tunnel_properties = { + "server_address": "123.132.123.1", + "server_port": 8080, + "username": "Test", + "password": "new_bar", + } + database_data_with_ssh_tunnel = { + "database_name": "test-db-with-ssh-tunnel", + "sqlalchemy_uri": example_db.sqlalchemy_uri_decrypted, + "ssh_tunnel": initial_ssh_tunnel_properties, + } + database_data_with_ssh_tunnel_update = { + "database_name": "test-db-with-ssh-tunnel", + "sqlalchemy_uri": example_db.sqlalchemy_uri_decrypted, + "ssh_tunnel": updated_ssh_tunnel_properties, + } + + uri = "api/v1/database/" + rv = self.client.post(uri, json=database_data_with_ssh_tunnel) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(rv.status_code, 201) + model_ssh_tunnel = ( + db.session.query(SSHTunnel) + .filter(SSHTunnel.database_id == response.get("id")) + .one() + ) + self.assertEqual(model_ssh_tunnel.database_id, response.get("id")) + self.assertEqual(model_ssh_tunnel.username, "foo") + uri = 
"api/v1/database/{}".format(response.get("id")) + rv = self.client.put(uri, json=database_data_with_ssh_tunnel_update) + response_update = json.loads(rv.data.decode("utf-8")) + self.assertEqual(rv.status_code, 200) + model_ssh_tunnel = ( + db.session.query(SSHTunnel) + .filter(SSHTunnel.database_id == response_update.get("id")) + .one() + ) + self.assertEqual(model_ssh_tunnel.database_id, response_update.get("id")) + self.assertEqual( + response_update.get("result")["ssh_tunnel"]["password"], "XXXXXXXXXX" + ) + self.assertEqual(model_ssh_tunnel.username, "Test") + self.assertEqual(model_ssh_tunnel.server_address, "123.132.123.1") + self.assertEqual(model_ssh_tunnel.server_port, 8080) + # Cleanup + model = db.session.query(Database).get(response.get("id")) + db.session.delete(model) + db.session.commit() + + @mock.patch( + "superset.databases.commands.test_connection.TestConnectionDatabaseCommand.run", + ) + @mock.patch( + "superset.models.core.Database.get_all_schema_names", + ) + @mock.patch("superset.databases.commands.create.is_feature_enabled") + def test_cascade_delete_ssh_tunnel( + self, + mock_test_connection_database_command_run, + mock_get_all_schema_names, + mock_create_is_feature_enabled, + ): + """ + Database API: SSH Tunnel gets deleted if Database gets deleted + """ + mock_create_is_feature_enabled.return_value = True + self.login(username="admin") + example_db = get_example_database() + if example_db.backend == "sqlite": + return + ssh_tunnel_properties = { + "server_address": "123.132.123.1", + "server_port": 8080, + "username": "foo", + "password": "bar", + } + database_data = { + "database_name": "test-db-with-ssh-tunnel", + "sqlalchemy_uri": example_db.sqlalchemy_uri_decrypted, + "ssh_tunnel": ssh_tunnel_properties, + } + + uri = "api/v1/database/" + rv = self.client.post(uri, json=database_data) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(rv.status_code, 201) + model_ssh_tunnel = ( + db.session.query(SSHTunnel) + .filter(SSHTunnel.database_id == response.get("id")) + .one() + ) + self.assertEqual(model_ssh_tunnel.database_id, response.get("id")) + # Cleanup + model = db.session.query(Database).get(response.get("id")) + db.session.delete(model) + db.session.commit() + model_ssh_tunnel = ( + db.session.query(SSHTunnel) + .filter(SSHTunnel.database_id == response.get("id")) + .one_or_none() + ) + assert model_ssh_tunnel is None + + @mock.patch( + "superset.databases.commands.test_connection.TestConnectionDatabaseCommand.run", + ) + @mock.patch("superset.databases.commands.create.is_feature_enabled") + @mock.patch( + "superset.models.core.Database.get_all_schema_names", + ) + def test_do_not_create_database_if_ssh_tunnel_creation_fails( + self, + mock_test_connection_database_command_run, + mock_create_is_feature_enabled, + mock_get_all_schema_names, + ): + """ + Database API: Test Database is not created if SSH Tunnel creation fails + """ + mock_create_is_feature_enabled.return_value = True + self.login(username="admin") + example_db = get_example_database() + if example_db.backend == "sqlite": + return + ssh_tunnel_properties = { + "server_address": "123.132.123.1", + } + database_data = { + "database_name": "test-db-failure-ssh-tunnel", + "sqlalchemy_uri": example_db.sqlalchemy_uri_decrypted, + "ssh_tunnel": ssh_tunnel_properties, + } + fail_message = {"message": "SSH Tunnel parameters are invalid."} + + uri = "api/v1/database/" + rv = self.client.post(uri, json=database_data) + response = json.loads(rv.data.decode("utf-8")) + 
self.assertEqual(rv.status_code, 422) + model_ssh_tunnel = ( + db.session.query(SSHTunnel) + .filter(SSHTunnel.database_id == response.get("id")) + .one_or_none() + ) + assert model_ssh_tunnel is None + self.assertEqual(response, fail_message) + # Cleanup + model = ( + db.session.query(Database) + .filter(Database.database_name == "test-db-failure-ssh-tunnel") + .one_or_none() + ) + # the DB should not be created + assert model is None + + @mock.patch( + "superset.databases.commands.test_connection.TestConnectionDatabaseCommand.run", + ) + @mock.patch("superset.databases.commands.create.is_feature_enabled") + @mock.patch( + "superset.models.core.Database.get_all_schema_names", + ) + def test_get_database_returns_related_ssh_tunnel( + self, + mock_test_connection_database_command_run, + mock_create_is_feature_enabled, + mock_get_all_schema_names, + ): + """ + Database API: Test GET Database returns its related SSH Tunnel + """ + mock_create_is_feature_enabled.return_value = True + self.login(username="admin") + example_db = get_example_database() + if example_db.backend == "sqlite": + return + ssh_tunnel_properties = { + "server_address": "123.132.123.1", + "server_port": 8080, + "username": "foo", + "password": "bar", + } + database_data = { + "database_name": "test-db-with-ssh-tunnel", + "sqlalchemy_uri": example_db.sqlalchemy_uri_decrypted, + "ssh_tunnel": ssh_tunnel_properties, + } + response_ssh_tunnel = { + "server_address": "123.132.123.1", + "server_port": 8080, + "username": "foo", + "password": "XXXXXXXXXX", + } + + uri = "api/v1/database/" + rv = self.client.post(uri, json=database_data) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(rv.status_code, 201) + model_ssh_tunnel = ( + db.session.query(SSHTunnel) + .filter(SSHTunnel.database_id == response.get("id")) + .one() + ) + self.assertEqual(model_ssh_tunnel.database_id, response.get("id")) + self.assertEqual(response.get("result")["ssh_tunnel"], response_ssh_tunnel) + # Cleanup + model = db.session.query(Database).get(response.get("id")) + db.session.delete(model) + db.session.commit() + + @mock.patch( + "superset.databases.commands.test_connection.TestConnectionDatabaseCommand.run", + ) + @mock.patch( + "superset.models.core.Database.get_all_schema_names", + ) + def test_if_ssh_tunneling_flag_is_not_active_it_raises_new_exception( + self, + mock_test_connection_database_command_run, + mock_get_all_schema_names, + ): + """ + Database API: Test raises SSHTunneling feature flag not enabled + """ + self.login(username="admin") + example_db = get_example_database() + if example_db.backend == "sqlite": + return + ssh_tunnel_properties = { + "server_address": "123.132.123.1", + "server_port": 8080, + "username": "foo", + "password": "bar", + } + database_data = { + "database_name": "test-db-with-ssh-tunnel-7", + "sqlalchemy_uri": example_db.sqlalchemy_uri_decrypted, + "ssh_tunnel": ssh_tunnel_properties, + } + + uri = "api/v1/database/" + rv = self.client.post(uri, json=database_data) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(rv.status_code, 400) + self.assertEqual(response, {"message": "SSH Tunneling is not enabled"}) + model_ssh_tunnel = ( + db.session.query(SSHTunnel) + .filter(SSHTunnel.database_id == response.get("id")) + .one_or_none() + ) + assert model_ssh_tunnel is None + # Cleanup + model = ( + db.session.query(Database) + .filter(Database.database_name == "test-db-with-ssh-tunnel-7") + .one_or_none() + ) + # the DB should not be created + assert model is None + + def 
test_create_database_invalid_configuration_method(self): + """ + Database API: Test create with an invalid configuration method. + """ + extra = { + "metadata_params": {}, + "engine_params": {}, + "metadata_cache_timeout": {}, + "schemas_allowed_for_file_upload": [], + } + + self.login(username="admin") + example_db = get_example_database() + if example_db.backend == "sqlite": + return + database_data = { + "database_name": "test-create-database", + "sqlalchemy_uri": example_db.sqlalchemy_uri_decrypted, + "configuration_method": "BAD_FORM", + "server_cert": None, + "extra": json.dumps(extra), + } + + uri = "api/v1/database/" + rv = self.client.post(uri, json=database_data) + response = json.loads(rv.data.decode("utf-8")) + assert response == { + "message": {"configuration_method": ["Invalid enum value BAD_FORM"]} + } + assert rv.status_code == 400 + + def test_create_database_no_configuration_method(self): + """ + Database API: Test create with no config method. + """ + extra = { + "metadata_params": {}, + "engine_params": {}, + "metadata_cache_timeout": {}, + "schemas_allowed_for_file_upload": [], + } + + self.login(username="admin") + example_db = get_example_database() + if example_db.backend == "sqlite": + return + database_data = { + "database_name": "test-create-database", + "sqlalchemy_uri": example_db.sqlalchemy_uri_decrypted, + "server_cert": None, + "extra": json.dumps(extra), + } + + uri = "api/v1/database/" + rv = self.client.post(uri, json=database_data) + response = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 201 + self.assertIn("sqlalchemy_form", response["result"]["configuration_method"]) + + def test_create_database_server_cert_validate(self): + """ + Database API: Test create server cert validation + """ + example_db = get_example_database() + if example_db.backend == "sqlite": + return + + self.login(username="admin") + database_data = { + "database_name": "test-create-database-invalid-cert", + "sqlalchemy_uri": example_db.sqlalchemy_uri_decrypted, + "configuration_method": ConfigurationMethod.SQLALCHEMY_FORM, + "server_cert": "INVALID CERT", + } + + uri = "api/v1/database/" + rv = self.client.post(uri, json=database_data) + response = json.loads(rv.data.decode("utf-8")) + expected_response = {"message": {"server_cert": ["Invalid certificate"]}} + self.assertEqual(rv.status_code, 400) + self.assertEqual(response, expected_response) + + def test_create_database_json_validate(self): + """ + Database API: Test create encrypted extra and extra validation + """ + example_db = get_example_database() + if example_db.backend == "sqlite": + return + + self.login(username="admin") + database_data = { + "database_name": "test-create-database-invalid-json", + "sqlalchemy_uri": example_db.sqlalchemy_uri_decrypted, + "configuration_method": ConfigurationMethod.SQLALCHEMY_FORM, + "masked_encrypted_extra": '{"A": "a", "B", "C"}', + "extra": '["A": "a", "B", "C"]', + } + + uri = "api/v1/database/" + rv = self.client.post(uri, json=database_data) + response = json.loads(rv.data.decode("utf-8")) + expected_response = { + "message": { + "masked_encrypted_extra": [ + "Field cannot be decoded by JSON. Expecting ':' " + "delimiter: line 1 column 15 (char 14)" + ], + "extra": [ + "Field cannot be decoded by JSON. 
Expecting ','" + " delimiter: line 1 column 5 (char 4)" + ], + } + } + self.assertEqual(rv.status_code, 400) + self.assertEqual(response, expected_response) + + def test_create_database_extra_metadata_validate(self): + """ + Database API: Test create extra metadata_params validation + """ + example_db = get_example_database() + if example_db.backend == "sqlite": + return + + extra = { + "metadata_params": {"wrong_param": "some_value"}, + "engine_params": {}, + "metadata_cache_timeout": {}, + "schemas_allowed_for_file_upload": [], + } + self.login(username="admin") + database_data = { + "database_name": "test-create-database-invalid-extra", + "sqlalchemy_uri": example_db.sqlalchemy_uri_decrypted, + "configuration_method": ConfigurationMethod.SQLALCHEMY_FORM, + "extra": json.dumps(extra), + } + + uri = "api/v1/database/" + rv = self.client.post(uri, json=database_data) + response = json.loads(rv.data.decode("utf-8")) + expected_response = { + "message": { + "extra": [ + "The metadata_params in Extra field is not configured correctly." + " The key wrong_param is invalid." + ] + } + } + self.assertEqual(rv.status_code, 400) + self.assertEqual(response, expected_response) + + def test_create_database_unique_validate(self): + """ + Database API: Test create database_name already exists + """ + example_db = get_example_database() + if example_db.backend == "sqlite": + return + + self.login(username="admin") + database_data = { + "database_name": "examples", + "sqlalchemy_uri": example_db.sqlalchemy_uri_decrypted, + "configuration_method": ConfigurationMethod.SQLALCHEMY_FORM, + } + + uri = "api/v1/database/" + rv = self.client.post(uri, json=database_data) + response = json.loads(rv.data.decode("utf-8")) + expected_response = { + "message": { + "database_name": "A database with the same name already exists." + } + } + self.assertEqual(rv.status_code, 422) + self.assertEqual(response, expected_response) + + def test_create_database_uri_validate(self): + """ + Database API: Test create fail validate sqlalchemy uri + """ + self.login(username="admin") + database_data = { + "database_name": "test-database-invalid-uri", + "sqlalchemy_uri": "wrong_uri", + "configuration_method": ConfigurationMethod.SQLALCHEMY_FORM, + } + + uri = "api/v1/database/" + rv = self.client.post(uri, json=database_data) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(rv.status_code, 400) + self.assertIn( + "Invalid connection string", + response["message"]["sqlalchemy_uri"][0], + ) + + @mock.patch( + "superset.views.core.app.config", + {**app.config, "PREVENT_UNSAFE_DB_CONNECTIONS": True}, + ) + def test_create_database_fail_sqllite(self): + """ + Database API: Test create fail with sqllite + """ + database_data = { + "database_name": "test-create-sqlite-database", + "sqlalchemy_uri": "sqlite:////some.db", + "configuration_method": ConfigurationMethod.SQLALCHEMY_FORM, + } + + uri = "api/v1/database/" + self.login(username="admin") + response = self.client.post(uri, json=database_data) + response_data = json.loads(response.data.decode("utf-8")) + expected_response = { + "message": { + "sqlalchemy_uri": [ + "SQLiteDialect_pysqlite cannot be used as a data source " + "for security reasons." 
+ ] + } + } + self.assertEqual(response_data, expected_response) + self.assertEqual(response.status_code, 400) + + def test_create_database_conn_fail(self): + """ + Database API: Test create fails connection + """ + example_db = get_example_database() + if example_db.backend in ("sqlite", "hive", "presto"): + return + example_db.password = "wrong_password" + database_data = { + "database_name": "test-create-database-wrong-password", + "sqlalchemy_uri": example_db.sqlalchemy_uri_decrypted, + "configuration_method": ConfigurationMethod.SQLALCHEMY_FORM, + } + + uri = "api/v1/database/" + self.login(username="admin") + response = self.client.post(uri, json=database_data) + response_data = json.loads(response.data.decode("utf-8")) + superset_error_mysql = SupersetError( + message='Either the username "superset" or the password is incorrect.', + error_type="CONNECTION_ACCESS_DENIED_ERROR", + level="error", + extra={ + "engine_name": "MySQL", + "invalid": ["username", "password"], + "issue_codes": [ + { + "code": 1014, + "message": ( + "Issue 1014 - Either the username or the password is wrong." + ), + }, + { + "code": 1015, + "message": ( + "Issue 1015 - Either the database is spelled incorrectly or does not exist." + ), + }, + ], + }, + ) + superset_error_postgres = SupersetError( + message='The password provided for username "superset" is incorrect.', + error_type="CONNECTION_INVALID_PASSWORD_ERROR", + level="error", + extra={ + "engine_name": "PostgreSQL", + "invalid": ["username", "password"], + "issue_codes": [ + { + "code": 1013, + "message": ( + "Issue 1013 - The password provided when connecting to a database is not valid." + ), + } + ], + }, + ) + expected_response_mysql = {"errors": [dataclasses.asdict(superset_error_mysql)]} + expected_response_postgres = { + "errors": [dataclasses.asdict(superset_error_postgres)] + } + self.assertEqual(response.status_code, 500) + if example_db.backend == "mysql": + self.assertEqual(response_data, expected_response_mysql) + else: + self.assertEqual(response_data, expected_response_postgres) + + def test_update_database(self): + """ + Database API: Test update + """ + example_db = get_example_database() + test_database = self.insert_database( + "test-database", example_db.sqlalchemy_uri_decrypted + ) + self.login(username="admin") + database_data = { + "database_name": "test-database-updated", + "configuration_method": ConfigurationMethod.SQLALCHEMY_FORM, + } + uri = f"api/v1/database/{test_database.id}" + rv = self.client.put(uri, json=database_data) + self.assertEqual(rv.status_code, 200) + # Cleanup + model = db.session.query(Database).get(test_database.id) + db.session.delete(model) + db.session.commit() + + def test_update_database_conn_fail(self): + """ + Database API: Test update fails connection + """ + example_db = get_example_database() + if example_db.backend in ("sqlite", "hive", "presto"): + return + + test_database = self.insert_database( + "test-database1", example_db.sqlalchemy_uri_decrypted + ) + example_db.password = "wrong_password" + database_data = { + "sqlalchemy_uri": example_db.sqlalchemy_uri_decrypted, + } + + uri = f"api/v1/database/{test_database.id}" + self.login(username="admin") + rv = self.client.put(uri, json=database_data) + response = json.loads(rv.data.decode("utf-8")) + expected_response = { + "message": "Connection failed, please check your connection settings" + } + self.assertEqual(rv.status_code, 422) + self.assertEqual(response, expected_response) + # Cleanup + model =
db.session.query(Database).get(test_database.id) + db.session.delete(model) + db.session.commit() + + def test_update_database_uniqueness(self): + """ + Database API: Test update uniqueness + """ + example_db = get_example_database() + test_database1 = self.insert_database( + "test-database1", example_db.sqlalchemy_uri_decrypted + ) + test_database2 = self.insert_database( + "test-database2", example_db.sqlalchemy_uri_decrypted + ) + + self.login(username="admin") + database_data = {"database_name": "test-database2"} + uri = f"api/v1/database/{test_database1.id}" + rv = self.client.put(uri, json=database_data) + response = json.loads(rv.data.decode("utf-8")) + expected_response = { + "message": { + "database_name": "A database with the same name already exists." + } + } + self.assertEqual(rv.status_code, 422) + self.assertEqual(response, expected_response) + # Cleanup + db.session.delete(test_database1) + db.session.delete(test_database2) + db.session.commit() + + def test_update_database_invalid(self): + """ + Database API: Test update invalid request + """ + self.login(username="admin") + database_data = {"database_name": "test-database-updated"} + uri = "api/v1/database/invalid" + rv = self.client.put(uri, json=database_data) + self.assertEqual(rv.status_code, 404) + + def test_update_database_uri_validate(self): + """ + Database API: Test update sqlalchemy_uri validate + """ + example_db = get_example_database() + test_database = self.insert_database( + "test-database", example_db.sqlalchemy_uri_decrypted + ) + + self.login(username="admin") + database_data = { + "database_name": "test-database-updated", + "sqlalchemy_uri": "wrong_uri", + } + uri = f"api/v1/database/{test_database.id}" + rv = self.client.put(uri, json=database_data) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(rv.status_code, 400) + self.assertIn( + "Invalid connection string", + response["message"]["sqlalchemy_uri"][0], + ) + + db.session.delete(test_database) + db.session.commit() + + def test_update_database_with_invalid_configuration_method(self): + """ + Database API: Test update + """ + example_db = get_example_database() + test_database = self.insert_database( + "test-database", example_db.sqlalchemy_uri_decrypted + ) + self.login(username="admin") + database_data = { + "database_name": "test-database-updated", + "configuration_method": "BAD_FORM", + } + uri = f"api/v1/database/{test_database.id}" + rv = self.client.put(uri, json=database_data) + response = json.loads(rv.data.decode("utf-8")) + assert response == { + "message": {"configuration_method": ["Invalid enum value BAD_FORM"]} + } + assert rv.status_code == 400 + + db.session.delete(test_database) + db.session.commit() + + def test_update_database_with_no_configuration_method(self): + """ + Database API: Test update + """ + example_db = get_example_database() + test_database = self.insert_database( + "test-database", example_db.sqlalchemy_uri_decrypted + ) + self.login(username="admin") + database_data = { + "database_name": "test-database-updated", + } + uri = f"api/v1/database/{test_database.id}" + rv = self.client.put(uri, json=database_data) + assert rv.status_code == 200 + + db.session.delete(test_database) + db.session.commit() + + def test_delete_database(self): + """ + Database API: Test delete + """ + database_id = self.insert_database("test-database", "test_uri").id + self.login(username="admin") + uri = f"api/v1/database/{database_id}" + rv = self.delete_assert_metric(uri, "delete") + self.assertEqual(rv.status_code, 
200) + model = db.session.query(Database).get(database_id) + self.assertEqual(model, None) + + def test_delete_database_not_found(self): + """ + Database API: Test delete not found + """ + max_id = db.session.query(func.max(Database.id)).scalar() + self.login(username="admin") + uri = f"api/v1/database/{max_id + 1}" + rv = self.delete_assert_metric(uri, "delete") + self.assertEqual(rv.status_code, 404) + + @pytest.mark.usefixtures("create_database_with_dataset") + def test_delete_database_with_datasets(self): + """ + Database API: Test delete fails because it has depending datasets + """ + self.login(username="admin") + uri = f"api/v1/database/{self._database.id}" + rv = self.delete_assert_metric(uri, "delete") + self.assertEqual(rv.status_code, 422) + + @pytest.mark.usefixtures("create_database_with_report") + def test_delete_database_with_report(self): + """ + Database API: Test delete with associated report + """ + self.login(username="admin") + database = ( + db.session.query(Database) + .filter(Database.database_name == "database_with_report") + .one_or_none() + ) + uri = f"api/v1/database/{database.id}" + rv = self.client.delete(uri) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(rv.status_code, 422) + expected_response = { + "message": "There are associated alerts or reports: report_with_database" + } + self.assertEqual(response, expected_response) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_get_table_metadata(self): + """ + Database API: Test get table metadata info + """ + example_db = get_example_database() + self.login(username="admin") + uri = f"api/v1/database/{example_db.id}/table/birth_names/null/" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 200) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(response["name"], "birth_names") + self.assertIsNone(response["comment"]) + self.assertTrue(len(response["columns"]) > 5) + self.assertTrue(response.get("selectStar").startswith("SELECT")) + + def test_info_security_database(self): + """ + Database API: Test info security + """ + self.login(username="admin") + params = {"keys": ["permissions"]} + uri = f"api/v1/database/_info?q={prison.dumps(params)}" + rv = self.get_assert_metric(uri, "info") + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 200 + assert set(data["permissions"]) == {"can_read", "can_write", "can_export"} + + def test_get_invalid_database_table_metadata(self): + """ + Database API: Test get invalid database from table metadata + """ + database_id = 1000 + self.login(username="admin") + uri = f"api/v1/database/{database_id}/table/some_table/some_schema/" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 404) + + uri = "api/v1/database/some_database/table/some_table/some_schema/" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 404) + + def test_get_invalid_table_table_metadata(self): + """ + Database API: Test get invalid table from table metadata + """ + example_db = get_example_database() + uri = f"api/v1/database/{example_db.id}/table/wrong_table/null/" + self.login(username="admin") + rv = self.client.get(uri) + data = json.loads(rv.data.decode("utf-8")) + if example_db.backend == "sqlite": + self.assertEqual(rv.status_code, 200) + self.assertEqual( + data, + { + "columns": [], + "comment": None, + "foreignKeys": [], + "indexes": [], + "name": "wrong_table", + "primaryKey": {"constrained_columns": None, "name": None}, + "selectStar": "SELECT\nFROM wrong_table\nLIMIT 
100\nOFFSET 0", + }, + ) + elif example_db.backend == "mysql": + self.assertEqual(rv.status_code, 422) + self.assertEqual(data, {"message": "`wrong_table`"}) + else: + self.assertEqual(rv.status_code, 422) + self.assertEqual(data, {"message": "wrong_table"}) + + def test_get_table_metadata_no_db_permission(self): + """ + Database API: Test get table metadata from not permitted db + """ + self.login(username="gamma") + example_db = get_example_database() + uri = f"api/v1/database/{example_db.id}/birth_names/null/" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 404) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_get_table_extra_metadata(self): + """ + Database API: Test get table extra metadata info + """ + example_db = get_example_database() + self.login(username="admin") + uri = f"api/v1/database/{example_db.id}/table_extra/birth_names/null/" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 200) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(response, {}) + + def test_get_invalid_database_table_extra_metadata(self): + """ + Database API: Test get invalid database from table extra metadata + """ + database_id = 1000 + self.login(username="admin") + uri = f"api/v1/database/{database_id}/table_extra/some_table/some_schema/" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 404) + + uri = "api/v1/database/some_database/table_extra/some_table/some_schema/" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 404) + + def test_get_invalid_table_table_extra_metadata(self): + """ + Database API: Test get invalid table from table extra metadata + """ + example_db = get_example_database() + uri = f"api/v1/database/{example_db.id}/table_extra/wrong_table/null/" + self.login(username="admin") + rv = self.client.get(uri) + data = json.loads(rv.data.decode("utf-8")) + + self.assertEqual(rv.status_code, 200) + self.assertEqual(data, {}) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_get_select_star(self): + """ + Database API: Test get select star + """ + self.login(username="admin") + example_db = get_example_database() + uri = f"api/v1/database/{example_db.id}/select_star/birth_names/" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 200) + response = json.loads(rv.data.decode("utf-8")) + self.assertIn("gender", response["result"]) + + def test_get_select_star_not_allowed(self): + """ + Database API: Test get select star not allowed + """ + self.login(username="gamma") + example_db = get_example_database() + uri = f"api/v1/database/{example_db.id}/select_star/birth_names/" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 404) + + def test_get_select_star_datasource_access(self): + """ + Database API: Test get select star with datasource access + """ + session = db.session + table = SqlaTable( + schema="main", table_name="ab_permission", database=get_main_database() + ) + session.add(table) + session.commit() + + tmp_table_perm = security_manager.find_permission_view_menu( + "datasource_access", table.get_perm() + ) + gamma_role = security_manager.find_role("Gamma") + security_manager.add_permission_role(gamma_role, tmp_table_perm) + + self.login(username="gamma") + main_db = get_main_database() + uri = f"api/v1/database/{main_db.id}/select_star/ab_permission/" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 200) + + # rollback changes + security_manager.del_permission_role(gamma_role, tmp_table_perm) + 
db.session.delete(table) + db.session.delete(main_db) + db.session.commit() + + def test_get_select_star_not_found_database(self): + """ + Database API: Test get select star not found database + """ + self.login(username="admin") + max_id = db.session.query(func.max(Database.id)).scalar() + uri = f"api/v1/database/{max_id + 1}/select_star/birth_names/" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 404) + + def test_get_select_star_not_found_table(self): + """ + Database API: Test get select star not found table + """ + self.login(username="admin") + example_db = get_example_database() + # sqlite will not raise a NoSuchTableError + if example_db.backend == "sqlite": + return + uri = f"api/v1/database/{example_db.id}/select_star/table_does_not_exist/" + rv = self.client.get(uri) + # TODO(bkyryliuk): investigate why presto returns 500 + self.assertEqual(rv.status_code, 404 if example_db.backend != "presto" else 500) + + def test_get_allow_file_upload_filter(self): + """ + Database API: Test filter for allow file upload checks for schemas + """ + with self.create_app().app_context(): + example_db = get_example_database() + + extra = { + "metadata_params": {}, + "engine_params": {}, + "metadata_cache_timeout": {}, + "schemas_allowed_for_file_upload": ["public"], + } + self.login(username="admin") + database = self.insert_database( + "database_with_upload", + example_db.sqlalchemy_uri_decrypted, + extra=json.dumps(extra), + allow_file_upload=True, + ) + db.session.commit() + yield database + + arguments = { + "columns": ["allow_file_upload"], + "filters": [ + { + "col": "allow_file_upload", + "opr": "upload_is_enabled", + "value": True, + } + ], + } + uri = f"api/v1/database/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == 1 + db.session.delete(database) + db.session.commit() + + def test_get_allow_file_upload_filter_no_schema(self): + """ + Database API: Test filter for allow file upload checks for schemas. + This test has allow_file_upload but no schemas. + """ + with self.create_app().app_context(): + example_db = get_example_database() + + extra = { + "metadata_params": {}, + "engine_params": {}, + "metadata_cache_timeout": {}, + "schemas_allowed_for_file_upload": [], + } + self.login(username="admin") + database = self.insert_database( + "database_with_upload", + example_db.sqlalchemy_uri_decrypted, + extra=json.dumps(extra), + allow_file_upload=True, + ) + db.session.commit() + yield database + + arguments = { + "columns": ["allow_file_upload"], + "filters": [ + { + "col": "allow_file_upload", + "opr": "upload_is_enabled", + "value": True, + } + ], + } + uri = f"api/v1/database/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == 0 + db.session.delete(database) + db.session.commit() + + def test_get_allow_file_upload_filter_allow_file_false(self): + """ + Database API: Test filter for allow file upload checks for schemas.
+ This has a schema but does not allow_file_upload + """ + with self.create_app().app_context(): + example_db = get_example_database() + + extra = { + "metadata_params": {}, + "engine_params": {}, + "metadata_cache_timeout": {}, + "schemas_allowed_for_file_upload": ["public"], + } + self.login(username="admin") + database = self.insert_database( + "database_with_upload", + example_db.sqlalchemy_uri_decrypted, + extra=json.dumps(extra), + allow_file_upload=False, + ) + db.session.commit() + yield database + + arguments = { + "columns": ["allow_file_upload"], + "filters": [ + { + "col": "allow_file_upload", + "opr": "upload_is_enabled", + "value": True, + } + ], + } + uri = f"api/v1/database/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == 0 + db.session.delete(database) + db.session.commit() + + def test_get_allow_file_upload_false(self): + """ + Database API: Test filter for allow file upload checks for schemas. + Both databases have false allow_file_upload + """ + with self.create_app().app_context(): + example_db = get_example_database() + + extra = { + "metadata_params": {}, + "engine_params": {}, + "metadata_cache_timeout": {}, + "schemas_allowed_for_file_upload": [], + } + self.login(username="admin") + database = self.insert_database( + "database_with_upload", + example_db.sqlalchemy_uri_decrypted, + extra=json.dumps(extra), + allow_file_upload=False, + ) + db.session.commit() + yield database + arguments = { + "columns": ["allow_file_upload"], + "filters": [ + { + "col": "allow_file_upload", + "opr": "upload_is_enabled", + "value": True, + } + ], + } + uri = f"api/v1/database/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == 0 + db.session.delete(database) + db.session.commit() + + def test_get_allow_file_upload_false_no_extra(self): + """ + Database API: Test filter for allow file upload checks for schemas. + Both databases have false allow_file_upload + """ + with self.create_app().app_context(): + example_db = get_example_database() + + self.login(username="admin") + database = self.insert_database( + "database_with_upload", + example_db.sqlalchemy_uri_decrypted, + allow_file_upload=False, + ) + db.session.commit() + yield database + arguments = { + "columns": ["allow_file_upload"], + "filters": [ + { + "col": "allow_file_upload", + "opr": "upload_is_enabled", + "value": True, + } + ], + } + uri = f"api/v1/database/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == 0 + db.session.delete(database) + db.session.commit() + + def mock_csv_function(d, user): + return d.get_all_schema_names() + + @mock.patch( + "superset.views.core.app.config", + {**app.config, "ALLOWED_USER_CSV_SCHEMA_FUNC": mock_csv_function}, + ) + def test_get_allow_file_upload_true_csv(self): + """ + Database API: Test filter for allow file upload checks for schemas. 
+ allow_file_upload is True and schemas are supplied by the mocked ALLOWED_USER_CSV_SCHEMA_FUNC. + """ + with self.create_app().app_context(): + example_db = get_example_database() + + extra = { + "metadata_params": {}, + "engine_params": {}, + "metadata_cache_timeout": {}, + "schemas_allowed_for_file_upload": [], + } + self.login(username="admin") + database = self.insert_database( + "database_with_upload", + example_db.sqlalchemy_uri_decrypted, + extra=json.dumps(extra), + allow_file_upload=True, + ) + db.session.commit() + yield database + arguments = { + "columns": ["allow_file_upload"], + "filters": [ + { + "col": "allow_file_upload", + "opr": "upload_is_enabled", + "value": True, + } + ], + } + uri = f"api/v1/database/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == 1 + db.session.delete(database) + db.session.commit() + + def mock_empty_csv_function(d, user): + return [] + + @mock.patch( + "superset.views.core.app.config", + {**app.config, "ALLOWED_USER_CSV_SCHEMA_FUNC": mock_empty_csv_function}, + ) + def test_get_allow_file_upload_false_csv(self): + """ + Database API: Test filter for allow file upload checks for schemas. + The mocked ALLOWED_USER_CSV_SCHEMA_FUNC returns no schemas. + """ + with self.create_app().app_context(): + self.login(username="admin") + arguments = { + "columns": ["allow_file_upload"], + "filters": [ + { + "col": "allow_file_upload", + "opr": "upload_is_enabled", + "value": True, + } + ], + } + uri = f"api/v1/database/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == 1 + + def test_get_allow_file_upload_filter_no_permission(self): + """ + Database API: Test filter for allow file upload checks for schemas + """ + with self.create_app().app_context(): + example_db = get_example_database() + + extra = { + "metadata_params": {}, + "engine_params": {}, + "metadata_cache_timeout": {}, + "schemas_allowed_for_file_upload": ["public"], + } + self.login(username="gamma") + database = self.insert_database( + "database_with_upload", + example_db.sqlalchemy_uri_decrypted, + extra=json.dumps(extra), + allow_file_upload=True, + ) + db.session.commit() + yield database + + arguments = { + "columns": ["allow_file_upload"], + "filters": [ + { + "col": "allow_file_upload", + "opr": "upload_is_enabled", + "value": True, + } + ], + } + uri = f"api/v1/database/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == 0 + db.session.delete(database) + db.session.commit() + + def test_get_allow_file_upload_filter_with_permission(self): + """ + Database API: Test filter for allow file upload checks for schemas + """ + with self.create_app().app_context(): + main_db = get_main_database() + main_db.allow_file_upload = True + session = db.session + table = SqlaTable( + schema="public", + table_name="ab_permission", + database=get_main_database(), + ) + + session.add(table) + session.commit() + tmp_table_perm = security_manager.find_permission_view_menu( + "datasource_access", table.get_perm() + ) + gamma_role = security_manager.find_role("Gamma") + security_manager.add_permission_role(gamma_role, tmp_table_perm) + + self.login(username="gamma") + + arguments = { + "columns": ["allow_file_upload"], + "filters": [ + { + "col": "allow_file_upload", + "opr": "upload_is_enabled", + "value": True, + } + ], + } + uri = f"api/v1/database/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + data =
json.loads(rv.data.decode("utf-8")) + assert data["count"] == 1 + + # rollback changes + security_manager.del_permission_role(gamma_role, tmp_table_perm) + db.session.delete(table) + db.session.delete(main_db) + db.session.commit() + + def test_database_schemas(self): + """ + Database API: Test database schemas + """ + self.login(username="admin") + database = db.session.query(Database).filter_by(database_name="examples").one() + schemas = database.get_all_schema_names() + + rv = self.client.get(f"api/v1/database/{database.id}/schemas/") + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(schemas, response["result"]) + + rv = self.client.get( + f"api/v1/database/{database.id}/schemas/?q={prison.dumps({'force': True})}" + ) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(schemas, response["result"]) + + def test_database_schemas_not_found(self): + """ + Database API: Test database schemas not found + """ + self.logout() + self.login(username="gamma") + example_db = get_example_database() + uri = f"api/v1/database/{example_db.id}/schemas/" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 404) + + def test_database_schemas_invalid_query(self): + """ + Database API: Test database schemas with invalid query + """ + self.login("admin") + database = db.session.query(Database).first() + rv = self.client.get( + f"api/v1/database/{database.id}/schemas/?q={prison.dumps({'force': 'nop'})}" + ) + self.assertEqual(rv.status_code, 400) + + def test_database_tables(self): + """ + Database API: Test database tables + """ + self.login(username="admin") + database = db.session.query(Database).filter_by(database_name="examples").one() + + schema_name = self.default_schema_backend_map[database.backend] + rv = self.client.get( + f"api/v1/database/{database.id}/tables/?q={prison.dumps({'schema_name': schema_name})}" + ) + + self.assertEqual(rv.status_code, 200) + if database.backend == "postgresql": + response = json.loads(rv.data.decode("utf-8")) + schemas = [ + s[0] for s in database.get_all_table_names_in_schema(schema_name) + ] + self.assertEquals(response["count"], len(schemas)) + for option in response["result"]: + self.assertEquals(option["extra"], None) + self.assertEquals(option["type"], "table") + self.assertTrue(option["value"] in schemas) + + def test_database_tables_not_found(self): + """ + Database API: Test database tables not found + """ + self.logout() + self.login(username="gamma") + example_db = get_example_database() + uri = f"api/v1/database/{example_db.id}/tables/?q={prison.dumps({'schema_name': 'non_existent'})}" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 404) + + def test_database_tables_invalid_query(self): + """ + Database API: Test database tables with invalid query + """ + self.login("admin") + database = db.session.query(Database).first() + rv = self.client.get( + f"api/v1/database/{database.id}/tables/?q={prison.dumps({'force': 'nop'})}" + ) + self.assertEqual(rv.status_code, 400) + + @mock.patch("superset.security.manager.SupersetSecurityManager.can_access_database") + def test_database_tables_unexpected_error(self, mock_can_access_database): + """ + Database API: Test database tables with unexpected error + """ + self.login(username="admin") + database = db.session.query(Database).filter_by(database_name="examples").one() + mock_can_access_database.side_effect = Exception("Test Error") + + rv = self.client.get( + f"api/v1/database/{database.id}/tables/?q={prison.dumps({'schema_name': 'main'})}" + ) + 
self.assertEqual(rv.status_code, 422) + + def test_test_connection(self): + """ + Database API: Test test connection + """ + extra = { + "metadata_params": {}, + "engine_params": {}, + "metadata_cache_timeout": {}, + "schemas_allowed_for_file_upload": [], + } + # need to temporarily allow sqlite dbs, teardown will undo this + app.config["PREVENT_UNSAFE_DB_CONNECTIONS"] = False + self.login("admin") + example_db = get_example_database() + # validate that the endpoint works with the password-masked sqlalchemy uri + data = { + "database_name": "examples", + "masked_encrypted_extra": "{}", + "extra": json.dumps(extra), + "impersonate_user": False, + "sqlalchemy_uri": example_db.safe_sqlalchemy_uri(), + "server_cert": None, + } + url = "api/v1/database/test_connection/" + rv = self.post_assert_metric(url, data, "test_connection") + self.assertEqual(rv.status_code, 200) + self.assertEqual(rv.headers["Content-Type"], "application/json; charset=utf-8") + + # validate that the endpoint works with the decrypted sqlalchemy uri + data = { + "sqlalchemy_uri": example_db.sqlalchemy_uri_decrypted, + "database_name": "examples", + "impersonate_user": False, + "extra": json.dumps(extra), + "server_cert": None, + } + rv = self.post_assert_metric(url, data, "test_connection") + self.assertEqual(rv.status_code, 200) + self.assertEqual(rv.headers["Content-Type"], "application/json; charset=utf-8") + + def test_test_connection_failed(self): + """ + Database API: Test test connection failed + """ + self.login("admin") + + data = { + "sqlalchemy_uri": "broken://url", + "database_name": "examples", + "impersonate_user": False, + "server_cert": None, + } + url = "api/v1/database/test_connection/" + rv = self.post_assert_metric(url, data, "test_connection") + self.assertEqual(rv.status_code, 422) + self.assertEqual(rv.headers["Content-Type"], "application/json; charset=utf-8") + response = json.loads(rv.data.decode("utf-8")) + expected_response = { + "errors": [ + { + "message": "Could not load database driver: BaseEngineSpec", + "error_type": "GENERIC_COMMAND_ERROR", + "level": "warning", + "extra": { + "issue_codes": [ + { + "code": 1010, + "message": "Issue 1010 - Superset encountered an error while running a command.", + } + ] + }, + } + ] + } + self.assertEqual(response, expected_response) + + data = { + "sqlalchemy_uri": "mssql+pymssql://url", + "database_name": "examples", + "impersonate_user": False, + "server_cert": None, + } + rv = self.post_assert_metric(url, data, "test_connection") + self.assertEqual(rv.status_code, 422) + self.assertEqual(rv.headers["Content-Type"], "application/json; charset=utf-8") + response = json.loads(rv.data.decode("utf-8")) + expected_response = { + "errors": [ + { + "message": "Could not load database driver: MssqlEngineSpec", + "error_type": "GENERIC_COMMAND_ERROR", + "level": "warning", + "extra": { + "issue_codes": [ + { + "code": 1010, + "message": "Issue 1010 - Superset encountered an error while running a command.", + } + ] + }, + } + ] + } + self.assertEqual(response, expected_response) + + def test_test_connection_unsafe_uri(self): + """ + Database API: Test test connection with unsafe uri + """ + self.login("admin") + + app.config["PREVENT_UNSAFE_DB_CONNECTIONS"] = True + data = { + "sqlalchemy_uri": "sqlite:///home/superset/unsafe.db", + "database_name": "unsafe", + "impersonate_user": False, + "server_cert": None, + } + url = "api/v1/database/test_connection/" + rv = self.post_assert_metric(url, data, "test_connection") + self.assertEqual(rv.status_code, 400) + 
response = json.loads(rv.data.decode("utf-8")) + expected_response = { + "message": { + "sqlalchemy_uri": [ + "SQLiteDialect_pysqlite cannot be used as a data source for security reasons." + ] + } + } + self.assertEqual(response, expected_response) + + app.config["PREVENT_UNSAFE_DB_CONNECTIONS"] = False + + @mock.patch( + "superset.databases.commands.test_connection.DatabaseDAO.build_db_for_connection_test", + ) + @mock.patch( + "superset.databases.commands.test_connection.event_logger", + ) + def test_test_connection_failed_invalid_hostname( + self, mock_event_logger, mock_build_db + ): + """ + Database API: Test test connection failed due to invalid hostname + """ + msg = 'psql: error: could not translate host name "locahost" to address: nodename nor servname provided, or not known' + mock_build_db.return_value.set_sqlalchemy_uri.side_effect = DBAPIError( + msg, None, None + ) + mock_build_db.return_value.db_engine_spec.__name__ = "Some name" + superset_error = SupersetError( + message='Unable to resolve hostname "locahost".', + error_type="CONNECTION_INVALID_HOSTNAME_ERROR", + level="error", + extra={ + "hostname": "locahost", + "issue_codes": [ + { + "code": 1007, + "message": ( + "Issue 1007 - The hostname provided can't be resolved." + ), + } + ], + }, + ) + mock_build_db.return_value.db_engine_spec.extract_errors.return_value = [ + superset_error + ] + + self.login("admin") + data = { + "sqlalchemy_uri": "postgres://username:password@locahost:12345/db", + "database_name": "examples", + "impersonate_user": False, + "server_cert": None, + } + url = "api/v1/database/test_connection/" + rv = self.post_assert_metric(url, data, "test_connection") + + assert rv.status_code == 500 + assert rv.headers["Content-Type"] == "application/json; charset=utf-8" + response = json.loads(rv.data.decode("utf-8")) + expected_response = {"errors": [dataclasses.asdict(superset_error)]} + assert response == expected_response + + @pytest.mark.usefixtures( + "load_unicode_dashboard_with_position", + "load_energy_table_with_slice", + "load_world_bank_dashboard_with_slices", + "load_birth_names_dashboard_with_slices", + ) + def test_get_database_related_objects(self): + """ + Database API: Test get chart and dashboard count related to a database + :return: + """ + self.login(username="admin") + database = get_example_database() + uri = f"api/v1/database/{database.id}/related_objects/" + rv = self.get_assert_metric(uri, "related_objects") + self.assertEqual(rv.status_code, 200) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(response["charts"]["count"], 34) + self.assertEqual(response["dashboards"]["count"], 3) + + def test_get_database_related_objects_not_found(self): + """ + Database API: Test related objects not found + """ + max_id = db.session.query(func.max(Database.id)).scalar() + # id does not exist and we get 404 + invalid_id = max_id + 1 + uri = f"api/v1/database/{invalid_id}/related_objects/" + self.login(username="admin") + rv = self.get_assert_metric(uri, "related_objects") + self.assertEqual(rv.status_code, 404) + self.logout() + self.login(username="gamma") + database = get_example_database() + uri = f"api/v1/database/{database.id}/related_objects/" + rv = self.get_assert_metric(uri, "related_objects") + self.assertEqual(rv.status_code, 404) + + def test_export_database(self): + """ + Database API: Test export database + """ + self.login(username="admin") + database = get_example_database() + argument = [database.id] + uri = f"api/v1/database/export/?q={prison.dumps(argument)}" 
+ rv = self.get_assert_metric(uri, "export") + assert rv.status_code == 200 + + buf = BytesIO(rv.data) + assert is_zipfile(buf) + + def test_export_database_not_allowed(self): + """ + Database API: Test export database not allowed + """ + self.login(username="gamma") + database = get_example_database() + argument = [database.id] + uri = f"api/v1/database/export/?q={prison.dumps(argument)}" + rv = self.client.get(uri) + assert rv.status_code == 403 + + def test_export_database_non_existing(self): + """ + Database API: Test export database not allowed + """ + max_id = db.session.query(func.max(Database.id)).scalar() + # id does not exist and we get 404 + invalid_id = max_id + 1 + + self.login(username="admin") + argument = [invalid_id] + uri = f"api/v1/database/export/?q={prison.dumps(argument)}" + rv = self.get_assert_metric(uri, "export") + assert rv.status_code == 404 + + def test_import_database(self): + """ + Database API: Test import database + """ + self.login(username="admin") + uri = "api/v1/database/import/" + + buf = self.create_database_import() + form_data = { + "formData": (buf, "database_export.zip"), + } + rv = self.client.post(uri, data=form_data, content_type="multipart/form-data") + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 200 + assert response == {"message": "OK"} + + database = ( + db.session.query(Database).filter_by(uuid=database_config["uuid"]).one() + ) + assert database.database_name == "imported_database" + + assert len(database.tables) == 1 + dataset = database.tables[0] + assert dataset.table_name == "imported_dataset" + assert str(dataset.uuid) == dataset_config["uuid"] + + dataset.owners = [] + db.session.delete(dataset) + db.session.commit() + db.session.delete(database) + db.session.commit() + + def test_import_database_overwrite(self): + """ + Database API: Test import existing database + """ + self.login(username="admin") + uri = "api/v1/database/import/" + + buf = self.create_database_import() + form_data = { + "formData": (buf, "database_export.zip"), + } + rv = self.client.post(uri, data=form_data, content_type="multipart/form-data") + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 200 + assert response == {"message": "OK"} + + # import again without overwrite flag + buf = self.create_database_import() + form_data = { + "formData": (buf, "database_export.zip"), + } + rv = self.client.post(uri, data=form_data, content_type="multipart/form-data") + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 422 + assert response == { + "errors": [ + { + "message": "Error importing database", + "error_type": "GENERIC_COMMAND_ERROR", + "level": "warning", + "extra": { + "databases/imported_database.yaml": "Database already exists and `overwrite=true` was not passed", + "issue_codes": [ + { + "code": 1010, + "message": ( + "Issue 1010 - Superset encountered an " + "error while running a command." 
+ ), + } + ], + }, + } + ] + } + + # import with overwrite flag + buf = self.create_database_import() + form_data = { + "formData": (buf, "database_export.zip"), + "overwrite": "true", + } + rv = self.client.post(uri, data=form_data, content_type="multipart/form-data") + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 200 + assert response == {"message": "OK"} + + # clean up + database = ( + db.session.query(Database).filter_by(uuid=database_config["uuid"]).one() + ) + dataset = database.tables[0] + dataset.owners = [] + db.session.delete(dataset) + db.session.commit() + db.session.delete(database) + db.session.commit() + + def test_import_database_invalid(self): + """ + Database API: Test import invalid database + """ + self.login(username="admin") + uri = "api/v1/database/import/" + + buf = BytesIO() + with ZipFile(buf, "w") as bundle: + with bundle.open("database_export/metadata.yaml", "w") as fp: + fp.write(yaml.safe_dump(dataset_metadata_config).encode()) + with bundle.open( + "database_export/databases/imported_database.yaml", "w" + ) as fp: + fp.write(yaml.safe_dump(database_config).encode()) + with bundle.open( + "database_export/datasets/imported_dataset.yaml", "w" + ) as fp: + fp.write(yaml.safe_dump(dataset_config).encode()) + buf.seek(0) + + form_data = { + "formData": (buf, "database_export.zip"), + } + rv = self.client.post(uri, data=form_data, content_type="multipart/form-data") + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 422 + assert response == { + "errors": [ + { + "message": "Error importing database", + "error_type": "GENERIC_COMMAND_ERROR", + "level": "warning", + "extra": { + "metadata.yaml": {"type": ["Must be equal to Database."]}, + "issue_codes": [ + { + "code": 1010, + "message": ( + "Issue 1010 - Superset encountered an " + "error while running a command." + ), + } + ], + }, + } + ] + } + + def test_import_database_masked_password(self): + """ + Database API: Test import database with masked password + """ + self.login(username="admin") + uri = "api/v1/database/import/" + + masked_database_config = database_config.copy() + masked_database_config[ + "sqlalchemy_uri" + ] = "postgresql://username:XXXXXXXXXX@host:12345/db" + + buf = BytesIO() + with ZipFile(buf, "w") as bundle: + with bundle.open("database_export/metadata.yaml", "w") as fp: + fp.write(yaml.safe_dump(database_metadata_config).encode()) + with bundle.open( + "database_export/databases/imported_database.yaml", "w" + ) as fp: + fp.write(yaml.safe_dump(masked_database_config).encode()) + with bundle.open( + "database_export/datasets/imported_dataset.yaml", "w" + ) as fp: + fp.write(yaml.safe_dump(dataset_config).encode()) + buf.seek(0) + + form_data = { + "formData": (buf, "database_export.zip"), + } + rv = self.client.post(uri, data=form_data, content_type="multipart/form-data") + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 422 + assert response == { + "errors": [ + { + "message": "Error importing database", + "error_type": "GENERIC_COMMAND_ERROR", + "level": "warning", + "extra": { + "databases/imported_database.yaml": { + "_schema": ["Must provide a password for the database"] + }, + "issue_codes": [ + { + "code": 1010, + "message": ( + "Issue 1010 - Superset encountered an " + "error while running a command." 
+ ), + } + ], + }, + } + ] + } + + def test_import_database_masked_password_provided(self): + """ + Database API: Test import database with masked password provided + """ + self.login(username="admin") + uri = "api/v1/database/import/" + + masked_database_config = database_config.copy() + masked_database_config[ + "sqlalchemy_uri" + ] = "vertica+vertica_python://hackathon:XXXXXXXXXX@host:5433/dbname?ssl=1" + + buf = BytesIO() + with ZipFile(buf, "w") as bundle: + with bundle.open("database_export/metadata.yaml", "w") as fp: + fp.write(yaml.safe_dump(database_metadata_config).encode()) + with bundle.open( + "database_export/databases/imported_database.yaml", "w" + ) as fp: + fp.write(yaml.safe_dump(masked_database_config).encode()) + buf.seek(0) + + form_data = { + "formData": (buf, "database_export.zip"), + "passwords": json.dumps({"databases/imported_database.yaml": "SECRET"}), + } + rv = self.client.post(uri, data=form_data, content_type="multipart/form-data") + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 200 + assert response == {"message": "OK"} + + database = ( + db.session.query(Database).filter_by(uuid=database_config["uuid"]).one() + ) + assert database.database_name == "imported_database" + assert ( + database.sqlalchemy_uri + == "vertica+vertica_python://hackathon:XXXXXXXXXX@host:5433/dbname?ssl=1" + ) + assert database.password == "SECRET" + + db.session.delete(database) + db.session.commit() + + @mock.patch( + "superset.db_engine_specs.base.BaseEngineSpec.get_function_names", + ) + def test_function_names(self, mock_get_function_names): + example_db = get_example_database() + if example_db.backend in {"hive", "presto"}: + return + + mock_get_function_names.return_value = ["AVG", "MAX", "SUM"] + + self.login(username="admin") + uri = "api/v1/database/1/function_names/" + + rv = self.client.get(uri) + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 200 + assert response == {"function_names": ["AVG", "MAX", "SUM"]} + + @mock.patch("superset.databases.api.get_available_engine_specs") + @mock.patch("superset.databases.api.app") + def test_available(self, app, get_available_engine_specs): + app.config = {"PREFERRED_DATABASES": ["PostgreSQL", "Google BigQuery"]} + get_available_engine_specs.return_value = { + PostgresEngineSpec: {"psycopg2"}, + BigQueryEngineSpec: {"bigquery"}, + MySQLEngineSpec: {"mysqlconnector", "mysqldb"}, + GSheetsEngineSpec: {"apsw"}, + RedshiftEngineSpec: {"psycopg2"}, + HanaEngineSpec: {""}, + } + + self.login(username="admin") + uri = "api/v1/database/available/" + + rv = self.client.get(uri) + response = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 200 + assert response == { + "databases": [ + { + "available_drivers": ["psycopg2"], + "default_driver": "psycopg2", + "engine": "postgresql", + "name": "PostgreSQL", + "parameters": { + "properties": { + "database": { + "description": "Database name", + "type": "string", + }, + "encryption": { + "description": "Use an encrypted connection to the database", + "type": "boolean", + }, + "host": { + "description": "Hostname or IP address", + "type": "string", + }, + "password": { + "description": "Password", + "nullable": True, + "type": "string", + }, + "port": { + "description": "Database port", + "format": "int32", + "maximum": 65536, + "minimum": 0, + "type": "integer", + }, + "query": { + "additionalProperties": {}, + "description": "Additional parameters", + "type": "object", + }, + "username": { + "description": "Username", + 
"nullable": True, + "type": "string", + }, + }, + "required": ["database", "host", "port", "username"], + "type": "object", + }, + "preferred": True, + "sqlalchemy_uri_placeholder": "postgresql://user:password@host:port/dbname[?key=value&key=value...]", + "engine_information": { + "supports_file_upload": True, + "disable_ssh_tunneling": False, + }, + }, + { + "available_drivers": ["bigquery"], + "default_driver": "bigquery", + "engine": "bigquery", + "name": "Google BigQuery", + "parameters": { + "properties": { + "credentials_info": { + "description": "Contents of BigQuery JSON credentials.", + "type": "string", + "x-encrypted-extra": True, + }, + "query": {"type": "object"}, + }, + "type": "object", + }, + "preferred": True, + "sqlalchemy_uri_placeholder": "bigquery://{project_id}", + "engine_information": { + "supports_file_upload": True, + "disable_ssh_tunneling": True, + }, + }, + { + "available_drivers": ["psycopg2"], + "default_driver": "psycopg2", + "engine": "redshift", + "name": "Amazon Redshift", + "parameters": { + "properties": { + "database": { + "description": "Database name", + "type": "string", + }, + "encryption": { + "description": "Use an encrypted connection to the database", + "type": "boolean", + }, + "host": { + "description": "Hostname or IP address", + "type": "string", + }, + "password": { + "description": "Password", + "nullable": True, + "type": "string", + }, + "port": { + "description": "Database port", + "format": "int32", + "maximum": 65536, + "minimum": 0, + "type": "integer", + }, + "query": { + "additionalProperties": {}, + "description": "Additional parameters", + "type": "object", + }, + "username": { + "description": "Username", + "nullable": True, + "type": "string", + }, + }, + "required": ["database", "host", "port", "username"], + "type": "object", + }, + "preferred": False, + "sqlalchemy_uri_placeholder": "redshift+psycopg2://user:password@host:port/dbname[?key=value&key=value...]", + "engine_information": { + "supports_file_upload": True, + "disable_ssh_tunneling": False, + }, + }, + { + "available_drivers": ["apsw"], + "default_driver": "apsw", + "engine": "gsheets", + "name": "Google Sheets", + "parameters": { + "properties": { + "catalog": {"type": "object"}, + "service_account_info": { + "description": "Contents of GSheets JSON credentials.", + "type": "string", + "x-encrypted-extra": True, + }, + }, + "type": "object", + }, + "preferred": False, + "sqlalchemy_uri_placeholder": "gsheets://", + "engine_information": { + "supports_file_upload": False, + "disable_ssh_tunneling": True, + }, + }, + { + "available_drivers": ["mysqlconnector", "mysqldb"], + "default_driver": "mysqldb", + "engine": "mysql", + "name": "MySQL", + "parameters": { + "properties": { + "database": { + "description": "Database name", + "type": "string", + }, + "encryption": { + "description": "Use an encrypted connection to the database", + "type": "boolean", + }, + "host": { + "description": "Hostname or IP address", + "type": "string", + }, + "password": { + "description": "Password", + "nullable": True, + "type": "string", + }, + "port": { + "description": "Database port", + "format": "int32", + "maximum": 65536, + "minimum": 0, + "type": "integer", + }, + "query": { + "additionalProperties": {}, + "description": "Additional parameters", + "type": "object", + }, + "username": { + "description": "Username", + "nullable": True, + "type": "string", + }, + }, + "required": ["database", "host", "port", "username"], + "type": "object", + }, + "preferred": False, + 
"sqlalchemy_uri_placeholder": "mysql://user:password@host:port/dbname[?key=value&key=value...]", + "engine_information": { + "supports_file_upload": True, + "disable_ssh_tunneling": False, + }, + }, + { + "available_drivers": [""], + "engine": "hana", + "name": "SAP HANA", + "preferred": False, + "engine_information": { + "supports_file_upload": True, + "disable_ssh_tunneling": False, + }, + }, + ] + } + + @mock.patch("superset.databases.api.get_available_engine_specs") + @mock.patch("superset.databases.api.app") + def test_available_no_default(self, app, get_available_engine_specs): + app.config = {"PREFERRED_DATABASES": ["MySQL"]} + get_available_engine_specs.return_value = { + MySQLEngineSpec: {"mysqlconnector"}, + HanaEngineSpec: {""}, + } + + self.login(username="admin") + uri = "api/v1/database/available/" + + rv = self.client.get(uri) + response = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 200 + assert response == { + "databases": [ + { + "available_drivers": ["mysqlconnector"], + "default_driver": "mysqldb", + "engine": "mysql", + "name": "MySQL", + "preferred": True, + "engine_information": { + "supports_file_upload": True, + "disable_ssh_tunneling": False, + }, + }, + { + "available_drivers": [""], + "engine": "hana", + "name": "SAP HANA", + "preferred": False, + "engine_information": { + "supports_file_upload": True, + "disable_ssh_tunneling": False, + }, + }, + ] + } + + def test_validate_parameters_invalid_payload_format(self): + self.login(username="admin") + url = "api/v1/database/validate_parameters/" + rv = self.client.post(url, data="INVALID", content_type="text/plain") + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 400 + assert response == { + "errors": [ + { + "message": "Request is not JSON", + "error_type": "INVALID_PAYLOAD_FORMAT_ERROR", + "level": "error", + "extra": { + "issue_codes": [ + { + "code": 1019, + "message": "Issue 1019 - The submitted payload has the incorrect format.", + } + ] + }, + } + ] + } + + def test_validate_parameters_invalid_payload_schema(self): + self.login(username="admin") + url = "api/v1/database/validate_parameters/" + payload = {"foo": "bar"} + rv = self.client.post(url, json=payload) + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 422 + response["errors"].sort(key=lambda error: error["extra"]["invalid"][0]) + assert response == { + "errors": [ + { + "message": "Missing data for required field.", + "error_type": "INVALID_PAYLOAD_SCHEMA_ERROR", + "level": "error", + "extra": { + "invalid": ["configuration_method"], + "issue_codes": [ + { + "code": 1020, + "message": "Issue 1020 - The submitted payload" + " has the incorrect schema.", + } + ], + }, + }, + { + "message": "Missing data for required field.", + "error_type": "INVALID_PAYLOAD_SCHEMA_ERROR", + "level": "error", + "extra": { + "invalid": ["engine"], + "issue_codes": [ + { + "code": 1020, + "message": "Issue 1020 - The submitted payload " + "has the incorrect schema.", + } + ], + }, + }, + ] + } + + def test_validate_parameters_missing_fields(self): + self.login(username="admin") + url = "api/v1/database/validate_parameters/" + payload = { + "configuration_method": ConfigurationMethod.SQLALCHEMY_FORM, + "engine": "postgresql", + "parameters": defaultdict(dict), + } + payload["parameters"].update( + { + "host": "", + "port": 5432, + "username": "", + "password": "", + "database": "", + "query": {}, + } + ) + rv = self.client.post(url, json=payload) + response = json.loads(rv.data.decode("utf-8")) + + 
assert rv.status_code == 422 + assert response == { + "errors": [ + { + "message": "One or more parameters are missing: database, host," + " username", + "error_type": "CONNECTION_MISSING_PARAMETERS_ERROR", + "level": "warning", + "extra": { + "missing": ["database", "host", "username"], + "issue_codes": [ + { + "code": 1018, + "message": "Issue 1018 - One or more parameters " + "needed to configure a database are missing.", + } + ], + }, + } + ] + } + + @mock.patch("superset.db_engine_specs.base.is_hostname_valid") + @mock.patch("superset.db_engine_specs.base.is_port_open") + @mock.patch("superset.databases.api.ValidateDatabaseParametersCommand") + def test_validate_parameters_valid_payload( + self, ValidateDatabaseParametersCommand, is_port_open, is_hostname_valid + ): + is_hostname_valid.return_value = True + is_port_open.return_value = True + + self.login(username="admin") + url = "api/v1/database/validate_parameters/" + payload = { + "engine": "postgresql", + "parameters": defaultdict(dict), + "configuration_method": ConfigurationMethod.SQLALCHEMY_FORM, + } + payload["parameters"].update( + { + "host": "localhost", + "port": 6789, + "username": "superset", + "password": "XXX", + "database": "test", + "query": {}, + } + ) + rv = self.client.post(url, json=payload) + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 200 + assert response == {"message": "OK"} + + def test_validate_parameters_invalid_port(self): + self.login(username="admin") + url = "api/v1/database/validate_parameters/" + payload = { + "engine": "postgresql", + "parameters": defaultdict(dict), + "configuration_method": ConfigurationMethod.SQLALCHEMY_FORM, + } + payload["parameters"].update( + { + "host": "localhost", + "port": "string", + "username": "superset", + "password": "XXX", + "database": "test", + "query": {}, + } + ) + rv = self.client.post(url, json=payload) + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 422 + assert response == { + "errors": [ + { + "message": "Port must be a valid integer.", + "error_type": "CONNECTION_INVALID_PORT_ERROR", + "level": "error", + "extra": { + "invalid": ["port"], + "issue_codes": [ + { + "code": 1034, + "message": "Issue 1034 - The port number is invalid.", + } + ], + }, + }, + { + "message": "The port must be an integer between " + "0 and 65535 (inclusive).", + "error_type": "CONNECTION_INVALID_PORT_ERROR", + "level": "error", + "extra": { + "invalid": ["port"], + "issue_codes": [ + { + "code": 1034, + "message": "Issue 1034 - The port number is invalid.", + } + ], + }, + }, + ] + } + + @mock.patch("superset.db_engine_specs.base.is_hostname_valid") + def test_validate_parameters_invalid_host(self, is_hostname_valid): + is_hostname_valid.return_value = False + + self.login(username="admin") + url = "api/v1/database/validate_parameters/" + payload = { + "engine": "postgresql", + "parameters": defaultdict(dict), + "configuration_method": ConfigurationMethod.SQLALCHEMY_FORM, + } + payload["parameters"].update( + { + "host": "localhost", + "port": 5432, + "username": "", + "password": "", + "database": "", + "query": {}, + } + ) + rv = self.client.post(url, json=payload) + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 422 + assert response == { + "errors": [ + { + "message": "One or more parameters are missing: database, username", + "error_type": "CONNECTION_MISSING_PARAMETERS_ERROR", + "level": "warning", + "extra": { + "missing": ["database", "username"], + "issue_codes": [ + { + "code": 
1018, + "message": "Issue 1018 - One or more parameters" + " needed to configure a database are missing.", + } + ], + }, + }, + { + "message": "The hostname provided can't be resolved.", + "error_type": "CONNECTION_INVALID_HOSTNAME_ERROR", + "level": "error", + "extra": { + "invalid": ["host"], + "issue_codes": [ + { + "code": 1007, + "message": "Issue 1007 - The hostname " + "provided can't be resolved.", + } + ], + }, + }, + ] + } + + @mock.patch("superset.db_engine_specs.base.is_hostname_valid") + def test_validate_parameters_invalid_port_range(self, is_hostname_valid): + is_hostname_valid.return_value = True + + self.login(username="admin") + url = "api/v1/database/validate_parameters/" + payload = { + "engine": "postgresql", + "parameters": defaultdict(dict), + "configuration_method": ConfigurationMethod.SQLALCHEMY_FORM, + } + payload["parameters"].update( + { + "host": "localhost", + "port": 65536, + "username": "", + "password": "", + "database": "", + "query": {}, + } + ) + rv = self.client.post(url, json=payload) + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 422 + assert response == { + "errors": [ + { + "message": "One or more parameters are missing: database, username", + "error_type": "CONNECTION_MISSING_PARAMETERS_ERROR", + "level": "warning", + "extra": { + "missing": ["database", "username"], + "issue_codes": [ + { + "code": 1018, + "message": "Issue 1018 - One or more parameters needed to configure a database are missing.", + } + ], + }, + }, + { + "message": "The port must be an integer between 0 and 65535 (inclusive).", + "error_type": "CONNECTION_INVALID_PORT_ERROR", + "level": "error", + "extra": { + "invalid": ["port"], + "issue_codes": [ + { + "code": 1034, + "message": "Issue 1034 - The port number is invalid.", + } + ], + }, + }, + ] + } + + def test_get_related_objects(self): + example_db = get_example_database() + self.login(username="admin") + uri = f"api/v1/database/{example_db.id}/related_objects/" + rv = self.client.get(uri) + assert rv.status_code == 200 + assert "charts" in rv.json + assert "dashboards" in rv.json + assert "sqllab_tab_states" in rv.json + + @patch.dict( + "superset.config.SQL_VALIDATORS_BY_ENGINE", + SQL_VALIDATORS_BY_ENGINE, + clear=True, + ) + def test_validate_sql(self): + """ + Database API: validate SQL success + """ + request_payload = { + "sql": "SELECT * from birth_names", + "schema": None, + "template_params": None, + } + + example_db = get_example_database() + if example_db.backend not in ("presto", "postgresql"): + pytest.skip("Only presto and PG are implemented") + + self.login(username="admin") + uri = f"api/v1/database/{example_db.id}/validate_sql/" + rv = self.client.post(uri, json=request_payload) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(rv.status_code, 200) + self.assertEqual(response["result"], []) + + @patch.dict( + "superset.config.SQL_VALIDATORS_BY_ENGINE", + SQL_VALIDATORS_BY_ENGINE, + clear=True, + ) + def test_validate_sql_errors(self): + """ + Database API: validate SQL with errors + """ + request_payload = { + "sql": "SELECT col1 froma table1", + "schema": None, + "template_params": None, + } + + example_db = get_example_database() + if example_db.backend not in ("presto", "postgresql"): + pytest.skip("Only presto and PG are implemented") + + self.login(username="admin") + uri = f"api/v1/database/{example_db.id}/validate_sql/" + rv = self.client.post(uri, json=request_payload) + response = json.loads(rv.data.decode("utf-8")) + 
self.assertEqual(rv.status_code, 200) + self.assertEqual( + response["result"], + [ + { + "end_column": None, + "line_number": 1, + "message": 'ERROR: syntax error at or near "table1"', + "start_column": None, + } + ], + ) + + @patch.dict( + "superset.config.SQL_VALIDATORS_BY_ENGINE", + SQL_VALIDATORS_BY_ENGINE, + clear=True, + ) + def test_validate_sql_not_found(self): + """ + Database API: validate SQL database not found + """ + request_payload = { + "sql": "SELECT * from birth_names", + "schema": None, + "template_params": None, + } + self.login(username="admin") + uri = ( + f"api/v1/database/{self.get_nonexistent_numeric_id(Database)}/validate_sql/" + ) + rv = self.client.post(uri, json=request_payload) + self.assertEqual(rv.status_code, 404) + + @patch.dict( + "superset.config.SQL_VALIDATORS_BY_ENGINE", + SQL_VALIDATORS_BY_ENGINE, + clear=True, + ) + def test_validate_sql_validation_fails(self): + """ + Database API: validate SQL database payload validation fails + """ + request_payload = { + "sql": None, + "schema": None, + "template_params": None, + } + self.login(username="admin") + uri = ( + f"api/v1/database/{self.get_nonexistent_numeric_id(Database)}/validate_sql/" + ) + rv = self.client.post(uri, json=request_payload) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(rv.status_code, 400) + self.assertEqual(response, {"message": {"sql": ["Field may not be null."]}}) + + @patch.dict( + "superset.config.SQL_VALIDATORS_BY_ENGINE", + {}, + clear=True, + ) + def test_validate_sql_endpoint_noconfig(self): + """Assert that validate_sql_json errors out when no validators are + configured for any db""" + request_payload = { + "sql": "SELECT col1 from table1", + "schema": None, + "template_params": None, + } + + self.login("admin") + + example_db = get_example_database() + + uri = f"api/v1/database/{example_db.id}/validate_sql/" + rv = self.client.post(uri, json=request_payload) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(rv.status_code, 422) + self.assertEqual( + response, + { + "errors": [ + { + "message": f"no SQL validator is configured for " + f"{example_db.backend}", + "error_type": "GENERIC_DB_ENGINE_ERROR", + "level": "error", + "extra": { + "issue_codes": [ + { + "code": 1002, + "message": "Issue 1002 - The database returned an " + "unexpected error.", + } + ] + }, + } + ] + }, + ) + + @patch("superset.databases.commands.validate_sql.get_validator_by_name") + @patch.dict( + "superset.config.SQL_VALIDATORS_BY_ENGINE", + PRESTO_SQL_VALIDATORS_BY_ENGINE, + clear=True, + ) + def test_validate_sql_endpoint_failure(self, get_validator_by_name): + """Assert that validate_sql_json errors out when the selected validator + raises an unexpected exception""" + + request_payload = { + "sql": "SELECT * FROM birth_names", + "schema": None, + "template_params": None, + } + + self.login("admin") + + validator = MagicMock() + get_validator_by_name.return_value = validator + validator.validate.side_effect = Exception("Kaboom!") + + self.login("admin") + + example_db = get_example_database() + + uri = f"api/v1/database/{example_db.id}/validate_sql/" + rv = self.client.post(uri, json=request_payload) + response = json.loads(rv.data.decode("utf-8")) + + # TODO(bkyryliuk): properly handle hive error + if get_example_database().backend == "hive": + return + self.assertEqual(rv.status_code, 422) + self.assertIn("Kaboom!", response["errors"][0]["message"]) diff --git a/tests/integration_tests/databases/commands_tests.py 
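# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch, not part of this patch. validate_sql
# returns a list of annotations like the one asserted in the error test above
# (line_number, start_column, end_column, message); the hypothetical helper
# below shows one way a caller could render them.
def format_annotation(annotation):
    line = annotation.get("line_number")
    col = annotation.get("start_column")
    location = f"line {line}" + (f", column {col}" if col is not None else "")
    return f"{location}: {annotation['message']}"

# format_annotation({"line_number": 1, "start_column": None, "end_column": None,
#                    "message": 'ERROR: syntax error at or near "table1"'})
# -> 'line 1: ERROR: syntax error at or near "table1"'
# ---------------------------------------------------------------------------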
b/tests/integration_tests/databases/commands_tests.py new file mode 100644 index 0000000000000..7e4fcaad789ed --- /dev/null +++ b/tests/integration_tests/databases/commands_tests.py @@ -0,0 +1,962 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from unittest import mock, skip +from unittest.mock import patch + +import pytest +import yaml +from func_timeout import FunctionTimedOut +from sqlalchemy.exc import DBAPIError + +from superset import db, event_logger, security_manager +from superset.commands.exceptions import CommandInvalidError +from superset.commands.importers.exceptions import IncorrectVersionError +from superset.connectors.sqla.models import SqlaTable +from superset.databases.commands.create import CreateDatabaseCommand +from superset.databases.commands.exceptions import ( + DatabaseInvalidError, + DatabaseNotFoundError, + DatabaseSecurityUnsafeError, + DatabaseTablesUnexpectedError, + DatabaseTestConnectionDriverError, + DatabaseTestConnectionUnexpectedError, +) +from superset.databases.commands.export import ExportDatabasesCommand +from superset.databases.commands.importers.v1 import ImportDatabasesCommand +from superset.databases.commands.tables import TablesDatabaseCommand +from superset.databases.commands.test_connection import TestConnectionDatabaseCommand +from superset.databases.commands.validate import ValidateDatabaseParametersCommand +from superset.databases.schemas import DatabaseTestConnectionSchema +from superset.errors import ErrorLevel, SupersetError, SupersetErrorType +from superset.exceptions import ( + SupersetErrorsException, + SupersetException, + SupersetSecurityException, + SupersetTimeoutException, +) +from superset.models.core import Database +from superset.utils.core import backend +from superset.utils.database import get_example_database +from tests.integration_tests.base_tests import SupersetTestCase +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) +from tests.integration_tests.fixtures.energy_dashboard import ( + load_energy_table_data, + load_energy_table_with_slice, +) +from tests.integration_tests.fixtures.importexport import ( + database_config, + database_metadata_config, + dataset_config, + dataset_metadata_config, +) + + +class TestCreateDatabaseCommand(SupersetTestCase): + @mock.patch( + "superset.databases.commands.test_connection.event_logger.log_with_context" + ) + @mock.patch("superset.utils.core.g") + def test_create_duplicate_error(self, mock_g, mock_logger): + example_db = get_example_database() + mock_g.user = security_manager.find_user("admin") + command = CreateDatabaseCommand( + {"database_name": example_db.database_name}, + ) + with pytest.raises(DatabaseInvalidError) as excinfo: + command.run() + assert 
str(excinfo.value) == ("Database parameters are invalid.") + # logger should list classnames of all errors + mock_logger.assert_called_with( + action="db_connection_failed." + "DatabaseInvalidError." + "DatabaseExistsValidationError." + "DatabaseRequiredFieldValidationError" + ) + + @mock.patch( + "superset.databases.commands.test_connection.event_logger.log_with_context" + ) + @mock.patch("superset.utils.core.g") + def test_multiple_error_logging(self, mock_g, mock_logger): + mock_g.user = security_manager.find_user("admin") + command = CreateDatabaseCommand({}) + with pytest.raises(DatabaseInvalidError) as excinfo: + command.run() + assert str(excinfo.value) == ("Database parameters are invalid.") + # logger should list a unique set of errors with no duplicates + mock_logger.assert_called_with( + action="db_connection_failed." + "DatabaseInvalidError." + "DatabaseRequiredFieldValidationError" + ) + + +class TestExportDatabasesCommand(SupersetTestCase): + @skip("Flaky") + @patch("superset.security.manager.g") + @pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "load_energy_table_with_slice" + ) + def test_export_database_command(self, mock_g): + mock_g.user = security_manager.find_user("admin") + + example_db = get_example_database() + db_uuid = example_db.uuid + + command = ExportDatabasesCommand([example_db.id]) + contents = dict(command.run()) + + # TODO: this list shouldn't depend on the order in which unit tests are run + # or on the backend; for now use a stable subset + core_files = { + "metadata.yaml", + "databases/examples.yaml", + "datasets/examples/energy_usage.yaml", + "datasets/examples/birth_names.yaml", + } + expected_extra = { + "engine_params": {}, + "metadata_cache_timeout": {}, + "metadata_params": {}, + "schemas_allowed_for_file_upload": [], + } + if backend() == "presto": + expected_extra = { + **expected_extra, + "engine_params": {"connect_args": {"poll_interval": 0.1}}, + } + assert core_files.issubset(set(contents.keys())) + + if example_db.backend == "postgresql": + ds_type = "TIMESTAMP WITHOUT TIME ZONE" + elif example_db.backend == "hive": + ds_type = "TIMESTAMP" + elif example_db.backend == "presto": + ds_type = "VARCHAR(255)" + else: + ds_type = "DATETIME" + if example_db.backend == "mysql": + big_int_type = "BIGINT(20)" + else: + big_int_type = "BIGINT" + metadata = yaml.safe_load(contents["databases/examples.yaml"]) + assert metadata == ( + { + "allow_csv_upload": True, + "allow_ctas": True, + "allow_cvas": True, + "allow_dml": True, + "allow_run_async": False, + "cache_timeout": None, + "database_name": "examples", + "expose_in_sqllab": True, + "extra": expected_extra, + "sqlalchemy_uri": example_db.sqlalchemy_uri, + "uuid": str(example_db.uuid), + "version": "1.0.0", + } + ) + + metadata = yaml.safe_load(contents["datasets/examples/birth_names.yaml"]) + metadata.pop("uuid") + + metadata["columns"].sort(key=lambda x: x["column_name"]) + expected_metadata = { + "cache_timeout": None, + "columns": [ + { + "column_name": "ds", + "description": None, + "expression": None, + "filterable": True, + "groupby": True, + "is_active": True, + "is_dttm": True, + "python_date_format": None, + "type": ds_type, + "advanced_data_type": None, + "verbose_name": None, + }, + { + "column_name": "gender", + "description": None, + "expression": None, + "filterable": True, + "groupby": True, + "is_active": True, + "is_dttm": False, + "python_date_format": None, + "type": "STRING" if example_db.backend == "hive" else "VARCHAR(16)", + "advanced_data_type": None, 
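# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch, not part of this patch. As the export test
# above shows, ExportDatabasesCommand.run() yields (path, YAML string) pairs that
# can be materialized with dict(...) and parsed with yaml.safe_load. fake_run is
# a stand-in generator with made-up content illustrating that bundle shape.
import yaml

def fake_run():
    yield "metadata.yaml", yaml.safe_dump({"version": "1.0.0", "type": "Database"})
    yield "databases/examples.yaml", yaml.safe_dump(
        {"database_name": "examples", "version": "1.0.0"}
    )

bundle = dict(fake_run())
meta = yaml.safe_load(bundle["databases/examples.yaml"])
assert meta["database_name"] == "examples"
# ---------------------------------------------------------------------------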
+ "verbose_name": None, + }, + { + "column_name": "name", + "description": None, + "expression": None, + "filterable": True, + "groupby": True, + "is_active": True, + "is_dttm": False, + "python_date_format": None, + "type": "STRING" + if example_db.backend == "hive" + else "VARCHAR(255)", + "advanced_data_type": None, + "verbose_name": None, + }, + { + "column_name": "num", + "description": None, + "expression": None, + "filterable": True, + "groupby": True, + "is_active": True, + "is_dttm": False, + "python_date_format": None, + "type": big_int_type, + "advanced_data_type": None, + "verbose_name": None, + }, + { + "column_name": "num_california", + "description": None, + "expression": "CASE WHEN state = 'CA' THEN num ELSE 0 END", + "filterable": True, + "groupby": True, + "is_active": True, + "is_dttm": False, + "python_date_format": None, + "type": None, + "advanced_data_type": None, + "verbose_name": None, + }, + { + "column_name": "state", + "description": None, + "expression": None, + "filterable": True, + "groupby": True, + "is_active": True, + "is_dttm": False, + "python_date_format": None, + "type": "STRING" if example_db.backend == "hive" else "VARCHAR(10)", + "advanced_data_type": None, + "verbose_name": None, + }, + { + "column_name": "num_boys", + "description": None, + "expression": None, + "filterable": True, + "groupby": True, + "is_active": True, + "is_dttm": False, + "python_date_format": None, + "type": big_int_type, + "advanced_data_type": None, + "verbose_name": None, + }, + { + "column_name": "num_girls", + "description": None, + "expression": None, + "filterable": True, + "groupby": True, + "is_active": True, + "is_dttm": False, + "python_date_format": None, + "type": big_int_type, + "advanced_data_type": None, + "verbose_name": None, + }, + ], + "database_uuid": str(db_uuid), + "default_endpoint": None, + "description": "", + "extra": None, + "fetch_values_predicate": "123 = 123", + "filter_select_enabled": True, + "main_dttm_col": "ds", + "metrics": [ + { + "d3format": None, + "description": None, + "expression": "COUNT(*)", + "extra": None, + "metric_name": "count", + "metric_type": "count", + "verbose_name": "COUNT(*)", + "warning_text": None, + }, + { + "d3format": None, + "description": None, + "expression": "SUM(num)", + "extra": None, + "metric_name": "sum__num", + "metric_type": None, + "verbose_name": None, + "warning_text": None, + }, + ], + "offset": 0, + "params": None, + "schema": None, + "sql": None, + "table_name": "birth_names", + "template_params": None, + "version": "1.0.0", + } + expected_metadata["columns"].sort(key=lambda x: x["column_name"]) + assert metadata == expected_metadata + + @patch("superset.security.manager.g") + def test_export_database_command_no_access(self, mock_g): + """Test that users can't export databases they don't have access to""" + mock_g.user = security_manager.find_user("gamma") + + example_db = get_example_database() + command = ExportDatabasesCommand([example_db.id]) + contents = command.run() + with self.assertRaises(DatabaseNotFoundError): + next(contents) + + @patch("superset.security.manager.g") + def test_export_database_command_invalid_database(self, mock_g): + """Test that an error is raised when exporting an invalid database""" + mock_g.user = security_manager.find_user("admin") + command = ExportDatabasesCommand([-1]) + contents = command.run() + with self.assertRaises(DatabaseNotFoundError): + next(contents) + + @patch("superset.security.manager.g") + def test_export_database_command_key_order(self, mock_g): 
+ """Test that they keys in the YAML have the same order as export_fields""" + mock_g.user = security_manager.find_user("admin") + + example_db = get_example_database() + command = ExportDatabasesCommand([example_db.id]) + contents = dict(command.run()) + + metadata = yaml.safe_load(contents["databases/examples.yaml"]) + assert list(metadata.keys()) == [ + "database_name", + "sqlalchemy_uri", + "cache_timeout", + "expose_in_sqllab", + "allow_run_async", + "allow_ctas", + "allow_cvas", + "allow_dml", + "allow_csv_upload", + "extra", + "uuid", + "version", + ] + + @patch("superset.security.manager.g") + @pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "load_energy_table_with_slice" + ) + def test_export_database_command_no_related(self, mock_g): + """ + Test that only databases are exported when export_related=False. + """ + mock_g.user = security_manager.find_user("admin") + + example_db = get_example_database() + db_uuid = example_db.uuid + + command = ExportDatabasesCommand([example_db.id], export_related=False) + contents = dict(command.run()) + prefixes = {path.split("/")[0] for path in contents} + assert "metadata.yaml" in prefixes + assert "databases" in prefixes + assert "datasets" not in prefixes + + +class TestImportDatabasesCommand(SupersetTestCase): + def test_import_v1_database(self): + """Test that a database can be imported""" + contents = { + "metadata.yaml": yaml.safe_dump(database_metadata_config), + "databases/imported_database.yaml": yaml.safe_dump(database_config), + } + command = ImportDatabasesCommand(contents) + command.run() + + database = ( + db.session.query(Database).filter_by(uuid=database_config["uuid"]).one() + ) + assert database.allow_file_upload + assert database.allow_ctas + assert database.allow_cvas + assert database.allow_dml + assert not database.allow_run_async + assert database.cache_timeout is None + assert database.database_name == "imported_database" + assert database.expose_in_sqllab + assert database.extra == "{}" + assert database.sqlalchemy_uri == "sqlite:///test.db" + + db.session.delete(database) + db.session.commit() + + def test_import_v1_database_broken_csv_fields(self): + """ + Test that a database can be imported with broken schema. + + https://github.com/apache/superset/pull/16756 renamed some fields, changing + the V1 schema. This test ensures that we can import databases that were + exported with the broken schema. 
+ """ + broken_config = database_config.copy() + broken_config["allow_file_upload"] = broken_config.pop("allow_csv_upload") + broken_config["extra"] = {"schemas_allowed_for_file_upload": ["upload"]} + + contents = { + "metadata.yaml": yaml.safe_dump(database_metadata_config), + "databases/imported_database.yaml": yaml.safe_dump(broken_config), + } + command = ImportDatabasesCommand(contents) + command.run() + + database = ( + db.session.query(Database).filter_by(uuid=database_config["uuid"]).one() + ) + assert database.allow_file_upload + assert database.allow_ctas + assert database.allow_cvas + assert database.allow_dml + assert not database.allow_run_async + assert database.cache_timeout is None + assert database.database_name == "imported_database" + assert database.expose_in_sqllab + assert database.extra == '{"schemas_allowed_for_file_upload": ["upload"]}' + assert database.sqlalchemy_uri == "sqlite:///test.db" + + db.session.delete(database) + db.session.commit() + + def test_import_v1_database_multiple(self): + """Test that a database can be imported multiple times""" + num_databases = db.session.query(Database).count() + + contents = { + "databases/imported_database.yaml": yaml.safe_dump(database_config), + "metadata.yaml": yaml.safe_dump(database_metadata_config), + } + command = ImportDatabasesCommand(contents, overwrite=True) + + # import twice + command.run() + command.run() + + database = ( + db.session.query(Database).filter_by(uuid=database_config["uuid"]).one() + ) + assert database.allow_file_upload + + # update allow_file_upload to False + new_config = database_config.copy() + new_config["allow_csv_upload"] = False + contents = { + "databases/imported_database.yaml": yaml.safe_dump(new_config), + "metadata.yaml": yaml.safe_dump(database_metadata_config), + } + command = ImportDatabasesCommand(contents, overwrite=True) + command.run() + + database = ( + db.session.query(Database).filter_by(uuid=database_config["uuid"]).one() + ) + assert not database.allow_file_upload + + # test that only one database was created + new_num_databases = db.session.query(Database).count() + assert new_num_databases == num_databases + 1 + + db.session.delete(database) + db.session.commit() + + def test_import_v1_database_with_dataset(self): + """Test that a database can be imported with datasets""" + contents = { + "databases/imported_database.yaml": yaml.safe_dump(database_config), + "datasets/imported_dataset.yaml": yaml.safe_dump(dataset_config), + "metadata.yaml": yaml.safe_dump(database_metadata_config), + } + command = ImportDatabasesCommand(contents) + command.run() + + database = ( + db.session.query(Database).filter_by(uuid=database_config["uuid"]).one() + ) + assert len(database.tables) == 1 + assert str(database.tables[0].uuid) == "10808100-158b-42c4-842e-f32b99d88dfb" + + db.session.delete(database.tables[0]) + db.session.delete(database) + db.session.commit() + + def test_import_v1_database_with_dataset_multiple(self): + """Test that a database can be imported multiple times w/o changing datasets""" + contents = { + "databases/imported_database.yaml": yaml.safe_dump(database_config), + "datasets/imported_dataset.yaml": yaml.safe_dump(dataset_config), + "metadata.yaml": yaml.safe_dump(database_metadata_config), + } + command = ImportDatabasesCommand(contents) + command.run() + + dataset = ( + db.session.query(SqlaTable).filter_by(uuid=dataset_config["uuid"]).one() + ) + assert dataset.offset == 66 + + new_config = dataset_config.copy() + new_config["offset"] = 67 + contents = { + 
"databases/imported_database.yaml": yaml.safe_dump(database_config), + "datasets/imported_dataset.yaml": yaml.safe_dump(new_config), + "metadata.yaml": yaml.safe_dump(database_metadata_config), + } + command = ImportDatabasesCommand(contents, overwrite=True) + command.run() + + # the underlying dataset should not be modified by the second import, since + # we're importing a database, not a dataset + dataset = ( + db.session.query(SqlaTable).filter_by(uuid=dataset_config["uuid"]).one() + ) + assert dataset.offset == 66 + + db.session.delete(dataset) + db.session.delete(dataset.database) + db.session.commit() + + def test_import_v1_database_validation(self): + """Test different validations applied when importing a database""" + # metadata.yaml must be present + contents = { + "databases/imported_database.yaml": yaml.safe_dump(database_config), + } + command = ImportDatabasesCommand(contents) + with pytest.raises(IncorrectVersionError) as excinfo: + command.run() + assert str(excinfo.value) == "Missing metadata.yaml" + + # version should be 1.0.0 + contents["metadata.yaml"] = yaml.safe_dump( + { + "version": "2.0.0", + "type": "Database", + "timestamp": "2020-11-04T21:27:44.423819+00:00", + } + ) + command = ImportDatabasesCommand(contents) + with pytest.raises(IncorrectVersionError) as excinfo: + command.run() + assert str(excinfo.value) == "Must be equal to 1.0.0." + + # type should be Database + contents["metadata.yaml"] = yaml.safe_dump(dataset_metadata_config) + command = ImportDatabasesCommand(contents) + with pytest.raises(CommandInvalidError) as excinfo: + command.run() + assert str(excinfo.value) == "Error importing database" + assert excinfo.value.normalized_messages() == { + "metadata.yaml": {"type": ["Must be equal to Database."]} + } + + # must also validate datasets + broken_config = dataset_config.copy() + del broken_config["table_name"] + contents["metadata.yaml"] = yaml.safe_dump(database_metadata_config) + contents["datasets/imported_dataset.yaml"] = yaml.safe_dump(broken_config) + command = ImportDatabasesCommand(contents) + with pytest.raises(CommandInvalidError) as excinfo: + command.run() + assert str(excinfo.value) == "Error importing database" + assert excinfo.value.normalized_messages() == { + "datasets/imported_dataset.yaml": { + "table_name": ["Missing data for required field."], + } + } + + def test_import_v1_database_masked_password(self): + """Test that database imports with masked passwords are rejected""" + masked_database_config = database_config.copy() + masked_database_config[ + "sqlalchemy_uri" + ] = "postgresql://username:XXXXXXXXXX@host:12345/db" + contents = { + "metadata.yaml": yaml.safe_dump(database_metadata_config), + "databases/imported_database.yaml": yaml.safe_dump(masked_database_config), + } + command = ImportDatabasesCommand(contents) + with pytest.raises(CommandInvalidError) as excinfo: + command.run() + assert str(excinfo.value) == "Error importing database" + assert excinfo.value.normalized_messages() == { + "databases/imported_database.yaml": { + "_schema": ["Must provide a password for the database"] + } + } + + @patch("superset.databases.commands.importers.v1.import_dataset") + def test_import_v1_rollback(self, mock_import_dataset): + """Test than on an exception everything is rolled back""" + num_databases = db.session.query(Database).count() + + # raise an exception when importing the dataset, after the database has + # already been imported + mock_import_dataset.side_effect = Exception("A wild exception appears!") + + contents = { + 
"databases/imported_database.yaml": yaml.safe_dump(database_config), + "datasets/imported_dataset.yaml": yaml.safe_dump(dataset_config), + "metadata.yaml": yaml.safe_dump(database_metadata_config), + } + command = ImportDatabasesCommand(contents) + with pytest.raises(Exception) as excinfo: + command.run() + assert str(excinfo.value) == "Import database failed for an unknown reason" + + # verify that the database was not added + new_num_databases = db.session.query(Database).count() + assert new_num_databases == num_databases + + +class TestTestConnectionDatabaseCommand(SupersetTestCase): + @mock.patch("superset.databases.dao.Database._get_sqla_engine") + @mock.patch( + "superset.databases.commands.test_connection.event_logger.log_with_context" + ) + @mock.patch("superset.utils.core.g") + def test_connection_db_exception( + self, mock_g, mock_event_logger, mock_get_sqla_engine + ): + """Test to make sure event_logger is called when an exception is raised""" + database = get_example_database() + mock_g.user = security_manager.find_user("admin") + mock_get_sqla_engine.side_effect = Exception("An error has occurred!") + db_uri = database.sqlalchemy_uri_decrypted + json_payload = {"sqlalchemy_uri": db_uri} + command_without_db_name = TestConnectionDatabaseCommand(json_payload) + + with pytest.raises(DatabaseTestConnectionUnexpectedError) as excinfo: + command_without_db_name.run() + assert str(excinfo.value) == ( + "Unexpected error occurred, please check your logs for details" + ) + mock_event_logger.assert_called() + + @mock.patch("superset.databases.dao.Database._get_sqla_engine") + @mock.patch( + "superset.databases.commands.test_connection.event_logger.log_with_context" + ) + @mock.patch("superset.utils.core.g") + def test_connection_do_ping_exception( + self, mock_g, mock_event_logger, mock_get_sqla_engine + ): + """Test to make sure do_ping exceptions gets captured""" + database = get_example_database() + mock_g.user = security_manager.find_user("admin") + mock_get_sqla_engine.return_value.dialect.do_ping.side_effect = Exception( + "An error has occurred!" 
+ ) + db_uri = database.sqlalchemy_uri_decrypted + json_payload = {"sqlalchemy_uri": db_uri} + command_without_db_name = TestConnectionDatabaseCommand(json_payload) + + with pytest.raises(SupersetErrorsException) as excinfo: + command_without_db_name.run() + assert ( + excinfo.value.errors[0].error_type + == SupersetErrorType.GENERIC_DB_ENGINE_ERROR + ) + + @mock.patch("superset.databases.commands.test_connection.func_timeout") + @mock.patch( + "superset.databases.commands.test_connection.event_logger.log_with_context" + ) + @mock.patch("superset.utils.core.g") + def test_connection_do_ping_timeout( + self, mock_g, mock_event_logger, mock_func_timeout + ): + """Test to make sure do_ping exceptions gets captured""" + database = get_example_database() + mock_g.user = security_manager.find_user("admin") + mock_func_timeout.side_effect = FunctionTimedOut("Time out") + db_uri = database.sqlalchemy_uri_decrypted + json_payload = {"sqlalchemy_uri": db_uri} + command_without_db_name = TestConnectionDatabaseCommand(json_payload) + + with pytest.raises(SupersetTimeoutException) as excinfo: + command_without_db_name.run() + assert excinfo.value.status == 408 + assert ( + excinfo.value.error.error_type + == SupersetErrorType.CONNECTION_DATABASE_TIMEOUT + ) + + @mock.patch("superset.databases.dao.Database._get_sqla_engine") + @mock.patch( + "superset.databases.commands.test_connection.event_logger.log_with_context" + ) + @mock.patch("superset.utils.core.g") + def test_connection_superset_security_connection( + self, mock_g, mock_event_logger, mock_get_sqla_engine + ): + """Test to make sure event_logger is called when security + connection exc is raised""" + database = get_example_database() + mock_g.user = security_manager.find_user("admin") + mock_get_sqla_engine.side_effect = SupersetSecurityException( + SupersetError(error_type=500, message="test", level="info") + ) + db_uri = database.sqlalchemy_uri_decrypted + json_payload = {"sqlalchemy_uri": db_uri} + command_without_db_name = TestConnectionDatabaseCommand(json_payload) + + with pytest.raises(DatabaseSecurityUnsafeError) as excinfo: + command_without_db_name.run() + assert str(excinfo.value) == ("Stopped an unsafe database connection") + + mock_event_logger.assert_called() + + @mock.patch("superset.databases.dao.Database._get_sqla_engine") + @mock.patch( + "superset.databases.commands.test_connection.event_logger.log_with_context" + ) + @mock.patch("superset.utils.core.g") + def test_connection_db_api_exc( + self, mock_g, mock_event_logger, mock_get_sqla_engine + ): + """Test to make sure event_logger is called when DBAPIError is raised""" + database = get_example_database() + mock_g.user = security_manager.find_user("admin") + mock_get_sqla_engine.side_effect = DBAPIError( + statement="error", params={}, orig={} + ) + db_uri = database.sqlalchemy_uri_decrypted + json_payload = {"sqlalchemy_uri": db_uri} + command_without_db_name = TestConnectionDatabaseCommand(json_payload) + + with pytest.raises(SupersetErrorsException) as excinfo: + command_without_db_name.run() + assert str(excinfo.value) == ( + "Connection failed, please check your connection settings" + ) + + mock_event_logger.assert_called() + + +@mock.patch("superset.db_engine_specs.base.is_hostname_valid") +@mock.patch("superset.db_engine_specs.base.is_port_open") +@mock.patch("superset.databases.commands.validate.DatabaseDAO") +def test_validate(DatabaseDAO, is_port_open, is_hostname_valid, app_context): + """ + Test parameter validation. 
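# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch, not part of this patch. The do_ping tests
# above rely on MagicMock auto-creating attribute chains, so a side_effect can
# be configured several attributes deep before the engine factory is ever called.
from unittest import mock

engine_factory = mock.MagicMock()
engine_factory.return_value.dialect.do_ping.side_effect = Exception("boom")

engine = engine_factory()  # stands in for the patched _get_sqla_engine()
try:
    engine.dialect.do_ping(None)  # raises the configured exception
except Exception as exc:
    assert str(exc) == "boom"
# ---------------------------------------------------------------------------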
+ """ + is_hostname_valid.return_value = True + is_port_open.return_value = True + + payload = { + "engine": "postgresql", + "parameters": { + "host": "localhost", + "port": 5432, + "username": "superset", + "password": "superset", + "database": "test", + "query": {}, + }, + } + command = ValidateDatabaseParametersCommand(payload) + command.run() + + +@mock.patch("superset.db_engine_specs.base.is_hostname_valid") +@mock.patch("superset.db_engine_specs.base.is_port_open") +def test_validate_partial(is_port_open, is_hostname_valid, app_context): + """ + Test parameter validation when only some parameters are present. + """ + is_hostname_valid.return_value = True + is_port_open.return_value = True + + payload = { + "engine": "postgresql", + "parameters": { + "host": "localhost", + "port": 5432, + "username": "", + "password": "superset", + "database": "test", + "query": {}, + }, + } + command = ValidateDatabaseParametersCommand(payload) + with pytest.raises(SupersetErrorsException) as excinfo: + command.run() + assert excinfo.value.errors == [ + SupersetError( + message="One or more parameters are missing: username", + error_type=SupersetErrorType.CONNECTION_MISSING_PARAMETERS_ERROR, + level=ErrorLevel.WARNING, + extra={ + "missing": ["username"], + "issue_codes": [ + { + "code": 1018, + "message": "Issue 1018 - One or more parameters needed to configure a database are missing.", + } + ], + }, + ) + ] + + +@mock.patch("superset.db_engine_specs.base.is_hostname_valid") +def test_validate_partial_invalid_hostname(is_hostname_valid, app_context): + """ + Test parameter validation when only some parameters are present. + """ + is_hostname_valid.return_value = False + + payload = { + "engine": "postgresql", + "parameters": { + "host": "localhost", + "port": None, + "username": "", + "password": "", + "database": "", + "query": {}, + }, + } + command = ValidateDatabaseParametersCommand(payload) + with pytest.raises(SupersetErrorsException) as excinfo: + command.run() + assert excinfo.value.errors == [ + SupersetError( + message="One or more parameters are missing: database, port, username", + error_type=SupersetErrorType.CONNECTION_MISSING_PARAMETERS_ERROR, + level=ErrorLevel.WARNING, + extra={ + "missing": ["database", "port", "username"], + "issue_codes": [ + { + "code": 1018, + "message": "Issue 1018 - One or more parameters needed to configure a database are missing.", + } + ], + }, + ), + SupersetError( + message="The hostname provided can't be resolved.", + error_type=SupersetErrorType.CONNECTION_INVALID_HOSTNAME_ERROR, + level=ErrorLevel.ERROR, + extra={ + "invalid": ["host"], + "issue_codes": [ + { + "code": 1007, + "message": "Issue 1007 - The hostname provided can't be resolved.", + } + ], + }, + ), + ] + + +class TestTablesDatabaseCommand(SupersetTestCase): + @mock.patch("superset.databases.dao.DatabaseDAO.find_by_id") + def test_database_tables_list_with_unknown_database(self, mock_find_by_id): + mock_find_by_id.return_value = None + command = TablesDatabaseCommand(1, "test", False) + + with pytest.raises(DatabaseNotFoundError) as excinfo: + command.run() + assert str(excinfo.value) == ("Database not found.") + + @mock.patch("superset.databases.dao.DatabaseDAO.find_by_id") + @mock.patch("superset.security.manager.SupersetSecurityManager.can_access_database") + @mock.patch("superset.utils.core.g") + def test_database_tables_superset_exception( + self, mock_g, mock_can_access_database, mock_find_by_id + ): + database = get_example_database() + if database.backend == "mysql": + return + + 
mock_find_by_id.return_value = database + mock_can_access_database.side_effect = SupersetException("Test Error") + mock_g.user = security_manager.find_user("admin") + + command = TablesDatabaseCommand(database.id, "main", False) + with pytest.raises(SupersetException) as excinfo: + command.run() + assert str(excinfo.value) == "Test Error" + + @mock.patch("superset.databases.dao.DatabaseDAO.find_by_id") + @mock.patch("superset.security.manager.SupersetSecurityManager.can_access_database") + @mock.patch("superset.utils.core.g") + def test_database_tables_exception( + self, mock_g, mock_can_access_database, mock_find_by_id + ): + database = get_example_database() + mock_find_by_id.return_value = database + mock_can_access_database.side_effect = Exception("Test Error") + mock_g.user = security_manager.find_user("admin") + + command = TablesDatabaseCommand(database.id, "main", False) + with pytest.raises(DatabaseTablesUnexpectedError) as excinfo: + command.run() + assert ( + str(excinfo.value) + == "Unexpected error occurred, please check your logs for details" + ) + + @mock.patch("superset.databases.dao.DatabaseDAO.find_by_id") + @mock.patch("superset.security.manager.SupersetSecurityManager.can_access_database") + @mock.patch("superset.utils.core.g") + def test_database_tables_list_tables( + self, mock_g, mock_can_access_database, mock_find_by_id + ): + database = get_example_database() + mock_find_by_id.return_value = database + mock_can_access_database.return_value = True + mock_g.user = security_manager.find_user("admin") + + schema_name = self.default_schema_backend_map[database.backend] + if database.backend == "postgresql" or database.backend == "mysql": + return + + command = TablesDatabaseCommand(database.id, schema_name, False) + result = command.run() + + assert result["count"] > 0 + assert len(result["result"]) > 0 + assert len(result["result"]) == result["count"] diff --git a/tests/integration_tests/databases/ssh_tunnel/__init__.py b/tests/integration_tests/databases/ssh_tunnel/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/databases/ssh_tunnel/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/databases/ssh_tunnel/commands/__init__.py b/tests/integration_tests/databases/ssh_tunnel/commands/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/databases/ssh_tunnel/commands/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/databases/ssh_tunnel/commands/commands_tests.py b/tests/integration_tests/databases/ssh_tunnel/commands/commands_tests.py new file mode 100644 index 0000000000000..86c280b9bb1c4 --- /dev/null +++ b/tests/integration_tests/databases/ssh_tunnel/commands/commands_tests.py @@ -0,0 +1,78 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from unittest import mock, skip +from unittest.mock import patch + +import pytest + +from superset import security_manager +from superset.databases.ssh_tunnel.commands.create import CreateSSHTunnelCommand +from superset.databases.ssh_tunnel.commands.delete import DeleteSSHTunnelCommand +from superset.databases.ssh_tunnel.commands.exceptions import ( + SSHTunnelInvalidError, + SSHTunnelNotFoundError, +) +from superset.databases.ssh_tunnel.commands.update import UpdateSSHTunnelCommand +from tests.integration_tests.base_tests import SupersetTestCase + + +class TestCreateSSHTunnelCommand(SupersetTestCase): + @mock.patch("superset.utils.core.g") + def test_create_invalid_database_id(self, mock_g): + mock_g.user = security_manager.find_user("admin") + command = CreateSSHTunnelCommand( + None, + { + "server_address": "127.0.0.1", + "server_port": 5432, + "username": "test_user", + }, + ) + with pytest.raises(SSHTunnelInvalidError) as excinfo: + command.run() + assert str(excinfo.value) == ("SSH Tunnel parameters are invalid.") + + +class TestUpdateSSHTunnelCommand(SupersetTestCase): + @mock.patch("superset.utils.core.g") + def test_update_ssh_tunnel_not_found(self, mock_g): + mock_g.user = security_manager.find_user("admin") + # We have not created a SSH Tunnel yet so id = 1 is invalid + command = UpdateSSHTunnelCommand( + 1, + { + "server_address": "127.0.0.1", + "server_port": 5432, + "username": "test_user", + }, + ) + with pytest.raises(SSHTunnelNotFoundError) as excinfo: + command.run() + assert str(excinfo.value) == ("SSH Tunnel not found.") + + +class TestDeleteSSHTunnelCommand(SupersetTestCase): + @mock.patch("superset.utils.core.g") + @mock.patch("superset.databases.ssh_tunnel.commands.delete.is_feature_enabled") + def test_delete_ssh_tunnel_not_found(self, mock_g, mock_delete_is_feature_enabled): + mock_g.user = 
security_manager.find_user("admin") + mock_delete_is_feature_enabled.return_value = True + # We have not created a SSH Tunnel yet so id = 1 is invalid + command = DeleteSSHTunnelCommand(1) + with pytest.raises(SSHTunnelNotFoundError) as excinfo: + command.run() + assert str(excinfo.value) == ("SSH Tunnel not found.") diff --git a/tests/integration_tests/datasets/__init__.py b/tests/integration_tests/datasets/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/datasets/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/datasets/api_tests.py b/tests/integration_tests/datasets/api_tests.py new file mode 100644 index 0000000000000..8071902c455da --- /dev/null +++ b/tests/integration_tests/datasets/api_tests.py @@ -0,0 +1,2414 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
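# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch, not part of this patch. Stacked
# unittest.mock.patch decorators inject their mocks bottom-up: the decorator
# closest to the function supplies the first argument. In
# test_delete_ssh_tunnel_not_found above, is_feature_enabled is the innermost
# patch, so the parameter names there appear swapped relative to this order
# (harmless for that test, but worth noting).
import os.path
from unittest import mock

@mock.patch("os.path.exists")  # outermost -> injected last
@mock.patch("os.path.isdir")   # innermost -> injected first
def check(mock_isdir, mock_exists):
    assert os.path.isdir is mock_isdir
    assert os.path.exists is mock_exists

check()
# ---------------------------------------------------------------------------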
+"""Unit tests for Superset""" +import json +import unittest +from io import BytesIO +from typing import List, Optional +from unittest.mock import patch +from zipfile import is_zipfile, ZipFile + +import prison +import pytest +import yaml +from sqlalchemy.orm import joinedload +from sqlalchemy.sql import func + +from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn +from superset.dao.exceptions import ( + DAOCreateFailedError, + DAODeleteFailedError, + DAOUpdateFailedError, +) +from superset.datasets.commands.exceptions import DatasetCreateFailedError +from superset.datasets.models import Dataset +from superset.extensions import db, security_manager +from superset.models.core import Database +from superset.utils.core import backend, get_example_default_schema +from superset.utils.database import get_example_database, get_main_database +from superset.utils.dict_import_export import export_to_dict +from tests.integration_tests.base_tests import SupersetTestCase +from tests.integration_tests.conftest import CTAS_SCHEMA_NAME +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) +from tests.integration_tests.fixtures.energy_dashboard import ( + load_energy_table_data, + load_energy_table_with_slice, +) +from tests.integration_tests.fixtures.importexport import ( + database_config, + database_metadata_config, + dataset_config, + dataset_metadata_config, + dataset_ui_export, +) + + +class TestDatasetApi(SupersetTestCase): + + fixture_tables_names = ("ab_permission", "ab_permission_view", "ab_view_menu") + fixture_virtual_table_names = ("sql_virtual_dataset_1", "sql_virtual_dataset_2") + + @staticmethod + def insert_dataset( + table_name: str, + owners: List[int], + database: Database, + sql: Optional[str] = None, + schema: Optional[str] = None, + ) -> SqlaTable: + obj_owners = list() + for owner in owners: + user = db.session.query(security_manager.user_model).get(owner) + obj_owners.append(user) + table = SqlaTable( + table_name=table_name, + schema=schema, + owners=obj_owners, + database=database, + sql=sql, + ) + db.session.add(table) + db.session.commit() + table.fetch_metadata() + return table + + def insert_default_dataset(self): + return self.insert_dataset( + "ab_permission", [self.get_user("admin").id], get_main_database() + ) + + def get_fixture_datasets(self) -> List[SqlaTable]: + return ( + db.session.query(SqlaTable) + .options(joinedload(SqlaTable.database)) + .filter(SqlaTable.table_name.in_(self.fixture_tables_names)) + .all() + ) + + def get_fixture_virtual_datasets(self) -> List[SqlaTable]: + return ( + db.session.query(SqlaTable) + .filter(SqlaTable.table_name.in_(self.fixture_virtual_table_names)) + .all() + ) + + @pytest.fixture() + def create_virtual_datasets(self): + with self.create_app().app_context(): + if backend() == "sqlite": + yield + return + + datasets = [] + admin = self.get_user("admin") + main_db = get_main_database() + for table_name in self.fixture_virtual_table_names: + datasets.append( + self.insert_dataset( + table_name, + [admin.id], + main_db, + "SELECT * from ab_view_menu;", + ) + ) + yield datasets + + # rollback changes + for dataset in datasets: + db.session.delete(dataset) + db.session.commit() + + @pytest.fixture() + def create_datasets(self): + with self.create_app().app_context(): + if backend() == "sqlite": + yield + return + + datasets = [] + admin = self.get_user("admin") + main_db = get_main_database() + for tables_name in 
self.fixture_tables_names: + datasets.append(self.insert_dataset(tables_name, [admin.id], main_db)) + + yield datasets + + # rollback changes + for dataset in datasets: + db.session.delete(dataset) + db.session.commit() + + @staticmethod + def get_energy_usage_dataset(): + example_db = get_example_database() + return ( + db.session.query(SqlaTable) + .filter_by( + database=example_db, + table_name="energy_usage", + schema=get_example_default_schema(), + ) + .one() + ) + + def create_dataset_import(self) -> BytesIO: + buf = BytesIO() + with ZipFile(buf, "w") as bundle: + with bundle.open("dataset_export/metadata.yaml", "w") as fp: + fp.write(yaml.safe_dump(dataset_metadata_config).encode()) + with bundle.open( + "dataset_export/databases/imported_database.yaml", "w" + ) as fp: + fp.write(yaml.safe_dump(database_config).encode()) + with bundle.open( + "dataset_export/datasets/imported_dataset.yaml", "w" + ) as fp: + fp.write(yaml.safe_dump(dataset_config).encode()) + buf.seek(0) + return buf + + def test_get_dataset_list(self): + """ + Dataset API: Test get dataset list + """ + if backend() == "sqlite": + return + + example_db = get_example_database() + self.login(username="admin") + arguments = { + "filters": [ + {"col": "database", "opr": "rel_o_m", "value": f"{example_db.id}"}, + {"col": "table_name", "opr": "eq", "value": "birth_names"}, + ] + } + uri = f"api/v1/dataset/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + assert rv.status_code == 200 + response = json.loads(rv.data.decode("utf-8")) + assert response["count"] == 1 + expected_columns = [ + "changed_by", + "changed_by_name", + "changed_by_url", + "changed_on_delta_humanized", + "changed_on_utc", + "database", + "datasource_type", + "default_endpoint", + "description", + "explore_url", + "extra", + "id", + "kind", + "owners", + "schema", + "sql", + "table_name", + ] + assert sorted(list(response["result"][0].keys())) == expected_columns + + def test_get_dataset_list_gamma(self): + """ + Dataset API: Test get dataset list gamma + """ + if backend() == "sqlite": + return + + self.login(username="gamma") + uri = "api/v1/dataset/" + rv = self.get_assert_metric(uri, "get_list") + assert rv.status_code == 200 + response = json.loads(rv.data.decode("utf-8")) + assert response["result"] == [] + + def test_get_dataset_list_gamma_has_database_access(self): + """ + Dataset API: Test get dataset list with database access + """ + if backend() == "sqlite": + return + + self.login(username="gamma") + + # create new dataset + main_db = get_main_database() + dataset = self.insert_dataset("ab_user", [], main_db) + + # make sure dataset is not visible due to missing perms + uri = "api/v1/dataset/" + rv = self.get_assert_metric(uri, "get_list") + assert rv.status_code == 200 + response = json.loads(rv.data.decode("utf-8")) + + assert response["count"] == 0 + + # give database access to main db + main_db_pvm = security_manager.find_permission_view_menu( + "database_access", main_db.perm + ) + gamma_role = security_manager.find_role("Gamma") + gamma_role.permissions.append(main_db_pvm) + db.session.commit() + + # make sure dataset is now visible + uri = "api/v1/dataset/" + rv = self.get_assert_metric(uri, "get_list") + assert rv.status_code == 200 + response = json.loads(rv.data.decode("utf-8")) + + tables = {tbl["table_name"] for tbl in response["result"]} + assert tables == {"ab_user"} + + # revert gamma permission + gamma_role.permissions.remove(main_db_pvm) + db.session.delete(dataset) + db.session.commit() + + def 
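# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch, not part of this patch. The dataset list
# endpoints above encode their query arguments with prison (a Rison
# implementation); this is roughly the query string produced for a table_name
# filter, assuming prison's default formatting.
import prison

args = {"filters": [{"col": "table_name", "opr": "eq", "value": "birth_names"}]}
q = prison.dumps(args)
# q == "(filters:!((col:table_name,opr:eq,value:birth_names)))"
uri = f"api/v1/dataset/?q={q}"
# ---------------------------------------------------------------------------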
test_get_dataset_related_database_gamma(self): + """ + Dataset API: Test get dataset related databases gamma + """ + if backend() == "sqlite": + return + + # Add main database access to gamma role + main_db = get_main_database() + main_db_pvm = security_manager.find_permission_view_menu( + "database_access", main_db.perm + ) + gamma_role = security_manager.find_role("Gamma") + gamma_role.permissions.append(main_db_pvm) + db.session.commit() + + self.login(username="gamma") + uri = "api/v1/dataset/related/database" + rv = self.client.get(uri) + assert rv.status_code == 200 + response = json.loads(rv.data.decode("utf-8")) + + assert response["count"] == 1 + main_db = get_main_database() + assert filter(lambda x: x.text == main_db, response["result"]) != [] + + # revert gamma permission + gamma_role.permissions.remove(main_db_pvm) + db.session.commit() + + @pytest.mark.usefixtures("load_energy_table_with_slice") + def test_get_dataset_item(self): + """ + Dataset API: Test get dataset item + """ + if backend() == "sqlite": + return + + table = self.get_energy_usage_dataset() + main_db = get_main_database() + self.login(username="admin") + uri = f"api/v1/dataset/{table.id}" + rv = self.get_assert_metric(uri, "get") + assert rv.status_code == 200 + response = json.loads(rv.data.decode("utf-8")) + expected_result = { + "cache_timeout": None, + "database": { + "backend": main_db.backend, + "database_name": "examples", + "id": 1, + }, + "default_endpoint": None, + "description": "Energy consumption", + "extra": None, + "fetch_values_predicate": None, + "filter_select_enabled": False, + "is_sqllab_view": False, + "kind": "physical", + "main_dttm_col": None, + "offset": 0, + "owners": [], + "schema": get_example_default_schema(), + "sql": None, + "table_name": "energy_usage", + "template_params": None, + } + if response["result"]["database"]["backend"] not in ("presto", "hive"): + assert { + k: v for k, v in response["result"].items() if k in expected_result + } == expected_result + assert len(response["result"]["columns"]) == 3 + assert len(response["result"]["metrics"]) == 2 + + def test_get_dataset_distinct_schema(self): + """ + Dataset API: Test get dataset distinct schema + """ + if backend() == "sqlite": + return + + def pg_test_query_parameter(query_parameter, expected_response): + uri = f"api/v1/dataset/distinct/schema?q={prison.dumps(query_parameter)}" + rv = self.client.get(uri) + response = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 200 + assert response == expected_response + + example_db = get_example_database() + datasets = [] + if example_db.backend == "postgresql": + datasets.append( + self.insert_dataset( + "ab_permission", [], get_main_database(), schema="public" + ) + ) + datasets.append( + self.insert_dataset( + "columns", + [], + get_main_database(), + schema="information_schema", + ) + ) + all_datasets = db.session.query(SqlaTable).all() + schema_values = sorted( + set( + [ + dataset.schema + for dataset in all_datasets + if dataset.schema is not None + ] + ) + ) + expected_response = { + "count": len(schema_values), + "result": [{"text": val, "value": val} for val in schema_values], + } + self.login(username="admin") + uri = "api/v1/dataset/distinct/schema" + rv = self.client.get(uri) + response = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 200 + assert response == expected_response + + # Test filter + query_parameter = {"filter": "inf"} + pg_test_query_parameter( + query_parameter, + { + "count": 1, + "result": [ + {"text": 
"information_schema", "value": "information_schema"} + ], + }, + ) + + query_parameter = {"page": 0, "page_size": 1} + pg_test_query_parameter( + query_parameter, + { + "count": len(schema_values), + "result": [expected_response["result"][0]], + }, + ) + + for dataset in datasets: + db.session.delete(dataset) + db.session.commit() + + def test_get_dataset_distinct_not_allowed(self): + """ + Dataset API: Test get dataset distinct not allowed + """ + if backend() == "sqlite": + return + + self.login(username="admin") + uri = "api/v1/dataset/distinct/table_name" + rv = self.client.get(uri) + assert rv.status_code == 404 + + def test_get_dataset_distinct_gamma(self): + """ + Dataset API: Test get dataset distinct with gamma + """ + if backend() == "sqlite": + return + + dataset = self.insert_default_dataset() + + self.login(username="gamma") + uri = "api/v1/dataset/distinct/schema" + rv = self.client.get(uri) + assert rv.status_code == 200 + response = json.loads(rv.data.decode("utf-8")) + assert response["count"] == 0 + assert response["result"] == [] + + db.session.delete(dataset) + db.session.commit() + + def test_get_dataset_info(self): + """ + Dataset API: Test get dataset info + """ + if backend() == "sqlite": + return + + self.login(username="admin") + uri = "api/v1/dataset/_info" + rv = self.get_assert_metric(uri, "info") + assert rv.status_code == 200 + + def test_info_security_dataset(self): + """ + Dataset API: Test info security + """ + if backend() == "sqlite": + return + + self.login(username="admin") + params = {"keys": ["permissions"]} + uri = f"api/v1/dataset/_info?q={prison.dumps(params)}" + rv = self.get_assert_metric(uri, "info") + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 200 + assert set(data["permissions"]) == { + "can_read", + "can_write", + "can_export", + "can_duplicate", + "can_get_or_create_dataset", + } + + def test_create_dataset_item(self): + """ + Dataset API: Test create dataset item + """ + if backend() == "sqlite": + return + + main_db = get_main_database() + self.login(username="admin") + table_data = { + "database": main_db.id, + "schema": "", + "table_name": "ab_permission", + } + uri = "api/v1/dataset/" + rv = self.post_assert_metric(uri, table_data, "post") + assert rv.status_code == 201 + data = json.loads(rv.data.decode("utf-8")) + table_id = data.get("id") + model = db.session.query(SqlaTable).get(table_id) + assert model.table_name == table_data["table_name"] + assert model.database_id == table_data["database"] + + # Assert that columns were created + columns = ( + db.session.query(TableColumn) + .filter_by(table_id=table_id) + .order_by("column_name") + .all() + ) + assert columns[0].column_name == "id" + assert columns[1].column_name == "name" + + # Assert that metrics were created + columns = ( + db.session.query(SqlMetric) + .filter_by(table_id=table_id) + .order_by("metric_name") + .all() + ) + assert columns[0].expression == "COUNT(*)" + + db.session.delete(model) + db.session.commit() + + def test_create_dataset_item_gamma(self): + """ + Dataset API: Test create dataset item gamma + """ + if backend() == "sqlite": + return + + self.login(username="gamma") + main_db = get_main_database() + table_data = { + "database": main_db.id, + "schema": "", + "table_name": "ab_permission", + } + uri = "api/v1/dataset/" + rv = self.client.post(uri, json=table_data) + assert rv.status_code == 403 + + def test_create_dataset_item_owner(self): + """ + Dataset API: Test create item owner + """ + if backend() == "sqlite": + return + + 
main_db = get_main_database() + self.login(username="alpha") + admin = self.get_user("admin") + alpha = self.get_user("alpha") + + table_data = { + "database": main_db.id, + "schema": "", + "table_name": "ab_permission", + "owners": [admin.id], + } + uri = "api/v1/dataset/" + rv = self.post_assert_metric(uri, table_data, "post") + assert rv.status_code == 201 + data = json.loads(rv.data.decode("utf-8")) + model = db.session.query(SqlaTable).get(data.get("id")) + assert admin in model.owners + assert alpha in model.owners + db.session.delete(model) + db.session.commit() + + def test_create_dataset_item_owners_invalid(self): + """ + Dataset API: Test create dataset item owner invalid + """ + if backend() == "sqlite": + return + + admin = self.get_user("admin") + main_db = get_main_database() + self.login(username="admin") + table_data = { + "database": main_db.id, + "schema": "", + "table_name": "ab_permission", + "owners": [admin.id, 1000], + } + uri = "api/v1/dataset/" + rv = self.post_assert_metric(uri, table_data, "post") + assert rv.status_code == 422 + data = json.loads(rv.data.decode("utf-8")) + expected_result = {"message": {"owners": ["Owners are invalid"]}} + assert data == expected_result + + @pytest.mark.usefixtures("load_energy_table_with_slice") + def test_create_dataset_validate_uniqueness(self): + """ + Dataset API: Test create dataset validate table uniqueness + """ + if backend() == "sqlite": + return + + schema = get_example_default_schema() + energy_usage_ds = self.get_energy_usage_dataset() + self.login(username="admin") + table_data = { + "database": energy_usage_ds.database_id, + "table_name": energy_usage_ds.table_name, + } + if schema: + table_data["schema"] = schema + rv = self.post_assert_metric("/api/v1/dataset/", table_data, "post") + assert rv.status_code == 422 + data = json.loads(rv.data.decode("utf-8")) + assert data == { + "message": {"table_name": ["Dataset energy_usage already exists"]} + } + + @pytest.mark.usefixtures("load_energy_table_with_slice") + def test_create_dataset_with_sql_validate_uniqueness(self): + """ + Dataset API: Test create dataset with sql + """ + if backend() == "sqlite": + return + + schema = get_example_default_schema() + energy_usage_ds = self.get_energy_usage_dataset() + self.login(username="admin") + table_data = { + "database": energy_usage_ds.database_id, + "table_name": energy_usage_ds.table_name, + "sql": "select * from energy_usage", + } + if schema: + table_data["schema"] = schema + rv = self.post_assert_metric("/api/v1/dataset/", table_data, "post") + assert rv.status_code == 422 + data = json.loads(rv.data.decode("utf-8")) + assert data == { + "message": {"table_name": ["Dataset energy_usage already exists"]} + } + + @pytest.mark.usefixtures("load_energy_table_with_slice") + def test_create_dataset_with_sql(self): + """ + Dataset API: Test create dataset with sql + """ + if backend() == "sqlite": + return + + schema = get_example_default_schema() + energy_usage_ds = self.get_energy_usage_dataset() + self.login(username="alpha") + admin = self.get_user("admin") + alpha = self.get_user("alpha") + table_data = { + "database": energy_usage_ds.database_id, + "table_name": "energy_usage_virtual", + "sql": "select * from energy_usage", + "owners": [admin.id], + } + if schema: + table_data["schema"] = schema + rv = self.post_assert_metric("/api/v1/dataset/", table_data, "post") + assert rv.status_code == 201 + data = json.loads(rv.data.decode("utf-8")) + model = db.session.query(SqlaTable).get(data.get("id")) + assert admin in 
model.owners + assert alpha in model.owners + db.session.delete(model) + db.session.commit() + + @unittest.skip("test is failing stochastically") + def test_create_dataset_same_name_different_schema(self): + if backend() == "sqlite": + # sqlite doesn't support schemas + return + + example_db = get_example_database() + with example_db.get_sqla_engine_with_context() as engine: + engine.execute( + f"CREATE TABLE {CTAS_SCHEMA_NAME}.birth_names AS SELECT 2 as two" + ) + + self.login(username="admin") + table_data = { + "database": example_db.id, + "schema": CTAS_SCHEMA_NAME, + "table_name": "birth_names", + } + + uri = "api/v1/dataset/" + rv = self.post_assert_metric(uri, table_data, "post") + assert rv.status_code == 201 + + # cleanup + data = json.loads(rv.data.decode("utf-8")) + uri = f'api/v1/dataset/{data.get("id")}' + rv = self.client.delete(uri) + assert rv.status_code == 200 + with example_db.get_sqla_engine_with_context() as engine: + engine.execute(f"DROP TABLE {CTAS_SCHEMA_NAME}.birth_names") + + def test_create_dataset_validate_database(self): + """ + Dataset API: Test create dataset validate database exists + """ + if backend() == "sqlite": + return + + self.login(username="admin") + dataset_data = {"database": 1000, "schema": "", "table_name": "birth_names"} + uri = "api/v1/dataset/" + rv = self.post_assert_metric(uri, dataset_data, "post") + assert rv.status_code == 422 + data = json.loads(rv.data.decode("utf-8")) + assert data == {"message": {"database": ["Database does not exist"]}} + + def test_create_dataset_validate_tables_exists(self): + """ + Dataset API: Test create dataset validate table exists + """ + if backend() == "sqlite": + return + + example_db = get_example_database() + self.login(username="admin") + table_data = { + "database": example_db.id, + "schema": "", + "table_name": "does_not_exist", + } + uri = "api/v1/dataset/" + rv = self.post_assert_metric(uri, table_data, "post") + assert rv.status_code == 422 + + @patch("superset.models.core.Database.get_columns") + @patch("superset.models.core.Database.has_table_by_name") + @patch("superset.models.core.Database.has_view_by_name") + @patch("superset.models.core.Database.get_table") + def test_create_dataset_validate_view_exists( + self, + mock_get_table, + mock_has_table_by_name, + mock_has_view_by_name, + mock_get_columns, + ): + """ + Dataset API: Test create dataset validate view exists + """ + if backend() == "sqlite": + return + + mock_get_columns.return_value = [ + { + "name": "col", + "type": "VARCHAR", + "type_generic": None, + "is_dttm": None, + } + ] + + mock_has_table_by_name.return_value = False + mock_has_view_by_name.return_value = True + mock_get_table.return_value = None + + example_db = get_example_database() + with example_db.get_sqla_engine_with_context() as engine: + engine = engine + dialect = engine.dialect + + with patch.object( + dialect, "get_view_names", wraps=dialect.get_view_names + ) as patch_get_view_names: + patch_get_view_names.return_value = {"test_case_view"} + + self.login(username="admin") + table_data = { + "database": example_db.id, + "schema": "", + "table_name": "test_case_view", + } + + uri = "api/v1/dataset/" + rv = self.post_assert_metric(uri, table_data, "post") + assert rv.status_code == 201 + + # cleanup + data = json.loads(rv.data.decode("utf-8")) + uri = f'api/v1/dataset/{data.get("id")}' + rv = self.client.delete(uri) + assert rv.status_code == 200 + + @patch("superset.datasets.dao.DatasetDAO.create") + def test_create_dataset_sqlalchemy_error(self, mock_dao_create): 
+ """ + Dataset API: Test create dataset sqlalchemy error + """ + if backend() == "sqlite": + return + + mock_dao_create.side_effect = DAOCreateFailedError() + self.login(username="admin") + main_db = get_main_database() + dataset_data = { + "database": main_db.id, + "schema": "", + "table_name": "ab_permission", + } + uri = "api/v1/dataset/" + rv = self.post_assert_metric(uri, dataset_data, "post") + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 422 + assert data == {"message": "Dataset could not be created."} + + def test_update_dataset_item(self): + """ + Dataset API: Test update dataset item + """ + if backend() == "sqlite": + return + + dataset = self.insert_default_dataset() + self.login(username="admin") + dataset_data = {"description": "changed_description"} + uri = f"api/v1/dataset/{dataset.id}" + rv = self.put_assert_metric(uri, dataset_data, "put") + assert rv.status_code == 200 + model = db.session.query(SqlaTable).get(dataset.id) + assert model.description == dataset_data["description"] + + db.session.delete(dataset) + db.session.commit() + + def test_update_dataset_item_w_override_columns(self): + """ + Dataset API: Test update dataset with override columns + """ + if backend() == "sqlite": + return + + # Add default dataset + dataset = self.insert_default_dataset() + self.login(username="admin") + new_col_dict = { + "column_name": "new_col", + "description": "description", + "expression": "expression", + "type": "INTEGER", + "advanced_data_type": "ADVANCED_DATA_TYPE", + "verbose_name": "New Col", + } + dataset_data = { + "columns": [new_col_dict], + "description": "changed description", + } + uri = f"api/v1/dataset/{dataset.id}?override_columns=true" + rv = self.put_assert_metric(uri, dataset_data, "put") + assert rv.status_code == 200 + + columns = db.session.query(TableColumn).filter_by(table_id=dataset.id).all() + + assert new_col_dict["column_name"] in [col.column_name for col in columns] + assert new_col_dict["description"] in [col.description for col in columns] + assert new_col_dict["expression"] in [col.expression for col in columns] + assert new_col_dict["type"] in [col.type for col in columns] + assert new_col_dict["advanced_data_type"] in [ + col.advanced_data_type for col in columns + ] + + db.session.delete(dataset) + db.session.commit() + + def test_update_dataset_item_w_override_columns_same_columns(self): + """ + Dataset API: Test update dataset with override columns + """ + if backend() == "sqlite": + return + + # Add default dataset + main_db = get_main_database() + dataset = self.insert_default_dataset() + prev_col_len = len(dataset.columns) + + cols = [ + { + "column_name": c.column_name, + "description": c.description, + "expression": c.expression, + "type": c.type, + "advanced_data_type": c.advanced_data_type, + "verbose_name": c.verbose_name, + } + for c in dataset.columns + ] + + cols.append( + { + "column_name": "new_col", + "description": "description", + "expression": "expression", + "type": "INTEGER", + "advanced_data_type": "ADVANCED_DATA_TYPE", + "verbose_name": "New Col", + } + ) + + self.login(username="admin") + dataset_data = { + "columns": cols, + } + uri = f"api/v1/dataset/{dataset.id}?override_columns=true" + rv = self.put_assert_metric(uri, dataset_data, "put") + + assert rv.status_code == 200 + + columns = db.session.query(TableColumn).filter_by(table_id=dataset.id).all() + assert len(columns) != prev_col_len + assert len(columns) == 3 + db.session.delete(dataset) + db.session.commit() + + def 
test_update_dataset_create_column_and_metric(self): + """ + Dataset API: Test update dataset create column + """ + if backend() == "sqlite": + return + + # create example dataset by Command + dataset = self.insert_default_dataset() + + new_column_data = { + "column_name": "new_col", + "description": "description", + "expression": "expression", + "extra": '{"abc":123}', + "type": "INTEGER", + "advanced_data_type": "ADVANCED_DATA_TYPE", + "verbose_name": "New Col", + "uuid": "c626b60a-3fb2-4e99-9f01-53aca0b17166", + } + new_metric_data = { + "d3format": None, + "description": None, + "expression": "COUNT(*)", + "extra": '{"abc":123}', + "metric_name": "my_count", + "metric_type": None, + "verbose_name": "My Count", + "warning_text": None, + "uuid": "051b5e72-4e6e-4860-b12b-4d530009dd2a", + } + uri = f"api/v1/dataset/{dataset.id}" + + # Get current cols and metrics and append the new ones + self.login(username="admin") + rv = self.get_assert_metric(uri, "get") + data = json.loads(rv.data.decode("utf-8")) + + for column in data["result"]["columns"]: + column.pop("changed_on", None) + column.pop("created_on", None) + column.pop("type_generic", None) + data["result"]["columns"].append(new_column_data) + + for metric in data["result"]["metrics"]: + metric.pop("changed_on", None) + metric.pop("created_on", None) + metric.pop("type_generic", None) + + data["result"]["metrics"].append(new_metric_data) + rv = self.client.put( + uri, + json={ + "columns": data["result"]["columns"], + "metrics": data["result"]["metrics"], + }, + ) + + assert rv.status_code == 200 + + columns = ( + db.session.query(TableColumn) + .filter_by(table_id=dataset.id) + .order_by("column_name") + .all() + ) + + assert columns[0].column_name == "id" + assert columns[1].column_name == "name" + assert columns[2].column_name == new_column_data["column_name"] + assert columns[2].description == new_column_data["description"] + assert columns[2].expression == new_column_data["expression"] + assert columns[2].type == new_column_data["type"] + assert columns[2].advanced_data_type == new_column_data["advanced_data_type"] + assert columns[2].extra == new_column_data["extra"] + assert columns[2].verbose_name == new_column_data["verbose_name"] + assert str(columns[2].uuid) == new_column_data["uuid"] + + metrics = ( + db.session.query(SqlMetric) + .filter_by(table_id=dataset.id) + .order_by("metric_name") + .all() + ) + assert metrics[0].metric_name == "count" + assert metrics[1].metric_name == "my_count" + assert metrics[1].d3format == new_metric_data["d3format"] + assert metrics[1].description == new_metric_data["description"] + assert metrics[1].expression == new_metric_data["expression"] + assert metrics[1].extra == new_metric_data["extra"] + assert metrics[1].metric_type == new_metric_data["metric_type"] + assert metrics[1].verbose_name == new_metric_data["verbose_name"] + assert metrics[1].warning_text == new_metric_data["warning_text"] + assert str(metrics[1].uuid) == new_metric_data["uuid"] + + db.session.delete(dataset) + db.session.commit() + + def test_update_dataset_delete_column(self): + """ + Dataset API: Test update dataset delete column + """ + if backend() == "sqlite": + return + + # create example dataset by Command + dataset = self.insert_default_dataset() + + new_column_data = { + "column_name": "new_col", + "description": "description", + "expression": "expression", + "type": "INTEGER", + "advanced_data_type": "ADVANCED_DATA_TYPE", + "verbose_name": "New Col", + } + uri = f"api/v1/dataset/{dataset.id}" + # Get current 
cols and append the new column + self.login(username="admin") + rv = self.get_assert_metric(uri, "get") + data = json.loads(rv.data.decode("utf-8")) + + for column in data["result"]["columns"]: + column.pop("changed_on", None) + column.pop("created_on", None) + column.pop("type_generic", None) + + data["result"]["columns"].append(new_column_data) + rv = self.client.put(uri, json={"columns": data["result"]["columns"]}) + + assert rv.status_code == 200 + + # Remove this new column + data["result"]["columns"].remove(new_column_data) + rv = self.client.put(uri, json={"columns": data["result"]["columns"]}) + assert rv.status_code == 200 + + columns = ( + db.session.query(TableColumn) + .filter_by(table_id=dataset.id) + .order_by("column_name") + .all() + ) + assert columns[0].column_name == "id" + assert columns[1].column_name == "name" + assert len(columns) == 2 + + db.session.delete(dataset) + db.session.commit() + + def test_update_dataset_update_column(self): + """ + Dataset API: Test update dataset columns + """ + if backend() == "sqlite": + return + + dataset = self.insert_default_dataset() + + self.login(username="admin") + uri = f"api/v1/dataset/{dataset.id}" + # Get current cols and alter one + rv = self.get_assert_metric(uri, "get") + resp_columns = json.loads(rv.data.decode("utf-8"))["result"]["columns"] + for column in resp_columns: + column.pop("changed_on", None) + column.pop("created_on", None) + column.pop("type_generic", None) + + resp_columns[0]["groupby"] = False + resp_columns[0]["filterable"] = False + rv = self.client.put(uri, json={"columns": resp_columns}) + assert rv.status_code == 200 + columns = ( + db.session.query(TableColumn) + .filter_by(table_id=dataset.id) + .order_by("column_name") + .all() + ) + assert columns[0].column_name == "id" + assert columns[1].column_name == "name" + # TODO(bkyryliuk): find the reason why update is failing for the presto database + if get_example_database().backend != "presto": + assert columns[0].groupby is False + assert columns[0].filterable is False + + db.session.delete(dataset) + db.session.commit() + + def test_update_dataset_delete_metric(self): + """ + Dataset API: Test update dataset delete metric + """ + if backend() == "sqlite": + return + + dataset = self.insert_default_dataset() + metrics_query = ( + db.session.query(SqlMetric) + .filter_by(table_id=dataset.id) + .order_by("metric_name") + ) + + self.login(username="admin") + uri = f"api/v1/dataset/{dataset.id}" + data = { + "metrics": [ + {"metric_name": "metric1", "expression": "COUNT(*)"}, + {"metric_name": "metric2", "expression": "DIFF_COUNT(*)"}, + ] + } + rv = self.put_assert_metric(uri, data, "put") + assert rv.status_code == 200 + + metrics = metrics_query.all() + assert len(metrics) == 2 + + data = { + "metrics": [ + { + "id": metrics[0].id, + "metric_name": "metric1", + "expression": "COUNT(*)", + }, + ] + } + rv = self.put_assert_metric(uri, data, "put") + assert rv.status_code == 200 + + metrics = metrics_query.all() + assert len(metrics) == 1 + + db.session.delete(dataset) + db.session.commit() + + def test_update_dataset_update_column_uniqueness(self): + """ + Dataset API: Test update dataset columns uniqueness + """ + if backend() == "sqlite": + return + + dataset = self.insert_default_dataset() + + self.login(username="admin") + uri = f"api/v1/dataset/{dataset.id}" + # try to insert a new column ID that already exists + data = {"columns": [{"column_name": "id", "type": "INTEGER"}]} + rv = self.put_assert_metric(uri, data, "put") + assert rv.status_code == 
422 + data = json.loads(rv.data.decode("utf-8")) + expected_result = { + "message": {"columns": ["One or more columns already exist"]} + } + assert data == expected_result + db.session.delete(dataset) + db.session.commit() + + def test_update_dataset_update_metric_uniqueness(self): + """ + Dataset API: Test update dataset metric uniqueness + """ + if backend() == "sqlite": + return + + dataset = self.insert_default_dataset() + + self.login(username="admin") + uri = f"api/v1/dataset/{dataset.id}" + # try to insert a new column ID that already exists + data = {"metrics": [{"metric_name": "count", "expression": "COUNT(*)"}]} + rv = self.put_assert_metric(uri, data, "put") + assert rv.status_code == 422 + data = json.loads(rv.data.decode("utf-8")) + expected_result = { + "message": {"metrics": ["One or more metrics already exist"]} + } + assert data == expected_result + db.session.delete(dataset) + db.session.commit() + + def test_update_dataset_update_column_duplicate(self): + """ + Dataset API: Test update dataset columns duplicate + """ + if backend() == "sqlite": + return + + dataset = self.insert_default_dataset() + + self.login(username="admin") + uri = f"api/v1/dataset/{dataset.id}" + # try to insert a new column ID that already exists + data = { + "columns": [ + {"column_name": "id", "type": "INTEGER"}, + {"column_name": "id", "type": "VARCHAR"}, + ] + } + rv = self.put_assert_metric(uri, data, "put") + assert rv.status_code == 422 + data = json.loads(rv.data.decode("utf-8")) + expected_result = { + "message": {"columns": ["One or more columns are duplicated"]} + } + assert data == expected_result + db.session.delete(dataset) + db.session.commit() + + def test_update_dataset_update_metric_duplicate(self): + """ + Dataset API: Test update dataset metric duplicate + """ + if backend() == "sqlite": + return + + dataset = self.insert_default_dataset() + + self.login(username="admin") + uri = f"api/v1/dataset/{dataset.id}" + # try to insert a new column ID that already exists + data = { + "metrics": [ + {"metric_name": "dup", "expression": "COUNT(*)"}, + {"metric_name": "dup", "expression": "DIFF_COUNT(*)"}, + ] + } + rv = self.put_assert_metric(uri, data, "put") + assert rv.status_code == 422 + data = json.loads(rv.data.decode("utf-8")) + expected_result = { + "message": {"metrics": ["One or more metrics are duplicated"]} + } + assert data == expected_result + db.session.delete(dataset) + db.session.commit() + + def test_update_dataset_item_gamma(self): + """ + Dataset API: Test update dataset item gamma + """ + if backend() == "sqlite": + return + + dataset = self.insert_default_dataset() + self.login(username="gamma") + table_data = {"description": "changed_description"} + uri = f"api/v1/dataset/{dataset.id}" + rv = self.client.put(uri, json=table_data) + assert rv.status_code == 403 + db.session.delete(dataset) + db.session.commit() + + def test_update_dataset_item_not_owned(self): + """ + Dataset API: Test update dataset item not owned + """ + if backend() == "sqlite": + return + + dataset = self.insert_default_dataset() + self.login(username="alpha") + table_data = {"description": "changed_description"} + uri = f"api/v1/dataset/{dataset.id}" + rv = self.put_assert_metric(uri, table_data, "put") + assert rv.status_code == 403 + db.session.delete(dataset) + db.session.commit() + + def test_update_dataset_item_owners_invalid(self): + """ + Dataset API: Test update dataset item owner invalid + """ + if backend() == "sqlite": + return + + dataset = self.insert_default_dataset() + 
self.login(username="admin") + table_data = {"description": "changed_description", "owners": [1000]} + uri = f"api/v1/dataset/{dataset.id}" + rv = self.put_assert_metric(uri, table_data, "put") + assert rv.status_code == 422 + db.session.delete(dataset) + db.session.commit() + + def test_update_dataset_item_uniqueness(self): + """ + Dataset API: Test update dataset uniqueness + """ + if backend() == "sqlite": + return + + dataset = self.insert_default_dataset() + self.login(username="admin") + ab_user = self.insert_dataset( + "ab_user", [self.get_user("admin").id], get_main_database() + ) + table_data = {"table_name": "ab_user"} + uri = f"api/v1/dataset/{dataset.id}" + rv = self.put_assert_metric(uri, table_data, "put") + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 422 + expected_response = { + "message": {"table_name": ["Dataset ab_user already exists"]} + } + assert data == expected_response + db.session.delete(dataset) + db.session.delete(ab_user) + db.session.commit() + + @patch("superset.datasets.dao.DatasetDAO.update") + def test_update_dataset_sqlalchemy_error(self, mock_dao_update): + """ + Dataset API: Test update dataset sqlalchemy error + """ + if backend() == "sqlite": + return + + mock_dao_update.side_effect = DAOUpdateFailedError() + + dataset = self.insert_default_dataset() + self.login(username="admin") + table_data = {"description": "changed_description"} + uri = f"api/v1/dataset/{dataset.id}" + rv = self.client.put(uri, json=table_data) + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 422 + assert data == {"message": "Dataset could not be updated."} + + db.session.delete(dataset) + db.session.commit() + + def test_delete_dataset_item(self): + """ + Dataset API: Test delete dataset item + """ + if backend() == "sqlite": + return + + dataset = self.insert_default_dataset() + view_menu = security_manager.find_view_menu(dataset.get_perm()) + assert view_menu is not None + view_menu_id = view_menu.id + self.login(username="admin") + uri = f"api/v1/dataset/{dataset.id}" + rv = self.client.delete(uri) + assert rv.status_code == 200 + non_view_menu = db.session.query(security_manager.viewmenu_model).get( + view_menu_id + ) + assert non_view_menu is None + + def test_delete_item_dataset_not_owned(self): + """ + Dataset API: Test delete item not owned + """ + if backend() == "sqlite": + return + + dataset = self.insert_default_dataset() + self.login(username="alpha") + uri = f"api/v1/dataset/{dataset.id}" + rv = self.delete_assert_metric(uri, "delete") + assert rv.status_code == 403 + db.session.delete(dataset) + db.session.commit() + + def test_delete_dataset_item_not_authorized(self): + """ + Dataset API: Test delete item not authorized + """ + if backend() == "sqlite": + return + + dataset = self.insert_default_dataset() + self.login(username="gamma") + uri = f"api/v1/dataset/{dataset.id}" + rv = self.client.delete(uri) + assert rv.status_code == 403 + db.session.delete(dataset) + db.session.commit() + + @patch("superset.datasets.dao.DatasetDAO.delete") + def test_delete_dataset_sqlalchemy_error(self, mock_dao_delete): + """ + Dataset API: Test delete dataset sqlalchemy error + """ + if backend() == "sqlite": + return + + mock_dao_delete.side_effect = DAODeleteFailedError() + + dataset = self.insert_default_dataset() + self.login(username="admin") + uri = f"api/v1/dataset/{dataset.id}" + rv = self.delete_assert_metric(uri, "delete") + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 422 + assert data == 
{"message": "Dataset could not be deleted."} + db.session.delete(dataset) + db.session.commit() + + @pytest.mark.usefixtures("create_datasets") + def test_delete_dataset_column(self): + """ + Dataset API: Test delete dataset column + """ + if backend() == "sqlite": + return + + dataset = self.get_fixture_datasets()[0] + column_id = dataset.columns[0].id + self.login(username="admin") + uri = f"api/v1/dataset/{dataset.id}/column/{column_id}" + rv = self.client.delete(uri) + assert rv.status_code == 200 + assert db.session.query(TableColumn).get(column_id) == None + + @pytest.mark.usefixtures("create_datasets") + def test_delete_dataset_column_not_found(self): + """ + Dataset API: Test delete dataset column not found + """ + if backend() == "sqlite": + return + + dataset = self.get_fixture_datasets()[0] + non_id = self.get_nonexistent_numeric_id(TableColumn) + + self.login(username="admin") + uri = f"api/v1/dataset/{dataset.id}/column/{non_id}" + rv = self.client.delete(uri) + assert rv.status_code == 404 + + non_id = self.get_nonexistent_numeric_id(SqlaTable) + column_id = dataset.columns[0].id + + self.login(username="admin") + uri = f"api/v1/dataset/{non_id}/column/{column_id}" + rv = self.client.delete(uri) + assert rv.status_code == 404 + + @pytest.mark.usefixtures("create_datasets") + def test_delete_dataset_column_not_owned(self): + """ + Dataset API: Test delete dataset column not owned + """ + if backend() == "sqlite": + return + + dataset = self.get_fixture_datasets()[0] + column_id = dataset.columns[0].id + + self.login(username="alpha") + uri = f"api/v1/dataset/{dataset.id}/column/{column_id}" + rv = self.client.delete(uri) + assert rv.status_code == 403 + + @pytest.mark.usefixtures("create_datasets") + @patch("superset.datasets.dao.DatasetDAO.delete") + def test_delete_dataset_column_fail(self, mock_dao_delete): + """ + Dataset API: Test delete dataset column + """ + if backend() == "sqlite": + return + + mock_dao_delete.side_effect = DAODeleteFailedError() + dataset = self.get_fixture_datasets()[0] + column_id = dataset.columns[0].id + self.login(username="admin") + uri = f"api/v1/dataset/{dataset.id}/column/{column_id}" + rv = self.client.delete(uri) + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 422 + assert data == {"message": "Dataset column delete failed."} + + @pytest.mark.usefixtures("create_datasets") + def test_delete_dataset_metric(self): + """ + Dataset API: Test delete dataset metric + """ + if backend() == "sqlite": + return + + dataset = self.get_fixture_datasets()[0] + test_metric = SqlMetric( + metric_name="metric1", expression="COUNT(*)", table=dataset + ) + db.session.add(test_metric) + db.session.commit() + + self.login(username="admin") + uri = f"api/v1/dataset/{dataset.id}/metric/{test_metric.id}" + rv = self.client.delete(uri) + assert rv.status_code == 200 + assert db.session.query(SqlMetric).get(test_metric.id) == None + + @pytest.mark.usefixtures("create_datasets") + def test_delete_dataset_metric_not_found(self): + """ + Dataset API: Test delete dataset metric not found + """ + if backend() == "sqlite": + return + + dataset = self.get_fixture_datasets()[0] + non_id = self.get_nonexistent_numeric_id(SqlMetric) + + self.login(username="admin") + uri = f"api/v1/dataset/{dataset.id}/metric/{non_id}" + rv = self.client.delete(uri) + assert rv.status_code == 404 + + non_id = self.get_nonexistent_numeric_id(SqlaTable) + metric_id = dataset.metrics[0].id + + self.login(username="admin") + uri = 
f"api/v1/dataset/{non_id}/metric/{metric_id}" + rv = self.client.delete(uri) + assert rv.status_code == 404 + + @pytest.mark.usefixtures("create_datasets") + def test_delete_dataset_metric_not_owned(self): + """ + Dataset API: Test delete dataset metric not owned + """ + if backend() == "sqlite": + return + + dataset = self.get_fixture_datasets()[0] + metric_id = dataset.metrics[0].id + + self.login(username="alpha") + uri = f"api/v1/dataset/{dataset.id}/metric/{metric_id}" + rv = self.client.delete(uri) + assert rv.status_code == 403 + + @pytest.mark.usefixtures("create_datasets") + @patch("superset.datasets.dao.DatasetDAO.delete") + def test_delete_dataset_metric_fail(self, mock_dao_delete): + """ + Dataset API: Test delete dataset metric + """ + if backend() == "sqlite": + return + + mock_dao_delete.side_effect = DAODeleteFailedError() + dataset = self.get_fixture_datasets()[0] + column_id = dataset.metrics[0].id + self.login(username="admin") + uri = f"api/v1/dataset/{dataset.id}/metric/{column_id}" + rv = self.client.delete(uri) + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 422 + assert data == {"message": "Dataset metric delete failed."} + + @pytest.mark.usefixtures("create_datasets") + def test_bulk_delete_dataset_items(self): + """ + Dataset API: Test bulk delete dataset items + """ + if backend() == "sqlite": + return + + datasets = self.get_fixture_datasets() + dataset_ids = [dataset.id for dataset in datasets] + + view_menu_names = [] + for dataset in datasets: + view_menu_names.append(dataset.get_perm()) + + self.login(username="admin") + uri = f"api/v1/dataset/?q={prison.dumps(dataset_ids)}" + rv = self.delete_assert_metric(uri, "bulk_delete") + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 200 + expected_response = {"message": f"Deleted {len(datasets)} datasets"} + assert data == expected_response + datasets = ( + db.session.query(SqlaTable) + .filter(SqlaTable.table_name.in_(self.fixture_tables_names)) + .all() + ) + assert datasets == [] + # Assert permissions get cleaned + for view_menu_name in view_menu_names: + assert security_manager.find_view_menu(view_menu_name) is None + + @pytest.mark.usefixtures("create_datasets") + def test_bulk_delete_item_dataset_not_owned(self): + """ + Dataset API: Test bulk delete item not owned + """ + if backend() == "sqlite": + return + + datasets = self.get_fixture_datasets() + dataset_ids = [dataset.id for dataset in datasets] + + self.login(username="alpha") + uri = f"api/v1/dataset/?q={prison.dumps(dataset_ids)}" + rv = self.delete_assert_metric(uri, "bulk_delete") + assert rv.status_code == 403 + + @pytest.mark.usefixtures("create_datasets") + def test_bulk_delete_item_not_found(self): + """ + Dataset API: Test bulk delete item not found + """ + if backend() == "sqlite": + return + + datasets = self.get_fixture_datasets() + dataset_ids = [dataset.id for dataset in datasets] + dataset_ids.append(db.session.query(func.max(SqlaTable.id)).scalar()) + + self.login(username="admin") + uri = f"api/v1/dataset/?q={prison.dumps(dataset_ids)}" + rv = self.delete_assert_metric(uri, "bulk_delete") + assert rv.status_code == 404 + + @pytest.mark.usefixtures("create_datasets") + def test_bulk_delete_dataset_item_not_authorized(self): + """ + Dataset API: Test bulk delete item not authorized + """ + if backend() == "sqlite": + return + + datasets = self.get_fixture_datasets() + dataset_ids = [dataset.id for dataset in datasets] + + self.login(username="gamma") + uri = 
f"api/v1/dataset/?q={prison.dumps(dataset_ids)}" + rv = self.client.delete(uri) + assert rv.status_code == 403 + + @pytest.mark.usefixtures("create_datasets") + def test_bulk_delete_dataset_item_incorrect(self): + """ + Dataset API: Test bulk delete item incorrect request + """ + if backend() == "sqlite": + return + + datasets = self.get_fixture_datasets() + dataset_ids = [dataset.id for dataset in datasets] + dataset_ids.append("Wrong") + + self.login(username="admin") + uri = f"api/v1/dataset/?q={prison.dumps(dataset_ids)}" + rv = self.client.delete(uri) + assert rv.status_code == 400 + + def test_dataset_item_refresh(self): + """ + Dataset API: Test item refresh + """ + if backend() == "sqlite": + return + + dataset = self.insert_default_dataset() + # delete a column + id_column = ( + db.session.query(TableColumn) + .filter_by(table_id=dataset.id, column_name="id") + .one() + ) + db.session.delete(id_column) + db.session.commit() + + self.login(username="admin") + uri = f"api/v1/dataset/{dataset.id}/refresh" + rv = self.put_assert_metric(uri, {}, "refresh") + assert rv.status_code == 200 + # Assert the column is restored on refresh + id_column = ( + db.session.query(TableColumn) + .filter_by(table_id=dataset.id, column_name="id") + .one() + ) + assert id_column is not None + db.session.delete(dataset) + db.session.commit() + + def test_dataset_item_refresh_not_found(self): + """ + Dataset API: Test item refresh not found dataset + """ + if backend() == "sqlite": + return + + max_id = db.session.query(func.max(SqlaTable.id)).scalar() + + self.login(username="admin") + uri = f"api/v1/dataset/{max_id + 1}/refresh" + rv = self.put_assert_metric(uri, {}, "refresh") + assert rv.status_code == 404 + + def test_dataset_item_refresh_not_owned(self): + """ + Dataset API: Test item refresh not owned dataset + """ + if backend() == "sqlite": + return + + dataset = self.insert_default_dataset() + self.login(username="alpha") + uri = f"api/v1/dataset/{dataset.id}/refresh" + rv = self.put_assert_metric(uri, {}, "refresh") + assert rv.status_code == 403 + + db.session.delete(dataset) + db.session.commit() + + @unittest.skip("test is failing stochastically") + def test_export_dataset(self): + """ + Dataset API: Test export dataset + """ + if backend() == "sqlite": + return + + birth_names_dataset = self.get_birth_names_dataset() + # TODO: fix test for presto + # debug with dump: https://github.com/apache/superset/runs/1092546855 + if birth_names_dataset.database.backend in {"presto", "hive"}: + return + + argument = [birth_names_dataset.id] + uri = f"api/v1/dataset/export/?q={prison.dumps(argument)}" + + self.login(username="admin") + rv = self.get_assert_metric(uri, "export") + assert rv.status_code == 200 + + cli_export = export_to_dict( + session=db.session, + recursive=True, + back_references=False, + include_defaults=False, + ) + cli_export_tables = cli_export["databases"][0]["tables"] + expected_response = {} + for export_table in cli_export_tables: + if export_table["table_name"] == "birth_names": + expected_response = export_table + break + ui_export = yaml.safe_load(rv.data.decode("utf-8")) + assert ui_export[0] == expected_response + + def test_export_dataset_not_found(self): + """ + Dataset API: Test export dataset not found + """ + if backend() == "sqlite": + return + + max_id = db.session.query(func.max(SqlaTable.id)).scalar() + # Just one does not exist and we get 404 + argument = [max_id + 1, 1] + uri = f"api/v1/dataset/export/?q={prison.dumps(argument)}" + 
self.login(username="admin") + rv = self.get_assert_metric(uri, "export") + assert rv.status_code == 404 + + @pytest.mark.usefixtures("create_datasets") + def test_export_dataset_gamma(self): + """ + Dataset API: Test export dataset has gamma + """ + if backend() == "sqlite": + return + + dataset = self.get_fixture_datasets()[0] + + argument = [dataset.id] + uri = f"api/v1/dataset/export/?q={prison.dumps(argument)}" + + self.login(username="gamma") + rv = self.client.get(uri) + assert rv.status_code == 403 + + perm1 = security_manager.find_permission_view_menu("can_export", "Dataset") + + perm2 = security_manager.find_permission_view_menu( + "datasource_access", dataset.perm + ) + + # add permissions to allow export + access to query this dataset + gamma_role = security_manager.find_role("Gamma") + security_manager.add_permission_role(gamma_role, perm1) + security_manager.add_permission_role(gamma_role, perm2) + + rv = self.client.get(uri) + assert rv.status_code == 200 + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_export_dataset_bundle(self): + """ + Dataset API: Test export dataset + """ + if backend() == "sqlite": + return + + birth_names_dataset = self.get_birth_names_dataset() + # TODO: fix test for presto + # debug with dump: https://github.com/apache/superset/runs/1092546855 + if birth_names_dataset.database.backend in {"presto", "hive"}: + return + + argument = [birth_names_dataset.id] + uri = f"api/v1/dataset/export/?q={prison.dumps(argument)}" + + self.login(username="admin") + rv = self.get_assert_metric(uri, "export") + + assert rv.status_code == 200 + + buf = BytesIO(rv.data) + assert is_zipfile(buf) + + def test_export_dataset_bundle_not_found(self): + """ + Dataset API: Test export dataset not found + """ + if backend() == "sqlite": + return + + # Just one does not exist and we get 404 + argument = [-1, 1] + uri = f"api/v1/dataset/export/?q={prison.dumps(argument)}" + self.login(username="admin") + rv = self.get_assert_metric(uri, "export") + + assert rv.status_code == 404 + + @pytest.mark.usefixtures("create_datasets") + def test_export_dataset_bundle_gamma(self): + """ + Dataset API: Test export dataset has gamma + """ + if backend() == "sqlite": + return + + dataset = self.get_fixture_datasets()[0] + + argument = [dataset.id] + uri = f"api/v1/dataset/export/?q={prison.dumps(argument)}" + + self.login(username="gamma") + rv = self.client.get(uri) + # gamma users by default do not have access to this dataset + assert rv.status_code == 403 + + @unittest.skip("Number of related objects depend on DB") + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_get_dataset_related_objects(self): + """ + Dataset API: Test get chart and dashboard count related to a dataset + :return: + """ + if backend() == "sqlite": + return + + self.login(username="admin") + table = self.get_birth_names_dataset() + uri = f"api/v1/dataset/{table.id}/related_objects" + rv = self.get_assert_metric(uri, "related_objects") + response = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 200 + assert response["charts"]["count"] == 18 + assert response["dashboards"]["count"] == 1 + + def test_get_dataset_related_objects_not_found(self): + """ + Dataset API: Test related objects not found + """ + if backend() == "sqlite": + return + + max_id = db.session.query(func.max(SqlaTable.id)).scalar() + # id does not exist and we get 404 + invalid_id = max_id + 1 + uri = f"api/v1/dataset/{invalid_id}/related_objects/" + 
self.login(username="admin") + rv = self.client.get(uri) + assert rv.status_code == 404 + self.logout() + + self.login(username="gamma") + table = self.get_birth_names_dataset() + uri = f"api/v1/dataset/{table.id}/related_objects" + rv = self.client.get(uri) + assert rv.status_code == 404 + + @pytest.mark.usefixtures("create_datasets", "create_virtual_datasets") + def test_get_datasets_custom_filter_sql(self): + """ + Dataset API: Test custom dataset_is_null_or_empty filter for sql + """ + if backend() == "sqlite": + return + + arguments = { + "filters": [ + {"col": "sql", "opr": "dataset_is_null_or_empty", "value": False} + ] + } + self.login(username="admin") + uri = f"api/v1/dataset/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + + assert rv.status_code == 200 + + data = json.loads(rv.data.decode("utf-8")) + for table_name in self.fixture_virtual_table_names: + assert table_name in [ds["table_name"] for ds in data["result"]] + + arguments = { + "filters": [ + {"col": "sql", "opr": "dataset_is_null_or_empty", "value": True} + ] + } + self.login(username="admin") + uri = f"api/v1/dataset/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + assert rv.status_code == 200 + + data = json.loads(rv.data.decode("utf-8")) + for table_name in self.fixture_tables_names: + assert table_name in [ds["table_name"] for ds in data["result"]] + + def test_import_dataset(self): + """ + Dataset API: Test import dataset + """ + if backend() == "sqlite": + return + + self.login(username="admin") + uri = "api/v1/dataset/import/" + + buf = self.create_dataset_import() + form_data = { + "formData": (buf, "dataset_export.zip"), + "sync_columns": "true", + "sync_metrics": "true", + } + rv = self.client.post(uri, data=form_data, content_type="multipart/form-data") + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 200 + assert response == {"message": "OK"} + + database = ( + db.session.query(Database).filter_by(uuid=database_config["uuid"]).one() + ) + + assert database.database_name == "imported_database" + + assert len(database.tables) == 1 + dataset = database.tables[0] + assert dataset.table_name == "imported_dataset" + assert str(dataset.uuid) == dataset_config["uuid"] + + dataset.owners = [] + db.session.delete(dataset) + db.session.commit() + db.session.delete(database) + db.session.commit() + + def test_import_dataset_v0_export(self): + if backend() == "sqlite": + return + + num_datasets = db.session.query(SqlaTable).count() + + self.login(username="admin") + uri = "api/v1/dataset/import/" + + buf = BytesIO() + buf.write(json.dumps(dataset_ui_export).encode()) + buf.seek(0) + form_data = { + "formData": (buf, "dataset_export.zip"), + "sync_columns": "true", + "sync_metrics": "true", + } + rv = self.client.post(uri, data=form_data, content_type="multipart/form-data") + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 200 + assert response == {"message": "OK"} + assert db.session.query(SqlaTable).count() == num_datasets + 1 + + dataset = ( + db.session.query(SqlaTable).filter_by(table_name="birth_names_2").one() + ) + db.session.delete(dataset) + db.session.commit() + + def test_import_dataset_overwrite(self): + """ + Dataset API: Test import existing dataset + """ + if backend() == "sqlite": + return + + self.login(username="admin") + uri = "api/v1/dataset/import/" + + buf = self.create_dataset_import() + form_data = { + "formData": (buf, "dataset_export.zip"), + } + rv = self.client.post(uri, data=form_data, 
content_type="multipart/form-data") + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 200 + assert response == {"message": "OK"} + + # import again without overwrite flag + buf = self.create_dataset_import() + form_data = { + "formData": (buf, "dataset_export.zip"), + } + rv = self.client.post(uri, data=form_data, content_type="multipart/form-data") + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 422 + assert response == { + "errors": [ + { + "message": "Error importing dataset", + "error_type": "GENERIC_COMMAND_ERROR", + "level": "warning", + "extra": { + "datasets/imported_dataset.yaml": "Dataset already exists and `overwrite=true` was not passed", + "issue_codes": [ + { + "code": 1010, + "message": "Issue 1010 - Superset encountered an error while running a command.", + } + ], + }, + } + ] + } + + # import with overwrite flag + buf = self.create_dataset_import() + form_data = { + "formData": (buf, "dataset_export.zip"), + "overwrite": "true", + } + rv = self.client.post(uri, data=form_data, content_type="multipart/form-data") + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 200 + assert response == {"message": "OK"} + + # clean up + database = ( + db.session.query(Database).filter_by(uuid=database_config["uuid"]).one() + ) + dataset = database.tables[0] + + dataset.owners = [] + db.session.delete(dataset) + db.session.commit() + db.session.delete(database) + db.session.commit() + + def test_import_dataset_invalid(self): + """ + Dataset API: Test import invalid dataset + """ + if backend() == "sqlite": + return + + self.login(username="admin") + uri = "api/v1/dataset/import/" + + buf = BytesIO() + with ZipFile(buf, "w") as bundle: + with bundle.open("dataset_export/metadata.yaml", "w") as fp: + fp.write(yaml.safe_dump(database_metadata_config).encode()) + with bundle.open( + "dataset_export/databases/imported_database.yaml", "w" + ) as fp: + fp.write(yaml.safe_dump(database_config).encode()) + with bundle.open( + "dataset_export/datasets/imported_dataset.yaml", "w" + ) as fp: + fp.write(yaml.safe_dump(dataset_config).encode()) + buf.seek(0) + + form_data = { + "formData": (buf, "dataset_export.zip"), + } + rv = self.client.post(uri, data=form_data, content_type="multipart/form-data") + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 422 + assert response == { + "errors": [ + { + "message": "Error importing dataset", + "error_type": "GENERIC_COMMAND_ERROR", + "level": "warning", + "extra": { + "metadata.yaml": {"type": ["Must be equal to SqlaTable."]}, + "issue_codes": [ + { + "code": 1010, + "message": ( + "Issue 1010 - Superset encountered " + "an error while running a command." 
+ ), + } + ], + }, + } + ] + } + + def test_import_dataset_invalid_v0_validation(self): + """ + Dataset API: Test import invalid dataset + """ + if backend() == "sqlite": + return + + self.login(username="admin") + uri = "api/v1/dataset/import/" + + buf = BytesIO() + with ZipFile(buf, "w") as bundle: + with bundle.open( + "dataset_export/databases/imported_database.yaml", "w" + ) as fp: + fp.write(yaml.safe_dump(database_config).encode()) + with bundle.open( + "dataset_export/datasets/imported_dataset.yaml", "w" + ) as fp: + fp.write(yaml.safe_dump(dataset_config).encode()) + buf.seek(0) + + form_data = { + "formData": (buf, "dataset_export.zip"), + } + rv = self.client.post(uri, data=form_data, content_type="multipart/form-data") + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 422 + assert response == { + "errors": [ + { + "message": "Could not find a valid command to import file", + "error_type": "GENERIC_COMMAND_ERROR", + "level": "warning", + "extra": { + "issue_codes": [ + { + "code": 1010, + "message": "Issue 1010 - Superset encountered an error while running a command.", + } + ] + }, + } + ] + } + + @pytest.mark.usefixtures("create_datasets") + def test_get_datasets_is_certified_filter(self): + """ + Dataset API: Test custom dataset_is_certified filter + """ + if backend() == "sqlite": + return + + table_w_certification = SqlaTable( + table_name="foo", + schema=None, + owners=[], + database=get_main_database(), + sql=None, + extra='{"certification": 1}', + ) + db.session.add(table_w_certification) + db.session.commit() + + arguments = { + "filters": [{"col": "id", "opr": "dataset_is_certified", "value": True}] + } + self.login(username="admin") + uri = f"api/v1/dataset/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + + assert rv.status_code == 200 + response = json.loads(rv.data.decode("utf-8")) + assert response.get("count") == 1 + + db.session.delete(table_w_certification) + db.session.commit() + + @pytest.mark.usefixtures("create_virtual_datasets") + def test_duplicate_virtual_dataset(self): + """ + Dataset API: Test duplicate virtual dataset + """ + if backend() == "sqlite": + return + + dataset = self.get_fixture_virtual_datasets()[0] + + self.login(username="admin") + uri = f"api/v1/dataset/duplicate" + table_data = {"base_model_id": dataset.id, "table_name": "Dupe1"} + rv = self.post_assert_metric(uri, table_data, "duplicate") + assert rv.status_code == 201 + rv_data = json.loads(rv.data) + new_dataset: SqlaTable = ( + db.session.query(SqlaTable).filter_by(id=rv_data["id"]).one_or_none() + ) + assert new_dataset is not None + assert new_dataset.id != dataset.id + assert new_dataset.table_name == "Dupe1" + assert len(new_dataset.columns) == 2 + assert new_dataset.columns[0].column_name == "id" + assert new_dataset.columns[1].column_name == "name" + db.session.delete(new_dataset) + db.session.commit() + + @pytest.mark.usefixtures("create_datasets") + def test_duplicate_physical_dataset(self): + """ + Dataset API: Test duplicate physical dataset + """ + if backend() == "sqlite": + return + + dataset = self.get_fixture_datasets()[0] + + self.login(username="admin") + uri = f"api/v1/dataset/duplicate" + table_data = {"base_model_id": dataset.id, "table_name": "Dupe2"} + rv = self.post_assert_metric(uri, table_data, "duplicate") + assert rv.status_code == 422 + + @pytest.mark.usefixtures("create_virtual_datasets") + def test_duplicate_existing_dataset(self): + """ + Dataset API: Test duplicate dataset with existing name + """ + if backend() 
== "sqlite": + return + + dataset = self.get_fixture_virtual_datasets()[0] + + self.login(username="admin") + uri = f"api/v1/dataset/duplicate" + table_data = { + "base_model_id": dataset.id, + "table_name": "sql_virtual_dataset_2", + } + rv = self.post_assert_metric(uri, table_data, "duplicate") + assert rv.status_code == 422 + + def test_duplicate_invalid_dataset(self): + """ + Dataset API: Test duplicate invalid dataset + """ + + self.login(username="admin") + uri = f"api/v1/dataset/duplicate" + table_data = { + "base_model_id": -1, + "table_name": "Dupe3", + } + rv = self.post_assert_metric(uri, table_data, "duplicate") + assert rv.status_code == 422 + + @pytest.mark.usefixtures("app_context", "virtual_dataset") + def test_get_or_create_dataset_already_exists(self): + """ + Dataset API: Test get or create endpoint when table already exists + """ + self.login(username="admin") + rv = self.client.post( + "api/v1/dataset/get_or_create/", + json={ + "table_name": "virtual_dataset", + "database_id": get_example_database().id, + }, + ) + self.assertEqual(rv.status_code, 200) + response = json.loads(rv.data.decode("utf-8")) + dataset = ( + db.session.query(SqlaTable) + .filter(SqlaTable.table_name == "virtual_dataset") + .one() + ) + self.assertEqual(response["result"], {"table_id": dataset.id}) + + def test_get_or_create_dataset_database_not_found(self): + """ + Dataset API: Test get or create endpoint when database doesn't exist + """ + self.login(username="admin") + rv = self.client.post( + "api/v1/dataset/get_or_create/", + json={"table_name": "virtual_dataset", "database_id": 999}, + ) + self.assertEqual(rv.status_code, 422) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(response["message"], {"database": ["Database does not exist"]}) + + @patch("superset.datasets.commands.create.CreateDatasetCommand.run") + def test_get_or_create_dataset_create_fails(self, command_run_mock): + """ + Dataset API: Test get or create endpoint when create fails + """ + command_run_mock.side_effect = DatasetCreateFailedError + self.login(username="admin") + rv = self.client.post( + "api/v1/dataset/get_or_create/", + json={ + "table_name": "virtual_dataset", + "database_id": get_example_database().id, + }, + ) + self.assertEqual(rv.status_code, 422) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(response["message"], "Dataset could not be created.") + + def test_get_or_create_dataset_creates_table(self): + """ + Dataset API: Test get or create endpoint when table is created + """ + self.login(username="admin") + + examples_db = get_example_database() + with examples_db.get_sqla_engine_with_context() as engine: + engine.execute("DROP TABLE IF EXISTS test_create_sqla_table_api") + engine.execute("CREATE TABLE test_create_sqla_table_api AS SELECT 2 as col") + + rv = self.client.post( + "api/v1/dataset/get_or_create/", + json={ + "table_name": "test_create_sqla_table_api", + "database_id": examples_db.id, + "template_params": '{"param": 1}', + }, + ) + self.assertEqual(rv.status_code, 200) + response = json.loads(rv.data.decode("utf-8")) + table = ( + db.session.query(SqlaTable) + .filter_by(table_name="test_create_sqla_table_api") + .one() + ) + self.assertEqual(response["result"], {"table_id": table.id}) + self.assertEqual(table.template_params, '{"param": 1}') + + db.session.delete(table) + with examples_db.get_sqla_engine_with_context() as engine: + engine.execute("DROP TABLE test_create_sqla_table_api") + db.session.commit() diff --git 
a/tests/integration_tests/datasets/commands_tests.py b/tests/integration_tests/datasets/commands_tests.py new file mode 100644 index 0000000000000..0ce98477a0b2d --- /dev/null +++ b/tests/integration_tests/datasets/commands_tests.py @@ -0,0 +1,570 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from operator import itemgetter +from typing import Any, List +from unittest.mock import patch + +import pytest +import yaml +from sqlalchemy.exc import SQLAlchemyError + +from superset import db, security_manager +from superset.commands.exceptions import CommandInvalidError +from superset.commands.importers.exceptions import IncorrectVersionError +from superset.connectors.sqla.models import SqlaTable +from superset.databases.commands.importers.v1 import ImportDatabasesCommand +from superset.datasets.commands.create import CreateDatasetCommand +from superset.datasets.commands.exceptions import ( + DatasetInvalidError, + DatasetNotFoundError, +) +from superset.datasets.commands.export import ExportDatasetsCommand +from superset.datasets.commands.importers import v0, v1 +from superset.models.core import Database +from superset.utils.core import get_example_default_schema +from superset.utils.database import get_example_database +from tests.integration_tests.base_tests import SupersetTestCase +from tests.integration_tests.fixtures.energy_dashboard import ( + load_energy_table_data, + load_energy_table_with_slice, +) +from tests.integration_tests.fixtures.importexport import ( + database_config, + database_metadata_config, + dataset_cli_export, + dataset_config, + dataset_metadata_config, + dataset_ui_export, +) +from tests.integration_tests.fixtures.world_bank_dashboard import ( + load_world_bank_dashboard_with_slices, + load_world_bank_data, +) + + +class TestExportDatasetsCommand(SupersetTestCase): + @patch("superset.security.manager.g") + @pytest.mark.usefixtures("load_energy_table_with_slice") + def test_export_dataset_command(self, mock_g): + mock_g.user = security_manager.find_user("admin") + + example_db = get_example_database() + example_dataset = _get_table_from_list_by_name( + "energy_usage", example_db.tables + ) + command = ExportDatasetsCommand([example_dataset.id]) + contents = dict(command.run()) + + assert list(contents.keys()) == [ + "metadata.yaml", + "datasets/examples/energy_usage.yaml", + "databases/examples.yaml", + ] + + metadata = yaml.safe_load(contents["datasets/examples/energy_usage.yaml"]) + + # sort columns for deterministic comparison + metadata["columns"] = sorted(metadata["columns"], key=itemgetter("column_name")) + metadata["metrics"] = sorted(metadata["metrics"], key=itemgetter("metric_name")) + + # types are different depending on the backend + type_map = { + column.column_name: str(column.type) for column in 
example_dataset.columns + } + + assert metadata == { + "cache_timeout": None, + "columns": [ + { + "column_name": "source", + "description": None, + "expression": "", + "filterable": True, + "groupby": True, + "is_active": True, + "is_dttm": False, + "python_date_format": None, + "type": type_map["source"], + "advanced_data_type": None, + "verbose_name": None, + "extra": None, + }, + { + "column_name": "target", + "description": None, + "expression": "", + "filterable": True, + "groupby": True, + "is_active": True, + "is_dttm": False, + "python_date_format": None, + "type": type_map["target"], + "advanced_data_type": None, + "verbose_name": None, + "extra": None, + }, + { + "column_name": "value", + "description": None, + "expression": "", + "filterable": True, + "groupby": True, + "is_active": True, + "is_dttm": False, + "python_date_format": None, + "type": type_map["value"], + "advanced_data_type": None, + "verbose_name": None, + "extra": None, + }, + ], + "database_uuid": str(example_db.uuid), + "default_endpoint": None, + "description": "Energy consumption", + "extra": None, + "fetch_values_predicate": None, + "filter_select_enabled": False, + "main_dttm_col": None, + "metrics": [ + { + "d3format": None, + "description": None, + "expression": "COUNT(*)", + "extra": None, + "metric_name": "count", + "metric_type": "count", + "verbose_name": "COUNT(*)", + "warning_text": None, + }, + { + "d3format": None, + "description": None, + "expression": "SUM(value)", + "extra": None, + "metric_name": "sum__value", + "metric_type": None, + "verbose_name": None, + "warning_text": None, + }, + ], + "offset": 0, + "params": None, + "schema": get_example_default_schema(), + "sql": None, + "table_name": "energy_usage", + "template_params": None, + "uuid": str(example_dataset.uuid), + "version": "1.0.0", + } + + @patch("superset.security.manager.g") + def test_export_dataset_command_no_access(self, mock_g): + """Test that users can't export datasets they don't have access to""" + mock_g.user = security_manager.find_user("gamma") + + example_db = get_example_database() + example_dataset = example_db.tables[0] + command = ExportDatasetsCommand([example_dataset.id]) + contents = command.run() + with self.assertRaises(DatasetNotFoundError): + next(contents) + + @patch("superset.security.manager.g") + def test_export_dataset_command_invalid_dataset(self, mock_g): + """Test that an error is raised when exporting an invalid dataset""" + mock_g.user = security_manager.find_user("admin") + command = ExportDatasetsCommand([-1]) + contents = command.run() + with self.assertRaises(DatasetNotFoundError): + next(contents) + + @patch("superset.security.manager.g") + @pytest.mark.usefixtures("load_energy_table_with_slice") + def test_export_dataset_command_key_order(self, mock_g): + """Test that they keys in the YAML have the same order as export_fields""" + mock_g.user = security_manager.find_user("admin") + + example_db = get_example_database() + example_dataset = _get_table_from_list_by_name( + "energy_usage", example_db.tables + ) + command = ExportDatasetsCommand([example_dataset.id]) + contents = dict(command.run()) + + metadata = yaml.safe_load(contents["datasets/examples/energy_usage.yaml"]) + assert list(metadata.keys()) == [ + "table_name", + "main_dttm_col", + "description", + "default_endpoint", + "offset", + "cache_timeout", + "schema", + "sql", + "params", + "template_params", + "filter_select_enabled", + "fetch_values_predicate", + "extra", + "uuid", + "metrics", + "columns", + "version", + 
"database_uuid", + ] + + @patch("superset.security.manager.g") + @pytest.mark.usefixtures("load_energy_table_with_slice") + def test_export_dataset_command_no_related(self, mock_g): + """ + Test that only datasets are exported when export_related=False. + """ + mock_g.user = security_manager.find_user("admin") + + example_db = get_example_database() + example_dataset = _get_table_from_list_by_name( + "energy_usage", example_db.tables + ) + command = ExportDatasetsCommand([example_dataset.id], export_related=False) + contents = dict(command.run()) + + assert list(contents.keys()) == [ + "metadata.yaml", + "datasets/examples/energy_usage.yaml", + ] + + +class TestImportDatasetsCommand(SupersetTestCase): + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def test_import_v0_dataset_cli_export(self): + num_datasets = db.session.query(SqlaTable).count() + + contents = { + "20201119_181105.yaml": yaml.safe_dump(dataset_cli_export), + } + command = v0.ImportDatasetsCommand(contents) + command.run() + + new_num_datasets = db.session.query(SqlaTable).count() + assert new_num_datasets == num_datasets + 1 + + dataset = ( + db.session.query(SqlaTable).filter_by(table_name="birth_names_2").one() + ) + assert ( + dataset.params + == '{"remote_id": 3, "database_name": "examples", "import_time": 1604342885}' + ) + assert len(dataset.metrics) == 2 + assert dataset.main_dttm_col == "ds" + assert dataset.filter_select_enabled + dataset.columns.sort(key=lambda obj: obj.column_name) + expected_columns = [ + "num_california", + "ds", + "state", + "gender", + "name", + "num_boys", + "num_girls", + "num", + ] + expected_columns.sort() + assert [col.column_name for col in dataset.columns] == expected_columns + + db.session.delete(dataset) + db.session.commit() + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def test_import_v0_dataset_ui_export(self): + num_datasets = db.session.query(SqlaTable).count() + + contents = { + "20201119_181105.yaml": yaml.safe_dump(dataset_ui_export), + } + command = v0.ImportDatasetsCommand(contents) + command.run() + + new_num_datasets = db.session.query(SqlaTable).count() + assert new_num_datasets == num_datasets + 1 + + dataset = ( + db.session.query(SqlaTable).filter_by(table_name="birth_names_2").one() + ) + assert ( + dataset.params + == '{"remote_id": 3, "database_name": "examples", "import_time": 1604342885}' + ) + assert len(dataset.metrics) == 2 + assert dataset.main_dttm_col == "ds" + assert dataset.filter_select_enabled + assert set(col.column_name for col in dataset.columns) == { + "num_california", + "ds", + "state", + "gender", + "name", + "num_boys", + "num_girls", + "num", + } + + db.session.delete(dataset) + db.session.commit() + + @patch("superset.datasets.commands.importers.v1.utils.g") + @pytest.mark.usefixtures("load_energy_table_with_slice") + def test_import_v1_dataset(self, mock_g): + """Test that we can import a dataset""" + mock_g.user = security_manager.find_user("admin") + contents = { + "metadata.yaml": yaml.safe_dump(dataset_metadata_config), + "databases/imported_database.yaml": yaml.safe_dump(database_config), + "datasets/imported_dataset.yaml": yaml.safe_dump(dataset_config), + } + command = v1.ImportDatasetsCommand(contents) + command.run() + + dataset = ( + db.session.query(SqlaTable).filter_by(uuid=dataset_config["uuid"]).one() + ) + assert dataset.table_name == "imported_dataset" + assert dataset.main_dttm_col is None + assert dataset.description == "This is a dataset that was exported" + assert 
dataset.default_endpoint == ""
+        assert dataset.offset == 66
+        assert dataset.cache_timeout == 55
+        assert dataset.schema == ""
+        assert dataset.sql == ""
+        assert dataset.params is None
+        assert dataset.template_params == "{}"
+        assert dataset.filter_select_enabled
+        assert dataset.fetch_values_predicate is None
+        assert (
+            dataset.extra
+            == '{"certification": {"certified_by": "Data Platform Team", "details": "This table is the source of truth."}, "warning_markdown": "This is a warning."}'
+        )
+
+        # user should be included as one of the owners
+        assert dataset.owners == [mock_g.user]
+
+        # database is also imported
+        assert str(dataset.database.uuid) == "b8a1ccd3-779d-4ab7-8ad8-9ab119d7fe89"
+
+        assert len(dataset.metrics) == 1
+        metric = dataset.metrics[0]
+        assert metric.metric_name == "count"
+        assert metric.verbose_name == ""
+        assert metric.metric_type is None
+        assert metric.expression == "count(1)"
+        assert metric.description is None
+        assert metric.d3format is None
+        assert metric.extra == "{}"
+        assert metric.warning_text is None
+
+        assert len(dataset.columns) == 1
+        column = dataset.columns[0]
+        assert column.column_name == "cnt"
+        assert column.verbose_name == "Count of something"
+        assert not column.is_dttm
+        assert column.is_active  # imported columns are set to active
+        assert column.type == "NUMBER"
+        assert not column.groupby
+        assert column.filterable
+        assert column.expression == ""
+        assert column.description is None
+        assert column.python_date_format is None
+
+        dataset.owners = []
+        dataset.database.owners = []
+        db.session.delete(dataset)
+        db.session.delete(dataset.database)
+        db.session.commit()
+
+    def test_import_v1_dataset_multiple(self):
+        """Test that a dataset can be imported multiple times"""
+        contents = {
+            "metadata.yaml": yaml.safe_dump(dataset_metadata_config),
+            "databases/imported_database.yaml": yaml.safe_dump(database_config),
+            "datasets/imported_dataset.yaml": yaml.safe_dump(dataset_config),
+        }
+        command = v1.ImportDatasetsCommand(contents, overwrite=True)
+        command.run()
+        command.run()
+        dataset = (
+            db.session.query(SqlaTable).filter_by(uuid=dataset_config["uuid"]).one()
+        )
+        assert dataset.table_name == "imported_dataset"
+
+        # test that columns and metrics sync, i.e., old ones not in the import
+        # are removed
+        new_config = dataset_config.copy()
+        new_config["metrics"][0]["metric_name"] = "count2"
+        new_config["columns"][0]["column_name"] = "cnt2"
+        contents = {
+            "metadata.yaml": yaml.safe_dump(dataset_metadata_config),
+            "databases/imported_database.yaml": yaml.safe_dump(database_config),
+            "datasets/imported_dataset.yaml": yaml.safe_dump(new_config),
+        }
+        command = v1.ImportDatasetsCommand(contents, overwrite=True)
+        command.run()
+        dataset = (
+            db.session.query(SqlaTable).filter_by(uuid=dataset_config["uuid"]).one()
+        )
+        assert len(dataset.metrics) == 1
+        assert dataset.metrics[0].metric_name == "count2"
+        assert len(dataset.columns) == 1
+        assert dataset.columns[0].column_name == "cnt2"
+
+        db.session.delete(dataset)
+        db.session.delete(dataset.database)
+        db.session.commit()
+
+    def test_import_v1_dataset_validation(self):
+        """Test different validations applied when importing a dataset"""
+        # metadata.yaml must be present
+        contents = {
+            "datasets/imported_dataset.yaml": yaml.safe_dump(dataset_config),
+        }
+        command = v1.ImportDatasetsCommand(contents)
+        with pytest.raises(IncorrectVersionError) as excinfo:
+            command.run()
+        assert str(excinfo.value) == "Missing metadata.yaml"
+
+        # version should be 1.0.0
+        contents["metadata.yaml"] = yaml.safe_dump(
+            {
+                "version": "2.0.0",
+                "type": "SqlaTable",
+                "timestamp": "2020-11-04T21:27:44.423819+00:00",
+            }
+        )
+        command = v1.ImportDatasetsCommand(contents)
+        with pytest.raises(IncorrectVersionError) as excinfo:
+            command.run()
+        assert str(excinfo.value) == "Must be equal to 1.0.0."
+
+        # type should be SqlaTable
+        contents["metadata.yaml"] = yaml.safe_dump(database_metadata_config)
+        command = v1.ImportDatasetsCommand(contents)
+        with pytest.raises(CommandInvalidError) as excinfo:
+            command.run()
+        assert str(excinfo.value) == "Error importing dataset"
+        assert excinfo.value.normalized_messages() == {
+            "metadata.yaml": {"type": ["Must be equal to SqlaTable."]}
+        }
+
+        # must also validate databases
+        broken_config = database_config.copy()
+        del broken_config["database_name"]
+        contents["metadata.yaml"] = yaml.safe_dump(dataset_metadata_config)
+        contents["databases/imported_database.yaml"] = yaml.safe_dump(broken_config)
+        command = v1.ImportDatasetsCommand(contents)
+        with pytest.raises(CommandInvalidError) as excinfo:
+            command.run()
+        assert str(excinfo.value) == "Error importing dataset"
+        assert excinfo.value.normalized_messages() == {
+            "databases/imported_database.yaml": {
+                "database_name": ["Missing data for required field."],
+            }
+        }
+
+    def test_import_v1_dataset_existing_database(self):
+        """Test that a dataset can be imported when the database already exists"""
+        # first import database...
+        contents = {
+            "metadata.yaml": yaml.safe_dump(database_metadata_config),
+            "databases/imported_database.yaml": yaml.safe_dump(database_config),
+        }
+        command = ImportDatabasesCommand(contents)
+        command.run()
+
+        database = (
+            db.session.query(Database).filter_by(uuid=database_config["uuid"]).one()
+        )
+        assert len(database.tables) == 0
+
+        # ...then dataset
+        contents = {
+            "metadata.yaml": yaml.safe_dump(dataset_metadata_config),
+            "datasets/imported_dataset.yaml": yaml.safe_dump(dataset_config),
+            "databases/imported_database.yaml": yaml.safe_dump(database_config),
+        }
+        command = v1.ImportDatasetsCommand(contents, overwrite=True)
+        command.run()
+
+        database = (
+            db.session.query(Database).filter_by(uuid=database_config["uuid"]).one()
+        )
+        assert len(database.tables) == 1
+
+        database.tables[0].owners = []
+        database.owners = []
+        db.session.delete(database.tables[0])
+        db.session.delete(database)
+        db.session.commit()
+
+
+def _get_table_from_list_by_name(name: str, tables: List[Any]):
+    for table in tables:
+        if table.table_name == name:
+            return table
+    raise ValueError(f"Table {name} does not exist in database")
+
+
+class TestCreateDatasetCommand(SupersetTestCase):
+    def test_database_not_found(self):
+        self.login(username="admin")
+        with self.assertRaises(DatasetInvalidError):
+            CreateDatasetCommand({"table_name": "table", "database": 9999}).run()
+
+    @patch("superset.models.core.Database.get_table")
+    def test_get_table_from_database_error(self, get_table_mock):
+        self.login(username="admin")
+        get_table_mock.side_effect = SQLAlchemyError
+        with self.assertRaises(DatasetInvalidError):
+            CreateDatasetCommand(
+                {"table_name": "table", "database": get_example_database().id}
+            ).run()
+
+    @patch("superset.security.manager.g")
+    @patch("superset.commands.utils.g")
+    def test_create_dataset_command(self, mock_g, mock_g2):
+        mock_g.user = security_manager.find_user("admin")
+        mock_g2.user = mock_g.user
+        examples_db = get_example_database()
+        with examples_db.get_sqla_engine_with_context() as engine:
+            engine.execute("DROP TABLE IF 
EXISTS test_create_dataset_command") + engine.execute( + "CREATE TABLE test_create_dataset_command AS SELECT 2 as col" + ) + + table = CreateDatasetCommand( + {"table_name": "test_create_dataset_command", "database": examples_db.id} + ).run() + fetched_table = ( + db.session.query(SqlaTable) + .filter_by(table_name="test_create_dataset_command") + .one() + ) + self.assertEqual(table, fetched_table) + self.assertEqual([owner.username for owner in table.owners], ["admin"]) + + db.session.delete(table) + with examples_db.get_sqla_engine_with_context() as engine: + engine.execute("DROP TABLE test_create_dataset_command") + db.session.commit() diff --git a/tests/integration_tests/datasource/__init__.py b/tests/integration_tests/datasource/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/datasource/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/datasource/api_tests.py b/tests/integration_tests/datasource/api_tests.py new file mode 100644 index 0000000000000..522aa33383e62 --- /dev/null +++ b/tests/integration_tests/datasource/api_tests.py @@ -0,0 +1,137 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
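+# Integration tests for the datasource API column values endpoint.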
+import json +from unittest.mock import Mock, patch + +import pytest + +from superset import db, security_manager +from superset.connectors.sqla.models import SqlaTable +from superset.dao.exceptions import DatasourceTypeNotSupportedError +from tests.integration_tests.base_tests import SupersetTestCase + + +class TestDatasourceApi(SupersetTestCase): + def get_virtual_dataset(self): + return ( + db.session.query(SqlaTable) + .filter(SqlaTable.table_name == "virtual_dataset") + .one() + ) + + @pytest.mark.usefixtures("app_context", "virtual_dataset") + def test_get_column_values_ints(self): + self.login(username="admin") + table = self.get_virtual_dataset() + rv = self.client.get(f"api/v1/datasource/table/{table.id}/column/col1/values/") + self.assertEqual(rv.status_code, 200) + response = json.loads(rv.data.decode("utf-8")) + for val in range(10): + assert val in response["result"] + + @pytest.mark.usefixtures("app_context", "virtual_dataset") + def test_get_column_values_strs(self): + self.login(username="admin") + table = self.get_virtual_dataset() + rv = self.client.get(f"api/v1/datasource/table/{table.id}/column/col2/values/") + self.assertEqual(rv.status_code, 200) + response = json.loads(rv.data.decode("utf-8")) + for val in ["a", "b", "c", "d", "e", "f", "g", "h", "i", "j"]: + assert val in response["result"] + + @pytest.mark.usefixtures("app_context", "virtual_dataset") + def test_get_column_values_floats(self): + self.login(username="admin") + table = self.get_virtual_dataset() + rv = self.client.get(f"api/v1/datasource/table/{table.id}/column/col3/values/") + self.assertEqual(rv.status_code, 200) + response = json.loads(rv.data.decode("utf-8")) + for val in [1.0, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 1.9]: + assert val in response["result"] + + @pytest.mark.usefixtures("app_context", "virtual_dataset") + def test_get_column_values_nulls(self): + self.login(username="admin") + table = self.get_virtual_dataset() + rv = self.client.get(f"api/v1/datasource/table/{table.id}/column/col4/values/") + self.assertEqual(rv.status_code, 200) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(response["result"], [None]) + + @pytest.mark.usefixtures("app_context", "virtual_dataset") + def test_get_column_values_invalid_datasource_type(self): + self.login(username="admin") + table = self.get_virtual_dataset() + rv = self.client.get( + f"api/v1/datasource/not_table/{table.id}/column/col1/values/" + ) + self.assertEqual(rv.status_code, 400) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(response["message"], "Invalid datasource type: not_table") + + @patch("superset.datasource.api.DatasourceDAO.get_datasource") + def test_get_column_values_datasource_type_not_supported(self, get_datasource_mock): + get_datasource_mock.side_effect = DatasourceTypeNotSupportedError + self.login(username="admin") + rv = self.client.get("api/v1/datasource/table/1/column/col1/values/") + self.assertEqual(rv.status_code, 400) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual( + response["message"], "DAO datasource query source type is not supported" + ) + + def test_get_column_values_datasource_not_found(self): + self.login(username="admin") + rv = self.client.get("api/v1/datasource/table/999/column/col1/values/") + self.assertEqual(rv.status_code, 404) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(response["message"], "Datasource does not exist") + + @pytest.mark.usefixtures("app_context", "virtual_dataset") + def 
test_get_column_values_no_datasource_access(self):
+        # Allow the gamma user to use this endpoint even though they do not have datasource access
+        perm = security_manager.find_permission_view_menu(
+            "can_get_column_values", "Datasource"
+        )
+        gamma_role = security_manager.find_role("Gamma")
+        security_manager.add_permission_role(gamma_role, perm)
+
+        self.login(username="gamma")
+        table = self.get_virtual_dataset()
+        rv = self.client.get(f"api/v1/datasource/table/{table.id}/column/col1/values/")
+        self.assertEqual(rv.status_code, 403)
+        response = json.loads(rv.data.decode("utf-8"))
+        self.assertEqual(
+            response["message"],
+            "This endpoint requires the datasource virtual_dataset, "
+            "database or `all_datasource_access` permission",
+        )
+
+    @patch("superset.datasource.api.DatasourceDAO.get_datasource")
+    def test_get_column_values_not_implemented_error(self, get_datasource_mock):
+        datasource = Mock()
+        datasource.values_for_column.side_effect = NotImplementedError
+        get_datasource_mock.return_value = datasource
+
+        self.login(username="admin")
+        rv = self.client.get("api/v1/datasource/sl_table/1/column/col1/values/")
+        self.assertEqual(rv.status_code, 400)
+        response = json.loads(rv.data.decode("utf-8"))
+        self.assertEqual(
+            response["message"],
+            "Unable to get column values for datasource type: sl_table",
+        )
diff --git a/tests/integration_tests/datasource_tests.py b/tests/integration_tests/datasource_tests.py
new file mode 100644
index 0000000000000..52bd9ec244cc3
--- /dev/null
+++ b/tests/integration_tests/datasource_tests.py
@@ -0,0 +1,689 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Unit tests for Superset""" +import json +from contextlib import contextmanager +from unittest import mock + +import prison +import pytest + +from superset import app, db +from superset.common.utils.query_cache_manager import QueryCacheManager +from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn +from superset.constants import CacheRegion +from superset.dao.exceptions import DatasourceNotFound, DatasourceTypeNotSupportedError +from superset.datasets.commands.exceptions import DatasetNotFoundError +from superset.exceptions import SupersetGenericDBErrorException +from superset.models.core import Database +from superset.utils.core import backend, get_example_default_schema +from superset.utils.database import get_example_database, get_main_database +from tests.integration_tests.base_tests import db_insert_temp_object, SupersetTestCase +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) +from tests.integration_tests.fixtures.datasource import get_datasource_post + + +@contextmanager +def create_test_table_context(database: Database): + schema = get_example_default_schema() + full_table_name = f"{schema}.test_table" if schema else "test_table" + + with database.get_sqla_engine_with_context() as engine: + engine.execute( + f"CREATE TABLE IF NOT EXISTS {full_table_name} AS SELECT 1 as first, 2 as second" + ) + engine.execute(f"INSERT INTO {full_table_name} (first, second) VALUES (1, 2)") + engine.execute(f"INSERT INTO {full_table_name} (first, second) VALUES (3, 4)") + + yield db.session + + with database.get_sqla_engine_with_context() as engine: + engine.execute(f"DROP TABLE {full_table_name}") + + +class TestDatasource(SupersetTestCase): + def setUp(self): + db.session.begin(subtransactions=True) + + def tearDown(self): + db.session.rollback() + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_external_metadata_for_physical_table(self): + self.login(username="admin") + tbl = self.get_table(name="birth_names") + url = f"/datasource/external_metadata/table/{tbl.id}/" + resp = self.get_json_resp(url) + col_names = {o.get("name") for o in resp} + self.assertEqual( + col_names, {"num_boys", "num", "gender", "name", "ds", "state", "num_girls"} + ) + + def test_external_metadata_for_virtual_table(self): + self.login(username="admin") + session = db.session + table = SqlaTable( + table_name="dummy_sql_table", + database=get_example_database(), + schema=get_example_default_schema(), + sql="select 123 as intcol, 'abc' as strcol", + ) + session.add(table) + session.commit() + + table = self.get_table(name="dummy_sql_table") + url = f"/datasource/external_metadata/table/{table.id}/" + resp = self.get_json_resp(url) + assert {o.get("name") for o in resp} == {"intcol", "strcol"} + session.delete(table) + session.commit() + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_external_metadata_by_name_for_physical_table(self): + self.login(username="admin") + tbl = self.get_table(name="birth_names") + params = prison.dumps( + { + "datasource_type": "table", + "database_name": tbl.database.database_name, + "schema_name": tbl.schema, + "table_name": tbl.table_name, + } + ) + url = f"/datasource/external_metadata_by_name/?q={params}" + resp = self.get_json_resp(url) + col_names = {o.get("name") for o in resp} + self.assertEqual( + col_names, {"num_boys", "num", "gender", "name", "ds", "state", "num_girls"} + ) + + def 
test_external_metadata_by_name_for_virtual_table(self): + self.login(username="admin") + session = db.session + table = SqlaTable( + table_name="dummy_sql_table", + database=get_example_database(), + schema=get_example_default_schema(), + sql="select 123 as intcol, 'abc' as strcol", + ) + session.add(table) + session.commit() + + tbl = self.get_table(name="dummy_sql_table") + params = prison.dumps( + { + "datasource_type": "table", + "database_name": tbl.database.database_name, + "schema_name": tbl.schema, + "table_name": tbl.table_name, + } + ) + url = f"/datasource/external_metadata_by_name/?q={params}" + resp = self.get_json_resp(url) + assert {o.get("name") for o in resp} == {"intcol", "strcol"} + session.delete(tbl) + session.commit() + + def test_external_metadata_by_name_from_sqla_inspector(self): + self.login(username="admin") + example_database = get_example_database() + with create_test_table_context(example_database): + params = prison.dumps( + { + "datasource_type": "table", + "database_name": example_database.database_name, + "table_name": "test_table", + "schema_name": get_example_default_schema(), + } + ) + url = f"/datasource/external_metadata_by_name/?q={params}" + resp = self.get_json_resp(url) + col_names = {o.get("name") for o in resp} + self.assertEqual(col_names, {"first", "second"}) + + # No databases found + params = prison.dumps( + { + "datasource_type": "table", + "database_name": "foo", + "table_name": "bar", + } + ) + url = f"/datasource/external_metadata_by_name/?q={params}" + resp = self.client.get(url) + self.assertEqual(resp.status_code, DatasetNotFoundError.status) + self.assertEqual( + json.loads(resp.data.decode("utf-8")).get("error"), + DatasetNotFoundError.message, + ) + + # No table found + params = prison.dumps( + { + "datasource_type": "table", + "database_name": example_database.database_name, + "table_name": "fooooooooobarrrrrr", + } + ) + url = f"/datasource/external_metadata_by_name/?q={params}" + resp = self.client.get(url) + self.assertEqual(resp.status_code, DatasetNotFoundError.status) + self.assertEqual( + json.loads(resp.data.decode("utf-8")).get("error"), + DatasetNotFoundError.message, + ) + + # invalid query params + params = prison.dumps( + { + "datasource_type": "table", + } + ) + url = f"/datasource/external_metadata_by_name/?q={params}" + resp = self.get_json_resp(url) + self.assertIn("error", resp) + + def test_external_metadata_for_virtual_table_template_params(self): + self.login(username="admin") + session = db.session + table = SqlaTable( + table_name="dummy_sql_table_with_template_params", + database=get_example_database(), + schema=get_example_default_schema(), + sql="select {{ foo }} as intcol", + template_params=json.dumps({"foo": "123"}), + ) + session.add(table) + session.commit() + + table = self.get_table(name="dummy_sql_table_with_template_params") + url = f"/datasource/external_metadata/table/{table.id}/" + resp = self.get_json_resp(url) + assert {o.get("name") for o in resp} == {"intcol"} + session.delete(table) + session.commit() + + def test_external_metadata_for_malicious_virtual_table(self): + self.login(username="admin") + table = SqlaTable( + table_name="malicious_sql_table", + database=get_example_database(), + schema=get_example_default_schema(), + sql="delete table birth_names", + ) + with db_insert_temp_object(table): + url = f"/datasource/external_metadata/table/{table.id}/" + resp = self.get_json_resp(url) + self.assertEqual(resp["error"], "Only `SELECT` statements are allowed") + + def 
test_external_metadata_for_multistatement_virtual_table(self): + self.login(username="admin") + table = SqlaTable( + table_name="multistatement_sql_table", + database=get_example_database(), + schema=get_example_default_schema(), + sql="select 123 as intcol, 'abc' as strcol;" + "select 123 as intcol, 'abc' as strcol", + ) + with db_insert_temp_object(table): + url = f"/datasource/external_metadata/table/{table.id}/" + resp = self.get_json_resp(url) + self.assertEqual(resp["error"], "Only single queries supported") + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + @mock.patch("superset.connectors.sqla.models.SqlaTable.external_metadata") + def test_external_metadata_error_return_400(self, mock_get_datasource): + self.login(username="admin") + tbl = self.get_table(name="birth_names") + url = f"/datasource/external_metadata/table/{tbl.id}/" + + mock_get_datasource.side_effect = SupersetGenericDBErrorException("oops") + + pytest.raises( + SupersetGenericDBErrorException, + lambda: db.session.query(SqlaTable) + .filter_by(id=tbl.id) + .one_or_none() + .external_metadata(), + ) + + resp = self.client.get(url) + assert resp.status_code == 400 + + def compare_lists(self, l1, l2, key): + l2_lookup = {o.get(key): o for o in l2} + for obj1 in l1: + obj2 = l2_lookup.get(obj1.get(key)) + for k in obj1: + if k not in "id" and obj1.get(k): + self.assertEqual(obj1.get(k), obj2.get(k)) + + def test_save(self): + self.login(username="admin") + tbl_id = self.get_table(name="birth_names").id + + datasource_post = get_datasource_post() + datasource_post["id"] = tbl_id + datasource_post["owners"] = [1] + data = dict(data=json.dumps(datasource_post)) + resp = self.get_json_resp("/datasource/save/", data) + for k in datasource_post: + if k == "columns": + self.compare_lists(datasource_post[k], resp[k], "column_name") + elif k == "metrics": + self.compare_lists(datasource_post[k], resp[k], "metric_name") + elif k == "database": + self.assertEqual(resp[k]["id"], datasource_post[k]["id"]) + elif k == "owners": + self.assertEqual([o["id"] for o in resp[k]], datasource_post["owners"]) + else: + print(k) + self.assertEqual(resp[k], datasource_post[k]) + + def test_save_default_endpoint_validation_fail(self): + self.login(username="admin") + tbl_id = self.get_table(name="birth_names").id + + datasource_post = get_datasource_post() + datasource_post["id"] = tbl_id + datasource_post["owners"] = [1] + datasource_post["default_endpoint"] = "http://www.google.com" + data = dict(data=json.dumps(datasource_post)) + resp = self.client.post("/datasource/save/", data=data) + assert resp.status_code == 400 + + def test_save_default_endpoint_validation_unsafe(self): + self.app.config["PREVENT_UNSAFE_DEFAULT_URLS_ON_DATASET"] = False + self.login(username="admin") + tbl_id = self.get_table(name="birth_names").id + + datasource_post = get_datasource_post() + datasource_post["id"] = tbl_id + datasource_post["owners"] = [1] + datasource_post["default_endpoint"] = "http://www.google.com" + data = dict(data=json.dumps(datasource_post)) + resp = self.client.post("/datasource/save/", data=data) + assert resp.status_code == 200 + self.app.config["PREVENT_UNSAFE_DEFAULT_URLS_ON_DATASET"] = True + + def test_save_default_endpoint_validation_success(self): + self.login(username="admin") + tbl_id = self.get_table(name="birth_names").id + + datasource_post = get_datasource_post() + datasource_post["id"] = tbl_id + datasource_post["owners"] = [1] + datasource_post["default_endpoint"] = "http://localhost/superset/1" + data = 
dict(data=json.dumps(datasource_post)) + resp = self.client.post("/datasource/save/", data=data) + assert resp.status_code == 200 + + def save_datasource_from_dict(self, datasource_post): + data = dict(data=json.dumps(datasource_post)) + resp = self.get_json_resp("/datasource/save/", data) + return resp + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_change_database(self): + self.login(username="admin") + admin_user = self.get_user("admin") + + tbl = self.get_table(name="birth_names") + tbl_id = tbl.id + db_id = tbl.database_id + datasource_post = get_datasource_post() + datasource_post["id"] = tbl_id + datasource_post["owners"] = [admin_user.id] + + new_db = self.create_fake_db() + datasource_post["database"]["id"] = new_db.id + resp = self.save_datasource_from_dict(datasource_post) + self.assertEqual(resp["database"]["id"], new_db.id) + + datasource_post["database"]["id"] = db_id + resp = self.save_datasource_from_dict(datasource_post) + self.assertEqual(resp["database"]["id"], db_id) + + self.delete_fake_db() + + def test_save_duplicate_key(self): + self.login(username="admin") + admin_user = self.get_user("admin") + tbl_id = self.get_table(name="birth_names").id + + datasource_post = get_datasource_post() + datasource_post["id"] = tbl_id + datasource_post["owners"] = [admin_user.id] + datasource_post["columns"].extend( + [ + { + "column_name": "", + "filterable": True, + "groupby": True, + "expression": "", + "id": "somerandomid", + }, + { + "column_name": "", + "filterable": True, + "groupby": True, + "expression": "", + "id": "somerandomid2", + }, + ] + ) + data = dict(data=json.dumps(datasource_post)) + resp = self.get_json_resp("/datasource/save/", data, raise_on_error=False) + self.assertIn("Duplicate column name(s): ", resp["error"]) + + def test_get_datasource(self): + self.login(username="admin") + admin_user = self.get_user("admin") + tbl = self.get_table(name="birth_names") + + datasource_post = get_datasource_post() + datasource_post["id"] = tbl.id + datasource_post["owners"] = [admin_user.id] + data = dict(data=json.dumps(datasource_post)) + self.get_json_resp("/datasource/save/", data) + url = f"/datasource/get/{tbl.type}/{tbl.id}/" + resp = self.get_json_resp(url) + self.assertEqual(resp.get("type"), "table") + col_names = {o.get("column_name") for o in resp["columns"]} + self.assertEqual( + col_names, + { + "num_boys", + "num", + "gender", + "name", + "ds", + "state", + "num_girls", + "num_california", + }, + ) + + def test_get_datasource_with_health_check(self): + def my_check(datasource): + return "Warning message!" + + app.config["DATASET_HEALTH_CHECK"] = my_check + self.login(username="admin") + tbl = self.get_table(name="birth_names") + datasource = db.session.query(SqlaTable).filter_by(id=tbl.id).one_or_none() + assert datasource.health_check_message == "Warning message!" 
+ app.config["DATASET_HEALTH_CHECK"] = None + + def test_get_datasource_failed(self): + from superset.datasource.dao import DatasourceDAO + + pytest.raises( + DatasourceNotFound, + lambda: DatasourceDAO.get_datasource(db.session, "table", 9999999), + ) + + self.login(username="admin") + resp = self.get_json_resp("/datasource/get/table/500000/", raise_on_error=False) + self.assertEqual(resp.get("error"), "Datasource does not exist") + + def test_get_datasource_invalid_datasource_failed(self): + from superset.datasource.dao import DatasourceDAO + + pytest.raises( + DatasourceTypeNotSupportedError, + lambda: DatasourceDAO.get_datasource(db.session, "druid", 9999999), + ) + + self.login(username="admin") + resp = self.get_json_resp("/datasource/get/druid/500000/", raise_on_error=False) + self.assertEqual(resp.get("error"), "'druid' is not a valid DatasourceType") + + +def test_get_samples(test_client, login_as_admin, virtual_dataset): + """ + Dataset API: Test get dataset samples + """ + # 1. should cache data + uri = ( + f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=table" + ) + # feeds data + test_client.post(uri, json={}) + # get from cache + rv = test_client.post(uri, json={}) + assert rv.status_code == 200 + assert len(rv.json["result"]["data"]) == 10 + assert QueryCacheManager.has( + rv.json["result"]["cache_key"], + region=CacheRegion.DATA, + ) + assert rv.json["result"]["is_cached"] + + # 2. should read through cache data + uri2 = f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=table&force=true" + # feeds data + test_client.post(uri2, json={}) + # force query + rv2 = test_client.post(uri2, json={}) + assert rv2.status_code == 200 + assert len(rv2.json["result"]["data"]) == 10 + assert QueryCacheManager.has( + rv2.json["result"]["cache_key"], + region=CacheRegion.DATA, + ) + assert not rv2.json["result"]["is_cached"] + + # 3. 
data precision + assert "colnames" in rv2.json["result"] + assert "coltypes" in rv2.json["result"] + assert "data" in rv2.json["result"] + + eager_samples = virtual_dataset.database.get_df( + f"select * from ({virtual_dataset.sql}) as tbl" + f' limit {app.config["SAMPLES_ROW_LIMIT"]}' + ) + # the col3 is Decimal + eager_samples["col3"] = eager_samples["col3"].apply(float) + eager_samples = eager_samples.to_dict(orient="records") + assert eager_samples == rv2.json["result"]["data"] + + +def test_get_samples_with_incorrect_cc(test_client, login_as_admin, virtual_dataset): + TableColumn( + column_name="DUMMY CC", + type="VARCHAR(255)", + table=virtual_dataset, + expression="INCORRECT SQL", + ) + db.session.merge(virtual_dataset) + + uri = ( + f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=table" + ) + rv = test_client.post(uri, json={}) + assert rv.status_code == 422 + + assert "error" in rv.json + if virtual_dataset.database.db_engine_spec.engine_name == "PostgreSQL": + assert "INCORRECT SQL" in rv.json.get("error") + + +def test_get_samples_on_physical_dataset(test_client, login_as_admin, physical_dataset): + uri = ( + f"/datasource/samples?datasource_id={physical_dataset.id}&datasource_type=table" + ) + rv = test_client.post(uri, json={}) + assert rv.status_code == 200 + assert QueryCacheManager.has( + rv.json["result"]["cache_key"], region=CacheRegion.DATA + ) + assert len(rv.json["result"]["data"]) == 10 + + +def test_get_samples_with_filters(test_client, login_as_admin, virtual_dataset): + uri = ( + f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=table" + ) + rv = test_client.post(uri, json=None) + assert rv.status_code == 400 + + rv = test_client.post(uri, json={}) + assert rv.status_code == 200 + + rv = test_client.post(uri, json={"foo": "bar"}) + assert rv.status_code == 400 + + rv = test_client.post( + uri, json={"filters": [{"col": "col1", "op": "INVALID", "val": 0}]} + ) + assert rv.status_code == 400 + + rv = test_client.post( + uri, + json={ + "filters": [ + {"col": "col2", "op": "==", "val": "a"}, + {"col": "col1", "op": "==", "val": 0}, + ] + }, + ) + assert rv.status_code == 200 + assert rv.json["result"]["colnames"] == ["col1", "col2", "col3", "col4", "col5"] + assert rv.json["result"]["rowcount"] == 1 + + # empty results + rv = test_client.post( + uri, + json={ + "filters": [ + {"col": "col2", "op": "==", "val": "x"}, + ] + }, + ) + assert rv.status_code == 200 + assert rv.json["result"]["colnames"] == [] + assert rv.json["result"]["rowcount"] == 0 + + +def test_get_samples_with_time_filter(test_client, login_as_admin, physical_dataset): + uri = ( + f"/datasource/samples?datasource_id={physical_dataset.id}&datasource_type=table" + ) + payload = { + "granularity": "col5", + "time_range": "2000-01-02 : 2000-01-04", + } + rv = test_client.post(uri, json=payload) + assert len(rv.json["result"]["data"]) == 2 + if physical_dataset.database.backend != "sqlite": + assert [row["col5"] for row in rv.json["result"]["data"]] == [ + 946771200000.0, # 2000-01-02 00:00:00 + 946857600000.0, # 2000-01-03 00:00:00 + ] + assert rv.json["result"]["page"] == 1 + assert rv.json["result"]["per_page"] == app.config["SAMPLES_ROW_LIMIT"] + assert rv.json["result"]["total_count"] == 2 + + +def test_get_samples_with_multiple_filters( + test_client, login_as_admin, physical_dataset +): + # 1. 
empty response + uri = ( + f"/datasource/samples?datasource_id={physical_dataset.id}&datasource_type=table" + ) + payload = { + "granularity": "col5", + "time_range": "2000-01-02 : 2000-01-04", + "filters": [ + {"col": "col4", "op": "IS NOT NULL"}, + ], + } + rv = test_client.post(uri, json=payload) + assert len(rv.json["result"]["data"]) == 0 + + # 2. adhoc filters, time filters, and custom where + payload = { + "granularity": "col5", + "time_range": "2000-01-02 : 2000-01-04", + "filters": [ + {"col": "col2", "op": "==", "val": "c"}, + ], + "extras": {"where": "col3 = 1.2 and col4 is null"}, + } + rv = test_client.post(uri, json=payload) + assert len(rv.json["result"]["data"]) == 1 + assert rv.json["result"]["total_count"] == 1 + assert "2000-01-02" in rv.json["result"]["query"] + assert "2000-01-04" in rv.json["result"]["query"] + assert "col3 = 1.2" in rv.json["result"]["query"] + assert "col4 is null" in rv.json["result"]["query"] + assert "col2 = 'c'" in rv.json["result"]["query"] + + +def test_get_samples_pagination(test_client, login_as_admin, virtual_dataset): + # 1. default page, per_page and total_count + uri = ( + f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=table" + ) + rv = test_client.post(uri, json={}) + assert rv.json["result"]["page"] == 1 + assert rv.json["result"]["per_page"] == app.config["SAMPLES_ROW_LIMIT"] + assert rv.json["result"]["total_count"] == 10 + + # 2. incorrect per_page + per_pages = (app.config["SAMPLES_ROW_LIMIT"] + 1, 0, "xx") + for per_page in per_pages: + uri = f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=table&per_page={per_page}" + rv = test_client.post(uri, json={}) + assert rv.status_code == 400 + + # 3. incorrect page or datasource_type + uri = f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=table&page=xx" + rv = test_client.post(uri, json={}) + assert rv.status_code == 400 + + uri = f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=xx" + rv = test_client.post(uri, json={}) + assert rv.status_code == 400 + + # 4. turning pages + uri = f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=table&per_page=2&page=1" + rv = test_client.post(uri, json={}) + assert rv.json["result"]["page"] == 1 + assert rv.json["result"]["per_page"] == 2 + assert rv.json["result"]["total_count"] == 10 + assert [row["col1"] for row in rv.json["result"]["data"]] == [0, 1] + + uri = f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=table&per_page=2&page=2" + rv = test_client.post(uri, json={}) + assert rv.json["result"]["page"] == 2 + assert rv.json["result"]["per_page"] == 2 + assert rv.json["result"]["total_count"] == 10 + assert [row["col1"] for row in rv.json["result"]["data"]] == [2, 3] + + # 5. Exceeding the maximum pages + uri = f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=table&per_page=2&page=6" + rv = test_client.post(uri, json={}) + assert rv.json["result"]["page"] == 6 + assert rv.json["result"]["per_page"] == 2 + assert rv.json["result"]["total_count"] == 10 + assert [row["col1"] for row in rv.json["result"]["data"]] == [] diff --git a/tests/integration_tests/db_engine_specs/__init__.py b/tests/integration_tests/db_engine_specs/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/db_engine_specs/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/db_engine_specs/ascend_tests.py b/tests/integration_tests/db_engine_specs/ascend_tests.py new file mode 100644 index 0000000000000..ff12656743818 --- /dev/null +++ b/tests/integration_tests/db_engine_specs/ascend_tests.py @@ -0,0 +1,32 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from superset.db_engine_specs.ascend import AscendEngineSpec +from tests.integration_tests.db_engine_specs.base_tests import TestDbEngineSpec + + +class TestAscendDbEngineSpec(TestDbEngineSpec): + def test_convert_dttm(self): + dttm = self.get_dttm() + + self.assertEqual( + AscendEngineSpec.convert_dttm("DATE", dttm), "CAST('2019-01-02' AS DATE)" + ) + + self.assertEqual( + AscendEngineSpec.convert_dttm("TIMESTAMP", dttm), + "CAST('2019-01-02T03:04:05.678900' AS TIMESTAMP)", + ) diff --git a/tests/integration_tests/db_engine_specs/base_engine_spec_tests.py b/tests/integration_tests/db_engine_specs/base_engine_spec_tests.py new file mode 100644 index 0000000000000..87de98db1c1d2 --- /dev/null +++ b/tests/integration_tests/db_engine_specs/base_engine_spec_tests.py @@ -0,0 +1,522 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
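+# Integration tests for the shared BaseEngineSpec behaviour: SQL LIMIT rewriting,
+# time grain configuration, and connection parameter validation.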
+import datetime +from unittest import mock + +import pytest + +from superset.connectors.sqla.models import TableColumn +from superset.db_engine_specs import load_engine_specs +from superset.db_engine_specs.base import ( + BaseEngineSpec, + BasicParametersMixin, + builtin_time_grains, + LimitMethod, +) +from superset.db_engine_specs.mysql import MySQLEngineSpec +from superset.db_engine_specs.sqlite import SqliteEngineSpec +from superset.errors import ErrorLevel, SupersetError, SupersetErrorType +from superset.sql_parse import ParsedQuery +from superset.utils.database import get_example_database +from tests.integration_tests.db_engine_specs.base_tests import TestDbEngineSpec +from tests.integration_tests.test_app import app + +from ..fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) +from ..fixtures.energy_dashboard import ( + load_energy_table_data, + load_energy_table_with_slice, +) +from ..fixtures.pyodbcRow import Row + + +class TestDbEngineSpecs(TestDbEngineSpec): + def test_extract_limit_from_query(self, engine_spec_class=BaseEngineSpec): + q0 = "select * from table" + q1 = "select * from mytable limit 10" + q2 = "select * from (select * from my_subquery limit 10) where col=1 limit 20" + q3 = "select * from (select * from my_subquery limit 10);" + q4 = "select * from (select * from my_subquery limit 10) where col=1 limit 20;" + q5 = "select * from mytable limit 20, 10" + q6 = "select * from mytable limit 10 offset 20" + q7 = "select * from mytable limit" + q8 = "select * from mytable limit 10.0" + q9 = "select * from mytable limit x" + q10 = "select * from mytable limit 20, x" + q11 = "select * from mytable limit x offset 20" + + self.assertEqual(engine_spec_class.get_limit_from_sql(q0), None) + self.assertEqual(engine_spec_class.get_limit_from_sql(q1), 10) + self.assertEqual(engine_spec_class.get_limit_from_sql(q2), 20) + self.assertEqual(engine_spec_class.get_limit_from_sql(q3), None) + self.assertEqual(engine_spec_class.get_limit_from_sql(q4), 20) + self.assertEqual(engine_spec_class.get_limit_from_sql(q5), 10) + self.assertEqual(engine_spec_class.get_limit_from_sql(q6), 10) + self.assertEqual(engine_spec_class.get_limit_from_sql(q7), None) + self.assertEqual(engine_spec_class.get_limit_from_sql(q8), None) + self.assertEqual(engine_spec_class.get_limit_from_sql(q9), None) + self.assertEqual(engine_spec_class.get_limit_from_sql(q10), None) + self.assertEqual(engine_spec_class.get_limit_from_sql(q11), None) + + def test_wrapped_semi_tabs(self): + self.sql_limit_regex( + "SELECT * FROM a \t \n ; \t \n ", "SELECT * FROM a\nLIMIT 1000" + ) + + def test_simple_limit_query(self): + self.sql_limit_regex("SELECT * FROM a", "SELECT * FROM a\nLIMIT 1000") + + def test_modify_limit_query(self): + self.sql_limit_regex("SELECT * FROM a LIMIT 9999", "SELECT * FROM a LIMIT 1000") + + def test_limit_query_with_limit_subquery(self): # pylint: disable=invalid-name + self.sql_limit_regex( + "SELECT * FROM (SELECT * FROM a LIMIT 10) LIMIT 9999", + "SELECT * FROM (SELECT * FROM a LIMIT 10) LIMIT 1000", + ) + + def test_limit_query_without_force(self): + self.sql_limit_regex( + "SELECT * FROM a LIMIT 10", + "SELECT * FROM a LIMIT 10", + limit=11, + ) + + def test_limit_query_with_force(self): + self.sql_limit_regex( + "SELECT * FROM a LIMIT 10", + "SELECT * FROM a LIMIT 11", + limit=11, + force=True, + ) + + def test_limit_with_expr(self): + self.sql_limit_regex( + """ + SELECT + 'LIMIT 777' AS a + , b + FROM + table + LIMIT 99990""", + 
"""SELECT + 'LIMIT 777' AS a + , b + FROM + table + LIMIT 1000""", + ) + + def test_limit_expr_and_semicolon(self): + self.sql_limit_regex( + """ + SELECT + 'LIMIT 777' AS a + , b + FROM + table + LIMIT 99990 ;""", + """SELECT + 'LIMIT 777' AS a + , b + FROM + table + LIMIT 1000""", + ) + + def test_get_datatype(self): + self.assertEqual("VARCHAR", BaseEngineSpec.get_datatype("VARCHAR")) + + def test_limit_with_implicit_offset(self): + self.sql_limit_regex( + """ + SELECT + 'LIMIT 777' AS a + , b + FROM + table + LIMIT 99990, 999999""", + """SELECT + 'LIMIT 777' AS a + , b + FROM + table + LIMIT 99990, 1000""", + ) + + def test_limit_with_explicit_offset(self): + self.sql_limit_regex( + """ + SELECT + 'LIMIT 777' AS a + , b + FROM + table + LIMIT 99990 + OFFSET 999999""", + """SELECT + 'LIMIT 777' AS a + , b + FROM + table + LIMIT 1000 + OFFSET 999999""", + ) + + def test_limit_with_non_token_limit(self): + self.sql_limit_regex( + """SELECT 'LIMIT 777'""", """SELECT 'LIMIT 777'\nLIMIT 1000""" + ) + + def test_limit_with_fetch_many(self): + class DummyEngineSpec(BaseEngineSpec): + limit_method = LimitMethod.FETCH_MANY + + self.sql_limit_regex( + "SELECT * FROM table", "SELECT * FROM table", DummyEngineSpec + ) + + def test_engine_time_grain_validity(self): + time_grains = set(builtin_time_grains.keys()) + # loop over all subclasses of BaseEngineSpec + for engine in load_engine_specs(): + if engine is not BaseEngineSpec: + # make sure time grain functions have been defined + self.assertGreater(len(engine.get_time_grain_expressions()), 0) + # make sure all defined time grains are supported + defined_grains = {grain.duration for grain in engine.get_time_grains()} + intersection = time_grains.intersection(defined_grains) + self.assertSetEqual(defined_grains, intersection, engine) + + def test_get_time_grain_expressions(self): + time_grains = MySQLEngineSpec.get_time_grain_expressions() + self.assertEqual( + list(time_grains.keys()), + [ + None, + "PT1S", + "PT1M", + "PT1H", + "P1D", + "P1W", + "P1M", + "P3M", + "P1Y", + "1969-12-29T00:00:00Z/P1W", + ], + ) + + def test_get_table_names(self): + inspector = mock.Mock() + inspector.get_table_names = mock.Mock(return_value=["schema.table", "table_2"]) + inspector.get_foreign_table_names = mock.Mock(return_value=["table_3"]) + + """ Make sure base engine spec removes schema name from table name + ie. when try_remove_schema_from_table_name == True. """ + base_result_expected = {"table", "table_2"} + base_result = BaseEngineSpec.get_table_names( + database=mock.ANY, schema="schema", inspector=inspector + ) + assert base_result_expected == base_result + + @pytest.mark.usefixtures("load_energy_table_with_slice") + def test_column_datatype_to_string(self): + example_db = get_example_database() + sqla_table = example_db.get_table("energy_usage") + dialect = example_db.get_dialect() + + # TODO: fix column type conversion for presto. 
+ if example_db.backend == "presto": + return + + col_names = [ + example_db.db_engine_spec.column_datatype_to_string(c.type, dialect) + for c in sqla_table.columns + ] + if example_db.backend == "postgresql": + expected = ["VARCHAR(255)", "VARCHAR(255)", "DOUBLE PRECISION"] + elif example_db.backend == "hive": + expected = ["STRING", "STRING", "FLOAT"] + else: + expected = ["VARCHAR(255)", "VARCHAR(255)", "FLOAT"] + self.assertEqual(col_names, expected) + + def test_convert_dttm(self): + dttm = self.get_dttm() + self.assertIsNone(BaseEngineSpec.convert_dttm("", dttm, db_extra=None)) + + def test_pyodbc_rows_to_tuples(self): + # Test for case when pyodbc.Row is returned (odbc driver) + data = [ + Row((1, 1, datetime.datetime(2017, 10, 19, 23, 39, 16, 660000))), + Row((2, 2, datetime.datetime(2018, 10, 19, 23, 39, 16, 660000))), + ] + expected = [ + (1, 1, datetime.datetime(2017, 10, 19, 23, 39, 16, 660000)), + (2, 2, datetime.datetime(2018, 10, 19, 23, 39, 16, 660000)), + ] + result = BaseEngineSpec.pyodbc_rows_to_tuples(data) + self.assertListEqual(result, expected) + + def test_pyodbc_rows_to_tuples_passthrough(self): + # Test for case when tuples are returned + data = [ + (1, 1, datetime.datetime(2017, 10, 19, 23, 39, 16, 660000)), + (2, 2, datetime.datetime(2018, 10, 19, 23, 39, 16, 660000)), + ] + result = BaseEngineSpec.pyodbc_rows_to_tuples(data) + self.assertListEqual(result, data) + + @mock.patch("superset.models.core.Database.db_engine_spec", BaseEngineSpec) + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_calculated_column_in_order_by_base_engine_spec(self): + table = self.get_table(name="birth_names") + TableColumn( + column_name="gender_cc", + type="VARCHAR(255)", + table=table, + expression=""" + case + when gender='boy' then 'male' + else 'female' + end + """, + ) + + table.database.sqlalchemy_uri = "sqlite://" + query_obj = { + "groupby": ["gender_cc"], + "is_timeseries": False, + "filter": [], + "orderby": [["gender_cc", True]], + } + sql = table.get_query_str(query_obj) + assert ( + """ORDER BY case + when gender='boy' then 'male' + else 'female' + end ASC;""" + in sql + ) + + +def test_is_readonly(): + def is_readonly(sql: str) -> bool: + return BaseEngineSpec.is_readonly_query(ParsedQuery(sql)) + + assert is_readonly("SHOW LOCKS test EXTENDED") + assert not is_readonly("SET hivevar:desc='Legislators'") + assert not is_readonly("UPDATE t1 SET col1 = NULL") + assert is_readonly("EXPLAIN SELECT 1") + assert is_readonly("SELECT 1") + assert is_readonly("WITH (SELECT 1) bla SELECT * from bla") + assert is_readonly("SHOW CATALOGS") + assert is_readonly("SHOW TABLES") + + +def test_time_grain_denylist(): + config = app.config.copy() + app.config["TIME_GRAIN_DENYLIST"] = ["PT1M"] + + with app.app_context(): + time_grain_functions = SqliteEngineSpec.get_time_grain_expressions() + assert not "PT1M" in time_grain_functions + + app.config = config + + +def test_time_grain_addons(): + config = app.config.copy() + app.config["TIME_GRAIN_ADDONS"] = {"PTXM": "x seconds"} + app.config["TIME_GRAIN_ADDON_EXPRESSIONS"] = {"sqlite": {"PTXM": "ABC({col})"}} + + with app.app_context(): + time_grains = SqliteEngineSpec.get_time_grains() + time_grain_addon = time_grains[-1] + assert "PTXM" == time_grain_addon.duration + assert "x seconds" == time_grain_addon.label + + app.config = config + + +def test_get_time_grain_with_config(): + """Should concatenate from configs and then sort in the proper order""" + config = app.config.copy() + + 
app.config["TIME_GRAIN_ADDON_EXPRESSIONS"] = { + "mysql": { + "PT2H": "foo", + "PT4H": "foo", + "PT6H": "foo", + "PT8H": "foo", + "PT10H": "foo", + "PT12H": "foo", + "PT1S": "foo", + } + } + + with app.app_context(): + time_grains = MySQLEngineSpec.get_time_grain_expressions() + assert set(time_grains.keys()) == { + None, + "PT1S", + "PT1M", + "PT1H", + "PT2H", + "PT4H", + "PT6H", + "PT8H", + "PT10H", + "PT12H", + "P1D", + "P1W", + "P1M", + "P3M", + "P1Y", + "1969-12-29T00:00:00Z/P1W", + } + + app.config = config + + +def test_get_time_grain_with_unknown_values(): + """Should concatenate from configs and then sort in the proper order + putting unknown patterns at the end""" + config = app.config.copy() + + app.config["TIME_GRAIN_ADDON_EXPRESSIONS"] = { + "mysql": { + "PT2H": "foo", + "weird": "foo", + "PT12H": "foo", + } + } + + with app.app_context(): + time_grains = MySQLEngineSpec.get_time_grain_expressions() + assert list(time_grains)[-1] == "weird" + + app.config = config + + +@mock.patch("superset.db_engine_specs.base.is_hostname_valid") +@mock.patch("superset.db_engine_specs.base.is_port_open") +def test_validate(is_port_open, is_hostname_valid): + is_hostname_valid.return_value = True + is_port_open.return_value = True + + properties = { + "parameters": { + "host": "localhost", + "port": 5432, + "username": "username", + "password": "password", + "database": "dbname", + "query": {"sslmode": "verify-full"}, + } + } + errors = BasicParametersMixin.validate_parameters(properties) + assert errors == [] + + +def test_validate_parameters_missing(): + properties = { + "parameters": { + "host": "", + "port": None, + "username": "", + "password": "", + "database": "", + "query": {}, + } + } + errors = BasicParametersMixin.validate_parameters(properties) + assert errors == [ + SupersetError( + message=( + "One or more parameters are missing: " "database, host, port, username" + ), + error_type=SupersetErrorType.CONNECTION_MISSING_PARAMETERS_ERROR, + level=ErrorLevel.WARNING, + extra={"missing": ["database", "host", "port", "username"]}, + ), + ] + + +@mock.patch("superset.db_engine_specs.base.is_hostname_valid") +def test_validate_parameters_invalid_host(is_hostname_valid): + is_hostname_valid.return_value = False + + properties = { + "parameters": { + "host": "localhost", + "port": None, + "username": "username", + "password": "password", + "database": "dbname", + "query": {"sslmode": "verify-full"}, + } + } + errors = BasicParametersMixin.validate_parameters(properties) + assert errors == [ + SupersetError( + message="One or more parameters are missing: port", + error_type=SupersetErrorType.CONNECTION_MISSING_PARAMETERS_ERROR, + level=ErrorLevel.WARNING, + extra={"missing": ["port"]}, + ), + SupersetError( + message="The hostname provided can't be resolved.", + error_type=SupersetErrorType.CONNECTION_INVALID_HOSTNAME_ERROR, + level=ErrorLevel.ERROR, + extra={"invalid": ["host"]}, + ), + ] + + +@mock.patch("superset.db_engine_specs.base.is_hostname_valid") +@mock.patch("superset.db_engine_specs.base.is_port_open") +def test_validate_parameters_port_closed(is_port_open, is_hostname_valid): + is_hostname_valid.return_value = True + is_port_open.return_value = False + + properties = { + "parameters": { + "host": "localhost", + "port": 5432, + "username": "username", + "password": "password", + "database": "dbname", + "query": {"sslmode": "verify-full"}, + } + } + errors = BasicParametersMixin.validate_parameters(properties) + assert errors == [ + SupersetError( + message="The port is closed.", 
+ error_type=SupersetErrorType.CONNECTION_PORT_CLOSED_ERROR, + level=ErrorLevel.ERROR, + extra={ + "invalid": ["port"], + "issue_codes": [ + {"code": 1008, "message": "Issue 1008 - The port is closed."} + ], + }, + ) + ] diff --git a/tests/integration_tests/db_engine_specs/base_tests.py b/tests/integration_tests/db_engine_specs/base_tests.py new file mode 100644 index 0000000000000..e20ea35ae4131 --- /dev/null +++ b/tests/integration_tests/db_engine_specs/base_tests.py @@ -0,0 +1,38 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# isort:skip_file +from datetime import datetime +from typing import Tuple, Type + +from tests.integration_tests.test_app import app +from tests.integration_tests.base_tests import SupersetTestCase +from superset.db_engine_specs.base import BaseEngineSpec +from superset.models.core import Database + + +class TestDbEngineSpec(SupersetTestCase): + def sql_limit_regex( + self, + sql, + expected_sql, + engine_spec_class=BaseEngineSpec, + limit=1000, + force=False, + ): + main = Database(database_name="test_database", sqlalchemy_uri="sqlite://") + limited = engine_spec_class.apply_limit_to_sql(sql, limit, main, force) + self.assertEqual(expected_sql, limited) diff --git a/tests/integration_tests/db_engine_specs/bigquery_tests.py b/tests/integration_tests/db_engine_specs/bigquery_tests.py new file mode 100644 index 0000000000000..574a2b75e32ca --- /dev/null +++ b/tests/integration_tests/db_engine_specs/bigquery_tests.py @@ -0,0 +1,366 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
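+# Integration tests for the BigQuery engine spec: column label compatibility,
+# time grain expressions, fetch_data, extra table metadata, df_to_sql, and error extraction.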
+import sys +import unittest.mock as mock + +import pytest +from pandas import DataFrame +from sqlalchemy import column + +from superset.connectors.sqla.models import TableColumn +from superset.db_engine_specs.base import BaseEngineSpec +from superset.db_engine_specs.bigquery import BigQueryEngineSpec +from superset.errors import ErrorLevel, SupersetError, SupersetErrorType +from superset.sql_parse import Table +from tests.integration_tests.db_engine_specs.base_tests import TestDbEngineSpec +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) + + +class TestBigQueryDbEngineSpec(TestDbEngineSpec): + def test_bigquery_sqla_column_label(self): + """ + DB Eng Specs (bigquery): Test column label + """ + test_cases = { + "Col": "Col", + "SUM(x)": "SUM_x__5f110", + "SUM[x]": "SUM_x__7ebe1", + "12345_col": "_12345_col_8d390", + } + for original, expected in test_cases.items(): + actual = BigQueryEngineSpec.make_label_compatible(column(original).name) + self.assertEqual(actual, expected) + + def test_timegrain_expressions(self): + """ + DB Eng Specs (bigquery): Test time grain expressions + """ + col = column("temporal") + test_cases = { + "DATE": "DATE_TRUNC(temporal, HOUR)", + "TIME": "TIME_TRUNC(temporal, HOUR)", + "DATETIME": "DATETIME_TRUNC(temporal, HOUR)", + "TIMESTAMP": "TIMESTAMP_TRUNC(temporal, HOUR)", + } + for type_, expected in test_cases.items(): + col.type = type_ + actual = BigQueryEngineSpec.get_timestamp_expr( + col=col, pdf=None, time_grain="PT1H" + ) + self.assertEqual(str(actual), expected) + + def test_custom_minute_timegrain_expressions(self): + """ + DB Eng Specs (bigquery): Test time grain expressions + """ + col = column("temporal") + test_cases = { + "DATE": "CAST(TIMESTAMP_SECONDS(" + "5*60 * DIV(UNIX_SECONDS(CAST(temporal AS TIMESTAMP)), 5*60)" + ") AS DATE)", + "DATETIME": "CAST(TIMESTAMP_SECONDS(" + "5*60 * DIV(UNIX_SECONDS(CAST(temporal AS TIMESTAMP)), 5*60)" + ") AS DATETIME)", + "TIMESTAMP": "CAST(TIMESTAMP_SECONDS(" + "5*60 * DIV(UNIX_SECONDS(CAST(temporal AS TIMESTAMP)), 5*60)" + ") AS TIMESTAMP)", + } + for type_, expected in test_cases.items(): + col.type = type_ + actual = BigQueryEngineSpec.get_timestamp_expr( + col=col, pdf=None, time_grain="PT5M" + ) + assert str(actual) == expected + + def test_fetch_data(self): + """ + DB Eng Specs (bigquery): Test fetch data + """ + # Mock a google.cloud.bigquery.table.Row + class Row(object): + def __init__(self, value): + self._value = value + + def values(self): + return self._value + + data1 = [(1, "foo")] + with mock.patch.object(BaseEngineSpec, "fetch_data", return_value=data1): + result = BigQueryEngineSpec.fetch_data(None, 0) + self.assertEqual(result, data1) + + data2 = [Row(1), Row(2)] + with mock.patch.object(BaseEngineSpec, "fetch_data", return_value=data2): + result = BigQueryEngineSpec.fetch_data(None, 0) + self.assertEqual(result, [1, 2]) + + def test_extra_table_metadata(self): + """ + DB Eng Specs (bigquery): Test extra table metadata + """ + database = mock.Mock() + # Test no indexes + database.get_indexes = mock.MagicMock(return_value=None) + result = BigQueryEngineSpec.extra_table_metadata( + database, "some_table", "some_schema" + ) + self.assertEqual(result, {}) + + index_metadata = [ + { + "name": "clustering", + "column_names": ["c_col1", "c_col2", "c_col3"], + }, + { + "name": "partition", + "column_names": ["p_col1", "p_col2", "p_col3"], + }, + ] + expected_result = { + "partitions": {"cols": [["p_col1", 
"p_col2", "p_col3"]]}, + "clustering": {"cols": [["c_col1", "c_col2", "c_col3"]]}, + } + database.get_indexes = mock.MagicMock(return_value=index_metadata) + result = BigQueryEngineSpec.extra_table_metadata( + database, "some_table", "some_schema" + ) + self.assertEqual(result, expected_result) + + def test_normalize_indexes(self): + """ + DB Eng Specs (bigquery): Test extra table metadata + """ + indexes = [{"name": "partition", "column_names": [None], "unique": False}] + normalized_idx = BigQueryEngineSpec.normalize_indexes(indexes) + self.assertEqual(normalized_idx, []) + + indexes = [{"name": "partition", "column_names": ["dttm"], "unique": False}] + normalized_idx = BigQueryEngineSpec.normalize_indexes(indexes) + self.assertEqual(normalized_idx, indexes) + + indexes = [ + {"name": "partition", "column_names": ["dttm", None], "unique": False} + ] + normalized_idx = BigQueryEngineSpec.normalize_indexes(indexes) + self.assertEqual( + normalized_idx, + [{"name": "partition", "column_names": ["dttm"], "unique": False}], + ) + + @mock.patch("superset.db_engine_specs.bigquery.BigQueryEngineSpec.get_engine") + def test_df_to_sql(self, mock_get_engine): + """ + DB Eng Specs (bigquery): Test DataFrame to SQL contract + """ + # test missing google.oauth2 dependency + sys.modules["pandas_gbq"] = mock.MagicMock() + df = DataFrame() + database = mock.MagicMock() + with self.assertRaises(Exception): + BigQueryEngineSpec.df_to_sql( + database=database, + table=Table(table="name", schema="schema"), + df=df, + to_sql_kwargs={}, + ) + + invalid_kwargs = [ + {"name": "some_name"}, + {"schema": "some_schema"}, + {"con": "some_con"}, + {"name": "some_name", "con": "some_con"}, + {"name": "some_name", "schema": "some_schema"}, + {"con": "some_con", "schema": "some_schema"}, + ] + # Test check for missing schema. + sys.modules["google.oauth2"] = mock.MagicMock() + for invalid_kwarg in invalid_kwargs: + self.assertRaisesRegex( + Exception, + "The table schema must be defined", + BigQueryEngineSpec.df_to_sql, + database=database, + table=Table(table="name"), + df=df, + to_sql_kwargs=invalid_kwarg, + ) + + import pandas_gbq + from google.oauth2 import service_account + + pandas_gbq.to_gbq = mock.Mock() + service_account.Credentials.from_service_account_info = mock.MagicMock( + return_value="account_info" + ) + + mock_get_engine.return_value.__enter__.return_value.url.host = "google-host" + mock_get_engine.return_value.__enter__.return_value.dialect.credentials_info = ( + "secrets" + ) + + BigQueryEngineSpec.df_to_sql( + database=database, + table=Table(table="name", schema="schema"), + df=df, + to_sql_kwargs={"if_exists": "extra_key"}, + ) + + pandas_gbq.to_gbq.assert_called_with( + df, + project_id="google-host", + destination_table="schema.name", + credentials="account_info", + if_exists="extra_key", + ) + + def test_extract_errors(self): + msg = "403 POST https://bigquery.googleapis.com/bigquery/v2/projects/test-keel-310804/jobs?prettyPrint=false: Access Denied: Project profound-keel-310804: User does not have bigquery.jobs.create permission in project profound-keel-310804" + result = BigQueryEngineSpec.extract_errors(Exception(msg)) + assert result == [ + SupersetError( + message='Unable to connect. 
Verify that the following roles are set on the service account: "BigQuery Data Viewer", "BigQuery Metadata Viewer", "BigQuery Job User" and the following permissions are set "bigquery.readsessions.create", "bigquery.readsessions.getData"', + error_type=SupersetErrorType.CONNECTION_DATABASE_PERMISSIONS_ERROR, + level=ErrorLevel.ERROR, + extra={ + "engine_name": "Google BigQuery", + "issue_codes": [ + { + "code": 1017, + "message": "", + } + ], + }, + ) + ] + + msg = "bigquery error: 404 Not found: Dataset fakeDataset:bogusSchema was not found in location" + result = BigQueryEngineSpec.extract_errors(Exception(msg)) + assert result == [ + SupersetError( + message='The schema "bogusSchema" does not exist. A valid schema must be used to run this query.', + error_type=SupersetErrorType.SCHEMA_DOES_NOT_EXIST_ERROR, + level=ErrorLevel.ERROR, + extra={ + "engine_name": "Google BigQuery", + "issue_codes": [ + { + "code": 1003, + "message": "Issue 1003 - There is a syntax error in the SQL query. Perhaps there was a misspelling or a typo.", + }, + { + "code": 1004, + "message": "Issue 1004 - The column was deleted or renamed in the database.", + }, + ], + }, + ) + ] + + msg = 'Table name "badtable" missing dataset while no default dataset is set in the request' + result = BigQueryEngineSpec.extract_errors(Exception(msg)) + assert result == [ + SupersetError( + message='The table "badtable" does not exist. A valid table must be used to run this query.', + error_type=SupersetErrorType.TABLE_DOES_NOT_EXIST_ERROR, + level=ErrorLevel.ERROR, + extra={ + "engine_name": "Google BigQuery", + "issue_codes": [ + { + "code": 1003, + "message": "Issue 1003 - There is a syntax error in the SQL query. Perhaps there was a misspelling or a typo.", + }, + { + "code": 1005, + "message": "Issue 1005 - The table was deleted or renamed in the database.", + }, + ], + }, + ) + ] + + msg = "Unrecognized name: badColumn at [1:8]" + result = BigQueryEngineSpec.extract_errors(Exception(msg)) + assert result == [ + SupersetError( + message='We can\'t seem to resolve column "badColumn" at line 1:8.', + error_type=SupersetErrorType.COLUMN_DOES_NOT_EXIST_ERROR, + level=ErrorLevel.ERROR, + extra={ + "engine_name": "Google BigQuery", + "issue_codes": [ + { + "code": 1003, + "message": "Issue 1003 - There is a syntax error in the SQL query. Perhaps there was a misspelling or a typo.", + }, + { + "code": 1004, + "message": "Issue 1004 - The column was deleted or renamed in the database.", + }, + ], + }, + ) + ] + + msg = 'Syntax error: Expected end of input but got identifier "fromm"' + result = BigQueryEngineSpec.extract_errors(Exception(msg)) + assert result == [ + SupersetError( + message='Please check your query for syntax errors at or near "fromm". 
Then, try running your query again.', + error_type=SupersetErrorType.SYNTAX_ERROR, + level=ErrorLevel.ERROR, + extra={ + "engine_name": "Google BigQuery", + "issue_codes": [ + { + "code": 1030, + "message": "Issue 1030 - The query has a syntax error.", + } + ], + }, + ) + ] + + @mock.patch("superset.models.core.Database.db_engine_spec", BigQueryEngineSpec) + @mock.patch("sqlalchemy_bigquery._helpers.create_bigquery_client", mock.Mock) + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_calculated_column_in_order_by(self): + table = self.get_table(name="birth_names") + TableColumn( + column_name="gender_cc", + type="VARCHAR(255)", + table=table, + expression=""" + case + when gender='boy' then 'male' + else 'female' + end + """, + ) + + table.database.sqlalchemy_uri = "bigquery://" + query_obj = { + "groupby": ["gender_cc"], + "is_timeseries": False, + "filter": [], + "orderby": [["gender_cc", True]], + } + sql = table.get_query_str(query_obj) + assert "ORDER BY gender_cc ASC" in sql diff --git a/tests/integration_tests/db_engine_specs/databricks_tests.py b/tests/integration_tests/db_engine_specs/databricks_tests.py new file mode 100644 index 0000000000000..5ff20b7347af2 --- /dev/null +++ b/tests/integration_tests/db_engine_specs/databricks_tests.py @@ -0,0 +1,61 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from unittest import mock + +from superset.db_engine_specs import get_engine_spec +from superset.db_engine_specs.databricks import DatabricksNativeEngineSpec +from tests.integration_tests.db_engine_specs.base_tests import TestDbEngineSpec +from tests.integration_tests.fixtures.certificates import ssl_certificate +from tests.integration_tests.fixtures.database import default_db_extra + + +class TestDatabricksDbEngineSpec(TestDbEngineSpec): + def test_get_engine_spec(self): + """ + DB Eng Specs (databricks): Test "databricks" in engine spec + """ + assert get_engine_spec("databricks", "connector").engine == "databricks" + assert get_engine_spec("databricks", "pyodbc").engine == "databricks" + assert get_engine_spec("databricks", "pyhive").engine == "databricks" + + def test_extras_without_ssl(self): + db = mock.Mock() + db.extra = default_db_extra + db.server_cert = None + extras = DatabricksNativeEngineSpec.get_extra_params(db) + assert extras == { + "engine_params": { + "connect_args": { + "_user_agent_entry": "Apache Superset", + "http_headers": [("User-Agent", "Apache Superset")], + }, + }, + "metadata_cache_timeout": {}, + "metadata_params": {}, + "schemas_allowed_for_file_upload": [], + } + + def test_extras_with_ssl_custom(self): + db = mock.Mock() + db.extra = default_db_extra.replace( + '"engine_params": {}', + '"engine_params": {"connect_args": {"ssl": "1"}}', + ) + db.server_cert = ssl_certificate + extras = DatabricksNativeEngineSpec.get_extra_params(db) + connect_args = extras["engine_params"]["connect_args"] + assert connect_args["ssl"] == "1" diff --git a/tests/integration_tests/db_engine_specs/gsheets_tests.py b/tests/integration_tests/db_engine_specs/gsheets_tests.py new file mode 100644 index 0000000000000..fe34b7266753a --- /dev/null +++ b/tests/integration_tests/db_engine_specs/gsheets_tests.py @@ -0,0 +1,44 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from superset.db_engine_specs.gsheets import GSheetsEngineSpec +from superset.errors import ErrorLevel, SupersetError, SupersetErrorType +from tests.integration_tests.db_engine_specs.base_tests import TestDbEngineSpec + + +class TestGsheetsDbEngineSpec(TestDbEngineSpec): + def test_extract_errors(self): + """ + Test that custom error messages are extracted correctly. + """ + msg = 'SQLError: near "fromm": syntax error' + result = GSheetsEngineSpec.extract_errors(Exception(msg)) + assert result == [ + SupersetError( + message='Please check your query for syntax errors near "fromm". 
Then, try running your query again.', + error_type=SupersetErrorType.SYNTAX_ERROR, + level=ErrorLevel.ERROR, + extra={ + "engine_name": "Google Sheets", + "issue_codes": [ + { + "code": 1030, + "message": "Issue 1030 - The query has a syntax error.", + } + ], + }, + ) + ] diff --git a/tests/integration_tests/db_engine_specs/hive_tests.py b/tests/integration_tests/db_engine_specs/hive_tests.py new file mode 100644 index 0000000000000..b63f64ab03cb8 --- /dev/null +++ b/tests/integration_tests/db_engine_specs/hive_tests.py @@ -0,0 +1,434 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# isort:skip_file +from datetime import datetime +from unittest import mock +from typing import List + +import pytest +import pandas as pd +from sqlalchemy.sql import select + +from superset.db_engine_specs.hive import HiveEngineSpec, upload_to_s3 +from superset.exceptions import SupersetException +from superset.sql_parse import Table, ParsedQuery +from tests.integration_tests.test_app import app + + +def test_0_progress(): + log = """ + 17/02/07 18:26:27 INFO log.PerfLogger: + 17/02/07 18:26:27 INFO log.PerfLogger: + """.split( + "\n" + ) + assert HiveEngineSpec.progress(log) == 0 + + +def test_number_of_jobs_progress(): + log = """ + 17/02/07 19:15:55 INFO ql.Driver: Total jobs = 2 + """.split( + "\n" + ) + assert HiveEngineSpec.progress(log) == 0 + + +def test_job_1_launched_progress(): + log = """ + 17/02/07 19:15:55 INFO ql.Driver: Total jobs = 2 + 17/02/07 19:15:55 INFO ql.Driver: Launching Job 1 out of 2 + """.split( + "\n" + ) + assert HiveEngineSpec.progress(log) == 0 + + +def test_job_1_launched_stage_1(): + log = """ + 17/02/07 19:15:55 INFO ql.Driver: Total jobs = 2 + 17/02/07 19:15:55 INFO ql.Driver: Launching Job 1 out of 2 + 17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 0%, reduce = 0% + """.split( + "\n" + ) + assert HiveEngineSpec.progress(log) == 0 + + +def test_job_1_launched_stage_1_map_40_progress(): # pylint: disable=invalid-name + log = """ + 17/02/07 19:15:55 INFO ql.Driver: Total jobs = 2 + 17/02/07 19:15:55 INFO ql.Driver: Launching Job 1 out of 2 + 17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 0%, reduce = 0% + 17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 40%, reduce = 0% + """.split( + "\n" + ) + assert HiveEngineSpec.progress(log) == 10 + + +def test_job_1_launched_stage_1_map_80_reduce_40_progress(): # pylint: disable=invalid-name + log = """ + 17/02/07 19:15:55 INFO ql.Driver: Total jobs = 2 + 17/02/07 19:15:55 INFO ql.Driver: Launching Job 1 out of 2 + 17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 0%, reduce = 0% + 17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 40%, reduce = 0% + 17/02/07 19:16:09 INFO exec.Task: 
2017-02-07 19:16:09,173 Stage-1 map = 80%, reduce = 40% + """.split( + "\n" + ) + assert HiveEngineSpec.progress(log) == 30 + + +def test_job_1_launched_stage_2_stages_progress(): # pylint: disable=invalid-name + log = """ + 17/02/07 19:15:55 INFO ql.Driver: Total jobs = 2 + 17/02/07 19:15:55 INFO ql.Driver: Launching Job 1 out of 2 + 17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 0%, reduce = 0% + 17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 40%, reduce = 0% + 17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 80%, reduce = 40% + 17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-2 map = 0%, reduce = 0% + 17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 100%, reduce = 0% + """.split( + "\n" + ) + assert HiveEngineSpec.progress(log) == 12 + + +def test_job_2_launched_stage_2_stages_progress(): # pylint: disable=invalid-name + log = """ + 17/02/07 19:15:55 INFO ql.Driver: Total jobs = 2 + 17/02/07 19:15:55 INFO ql.Driver: Launching Job 1 out of 2 + 17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 100%, reduce = 0% + 17/02/07 19:15:55 INFO ql.Driver: Launching Job 2 out of 2 + 17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 0%, reduce = 0% + 17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 40%, reduce = 0% + """.split( + "\n" + ) + assert HiveEngineSpec.progress(log) == 60 + + +def test_hive_error_msg(): + msg = ( + '{...} errorMessage="Error while compiling statement: FAILED: ' + "SemanticException [Error 10001]: Line 4" + ":5 Table not found 'fact_ridesfdslakj'\", statusCode=3, " + "sqlState='42S02', errorCode=10001)){...}" + ) + assert HiveEngineSpec.extract_error_message(Exception(msg)) == ( + "hive error: Error while compiling statement: FAILED: " + "SemanticException [Error 10001]: Line 4:5 " + "Table not found 'fact_ridesfdslakj'" + ) + + e = Exception("Some string that doesn't match the regex") + assert HiveEngineSpec.extract_error_message(e) == f"hive error: {e}" + + msg = ( + "errorCode=10001, " + 'errorMessage="Error while compiling statement"), operationHandle' + '=None)"' + ) + assert ( + HiveEngineSpec.extract_error_message(Exception(msg)) + == "hive error: Error while compiling statement" + ) + + +def test_df_to_csv() -> None: + with pytest.raises(SupersetException): + HiveEngineSpec.df_to_sql( + mock.MagicMock(), + Table("foobar"), + pd.DataFrame(), + {"if_exists": "append"}, + ) + + +@mock.patch("superset.db_engine_specs.hive.g", spec={}) +def test_df_to_sql_if_exists_fail(mock_g): + mock_g.user = True + mock_database = mock.MagicMock() + mock_database.get_df.return_value.empty = False + with pytest.raises(SupersetException, match="Table already exists"): + HiveEngineSpec.df_to_sql( + mock_database, Table("foobar"), pd.DataFrame(), {"if_exists": "fail"} + ) + + +@mock.patch("superset.db_engine_specs.hive.g", spec={}) +def test_df_to_sql_if_exists_fail_with_schema(mock_g): + mock_g.user = True + mock_database = mock.MagicMock() + mock_database.get_df.return_value.empty = False + with pytest.raises(SupersetException, match="Table already exists"): + HiveEngineSpec.df_to_sql( + mock_database, + Table(table="foobar", schema="schema"), + pd.DataFrame(), + {"if_exists": "fail"}, + ) + + +@mock.patch("superset.db_engine_specs.hive.g", spec={}) +@mock.patch("superset.db_engine_specs.hive.upload_to_s3") +def test_df_to_sql_if_exists_replace(mock_upload_to_s3, mock_g): + config = app.config.copy() + 
app.config["CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC"]: lambda *args: "" + mock_upload_to_s3.return_value = "mock-location" + mock_g.user = True + mock_database = mock.MagicMock() + mock_database.get_df.return_value.empty = False + mock_execute = mock.MagicMock(return_value=True) + mock_database.get_sqla_engine_with_context.return_value.__enter__.return_value.execute = ( + mock_execute + ) + table_name = "foobar" + + with app.app_context(): + HiveEngineSpec.df_to_sql( + mock_database, + Table(table=table_name), + pd.DataFrame(), + {"if_exists": "replace", "header": 1, "na_values": "mock", "sep": "mock"}, + ) + + mock_execute.assert_any_call(f"DROP TABLE IF EXISTS {table_name}") + app.config = config + + +@mock.patch("superset.db_engine_specs.hive.g", spec={}) +@mock.patch("superset.db_engine_specs.hive.upload_to_s3") +def test_df_to_sql_if_exists_replace_with_schema(mock_upload_to_s3, mock_g): + config = app.config.copy() + app.config["CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC"]: lambda *args: "" + mock_upload_to_s3.return_value = "mock-location" + mock_g.user = True + mock_database = mock.MagicMock() + mock_database.get_df.return_value.empty = False + mock_execute = mock.MagicMock(return_value=True) + mock_database.get_sqla_engine_with_context.return_value.__enter__.return_value.execute = ( + mock_execute + ) + table_name = "foobar" + schema = "schema" + + with app.app_context(): + HiveEngineSpec.df_to_sql( + mock_database, + Table(table=table_name, schema=schema), + pd.DataFrame(), + {"if_exists": "replace", "header": 1, "na_values": "mock", "sep": "mock"}, + ) + + mock_execute.assert_any_call(f"DROP TABLE IF EXISTS {schema}.{table_name}") + app.config = config + + +def test_is_readonly(): + def is_readonly(sql: str) -> bool: + return HiveEngineSpec.is_readonly_query(ParsedQuery(sql)) + + assert not is_readonly("UPDATE t1 SET col1 = NULL") + assert not is_readonly("INSERT OVERWRITE TABLE tabB SELECT a.Age FROM TableA") + assert is_readonly("SHOW LOCKS test EXTENDED") + assert is_readonly("SET hivevar:desc='Legislators'") + assert is_readonly("EXPLAIN SELECT 1") + assert is_readonly("SELECT 1") + assert is_readonly("WITH (SELECT 1) bla SELECT * from bla") + + +@pytest.mark.parametrize( + "schema,upload_prefix", + [("foo", "EXTERNAL_HIVE_TABLES/1/foo/"), (None, "EXTERNAL_HIVE_TABLES/1/")], +) +def test_s3_upload_prefix(schema: str, upload_prefix: str) -> None: + mock_database = mock.MagicMock() + mock_database.id = 1 + + assert ( + app.config["CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC"]( + database=mock_database, user=mock.MagicMock(), schema=schema + ) + == upload_prefix + ) + + +def test_upload_to_s3_no_bucket_path(): + with app.app_context(): + with pytest.raises( + Exception, + match="No upload bucket specified. 
You can specify one in the config file.", + ): + upload_to_s3("filename", "prefix", Table("table")) + + +@mock.patch("boto3.client") +def test_upload_to_s3_client_error(client): + config = app.config.copy() + app.config["CSV_TO_HIVE_UPLOAD_S3_BUCKET"] = "bucket" + from botocore.exceptions import ClientError + + client.return_value.upload_file.side_effect = ClientError( + {"Error": {}}, "operation_name" + ) + + with app.app_context(): + with pytest.raises(ClientError): + upload_to_s3("filename", "prefix", Table("table")) + + app.config = config + + +@mock.patch("boto3.client") +def test_upload_to_s3_success(client): + config = app.config.copy() + app.config["CSV_TO_HIVE_UPLOAD_S3_BUCKET"] = "bucket" + client.return_value.upload_file.return_value = True + + with app.app_context(): + location = upload_to_s3("filename", "prefix", Table("table")) + assert f"s3a://bucket/prefix/table" == location + + app.config = config + + +def test_fetch_data_query_error(): + from TCLIService import ttypes + + err_msg = "error message" + cursor = mock.Mock() + cursor.poll.return_value.operationState = ttypes.TOperationState.ERROR_STATE + cursor.poll.return_value.errorMessage = err_msg + with pytest.raises(Exception, match=f"('Query error', '{err_msg})'"): + HiveEngineSpec.fetch_data(cursor) + + +@mock.patch("superset.db_engine_specs.base.BaseEngineSpec.fetch_data") +def test_fetch_data_programming_error(fetch_data_mock): + from pyhive.exc import ProgrammingError + + fetch_data_mock.side_effect = ProgrammingError + cursor = mock.Mock() + assert HiveEngineSpec.fetch_data(cursor) == [] + + +@mock.patch("superset.db_engine_specs.base.BaseEngineSpec.fetch_data") +def test_fetch_data_success(fetch_data_mock): + return_value = ["a", "b"] + fetch_data_mock.return_value = return_value + cursor = mock.Mock() + assert HiveEngineSpec.fetch_data(cursor) == return_value + + +@mock.patch("superset.db_engine_specs.hive.HiveEngineSpec._latest_partition_from_df") +def test_where_latest_partition(mock_method): + mock_method.return_value = ("01-01-19", 1) + db = mock.Mock() + db.get_indexes = mock.Mock(return_value=[{"column_names": ["ds", "hour"]}]) + db.get_extra = mock.Mock(return_value={}) + db.get_df = mock.Mock() + columns = [{"name": "ds"}, {"name": "hour"}] + with app.app_context(): + result = HiveEngineSpec.where_latest_partition( + "test_table", "test_schema", db, select(), columns + ) + query_result = str(result.compile(compile_kwargs={"literal_binds": True})) + assert "SELECT \nWHERE ds = '01-01-19' AND hour = 1" == query_result + + +@mock.patch("superset.db_engine_specs.presto.PrestoEngineSpec.latest_partition") +def test_where_latest_partition_super_method_exception(mock_method): + mock_method.side_effect = Exception() + db = mock.Mock() + columns = [{"name": "ds"}, {"name": "hour"}] + with app.app_context(): + result = HiveEngineSpec.where_latest_partition( + "test_table", "test_schema", db, select(), columns + ) + assert result is None + mock_method.assert_called() + + +@mock.patch("superset.db_engine_specs.presto.PrestoEngineSpec.latest_partition") +def test_where_latest_partition_no_columns_no_values(mock_method): + mock_method.return_value = ("01-01-19", None) + db = mock.Mock() + with app.app_context(): + result = HiveEngineSpec.where_latest_partition( + "test_table", "test_schema", db, select() + ) + assert result is None + + +def test__latest_partition_from_df(): + def is_correct_result(data: List, result: List) -> bool: + df = pd.DataFrame({"partition": data}) + return 
HiveEngineSpec._latest_partition_from_df(df) == result + + assert is_correct_result(["ds=01-01-19"], ["01-01-19"]) + assert is_correct_result( + ["ds=01-01-19", "ds=01-03-19", "ds=01-02-19"], ["01-03-19"] + ) + assert is_correct_result(["ds=01-01-19/hour=1"], ["01-01-19", "1"]) + assert is_correct_result( + ["ds=01-01-19/hour=1", "ds=01-03-19/hour=1", "ds=01-02-19/hour=1"], + ["01-03-19", "1"], + ) + assert is_correct_result( + ["ds=01-01-19/hour=1", "ds=01-03-19/hour=1", "ds=01-02-19/hour=2"], + ["01-03-19", "1"], + ) + + +def test_get_view_names_with_schema(): + database = mock.MagicMock() + mock_execute = mock.MagicMock() + database.get_raw_connection().__enter__().cursor().execute = mock_execute + database.get_raw_connection().__enter__().cursor().fetchall = mock.MagicMock( + return_value=[["a", "b,", "c"], ["d", "e"]] + ) + + schema = "schema" + result = HiveEngineSpec.get_view_names(database, mock.Mock(), schema) + mock_execute.assert_called_once_with(f"SHOW VIEWS IN `{schema}`") + assert result == {"a", "d"} + + +def test_get_view_names_without_schema(): + database = mock.MagicMock() + mock_execute = mock.MagicMock() + database.get_raw_connection().__enter__().cursor().execute = mock_execute + database.get_raw_connection().__enter__().cursor().fetchall = mock.MagicMock( + return_value=[["a", "b,", "c"], ["d", "e"]] + ) + result = HiveEngineSpec.get_view_names(database, mock.Mock(), None) + mock_execute.assert_called_once_with("SHOW VIEWS") + assert result == {"a", "d"} + + +@mock.patch("superset.db_engine_specs.base.BaseEngineSpec.get_table_names") +@mock.patch("superset.db_engine_specs.hive.HiveEngineSpec.get_view_names") +def test_get_table_names( + mock_get_view_names, + mock_get_table_names, +): + mock_get_view_names.return_value = {"view1", "view2"} + mock_get_table_names.return_value = {"table1", "table2", "view1", "view2"} + tables = HiveEngineSpec.get_table_names(mock.Mock(), mock.Mock(), None) + assert tables == {"table1", "table2"} diff --git a/tests/integration_tests/db_engine_specs/mysql_tests.py b/tests/integration_tests/db_engine_specs/mysql_tests.py new file mode 100644 index 0000000000000..36b41222b3cc5 --- /dev/null +++ b/tests/integration_tests/db_engine_specs/mysql_tests.py @@ -0,0 +1,197 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+import unittest + +from sqlalchemy.dialects import mysql +from sqlalchemy.dialects.mysql import DATE, NVARCHAR, TEXT, VARCHAR + +from superset.db_engine_specs.mysql import MySQLEngineSpec +from superset.errors import ErrorLevel, SupersetError, SupersetErrorType +from tests.integration_tests.db_engine_specs.base_tests import TestDbEngineSpec + + +class TestMySQLEngineSpecsDbEngineSpec(TestDbEngineSpec): + @unittest.skipUnless( + TestDbEngineSpec.is_module_installed("MySQLdb"), "mysqlclient not installed" + ) + def test_get_datatype_mysql(self): + """Tests related to datatype mapping for MySQL""" + self.assertEqual("TINY", MySQLEngineSpec.get_datatype(1)) + self.assertEqual("VARCHAR", MySQLEngineSpec.get_datatype(15)) + + def test_column_datatype_to_string(self): + test_cases = ( + (DATE(), "DATE"), + (VARCHAR(length=255), "VARCHAR(255)"), + ( + VARCHAR(length=255, charset="latin1", collation="utf8mb4_general_ci"), + "VARCHAR(255)", + ), + (NVARCHAR(length=128), "NATIONAL VARCHAR(128)"), + (TEXT(), "TEXT"), + ) + + for original, expected in test_cases: + actual = MySQLEngineSpec.column_datatype_to_string( + original, mysql.dialect() + ) + self.assertEqual(actual, expected) + + def test_extract_error_message(self): + from MySQLdb._exceptions import OperationalError + + message = "Unknown table 'BIRTH_NAMES1' in information_schema" + exception = OperationalError(message) + extracted_message = MySQLEngineSpec._extract_error_message(exception) + assert extracted_message == message + + exception = OperationalError(123, message) + extracted_message = MySQLEngineSpec._extract_error_message(exception) + assert extracted_message == message + + def test_extract_errors(self): + """ + Test that custom error messages are extracted correctly. + """ + msg = "mysql: Access denied for user 'test'@'testuser.com'" + result = MySQLEngineSpec.extract_errors(Exception(msg)) + assert result == [ + SupersetError( + error_type=SupersetErrorType.CONNECTION_ACCESS_DENIED_ERROR, + message='Either the username "test" or the password is incorrect.', + level=ErrorLevel.ERROR, + extra={ + "invalid": ["username", "password"], + "engine_name": "MySQL", + "issue_codes": [ + { + "code": 1014, + "message": "Issue 1014 - Either the" + " username or the password is wrong.", + }, + { + "code": 1015, + "message": "Issue 1015 - Either the database is " + "spelled incorrectly or does not exist.", + }, + ], + }, + ) + ] + + msg = "mysql: Unknown MySQL server host 'badhostname.com'" + result = MySQLEngineSpec.extract_errors(Exception(msg)) + assert result == [ + SupersetError( + error_type=SupersetErrorType.CONNECTION_INVALID_HOSTNAME_ERROR, + message='Unknown MySQL server host "badhostname.com".', + level=ErrorLevel.ERROR, + extra={ + "invalid": ["host"], + "engine_name": "MySQL", + "issue_codes": [ + { + "code": 1007, + "message": "Issue 1007 - The hostname" + " provided can't be resolved.", + } + ], + }, + ) + ] + + msg = "mysql: Can't connect to MySQL server on 'badconnection.com'" + result = MySQLEngineSpec.extract_errors(Exception(msg)) + assert result == [ + SupersetError( + error_type=SupersetErrorType.CONNECTION_HOST_DOWN_ERROR, + message='The host "badconnection.com" might be ' + "down and can't be reached.", + level=ErrorLevel.ERROR, + extra={ + "invalid": ["host", "port"], + "engine_name": "MySQL", + "issue_codes": [ + { + "code": 1007, + "message": "Issue 1007 - The hostname provided" + " can't be resolved.", + } + ], + }, + ) + ] + + msg = "mysql: Can't connect to MySQL server on '93.184.216.34'" + result = 
MySQLEngineSpec.extract_errors(Exception(msg))
+        assert result == [
+            SupersetError(
+                error_type=SupersetErrorType.CONNECTION_HOST_DOWN_ERROR,
+                message='The host "93.184.216.34" might be down and can\'t be reached.',
+                level=ErrorLevel.ERROR,
+                extra={
+                    "invalid": ["host", "port"],
+                    "engine_name": "MySQL",
+                    "issue_codes": [
+                        {
+                            "code": 1007,
+                            "message": "Issue 1007 - The hostname provided "
+                            "can't be resolved.",
+                        }
+                    ],
+                },
+            )
+        ]
+
+        msg = "mysql: Unknown database 'badDB'"
+        result = MySQLEngineSpec.extract_errors(Exception(msg))
+        assert result == [
+            SupersetError(
+                message='Unable to connect to database "badDB".',
+                error_type=SupersetErrorType.CONNECTION_UNKNOWN_DATABASE_ERROR,
+                level=ErrorLevel.ERROR,
+                extra={
+                    "invalid": ["database"],
+                    "engine_name": "MySQL",
+                    "issue_codes": [
+                        {
+                            "code": 1015,
+                            "message": "Issue 1015 - Either the database is spelled incorrectly or does not exist.",
+                        }
+                    ],
+                },
+            )
+        ]
+
+        msg = "check the manual that corresponds to your MySQL server version for the right syntax to use near 'fromm"
+        result = MySQLEngineSpec.extract_errors(Exception(msg))
+        assert result == [
+            SupersetError(
+                message='Please check your query for syntax errors near "fromm". Then, try running your query again.',
+                error_type=SupersetErrorType.SYNTAX_ERROR,
+                level=ErrorLevel.ERROR,
+                extra={
+                    "engine_name": "MySQL",
+                    "issue_codes": [
+                        {
+                            "code": 1030,
+                            "message": "Issue 1030 - The query has a syntax error.",
+                        }
+                    ],
+                },
+            )
+        ]
diff --git a/tests/integration_tests/db_engine_specs/pinot_tests.py b/tests/integration_tests/db_engine_specs/pinot_tests.py
new file mode 100644
index 0000000000000..c6e364a8ea5fe
--- /dev/null
+++ b/tests/integration_tests/db_engine_specs/pinot_tests.py
@@ -0,0 +1,89 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from sqlalchemy import column + +from superset.db_engine_specs.pinot import PinotEngineSpec +from tests.integration_tests.db_engine_specs.base_tests import TestDbEngineSpec + + +class TestPinotDbEngineSpec(TestDbEngineSpec): + """Tests pertaining to our Pinot database support""" + + def test_pinot_time_expression_sec_one_1d_grain(self): + col = column("tstamp") + expr = PinotEngineSpec.get_timestamp_expr(col, "epoch_s", "P1D") + result = str(expr.compile()) + self.assertEqual( + result, + "DATETIMECONVERT(tstamp, '1:SECONDS:EPOCH', '1:SECONDS:EPOCH', '1:DAYS')", + ) + + def test_pinot_time_expression_simple_date_format_1d_grain(self): + col = column("tstamp") + expr = PinotEngineSpec.get_timestamp_expr(col, "%Y-%m-%d %H:%M:%S", "P1D") + result = str(expr.compile()) + self.assertEqual( + result, + ( + "DATETIMECONVERT(tstamp, " + + "'1:SECONDS:SIMPLE_DATE_FORMAT:yyyy-MM-dd HH:mm:ss', " + + "'1:SECONDS:SIMPLE_DATE_FORMAT:yyyy-MM-dd HH:mm:ss', '1:DAYS')" + ), + ) + + def test_pinot_time_expression_simple_date_format_10m_grain(self): + col = column("tstamp") + expr = PinotEngineSpec.get_timestamp_expr(col, "%Y-%m-%d %H:%M:%S", "PT10M") + result = str(expr.compile()) + self.assertEqual( + result, + ( + "DATETIMECONVERT(tstamp, " + + "'1:SECONDS:SIMPLE_DATE_FORMAT:yyyy-MM-dd HH:mm:ss', " + + "'1:SECONDS:SIMPLE_DATE_FORMAT:yyyy-MM-dd HH:mm:ss', '10:MINUTES')" + ), + ) + + def test_pinot_time_expression_simple_date_format_1w_grain(self): + col = column("tstamp") + expr = PinotEngineSpec.get_timestamp_expr(col, "%Y-%m-%d %H:%M:%S", "P1W") + result = str(expr.compile()) + self.assertEqual( + result, + ( + "ToDateTime(DATETRUNC('week', FromDateTime(tstamp, " + + "'yyyy-MM-dd HH:mm:ss'), 'MILLISECONDS'), 'yyyy-MM-dd HH:mm:ss')" + ), + ) + + def test_pinot_time_expression_sec_one_1m_grain(self): + col = column("tstamp") + expr = PinotEngineSpec.get_timestamp_expr(col, "epoch_s", "P1M") + result = str(expr.compile()) + self.assertEqual( + result, + "DATETRUNC('month', tstamp, 'SECONDS')", + ) + + def test_invalid_get_time_expression_arguments(self): + with self.assertRaises(NotImplementedError): + PinotEngineSpec.get_timestamp_expr(column("tstamp"), None, "P1M") + + with self.assertRaises(NotImplementedError): + PinotEngineSpec.get_timestamp_expr( + column("tstamp"), "epoch_s", "invalid_grain" + ) diff --git a/tests/integration_tests/db_engine_specs/postgres_tests.py b/tests/integration_tests/db_engine_specs/postgres_tests.py new file mode 100644 index 0000000000000..a6145432c2a17 --- /dev/null +++ b/tests/integration_tests/db_engine_specs/postgres_tests.py @@ -0,0 +1,516 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from textwrap import dedent +from unittest import mock + +from sqlalchemy import column, literal_column +from sqlalchemy.dialects import postgresql + +from superset.db_engine_specs import load_engine_specs +from superset.db_engine_specs.postgres import PostgresEngineSpec +from superset.errors import ErrorLevel, SupersetError, SupersetErrorType +from superset.models.sql_lab import Query +from tests.integration_tests.db_engine_specs.base_tests import TestDbEngineSpec +from tests.integration_tests.fixtures.certificates import ssl_certificate +from tests.integration_tests.fixtures.database import default_db_extra + + +class TestPostgresDbEngineSpec(TestDbEngineSpec): + def test_get_table_names(self): + """ + DB Eng Specs (postgres): Test get table names + """ + + """ Make sure postgres doesn't try to remove schema name from table name + ie. when try_remove_schema_from_table_name == False. """ + inspector = mock.Mock() + inspector.get_table_names = mock.Mock(return_value=["schema.table", "table_2"]) + inspector.get_foreign_table_names = mock.Mock(return_value=["table_3"]) + + pg_result_expected = {"schema.table", "table_2", "table_3"} + pg_result = PostgresEngineSpec.get_table_names( + database=mock.ANY, schema="schema", inspector=inspector + ) + assert pg_result_expected == pg_result + + def test_time_exp_literal_no_grain(self): + """ + DB Eng Specs (postgres): Test no grain literal column + """ + col = literal_column("COALESCE(a, b)") + expr = PostgresEngineSpec.get_timestamp_expr(col, None, None) + result = str(expr.compile(None, dialect=postgresql.dialect())) + self.assertEqual(result, "COALESCE(a, b)") + + def test_time_exp_literal_1y_grain(self): + """ + DB Eng Specs (postgres): Test grain literal column 1 YEAR + """ + col = literal_column("COALESCE(a, b)") + expr = PostgresEngineSpec.get_timestamp_expr(col, None, "P1Y") + result = str(expr.compile(None, dialect=postgresql.dialect())) + self.assertEqual(result, "DATE_TRUNC('year', COALESCE(a, b))") + + def test_time_ex_lowr_col_no_grain(self): + """ + DB Eng Specs (postgres): Test no grain expr lower case + """ + col = column("lower_case") + expr = PostgresEngineSpec.get_timestamp_expr(col, None, None) + result = str(expr.compile(None, dialect=postgresql.dialect())) + self.assertEqual(result, "lower_case") + + def test_time_exp_lowr_col_sec_1y(self): + """ + DB Eng Specs (postgres): Test grain expr lower case 1 YEAR + """ + col = column("lower_case") + expr = PostgresEngineSpec.get_timestamp_expr(col, "epoch_s", "P1Y") + result = str(expr.compile(None, dialect=postgresql.dialect())) + self.assertEqual( + result, + "DATE_TRUNC('year', " + "(timestamp 'epoch' + lower_case * interval '1 second'))", + ) + + def test_time_exp_mixd_case_col_1y(self): + """ + DB Eng Specs (postgres): Test grain expr mixed case 1 YEAR + """ + col = column("MixedCase") + expr = PostgresEngineSpec.get_timestamp_expr(col, None, "P1Y") + result = str(expr.compile(None, dialect=postgresql.dialect())) + self.assertEqual(result, "DATE_TRUNC('year', \"MixedCase\")") + + def test_empty_dbapi_cursor_description(self): + """ + DB Eng Specs (postgres): Test empty cursor description (no columns) + """ + cursor = mock.Mock() + # empty description mean no columns, this mocks the following SQL: "SELECT" + cursor.description = [] + results = PostgresEngineSpec.fetch_data(cursor, 1000) + self.assertEqual(results, []) + + def test_engine_alias_name(self): + """ + DB Eng Specs (postgres): Test "postgres" in engine spec + """ + backends = set() + for engine in load_engine_specs(): + 
backends.add(engine.engine) + backends.update(engine.engine_aliases) + assert "postgres" in backends + + def test_extras_without_ssl(self): + db = mock.Mock() + db.extra = default_db_extra + db.server_cert = None + extras = PostgresEngineSpec.get_extra_params(db) + assert "connect_args" not in extras["engine_params"] + + def test_extras_with_ssl_default(self): + db = mock.Mock() + db.extra = default_db_extra + db.server_cert = ssl_certificate + extras = PostgresEngineSpec.get_extra_params(db) + connect_args = extras["engine_params"]["connect_args"] + assert connect_args["sslmode"] == "verify-full" + assert "sslrootcert" in connect_args + + def test_extras_with_ssl_custom(self): + db = mock.Mock() + db.extra = default_db_extra.replace( + '"engine_params": {}', + '"engine_params": {"connect_args": {"sslmode": "verify-ca"}}', + ) + db.server_cert = ssl_certificate + extras = PostgresEngineSpec.get_extra_params(db) + connect_args = extras["engine_params"]["connect_args"] + assert connect_args["sslmode"] == "verify-ca" + assert "sslrootcert" in connect_args + + def test_estimate_statement_cost_select_star(self): + """ + DB Eng Specs (postgres): Test estimate_statement_cost select star + """ + + cursor = mock.Mock() + cursor.fetchone.return_value = ( + "Seq Scan on birth_names (cost=0.00..1537.91 rows=75691 width=46)", + ) + sql = "SELECT * FROM birth_names" + results = PostgresEngineSpec.estimate_statement_cost(sql, cursor) + self.assertEqual( + results, + { + "Start-up cost": 0.00, + "Total cost": 1537.91, + }, + ) + + def test_estimate_statement_invalid_syntax(self): + """ + DB Eng Specs (postgres): Test estimate_statement_cost invalid syntax + """ + from psycopg2 import errors + + cursor = mock.Mock() + cursor.execute.side_effect = errors.SyntaxError( + """ + syntax error at or near "EXPLAIN" + LINE 1: EXPLAIN DROP TABLE birth_names + ^ + """ + ) + sql = "DROP TABLE birth_names" + with self.assertRaises(errors.SyntaxError): + PostgresEngineSpec.estimate_statement_cost(sql, cursor) + + def test_query_cost_formatter_example_costs(self): + """ + DB Eng Specs (postgres): Test test_query_cost_formatter example costs + """ + raw_cost = [ + { + "Start-up cost": 0.00, + "Total cost": 1537.91, + }, + { + "Start-up cost": 10.00, + "Total cost": 1537.00, + }, + ] + result = PostgresEngineSpec.query_cost_formatter(raw_cost) + self.assertEqual( + result, + [ + { + "Start-up cost": "0.0", + "Total cost": "1537.91", + }, + { + "Start-up cost": "10.0", + "Total cost": "1537.0", + }, + ], + ) + + def test_extract_errors(self): + """ + Test that custom error messages are extracted correctly. + """ + msg = 'psql: error: FATAL: role "testuser" does not exist' + result = PostgresEngineSpec.extract_errors(Exception(msg)) + assert result == [ + SupersetError( + error_type=SupersetErrorType.CONNECTION_INVALID_USERNAME_ERROR, + message='The username "testuser" does not exist.', + level=ErrorLevel.ERROR, + extra={ + "engine_name": "PostgreSQL", + "issue_codes": [ + { + "code": 1012, + "message": ( + "Issue 1012 - The username provided when " + "connecting to a database is not valid." 
+ ), + }, + ], + "invalid": ["username"], + }, + ) + ] + + msg = ( + 'psql: error: could not translate host name "locahost" to address: ' + "nodename nor servname provided, or not known" + ) + result = PostgresEngineSpec.extract_errors(Exception(msg)) + assert result == [ + SupersetError( + error_type=SupersetErrorType.CONNECTION_INVALID_HOSTNAME_ERROR, + message='The hostname "locahost" cannot be resolved.', + level=ErrorLevel.ERROR, + extra={ + "engine_name": "PostgreSQL", + "issue_codes": [ + { + "code": 1007, + "message": "Issue 1007 - The hostname provided " + "can't be resolved.", + } + ], + "invalid": ["host"], + }, + ) + ] + + msg = dedent( + """ +psql: error: could not connect to server: Connection refused + Is the server running on host "localhost" (::1) and accepting + TCP/IP connections on port 12345? +could not connect to server: Connection refused + Is the server running on host "localhost" (127.0.0.1) and accepting + TCP/IP connections on port 12345? + """ + ) + result = PostgresEngineSpec.extract_errors(Exception(msg)) + assert result == [ + SupersetError( + error_type=SupersetErrorType.CONNECTION_PORT_CLOSED_ERROR, + message='Port 12345 on hostname "localhost" refused the connection.', + level=ErrorLevel.ERROR, + extra={ + "engine_name": "PostgreSQL", + "issue_codes": [ + {"code": 1008, "message": "Issue 1008 - The port is closed."} + ], + "invalid": ["host", "port"], + }, + ) + ] + + msg = dedent( + """ +psql: error: could not connect to server: Operation timed out + Is the server running on host "example.com" (93.184.216.34) and accepting + TCP/IP connections on port 12345? + """ + ) + result = PostgresEngineSpec.extract_errors(Exception(msg)) + assert result == [ + SupersetError( + error_type=SupersetErrorType.CONNECTION_HOST_DOWN_ERROR, + message=( + 'The host "example.com" might be down, ' + "and can't be reached on port 12345." + ), + level=ErrorLevel.ERROR, + extra={ + "engine_name": "PostgreSQL", + "issue_codes": [ + { + "code": 1009, + "message": "Issue 1009 - The host might be down, " + "and can't be reached on the provided port.", + } + ], + "invalid": ["host", "port"], + }, + ) + ] + + # response with IP only + msg = dedent( + """ +psql: error: could not connect to server: Operation timed out + Is the server running on host "93.184.216.34" and accepting + TCP/IP connections on port 12345? + """ + ) + result = PostgresEngineSpec.extract_errors(Exception(msg)) + assert result == [ + SupersetError( + error_type=SupersetErrorType.CONNECTION_HOST_DOWN_ERROR, + message=( + 'The host "93.184.216.34" might be down, ' + "and can't be reached on port 12345." + ), + level=ErrorLevel.ERROR, + extra={ + "engine_name": "PostgreSQL", + "issue_codes": [ + { + "code": 1009, + "message": "Issue 1009 - The host might be down, " + "and can't be reached on the provided port.", + } + ], + "invalid": ["host", "port"], + }, + ) + ] + + msg = 'FATAL: password authentication failed for user "postgres"' + result = PostgresEngineSpec.extract_errors(Exception(msg)) + assert result == [ + SupersetError( + error_type=SupersetErrorType.CONNECTION_INVALID_PASSWORD_ERROR, + message=('The password provided for username "postgres" is incorrect.'), + level=ErrorLevel.ERROR, + extra={ + "engine_name": "PostgreSQL", + "issue_codes": [ + { + "code": 1013, + "message": ( + "Issue 1013 - The password provided when " + "connecting to a database is not valid." 
+                            ),
+                        },
+                    ],
+                    "invalid": ["username", "password"],
+                },
+            )
+        ]
+
+        msg = 'database "badDB" does not exist'
+        result = PostgresEngineSpec.extract_errors(Exception(msg))
+        assert result == [
+            SupersetError(
+                message='Unable to connect to database "badDB".',
+                error_type=SupersetErrorType.CONNECTION_UNKNOWN_DATABASE_ERROR,
+                level=ErrorLevel.ERROR,
+                extra={
+                    "engine_name": "PostgreSQL",
+                    "issue_codes": [
+                        {
+                            "code": 1015,
+                            "message": (
+                                "Issue 1015 - Either the database is spelled "
+                                "incorrectly or does not exist."
+                            ),
+                        }
+                    ],
+                    "invalid": ["database"],
+                },
+            )
+        ]
+
+        msg = "no password supplied"
+        result = PostgresEngineSpec.extract_errors(Exception(msg))
+        assert result == [
+            SupersetError(
+                message="Please re-enter the password.",
+                error_type=SupersetErrorType.CONNECTION_ACCESS_DENIED_ERROR,
+                level=ErrorLevel.ERROR,
+                extra={
+                    "invalid": ["password"],
+                    "engine_name": "PostgreSQL",
+                    "issue_codes": [
+                        {
+                            "code": 1014,
+                            "message": "Issue 1014 - Either the username or the password is wrong.",
+                        },
+                        {
+                            "code": 1015,
+                            "message": "Issue 1015 - Either the database is spelled incorrectly or does not exist.",
+                        },
+                    ],
+                },
+            )
+        ]
+
+        msg = 'syntax error at or near "fromm"'
+        result = PostgresEngineSpec.extract_errors(Exception(msg))
+        assert result == [
+            SupersetError(
+                message='Please check your query for syntax errors at or near "fromm". Then, try running your query again.',
+                error_type=SupersetErrorType.SYNTAX_ERROR,
+                level=ErrorLevel.ERROR,
+                extra={
+                    "engine_name": "PostgreSQL",
+                    "issue_codes": [
+                        {
+                            "code": 1030,
+                            "message": "Issue 1030 - The query has a syntax error.",
+                        }
+                    ],
+                },
+            )
+        ]
+
+    @mock.patch("sqlalchemy.engine.Engine.connect")
+    def test_get_cancel_query_id(self, engine_mock):
+        query = Query()
+        cursor_mock = engine_mock.return_value.__enter__.return_value
+        cursor_mock.fetchone.return_value = [123]
+        assert PostgresEngineSpec.get_cancel_query_id(cursor_mock, query) == 123
+
+    @mock.patch("sqlalchemy.engine.Engine.connect")
+    def test_cancel_query(self, engine_mock):
+        query = Query()
+        cursor_mock = engine_mock.return_value.__enter__.return_value
+        assert PostgresEngineSpec.cancel_query(cursor_mock, query, 123) is True
+
+    @mock.patch("sqlalchemy.engine.Engine.connect")
+    def test_cancel_query_failed(self, engine_mock):
+        query = Query()
+        cursor_mock = engine_mock.raiseError.side_effect = Exception()
+        assert PostgresEngineSpec.cancel_query(cursor_mock, query, 123) is False
+
+
+def test_base_parameters_mixin():
+    parameters = {
+        "username": "username",
+        "password": "password",
+        "host": "localhost",
+        "port": 5432,
+        "database": "dbname",
+        "query": {"foo": "bar"},
+        "encryption": True,
+    }
+    encrypted_extra = None
+    sqlalchemy_uri = PostgresEngineSpec.build_sqlalchemy_uri(
+        parameters, encrypted_extra
+    )
+    assert sqlalchemy_uri == (
+        "postgresql+psycopg2://username:password@localhost:5432/dbname?"
+ "foo=bar&sslmode=require" + ) + + parameters_from_uri = PostgresEngineSpec.get_parameters_from_uri(sqlalchemy_uri) + assert parameters_from_uri == { + "username": "username", + "password": "password", + "host": "localhost", + "port": 5432, + "database": "dbname", + "query": {"foo": "bar"}, + "encryption": True, + } + + json_schema = PostgresEngineSpec.parameters_json_schema() + assert json_schema == { + "type": "object", + "properties": { + "encryption": { + "type": "boolean", + "description": "Use an encrypted connection to the database", + }, + "host": {"type": "string", "description": "Hostname or IP address"}, + "database": {"type": "string", "description": "Database name"}, + "port": { + "type": "integer", + "format": "int32", + "minimum": 0, + "maximum": 65536, + "description": "Database port", + }, + "password": {"type": "string", "nullable": True, "description": "Password"}, + "username": {"type": "string", "nullable": True, "description": "Username"}, + "query": { + "type": "object", + "description": "Additional parameters", + "additionalProperties": {}, + }, + }, + "required": ["database", "host", "port", "username"], + } diff --git a/tests/integration_tests/db_engine_specs/presto_tests.py b/tests/integration_tests/db_engine_specs/presto_tests.py new file mode 100644 index 0000000000000..78b552ecb8635 --- /dev/null +++ b/tests/integration_tests/db_engine_specs/presto_tests.py @@ -0,0 +1,1034 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from collections import namedtuple +from textwrap import dedent +from unittest import mock, skipUnless + +import pandas as pd +from sqlalchemy import types +from sqlalchemy.sql import select + +from superset.db_engine_specs.presto import PrestoEngineSpec +from superset.errors import ErrorLevel, SupersetError, SupersetErrorType +from superset.sql_parse import ParsedQuery +from tests.integration_tests.db_engine_specs.base_tests import TestDbEngineSpec + + +class TestPrestoDbEngineSpec(TestDbEngineSpec): + @skipUnless(TestDbEngineSpec.is_module_installed("pyhive"), "pyhive not installed") + def test_get_datatype_presto(self): + self.assertEqual("STRING", PrestoEngineSpec.get_datatype("string")) + + def test_get_view_names_with_schema(self): + database = mock.MagicMock() + mock_execute = mock.MagicMock() + database.get_raw_connection().__enter__().cursor().execute = mock_execute + database.get_raw_connection().__enter__().cursor().fetchall = mock.MagicMock( + return_value=[["a", "b,", "c"], ["d", "e"]] + ) + + schema = "schema" + result = PrestoEngineSpec.get_view_names(database, mock.Mock(), schema) + mock_execute.assert_called_once_with( + dedent( + """ + SELECT table_name FROM information_schema.tables + WHERE table_schema = %(schema)s + AND table_type = 'VIEW' + """ + ).strip(), + {"schema": schema}, + ) + assert result == {"a", "d"} + + def test_get_view_names_without_schema(self): + database = mock.MagicMock() + mock_execute = mock.MagicMock() + database.get_raw_connection().__enter__().cursor().execute = mock_execute + database.get_raw_connection().__enter__().cursor().fetchall = mock.MagicMock( + return_value=[["a", "b,", "c"], ["d", "e"]] + ) + result = PrestoEngineSpec.get_view_names(database, mock.Mock(), None) + mock_execute.assert_called_once_with( + dedent( + """ + SELECT table_name FROM information_schema.tables + WHERE table_type = 'VIEW' + """ + ).strip(), + {}, + ) + assert result == {"a", "d"} + + def verify_presto_column(self, column, expected_results): + inspector = mock.Mock() + inspector.engine.dialect.identifier_preparer.quote_identifier = mock.Mock() + row = mock.Mock() + row.Column, row.Type, row.Null = column + inspector.bind.execute.return_value.fetchall = mock.Mock(return_value=[row]) + results = PrestoEngineSpec.get_columns(inspector, "", "") + self.assertEqual(len(expected_results), len(results)) + for expected_result, result in zip(expected_results, results): + self.assertEqual(expected_result[0], result["name"]) + self.assertEqual(expected_result[1], str(result["type"])) + + def test_presto_get_column(self): + presto_column = ("column_name", "boolean", "") + expected_results = [("column_name", "BOOLEAN")] + self.verify_presto_column(presto_column, expected_results) + + @mock.patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + {"PRESTO_EXPAND_DATA": True}, + clear=True, + ) + def test_presto_get_simple_row_column(self): + presto_column = ("column_name", "row(nested_obj double)", "") + expected_results = [("column_name", "ROW"), ("column_name.nested_obj", "FLOAT")] + self.verify_presto_column(presto_column, expected_results) + + @mock.patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + {"PRESTO_EXPAND_DATA": True}, + clear=True, + ) + def test_presto_get_simple_row_column_with_name_containing_whitespace(self): + presto_column = ("column name", "row(nested_obj double)", "") + expected_results = [("column name", "ROW"), ("column name.nested_obj", "FLOAT")] + self.verify_presto_column(presto_column, expected_results) + 
+ @mock.patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + {"PRESTO_EXPAND_DATA": True}, + clear=True, + ) + def test_presto_get_simple_row_column_with_tricky_nested_field_name(self): + presto_column = ("column_name", 'row("Field Name(Tricky, Name)" double)', "") + expected_results = [ + ("column_name", "ROW"), + ('column_name."Field Name(Tricky, Name)"', "FLOAT"), + ] + self.verify_presto_column(presto_column, expected_results) + + @mock.patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + {"PRESTO_EXPAND_DATA": True}, + clear=True, + ) + def test_presto_get_simple_array_column(self): + presto_column = ("column_name", "array(double)", "") + expected_results = [("column_name", "ARRAY")] + self.verify_presto_column(presto_column, expected_results) + + @mock.patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + {"PRESTO_EXPAND_DATA": True}, + clear=True, + ) + def test_presto_get_row_within_array_within_row_column(self): + presto_column = ( + "column_name", + "row(nested_array array(row(nested_row double)), nested_obj double)", + "", + ) + expected_results = [ + ("column_name", "ROW"), + ("column_name.nested_array", "ARRAY"), + ("column_name.nested_array.nested_row", "FLOAT"), + ("column_name.nested_obj", "FLOAT"), + ] + self.verify_presto_column(presto_column, expected_results) + + @mock.patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + {"PRESTO_EXPAND_DATA": True}, + clear=True, + ) + def test_presto_get_array_within_row_within_array_column(self): + presto_column = ( + "column_name", + "array(row(nested_array array(double), nested_obj double))", + "", + ) + expected_results = [ + ("column_name", "ARRAY"), + ("column_name.nested_array", "ARRAY"), + ("column_name.nested_obj", "FLOAT"), + ] + self.verify_presto_column(presto_column, expected_results) + + def test_presto_get_fields(self): + cols = [ + {"name": "column"}, + {"name": "column.nested_obj"}, + {"name": 'column."quoted.nested obj"'}, + ] + actual_results = PrestoEngineSpec._get_fields(cols) + expected_results = [ + {"name": '"column"', "label": "column"}, + {"name": '"column"."nested_obj"', "label": "column.nested_obj"}, + { + "name": '"column"."quoted.nested obj"', + "label": 'column."quoted.nested obj"', + }, + ] + for actual_result, expected_result in zip(actual_results, expected_results): + self.assertEqual(actual_result.element.name, expected_result["name"]) + self.assertEqual(actual_result.name, expected_result["label"]) + + @mock.patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + {"PRESTO_EXPAND_DATA": True}, + clear=True, + ) + def test_presto_expand_data_with_simple_structural_columns(self): + cols = [ + {"name": "row_column", "type": "ROW(NESTED_OBJ VARCHAR)", "is_dttm": False}, + {"name": "array_column", "type": "ARRAY(BIGINT)", "is_dttm": False}, + ] + data = [ + {"row_column": ["a"], "array_column": [1, 2, 3]}, + {"row_column": ["b"], "array_column": [4, 5, 6]}, + ] + actual_cols, actual_data, actual_expanded_cols = PrestoEngineSpec.expand_data( + cols, data + ) + expected_cols = [ + {"name": "row_column", "type": "ROW(NESTED_OBJ VARCHAR)", "is_dttm": False}, + {"name": "row_column.nested_obj", "type": "VARCHAR", "is_dttm": False}, + {"name": "array_column", "type": "ARRAY(BIGINT)", "is_dttm": False}, + ] + + expected_data = [ + {"array_column": 1, "row_column": ["a"], "row_column.nested_obj": "a"}, + {"array_column": 2, "row_column": "", "row_column.nested_obj": ""}, + {"array_column": 3, "row_column": 
"", "row_column.nested_obj": ""}, + {"array_column": 4, "row_column": ["b"], "row_column.nested_obj": "b"}, + {"array_column": 5, "row_column": "", "row_column.nested_obj": ""}, + {"array_column": 6, "row_column": "", "row_column.nested_obj": ""}, + ] + + expected_expanded_cols = [ + {"name": "row_column.nested_obj", "type": "VARCHAR", "is_dttm": False} + ] + self.assertEqual(actual_cols, expected_cols) + self.assertEqual(actual_data, expected_data) + self.assertEqual(actual_expanded_cols, expected_expanded_cols) + + @mock.patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + {"PRESTO_EXPAND_DATA": True}, + clear=True, + ) + def test_presto_expand_data_with_complex_row_columns(self): + cols = [ + { + "name": "row_column", + "type": "ROW(NESTED_OBJ1 VARCHAR, NESTED_ROW ROW(NESTED_OBJ2 VARCHAR))", + "is_dttm": False, + } + ] + data = [{"row_column": ["a1", ["a2"]]}, {"row_column": ["b1", ["b2"]]}] + actual_cols, actual_data, actual_expanded_cols = PrestoEngineSpec.expand_data( + cols, data + ) + expected_cols = [ + { + "name": "row_column", + "type": "ROW(NESTED_OBJ1 VARCHAR, NESTED_ROW ROW(NESTED_OBJ2 VARCHAR))", + "is_dttm": False, + }, + {"name": "row_column.nested_obj1", "type": "VARCHAR", "is_dttm": False}, + { + "name": "row_column.nested_row", + "type": "ROW(NESTED_OBJ2 VARCHAR)", + "is_dttm": False, + }, + { + "name": "row_column.nested_row.nested_obj2", + "type": "VARCHAR", + "is_dttm": False, + }, + ] + expected_data = [ + { + "row_column": ["a1", ["a2"]], + "row_column.nested_obj1": "a1", + "row_column.nested_row": ["a2"], + "row_column.nested_row.nested_obj2": "a2", + }, + { + "row_column": ["b1", ["b2"]], + "row_column.nested_obj1": "b1", + "row_column.nested_row": ["b2"], + "row_column.nested_row.nested_obj2": "b2", + }, + ] + + expected_expanded_cols = [ + {"name": "row_column.nested_obj1", "type": "VARCHAR", "is_dttm": False}, + { + "name": "row_column.nested_row", + "type": "ROW(NESTED_OBJ2 VARCHAR)", + "is_dttm": False, + }, + { + "name": "row_column.nested_row.nested_obj2", + "type": "VARCHAR", + "is_dttm": False, + }, + ] + self.assertEqual(actual_cols, expected_cols) + self.assertEqual(actual_data, expected_data) + self.assertEqual(actual_expanded_cols, expected_expanded_cols) + + @mock.patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + {"PRESTO_EXPAND_DATA": True}, + clear=True, + ) + def test_presto_expand_data_with_complex_row_columns_and_null_values(self): + cols = [ + { + "name": "row_column", + "type": "ROW(NESTED_ROW ROW(NESTED_OBJ VARCHAR))", + "is_dttm": False, + } + ] + data = [ + {"row_column": '[["a"]]'}, + {"row_column": "[[null]]"}, + {"row_column": "[null]"}, + {"row_column": "null"}, + ] + actual_cols, actual_data, actual_expanded_cols = PrestoEngineSpec.expand_data( + cols, data + ) + expected_cols = [ + { + "name": "row_column", + "type": "ROW(NESTED_ROW ROW(NESTED_OBJ VARCHAR))", + "is_dttm": False, + }, + { + "name": "row_column.nested_row", + "type": "ROW(NESTED_OBJ VARCHAR)", + "is_dttm": False, + }, + { + "name": "row_column.nested_row.nested_obj", + "type": "VARCHAR", + "is_dttm": False, + }, + ] + expected_data = [ + { + "row_column": [["a"]], + "row_column.nested_row": ["a"], + "row_column.nested_row.nested_obj": "a", + }, + { + "row_column": [[None]], + "row_column.nested_row": [None], + "row_column.nested_row.nested_obj": None, + }, + { + "row_column": [None], + "row_column.nested_row": None, + "row_column.nested_row.nested_obj": "", + }, + { + "row_column": None, + "row_column.nested_row": "", + 
"row_column.nested_row.nested_obj": "", + }, + ] + + expected_expanded_cols = [ + { + "name": "row_column.nested_row", + "type": "ROW(NESTED_OBJ VARCHAR)", + "is_dttm": False, + }, + { + "name": "row_column.nested_row.nested_obj", + "type": "VARCHAR", + "is_dttm": False, + }, + ] + self.assertEqual(actual_cols, expected_cols) + self.assertEqual(actual_data, expected_data) + self.assertEqual(actual_expanded_cols, expected_expanded_cols) + + @mock.patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + {"PRESTO_EXPAND_DATA": True}, + clear=True, + ) + def test_presto_expand_data_with_complex_array_columns(self): + cols = [ + {"name": "int_column", "type": "BIGINT", "is_dttm": False}, + { + "name": "array_column", + "type": "ARRAY(ROW(NESTED_ARRAY ARRAY(ROW(NESTED_OBJ VARCHAR))))", + "is_dttm": False, + }, + ] + data = [ + {"int_column": 1, "array_column": [[[["a"], ["b"]]], [[["c"], ["d"]]]]}, + {"int_column": 2, "array_column": [[[["e"], ["f"]]], [[["g"], ["h"]]]]}, + ] + actual_cols, actual_data, actual_expanded_cols = PrestoEngineSpec.expand_data( + cols, data + ) + expected_cols = [ + {"name": "int_column", "type": "BIGINT", "is_dttm": False}, + { + "name": "array_column", + "type": "ARRAY(ROW(NESTED_ARRAY ARRAY(ROW(NESTED_OBJ VARCHAR))))", + "is_dttm": False, + }, + { + "name": "array_column.nested_array", + "type": "ARRAY(ROW(NESTED_OBJ VARCHAR))", + "is_dttm": False, + }, + { + "name": "array_column.nested_array.nested_obj", + "type": "VARCHAR", + "is_dttm": False, + }, + ] + expected_data = [ + { + "array_column": [[["a"], ["b"]]], + "array_column.nested_array": ["a"], + "array_column.nested_array.nested_obj": "a", + "int_column": 1, + }, + { + "array_column": "", + "array_column.nested_array": ["b"], + "array_column.nested_array.nested_obj": "b", + "int_column": "", + }, + { + "array_column": [[["c"], ["d"]]], + "array_column.nested_array": ["c"], + "array_column.nested_array.nested_obj": "c", + "int_column": "", + }, + { + "array_column": "", + "array_column.nested_array": ["d"], + "array_column.nested_array.nested_obj": "d", + "int_column": "", + }, + { + "array_column": [[["e"], ["f"]]], + "array_column.nested_array": ["e"], + "array_column.nested_array.nested_obj": "e", + "int_column": 2, + }, + { + "array_column": "", + "array_column.nested_array": ["f"], + "array_column.nested_array.nested_obj": "f", + "int_column": "", + }, + { + "array_column": [[["g"], ["h"]]], + "array_column.nested_array": ["g"], + "array_column.nested_array.nested_obj": "g", + "int_column": "", + }, + { + "array_column": "", + "array_column.nested_array": ["h"], + "array_column.nested_array.nested_obj": "h", + "int_column": "", + }, + ] + expected_expanded_cols = [ + { + "name": "array_column.nested_array", + "type": "ARRAY(ROW(NESTED_OBJ VARCHAR))", + "is_dttm": False, + }, + { + "name": "array_column.nested_array.nested_obj", + "type": "VARCHAR", + "is_dttm": False, + }, + ] + self.assertEqual(actual_cols, expected_cols) + self.assertEqual(actual_data, expected_data) + self.assertEqual(actual_expanded_cols, expected_expanded_cols) + + def test_presto_extra_table_metadata(self): + db = mock.Mock() + db.get_indexes = mock.Mock(return_value=[{"column_names": ["ds", "hour"]}]) + db.get_extra = mock.Mock(return_value={}) + df = pd.DataFrame({"ds": ["01-01-19"], "hour": [1]}) + db.get_df = mock.Mock(return_value=df) + PrestoEngineSpec.get_create_view = mock.Mock(return_value=None) + result = PrestoEngineSpec.extra_table_metadata(db, "test_table", "test_schema") + assert 
result["partitions"]["cols"] == ["ds", "hour"] + assert result["partitions"]["latest"] == {"ds": "01-01-19", "hour": 1} + + def test_presto_where_latest_partition(self): + db = mock.Mock() + db.get_indexes = mock.Mock(return_value=[{"column_names": ["ds", "hour"]}]) + db.get_extra = mock.Mock(return_value={}) + df = pd.DataFrame({"ds": ["01-01-19"], "hour": [1]}) + db.get_df = mock.Mock(return_value=df) + columns = [{"name": "ds"}, {"name": "hour"}] + result = PrestoEngineSpec.where_latest_partition( + "test_table", "test_schema", db, select(), columns + ) + query_result = str(result.compile(compile_kwargs={"literal_binds": True})) + self.assertEqual("SELECT \nWHERE ds = '01-01-19' AND hour = 1", query_result) + + def test_query_cost_formatter(self): + raw_cost = [ + { + "inputTableColumnInfos": [ + { + "table": { + "catalog": "hive", + "schemaTable": { + "schema": "default", + "table": "fact_passenger_state", + }, + }, + "columnConstraints": [ + { + "columnName": "ds", + "typeSignature": "varchar", + "domain": { + "nullsAllowed": False, + "ranges": [ + { + "low": { + "value": "2019-07-10", + "bound": "EXACTLY", + }, + "high": { + "value": "2019-07-10", + "bound": "EXACTLY", + }, + } + ], + }, + } + ], + "estimate": { + "outputRowCount": 9.04969899e8, + "outputSizeInBytes": 3.54143678301e11, + "cpuCost": 3.54143678301e11, + "maxMemory": 0.0, + "networkCost": 0.0, + }, + } + ], + "estimate": { + "outputRowCount": 9.04969899e8, + "outputSizeInBytes": 3.54143678301e11, + "cpuCost": 3.54143678301e11, + "maxMemory": 0.0, + "networkCost": 3.54143678301e11, + }, + } + ] + formatted_cost = PrestoEngineSpec.query_cost_formatter(raw_cost) + expected = [ + { + "Output count": "904 M rows", + "Output size": "354 GB", + "CPU cost": "354 G", + "Max memory": "0 B", + "Network cost": "354 G", + } + ] + self.assertEqual(formatted_cost, expected) + + @mock.patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + {"PRESTO_EXPAND_DATA": True}, + clear=True, + ) + def test_presto_expand_data_array(self): + cols = [ + {"name": "event_id", "type": "VARCHAR", "is_dttm": False}, + {"name": "timestamp", "type": "BIGINT", "is_dttm": False}, + { + "name": "user", + "type": "ROW(ID BIGINT, FIRST_NAME VARCHAR, LAST_NAME VARCHAR)", + "is_dttm": False, + }, + ] + data = [ + { + "event_id": "abcdef01-2345-6789-abcd-ef0123456789", + "timestamp": "1595895506219", + "user": '[1, "JOHN", "DOE"]', + } + ] + actual_cols, actual_data, actual_expanded_cols = PrestoEngineSpec.expand_data( + cols, data + ) + expected_cols = [ + {"name": "event_id", "type": "VARCHAR", "is_dttm": False}, + {"name": "timestamp", "type": "BIGINT", "is_dttm": False}, + { + "name": "user", + "type": "ROW(ID BIGINT, FIRST_NAME VARCHAR, LAST_NAME VARCHAR)", + "is_dttm": False, + }, + {"name": "user.id", "type": "BIGINT", "is_dttm": False}, + {"name": "user.first_name", "type": "VARCHAR", "is_dttm": False}, + {"name": "user.last_name", "type": "VARCHAR", "is_dttm": False}, + ] + expected_data = [ + { + "event_id": "abcdef01-2345-6789-abcd-ef0123456789", + "timestamp": "1595895506219", + "user": [1, "JOHN", "DOE"], + "user.id": 1, + "user.first_name": "JOHN", + "user.last_name": "DOE", + } + ] + expected_expanded_cols = [ + {"name": "user.id", "type": "BIGINT", "is_dttm": False}, + {"name": "user.first_name", "type": "VARCHAR", "is_dttm": False}, + {"name": "user.last_name", "type": "VARCHAR", "is_dttm": False}, + ] + + self.assertEqual(actual_cols, expected_cols) + self.assertEqual(actual_data, expected_data) + 
self.assertEqual(actual_expanded_cols, expected_expanded_cols) + + @mock.patch("superset.db_engine_specs.base.BaseEngineSpec.get_table_names") + @mock.patch("superset.db_engine_specs.presto.PrestoEngineSpec.get_view_names") + def test_get_table_names( + self, + mock_get_view_names, + mock_get_table_names, + ): + mock_get_view_names.return_value = {"view1", "view2"} + mock_get_table_names.return_value = {"table1", "table2", "view1", "view2"} + tables = PrestoEngineSpec.get_table_names(mock.Mock(), mock.Mock(), None) + assert tables == {"table1", "table2"} + + def test_get_full_name(self): + names = [ + ("part1", "part2"), + ("part11", "part22"), + ] + result = PrestoEngineSpec._get_full_name(names) + assert result == "part1.part11" + + def test_get_full_name_empty_tuple(self): + names = [ + ("part1", "part2"), + ("", "part3"), + ("part4", "part5"), + ("", "part6"), + ] + result = PrestoEngineSpec._get_full_name(names) + assert result == "part1.part4" + + def test_split_data_type(self): + data_type = "value1 value2" + result = PrestoEngineSpec._split_data_type(data_type, " ") + assert result == ["value1", "value2"] + + data_type = "value1,value2" + result = PrestoEngineSpec._split_data_type(data_type, ",") + assert result == ["value1", "value2"] + + data_type = '"value,1",value2' + result = PrestoEngineSpec._split_data_type(data_type, ",") + assert result == ['"value,1"', "value2"] + + def test_show_columns(self): + inspector = mock.MagicMock() + inspector.engine.dialect.identifier_preparer.quote_identifier = ( + lambda x: f'"{x}"' + ) + inspector.bind.execute.return_value.fetchall = mock.MagicMock( + return_value=["a", "b"] + ) + table_name = "table_name" + result = PrestoEngineSpec._show_columns(inspector, table_name, None) + assert result == ["a", "b"] + inspector.bind.execute.assert_called_once_with( + f'SHOW COLUMNS FROM "{table_name}"' + ) + + def test_show_columns_with_schema(self): + inspector = mock.MagicMock() + inspector.engine.dialect.identifier_preparer.quote_identifier = ( + lambda x: f'"{x}"' + ) + inspector.bind.execute.return_value.fetchall = mock.MagicMock( + return_value=["a", "b"] + ) + table_name = "table_name" + schema = "schema" + result = PrestoEngineSpec._show_columns(inspector, table_name, schema) + assert result == ["a", "b"] + inspector.bind.execute.assert_called_once_with( + f'SHOW COLUMNS FROM "{schema}"."{table_name}"' + ) + + def test_is_column_name_quoted(self): + column_name = "mock" + assert PrestoEngineSpec._is_column_name_quoted(column_name) is False + + column_name = '"mock' + assert PrestoEngineSpec._is_column_name_quoted(column_name) is False + + column_name = '"moc"k' + assert PrestoEngineSpec._is_column_name_quoted(column_name) is False + + column_name = '"moc"k"' + assert PrestoEngineSpec._is_column_name_quoted(column_name) is True + + @mock.patch("superset.db_engine_specs.base.BaseEngineSpec.select_star") + def test_select_star_no_presto_expand_data(self, mock_select_star): + database = mock.Mock() + table_name = "table_name" + engine = mock.Mock() + cols = [ + {"col1": "val1"}, + {"col2": "val2"}, + ] + PrestoEngineSpec.select_star(database, table_name, engine, cols=cols) + mock_select_star.assert_called_once_with( + database, table_name, engine, None, 100, False, True, True, cols + ) + + @mock.patch("superset.db_engine_specs.presto.is_feature_enabled") + @mock.patch("superset.db_engine_specs.base.BaseEngineSpec.select_star") + def test_select_star_presto_expand_data( + self, mock_select_star, mock_is_feature_enabled + ): + 
mock_is_feature_enabled.return_value = True + database = mock.Mock() + table_name = "table_name" + engine = mock.Mock() + cols = [ + {"name": "val1"}, + {"name": "val2 bool: + return PrestoEngineSpec.is_readonly_query(ParsedQuery(sql)) + + assert not is_readonly("SET hivevar:desc='Legislators'") + assert not is_readonly("UPDATE t1 SET col1 = NULL") + assert not is_readonly("INSERT OVERWRITE TABLE tabB SELECT a.Age FROM TableA") + assert is_readonly("SHOW LOCKS test EXTENDED") + assert is_readonly("EXPLAIN SELECT 1") + assert is_readonly("SELECT 1") + assert is_readonly("WITH (SELECT 1) bla SELECT * from bla") diff --git a/tests/integration_tests/db_engine_specs/redshift_tests.py b/tests/integration_tests/db_engine_specs/redshift_tests.py new file mode 100644 index 0000000000000..cdfe8d16cb714 --- /dev/null +++ b/tests/integration_tests/db_engine_specs/redshift_tests.py @@ -0,0 +1,185 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from textwrap import dedent + +from superset.db_engine_specs.redshift import RedshiftEngineSpec +from superset.errors import ErrorLevel, SupersetError, SupersetErrorType +from tests.integration_tests.db_engine_specs.base_tests import TestDbEngineSpec + + +class TestRedshiftDbEngineSpec(TestDbEngineSpec): + def test_extract_errors(self): + """ + Test that custom error messages are extracted correctly. 
+ """ + msg = 'FATAL: password authentication failed for user "wronguser"' + result = RedshiftEngineSpec.extract_errors(Exception(msg)) + assert result == [ + SupersetError( + error_type=SupersetErrorType.CONNECTION_ACCESS_DENIED_ERROR, + message='Either the username "wronguser" or the password is incorrect.', + level=ErrorLevel.ERROR, + extra={ + "invalid": ["username", "password"], + "engine_name": "Amazon Redshift", + "issue_codes": [ + { + "code": 1014, + "message": "Issue 1014 - Either the username " + "or the password is wrong.", + }, + { + "code": 1015, + "message": "Issue 1015 - Either the database is " + "spelled incorrectly or does not exist.", + }, + ], + }, + ) + ] + + msg = ( + 'redshift: error: could not translate host name "badhost" ' + "to address: nodename nor servname provided, or not known" + ) + result = RedshiftEngineSpec.extract_errors(Exception(msg)) + assert result == [ + SupersetError( + error_type=SupersetErrorType.CONNECTION_INVALID_HOSTNAME_ERROR, + message='The hostname "badhost" cannot be resolved.', + level=ErrorLevel.ERROR, + extra={ + "invalid": ["host"], + "engine_name": "Amazon Redshift", + "issue_codes": [ + { + "code": 1007, + "message": "Issue 1007 - The hostname provided " + "can't be resolved.", + } + ], + }, + ) + ] + msg = dedent( + """ +psql: error: could not connect to server: Connection refused + Is the server running on host "localhost" (::1) and accepting + TCP/IP connections on port 12345? +could not connect to server: Connection refused + Is the server running on host "localhost" (127.0.0.1) and accepting + TCP/IP connections on port 12345? + """ + ) + result = RedshiftEngineSpec.extract_errors(Exception(msg)) + assert result == [ + SupersetError( + error_type=SupersetErrorType.CONNECTION_PORT_CLOSED_ERROR, + message='Port 12345 on hostname "localhost" refused the connection.', + level=ErrorLevel.ERROR, + extra={ + "invalid": ["host", "port"], + "engine_name": "Amazon Redshift", + "issue_codes": [ + {"code": 1008, "message": "Issue 1008 - The port is closed."} + ], + }, + ) + ] + + msg = dedent( + """ +psql: error: could not connect to server: Operation timed out + Is the server running on host "example.com" (93.184.216.34) and accepting + TCP/IP connections on port 12345? + """ + ) + result = RedshiftEngineSpec.extract_errors(Exception(msg)) + assert result == [ + SupersetError( + error_type=SupersetErrorType.CONNECTION_HOST_DOWN_ERROR, + message=( + 'The host "example.com" might be down, ' + "and can't be reached on port 12345." + ), + level=ErrorLevel.ERROR, + extra={ + "engine_name": "Amazon Redshift", + "issue_codes": [ + { + "code": 1009, + "message": "Issue 1009 - The host might be down, " + "and can't be reached on the provided port.", + } + ], + "invalid": ["host", "port"], + }, + ) + ] + + # response with IP only + msg = dedent( + """ +psql: error: could not connect to server: Operation timed out + Is the server running on host "93.184.216.34" and accepting + TCP/IP connections on port 12345? + """ + ) + result = RedshiftEngineSpec.extract_errors(Exception(msg)) + assert result == [ + SupersetError( + error_type=SupersetErrorType.CONNECTION_HOST_DOWN_ERROR, + message=( + 'The host "93.184.216.34" might be down, ' + "and can't be reached on port 12345." 
+ ), + level=ErrorLevel.ERROR, + extra={ + "engine_name": "Amazon Redshift", + "issue_codes": [ + { + "code": 1009, + "message": "Issue 1009 - The host might be down, " + "and can't be reached on the provided port.", + } + ], + "invalid": ["host", "port"], + }, + ) + ] + + msg = 'database "badDB" does not exist' + result = RedshiftEngineSpec.extract_errors(Exception(msg)) + assert result == [ + SupersetError( + error_type=SupersetErrorType.CONNECTION_UNKNOWN_DATABASE_ERROR, + message='We were unable to connect to your database named "badDB".' + " Please verify your database name and try again.", + level=ErrorLevel.ERROR, + extra={ + "engine_name": "Amazon Redshift", + "issue_codes": [ + { + "code": 10015, + "message": "Issue 1015 - Either the database is " + "spelled incorrectly or does not exist.", + } + ], + "invalid": ["database"], + }, + ) + ] diff --git a/tests/integration_tests/dict_import_export_tests.py b/tests/integration_tests/dict_import_export_tests.py new file mode 100644 index 0000000000000..de0aa832626ac --- /dev/null +++ b/tests/integration_tests/dict_import_export_tests.py @@ -0,0 +1,273 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
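+# Covers dict-based import/export of SqlaTable definitions: columns, metrics,
+# and the override/sync behaviour of SqlaTable.import_from_dict.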
+# isort:skip_file +"""Unit tests for Superset""" +import json +import unittest +from uuid import uuid4 + +import yaml + +from tests.integration_tests.test_app import app +from superset import db + +from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn +from superset.utils.database import get_example_database +from superset.utils.dict_import_export import export_to_dict + +from .base_tests import SupersetTestCase + +DBREF = "dict_import__export_test" +NAME_PREFIX = "dict_" +ID_PREFIX = 20000 + + +class TestDictImportExport(SupersetTestCase): + """Testing export import functionality for dashboards""" + + @classmethod + def delete_imports(cls): + with app.app_context(): + # Imported data clean up + session = db.session + for table in session.query(SqlaTable): + if DBREF in table.params_dict: + session.delete(table) + session.commit() + + @classmethod + def setUpClass(cls): + cls.delete_imports() + + @classmethod + def tearDownClass(cls): + cls.delete_imports() + + def create_table( + self, name, schema=None, id=0, cols_names=[], cols_uuids=None, metric_names=[] + ): + database_name = "main" + name = "{0}{1}".format(NAME_PREFIX, name) + params = {DBREF: id, "database_name": database_name} + + if cols_uuids is None: + cols_uuids = [None] * len(cols_names) + + dict_rep = { + "database_id": get_example_database().id, + "table_name": name, + "schema": schema, + "id": id, + "params": json.dumps(params), + "columns": [ + {"column_name": c, "uuid": u} for c, u in zip(cols_names, cols_uuids) + ], + "metrics": [{"metric_name": c, "expression": ""} for c in metric_names], + } + + table = SqlaTable( + id=id, schema=schema, table_name=name, params=json.dumps(params) + ) + for col_name, uuid in zip(cols_names, cols_uuids): + table.columns.append(TableColumn(column_name=col_name, uuid=uuid)) + for metric_name in metric_names: + table.metrics.append(SqlMetric(metric_name=metric_name, expression="")) + return table, dict_rep + + def yaml_compare(self, obj_1, obj_2): + obj_1_str = yaml.safe_dump(obj_1, default_flow_style=False) + obj_2_str = yaml.safe_dump(obj_2, default_flow_style=False) + self.assertEqual(obj_1_str, obj_2_str) + + def assert_table_equals(self, expected_ds, actual_ds): + self.assertEqual(expected_ds.table_name, actual_ds.table_name) + self.assertEqual(expected_ds.main_dttm_col, actual_ds.main_dttm_col) + self.assertEqual(expected_ds.schema, actual_ds.schema) + self.assertEqual(len(expected_ds.metrics), len(actual_ds.metrics)) + self.assertEqual(len(expected_ds.columns), len(actual_ds.columns)) + self.assertEqual( + set([c.column_name for c in expected_ds.columns]), + set([c.column_name for c in actual_ds.columns]), + ) + self.assertEqual( + set([m.metric_name for m in expected_ds.metrics]), + set([m.metric_name for m in actual_ds.metrics]), + ) + + def assert_datasource_equals(self, expected_ds, actual_ds): + self.assertEqual(expected_ds.datasource_name, actual_ds.datasource_name) + self.assertEqual(expected_ds.main_dttm_col, actual_ds.main_dttm_col) + self.assertEqual(len(expected_ds.metrics), len(actual_ds.metrics)) + self.assertEqual(len(expected_ds.columns), len(actual_ds.columns)) + self.assertEqual( + set([c.column_name for c in expected_ds.columns]), + set([c.column_name for c in actual_ds.columns]), + ) + self.assertEqual( + set([m.metric_name for m in expected_ds.metrics]), + set([m.metric_name for m in actual_ds.metrics]), + ) + + def test_import_table_no_metadata(self): + table, dict_table = self.create_table("pure_table", id=ID_PREFIX + 1) + new_table = 
SqlaTable.import_from_dict(db.session, dict_table) + db.session.commit() + imported_id = new_table.id + imported = self.get_table_by_id(imported_id) + self.assert_table_equals(table, imported) + self.yaml_compare(table.export_to_dict(), imported.export_to_dict()) + + def test_import_table_1_col_1_met(self): + table, dict_table = self.create_table( + "table_1_col_1_met", + id=ID_PREFIX + 2, + cols_names=["col1"], + cols_uuids=[uuid4()], + metric_names=["metric1"], + ) + imported_table = SqlaTable.import_from_dict(db.session, dict_table) + db.session.commit() + imported = self.get_table_by_id(imported_table.id) + self.assert_table_equals(table, imported) + self.assertEqual( + {DBREF: ID_PREFIX + 2, "database_name": "main"}, json.loads(imported.params) + ) + self.yaml_compare(table.export_to_dict(), imported.export_to_dict()) + + def test_import_table_2_col_2_met(self): + table, dict_table = self.create_table( + "table_2_col_2_met", + id=ID_PREFIX + 3, + cols_names=["c1", "c2"], + cols_uuids=[uuid4(), uuid4()], + metric_names=["m1", "m2"], + ) + imported_table = SqlaTable.import_from_dict(db.session, dict_table) + db.session.commit() + imported = self.get_table_by_id(imported_table.id) + self.assert_table_equals(table, imported) + self.yaml_compare(table.export_to_dict(), imported.export_to_dict()) + + def test_import_table_override_append(self): + table, dict_table = self.create_table( + "table_override", id=ID_PREFIX + 3, cols_names=["col1"], metric_names=["m1"] + ) + imported_table = SqlaTable.import_from_dict(db.session, dict_table) + db.session.commit() + table_over, dict_table_over = self.create_table( + "table_override", + id=ID_PREFIX + 3, + cols_names=["new_col1", "col2", "col3"], + metric_names=["new_metric1"], + ) + imported_over_table = SqlaTable.import_from_dict(db.session, dict_table_over) + db.session.commit() + + imported_over = self.get_table_by_id(imported_over_table.id) + self.assertEqual(imported_table.id, imported_over.id) + expected_table, _ = self.create_table( + "table_override", + id=ID_PREFIX + 3, + metric_names=["new_metric1", "m1"], + cols_names=["col1", "new_col1", "col2", "col3"], + cols_uuids=[col.uuid for col in imported_over.columns], + ) + self.assert_table_equals(expected_table, imported_over) + self.yaml_compare( + expected_table.export_to_dict(), imported_over.export_to_dict() + ) + + def test_import_table_override_sync(self): + table, dict_table = self.create_table( + "table_override", id=ID_PREFIX + 3, cols_names=["col1"], metric_names=["m1"] + ) + imported_table = SqlaTable.import_from_dict(db.session, dict_table) + db.session.commit() + table_over, dict_table_over = self.create_table( + "table_override", + id=ID_PREFIX + 3, + cols_names=["new_col1", "col2", "col3"], + metric_names=["new_metric1"], + ) + imported_over_table = SqlaTable.import_from_dict( + session=db.session, dict_rep=dict_table_over, sync=["metrics", "columns"] + ) + db.session.commit() + + imported_over = self.get_table_by_id(imported_over_table.id) + self.assertEqual(imported_table.id, imported_over.id) + expected_table, _ = self.create_table( + "table_override", + id=ID_PREFIX + 3, + metric_names=["new_metric1"], + cols_names=["new_col1", "col2", "col3"], + cols_uuids=[col.uuid for col in imported_over.columns], + ) + self.assert_table_equals(expected_table, imported_over) + self.yaml_compare( + expected_table.export_to_dict(), imported_over.export_to_dict() + ) + + def test_import_table_override_identical(self): + table, dict_table = self.create_table( + "copy_cat", + id=ID_PREFIX + 
4, + cols_names=["new_col1", "col2", "col3"], + metric_names=["new_metric1"], + ) + imported_table = SqlaTable.import_from_dict(db.session, dict_table) + db.session.commit() + copy_table, dict_copy_table = self.create_table( + "copy_cat", + id=ID_PREFIX + 4, + cols_names=["new_col1", "col2", "col3"], + metric_names=["new_metric1"], + ) + imported_copy_table = SqlaTable.import_from_dict(db.session, dict_copy_table) + db.session.commit() + self.assertEqual(imported_table.id, imported_copy_table.id) + self.assert_table_equals(copy_table, self.get_table_by_id(imported_table.id)) + self.yaml_compare( + imported_copy_table.export_to_dict(), imported_table.export_to_dict() + ) + + def test_export_datasource_ui_cli(self): + # TODO(bkyryliuk): find fake db is leaking from + self.delete_fake_db() + + cli_export = export_to_dict( + session=db.session, + recursive=True, + back_references=False, + include_defaults=False, + ) + self.get_resp("/login/", data=dict(username="admin", password="general")) + resp = self.get_resp( + "/databaseview/action_post", {"action": "yaml_export", "rowid": 1} + ) + ui_export = yaml.safe_load(resp) + self.assertEqual( + ui_export["databases"][0]["database_name"], + cli_export["databases"][0]["database_name"], + ) + self.assertEqual( + ui_export["databases"][0]["tables"], cli_export["databases"][0]["tables"] + ) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/integration_tests/dynamic_plugins_tests.py b/tests/integration_tests/dynamic_plugins_tests.py new file mode 100644 index 0000000000000..bdc9f61552ff2 --- /dev/null +++ b/tests/integration_tests/dynamic_plugins_tests.py @@ -0,0 +1,40 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from .base_tests import SupersetTestCase +from .conftest import with_feature_flags + + +class TestDynamicPlugins(SupersetTestCase): + @with_feature_flags(DYNAMIC_PLUGINS=False) + def test_dynamic_plugins_disabled(self): + """ + Dynamic Plugins: Responds not found when disabled + """ + self.login(username="admin") + uri = "/dynamic-plugins/api" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 404) + + @with_feature_flags(DYNAMIC_PLUGINS=True) + def test_dynamic_plugins_enabled(self): + """ + Dynamic Plugins: Responds successfully when enabled + """ + self.login(username="admin") + uri = "/dynamic-plugins/api" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 200) diff --git a/tests/integration_tests/email_tests.py b/tests/integration_tests/email_tests.py new file mode 100644 index 0000000000000..381b8cda1b771 --- /dev/null +++ b/tests/integration_tests/email_tests.py @@ -0,0 +1,233 @@ +# -*- coding: utf-8 -*- +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +"""Unit tests for email service in Superset""" +import logging +import ssl +import tempfile +import unittest +from email.mime.application import MIMEApplication +from email.mime.image import MIMEImage +from email.mime.multipart import MIMEMultipart +from unittest import mock + +from superset import app +from superset.utils import core as utils +from tests.integration_tests.base_tests import SupersetTestCase + +from .utils import read_fixture + +send_email_test = mock.Mock() +logger = logging.getLogger(__name__) + + +class TestEmailSmtp(SupersetTestCase): + def setUp(self): + app.config["SMTP_SSL"] = False + + @mock.patch("superset.utils.core.send_mime_email") + def test_send_smtp(self, mock_send_mime): + attachment = tempfile.NamedTemporaryFile() + attachment.write(b"attachment") + attachment.seek(0) + utils.send_email_smtp( + "to", "subject", "content", app.config, files=[attachment.name] + ) + assert mock_send_mime.called + call_args = mock_send_mime.call_args[0] + logger.debug(call_args) + assert call_args[0] == app.config["SMTP_MAIL_FROM"] + assert call_args[1] == ["to"] + msg = call_args[2] + assert msg["Subject"] == "subject" + assert msg["From"] == app.config["SMTP_MAIL_FROM"] + assert len(msg.get_payload()) == 2 + mimeapp = MIMEApplication("attachment") + assert msg.get_payload()[-1].get_payload() == mimeapp.get_payload() + + @mock.patch("superset.utils.core.send_mime_email") + def test_send_smtp_with_email_mutator(self, mock_send_mime): + attachment = tempfile.NamedTemporaryFile() + attachment.write(b"attachment") + attachment.seek(0) + + # putting this into a variable so that we can reset after the test + base_email_mutator = app.config["EMAIL_HEADER_MUTATOR"] + + def mutator(msg, **kwargs): + msg["foo"] = "bar" + return msg + + app.config["EMAIL_HEADER_MUTATOR"] = mutator + utils.send_email_smtp( + "to", "subject", "content", app.config, files=[attachment.name] + ) + assert mock_send_mime.called + call_args = mock_send_mime.call_args[0] + logger.debug(call_args) + assert call_args[0] == app.config["SMTP_MAIL_FROM"] + assert call_args[1] == ["to"] + msg = call_args[2] + assert msg["Subject"] == "subject" + assert msg["From"] == app.config["SMTP_MAIL_FROM"] + assert msg["foo"] == "bar" + assert len(msg.get_payload()) == 2 + mimeapp = MIMEApplication("attachment") + assert msg.get_payload()[-1].get_payload() == mimeapp.get_payload() + app.config["EMAIL_HEADER_MUTATOR"] = base_email_mutator + + @mock.patch("superset.utils.core.send_mime_email") + def test_send_smtp_data(self, mock_send_mime): + utils.send_email_smtp( + "to", "subject", "content", app.config, data={"1.txt": b"data"} + ) + assert mock_send_mime.called + call_args = mock_send_mime.call_args[0] + logger.debug(call_args) + assert call_args[0] == app.config["SMTP_MAIL_FROM"] + assert call_args[1] == ["to"] + msg = call_args[2] + assert 
msg["Subject"] == "subject" + assert msg["From"] == app.config["SMTP_MAIL_FROM"] + assert len(msg.get_payload()) == 2 + mimeapp = MIMEApplication("data") + assert msg.get_payload()[-1].get_payload() == mimeapp.get_payload() + + @mock.patch("superset.utils.core.send_mime_email") + def test_send_smtp_inline_images(self, mock_send_mime): + image = read_fixture("sample.png") + utils.send_email_smtp( + "to", "subject", "content", app.config, images=dict(blah=image) + ) + assert mock_send_mime.called + call_args = mock_send_mime.call_args[0] + logger.debug(call_args) + assert call_args[0] == app.config["SMTP_MAIL_FROM"] + assert call_args[1] == ["to"] + msg = call_args[2] + assert msg["Subject"] == "subject" + assert msg["From"] == app.config["SMTP_MAIL_FROM"] + assert len(msg.get_payload()) == 2 + mimeapp = MIMEImage(image) + assert msg.get_payload()[-1].get_payload() == mimeapp.get_payload() + + @mock.patch("superset.utils.core.send_mime_email") + def test_send_bcc_smtp(self, mock_send_mime): + attachment = tempfile.NamedTemporaryFile() + attachment.write(b"attachment") + attachment.seek(0) + utils.send_email_smtp( + "to", + "subject", + "content", + app.config, + files=[attachment.name], + cc="cc", + bcc="bcc", + ) + assert mock_send_mime.called + call_args = mock_send_mime.call_args[0] + assert call_args[0] == app.config["SMTP_MAIL_FROM"] + assert call_args[1] == ["to", "cc", "bcc"] + msg = call_args[2] + assert msg["Subject"] == "subject" + assert msg["From"] == app.config["SMTP_MAIL_FROM"] + assert len(msg.get_payload()) == 2 + mimeapp = MIMEApplication("attachment") + assert msg.get_payload()[-1].get_payload() == mimeapp.get_payload() + + @mock.patch("smtplib.SMTP_SSL") + @mock.patch("smtplib.SMTP") + def test_send_mime(self, mock_smtp, mock_smtp_ssl): + mock_smtp.return_value = mock.Mock() + mock_smtp_ssl.return_value = mock.Mock() + msg = MIMEMultipart() + utils.send_mime_email("from", "to", msg, app.config, dryrun=False) + mock_smtp.assert_called_with(app.config["SMTP_HOST"], app.config["SMTP_PORT"]) + assert mock_smtp.return_value.starttls.called + mock_smtp.return_value.login.assert_called_with( + app.config["SMTP_USER"], app.config["SMTP_PASSWORD"] + ) + mock_smtp.return_value.sendmail.assert_called_with( + "from", "to", msg.as_string() + ) + assert mock_smtp.return_value.quit.called + + @mock.patch("smtplib.SMTP_SSL") + @mock.patch("smtplib.SMTP") + def test_send_mime_ssl(self, mock_smtp, mock_smtp_ssl): + app.config["SMTP_SSL"] = True + mock_smtp.return_value = mock.Mock() + mock_smtp_ssl.return_value = mock.Mock() + utils.send_mime_email("from", "to", MIMEMultipart(), app.config, dryrun=False) + assert not mock_smtp.called + mock_smtp_ssl.assert_called_with( + app.config["SMTP_HOST"], app.config["SMTP_PORT"], context=None + ) + + @mock.patch("smtplib.SMTP_SSL") + @mock.patch("smtplib.SMTP") + def test_send_mime_ssl_server_auth(self, mock_smtp, mock_smtp_ssl): + app.config["SMTP_SSL"] = True + app.config["SMTP_SSL_SERVER_AUTH"] = True + mock_smtp.return_value = mock.Mock() + mock_smtp_ssl.return_value = mock.Mock() + utils.send_mime_email("from", "to", MIMEMultipart(), app.config, dryrun=False) + assert not mock_smtp.called + mock_smtp_ssl.assert_called_with( + app.config["SMTP_HOST"], app.config["SMTP_PORT"], context=mock.ANY + ) + called_context = mock_smtp_ssl.call_args.kwargs["context"] + self.assertEqual(called_context.verify_mode, ssl.CERT_REQUIRED) + + @mock.patch("smtplib.SMTP") + def test_send_mime_tls_server_auth(self, mock_smtp): + app.config["SMTP_STARTTLS"] = True + 
app.config["SMTP_SSL_SERVER_AUTH"] = True + mock_smtp.return_value = mock.Mock() + mock_smtp.return_value.starttls.return_value = mock.Mock() + utils.send_mime_email("from", "to", MIMEMultipart(), app.config, dryrun=False) + mock_smtp.return_value.starttls.assert_called_with(context=mock.ANY) + called_context = mock_smtp.return_value.starttls.call_args.kwargs["context"] + self.assertEqual(called_context.verify_mode, ssl.CERT_REQUIRED) + + @mock.patch("smtplib.SMTP_SSL") + @mock.patch("smtplib.SMTP") + def test_send_mime_noauth(self, mock_smtp, mock_smtp_ssl): + smtp_user = app.config["SMTP_USER"] + smtp_password = app.config["SMTP_PASSWORD"] + app.config["SMTP_USER"] = None + app.config["SMTP_PASSWORD"] = None + mock_smtp.return_value = mock.Mock() + mock_smtp_ssl.return_value = mock.Mock() + utils.send_mime_email("from", "to", MIMEMultipart(), app.config, dryrun=False) + assert not mock_smtp_ssl.called + mock_smtp.assert_called_with(app.config["SMTP_HOST"], app.config["SMTP_PORT"]) + assert not mock_smtp.login.called + app.config["SMTP_USER"] = smtp_user + app.config["SMTP_PASSWORD"] = smtp_password + + @mock.patch("smtplib.SMTP_SSL") + @mock.patch("smtplib.SMTP") + def test_send_mime_dryrun(self, mock_smtp, mock_smtp_ssl): + utils.send_mime_email("from", "to", MIMEMultipart(), app.config, dryrun=True) + assert not mock_smtp.called + assert not mock_smtp_ssl.called + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/integration_tests/embedded/__init__.py b/tests/integration_tests/embedded/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/embedded/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/embedded/api_tests.py b/tests/integration_tests/embedded/api_tests.py new file mode 100644 index 0000000000000..8f3950fcf5462 --- /dev/null +++ b/tests/integration_tests/embedded/api_tests.py @@ -0,0 +1,53 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# isort:skip_file +"""Tests for security api methods""" +from unittest import mock + +import pytest + +from superset import db +from superset.embedded.dao import EmbeddedDAO +from superset.models.dashboard import Dashboard +from tests.integration_tests.base_tests import SupersetTestCase +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) + + +class TestEmbeddedDashboardApi(SupersetTestCase): + resource_name = "embedded_dashboard" + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + @mock.patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + EMBEDDED_SUPERSET=True, + ) + def test_get_embedded_dashboard(self): + self.login("admin") + self.dash = db.session.query(Dashboard).filter_by(slug="births").first() + self.embedded = EmbeddedDAO.upsert(self.dash, []) + uri = f"api/v1/{self.resource_name}/{self.embedded.uuid}" + response = self.client.get(uri) + self.assert200(response) + + def test_get_embedded_dashboard_non_found(self): + self.login("admin") + uri = f"api/v1/{self.resource_name}/bad-uuid" + response = self.client.get(uri) + self.assert404(response) diff --git a/tests/integration_tests/embedded/dao_tests.py b/tests/integration_tests/embedded/dao_tests.py new file mode 100644 index 0000000000000..8160144a25cbc --- /dev/null +++ b/tests/integration_tests/embedded/dao_tests.py @@ -0,0 +1,51 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
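+# Covers EmbeddedDAO: upserting an embedded-dashboard config and finding it by UUID.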
+# isort:skip_file +import pytest + +import tests.integration_tests.test_app # pylint: disable=unused-import +from superset import db +from superset.embedded.dao import EmbeddedDAO +from superset.models.dashboard import Dashboard +from tests.integration_tests.base_tests import SupersetTestCase +from tests.integration_tests.fixtures.world_bank_dashboard import ( + load_world_bank_dashboard_with_slices, + load_world_bank_data, +) + + +class TestEmbeddedDAO(SupersetTestCase): + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def test_upsert(self): + dash = db.session.query(Dashboard).filter_by(slug="world_health").first() + assert not dash.embedded + EmbeddedDAO.upsert(dash, ["test.example.com"]) + assert dash.embedded + self.assertEqual(dash.embedded[0].allowed_domains, ["test.example.com"]) + original_uuid = dash.embedded[0].uuid + self.assertIsNotNone(original_uuid) + EmbeddedDAO.upsert(dash, []) + self.assertEqual(dash.embedded[0].allowed_domains, []) + self.assertEqual(dash.embedded[0].uuid, original_uuid) + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def test_get_by_uuid(self): + dash = db.session.query(Dashboard).filter_by(slug="world_health").first() + uuid = str(EmbeddedDAO.upsert(dash, ["test.example.com"]).uuid) + db.session.expire_all() + embedded = EmbeddedDAO.find_by_id(uuid) + self.assertIsNotNone(embedded) diff --git a/tests/integration_tests/embedded/test_view.py b/tests/integration_tests/embedded/test_view.py new file mode 100644 index 0000000000000..9f524e9c09e2b --- /dev/null +++ b/tests/integration_tests/embedded/test_view.py @@ -0,0 +1,72 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
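+# Covers the public /embedded/<uuid> view: a permitted request, a request from a
+# referrer outside allowed_domains (403), and an unknown UUID (404).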
+from __future__ import annotations + +from typing import TYPE_CHECKING +from unittest import mock + +import pytest + +from superset import db +from superset.embedded.dao import EmbeddedDAO +from superset.models.dashboard import Dashboard +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) +from tests.integration_tests.fixtures.client import client + +if TYPE_CHECKING: + from typing import Any + + from flask.testing import FlaskClient + + +@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") +@mock.patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + EMBEDDED_SUPERSET=True, +) +def test_get_embedded_dashboard(client: FlaskClient[Any]): + dash = db.session.query(Dashboard).filter_by(slug="births").first() + embedded = EmbeddedDAO.upsert(dash, []) + uri = f"embedded/{embedded.uuid}" + response = client.get(uri) + assert response.status_code == 200 + + +@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") +@mock.patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + EMBEDDED_SUPERSET=True, +) +def test_get_embedded_dashboard_referrer_not_allowed(client: FlaskClient[Any]): + dash = db.session.query(Dashboard).filter_by(slug="births").first() + embedded = EmbeddedDAO.upsert(dash, ["test.example.com"]) + uri = f"embedded/{embedded.uuid}" + response = client.get(uri) + assert response.status_code == 403 + + +@mock.patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + EMBEDDED_SUPERSET=True, +) +def test_get_embedded_dashboard_non_found(client: FlaskClient[Any]): + uri = f"embedded/bad-uuid" + response = client.get(uri) + assert response.status_code == 404 diff --git a/tests/integration_tests/event_logger_tests.py b/tests/integration_tests/event_logger_tests.py new file mode 100644 index 0000000000000..4553bb9dc789b --- /dev/null +++ b/tests/integration_tests/event_logger_tests.py @@ -0,0 +1,232 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
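+# Covers event-logger wiring: get_event_logger_from_cfg_value, the log_this
+# decorators, and context-manager logging through a custom AbstractEventLogger.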
+import logging +import time +import unittest +from datetime import datetime, timedelta +from typing import Any, Callable, cast, Dict, Iterator, Optional, Type, Union +from unittest.mock import patch + +from flask import current_app +from freezegun import freeze_time + +from superset import security_manager +from superset.utils.log import ( + AbstractEventLogger, + DBEventLogger, + get_event_logger_from_cfg_value, +) +from tests.integration_tests.test_app import app + + +class TestEventLogger(unittest.TestCase): + def test_correct_config_object(self): + # test that assignment of concrete AbstractBaseClass impl returns + # unmodified object + obj = DBEventLogger() + res = get_event_logger_from_cfg_value(obj) + self.assertIs(obj, res) + + def test_config_class_deprecation(self): + # test that assignment of a class object to EVENT_LOGGER is correctly + # deprecated + res = None + + # print warning if a class is assigned to EVENT_LOGGER + with self.assertLogs(level="WARNING"): + res = get_event_logger_from_cfg_value(DBEventLogger) + + # class is instantiated and returned + self.assertIsInstance(res, DBEventLogger) + + def test_raises_typerror_if_not_abc(self): + # test that assignment of non AbstractEventLogger derived type raises + # TypeError + with self.assertRaises(TypeError): + get_event_logger_from_cfg_value(logging.getLogger()) + + @patch.object(DBEventLogger, "log") + def test_log_this(self, mock_log): + logger = DBEventLogger() + + @logger.log_this + def test_func(): + time.sleep(0.05) + return 1 + + with app.test_request_context("/superset/dashboard/1/?myparam=foo"): + result = test_func() + payload = mock_log.call_args[1] + self.assertEqual(result, 1) + self.assertEqual( + payload["records"], + [ + { + "myparam": "foo", + "path": "/superset/dashboard/1/", + "url_rule": "/superset/dashboard//", + "object_ref": test_func.__qualname__, + } + ], + ) + self.assertGreaterEqual(payload["duration_ms"], 50) + + @patch.object(DBEventLogger, "log") + def test_log_this_with_extra_payload(self, mock_log): + logger = DBEventLogger() + + @logger.log_this_with_extra_payload + def test_func(arg1, add_extra_log_payload, karg1=1): + time.sleep(0.1) + add_extra_log_payload(foo="bar") + return arg1 * karg1 + + with app.test_request_context(): + result = test_func(1, karg1=2) # pylint: disable=no-value-for-parameter + payload = mock_log.call_args[1] + self.assertEqual(result, 2) + self.assertEqual( + payload["records"], + [ + { + "foo": "bar", + "path": "/", + "karg1": 2, + "object_ref": test_func.__qualname__, + } + ], + ) + self.assertGreaterEqual(payload["duration_ms"], 100) + + @patch("superset.utils.core.g", spec={}) + @freeze_time("Jan 14th, 2020", auto_tick_seconds=15) + def test_context_manager_log(self, mock_g): + class DummyEventLogger(AbstractEventLogger): + def __init__(self): + self.records = [] + + def log( + self, + user_id: Optional[int], + action: str, + dashboard_id: Optional[int], + duration_ms: Optional[int], + slice_id: Optional[int], + referrer: Optional[str], + *args: Any, + **kwargs: Any, + ): + self.records.append( + {**kwargs, "user_id": user_id, "duration": duration_ms} + ) + + logger = DummyEventLogger() + + with app.test_request_context(): + mock_g.user = security_manager.find_user("gamma") + with logger(action="foo", engine="bar"): + pass + + assert logger.records == [ + { + "records": [{"path": "/", "engine": "bar"}], + "user_id": 2, + "duration": 15000.0, + } + ] + + @patch("superset.utils.core.g", spec={}) + def test_context_manager_log_with_context(self, mock_g): + class 
DummyEventLogger(AbstractEventLogger): + def __init__(self): + self.records = [] + + def log( + self, + user_id: Optional[int], + action: str, + dashboard_id: Optional[int], + duration_ms: Optional[int], + slice_id: Optional[int], + referrer: Optional[str], + *args: Any, + **kwargs: Any, + ): + self.records.append( + {**kwargs, "user_id": user_id, "duration": duration_ms} + ) + + logger = DummyEventLogger() + + with app.test_request_context(): + mock_g.user = security_manager.find_user("gamma") + logger.log_with_context( + action="foo", + duration=timedelta(days=64, seconds=29156, microseconds=10), + object_ref={"baz": "food"}, + log_to_statsd=False, + payload_override={"engine": "sqllite"}, + ) + + assert logger.records == [ + { + "records": [ + { + "path": "/", + "object_ref": {"baz": "food"}, + "payload_override": {"engine": "sqllite"}, + } + ], + "user_id": 2, + "duration": 5558756000, + } + ] + + @patch("superset.utils.core.g", spec={}) + def test_log_with_context_user_null(self, mock_g): + class DummyEventLogger(AbstractEventLogger): + def __init__(self): + self.records = [] + + def log( + self, + user_id: Optional[int], + action: str, + dashboard_id: Optional[int], + duration_ms: Optional[int], + slice_id: Optional[int], + referrer: Optional[str], + *args: Any, + **kwargs: Any, + ): + self.records.append( + {**kwargs, "user_id": user_id, "duration": duration_ms} + ) + + logger = DummyEventLogger() + + with app.test_request_context(): + mock_g.side_effect = Exception("oops") + logger.log_with_context( + action="foo", + duration=timedelta(days=64, seconds=29156, microseconds=10), + object_ref={"baz": "food"}, + log_to_statsd=False, + payload_override={"engine": "sqllite"}, + ) + + assert logger.records[0]["user_id"] == None diff --git a/tests/integration_tests/explore/__init__.py b/tests/integration_tests/explore/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/explore/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/explore/api_tests.py b/tests/integration_tests/explore/api_tests.py new file mode 100644 index 0000000000000..af5bd8813753d --- /dev/null +++ b/tests/integration_tests/explore/api_tests.py @@ -0,0 +1,240 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import json +from unittest.mock import patch + +import pytest +from flask_appbuilder.security.sqla.models import User +from sqlalchemy.orm import Session + +from superset.connectors.sqla.models import SqlaTable +from superset.explore.exceptions import DatasetAccessDeniedError +from superset.explore.form_data.commands.state import TemporaryExploreState +from superset.extensions import cache_manager +from superset.models.slice import Slice +from tests.integration_tests.fixtures.world_bank_dashboard import ( + load_world_bank_dashboard_with_slices, + load_world_bank_data, +) +from tests.integration_tests.test_app import app + +FORM_DATA_KEY = "form_data_key" +FORM_DATA = {"test": "test value"} + + +@pytest.fixture +def chart_id(load_world_bank_dashboard_with_slices) -> int: + with app.app_context() as ctx: + session: Session = ctx.app.appbuilder.get_session + chart = session.query(Slice).filter_by(slice_name="World's Population").one() + return chart.id + + +@pytest.fixture +def admin_id() -> int: + with app.app_context() as ctx: + session: Session = ctx.app.appbuilder.get_session + admin = session.query(User).filter_by(username="admin").one() + return admin.id + + +@pytest.fixture +def dataset() -> int: + with app.app_context() as ctx: + session: Session = ctx.app.appbuilder.get_session + dataset = ( + session.query(SqlaTable) + .filter_by(table_name="wb_health_population") + .first() + ) + return dataset + + +@pytest.fixture(autouse=True) +def cache(chart_id, admin_id, dataset): + entry: TemporaryExploreState = { + "owner": admin_id, + "datasource_id": dataset.id, + "datasource_type": dataset.type, + "chart_id": chart_id, + "form_data": json.dumps(FORM_DATA), + } + cache_manager.explore_form_data_cache.set(FORM_DATA_KEY, entry) + + +# partially match the dataset using the most important attributes +def assert_dataset(result, dataset_id): + dataset = result["dataset"] + assert dataset["id"] == dataset_id + assert dataset["datasource_name"] == "wb_health_population" + assert dataset["is_sqllab_view"] == False + assert dataset["main_dttm_col"] == "year" + assert dataset["sql"] == None + assert dataset["type"] == "table" + assert dataset["uid"] == f"{dataset_id}__table" + + +# partially match the slice using the most important attributes +def assert_slice(result, chart_id, dataset_id): + slice = result["slice"] + assert slice["edit_url"] == f"/chart/edit/{chart_id}" + assert slice["is_managed_externally"] == False + assert slice["slice_id"] == chart_id + assert slice["slice_name"] == "World's Population" + assert slice["form_data"]["datasource"] == f"{dataset_id}__table" + assert slice["form_data"]["viz_type"] == "big_number" + + +def test_no_params_provided(test_client, login_as_admin): + resp = test_client.get(f"api/v1/explore/") + assert resp.status_code == 200 + data = json.loads(resp.data.decode("utf-8")) + result = data.get("result") + assert result["dataset"]["name"] == "[Missing Dataset]" + assert result["form_data"]["datasource"] == "None__table" + assert result["message"] == None + assert result["slice"] == None + + +def test_get_from_cache(test_client, login_as_admin, 
dataset): + resp = test_client.get( + f"api/v1/explore/?form_data_key={FORM_DATA_KEY}&datasource_id={dataset.id}&datasource_type={dataset.type}" + ) + assert resp.status_code == 200 + data = json.loads(resp.data.decode("utf-8")) + result = data.get("result") + assert_dataset(result, dataset.id) + assert result["form_data"]["datasource"] == f"{dataset.id}__table" + assert result["form_data"]["test"] == "test value" + assert result["message"] == None + assert result["slice"] == None + + +def test_get_from_cache_unknown_key_chart_id( + test_client, login_as_admin, chart_id, dataset +): + unknown_key = "unknown_key" + resp = test_client.get( + f"api/v1/explore/?form_data_key={unknown_key}&slice_id={chart_id}" + ) + assert resp.status_code == 200 + data = json.loads(resp.data.decode("utf-8")) + result = data.get("result") + assert_dataset(result, dataset.id) + assert_slice(result, chart_id, dataset.id) + assert result["form_data"]["datasource"] == f"{dataset.id}__table" + assert ( + result["message"] + == "Form data not found in cache, reverting to chart metadata." + ) + + +def test_get_from_cache_unknown_key_dataset(test_client, login_as_admin, dataset): + unknown_key = "unknown_key" + resp = test_client.get( + f"api/v1/explore/?form_data_key={unknown_key}&datasource_id={dataset.id}&datasource_type={dataset.type}" + ) + assert resp.status_code == 200 + data = json.loads(resp.data.decode("utf-8")) + result = data.get("result") + assert_dataset(result, dataset.id) + assert result["form_data"]["datasource"] == f"{dataset.id}__table" + assert ( + result["message"] + == "Form data not found in cache, reverting to dataset metadata." + ) + assert result["slice"] == None + + +def test_get_from_cache_unknown_key_no_extra_parameters(test_client, login_as_admin): + unknown_key = "unknown_key" + resp = test_client.get(f"api/v1/explore/?form_data_key={unknown_key}") + assert resp.status_code == 200 + data = json.loads(resp.data.decode("utf-8")) + result = data.get("result") + assert result["dataset"]["name"] == "[Missing Dataset]" + assert result["form_data"]["datasource"] == "None__table" + assert result["message"] == None + assert result["slice"] == None + + +def test_get_from_permalink(test_client, login_as_admin, chart_id, dataset): + form_data = { + "chart_id": chart_id, + "datasource": f"{dataset.id}__{dataset.type}", + **FORM_DATA, + } + resp = test_client.post(f"api/v1/explore/permalink", json={"formData": form_data}) + data = json.loads(resp.data.decode("utf-8")) + permalink_key = data["key"] + resp = test_client.get(f"api/v1/explore/?permalink_key={permalink_key}") + assert resp.status_code == 200 + data = json.loads(resp.data.decode("utf-8")) + result = data.get("result") + assert_dataset(result, dataset.id) + assert result["form_data"]["datasource"] == f"{dataset.id}__table" + assert result["form_data"]["test"] == "test value" + assert result["message"] == None + assert result["slice"] == None + + +def test_get_from_permalink_unknown_key(test_client, login_as_admin): + unknown_key = "unknown_key" + resp = test_client.get(f"api/v1/explore/?permalink_key={unknown_key}") + assert resp.status_code == 404 + + +@patch("superset.security.SupersetSecurityManager.can_access_datasource") +def test_get_dataset_access_denied( + mock_can_access_datasource, test_client, login_as_admin, dataset +): + message = "Dataset access denied" + mock_can_access_datasource.side_effect = DatasetAccessDeniedError( + message=message, datasource_id=dataset.id, datasource_type=dataset.type + ) + resp = test_client.get( + 
f"api/v1/explore/?form_data_key={FORM_DATA_KEY}&datasource_id={dataset.id}&datasource_type={dataset.type}" + ) + data = json.loads(resp.data.decode("utf-8")) + assert resp.status_code == 403 + assert data["datasource_id"] == dataset.id + assert data["datasource_type"] == dataset.type + assert data["message"] == message + + +@patch("superset.datasource.dao.DatasourceDAO.get_datasource") +def test_wrong_endpoint(mock_get_datasource, test_client, login_as_admin, dataset): + dataset.default_endpoint = "another_endpoint" + mock_get_datasource.return_value = dataset + resp = test_client.get( + f"api/v1/explore/?datasource_id={dataset.id}&datasource_type={dataset.type}" + ) + data = json.loads(resp.data.decode("utf-8")) + assert resp.status_code == 302 + assert data["redirect"] == dataset.default_endpoint + + +def test_get_url_params(test_client, login_as_admin, chart_id): + resp = test_client.get(f"api/v1/explore/?slice_id={chart_id}&foo=bar") + assert resp.status_code == 200 + data = json.loads(resp.data.decode("utf-8")) + result = data.get("result") + + assert result["form_data"]["url_params"] == { + "foo": "bar", + "slice_id": str(chart_id), + } diff --git a/tests/integration_tests/explore/form_data/__init__.py b/tests/integration_tests/explore/form_data/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/explore/form_data/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/explore/form_data/api_tests.py b/tests/integration_tests/explore/form_data/api_tests.py new file mode 100644 index 0000000000000..0e73d0b51656a --- /dev/null +++ b/tests/integration_tests/explore/form_data/api_tests.py @@ -0,0 +1,407 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+import json +from unittest.mock import patch + +import pytest +from flask_appbuilder.security.sqla.models import User +from sqlalchemy.orm import Session + +from superset.connectors.sqla.models import SqlaTable +from superset.datasets.commands.exceptions import DatasetAccessDeniedError +from superset.explore.form_data.commands.state import TemporaryExploreState +from superset.extensions import cache_manager +from superset.models.slice import Slice +from superset.utils.core import DatasourceType +from tests.integration_tests.fixtures.world_bank_dashboard import ( + load_world_bank_dashboard_with_slices, + load_world_bank_data, +) +from tests.integration_tests.test_app import app + +KEY = "test-key" +INITIAL_FORM_DATA = json.dumps({"test": "initial value"}) +UPDATED_FORM_DATA = json.dumps({"test": "updated value"}) + + +@pytest.fixture +def chart_id(load_world_bank_dashboard_with_slices) -> int: + with app.app_context() as ctx: + session: Session = ctx.app.appbuilder.get_session + chart = session.query(Slice).filter_by(slice_name="World's Population").one() + return chart.id + + +@pytest.fixture +def admin_id() -> int: + with app.app_context() as ctx: + session: Session = ctx.app.appbuilder.get_session + admin = session.query(User).filter_by(username="admin").one() + return admin.id + + +@pytest.fixture +def datasource() -> int: + with app.app_context() as ctx: + session: Session = ctx.app.appbuilder.get_session + dataset = ( + session.query(SqlaTable) + .filter_by(table_name="wb_health_population") + .first() + ) + return dataset + + +@pytest.fixture(autouse=True) +def cache(chart_id, admin_id, datasource): + entry: TemporaryExploreState = { + "owner": admin_id, + "datasource_id": datasource.id, + "datasource_type": datasource.type, + "chart_id": chart_id, + "form_data": INITIAL_FORM_DATA, + } + cache_manager.explore_form_data_cache.set(KEY, entry) + + +def test_post(test_client, login_as_admin, chart_id: int, datasource: SqlaTable): + payload = { + "datasource_id": datasource.id, + "datasource_type": datasource.type, + "chart_id": chart_id, + "form_data": INITIAL_FORM_DATA, + } + resp = test_client.post("api/v1/explore/form_data", json=payload) + assert resp.status_code == 201 + + +def test_post_bad_request_non_string( + test_client, login_as_admin, chart_id: int, datasource: SqlaTable +): + payload = { + "datasource_id": datasource.id, + "datasource_type": datasource.type, + "chart_id": chart_id, + "form_data": 1234, + } + resp = test_client.post("api/v1/explore/form_data", json=payload) + assert resp.status_code == 400 + + +def test_post_bad_request_non_json_string( + test_client, login_as_admin, chart_id: int, datasource: SqlaTable +): + payload = { + "datasource_id": datasource.id, + "datasource_type": datasource.type, + "chart_id": chart_id, + "form_data": "foo", + } + resp = test_client.post("api/v1/explore/form_data", json=payload) + assert resp.status_code == 400 + + +def test_post_access_denied( + test_client, login_as, chart_id: int, datasource: SqlaTable +): + login_as("gamma") + payload = { + "datasource_id": datasource.id, + "datasource_type": datasource.type, + "chart_id": chart_id, + "form_data": INITIAL_FORM_DATA, + } + resp = test_client.post("api/v1/explore/form_data", json=payload) + assert resp.status_code == 403 + + +def test_post_same_key_for_same_context( + test_client, login_as_admin, chart_id: int, datasource: SqlaTable +): + payload = { + "datasource_id": datasource.id, + "datasource_type": datasource.type, + "chart_id": chart_id, + "form_data": 
UPDATED_FORM_DATA, + } + resp = test_client.post("api/v1/explore/form_data?tab_id=1", json=payload) + data = json.loads(resp.data.decode("utf-8")) + first_key = data.get("key") + resp = test_client.post("api/v1/explore/form_data?tab_id=1", json=payload) + data = json.loads(resp.data.decode("utf-8")) + second_key = data.get("key") + assert first_key == second_key + + +def test_post_different_key_for_different_context( + test_client, login_as_admin, chart_id: int, datasource: SqlaTable +): + payload = { + "datasource_id": datasource.id, + "datasource_type": datasource.type, + "chart_id": chart_id, + "form_data": UPDATED_FORM_DATA, + } + resp = test_client.post("api/v1/explore/form_data?tab_id=1", json=payload) + data = json.loads(resp.data.decode("utf-8")) + first_key = data.get("key") + payload = { + "datasource_id": datasource.id, + "datasource_type": datasource.type, + "form_data": json.dumps({"test": "initial value"}), + } + resp = test_client.post("api/v1/explore/form_data?tab_id=1", json=payload) + data = json.loads(resp.data.decode("utf-8")) + second_key = data.get("key") + assert first_key != second_key + + +def test_post_same_key_for_same_tab_id( + test_client, login_as_admin, chart_id: int, datasource: SqlaTable +): + payload = { + "datasource_id": datasource.id, + "datasource_type": datasource.type, + "chart_id": chart_id, + "form_data": json.dumps({"test": "initial value"}), + } + resp = test_client.post("api/v1/explore/form_data?tab_id=1", json=payload) + data = json.loads(resp.data.decode("utf-8")) + first_key = data.get("key") + resp = test_client.post("api/v1/explore/form_data?tab_id=1", json=payload) + data = json.loads(resp.data.decode("utf-8")) + second_key = data.get("key") + assert first_key == second_key + + +def test_post_different_key_for_different_tab_id( + test_client, login_as_admin, chart_id: int, datasource: SqlaTable +): + payload = { + "datasource_id": datasource.id, + "datasource_type": datasource.type, + "chart_id": chart_id, + "form_data": json.dumps({"test": "initial value"}), + } + resp = test_client.post("api/v1/explore/form_data?tab_id=1", json=payload) + data = json.loads(resp.data.decode("utf-8")) + first_key = data.get("key") + resp = test_client.post("api/v1/explore/form_data?tab_id=2", json=payload) + data = json.loads(resp.data.decode("utf-8")) + second_key = data.get("key") + assert first_key != second_key + + +def test_post_different_key_for_no_tab_id( + test_client, login_as_admin, chart_id: int, datasource: SqlaTable +): + payload = { + "datasource_id": datasource.id, + "datasource_type": datasource.type, + "chart_id": chart_id, + "form_data": INITIAL_FORM_DATA, + } + resp = test_client.post("api/v1/explore/form_data", json=payload) + data = json.loads(resp.data.decode("utf-8")) + first_key = data.get("key") + resp = test_client.post("api/v1/explore/form_data", json=payload) + data = json.loads(resp.data.decode("utf-8")) + second_key = data.get("key") + assert first_key != second_key + + +def test_put(test_client, login_as_admin, chart_id: int, datasource: SqlaTable): + payload = { + "datasource_id": datasource.id, + "datasource_type": datasource.type, + "chart_id": chart_id, + "form_data": UPDATED_FORM_DATA, + } + resp = test_client.put(f"api/v1/explore/form_data/{KEY}", json=payload) + assert resp.status_code == 200 + + +def test_put_same_key_for_same_tab_id( + test_client, login_as_admin, chart_id: int, datasource: SqlaTable +): + payload = { + "datasource_id": datasource.id, + "datasource_type": datasource.type, + "chart_id": chart_id, + 
"form_data": UPDATED_FORM_DATA, + } + resp = test_client.put(f"api/v1/explore/form_data/{KEY}?tab_id=1", json=payload) + data = json.loads(resp.data.decode("utf-8")) + first_key = data.get("key") + resp = test_client.put(f"api/v1/explore/form_data/{KEY}?tab_id=1", json=payload) + data = json.loads(resp.data.decode("utf-8")) + second_key = data.get("key") + assert first_key == second_key + + +def test_put_different_key_for_different_tab_id( + test_client, login_as_admin, chart_id: int, datasource: SqlaTable +): + payload = { + "datasource_id": datasource.id, + "datasource_type": datasource.type, + "chart_id": chart_id, + "form_data": UPDATED_FORM_DATA, + } + resp = test_client.put(f"api/v1/explore/form_data/{KEY}?tab_id=1", json=payload) + data = json.loads(resp.data.decode("utf-8")) + first_key = data.get("key") + resp = test_client.put(f"api/v1/explore/form_data/{KEY}?tab_id=2", json=payload) + data = json.loads(resp.data.decode("utf-8")) + second_key = data.get("key") + assert first_key != second_key + + +def test_put_different_key_for_no_tab_id( + test_client, login_as_admin, chart_id: int, datasource: SqlaTable +): + payload = { + "datasource_id": datasource.id, + "datasource_type": datasource.type, + "chart_id": chart_id, + "form_data": UPDATED_FORM_DATA, + } + resp = test_client.put(f"api/v1/explore/form_data/{KEY}", json=payload) + data = json.loads(resp.data.decode("utf-8")) + first_key = data.get("key") + resp = test_client.put(f"api/v1/explore/form_data/{KEY}", json=payload) + data = json.loads(resp.data.decode("utf-8")) + second_key = data.get("key") + assert first_key != second_key + + +def test_put_bad_request( + test_client, login_as_admin, chart_id: int, datasource: SqlaTable +): + payload = { + "datasource_id": datasource.id, + "datasource_type": datasource.type, + "chart_id": chart_id, + "form_data": 1234, + } + resp = test_client.put(f"api/v1/explore/form_data/{KEY}", json=payload) + assert resp.status_code == 400 + + +def test_put_bad_request_non_string( + test_client, login_as_admin, chart_id: int, datasource: SqlaTable +): + payload = { + "datasource_id": datasource.id, + "datasource_type": datasource.type, + "chart_id": chart_id, + "form_data": 1234, + } + resp = test_client.put(f"api/v1/explore/form_data/{KEY}", json=payload) + assert resp.status_code == 400 + + +def test_put_bad_request_non_json_string( + test_client, login_as_admin, chart_id: int, datasource: SqlaTable +): + payload = { + "datasource_id": datasource.id, + "datasource_type": datasource.type, + "chart_id": chart_id, + "form_data": "foo", + } + resp = test_client.put(f"api/v1/explore/form_data/{KEY}", json=payload) + assert resp.status_code == 400 + + +def test_put_access_denied(test_client, login_as, chart_id: int, datasource: SqlaTable): + login_as("gamma") + payload = { + "datasource_id": datasource.id, + "datasource_type": datasource.type, + "chart_id": chart_id, + "form_data": UPDATED_FORM_DATA, + } + resp = test_client.put(f"api/v1/explore/form_data/{KEY}", json=payload) + assert resp.status_code == 403 + + +def test_put_not_owner(test_client, login_as, chart_id: int, datasource: SqlaTable): + login_as("gamma") + payload = { + "datasource_id": datasource.id, + "datasource_type": datasource.type, + "chart_id": chart_id, + "form_data": UPDATED_FORM_DATA, + } + resp = test_client.put(f"api/v1/explore/form_data/{KEY}", json=payload) + assert resp.status_code == 403 + + +def test_get_key_not_found(test_client, login_as_admin): + resp = test_client.get(f"api/v1/explore/form_data/unknown-key") + assert 
resp.status_code == 404 + + +def test_get(test_client, login_as_admin): + resp = test_client.get(f"api/v1/explore/form_data/{KEY}") + assert resp.status_code == 200 + data = json.loads(resp.data.decode("utf-8")) + assert INITIAL_FORM_DATA == data.get("form_data") + + +def test_get_access_denied(test_client, login_as): + login_as("gamma") + resp = test_client.get(f"api/v1/explore/form_data/{KEY}") + assert resp.status_code == 403 + + +@patch("superset.security.SupersetSecurityManager.can_access_datasource") +def test_get_dataset_access_denied( + mock_can_access_datasource, test_client, login_as_admin +): + mock_can_access_datasource.side_effect = DatasetAccessDeniedError() + resp = test_client.get(f"api/v1/explore/form_data/{KEY}") + assert resp.status_code == 403 + + +def test_delete(test_client, login_as_admin): + resp = test_client.delete(f"api/v1/explore/form_data/{KEY}") + assert resp.status_code == 200 + + +def test_delete_access_denied(test_client, login_as): + login_as("gamma") + resp = test_client.delete(f"api/v1/explore/form_data/{KEY}") + assert resp.status_code == 403 + + +def test_delete_not_owner( + test_client, login_as_admin, chart_id: int, datasource: SqlaTable, admin_id: int +): + another_key = "another_key" + another_owner = admin_id + 1 + entry: TemporaryExploreState = { + "owner": another_owner, + "datasource_id": datasource.id, + "datasource_type": DatasourceType(datasource.type), + "chart_id": chart_id, + "form_data": INITIAL_FORM_DATA, + } + cache_manager.explore_form_data_cache.set(another_key, entry) + resp = test_client.delete(f"api/v1/explore/form_data/{another_key}") + assert resp.status_code == 403 diff --git a/tests/integration_tests/explore/form_data/commands_tests.py b/tests/integration_tests/explore/form_data/commands_tests.py new file mode 100644 index 0000000000000..18dd8415f6c60 --- /dev/null +++ b/tests/integration_tests/explore/form_data/commands_tests.py @@ -0,0 +1,348 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
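+# Integration tests for the explore form_data commands: CreateFormDataCommand, +# GetFormDataCommand, UpdateFormDataCommand and DeleteFormDataCommand, exercised +# against temporary dataset, slice and query fixtures.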
+ +import json +from unittest.mock import patch + +import pytest + +from superset import app, db, security, security_manager +from superset.commands.exceptions import DatasourceTypeInvalidError +from superset.connectors.sqla.models import SqlaTable +from superset.explore.form_data.commands.create import CreateFormDataCommand +from superset.explore.form_data.commands.delete import DeleteFormDataCommand +from superset.explore.form_data.commands.get import GetFormDataCommand +from superset.explore.form_data.commands.parameters import CommandParameters +from superset.explore.form_data.commands.update import UpdateFormDataCommand +from superset.models.slice import Slice +from superset.models.sql_lab import Query +from superset.utils.core import DatasourceType, get_example_default_schema +from superset.utils.database import get_example_database +from tests.integration_tests.base_tests import SupersetTestCase + + +class TestCreateFormDataCommand(SupersetTestCase): + @pytest.fixture() + def create_dataset(self): + with self.create_app().app_context(): + dataset = SqlaTable( + table_name="dummy_sql_table", + database=get_example_database(), + schema=get_example_default_schema(), + sql="select 123 as intcol, 'abc' as strcol", + ) + session = db.session + session.add(dataset) + session.commit() + + yield dataset + + # rollback + session.delete(dataset) + session.commit() + + @pytest.fixture() + def create_slice(self): + with self.create_app().app_context(): + session = db.session + dataset = ( + session.query(SqlaTable).filter_by(table_name="dummy_sql_table").first() + ) + slice = Slice( + datasource_id=dataset.id, + datasource_type=DatasourceType.TABLE, + datasource_name="tmp_perm_table", + slice_name="slice_name", + ) + + session.add(slice) + session.commit() + + yield slice + + # rollback + session.delete(slice) + session.commit() + + @pytest.fixture() + def create_query(self): + with self.create_app().app_context(): + session = db.session + + query = Query( + sql="select 1 as foo;", + client_id="sldkfjlk", + database=get_example_database(), + ) + + session.add(query) + session.commit() + + yield query + + # rollback + session.delete(query) + session.commit() + + @patch("superset.security.manager.g") + @pytest.mark.usefixtures("create_dataset", "create_slice") + def test_create_form_data_command(self, mock_g): + mock_g.user = security_manager.find_user("admin") + + dataset = ( + db.session.query(SqlaTable).filter_by(table_name="dummy_sql_table").first() + ) + slice = db.session.query(Slice).filter_by(slice_name="slice_name").first() + + datasource = f"{dataset.id}__{DatasourceType.TABLE}" + args = CommandParameters( + datasource_id=dataset.id, + datasource_type=DatasourceType.TABLE, + chart_id=slice.id, + tab_id=1, + form_data=json.dumps({"datasource": datasource}), + ) + command = CreateFormDataCommand(args) + + assert isinstance(command.run(), str) + + @patch("superset.security.manager.g") + @pytest.mark.usefixtures("create_dataset", "create_slice", "create_query") + def test_create_form_data_command_invalid_type(self, mock_g): + mock_g.user = security_manager.find_user("admin") + app.config["EXPLORE_FORM_DATA_CACHE_CONFIG"] = { + "REFRESH_TIMEOUT_ON_RETRIEVAL": True + } + + dataset = ( + db.session.query(SqlaTable).filter_by(table_name="dummy_sql_table").first() + ) + slice = db.session.query(Slice).filter_by(slice_name="slice_name").first() + + datasource = f"{dataset.id}__{DatasourceType.TABLE}" + create_args = CommandParameters( + datasource_id=dataset.id, + datasource_type="InvalidType", + 
chart_id=slice.id, + tab_id=1, + form_data=json.dumps({"datasource": datasource}), + ) + with pytest.raises(DatasourceTypeInvalidError) as exc: + CreateFormDataCommand(create_args).run() + + assert "Datasource type is invalid" in str(exc.value) + + @patch("superset.security.manager.g") + @pytest.mark.usefixtures("create_dataset", "create_slice", "create_query") + def test_create_form_data_command_type_as_string(self, mock_g): + mock_g.user = security_manager.find_user("admin") + app.config["EXPLORE_FORM_DATA_CACHE_CONFIG"] = { + "REFRESH_TIMEOUT_ON_RETRIEVAL": True + } + + dataset = ( + db.session.query(SqlaTable).filter_by(table_name="dummy_sql_table").first() + ) + slice = db.session.query(Slice).filter_by(slice_name="slice_name").first() + + datasource = f"{dataset.id}__{DatasourceType.TABLE}" + create_args = CommandParameters( + datasource_id=dataset.id, + datasource_type="table", + chart_id=slice.id, + tab_id=1, + form_data=json.dumps({"datasource": datasource}), + ) + command = CreateFormDataCommand(create_args) + + assert isinstance(command.run(), str) + + @patch("superset.security.manager.g") + @pytest.mark.usefixtures("create_dataset", "create_slice") + def test_get_form_data_command(self, mock_g): + mock_g.user = security_manager.find_user("admin") + app.config["EXPLORE_FORM_DATA_CACHE_CONFIG"] = { + "REFRESH_TIMEOUT_ON_RETRIEVAL": True + } + + dataset = ( + db.session.query(SqlaTable).filter_by(table_name="dummy_sql_table").first() + ) + slice = db.session.query(Slice).filter_by(slice_name="slice_name").first() + + datasource = f"{dataset.id}__{DatasourceType.TABLE}" + create_args = CommandParameters( + datasource_id=dataset.id, + datasource_type=DatasourceType.TABLE, + chart_id=slice.id, + tab_id=1, + form_data=json.dumps({"datasource": datasource}), + ) + key = CreateFormDataCommand(create_args).run() + + key_args = CommandParameters(key=key) + get_command = GetFormDataCommand(key_args) + cache_data = json.loads(get_command.run()) + + assert cache_data.get("datasource") == datasource + + @patch("superset.security.manager.g") + @pytest.mark.usefixtures("create_dataset", "create_slice", "create_query") + def test_update_form_data_command(self, mock_g): + mock_g.user = security_manager.find_user("admin") + app.config["EXPLORE_FORM_DATA_CACHE_CONFIG"] = { + "REFRESH_TIMEOUT_ON_RETRIEVAL": True + } + + dataset = ( + db.session.query(SqlaTable).filter_by(table_name="dummy_sql_table").first() + ) + slice = db.session.query(Slice).filter_by(slice_name="slice_name").first() + + query = db.session.query(Query).filter_by(sql="select 1 as foo;").first() + + datasource = f"{dataset.id}__{DatasourceType.TABLE}" + create_args = CommandParameters( + datasource_id=dataset.id, + datasource_type=DatasourceType.TABLE, + chart_id=slice.id, + tab_id=1, + form_data=json.dumps({"datasource": datasource}), + ) + key = CreateFormDataCommand(create_args).run() + + query_datasource = f"{dataset.id}__{DatasourceType.TABLE}" + update_args = CommandParameters( + datasource_id=query.id, + datasource_type=DatasourceType.QUERY, + chart_id=slice.id, + tab_id=1, + form_data=json.dumps({"datasource": query_datasource}), + key=key, + ) + + update_command = UpdateFormDataCommand(update_args) + new_key = update_command.run() + + # it should return a key + assert isinstance(new_key, str) + # the updated key returned should be different from the old one + assert new_key != key + + key_args = CommandParameters(key=key) + get_command = GetFormDataCommand(key_args) + + cache_data = json.loads(get_command.run()) + + 
assert cache_data.get("datasource") == query_datasource + + @patch("superset.security.manager.g") + @pytest.mark.usefixtures("create_dataset", "create_slice", "create_query") + def test_update_form_data_command_same_form_data(self, mock_g): + mock_g.user = security_manager.find_user("admin") + app.config["EXPLORE_FORM_DATA_CACHE_CONFIG"] = { + "REFRESH_TIMEOUT_ON_RETRIEVAL": True + } + + dataset = ( + db.session.query(SqlaTable).filter_by(table_name="dummy_sql_table").first() + ) + slice = db.session.query(Slice).filter_by(slice_name="slice_name").first() + + datasource = f"{dataset.id}__{DatasourceType.TABLE}" + create_args = CommandParameters( + datasource_id=dataset.id, + datasource_type=DatasourceType.TABLE, + chart_id=slice.id, + tab_id=1, + form_data=json.dumps({"datasource": datasource}), + ) + key = CreateFormDataCommand(create_args).run() + + update_args = CommandParameters( + datasource_id=dataset.id, + datasource_type=DatasourceType.TABLE, + chart_id=slice.id, + tab_id=1, + form_data=json.dumps({"datasource": datasource}), + key=key, + ) + + update_command = UpdateFormDataCommand(update_args) + new_key = update_command.run() + + # it should return a key + assert isinstance(new_key, str) + + # the updated key returned should be the same as the old one + assert new_key == key + + key_args = CommandParameters(key=key) + get_command = GetFormDataCommand(key_args) + + cache_data = json.loads(get_command.run()) + + assert cache_data.get("datasource") == datasource + + @patch("superset.security.manager.g") + @pytest.mark.usefixtures("create_dataset", "create_slice", "create_query") + def test_delete_form_data_command(self, mock_g): + mock_g.user = security_manager.find_user("admin") + app.config["EXPLORE_FORM_DATA_CACHE_CONFIG"] = { + "REFRESH_TIMEOUT_ON_RETRIEVAL": True + } + + dataset = ( + db.session.query(SqlaTable).filter_by(table_name="dummy_sql_table").first() + ) + slice = db.session.query(Slice).filter_by(slice_name="slice_name").first() + + datasource = f"{dataset.id}__{DatasourceType.TABLE}" + create_args = CommandParameters( + datasource_id=dataset.id, + datasource_type=DatasourceType.TABLE, + chart_id=slice.id, + tab_id=1, + form_data=json.dumps({"datasource": datasource}), + ) + key = CreateFormDataCommand(create_args).run() + + delete_args = CommandParameters( + key=key, + ) + + delete_command = DeleteFormDataCommand(delete_args) + response = delete_command.run() + + assert response == True + + @patch("superset.security.manager.g") + @pytest.mark.usefixtures("create_dataset", "create_slice", "create_query") + def test_delete_form_data_command_key_expired(self, mock_g): + mock_g.user = security_manager.find_user("admin") + app.config["EXPLORE_FORM_DATA_CACHE_CONFIG"] = { + "REFRESH_TIMEOUT_ON_RETRIEVAL": True + } + + delete_args = CommandParameters( + key="some_expired_key", + ) + + delete_command = DeleteFormDataCommand(delete_args) + response = delete_command.run() + + assert response == False diff --git a/tests/integration_tests/explore/permalink/__init__.py b/tests/integration_tests/explore/permalink/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/explore/permalink/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/explore/permalink/api_tests.py b/tests/integration_tests/explore/permalink/api_tests.py new file mode 100644 index 0000000000000..22a36f41e1be5 --- /dev/null +++ b/tests/integration_tests/explore/permalink/api_tests.py @@ -0,0 +1,136 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import json +import pickle +from typing import Any, Dict, Iterator +from uuid import uuid3 + +import pytest +from sqlalchemy.orm import Session + +from superset import db +from superset.key_value.models import KeyValueEntry +from superset.key_value.types import KeyValueResource +from superset.key_value.utils import decode_permalink_id, encode_permalink_key +from superset.models.slice import Slice +from superset.utils.core import DatasourceType +from tests.integration_tests.fixtures.world_bank_dashboard import ( + load_world_bank_dashboard_with_slices, + load_world_bank_data, +) +from tests.integration_tests.test_app import app + + +@pytest.fixture +def chart(app_context, load_world_bank_dashboard_with_slices) -> Slice: + session: Session = app_context.app.appbuilder.get_session + chart = session.query(Slice).filter_by(slice_name="World's Population").one() + return chart + + +@pytest.fixture +def form_data(chart) -> Dict[str, Any]: + datasource = f"{chart.datasource.id}__{chart.datasource.type}" + return { + "chart_id": chart.id, + "datasource": datasource, + } + + +@pytest.fixture +def permalink_salt() -> Iterator[str]: + from superset.key_value.shared_entries import get_permalink_salt, get_uuid_namespace + from superset.key_value.types import SharedKey + + key = SharedKey.EXPLORE_PERMALINK_SALT + salt = get_permalink_salt(key) + yield salt + namespace = get_uuid_namespace(salt) + db.session.query(KeyValueEntry).filter_by( + resource=KeyValueResource.APP, + uuid=uuid3(namespace, key), + ) + db.session.commit() + + +def test_post( + test_client, login_as_admin, form_data: Dict[str, Any], permalink_salt: str +): + resp = test_client.post(f"api/v1/explore/permalink", json={"formData": form_data}) + assert resp.status_code == 201 + data = json.loads(resp.data.decode("utf-8")) + key = data["key"] + url = data["url"] + assert key in url + id_ = decode_permalink_id(key, 
permalink_salt) + db.session.query(KeyValueEntry).filter_by(id=id_).delete() + db.session.commit() + + +def test_post_access_denied(test_client, login_as, form_data): + login_as("gamma") + resp = test_client.post(f"api/v1/explore/permalink", json={"formData": form_data}) + assert resp.status_code == 403 + + +def test_get_missing_chart( + test_client, login_as_admin, chart, permalink_salt: str +) -> None: + from superset.key_value.models import KeyValueEntry + + chart_id = 1234 + entry = KeyValueEntry( + resource=KeyValueResource.EXPLORE_PERMALINK, + value=pickle.dumps( + { + "chartId": chart_id, + "datasourceId": chart.datasource.id, + "datasourceType": DatasourceType.TABLE, + "formData": { + "slice_id": chart_id, + "datasource": f"{chart.datasource.id}__{chart.datasource.type}", + }, + } + ), + ) + db.session.add(entry) + db.session.commit() + key = encode_permalink_key(entry.id, permalink_salt) + resp = test_client.get(f"api/v1/explore/permalink/{key}") + assert resp.status_code == 404 + db.session.delete(entry) + db.session.commit() + + +def test_post_invalid_schema(test_client, login_as_admin) -> None: + resp = test_client.post(f"api/v1/explore/permalink", json={"abc": 123}) + assert resp.status_code == 400 + + +def test_get( + test_client, login_as_admin, form_data: Dict[str, Any], permalink_salt: str +) -> None: + resp = test_client.post(f"api/v1/explore/permalink", json={"formData": form_data}) + data = json.loads(resp.data.decode("utf-8")) + key = data["key"] + resp = test_client.get(f"api/v1/explore/permalink/{key}") + assert resp.status_code == 200 + result = json.loads(resp.data.decode("utf-8")) + assert result["state"]["formData"] == form_data + id_ = decode_permalink_id(key, permalink_salt) + db.session.query(KeyValueEntry).filter_by(id=id_).delete() + db.session.commit() diff --git a/tests/integration_tests/explore/permalink/commands_tests.py b/tests/integration_tests/explore/permalink/commands_tests.py new file mode 100644 index 0000000000000..63ed02cd7bd91 --- /dev/null +++ b/tests/integration_tests/explore/permalink/commands_tests.py @@ -0,0 +1,172 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
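+# Integration tests for the explore permalink commands: CreateExplorePermalinkCommand +# and GetExplorePermalinkCommand, including reading a permalink stored with the older +# dataset key format.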
+ +import json +from unittest.mock import patch + +import pytest + +from superset import app, db, security, security_manager +from superset.commands.exceptions import DatasourceTypeInvalidError +from superset.connectors.sqla.models import SqlaTable +from superset.explore.form_data.commands.parameters import CommandParameters +from superset.explore.permalink.commands.create import CreateExplorePermalinkCommand +from superset.explore.permalink.commands.get import GetExplorePermalinkCommand +from superset.key_value.utils import decode_permalink_id +from superset.models.slice import Slice +from superset.models.sql_lab import Query +from superset.utils.core import DatasourceType, get_example_default_schema +from superset.utils.database import get_example_database +from tests.integration_tests.base_tests import SupersetTestCase + + +class TestCreatePermalinkDataCommand(SupersetTestCase): + @pytest.fixture() + def create_dataset(self): + with self.create_app().app_context(): + dataset = SqlaTable( + table_name="dummy_sql_table", + database=get_example_database(), + schema=get_example_default_schema(), + sql="select 123 as intcol, 'abc' as strcol", + ) + session = db.session + session.add(dataset) + session.commit() + + yield dataset + + # rollback + session.delete(dataset) + session.commit() + + @pytest.fixture() + def create_slice(self): + with self.create_app().app_context(): + session = db.session + dataset = ( + session.query(SqlaTable).filter_by(table_name="dummy_sql_table").first() + ) + slice = Slice( + datasource_id=dataset.id, + datasource_type=DatasourceType.TABLE, + datasource_name="tmp_perm_table", + slice_name="slice_name", + ) + + session.add(slice) + session.commit() + + yield slice + + # rollback + session.delete(slice) + session.commit() + + @pytest.fixture() + def create_query(self): + with self.create_app().app_context(): + session = db.session + + query = Query( + sql="select 1 as foo;", + client_id="sldkfjlk", + database=get_example_database(), + ) + + session.add(query) + session.commit() + + yield query + + # rollback + session.delete(query) + session.commit() + + @patch("superset.security.manager.g") + @pytest.mark.usefixtures("create_dataset", "create_slice") + def test_create_permalink_command(self, mock_g): + mock_g.user = security_manager.find_user("admin") + + dataset = ( + db.session.query(SqlaTable).filter_by(table_name="dummy_sql_table").first() + ) + slice = db.session.query(Slice).filter_by(slice_name="slice_name").first() + + datasource = f"{dataset.id}__{DatasourceType.TABLE}" + command = CreateExplorePermalinkCommand( + {"formData": {"datasource": datasource, "slice_id": slice.id}} + ) + + assert isinstance(command.run(), str) + + @patch("superset.security.manager.g") + @pytest.mark.usefixtures("create_dataset", "create_slice") + def test_get_permalink_command(self, mock_g): + mock_g.user = security_manager.find_user("admin") + app.config["EXPLORE_FORM_DATA_CACHE_CONFIG"] = { + "REFRESH_TIMEOUT_ON_RETRIEVAL": True + } + + dataset = ( + db.session.query(SqlaTable).filter_by(table_name="dummy_sql_table").first() + ) + slice = db.session.query(Slice).filter_by(slice_name="slice_name").first() + + datasource = f"{dataset.id}__{DatasourceType.TABLE}" + + key = CreateExplorePermalinkCommand( + {"formData": {"datasource": datasource, "slice_id": slice.id}} + ).run() + + get_command = GetExplorePermalinkCommand(key) + cache_data = get_command.run() + + assert cache_data.get("datasource") == datasource + + @patch("superset.security.manager.g") + 
@patch("superset.key_value.commands.get.GetKeyValueCommand.run") + @patch("superset.explore.permalink.commands.get.decode_permalink_id") + @pytest.mark.usefixtures("create_dataset", "create_slice") + def test_get_permalink_command_with_old_dataset_key( + self, decode_id_mock, get_kv_command_mock, mock_g + ): + mock_g.user = security_manager.find_user("admin") + app.config["EXPLORE_FORM_DATA_CACHE_CONFIG"] = { + "REFRESH_TIMEOUT_ON_RETRIEVAL": True + } + + dataset = ( + db.session.query(SqlaTable).filter_by(table_name="dummy_sql_table").first() + ) + slice = db.session.query(Slice).filter_by(slice_name="slice_name").first() + + datasource_string = f"{dataset.id}__{DatasourceType.TABLE}" + + decode_id_mock.return_value = "123456" + get_kv_command_mock.return_value = { + "chartId": slice.id, + "datasetId": dataset.id, + "datasource": datasource_string, + "state": { + "formData": {"datasource": datasource_string, "slice_id": slice.id} + }, + } + get_command = GetExplorePermalinkCommand("thisisallmocked") + cache_data = get_command.run() + + assert cache_data.get("datasource") == datasource_string diff --git a/tests/integration_tests/extensions/__init__.py b/tests/integration_tests/extensions/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/extensions/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/extensions/metastore_cache_test.py b/tests/integration_tests/extensions/metastore_cache_test.py new file mode 100644 index 0000000000000..d9e0e9ee26f5e --- /dev/null +++ b/tests/integration_tests/extensions/metastore_cache_test.py @@ -0,0 +1,77 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations + +from datetime import datetime, timedelta +from typing import TYPE_CHECKING +from uuid import UUID + +import pytest +from flask.ctx import AppContext +from freezegun import freeze_time + +if TYPE_CHECKING: + from superset.extensions.metastore_cache import SupersetMetastoreCache + +FIRST_KEY = "foo" +FIRST_KEY_INITIAL_VALUE = {"foo": "bar"} +FIRST_KEY_UPDATED_VALUE = "foo" + +SECOND_KEY = "baz" +SECOND_VALUE = "qwerty" + + +@pytest.fixture +def cache() -> SupersetMetastoreCache: + from superset.extensions.metastore_cache import SupersetMetastoreCache + + return SupersetMetastoreCache( + namespace=UUID("ee173d1b-ccf3-40aa-941c-985c15224496"), + default_timeout=600, + ) + + +def test_caching_flow(app_context: AppContext, cache: SupersetMetastoreCache) -> None: + assert cache.has(FIRST_KEY) is False + assert cache.add(FIRST_KEY, FIRST_KEY_INITIAL_VALUE) is True + assert cache.has(FIRST_KEY) is True + cache.set(SECOND_KEY, SECOND_VALUE) + assert cache.get(FIRST_KEY) == FIRST_KEY_INITIAL_VALUE + assert cache.get(SECOND_KEY) == SECOND_VALUE + assert cache.add(FIRST_KEY, FIRST_KEY_UPDATED_VALUE) is False + assert cache.get(FIRST_KEY) == FIRST_KEY_INITIAL_VALUE + assert cache.set(FIRST_KEY, FIRST_KEY_UPDATED_VALUE) == True + assert cache.get(FIRST_KEY) == FIRST_KEY_UPDATED_VALUE + cache.delete(FIRST_KEY) + assert cache.has(FIRST_KEY) is False + assert cache.get(FIRST_KEY) is None + assert cache.has(SECOND_KEY) + assert cache.get(SECOND_KEY) == SECOND_VALUE + + +def test_expiry(app_context: AppContext, cache: SupersetMetastoreCache) -> None: + delta = timedelta(days=90) + dttm = datetime(2022, 3, 18, 0, 0, 0) + with freeze_time(dttm): + cache.set(FIRST_KEY, FIRST_KEY_INITIAL_VALUE, int(delta.total_seconds())) + assert cache.get(FIRST_KEY) == FIRST_KEY_INITIAL_VALUE + with freeze_time(dttm + delta - timedelta(seconds=1)): + assert cache.has(FIRST_KEY) + assert cache.get(FIRST_KEY) == FIRST_KEY_INITIAL_VALUE + with freeze_time(dttm + delta + timedelta(seconds=1)): + assert cache.has(FIRST_KEY) is False + assert cache.get(FIRST_KEY) is None diff --git a/tests/integration_tests/fixtures/__init__.py b/tests/integration_tests/fixtures/__init__.py new file mode 100644 index 0000000000000..0e06be352ece5 --- /dev/null +++ b/tests/integration_tests/fixtures/__init__.py @@ -0,0 +1,31 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
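+# Re-export shared dashboard and role fixtures so tests can depend on them by name.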
+ +from .birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_dashboard_with_slices_module_scope, +) +from .energy_dashboard import load_energy_table_data, load_energy_table_with_slice +from .public_role import public_role_like_gamma, public_role_like_test_role +from .unicode_dashboard import ( + load_unicode_dashboard_with_position, + load_unicode_dashboard_with_slice, +) +from .world_bank_dashboard import ( + load_world_bank_dashboard_with_slices, + load_world_bank_dashboard_with_slices_module_scope, +) diff --git a/tests/integration_tests/fixtures/birth_names_dashboard.py b/tests/integration_tests/fixtures/birth_names_dashboard.py new file mode 100644 index 0000000000000..be680a720dd84 --- /dev/null +++ b/tests/integration_tests/fixtures/birth_names_dashboard.py @@ -0,0 +1,108 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from typing import Callable, List, Optional + +import pytest + +from superset import db +from superset.connectors.sqla.models import SqlaTable +from superset.models.core import Database +from superset.models.dashboard import Dashboard +from superset.models.slice import Slice +from superset.utils.core import get_example_default_schema +from superset.utils.database import get_example_database +from tests.example_data.data_loading.base_data_loader import DataLoader +from tests.example_data.data_loading.data_definitions.types import Table +from tests.integration_tests.dashboard_utils import create_table_metadata +from tests.integration_tests.test_app import app + +BIRTH_NAMES_TBL_NAME = "birth_names" + + +@pytest.fixture(scope="session") +def load_birth_names_data( + birth_names_table_factory: Callable[[], Table], data_loader: DataLoader +): + birth_names_table: Table = birth_names_table_factory() + data_loader.load_table(birth_names_table) + yield + data_loader.remove_table(birth_names_table.table_name) + + +@pytest.fixture() +def load_birth_names_dashboard_with_slices(load_birth_names_data): + with app.app_context(): + dash_id_to_delete, slices_ids_to_delete = _create_dashboards() + yield + _cleanup(dash_id_to_delete, slices_ids_to_delete) + + +@pytest.fixture(scope="module") +def load_birth_names_dashboard_with_slices_module_scope(load_birth_names_data): + with app.app_context(): + dash_id_to_delete, slices_ids_to_delete = _create_dashboards() + yield + _cleanup(dash_id_to_delete, slices_ids_to_delete) + + +def _create_dashboards(): + table = _create_table( + table_name=BIRTH_NAMES_TBL_NAME, + database=get_example_database(), + fetch_values_predicate="123 = 123", + ) + + from superset.examples.birth_names import create_dashboard, create_slices + + slices, _ = create_slices(table) + dash = create_dashboard(slices) + slices_ids_to_delete = [slice.id for slice in slices] + dash_id_to_delete = 
dash.id + return dash_id_to_delete, slices_ids_to_delete + + +def _create_table( + table_name: str, + database: "Database", + fetch_values_predicate: Optional[str] = None, +): + table = create_table_metadata( + table_name=table_name, + database=database, + fetch_values_predicate=fetch_values_predicate, + ) + from superset.examples.birth_names import _add_table_metrics, _set_table_metadata + + _set_table_metadata(table, database) + _add_table_metrics(table) + db.session.commit() + return table + + +def _cleanup(dash_id: int, slice_ids: List[int]) -> None: + schema = get_example_default_schema() + for datasource in db.session.query(SqlaTable).filter_by( + table_name="birth_names", schema=schema + ): + for col in datasource.columns + datasource.metrics: + db.session.delete(col) + + for dash in db.session.query(Dashboard).filter_by(id=dash_id): + db.session.delete(dash) + for slc in db.session.query(Slice).filter(Slice.id.in_(slice_ids)): + db.session.delete(slc) + db.session.commit() diff --git a/tests/integration_tests/fixtures/certificates.py b/tests/integration_tests/fixtures/certificates.py new file mode 100644 index 0000000000000..5cdf917704648 --- /dev/null +++ b/tests/integration_tests/fixtures/certificates.py @@ -0,0 +1,38 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
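The birth-names fixtures above follow a create/yield/cleanup pattern: the dashboard and slices exist for the duration of the test and are removed afterwards. For orientation, a minimal sketch of how an integration test typically consumes them via pytest.mark.usefixtures; the test body and the "births" slug are illustrative assumptions, not part of this patch:

import pytest

from superset import db
from superset.models.dashboard import Dashboard
from tests.integration_tests.test_app import app


@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_birth_names_dashboard_loaded() -> None:
    # The fixture has already created the dashboard and its slices; cleanup
    # runs after the test finishes.  The "births" slug is an assumption about
    # the example dashboard, used here only for illustration.
    with app.app_context():
        dash = db.session.query(Dashboard).filter_by(slug="births").one_or_none()
        assert dash is not None
        assert len(dash.slices) > 0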
+ssl_certificate = """-----BEGIN CERTIFICATE----- +MIIDnDCCAoQCCQCrdpcNPCA/eDANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMC +VVMxEzARBgNVBAgMCkNhbGlmb3JuaWExEjAQBgNVBAcMCVNhbiBNYXRlbzEPMA0G +A1UECgwGUHJlc2V0MRMwEQYDVQQLDApTa3Vua3dvcmtzMRIwEAYDVQQDDAlwcmVz +ZXQuaW8xHTAbBgkqhkiG9w0BCQEWDmluZm9AcHJlc2V0LmlvMB4XDTIwMDMyNjEw +NTE1NFoXDTQwMDMyNjEwNTE1NFowgY8xCzAJBgNVBAYTAlVTMRMwEQYDVQQIDApD +YWxpZm9ybmlhMRIwEAYDVQQHDAlTYW4gTWF0ZW8xDzANBgNVBAoMBlByZXNldDET +MBEGA1UECwwKU2t1bmt3b3JrczESMBAGA1UEAwwJcHJlc2V0LmlvMR0wGwYJKoZI +hvcNAQkBFg5pbmZvQHByZXNldC5pbzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC +AQoCggEBAKNHQZcu2L/6HvZfzy4Hnm3POeztfO+NJ7OzppAcNlLbTAatUk1YoDbJ +5m5GUW8m7pVEHb76UL6Xxei9MoMVvHGuXqQeZZnNd+DySW/227wkOPYOCVSuDsWD +1EReG+pv/z8CDhdwmMTkDTZUDr0BUR/yc8qTCPdZoalj2muDl+k2J3LSCkelx4U/ +2iYhoUQD+lzFS3k7ohAfaGc2aZOlwTITopXHSFfuZ7j9muBOYtU7NgpnCl6WgxYP +1+4ddBIauPTBY2gWfZC2FeOfYEqfsUUXRsw1ehEQf4uxxTKNJTfTuVbdgrTYx5QQ +jrM88WvWdyVnIM7u7/x9bawfGX/b/F0CAwEAATANBgkqhkiG9w0BAQsFAAOCAQEA +XYLLk3T5RWIagNa3DPrMI+SjRm4PAI/RsijtBV+9hrkCXOQ1mvlo/ORniaiemHvF +Kh6u6MTl014+f6Ytg/tx/OzuK2ffo9x44ZV/yqkbSmKD1pGftYNqCnBCN0uo1Gzb +HZ+bTozo+9raFN7OGPgbdBmpQT2c+LG5n+7REobHFb7VLeY2/7BKtxNBRXfIxn4X ++MIhpASwLH5X64a1f9LyuPNMyUvKgzDe7jRdX1JZ7uw/1T//OHGQth0jLiapa6FZ +GwgYUaruSZH51ZtxrJSXKSNBA7asPSBbyOmGptLsw2GTAsoBd5sUR4+hbuVo+1ai +XeA3AKTX/OdYWJvr5YIgeQ== +-----END CERTIFICATE-----""" diff --git a/tests/integration_tests/fixtures/client.py b/tests/integration_tests/fixtures/client.py new file mode 100644 index 0000000000000..f532f438fda4f --- /dev/null +++ b/tests/integration_tests/fixtures/client.py @@ -0,0 +1,26 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import pytest + +from tests.integration_tests.test_app import app + + +@pytest.fixture +def client(): + with app.test_client() as client: + with app.app_context(): + yield client diff --git a/tests/integration_tests/fixtures/database.py b/tests/integration_tests/fixtures/database.py new file mode 100644 index 0000000000000..a2ba522126677 --- /dev/null +++ b/tests/integration_tests/fixtures/database.py @@ -0,0 +1,22 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. +default_db_extra = """{ + "metadata_params": {}, + "engine_params": {}, + "metadata_cache_timeout": {}, + "schemas_allowed_for_file_upload": [] +}""" diff --git a/tests/integration_tests/fixtures/datasource.py b/tests/integration_tests/fixtures/datasource.py new file mode 100644 index 0000000000000..f394d68a0e76b --- /dev/null +++ b/tests/integration_tests/fixtures/datasource.py @@ -0,0 +1,211 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +"""Fixtures for test_datasource.py""" +from typing import Any, Dict, Generator + +import pytest +from sqlalchemy import Column, create_engine, Date, Integer, MetaData, String, Table +from sqlalchemy.ext.declarative import declarative_base + +from superset.columns.models import Column as Sl_Column +from superset.connectors.sqla.models import SqlaTable, TableColumn +from superset.extensions import db +from superset.models.core import Database +from superset.tables.models import Table as Sl_Table +from superset.utils.core import get_example_default_schema +from superset.utils.database import get_example_database +from tests.integration_tests.test_app import app + + +def get_datasource_post() -> Dict[str, Any]: + schema = get_example_default_schema() + + return { + "id": None, + "column_formats": {"ratio": ".2%"}, + "database": {"id": 1}, + "description": "Adding a DESCRip", + "default_endpoint": "", + "filter_select_enabled": True, + "name": f"{schema}.birth_names" if schema else "birth_names", + "table_name": "birth_names", + "datasource_name": "birth_names", + "type": "table", + "schema": schema, + "offset": 66, + "cache_timeout": 55, + "sql": "", + "columns": [ + { + "id": 504, + "column_name": "ds", + "verbose_name": "", + "description": None, + "expression": "", + "filterable": True, + "groupby": True, + "is_dttm": True, + "type": "DATETIME", + }, + { + "id": 505, + "column_name": "gender", + "verbose_name": None, + "description": None, + "expression": "", + "filterable": True, + "groupby": True, + "is_dttm": False, + "type": "VARCHAR(16)", + }, + { + "id": 506, + "column_name": "name", + "verbose_name": None, + "description": None, + "expression": None, + "filterable": True, + "groupby": True, + "is_dttm": None, + "type": "VARCHAR(255)", + }, + { + "id": 508, + "column_name": "state", + "verbose_name": None, + "description": None, + "expression": None, + "filterable": True, + "groupby": True, + "is_dttm": None, + "type": "VARCHAR(10)", + }, + { + "id": 509, + "column_name": "num_boys", + "verbose_name": None, + "description": None, + "expression": None, + "filterable": True, + "groupby": True, + "is_dttm": None, + "type": "BIGINT(20)", + }, + { + "id": 510, + "column_name": "num_girls", + "verbose_name": None, 
+ "description": None, + "expression": "", + "filterable": False, + "groupby": False, + "is_dttm": False, + "type": "BIGINT(20)", + }, + { + "id": 532, + "column_name": "num", + "verbose_name": None, + "description": None, + "expression": None, + "filterable": True, + "groupby": True, + "is_dttm": None, + "type": "BIGINT(20)", + }, + { + "id": 522, + "column_name": "num_california", + "verbose_name": None, + "description": None, + "expression": "CASE WHEN state = 'CA' THEN num ELSE 0 END", + "filterable": False, + "groupby": False, + "is_dttm": False, + "type": "NUMBER", + }, + ], + "metrics": [ + { + "id": 824, + "metric_name": "sum__num", + "verbose_name": "Babies", + "description": "", + "expression": "SUM(num)", + "warning_text": "", + "d3format": "", + }, + { + "id": 836, + "metric_name": "count", + "verbose_name": "", + "description": None, + "expression": "count(1)", + "warning_text": None, + "d3format": None, + }, + { + "id": 843, + "metric_name": "ratio", + "verbose_name": "Ratio Boys/Girls", + "description": "This represents the ratio of boys/girls", + "expression": "sum(num_boys) / sum(num_girls)", + "warning_text": "no warning", + "d3format": ".2%", + }, + ], + } + + +@pytest.fixture() +def load_dataset_with_columns() -> Generator[SqlaTable, None, None]: + with app.app_context(): + engine = create_engine(app.config["SQLALCHEMY_DATABASE_URI"], echo=True) + meta = MetaData() + session = db.session + + students = Table( + "students", + meta, + Column("id", Integer, primary_key=True), + Column("name", String(255)), + Column("lastname", String(255)), + Column("ds", Date), + ) + meta.create_all(engine) + + students.insert().values(name="George", ds="2021-01-01") + + dataset = SqlaTable( + database_id=db.session.query(Database).first().id, table_name="students" + ) + column = TableColumn(table_id=dataset.id, column_name="name") + dataset.columns = [column] + session.add(dataset) + session.commit() + yield dataset + + # cleanup + students_table = meta.tables.get("students") + if students_table is not None: + base = declarative_base() + # needed for sqlite + session.commit() + base.metadata.drop_all(engine, [students_table], checkfirst=True) + session.delete(dataset) + session.delete(column) + session.commit() diff --git a/tests/integration_tests/fixtures/deck_geojson_form_data.json b/tests/integration_tests/fixtures/deck_geojson_form_data.json new file mode 100644 index 0000000000000..e8258c7d443a1 --- /dev/null +++ b/tests/integration_tests/fixtures/deck_geojson_form_data.json @@ -0,0 +1,47 @@ +{ + "color_picker": { + "a": 1, + "b": 135, + "g": 122, + "r": 0 + }, + "datasource": "12__table", + "filters": [], + "having": "", + "js_columns": [ + "color" + ], + "js_datapoint_mutator": "d => {\n return {\n ...d,\n color: colors.hexToRGB(d.extraProps.color),\n }\n}", + "js_onclick_href": "", + "js_tooltip": "", + "mapbox_style": "mapbox://styles/mapbox/light-v9", + "reverse_long_lat": false, + "row_limit": 5000, + "since": "7 days ago", + "slice_id": 1013, + "time_grain_sqla": null, + "until": "now", + "geojson": "test_col", + "viewport": { + "altitude": 1.5, + "bearing": 0, + "height": 1094, + "latitude": 37.73671752604488, + "longitude": -122.18885402582598, + "maxLatitude": 85.05113, + "maxPitch": 60, + "maxZoom": 20, + "minLatitude": -85.05113, + "minPitch": 0, + "minZoom": 0, + "pitch": 0, + "width": 669, + "zoom": 9.51847667620428 + }, + "viz_type": "deck_geojson", + "where": "", + "granularity_sqla": null, + "autozoom": true, + "url_params": {}, + "size": "100" +} diff --git 
a/tests/integration_tests/fixtures/deck_path_form_data.json b/tests/integration_tests/fixtures/deck_path_form_data.json new file mode 100644 index 0000000000000..ac2e404d83fb4 --- /dev/null +++ b/tests/integration_tests/fixtures/deck_path_form_data.json @@ -0,0 +1,49 @@ +{ + "color_picker": { + "a": 1, + "b": 135, + "g": 122, + "r": 0 + }, + "datasource": "12__table", + "filters": [], + "having": "", + "js_columns": [ + "color" + ], + "js_datapoint_mutator": "d => {\n return {\n ...d,\n color: colors.hexToRGB(d.extraProps.color),\n }\n}", + "js_onclick_href": "", + "js_tooltip": "", + "line_column": "path_json", + "line_type": "json", + "line_width": 150, + "mapbox_style": "mapbox://styles/mapbox/light-v9", + "reverse_long_lat": false, + "row_limit": 5000, + "since": "7 days ago", + "slice_id": 1013, + "time_grain_sqla": null, + "until": "now", + "viewport": { + "altitude": 1.5, + "bearing": 0, + "height": 1094, + "latitude": 37.73671752604488, + "longitude": -122.18885402582598, + "maxLatitude": 85.05113, + "maxPitch": 60, + "maxZoom": 20, + "minLatitude": -85.05113, + "minPitch": 0, + "minZoom": 0, + "pitch": 0, + "width": 669, + "zoom": 9.51847667620428 + }, + "viz_type": "deck_path", + "where": "", + "granularity_sqla": null, + "autozoom": true, + "url_params": {}, + "size": "100" +} diff --git a/tests/integration_tests/fixtures/energy_dashboard.py b/tests/integration_tests/fixtures/energy_dashboard.py new file mode 100644 index 0000000000000..202f494aa2d15 --- /dev/null +++ b/tests/integration_tests/fixtures/energy_dashboard.py @@ -0,0 +1,195 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
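The two deck.gl form-data files above are plain JSON fixtures stored next to the Python fixture modules. A small sketch of one way a test could load them relative to this fixtures package; the helper and the assertions are illustrative and not something this patch adds:

import json
from pathlib import Path

FIXTURES_DIR = Path(__file__).parent  # tests/integration_tests/fixtures


def read_form_data_fixture(filename: str) -> dict:
    # Load one of the JSON form-data fixtures that live next to this module.
    with open(FIXTURES_DIR / filename, encoding="utf-8") as fp:
        return json.load(fp)


def test_deck_path_form_data_shape() -> None:
    form_data = read_form_data_fixture("deck_path_form_data.json")
    assert form_data["viz_type"] == "deck_path"
    assert form_data["line_column"] == "path_json"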
+import random +from typing import Dict, List, Set + +import pandas as pd +import pytest +from sqlalchemy import column, Float, String + +from superset import db +from superset.connectors.sqla.models import SqlaTable, SqlMetric +from superset.models.slice import Slice +from superset.utils.core import get_example_default_schema +from superset.utils.database import get_example_database +from tests.integration_tests.dashboard_utils import create_slice, create_table_metadata +from tests.integration_tests.test_app import app + +misc_dash_slices: Set[str] = set() + + +ENERGY_USAGE_TBL_NAME = "energy_usage" + + +@pytest.fixture(scope="session") +def load_energy_table_data(): + with app.app_context(): + database = get_example_database() + with database.get_sqla_engine_with_context() as engine: + df = _get_dataframe() + df.to_sql( + ENERGY_USAGE_TBL_NAME, + engine, + if_exists="replace", + chunksize=500, + index=False, + dtype={"source": String(255), "target": String(255), "value": Float()}, + method="multi", + schema=get_example_default_schema(), + ) + yield + with app.app_context(): + with get_example_database().get_sqla_engine_with_context() as engine: + engine.execute("DROP TABLE IF EXISTS energy_usage") + + +@pytest.fixture() +def load_energy_table_with_slice(load_energy_table_data): + with app.app_context(): + slices = _create_energy_table() + yield slices + _cleanup() + + +def _get_dataframe(): + data = _get_energy_data() + return pd.DataFrame.from_dict(data) + + +def _create_energy_table() -> List[Slice]: + table = create_table_metadata( + table_name=ENERGY_USAGE_TBL_NAME, + database=get_example_database(), + table_description="Energy consumption", + ) + table.fetch_metadata() + + if not any(col.metric_name == "sum__value" for col in table.metrics): + col = str(column("value").compile(db.engine)) + table.metrics.append( + SqlMetric(metric_name="sum__value", expression=f"SUM({col})") + ) + db.session.merge(table) + db.session.commit() + table.fetch_metadata() + + slices = [] + for slice_data in _get_energy_slices(): + + slice = _create_and_commit_energy_slice( + table, + slice_data["slice_title"], + slice_data["viz_type"], + slice_data["params"], + ) + slices.append(slice) + return slices + + +def _create_and_commit_energy_slice( + table: SqlaTable, title: str, viz_type: str, param: Dict[str, str] +): + slice = create_slice(title, viz_type, table, param) + existing_slice = ( + db.session.query(Slice).filter_by(slice_name=slice.slice_name).first() + ) + if existing_slice: + db.session.delete(existing_slice) + db.session.add(slice) + db.session.commit() + return slice + + +def _cleanup() -> None: + for slice_data in _get_energy_slices(): + slice = ( + db.session.query(Slice) + .filter_by(slice_name=slice_data["slice_title"]) + .first() + ) + db.session.delete(slice) + + metric = ( + db.session.query(SqlMetric).filter_by(metric_name="sum__value").one_or_none() + ) + if metric: + db.session.delete(metric) + + db.session.commit() + + +def _get_energy_data(): + data = [] + for i in range(85): + data.append( + { + "source": f"energy_source{i}", + "target": f"energy_target{i}", + "value": random.uniform(0.1, 11.0), + } + ) + return data + + +def _get_energy_slices(): + return [ + { + "slice_title": "Energy Sankey", + "viz_type": "sankey", + "params": { + "collapsed_fieldsets": "", + "groupby": ["source", "target"], + "metric": "sum__value", + "row_limit": "5000", + "slice_name": "Energy Sankey", + "viz_type": "sankey", + }, + }, + { + "slice_title": "Energy Force Layout", + "viz_type": 
"graph_chart", + "params": { + "source": "source", + "target": "target", + "edgeLength": 400, + "repulsion": 1000, + "layout": "force", + "metric": "sum__value", + "row_limit": "5000", + "slice_name": "Force", + "viz_type": "graph_chart", + }, + }, + { + "slice_title": "Heatmap", + "viz_type": "heatmap", + "params": { + "all_columns_x": "source", + "all_columns_y": "target", + "canvas_image_rendering": "pixelated", + "collapsed_fieldsets": "", + "linear_color_scheme": "blue_white_yellow", + "metric": "sum__value", + "normalize_across": "heatmap", + "slice_name": "Heatmap", + "viz_type": "heatmap", + "xscale_interval": "1", + "yscale_interval": "1", + }, + "query_context": '{"datasource":{"id":12,"type":"table"},"force":false,"queries":[{"time_range":" : ","filters":[],"extras":{"time_grain_sqla":null,"having":"","having_druid":[],"where":""},"applied_time_extras":{},"columns":[],"metrics":[],"annotation_layers":[],"row_limit":5000,"timeseries_limit":0,"order_desc":true,"url_params":{},"custom_params":{},"custom_form_data":{}}],"result_format":"json","result_type":"full"}', + }, + ] diff --git a/tests/integration_tests/fixtures/importexport.py b/tests/integration_tests/fixtures/importexport.py new file mode 100644 index 0000000000000..b624f3e63ced6 --- /dev/null +++ b/tests/integration_tests/fixtures/importexport.py @@ -0,0 +1,516 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from typing import Any, Dict, List + +# example V0 import/export format +dataset_ui_export: List[Dict[str, Any]] = [ + { + "columns": [ + { + "column_name": "num_california", + "expression": "CASE WHEN state = 'CA' THEN num ELSE 0 END", + }, + {"column_name": "ds", "is_dttm": True, "type": "DATETIME"}, + {"column_name": "state", "type": "VARCHAR(10)"}, + {"column_name": "gender", "type": "VARCHAR(16)"}, + {"column_name": "name", "type": "VARCHAR(255)"}, + {"column_name": "num_boys", "type": "BIGINT"}, + {"column_name": "num_girls", "type": "BIGINT"}, + {"column_name": "num", "type": "BIGINT"}, + ], + "filter_select_enabled": True, + "main_dttm_col": "ds", + "metrics": [ + { + "expression": "COUNT(*)", + "metric_name": "count", + "metric_type": "count", + "verbose_name": "COUNT(*)", + }, + {"expression": "SUM(num)", "metric_name": "sum__num"}, + ], + "params": '{"remote_id": 3, "database_name": "examples", "import_time": 1604342885}', + "table_name": "birth_names_2", + } +] + +dataset_cli_export: Dict[str, Any] = { + "databases": [ + { + "allow_run_async": True, + "database_name": "examples", + "sqlalchemy_uri": "sqlite:////Users/beto/.superset/superset.db", + "tables": dataset_ui_export, + } + ] +} + +dashboard_export: Dict[str, Any] = { + "dashboards": [ + { + "__Dashboard__": { + "css": "", + "dashboard_title": "Births 2", + "description": None, + "json_metadata": '{"timed_refresh_immune_slices": [], "expanded_slices": {}, "refresh_frequency": 0, "default_filters": "{}", "color_scheme": null, "remote_id": 1}', + "position_json": '{"CHART--jvaBFZx78":{"children":[],"id":"CHART--jvaBFZx78","meta":{"chartId":83,"height":50,"sliceName":"Number of California Births","uuid":"c77bb4b3-09f4-4d9a-a9e2-66a627c64343","width":4},"parents":["ROOT_ID","GRID_ID","ROW-se_5H8KNiO"],"type":"CHART"},"DASHBOARD_VERSION_KEY":"v2","GRID_ID":{"children":["ROW-se_5H8KNiO"],"id":"GRID_ID","parents":["ROOT_ID"],"type":"GRID"},"HEADER_ID":{"id":"HEADER_ID","meta":{"text":"Births"},"type":"HEADER"},"ROOT_ID":{"children":["GRID_ID"],"id":"ROOT_ID","type":"ROOT"},"ROW-se_5H8KNiO":{"children":["CHART--jvaBFZx78"],"id":"ROW-se_5H8KNiO","meta":{"background":"BACKGROUND_TRANSPARENT"},"parents":["ROOT_ID","GRID_ID"],"type":"ROW"}}', + "slices": [ + { + "__Slice__": { + "cache_timeout": None, + "datasource_name": "birth_names_2", + "datasource_type": "table", + "id": 83, + "params": '{"adhoc_filters": [], "datasource": "3__table", "granularity_sqla": "ds", "header_font_size": 0.4, "metric": {"aggregate": "SUM", "column": {"column_name": "num_california", "expression": "CASE WHEN state = \'CA\' THEN num ELSE 0 END"}, "expressionType": "SIMPLE", "label": "SUM(num_california)"}, "slice_id": 83, "subheader_font_size": 0.15, "time_range": "100 years ago : now", "url_params": {}, "viz_type": "big_number_total", "y_axis_format": "SMART_NUMBER", "remote_id": 83, "datasource_name": "birth_names_2", "schema": null, "database_name": "examples"}', + "slice_name": "Number of California Births", + "viz_type": "big_number_total", + } + } + ], + "slug": None, + } + } + ], + "datasources": [ + { + "__SqlaTable__": { + "cache_timeout": None, + "columns": [ + { + "__TableColumn__": { + "changed_by_fk": None, + "changed_on": {"__datetime__": "2020-10-07T15:50:00"}, + "column_name": "ds", + "created_by_fk": None, + "created_on": {"__datetime__": "2020-10-07T15:50:00"}, + "description": None, + "expression": None, + "filterable": True, + "groupby": True, + "id": 332, + "is_active": True, + "is_dttm": True, + "python_date_format": None, + 
"table_id": 3, + "type": "DATETIME", + "uuid": "98e22f20-ed71-4483-b09d-31780ed1fc1b", + "verbose_name": None, + } + }, + { + "__TableColumn__": { + "changed_by_fk": None, + "changed_on": {"__datetime__": "2020-10-07T15:50:00"}, + "column_name": "gender", + "created_by_fk": None, + "created_on": {"__datetime__": "2020-10-07T15:50:00"}, + "description": None, + "expression": None, + "filterable": True, + "groupby": True, + "id": 333, + "is_active": True, + "is_dttm": False, + "python_date_format": None, + "table_id": 3, + "type": "VARCHAR(16)", + "uuid": "08e08f02-fb81-4461-bba6-c8c8dfef0c02", + "verbose_name": None, + } + }, + { + "__TableColumn__": { + "changed_by_fk": None, + "changed_on": {"__datetime__": "2020-10-07T15:50:00"}, + "column_name": "name", + "created_by_fk": None, + "created_on": {"__datetime__": "2020-10-07T15:50:00"}, + "description": None, + "expression": None, + "filterable": True, + "groupby": True, + "id": 334, + "is_active": True, + "is_dttm": False, + "python_date_format": None, + "table_id": 3, + "type": "VARCHAR(255)", + "uuid": "c67b14d9-fc4b-427d-a363-a53af015fb5e", + "verbose_name": None, + } + }, + { + "__TableColumn__": { + "changed_by_fk": None, + "changed_on": {"__datetime__": "2020-10-07T15:50:00"}, + "column_name": "num", + "created_by_fk": None, + "created_on": {"__datetime__": "2020-10-07T15:50:00"}, + "description": None, + "expression": None, + "filterable": True, + "groupby": True, + "id": 335, + "is_active": True, + "is_dttm": False, + "python_date_format": None, + "table_id": 3, + "type": "BIGINT", + "uuid": "69835b93-7169-4a2c-baa7-c1c92f21d10a", + "verbose_name": None, + } + }, + { + "__TableColumn__": { + "changed_by_fk": None, + "changed_on": {"__datetime__": "2020-10-07T15:50:00"}, + "column_name": "state", + "created_by_fk": None, + "created_on": {"__datetime__": "2020-10-07T15:50:00"}, + "description": None, + "expression": None, + "filterable": True, + "groupby": True, + "id": 336, + "is_active": True, + "is_dttm": False, + "python_date_format": None, + "table_id": 3, + "type": "VARCHAR(10)", + "uuid": "80003ad0-bdd0-48d3-ade3-8d1838e07d7a", + "verbose_name": None, + } + }, + { + "__TableColumn__": { + "changed_by_fk": None, + "changed_on": {"__datetime__": "2020-10-07T15:50:00"}, + "column_name": "num_boys", + "created_by_fk": None, + "created_on": {"__datetime__": "2020-10-07T15:50:00"}, + "description": None, + "expression": None, + "filterable": True, + "groupby": True, + "id": 337, + "is_active": True, + "is_dttm": False, + "python_date_format": None, + "table_id": 3, + "type": "BIGINT", + "uuid": "8373ed24-4d4e-4307-9eee-8deefeecbb57", + "verbose_name": None, + } + }, + { + "__TableColumn__": { + "changed_by_fk": None, + "changed_on": {"__datetime__": "2020-10-07T15:50:00"}, + "column_name": "num_girls", + "created_by_fk": None, + "created_on": {"__datetime__": "2020-10-07T15:50:00"}, + "description": None, + "expression": None, + "filterable": True, + "groupby": True, + "id": 338, + "is_active": True, + "is_dttm": False, + "python_date_format": None, + "table_id": 3, + "type": "BIGINT", + "uuid": "46f2de5f-c008-4024-a163-0b5c5f1d5580", + "verbose_name": None, + } + }, + { + "__TableColumn__": { + "changed_by_fk": None, + "changed_on": {"__datetime__": "2020-10-07T15:50:32"}, + "column_name": "num_california", + "created_by_fk": None, + "created_on": {"__datetime__": "2020-10-07T15:50:32"}, + "description": None, + "expression": "CASE WHEN state = 'CA' THEN num ELSE 0 END", + "filterable": True, + "groupby": True, + "id": 434, + 
"is_active": True, + "is_dttm": False, + "python_date_format": None, + "table_id": 3, + "type": None, + "uuid": "35e32aa6-be2b-4086-9c78-4ea3351ec079", + "verbose_name": None, + } + }, + ], + "database_id": 1000, + "default_endpoint": None, + "description": None, + "extra": None, + "fetch_values_predicate": None, + "filter_select_enabled": True, + "main_dttm_col": "ds", + "metrics": [ + { + "__SqlMetric__": { + "changed_by_fk": None, + "changed_on": {"__datetime__": "2020-10-07T15:50:00"}, + "created_by_fk": None, + "created_on": {"__datetime__": "2020-10-07T15:50:00"}, + "d3format": None, + "description": None, + "expression": "COUNT(*)", + "extra": None, + "id": 9, + "metric_name": "count", + "metric_type": "count", + "table_id": 3, + "uuid": "1042ef50-ebf9-4271-b44e-3aaa891f6c21", + "verbose_name": "COUNT(*)", + "warning_text": None, + } + }, + { + "__SqlMetric__": { + "changed_by_fk": None, + "changed_on": {"__datetime__": "2020-10-07T15:50:00"}, + "created_by_fk": None, + "created_on": {"__datetime__": "2020-10-07T15:50:00"}, + "d3format": None, + "description": None, + "expression": "SUM(num)", + "extra": None, + "id": 10, + "metric_name": "sum__num", + "metric_type": None, + "table_id": 3, + "uuid": "d807f208-e3c6-4b89-b790-41f521216ff6", + "verbose_name": None, + "warning_text": None, + } + }, + ], + "offset": 0, + "params": '{"remote_id": 3, "database_name": "examples", "import_time": 1604342885}', + "schema": None, + "sql": None, + "table_name": "birth_names_2", + "template_params": None, + } + } + ], +} + +# example V1 import/export format +database_metadata_config: Dict[str, Any] = { + "version": "1.0.0", + "type": "Database", + "timestamp": "2020-11-04T21:27:44.423819+00:00", +} + +dataset_metadata_config: Dict[str, Any] = { + "version": "1.0.0", + "type": "SqlaTable", + "timestamp": "2020-11-04T21:27:44.423819+00:00", +} + +chart_metadata_config: Dict[str, Any] = { + "version": "1.0.0", + "type": "Slice", + "timestamp": "2020-11-04T21:27:44.423819+00:00", +} + +dashboard_metadata_config: Dict[str, Any] = { + "version": "1.0.0", + "type": "Dashboard", + "timestamp": "2020-11-04T21:27:44.423819+00:00", +} +saved_queries_metadata_config: Dict[str, Any] = { + "version": "1.0.0", + "type": "SavedQuery", + "timestamp": "2021-03-30T20:37:54.791187+00:00", +} +database_config: Dict[str, Any] = { + "allow_csv_upload": True, + "allow_ctas": True, + "allow_cvas": True, + "allow_dml": True, + "allow_run_async": False, + "cache_timeout": None, + "database_name": "imported_database", + "expose_in_sqllab": True, + "extra": {}, + "sqlalchemy_uri": "sqlite:///test.db", + "uuid": "b8a1ccd3-779d-4ab7-8ad8-9ab119d7fe89", + "version": "1.0.0", +} + +dataset_config: Dict[str, Any] = { + "table_name": "imported_dataset", + "main_dttm_col": None, + "description": "This is a dataset that was exported", + "default_endpoint": "", + "offset": 66, + "cache_timeout": 55, + "schema": "", + "sql": "", + "params": None, + "template_params": {}, + "filter_select_enabled": True, + "fetch_values_predicate": None, + "extra": '{ "certification": { "certified_by": "Data Platform Team", "details": "This table is the source of truth." }, "warning_markdown": "This is a warning." 
}', + "metrics": [ + { + "metric_name": "count", + "verbose_name": "", + "metric_type": None, + "expression": "count(1)", + "description": None, + "d3format": None, + "extra": {}, + "warning_text": None, + }, + ], + "columns": [ + { + "column_name": "cnt", + "verbose_name": "Count of something", + "is_dttm": False, + "is_active": None, + "type": "NUMBER", + "groupby": False, + "filterable": True, + "expression": "", + "description": None, + "python_date_format": None, + }, + ], + "version": "1.0.0", + "uuid": "10808100-158b-42c4-842e-f32b99d88dfb", + "database_uuid": "b8a1ccd3-779d-4ab7-8ad8-9ab119d7fe89", +} + +chart_config: Dict[str, Any] = { + "slice_name": "Deck Path", + "viz_type": "deck_path", + "params": { + "color_picker": {"a": 1, "b": 135, "g": 122, "r": 0}, + "datasource": "12__table", + "js_columns": ["color"], + "js_data_mutator": r"data => data.map(d => ({\n ...d,\n color: colors.hexToRGB(d.extraProps.color)\n}));", + "js_onclick_href": "", + "js_tooltip": "", + "line_column": "path_json", + "line_type": "json", + "line_width": 150, + "mapbox_style": "mapbox://styles/mapbox/light-v9", + "reverse_long_lat": False, + "row_limit": 5000, + "slice_id": 43, + "time_grain_sqla": None, + "time_range": " : ", + "viewport": { + "altitude": 1.5, + "bearing": 0, + "height": 1094, + "latitude": 37.73671752604488, + "longitude": -122.18885402582598, + "maxLatitude": 85.05113, + "maxPitch": 60, + "maxZoom": 20, + "minLatitude": -85.05113, + "minPitch": 0, + "minZoom": 0, + "pitch": 0, + "width": 669, + "zoom": 9.51847667620428, + }, + "viz_type": "deck_path", + }, + "query_context": '{"datasource":{"id":12,"type":"table"},"force":false,"queries":[{"time_range":" : ","filters":[],"extras":{"time_grain_sqla":null,"having":"","having_druid":[],"where":""},"applied_time_extras":{},"columns":[],"metrics":[],"annotation_layers":[],"row_limit":5000,"timeseries_limit":0,"order_desc":true,"url_params":{},"custom_params":{},"custom_form_data":{}}],"result_format":"json","result_type":"full"}', + "cache_timeout": None, + "uuid": "0c23747a-6528-4629-97bf-e4b78d3b9df1", + "version": "1.0.0", + "dataset_uuid": "10808100-158b-42c4-842e-f32b99d88dfb", +} + +dashboard_config: Dict[str, Any] = { + "dashboard_title": "Test dash", + "description": None, + "css": "", + "slug": None, + "uuid": "c4b28c4e-a1fe-4cf8-a5ac-d6f11d6fdd51", + "position": { + "CHART-SVAlICPOSJ": { + "children": [], + "id": "CHART-SVAlICPOSJ", + "meta": { + "chartId": 83, + "height": 50, + "sliceName": "Number of California Births", + "uuid": "0c23747a-6528-4629-97bf-e4b78d3b9df1", + "width": 4, + }, + "parents": ["ROOT_ID", "GRID_ID", "ROW-dP_CHaK2q"], + "type": "CHART", + }, + "DASHBOARD_VERSION_KEY": "v2", + "GRID_ID": { + "children": ["ROW-dP_CHaK2q"], + "id": "GRID_ID", + "parents": ["ROOT_ID"], + "type": "GRID", + }, + "HEADER_ID": { + "id": "HEADER_ID", + "meta": {"text": "Test dash"}, + "type": "HEADER", + }, + "ROOT_ID": {"children": ["GRID_ID"], "id": "ROOT_ID", "type": "ROOT"}, + "ROW-dP_CHaK2q": { + "children": ["CHART-SVAlICPOSJ"], + "id": "ROW-dP_CHaK2q", + "meta": {"0": "ROOT_ID", "background": "BACKGROUND_TRANSPARENT"}, + "parents": ["ROOT_ID", "GRID_ID"], + "type": "ROW", + }, + }, + "metadata": { + "timed_refresh_immune_slices": [83], + "filter_scopes": { + "83": {"region": {"scope": ["ROOT_ID"], "immune": [83]}}, + }, + "expanded_slices": {"83": True}, + "refresh_frequency": 0, + "default_filters": "{}", + "color_scheme": None, + "remote_id": 7, + "import_time": 1604342885, + }, + "version": "1.0.0", +} 
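The *_metadata_config and *_config dictionaries above mirror the v1 export format, where each entity lives in its own YAML file inside the exported archive. A rough, hedged sketch of how such dicts are typically stitched into the contents mapping that the v1 import commands consume; the file-name layout and the command wiring are assumptions and may differ from the actual test modules:

import yaml

# Lay the dicts out the way a v1 export bundle does: metadata.yaml at the root,
# one YAML document per database/dataset under their own directories.
contents = {
    "metadata.yaml": yaml.safe_dump(dataset_metadata_config),
    "databases/imported_database.yaml": yaml.safe_dump(database_config),
    "datasets/imported_dataset.yaml": yaml.safe_dump(dataset_config),
}
# `contents` is then handed to the relevant Import*Command (dataset, chart,
# dashboard, ...) in the corresponding commands tests.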
+saved_queries_config = { + "schema": "public", + "label": "Test Saved Query", + "description": None, + "sql": "-- Note: Unless you save your query, these tabs will NOT persist if you clear\nyour cookies or change browsers.\n\n\nSELECT * from birth_names", + "uuid": "05b679b5-8eaf-452c-b874-a7a774cfa4e9", + "version": "1.0.0", + "database_uuid": "b8a1ccd3-779d-4ab7-8ad8-9ab119d7fe89", +} diff --git a/tests/integration_tests/fixtures/public_role.py b/tests/integration_tests/fixtures/public_role.py new file mode 100644 index 0000000000000..892098b401b26 --- /dev/null +++ b/tests/integration_tests/fixtures/public_role.py @@ -0,0 +1,44 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import pytest + +from superset.extensions import db, security_manager +from tests.integration_tests.test_app import app + + +@pytest.fixture() +def public_role_like_gamma(): + with app.app_context(): + app.config["PUBLIC_ROLE_LIKE"] = "Gamma" + security_manager.sync_role_definitions() + + yield + + security_manager.get_public_role().permissions = [] + db.session.commit() + + +@pytest.fixture() +def public_role_like_test_role(): + with app.app_context(): + app.config["PUBLIC_ROLE_LIKE"] = "TestRole" + security_manager.sync_role_definitions() + + yield + + security_manager.get_public_role().permissions = [] + db.session.commit() diff --git a/tests/integration_tests/fixtures/pyodbcRow.py b/tests/integration_tests/fixtures/pyodbcRow.py new file mode 100644 index 0000000000000..237b31524214b --- /dev/null +++ b/tests/integration_tests/fixtures/pyodbcRow.py @@ -0,0 +1,25 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+class Row: + def __init__(self, values): + self.values = values + + def __name__(self): # pylint: disable=no-self-use + return "Row" + + def __iter__(self): + return (item for item in self.values) diff --git a/tests/integration_tests/fixtures/query_context.py b/tests/integration_tests/fixtures/query_context.py new file mode 100644 index 0000000000000..00a3036e01c25 --- /dev/null +++ b/tests/integration_tests/fixtures/query_context.py @@ -0,0 +1,50 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from typing import Any, Dict, Optional + +from tests.common.query_context_generator import QueryContextGenerator +from tests.integration_tests.base_tests import SupersetTestCase + + +class QueryContextGeneratorInteg(QueryContextGenerator): + def get_table(self, name, id_, type_): + return SupersetTestCase.get_table(name=name) + + +def get_query_context( + query_name: str, + add_postprocessing_operations: bool = False, + add_time_offsets: bool = False, + form_data: Optional[Dict[str, Any]] = None, +) -> Dict[str, Any]: + """ + Create a request payload for retrieving a QueryContext object via the + `api/v1/chart/data` endpoint. By default returns a payload corresponding to one + generated by the "Boy Name Cloud" chart in the examples. + :param query_name: name of an example query, which is always in the format + of `datasource_name[:test_case_name]`, where `:test_case_name` is optional. + :param add_postprocessing_operations: Add post-processing operations to QueryObject + :param add_time_offsets: Add time offsets to QueryObject(advanced analytics) + :param form_data: chart metadata + :return: Request payload + """ + return QueryContextGeneratorInteg().generate( + query_name=query_name, + add_postprocessing_operations=add_postprocessing_operations, + add_time_offsets=add_time_offsets, + form_data=form_data, + ) diff --git a/tests/integration_tests/fixtures/sample.png b/tests/integration_tests/fixtures/sample.png new file mode 100644 index 0000000000000000000000000000000000000000..a3522415108e9f8a12217512738cf384d5ff5644 GIT binary patch literal 4481 zcmZWsbySpX(_awjPC;S?B!wj;M7ojgrOTyUmIYS2W9jY|kdjtd>5!6K2}zNZ?p6?9 ze4gif-{+j~I%n>gYi52k*UX&zkBQXMP$I&o!v_EWL@LViI(MAPOmuMZct|lg3q0N`)8Mw@E8{$Tgh%=! 
zH1v(N5;c`60aqb-jEeOBH0I|*W_JkL?r$p@2*m8kjFwk2W^?;_+lgQD)yD8MyBoLt zrjrrC8)*ON$cUmo;4|48K~acrW~9o1+Bi!ffKn@njt}A{-z2H6y$l%KJUKYt$F4`Z zepMvjvDmn+D0B5y!PZr#Bs^DsTVvVrBWN2`R&kKhgwKg2 zOf$%`fvb^uyi5H`(v?^)A33<$)b_95@O6W%Clxv3(%{0#OQ$S8K4aSuhxG`>lTXlR z&@JOy8a@MmMQH6Xjp|w?dAHItxYrGa0P%x#_K0M+ZcC4!ibH;?t{;ll&S!+p-L>i~ zM`tj!WroWRQhjGE--}ts3(OX1A2)pev98oR*tpDri{szP#254+PLGe^C8qzh^aE3%+Zqc)GQnq<92cL(^4|dhh#We z%=#e@<1%MK_+bP*_ls&H(MlJE17*`!HUGpzoOLxm+S5<=LC3o~9cW^au*zJx#W#xM z@NX{I)kM(4NInXtZx7eku^JWyDdSzP42GDdlWT7znVQT8bOuyLnP2aYT*eyxe$ac+ zLg%t9xT~ZJeddn)JUq9D*U||rXlcz}OI=CvoUO|y_fUQguc$&nwF%NCdaeDi#kA<2ubT;@FU^~?v*es;Ii`5q|mp(tq1@a z%SmFfGObP=RMDOJ_2-(;+H0(?7+W$h7Lr8&HKAj?UM;S7aodF@n^==nXENPizzDId zxb=cCd;VtpD*1{rKYI#q^0rWOC7&^xO;T(=g_s~(OW_Re^>7920Wcj~54CkM_`X4` zkY&}Oghj7+9mXz;M$eWg#mAsF+L-=jdT|nqh?*Xe)do)rf4Yt+sUDo88Z0`Qa7zU^ zdnhS6-d5;rD7ox?E0Sg0Wx{LNFWiF&dFBv%MMTgdgq%NFG`=yZF?B{6%6?ADL=mpc znnXPiwWboqTBq{!E%`Xxxa2tAIF%05);*)dN=-`W$CNKOzPue^KPNsk+s&Ww+_K zE&p2ZjPy+HOyKPIw%K+Fog*EE(2CHyy`BU8#3sa&P*)L`CA%)mCBY@+$RTH6G~cDw zg}YBErbS@CZQWBnjZO1|-UscI=5= zPR^!x^1s}Mly{w4*$Q%JmS%=vi(z+RTZiOgn_+h&ZILcW1!OGpB9=q3TG2sqCsr&5 zJN7J={UKVUTBO~k&~n8w$?E;sTF!uvuHXdL1f*o71U>R}M0OZ|WP4;kb7Ht{UnN9e`}#{dOTMoc z@N^4*Lw!cgp8vpF#T%fQ(hwC{uEl07YJH!RW zMbn$mTd3D(ea>!lTH#@Do_JXb(O}T9$T7I8cP2 zyf#^zT0b9Kn3Y)k{pQMoQJB$)(MKXrQy{x1yWVHqMpI4`wV{DF2bCB`O%vCcJKu9g zc{E(Gq>)7r&2@XP+bkzUtI8jNC-RWw&Lq<;-MhLi31=kiDT!#6FZTT3$lu+!zhw z+*a6e8*VbLjJz9b1Yx8pjhEEm6E4v9N6sEjZQXYz;<2dsJSIP0B%`39t@CgTcyZ=y zoyY3%YL5%U(csa<%FfCn`z3n{`zf1-vT1@Rr^@rl{K+J>7jc^3lf|D3S#BfC{U&ls zQq@}c7ofj#wrFha(Vz2Npe4s@=fiTGp(&v$ZJhbDiMx*HT8_%wX_GQ>Om-|A_tSgL z(0dzR$Aezzaq)@4iM&#uk{ZDXoagf$*(zg|0guNeQ>STItLV1TD4j9APQJ7XGL+Hp z3ZFgtKr*78%aSqiY}$1{stJvMk02m39KAZcli|!4FmN4C;7h%08u3KUY^LtBz4lRI zd#_CNt8}fjmpnh)K3A8uZtjhT7FYE)_eMuE-BOA!#6#xOSBHKMwHSz2 z&(yEH;{14OooQyIFuReOlS&hSx@|h%NYrj0aubW?P5R@G5Ok9{d(sA8;A^8fI=s$}F%)pkZ--PeB3{^
LoELSZ4djIq)k$0oIWQaH2qoxlH1DtrtTBeP3O(NRLRZQ1@lGkq4Yrhy8xJ%!_D!bMP-K=4h7DY zIi5vqtNBmG9rcm%BccDpAr+7t5{ep z+~wYX#IQ#vFZ^Vc4c)k6K#@-_T##RL++yi3IT1LvXy@Op33za2Be%P~;qf2BP|;yM z1OPCy9rVBmu)3O@aeyS|1$go6bJoz{=drkyH|f{@0ulvFAn-|<0bJo z56HXkWDuc>#(G5-wa?hxJ)$3Of|#u&=*-MPDCq^row=qVFHlL=s${Dkt!@LM_H zuOj^dWnAKL;VmC`>N35W8CFIg@pEMJ!fgz{_n2cS;6b^&P0`DIaleNvzSpunzn4+isB>pu*IF&{t0A%+9#W`KLQJMg z{$XQRQtcM1-ofDUe9kX7n5*wiT+Ry=jLjZ}uP;}JE}REr2iFJ>JP8cA*&e#cJkA4d z>gjCjZG7LhTI!QGgc_#)l1*_WdOE$x-E%?xlP@49@b*axx3-(n>WnJL!cX@|P}bDr zvfi$BkE?4|h1p@4FPu?wK-k?bmN5Jh&W025HiHH+`c%<6@yZjIkL=U^22JKOorGRq z@JQ7`zbdK8oFg+o&({$sr=dmmXjm}Gxnkvtt&7y=ji-9At449i8rh3B*Y8+8Jxup_ zlRK{jB6%23G^p@_n7!fA;)zfhtH`YMnKYXtv0~h(m6I$S^&Qt zU7^lQ9s89vT`n2HEEX9geeH@d3urR=mi7e>U^WQ2QauI|vm47i zeI#~?Z8Qt=7VYy;)SuK!^_U_{|K5LRJKM@xH4Wr-&7a9s6)wGKL+S-7h5?$$}csH zs1d-@6)eKS@Ot9>-iM;q^Oia!I#;gdTr}TPTu{8u-!mf?rj`n3Rev%KayTEeHq?Ht zpfEtDNr-;is+_rZzqnbC-Xv8aou)cs%6h&bWw`jep2{`}Zwj5j-cTRkpC3&{K|{V; H)-w1%S{FaS literal 0 HcmV?d00001 diff --git a/tests/integration_tests/fixtures/tabbed_dashboard.py b/tests/integration_tests/fixtures/tabbed_dashboard.py new file mode 100644 index 0000000000000..15a871c7aea9e --- /dev/null +++ b/tests/integration_tests/fixtures/tabbed_dashboard.py @@ -0,0 +1,141 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+import json + +import pytest + +from superset import db +from superset.models.dashboard import Dashboard +from superset.utils.core import shortid +from tests.integration_tests.dashboards.superset_factory_util import create_dashboard + + +@pytest.fixture +def tabbed_dashboard(app_context): + position_json = { + "DASHBOARD_VERSION_KEY": "v2", + "GRID_ID": { + "children": ["TABS-L1A", "TABS-L1B"], + "id": "GRID_ID", + "parents": ["ROOT_ID"], + "type": "GRID", + }, + "HEADER_ID": { + "id": "HEADER_ID", + "meta": {"text": "tabbed dashboard"}, + "type": "HEADER", + }, + "ROOT_ID": {"children": ["GRID_ID"], "id": "ROOT_ID", "type": "ROOT"}, + "TAB-L1AA": { + "children": [], + "id": "TAB-L1AA", + "meta": { + "defaultText": "Tab title", + "placeholder": "Tab title", + "text": "Tab L1AA", + }, + "parents": ["ROOT_ID", "GRID_ID", "TABS-L1A"], + "type": "TAB", + }, + "TAB-L1AB": { + "children": [], + "id": "TAB-L1AB", + "meta": { + "defaultText": "Tab title", + "placeholder": "Tab title", + "text": "Tab L1AB", + }, + "parents": ["ROOT_ID", "GRID_ID", "TABS-L1A"], + "type": "TAB", + }, + "TABS-L1A": { + "children": ["TAB-L1AA", "TAB-L1AB"], + "id": "TABS-L1A", + "meta": {}, + "parents": ["ROOT_ID", "GRID_ID"], + "type": "TABS", + }, + "TAB-L1BA": { + "children": [], + "id": "TAB-L1BA", + "meta": { + "defaultText": "Tab title", + "placeholder": "Tab title", + "text": "Tab L1B", + }, + "parents": ["ROOT_ID", "GRID_ID", "TABS-L1B"], + "type": "TAB", + }, + "TAB-L1BB": { + "children": ["TABS-L2A"], + "id": "TAB-L1BB", + "meta": { + "defaultText": "Tab title", + "placeholder": "Tab title", + "text": "Tab 2", + }, + "parents": ["ROOT_ID", "GRID_ID", "TABS-L1B"], + "type": "TAB", + }, + "TABS-L1B": { + "children": ["TAB-L1BA", "TAB-L1BB"], + "id": "TABS-L1B", + "meta": {}, + "parents": ["ROOT_ID", "GRID_ID"], + "type": "TABS", + }, + "TAB-L2AA": { + "children": [], + "id": "TAB-L2AA", + "meta": { + "defaultText": "Tab title", + "placeholder": "Tab title", + "text": "Tab L2AA", + }, + "parents": ["ROOT_ID", "GRID_ID", "TABS-L2A"], + "type": "TAB", + }, + "TAB-L2AB": { + "children": [], + "id": "TAB-L2AB", + "meta": { + "defaultText": "Tab title", + "placeholder": "Tab title", + "text": "Tab L2AB", + }, + "parents": ["ROOT_ID", "GRID_ID", "TABS-L2A"], + "type": "TAB", + }, + "TABS-L2A": { + "children": ["TAB-L2AA", "TAB-L2AB"], + "id": "TABS-L2A", + "meta": {}, + "parents": ["ROOT_ID", "GRID_ID", "TABS-L1BB"], + "type": "TABS", + }, + } + dash = create_dashboard( + slug=f"tabbed-dash-{shortid()}", + dashboard_title="Test tabbed dash", + position_json=json.dumps(position_json), + slices=[], + ) + db.session.add(dash) + db.session.commit() + yield dash + db.session.query(Dashboard).filter_by(id=dash.id).delete() + db.session.commit() diff --git a/tests/integration_tests/fixtures/tags.py b/tests/integration_tests/fixtures/tags.py new file mode 100644 index 0000000000000..57fd4ec7196e2 --- /dev/null +++ b/tests/integration_tests/fixtures/tags.py @@ -0,0 +1,33 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import pytest + +from superset.tags.core import clear_sqla_event_listeners, register_sqla_event_listeners +from tests.integration_tests.test_app import app + + +@pytest.fixture +def with_tagging_system_feature(): + with app.app_context(): + is_enabled = app.config["DEFAULT_FEATURE_FLAGS"]["TAGGING_SYSTEM"] + if not is_enabled: + app.config["DEFAULT_FEATURE_FLAGS"]["TAGGING_SYSTEM"] = True + register_sqla_event_listeners() + yield + app.config["DEFAULT_FEATURE_FLAGS"]["TAGGING_SYSTEM"] = False + clear_sqla_event_listeners() diff --git a/tests/integration_tests/fixtures/trends.csv b/tests/integration_tests/fixtures/trends.csv new file mode 100644 index 0000000000000..1e347d915021d --- /dev/null +++ b/tests/integration_tests/fixtures/trends.csv @@ -0,0 +1,3 @@ +t1,t2,t3__sum +c11,c12,c13 +c21,c22,c23 diff --git a/tests/integration_tests/fixtures/unicode_dashboard.py b/tests/integration_tests/fixtures/unicode_dashboard.py new file mode 100644 index 0000000000000..78178bcde7551 --- /dev/null +++ b/tests/integration_tests/fixtures/unicode_dashboard.py @@ -0,0 +1,120 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
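The with_tagging_system_feature fixture above flips the TAGGING_SYSTEM feature flag and registers the SQLAlchemy event listeners for the duration of a test, then restores both. A minimal illustrative use; the test shown is a placeholder and not part of this patch:

import pytest


@pytest.mark.usefixtures("with_tagging_system_feature")
def test_needs_tagging_system() -> None:
    # While this test runs, the TAGGING_SYSTEM flag is on and the listeners
    # are registered, so creating dashboards/charts/saved queries would
    # produce the corresponding tag rows.  Body elided: the concrete
    # assertions depend on the scenario being exercised.
    ...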
+import pandas as pd +import pytest +from sqlalchemy import String + +from superset import db +from superset.connectors.sqla.models import SqlaTable +from superset.models.dashboard import Dashboard +from superset.models.slice import Slice +from superset.utils.core import get_example_default_schema +from superset.utils.database import get_example_database +from tests.integration_tests.dashboard_utils import ( + create_dashboard, + create_slice, + create_table_metadata, +) +from tests.integration_tests.test_app import app + +UNICODE_TBL_NAME = "unicode_test" + + +@pytest.fixture(scope="session") +def load_unicode_data(): + with app.app_context(): + with get_example_database().get_sqla_engine_with_context() as engine: + _get_dataframe().to_sql( + UNICODE_TBL_NAME, + engine, + if_exists="replace", + chunksize=500, + dtype={"phrase": String(500)}, + index=False, + method="multi", + schema=get_example_default_schema(), + ) + + yield + with app.app_context(): + with get_example_database().get_sqla_engine_with_context() as engine: + engine.execute("DROP TABLE IF EXISTS unicode_test") + + +@pytest.fixture() +def load_unicode_dashboard_with_slice(load_unicode_data): + slice_name = "Unicode Cloud" + with app.app_context(): + dash = _create_unicode_dashboard(slice_name, None) + yield + _cleanup(dash, slice_name) + + +@pytest.fixture() +def load_unicode_dashboard_with_position(load_unicode_data): + slice_name = "Unicode Cloud" + position = "{}" + with app.app_context(): + dash = _create_unicode_dashboard(slice_name, position) + yield + _cleanup(dash, slice_name) + + +def _get_dataframe(): + data = _get_unicode_data() + return pd.DataFrame.from_dict(data) + + +def _get_unicode_data(): + return [ + {"phrase": "Под"}, + {"phrase": "řšž"}, + {"phrase": "視野無限廣"}, + {"phrase": "微風"}, + {"phrase": "中国智造"}, + {"phrase": "æøå"}, + {"phrase": "ëœéè"}, + {"phrase": "いろはにほ"}, + ] + + +def _create_unicode_dashboard(slice_title: str, position: str) -> Dashboard: + table = create_table_metadata(UNICODE_TBL_NAME, get_example_database()) + table.fetch_metadata() + + if slice_title: + slice = _create_and_commit_unicode_slice(table, slice_title) + + return create_dashboard("unicode-test", "Unicode Test", position, [slice]) + + +def _create_and_commit_unicode_slice(table: SqlaTable, title: str): + slice = create_slice(title, "word_cloud", table, {}) + o = db.session.query(Slice).filter_by(slice_name=slice.slice_name).one_or_none() + if o: + db.session.delete(o) + db.session.add(slice) + db.session.commit() + return slice + + +def _cleanup(dash: Dashboard, slice_name: str) -> None: + db.session.delete(dash) + if slice_name: + slice = db.session.query(Slice).filter_by(slice_name=slice_name).one_or_none() + db.session.delete(slice) + db.session.commit() diff --git a/tests/integration_tests/fixtures/world_bank_dashboard.py b/tests/integration_tests/fixtures/world_bank_dashboard.py new file mode 100644 index 0000000000000..561bbe10b2709 --- /dev/null +++ b/tests/integration_tests/fixtures/world_bank_dashboard.py @@ -0,0 +1,505 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import json +import string +from random import choice, randint, random, uniform +from typing import Any, Dict, List + +import pandas as pd +import pytest +from pandas import DataFrame +from sqlalchemy import DateTime, String + +from superset import db +from superset.connectors.sqla.models import SqlaTable +from superset.models.core import Database +from superset.models.dashboard import Dashboard +from superset.models.slice import Slice +from superset.utils.core import get_example_default_schema +from superset.utils.database import get_example_database +from tests.integration_tests.dashboard_utils import ( + create_dashboard, + create_table_metadata, +) +from tests.integration_tests.test_app import app + +WB_HEALTH_POPULATION = "wb_health_population" + + +@pytest.fixture(scope="session") +def load_world_bank_data(): + with app.app_context(): + database = get_example_database() + dtype = { + "year": DateTime if database.backend != "presto" else String(255), + "country_code": String(3), + "country_name": String(255), + "region": String(255), + } + with database.get_sqla_engine_with_context() as engine: + _get_dataframe(database).to_sql( + WB_HEALTH_POPULATION, + engine, + if_exists="replace", + chunksize=500, + dtype=dtype, + index=False, + method="multi", + schema=get_example_default_schema(), + ) + + yield + with app.app_context(): + with get_example_database().get_sqla_engine_with_context() as engine: + engine.execute("DROP TABLE IF EXISTS wb_health_population") + + +@pytest.fixture() +def load_world_bank_dashboard_with_slices(load_world_bank_data): + with app.app_context(): + dash_id_to_delete, slices_ids_to_delete = create_dashboard_for_loaded_data() + yield + _cleanup(dash_id_to_delete, slices_ids_to_delete) + + +@pytest.fixture(scope="module") +def load_world_bank_dashboard_with_slices_module_scope(load_world_bank_data): + with app.app_context(): + dash_id_to_delete, slices_ids_to_delete = create_dashboard_for_loaded_data() + yield + _cleanup(dash_id_to_delete, slices_ids_to_delete) + + +def create_dashboard_for_loaded_data(): + with app.app_context(): + table = create_table_metadata(WB_HEALTH_POPULATION, get_example_database()) + slices = _create_world_bank_slices(table) + dash = _create_world_bank_dashboard(table) + slices_ids_to_delete = [slice.id for slice in slices] + dash_id_to_delete = dash.id + return dash_id_to_delete, slices_ids_to_delete + + +def _create_world_bank_slices(table: SqlaTable) -> List[Slice]: + from superset.examples.world_bank import create_slices + + slices = create_slices(table) + _commit_slices(slices) + return slices + + +def _commit_slices(slices: List[Slice]): + for slice in slices: + o = db.session.query(Slice).filter_by(slice_name=slice.slice_name).one_or_none() + if o: + db.session.delete(o) + db.session.add(slice) + db.session.commit() + + +def _create_world_bank_dashboard(table: SqlaTable) -> Dashboard: + from superset.examples.helpers import update_slice_ids + from superset.examples.world_bank import dashboard_positions + + pos = dashboard_positions + slices = update_slice_ids(pos) + + table.fetch_metadata() + + dash = create_dashboard( + 
"world_health", "World Bank's Data", json.dumps(pos), slices + ) + dash.json_metadata = '{"mock_key": "mock_value"}' + db.session.commit() + return dash + + +def _cleanup(dash_id: int, slices_ids: List[int]) -> None: + dash = db.session.query(Dashboard).filter_by(id=dash_id).first() + db.session.delete(dash) + for slice_id in slices_ids: + db.session.query(Slice).filter_by(id=slice_id).delete() + db.session.commit() + + +def _get_dataframe(database: Database) -> DataFrame: + data = _get_world_bank_data() + df = pd.DataFrame.from_dict(data) + if database.backend == "presto": + df.year = pd.to_datetime(df.year) + df.year = df.year.dt.strftime("%Y-%m-%d %H:%M%:%S") + else: + df.year = pd.to_datetime(df.year) + + return df + + +def _get_world_bank_data() -> List[Dict[Any, Any]]: + data = [] + for _ in range(100): + data.append( + { + "country_name": "".join( + choice(string.ascii_uppercase + string.ascii_lowercase + " ") + for _ in range(randint(3, 10)) + ), + "country_code": "".join( + choice(string.ascii_uppercase + string.ascii_lowercase) + for _ in range(3) + ), + "region": "".join( + choice(string.ascii_uppercase + string.ascii_lowercase) + for _ in range(randint(3, 10)) + ), + "year": "-".join( + [str(randint(1900, 2020)), str(randint(1, 12)), str(randint(1, 28))] + ), + "NY_GNP_PCAP_CD": get_random_float_or_none(0, 100, 0.3), + "SE_ADT_1524_LT_FM_ZS": get_random_float_or_none(0, 100, 0.3), + "SE_ADT_1524_LT_MA_ZS": get_random_float_or_none(0, 100, 0.3), + "SE_ADT_1524_LT_ZS": get_random_float_or_none(0, 100, 0.3), + "SE_ADT_LITR_FE_ZS": get_random_float_or_none(0, 100, 0.3), + "SE_ADT_LITR_MA_ZS": get_random_float_or_none(0, 100, 0.3), + "SE_ADT_LITR_ZS": get_random_float_or_none(0, 100, 0.3), + "SE_ENR_ORPH": get_random_float_or_none(0, 100, 0.3), + "SE_PRM_CMPT_FE_ZS": get_random_float_or_none(0, 100, 0.3), + "SE_PRM_CMPT_MA_ZS": get_random_float_or_none(0, 100, 0.3), + "SE_PRM_CMPT_ZS": get_random_float_or_none(0, 100, 0.3), + "SE_PRM_ENRR": get_random_float_or_none(0, 100, 0.3), + "SE_PRM_ENRR_FE": get_random_float_or_none(0, 100, 0.3), + "SE_PRM_ENRR_MA": get_random_float_or_none(0, 100, 0.3), + "SE_PRM_NENR": get_random_float_or_none(0, 100, 0.3), + "SE_PRM_NENR_FE": get_random_float_or_none(0, 100, 0.3), + "SE_PRM_NENR_MA": get_random_float_or_none(0, 100, 0.3), + "SE_SEC_ENRR": get_random_float_or_none(0, 100, 0.3), + "SE_SEC_ENRR_FE": get_random_float_or_none(0, 100, 0.3), + "SE_SEC_ENRR_MA": get_random_float_or_none(0, 100, 0.3), + "SE_SEC_NENR": get_random_float_or_none(0, 100, 0.3), + "SE_SEC_NENR_FE": get_random_float_or_none(0, 100, 0.3), + "SE_SEC_NENR_MA": get_random_float_or_none(0, 100, 0.3), + "SE_TER_ENRR": get_random_float_or_none(0, 100, 0.3), + "SE_TER_ENRR_FE": get_random_float_or_none(0, 100, 0.3), + "SE_XPD_TOTL_GD_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_ANM_CHLD_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_ANM_NPRG_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_CON_1524_FE_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_CON_1524_MA_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_CON_AIDS_FE_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_CON_AIDS_MA_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_DTH_COMM_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_DTH_IMRT": get_random_float_or_none(0, 100, 0.3), + "SH_DTH_INJR_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_DTH_MORT": get_random_float_or_none(0, 100, 0.3), + "SH_DTH_NCOM_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_DTH_NMRT": get_random_float_or_none(0, 100, 0.3), + 
"SH_DYN_AIDS": get_random_float_or_none(0, 100, 0.3), + "SH_DYN_AIDS_DH": get_random_float_or_none(0, 100, 0.3), + "SH_DYN_AIDS_FE_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_DYN_AIDS_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_DYN_MORT": get_random_float_or_none(0, 100, 0.3), + "SH_DYN_MORT_FE": get_random_float_or_none(0, 100, 0.3), + "SH_DYN_MORT_MA": get_random_float_or_none(0, 100, 0.3), + "SH_DYN_NMRT": get_random_float_or_none(0, 100, 0.3), + "SH_FPL_SATI_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_H2O_SAFE_RU_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_H2O_SAFE_UR_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_H2O_SAFE_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_HIV_0014": get_random_float_or_none(0, 100, 0.3), + "SH_HIV_1524_FE_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_HIV_1524_KW_FE_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_HIV_1524_KW_MA_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_HIV_1524_MA_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_HIV_ARTC_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_HIV_KNOW_FE_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_HIV_KNOW_MA_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_HIV_ORPH": get_random_float_or_none(0, 100, 0.3), + "SH_HIV_TOTL": get_random_float_or_none(0, 100, 0.3), + "SH_IMM_HEPB": get_random_float_or_none(0, 100, 0.3), + "SH_IMM_HIB3": get_random_float_or_none(0, 100, 0.3), + "SH_IMM_IBCG": get_random_float_or_none(0, 100, 0.3), + "SH_IMM_IDPT": get_random_float_or_none(0, 100, 0.3), + "SH_IMM_MEAS": get_random_float_or_none(0, 100, 0.3), + "SH_IMM_POL3": get_random_float_or_none(0, 100, 0.3), + "SH_MED_BEDS_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_MED_CMHW_P3": get_random_float_or_none(0, 100, 0.3), + "SH_MED_NUMW_P3": get_random_float_or_none(0, 100, 0.3), + "SH_MED_PHYS_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_MLR_NETS_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_MLR_PREG_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_MLR_SPF2_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_MLR_TRET_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_MMR_DTHS": get_random_float_or_none(0, 100, 0.3), + "SH_MMR_LEVE": get_random_float_or_none(0, 100, 0.3), + "SH_MMR_RISK": get_random_float_or_none(0, 100, 0.3), + "SH_MMR_RISK_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_MMR_WAGE_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_PRG_ANEM": get_random_float_or_none(0, 100, 0.3), + "SH_PRG_ARTC_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_PRG_SYPH_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_PRV_SMOK_FE": get_random_float_or_none(0, 100, 0.3), + "SH_PRV_SMOK_MA": get_random_float_or_none(0, 100, 0.3), + "SH_STA_ACSN": get_random_float_or_none(0, 100, 0.3), + "SH_STA_ACSN_RU": get_random_float_or_none(0, 100, 0.3), + "SH_STA_ACSN_UR": get_random_float_or_none(0, 100, 0.3), + "SH_STA_ANV4_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_STA_ANVC_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_STA_ARIC_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_STA_BFED_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_STA_BRTC_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_STA_BRTW_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_STA_DIAB_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_STA_IYCF_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_STA_MALN_FE_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_STA_MALN_MA_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_STA_MALN_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_STA_MALR": 
get_random_float_or_none(0, 100, 0.3), + "SH_STA_MMRT": get_random_float_or_none(0, 100, 0.3), + "SH_STA_MMRT_NE": get_random_float_or_none(0, 100, 0.3), + "SH_STA_ORCF_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_STA_ORTH": get_random_float_or_none(0, 100, 0.3), + "SH_STA_OW15_FE_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_STA_OW15_MA_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_STA_OW15_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_STA_OWGH_FE_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_STA_OWGH_MA_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_STA_OWGH_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_STA_PNVC_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_STA_STNT_FE_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_STA_STNT_MA_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_STA_STNT_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_STA_WAST_FE_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_STA_WAST_MA_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_STA_WAST_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_SVR_WAST_FE_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_SVR_WAST_MA_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_SVR_WAST_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_TBS_CURE_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_TBS_DTEC_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_TBS_INCD": get_random_float_or_none(0, 100, 0.3), + "SH_TBS_MORT": get_random_float_or_none(0, 100, 0.3), + "SH_TBS_PREV": get_random_float_or_none(0, 100, 0.3), + "SH_VAC_TTNS_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_XPD_EXTR_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_XPD_OOPC_TO_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_XPD_OOPC_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_XPD_PCAP": get_random_float_or_none(0, 100, 0.3), + "SH_XPD_PCAP_PP_KD": get_random_float_or_none(0, 100, 0.3), + "SH_XPD_PRIV": get_random_float_or_none(0, 100, 0.3), + "SH_XPD_PRIV_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_XPD_PUBL": get_random_float_or_none(0, 100, 0.3), + "SH_XPD_PUBL_GX_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_XPD_PUBL_ZS": get_random_float_or_none(0, 100, 0.3), + "SH_XPD_TOTL_CD": get_random_float_or_none(0, 100, 0.3), + "SH_XPD_TOTL_ZS": get_random_float_or_none(0, 100, 0.3), + "SI_POV_NAHC": get_random_float_or_none(0, 100, 0.3), + "SI_POV_RUHC": get_random_float_or_none(0, 100, 0.3), + "SI_POV_URHC": get_random_float_or_none(0, 100, 0.3), + "SL_EMP_INSV_FE_ZS": get_random_float_or_none(0, 100, 0.3), + "SL_TLF_TOTL_FE_ZS": get_random_float_or_none(0, 100, 0.3), + "SL_TLF_TOTL_IN": get_random_float_or_none(0, 100, 0.3), + "SL_UEM_TOTL_FE_ZS": get_random_float_or_none(0, 100, 0.3), + "SL_UEM_TOTL_MA_ZS": get_random_float_or_none(0, 100, 0.3), + "SL_UEM_TOTL_ZS": get_random_float_or_none(0, 100, 0.3), + "SM_POP_NETM": get_random_float_or_none(0, 100, 0.3), + "SN_ITK_DEFC": get_random_float_or_none(0, 100, 0.3), + "SN_ITK_DEFC_ZS": get_random_float_or_none(0, 100, 0.3), + "SN_ITK_SALT_ZS": get_random_float_or_none(0, 100, 0.3), + "SN_ITK_VITA_ZS": get_random_float_or_none(0, 100, 0.3), + "SP_ADO_TFRT": get_random_float_or_none(0, 100, 0.3), + "SP_DYN_AMRT_FE": get_random_float_or_none(0, 100, 0.3), + "SP_DYN_AMRT_MA": get_random_float_or_none(0, 100, 0.3), + "SP_DYN_CBRT_IN": get_random_float_or_none(0, 100, 0.3), + "SP_DYN_CDRT_IN": get_random_float_or_none(0, 100, 0.3), + "SP_DYN_CONU_ZS": get_random_float_or_none(0, 100, 0.3), + "SP_DYN_IMRT_FE_IN": get_random_float_or_none(0, 100, 0.3), + "SP_DYN_IMRT_IN": 
get_random_float_or_none(0, 100, 0.3), + "SP_DYN_IMRT_MA_IN": get_random_float_or_none(0, 100, 0.3), + "SP_DYN_LE00_FE_IN": get_random_float_or_none(0, 100, 0.3), + "SP_DYN_LE00_IN": get_random_float_or_none(0, 100, 0.3), + "SP_DYN_LE00_MA_IN": get_random_float_or_none(0, 100, 0.3), + "SP_DYN_SMAM_FE": get_random_float_or_none(0, 100, 0.3), + "SP_DYN_SMAM_MA": get_random_float_or_none(0, 100, 0.3), + "SP_DYN_TFRT_IN": get_random_float_or_none(0, 100, 0.3), + "SP_DYN_TO65_FE_ZS": get_random_float_or_none(0, 100, 0.3), + "SP_DYN_TO65_MA_ZS": get_random_float_or_none(0, 100, 0.3), + "SP_DYN_WFRT": get_random_float_or_none(0, 100, 0.3), + "SP_HOU_FEMA_ZS": get_random_float_or_none(0, 100, 0.3), + "SP_MTR_1519_ZS": get_random_float_or_none(0, 100, 0.3), + "SP_POP_0004_FE": get_random_float_or_none(0, 100, 0.3), + "SP_POP_0004_FE_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_0004_MA": get_random_float_or_none(0, 100, 0.3), + "SP_POP_0004_MA_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_0014_FE_ZS": get_random_float_or_none(0, 100, 0.3), + "SP_POP_0014_MA_ZS": get_random_float_or_none(0, 100, 0.3), + "SP_POP_0014_TO": get_random_float_or_none(0, 100, 0.3), + "SP_POP_0014_TO_ZS": get_random_float_or_none(0, 100, 0.3), + "SP_POP_0509_FE": get_random_float_or_none(0, 100, 0.3), + "SP_POP_0509_FE_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_0509_MA": get_random_float_or_none(0, 100, 0.3), + "SP_POP_0509_MA_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_1014_FE": get_random_float_or_none(0, 100, 0.3), + "SP_POP_1014_FE_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_1014_MA": get_random_float_or_none(0, 100, 0.3), + "SP_POP_1014_MA_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_1519_FE": get_random_float_or_none(0, 100, 0.3), + "SP_POP_1519_FE_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_1519_MA": get_random_float_or_none(0, 100, 0.3), + "SP_POP_1519_MA_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_1564_FE_ZS": get_random_float_or_none(0, 100, 0.3), + "SP_POP_1564_MA_ZS": get_random_float_or_none(0, 100, 0.3), + "SP_POP_1564_TO": get_random_float_or_none(0, 100, 0.3), + "SP_POP_1564_TO_ZS": get_random_float_or_none(0, 100, 0.3), + "SP_POP_2024_FE": get_random_float_or_none(0, 100, 0.3), + "SP_POP_2024_FE_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_2024_MA": get_random_float_or_none(0, 100, 0.3), + "SP_POP_2024_MA_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_2529_FE": get_random_float_or_none(0, 100, 0.3), + "SP_POP_2529_FE_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_2529_MA": get_random_float_or_none(0, 100, 0.3), + "SP_POP_2529_MA_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_3034_FE": get_random_float_or_none(0, 100, 0.3), + "SP_POP_3034_FE_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_3034_MA": get_random_float_or_none(0, 100, 0.3), + "SP_POP_3034_MA_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_3539_FE": get_random_float_or_none(0, 100, 0.3), + "SP_POP_3539_FE_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_3539_MA": get_random_float_or_none(0, 100, 0.3), + "SP_POP_3539_MA_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_4044_FE": get_random_float_or_none(0, 100, 0.3), + "SP_POP_4044_FE_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_4044_MA": get_random_float_or_none(0, 100, 0.3), + "SP_POP_4044_MA_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_4549_FE": get_random_float_or_none(0, 100, 0.3), + "SP_POP_4549_FE_5Y": get_random_float_or_none(0, 100, 0.3), + 
"SP_POP_4549_MA": get_random_float_or_none(0, 100, 0.3), + "SP_POP_4549_MA_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_5054_FE": get_random_float_or_none(0, 100, 0.3), + "SP_POP_5054_FE_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_5054_MA": get_random_float_or_none(0, 100, 0.3), + "SP_POP_5054_MA_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_5559_FE": get_random_float_or_none(0, 100, 0.3), + "SP_POP_5559_FE_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_5559_MA": get_random_float_or_none(0, 100, 0.3), + "SP_POP_5559_MA_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_6064_FE": get_random_float_or_none(0, 100, 0.3), + "SP_POP_6064_FE_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_6064_MA": get_random_float_or_none(0, 100, 0.3), + "SP_POP_6064_MA_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_6569_FE": get_random_float_or_none(0, 100, 0.3), + "SP_POP_6569_FE_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_6569_MA": get_random_float_or_none(0, 100, 0.3), + "SP_POP_6569_MA_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_65UP_FE_ZS": get_random_float_or_none(0, 100, 0.3), + "SP_POP_65UP_MA_ZS": get_random_float_or_none(0, 100, 0.3), + "SP_POP_65UP_TO": get_random_float_or_none(0, 100, 0.3), + "SP_POP_65UP_TO_ZS": get_random_float_or_none(0, 100, 0.3), + "SP_POP_7074_FE": get_random_float_or_none(0, 100, 0.3), + "SP_POP_7074_FE_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_7074_MA": get_random_float_or_none(0, 100, 0.3), + "SP_POP_7074_MA_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_7579_FE": get_random_float_or_none(0, 100, 0.3), + "SP_POP_7579_FE_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_7579_MA": get_random_float_or_none(0, 100, 0.3), + "SP_POP_7579_MA_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_80UP_FE": get_random_float_or_none(0, 100, 0.3), + "SP_POP_80UP_FE_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_80UP_MA": get_random_float_or_none(0, 100, 0.3), + "SP_POP_80UP_MA_5Y": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG00_FE_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG00_MA_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG01_FE_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG01_MA_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG02_FE_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG02_MA_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG03_FE_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG03_MA_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG04_FE_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG04_MA_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG05_FE_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG05_MA_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG06_FE_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG06_MA_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG07_FE_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG07_MA_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG08_FE_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG08_MA_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG09_FE_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG09_MA_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG10_FE_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG10_MA_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG11_FE_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG11_MA_IN": get_random_float_or_none(0, 100, 0.3), + 
"SP_POP_AG12_FE_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG12_MA_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG13_FE_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG13_MA_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG14_FE_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG14_MA_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG15_FE_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG15_MA_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG16_FE_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG16_MA_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG17_FE_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG17_MA_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG18_FE_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG18_MA_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG19_FE_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG19_MA_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG20_FE_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG20_MA_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG21_FE_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG21_MA_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG22_FE_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG22_MA_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG23_FE_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG23_MA_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG24_FE_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG24_MA_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG25_FE_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_AG25_MA_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_BRTH_MF": get_random_float_or_none(0, 100, 0.3), + "SP_POP_DPND": get_random_float_or_none(0, 100, 0.3), + "SP_POP_DPND_OL": get_random_float_or_none(0, 100, 0.3), + "SP_POP_DPND_YG": get_random_float_or_none(0, 100, 0.3), + "SP_POP_GROW": get_random_float_or_none(0, 100, 0.3), + "SP_POP_TOTL": get_random_float_or_none(0, 100, 0.3), + "SP_POP_TOTL_FE_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_TOTL_FE_ZS": get_random_float_or_none(0, 100, 0.3), + "SP_POP_TOTL_MA_IN": get_random_float_or_none(0, 100, 0.3), + "SP_POP_TOTL_MA_ZS": get_random_float_or_none(0, 100, 0.3), + "SP_REG_BRTH_RU_ZS": get_random_float_or_none(0, 100, 0.3), + "SP_REG_BRTH_UR_ZS": get_random_float_or_none(0, 100, 0.3), + "SP_REG_BRTH_ZS": get_random_float_or_none(0, 100, 0.3), + "SP_REG_DTHS_ZS": get_random_float_or_none(0, 100, 0.3), + "SP_RUR_TOTL": get_random_float_or_none(0, 100, 0.3), + "SP_RUR_TOTL_ZG": get_random_float_or_none(0, 100, 0.3), + "SP_RUR_TOTL_ZS": get_random_float_or_none(0, 100, 0.3), + "SP_URB_GROW": get_random_float_or_none(0, 100, 0.3), + "SP_URB_TOTL": get_random_float_or_none(0, 100, 0.3), + "SP_URB_TOTL_IN_ZS": get_random_float_or_none(0, 100, 0.3), + "SP_UWT_TFRT": get_random_float_or_none(0, 100, 0.3), + } + ) + + return data + + +def get_random_float_or_none(min_value, max_value, none_probability): + if random() < none_probability: + return None + else: + return uniform(min_value, max_value) diff --git a/tests/integration_tests/form_tests.py b/tests/integration_tests/form_tests.py new file mode 100644 index 0000000000000..078a9866ee975 --- /dev/null +++ b/tests/integration_tests/form_tests.py @@ -0,0 +1,36 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from wtforms.form import Form + +from superset.forms import CommaSeparatedListField, filter_not_empty_values +from tests.integration_tests.base_tests import SupersetTestCase + + +class TestForm(SupersetTestCase): + def test_comma_separated_list_field(self): + field = CommaSeparatedListField().bind(Form(), "foo") + field.process_formdata([""]) + self.assertEqual(field.data, [""]) + + field.process_formdata(["a,comma,separated,list"]) + self.assertEqual(field.data, ["a", "comma", "separated", "list"]) + + def test_filter_not_empty_values(self): + self.assertEqual(filter_not_empty_values(None), None) + self.assertEqual(filter_not_empty_values([]), None) + self.assertEqual(filter_not_empty_values([""]), None) + self.assertEqual(filter_not_empty_values(["hi"]), ["hi"]) diff --git a/tests/integration_tests/import_export_tests.py b/tests/integration_tests/import_export_tests.py new file mode 100644 index 0000000000000..5bbc985a36672 --- /dev/null +++ b/tests/integration_tests/import_export_tests.py @@ -0,0 +1,676 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# isort:skip_file +"""Unit tests for Superset""" +import json +import unittest +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) + +import pytest +from flask import g +from sqlalchemy.orm.session import make_transient + +from tests.integration_tests.fixtures.energy_dashboard import ( + load_energy_table_with_slice, + load_energy_table_data, +) +from tests.integration_tests.test_app import app +from superset.dashboards.commands.importers.v0 import decode_dashboards +from superset import db, security_manager + +from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn +from superset.dashboards.commands.importers.v0 import import_chart, import_dashboard +from superset.datasets.commands.importers.v0 import import_dataset +from superset.models.dashboard import Dashboard +from superset.models.slice import Slice +from superset.utils.core import DatasourceType, get_example_default_schema +from superset.utils.database import get_example_database + +from tests.integration_tests.fixtures.world_bank_dashboard import ( + load_world_bank_dashboard_with_slices, + load_world_bank_data, +) +from .base_tests import SupersetTestCase + + +def delete_imports(): + with app.app_context(): + # Imported data clean up + session = db.session + for slc in session.query(Slice): + if "remote_id" in slc.params_dict: + session.delete(slc) + for dash in session.query(Dashboard): + if "remote_id" in dash.params_dict: + session.delete(dash) + for table in session.query(SqlaTable): + if "remote_id" in table.params_dict: + session.delete(table) + session.commit() + + +@pytest.fixture(autouse=True, scope="module") +def clean_imports(): + yield + delete_imports() + + +class TestImportExport(SupersetTestCase): + """Testing export import functionality for dashboards""" + + def create_slice( + self, + name, + ds_id=None, + id=None, + db_name="examples", + table_name="wb_health_population", + schema=None, + ): + params = { + "num_period_compare": "10", + "remote_id": id, + "datasource_name": table_name, + "database_name": db_name, + "schema": schema, + # Test for trailing commas + "metrics": ["sum__signup_attempt_email", "sum__signup_attempt_facebook"], + } + + if table_name and not ds_id: + table = self.get_table(schema=schema, name=table_name) + if table: + ds_id = table.id + + return Slice( + slice_name=name, + datasource_type=DatasourceType.TABLE, + viz_type="bubble", + params=json.dumps(params), + datasource_id=ds_id, + id=id, + ) + + def create_dashboard(self, title, id=0, slcs=[]): + json_metadata = {"remote_id": id} + return Dashboard( + id=id, + dashboard_title=title, + slices=slcs, + position_json='{"size_y": 2, "size_x": 2}', + slug="{}_imported".format(title.lower()), + json_metadata=json.dumps(json_metadata), + ) + + def create_table(self, name, schema=None, id=0, cols_names=[], metric_names=[]): + params = {"remote_id": id, "database_name": "examples"} + table = SqlaTable( + id=id, schema=schema, table_name=name, params=json.dumps(params) + ) + for col_name in cols_names: + table.columns.append(TableColumn(column_name=col_name)) + for metric_name in metric_names: + table.metrics.append(SqlMetric(metric_name=metric_name, expression="")) + return table + + def get_slice(self, slc_id): + return db.session.query(Slice).filter_by(id=slc_id).first() + + def get_slice_by_name(self, name): + return db.session.query(Slice).filter_by(slice_name=name).first() + + def get_dash(self, dash_id): + return 
db.session.query(Dashboard).filter_by(id=dash_id).first() + + def assert_dash_equals( + self, expected_dash, actual_dash, check_position=True, check_slugs=True + ): + if check_slugs: + self.assertEqual(expected_dash.slug, actual_dash.slug) + self.assertEqual(expected_dash.dashboard_title, actual_dash.dashboard_title) + self.assertEqual(len(expected_dash.slices), len(actual_dash.slices)) + expected_slices = sorted(expected_dash.slices, key=lambda s: s.slice_name or "") + actual_slices = sorted(actual_dash.slices, key=lambda s: s.slice_name or "") + for e_slc, a_slc in zip(expected_slices, actual_slices): + self.assert_slice_equals(e_slc, a_slc) + if check_position: + self.assertEqual(expected_dash.position_json, actual_dash.position_json) + + def assert_table_equals(self, expected_ds, actual_ds): + self.assertEqual(expected_ds.table_name, actual_ds.table_name) + self.assertEqual(expected_ds.main_dttm_col, actual_ds.main_dttm_col) + self.assertEqual(expected_ds.schema, actual_ds.schema) + self.assertEqual(len(expected_ds.metrics), len(actual_ds.metrics)) + self.assertEqual(len(expected_ds.columns), len(actual_ds.columns)) + self.assertEqual( + set([c.column_name for c in expected_ds.columns]), + set([c.column_name for c in actual_ds.columns]), + ) + self.assertEqual( + set([m.metric_name for m in expected_ds.metrics]), + set([m.metric_name for m in actual_ds.metrics]), + ) + + def assert_datasource_equals(self, expected_ds, actual_ds): + self.assertEqual(expected_ds.datasource_name, actual_ds.datasource_name) + self.assertEqual(expected_ds.main_dttm_col, actual_ds.main_dttm_col) + self.assertEqual(len(expected_ds.metrics), len(actual_ds.metrics)) + self.assertEqual(len(expected_ds.columns), len(actual_ds.columns)) + self.assertEqual( + set([c.column_name for c in expected_ds.columns]), + set([c.column_name for c in actual_ds.columns]), + ) + self.assertEqual( + set([m.metric_name for m in expected_ds.metrics]), + set([m.metric_name for m in actual_ds.metrics]), + ) + + def assert_slice_equals(self, expected_slc, actual_slc): + # to avoid bad slice data (no slice_name) + expected_slc_name = expected_slc.slice_name or "" + actual_slc_name = actual_slc.slice_name or "" + self.assertEqual(expected_slc_name, actual_slc_name) + self.assertEqual(expected_slc.datasource_type, actual_slc.datasource_type) + self.assertEqual(expected_slc.viz_type, actual_slc.viz_type) + exp_params = json.loads(expected_slc.params) + actual_params = json.loads(actual_slc.params) + diff_params_keys = ( + "schema", + "database_name", + "datasource_name", + "remote_id", + "import_time", + ) + for k in diff_params_keys: + if k in actual_params: + actual_params.pop(k) + if k in exp_params: + exp_params.pop(k) + self.assertEqual(exp_params, actual_params) + + def assert_only_exported_slc_fields(self, expected_dash, actual_dash): + """only exported json has this params + imported/created dashboard has relationships to other models instead + """ + expected_slices = sorted(expected_dash.slices, key=lambda s: s.slice_name or "") + actual_slices = sorted(actual_dash.slices, key=lambda s: s.slice_name or "") + for e_slc, a_slc in zip(expected_slices, actual_slices): + params = a_slc.params_dict + self.assertEqual(e_slc.datasource.name, params["datasource_name"]) + self.assertEqual(e_slc.datasource.schema, params["schema"]) + self.assertEqual(e_slc.datasource.database.name, params["database_name"]) + + @unittest.skip("Schema needs to be updated") + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def 
test_export_1_dashboard(self): + self.login("admin") + birth_dash = self.get_dash_by_slug("births") + id_ = birth_dash.id + export_dash_url = f"/dashboard/export_dashboards_form?id={id_}&action=go" + resp = self.client.get(export_dash_url) + exported_dashboards = json.loads( + resp.data.decode("utf-8"), object_hook=decode_dashboards + )["dashboards"] + + birth_dash = self.get_dash_by_slug("births") + self.assert_only_exported_slc_fields(birth_dash, exported_dashboards[0]) + self.assert_dash_equals(birth_dash, exported_dashboards[0]) + self.assertEqual( + id_, + json.loads( + exported_dashboards[0].json_metadata, object_hook=decode_dashboards + )["remote_id"], + ) + + exported_tables = json.loads( + resp.data.decode("utf-8"), object_hook=decode_dashboards + )["datasources"] + self.assertEqual(1, len(exported_tables)) + self.assert_table_equals(self.get_table(name="birth_names"), exported_tables[0]) + + @unittest.skip("Schema needs to be updated") + @pytest.mark.usefixtures( + "load_world_bank_dashboard_with_slices", + "load_birth_names_dashboard_with_slices", + ) + def test_export_2_dashboards(self): + self.login("admin") + birth_dash = self.get_dash_by_slug("births") + world_health_dash = self.get_dash_by_slug("world_health") + export_dash_url = ( + "/dashboard/export_dashboards_form?id={}&id={}&action=go".format( + birth_dash.id, world_health_dash.id + ) + ) + resp = self.client.get(export_dash_url) + resp_data = json.loads(resp.data.decode("utf-8"), object_hook=decode_dashboards) + exported_dashboards = sorted( + resp_data.get("dashboards"), key=lambda d: d.dashboard_title + ) + self.assertEqual(2, len(exported_dashboards)) + + birth_dash = self.get_dash_by_slug("births") + self.assert_only_exported_slc_fields(birth_dash, exported_dashboards[0]) + self.assert_dash_equals(birth_dash, exported_dashboards[0]) + self.assertEqual( + birth_dash.id, json.loads(exported_dashboards[0].json_metadata)["remote_id"] + ) + + world_health_dash = self.get_dash_by_slug("world_health") + self.assert_only_exported_slc_fields(world_health_dash, exported_dashboards[1]) + self.assert_dash_equals(world_health_dash, exported_dashboards[1]) + self.assertEqual( + world_health_dash.id, + json.loads(exported_dashboards[1].json_metadata)["remote_id"], + ) + + exported_tables = sorted( + resp_data.get("datasources"), key=lambda t: t.table_name + ) + self.assertEqual(2, len(exported_tables)) + self.assert_table_equals(self.get_table(name="birth_names"), exported_tables[0]) + self.assert_table_equals( + self.get_table(name="wb_health_population"), exported_tables[1] + ) + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def test_import_1_slice(self): + expected_slice = self.create_slice( + "Import Me", id=10001, schema=get_example_default_schema() + ) + slc_id = import_chart(expected_slice, None, import_time=1989) + slc = self.get_slice(slc_id) + self.assertEqual(slc.datasource.perm, slc.perm) + self.assert_slice_equals(expected_slice, slc) + + table_id = self.get_table(name="wb_health_population").id + self.assertEqual(table_id, self.get_slice(slc_id).datasource_id) + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def test_import_2_slices_for_same_table(self): + schema = get_example_default_schema() + table_id = self.get_table(name="wb_health_population").id + slc_1 = self.create_slice( + "Import Me 1", ds_id=table_id, id=10002, schema=schema + ) + slc_id_1 = import_chart(slc_1, None) + slc_2 = self.create_slice( + "Import Me 2", ds_id=table_id, id=10003, schema=schema + ) + 
slc_id_2 = import_chart(slc_2, None) + + imported_slc_1 = self.get_slice(slc_id_1) + imported_slc_2 = self.get_slice(slc_id_2) + self.assertEqual(table_id, imported_slc_1.datasource_id) + self.assert_slice_equals(slc_1, imported_slc_1) + self.assertEqual(imported_slc_1.datasource.perm, imported_slc_1.perm) + + self.assertEqual(table_id, imported_slc_2.datasource_id) + self.assert_slice_equals(slc_2, imported_slc_2) + self.assertEqual(imported_slc_2.datasource.perm, imported_slc_2.perm) + + def test_import_slices_override(self): + schema = get_example_default_schema() + slc = self.create_slice("Import Me New", id=10005, schema=schema) + slc_1_id = import_chart(slc, None, import_time=1990) + slc.slice_name = "Import Me New" + imported_slc_1 = self.get_slice(slc_1_id) + slc_2 = self.create_slice("Import Me New", id=10005, schema=schema) + slc_2_id = import_chart(slc_2, imported_slc_1, import_time=1990) + self.assertEqual(slc_1_id, slc_2_id) + imported_slc_2 = self.get_slice(slc_2_id) + self.assert_slice_equals(slc, imported_slc_2) + + def test_import_empty_dashboard(self): + empty_dash = self.create_dashboard("empty_dashboard", id=10001) + imported_dash_id = import_dashboard(empty_dash, import_time=1989) + imported_dash = self.get_dash(imported_dash_id) + self.assert_dash_equals(empty_dash, imported_dash, check_position=False) + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def test_import_dashboard_1_slice(self): + slc = self.create_slice( + "health_slc", id=10006, schema=get_example_default_schema() + ) + dash_with_1_slice = self.create_dashboard( + "dash_with_1_slice", slcs=[slc], id=10002 + ) + dash_with_1_slice.position_json = """ + {{"DASHBOARD_VERSION_KEY": "v2", + "DASHBOARD_CHART_TYPE-{0}": {{ + "type": "CHART", + "id": {0}, + "children": [], + "meta": {{ + "width": 4, + "height": 50, + "chartId": {0} + }} + }} + }} + """.format( + slc.id + ) + imported_dash_id = import_dashboard(dash_with_1_slice, import_time=1990) + imported_dash = self.get_dash(imported_dash_id) + + expected_dash = self.create_dashboard("dash_with_1_slice", slcs=[slc], id=10002) + make_transient(expected_dash) + self.assert_dash_equals( + expected_dash, imported_dash, check_position=False, check_slugs=False + ) + self.assertEqual( + {"remote_id": 10002, "import_time": 1990}, + json.loads(imported_dash.json_metadata), + ) + + expected_position = dash_with_1_slice.position + # new slice id (auto-incremental) assigned on insert + # id from json is used only for updating position with new id + meta = expected_position["DASHBOARD_CHART_TYPE-10006"]["meta"] + meta["chartId"] = imported_dash.slices[0].id + self.assertEqual(expected_position, imported_dash.position) + + @pytest.mark.usefixtures("load_energy_table_with_slice") + def test_import_dashboard_2_slices(self): + schema = get_example_default_schema() + e_slc = self.create_slice( + "e_slc", id=10007, table_name="energy_usage", schema=schema + ) + b_slc = self.create_slice( + "b_slc", id=10008, table_name="birth_names", schema=schema + ) + dash_with_2_slices = self.create_dashboard( + "dash_with_2_slices", slcs=[e_slc, b_slc], id=10003 + ) + dash_with_2_slices.json_metadata = json.dumps( + { + "remote_id": 10003, + "expanded_slices": { + "{}".format(e_slc.id): True, + "{}".format(b_slc.id): False, + }, + # mocked filter_scope metadata + "filter_scopes": { + str(e_slc.id): { + "region": {"scope": ["ROOT_ID"], "immune": [b_slc.id]} + } + }, + } + ) + + imported_dash_id = import_dashboard(dash_with_2_slices, import_time=1991) + imported_dash = 
self.get_dash(imported_dash_id) + + expected_dash = self.create_dashboard( + "dash_with_2_slices", slcs=[e_slc, b_slc], id=10003 + ) + make_transient(expected_dash) + self.assert_dash_equals( + imported_dash, expected_dash, check_position=False, check_slugs=False + ) + i_e_slc = self.get_slice_by_name("e_slc") + i_b_slc = self.get_slice_by_name("b_slc") + expected_json_metadata = { + "remote_id": 10003, + "import_time": 1991, + "filter_scopes": { + str(i_e_slc.id): { + "region": {"scope": ["ROOT_ID"], "immune": [i_b_slc.id]} + } + }, + "expanded_slices": { + "{}".format(i_e_slc.id): True, + "{}".format(i_b_slc.id): False, + }, + } + self.assertEqual( + expected_json_metadata, json.loads(imported_dash.json_metadata) + ) + + @pytest.mark.usefixtures("load_energy_table_with_slice") + def test_import_override_dashboard_2_slices(self): + schema = get_example_default_schema() + e_slc = self.create_slice( + "e_slc", id=10009, table_name="energy_usage", schema=schema + ) + b_slc = self.create_slice( + "b_slc", id=10010, table_name="birth_names", schema=schema + ) + dash_to_import = self.create_dashboard( + "override_dashboard", slcs=[e_slc, b_slc], id=10004 + ) + imported_dash_id_1 = import_dashboard(dash_to_import, import_time=1992) + + # create new instances of the slices + e_slc = self.create_slice( + "e_slc", id=10009, table_name="energy_usage", schema=schema + ) + b_slc = self.create_slice( + "b_slc", id=10010, table_name="birth_names", schema=schema + ) + c_slc = self.create_slice( + "c_slc", id=10011, table_name="birth_names", schema=schema + ) + dash_to_import_override = self.create_dashboard( + "override_dashboard_new", slcs=[e_slc, b_slc, c_slc], id=10004 + ) + imported_dash_id_2 = import_dashboard(dash_to_import_override, import_time=1992) + + # override doesn't change the id + self.assertEqual(imported_dash_id_1, imported_dash_id_2) + expected_dash = self.create_dashboard( + "override_dashboard_new", slcs=[e_slc, b_slc, c_slc], id=10004 + ) + make_transient(expected_dash) + imported_dash = self.get_dash(imported_dash_id_2) + self.assert_dash_equals( + expected_dash, imported_dash, check_position=False, check_slugs=False + ) + self.assertEqual( + {"remote_id": 10004, "import_time": 1992}, + json.loads(imported_dash.json_metadata), + ) + + def test_import_new_dashboard_slice_reset_ownership(self): + admin_user = security_manager.find_user(username="admin") + self.assertTrue(admin_user) + gamma_user = security_manager.find_user(username="gamma") + self.assertTrue(gamma_user) + g.user = gamma_user + + dash_with_1_slice = self._create_dashboard_for_import(id_=10200) + # set another user as an owner of importing dashboard + dash_with_1_slice.created_by = admin_user + dash_with_1_slice.changed_by = admin_user + dash_with_1_slice.owners = [admin_user] + + imported_dash_id = import_dashboard(dash_with_1_slice) + imported_dash = self.get_dash(imported_dash_id) + self.assertEqual(imported_dash.created_by, gamma_user) + self.assertEqual(imported_dash.changed_by, gamma_user) + self.assertEqual(imported_dash.owners, [gamma_user]) + + imported_slc = imported_dash.slices[0] + self.assertEqual(imported_slc.created_by, gamma_user) + self.assertEqual(imported_slc.changed_by, gamma_user) + self.assertEqual(imported_slc.owners, [gamma_user]) + + def test_import_override_dashboard_slice_reset_ownership(self): + admin_user = security_manager.find_user(username="admin") + self.assertTrue(admin_user) + gamma_user = security_manager.find_user(username="gamma") + self.assertTrue(gamma_user) + g.user = gamma_user 
+ + dash_with_1_slice = self._create_dashboard_for_import(id_=10300) + + imported_dash_id = import_dashboard(dash_with_1_slice) + imported_dash = self.get_dash(imported_dash_id) + self.assertEqual(imported_dash.created_by, gamma_user) + self.assertEqual(imported_dash.changed_by, gamma_user) + self.assertEqual(imported_dash.owners, [gamma_user]) + + imported_slc = imported_dash.slices[0] + self.assertEqual(imported_slc.created_by, gamma_user) + self.assertEqual(imported_slc.changed_by, gamma_user) + self.assertEqual(imported_slc.owners, [gamma_user]) + + # re-import with another user shouldn't change the permissions + g.user = admin_user + + dash_with_1_slice = self._create_dashboard_for_import(id_=10300) + + imported_dash_id = import_dashboard(dash_with_1_slice) + imported_dash = self.get_dash(imported_dash_id) + self.assertEqual(imported_dash.created_by, gamma_user) + self.assertEqual(imported_dash.changed_by, gamma_user) + self.assertEqual(imported_dash.owners, [gamma_user]) + + imported_slc = imported_dash.slices[0] + self.assertEqual(imported_slc.created_by, gamma_user) + self.assertEqual(imported_slc.changed_by, gamma_user) + self.assertEqual(imported_slc.owners, [gamma_user]) + + def _create_dashboard_for_import(self, id_=10100): + slc = self.create_slice( + "health_slc" + str(id_), id=id_ + 1, schema=get_example_default_schema() + ) + dash_with_1_slice = self.create_dashboard( + "dash_with_1_slice" + str(id_), slcs=[slc], id=id_ + 2 + ) + dash_with_1_slice.position_json = """ + {{"DASHBOARD_VERSION_KEY": "v2", + "DASHBOARD_CHART_TYPE-{0}": {{ + "type": "CHART", + "id": {0}, + "children": [], + "meta": {{ + "width": 4, + "height": 50, + "chartId": {0} + }} + }} + }} + """.format( + slc.id + ) + return dash_with_1_slice + + def test_import_table_no_metadata(self): + schema = get_example_default_schema() + db_id = get_example_database().id + table = self.create_table("pure_table", id=10001, schema=schema) + imported_id = import_dataset(table, db_id, import_time=1989) + imported = self.get_table_by_id(imported_id) + self.assert_table_equals(table, imported) + + def test_import_table_1_col_1_met(self): + schema = get_example_default_schema() + table = self.create_table( + "table_1_col_1_met", + id=10002, + cols_names=["col1"], + metric_names=["metric1"], + schema=schema, + ) + db_id = get_example_database().id + imported_id = import_dataset(table, db_id, import_time=1990) + imported = self.get_table_by_id(imported_id) + self.assert_table_equals(table, imported) + self.assertEqual( + {"remote_id": 10002, "import_time": 1990, "database_name": "examples"}, + json.loads(imported.params), + ) + + def test_import_table_2_col_2_met(self): + schema = get_example_default_schema() + table = self.create_table( + "table_2_col_2_met", + id=10003, + cols_names=["c1", "c2"], + metric_names=["m1", "m2"], + schema=schema, + ) + db_id = get_example_database().id + imported_id = import_dataset(table, db_id, import_time=1991) + + imported = self.get_table_by_id(imported_id) + self.assert_table_equals(table, imported) + + def test_import_table_override(self): + schema = get_example_default_schema() + table = self.create_table( + "table_override", + id=10003, + cols_names=["col1"], + metric_names=["m1"], + schema=schema, + ) + db_id = get_example_database().id + imported_id = import_dataset(table, db_id, import_time=1991) + + table_over = self.create_table( + "table_override", + id=10003, + cols_names=["new_col1", "col2", "col3"], + metric_names=["new_metric1"], + schema=schema, + ) + imported_over_id = 
import_dataset(table_over, db_id, import_time=1992) + + imported_over = self.get_table_by_id(imported_over_id) + self.assertEqual(imported_id, imported_over.id) + expected_table = self.create_table( + "table_override", + id=10003, + metric_names=["new_metric1", "m1"], + cols_names=["col1", "new_col1", "col2", "col3"], + schema=schema, + ) + self.assert_table_equals(expected_table, imported_over) + + def test_import_table_override_identical(self): + schema = get_example_default_schema() + table = self.create_table( + "copy_cat", + id=10004, + cols_names=["new_col1", "col2", "col3"], + metric_names=["new_metric1"], + schema=schema, + ) + db_id = get_example_database().id + imported_id = import_dataset(table, db_id, import_time=1993) + + copy_table = self.create_table( + "copy_cat", + id=10004, + cols_names=["new_col1", "col2", "col3"], + metric_names=["new_metric1"], + schema=schema, + ) + imported_id_copy = import_dataset(copy_table, db_id, import_time=1994) + + self.assertEqual(imported_id, imported_id_copy) + self.assert_table_equals(copy_table, self.get_table_by_id(imported_id)) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/integration_tests/importexport/__init__.py b/tests/integration_tests/importexport/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/importexport/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/importexport/commands_tests.py b/tests/integration_tests/importexport/commands_tests.py new file mode 100644 index 0000000000000..ceaf0975659b3 --- /dev/null +++ b/tests/integration_tests/importexport/commands_tests.py @@ -0,0 +1,48 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +from unittest.mock import patch + +import yaml +from freezegun import freeze_time + +from superset import security_manager +from superset.databases.commands.export import ExportDatabasesCommand +from superset.utils.database import get_example_database +from tests.integration_tests.base_tests import SupersetTestCase + + +class TestExportModelsCommand(SupersetTestCase): + @patch("superset.security.manager.g") + def test_export_models_command(self, mock_g): + """Make sure metadata.yaml has the correct content.""" + mock_g.user = security_manager.find_user("admin") + + example_db = get_example_database() + + with freeze_time("2020-01-01T00:00:00Z"): + command = ExportDatabasesCommand([example_db.id]) + contents = dict(command.run()) + + metadata = yaml.safe_load(contents["metadata.yaml"]) + assert metadata == ( + { + "version": "1.0.0", + "type": "Database", + "timestamp": "2020-01-01T00:00:00+00:00", + } + ) diff --git a/tests/integration_tests/insert_chart_mixin.py b/tests/integration_tests/insert_chart_mixin.py new file mode 100644 index 0000000000000..da05d0c49d043 --- /dev/null +++ b/tests/integration_tests/insert_chart_mixin.py @@ -0,0 +1,66 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from typing import List, Optional + +from superset import db, security_manager +from superset.connectors.sqla.models import SqlaTable +from superset.models.slice import Slice + + +class InsertChartMixin: + """ + Implements shared logic for tests to insert charts (slices) in the DB + """ + + def insert_chart( + self, + slice_name: str, + owners: List[int], + datasource_id: int, + created_by=None, + datasource_type: str = "table", + description: Optional[str] = None, + viz_type: Optional[str] = None, + params: Optional[str] = None, + cache_timeout: Optional[int] = None, + certified_by: Optional[str] = None, + certification_details: Optional[str] = None, + ) -> Slice: + obj_owners = list() + for owner in owners: + user = db.session.query(security_manager.user_model).get(owner) + obj_owners.append(user) + datasource = ( + db.session.query(SqlaTable).filter_by(id=datasource_id).one_or_none() + ) + slice = Slice( + cache_timeout=cache_timeout, + certified_by=certified_by, + certification_details=certification_details, + created_by=created_by, + datasource_id=datasource.id, + datasource_name=datasource.name, + datasource_type=datasource.type, + description=description, + owners=obj_owners, + params=params, + slice_name=slice_name, + viz_type=viz_type, + ) + db.session.add(slice) + db.session.commit() + return slice diff --git a/tests/integration_tests/key_value/__init__.py b/tests/integration_tests/key_value/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/key_value/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/key_value/commands/__init__.py b/tests/integration_tests/key_value/commands/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/key_value/commands/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/tests/integration_tests/key_value/commands/create_test.py b/tests/integration_tests/key_value/commands/create_test.py new file mode 100644 index 0000000000000..0e789026baff4 --- /dev/null +++ b/tests/integration_tests/key_value/commands/create_test.py @@ -0,0 +1,63 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +import pickle +from uuid import UUID + +from flask.ctx import AppContext +from flask_appbuilder.security.sqla.models import User + +from superset.extensions import db +from superset.utils.core import override_user +from tests.integration_tests.key_value.commands.fixtures import ( + admin, + ID_KEY, + RESOURCE, + UUID_KEY, + VALUE, +) + + +def test_create_id_entry(app_context: AppContext, admin: User) -> None: + from superset.key_value.commands.create import CreateKeyValueCommand + from superset.key_value.models import KeyValueEntry + + with override_user(admin): + key = CreateKeyValueCommand(resource=RESOURCE, value=VALUE).run() + entry = ( + db.session.query(KeyValueEntry).filter_by(id=key.id).autoflush(False).one() + ) + assert pickle.loads(entry.value) == VALUE + assert entry.created_by_fk == admin.id + db.session.delete(entry) + db.session.commit() + + +def test_create_uuid_entry(app_context: AppContext, admin: User) -> None: + from superset.key_value.commands.create import CreateKeyValueCommand + from superset.key_value.models import KeyValueEntry + + with override_user(admin): + key = CreateKeyValueCommand(resource=RESOURCE, value=VALUE).run() + entry = ( + db.session.query(KeyValueEntry).filter_by(uuid=key.uuid).autoflush(False).one() + ) + assert pickle.loads(entry.value) == VALUE + assert entry.created_by_fk == admin.id + db.session.delete(entry) + db.session.commit() diff --git a/tests/integration_tests/key_value/commands/delete_test.py b/tests/integration_tests/key_value/commands/delete_test.py new file mode 100644 index 0000000000000..62f9883370cf1 --- /dev/null +++ b/tests/integration_tests/key_value/commands/delete_test.py @@ -0,0 +1,82 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +import pickle +from typing import TYPE_CHECKING +from uuid import UUID + +import pytest +from flask.ctx import AppContext +from flask_appbuilder.security.sqla.models import User + +from superset.extensions import db +from tests.integration_tests.key_value.commands.fixtures import admin, RESOURCE, VALUE + +if TYPE_CHECKING: + from superset.key_value.models import KeyValueEntry + +ID_KEY = 234 +UUID_KEY = UUID("5aae143c-44f1-478e-9153-ae6154df333a") + + +@pytest.fixture +def key_value_entry() -> KeyValueEntry: + from superset.key_value.models import KeyValueEntry + + entry = KeyValueEntry( + id=ID_KEY, + uuid=UUID_KEY, + resource=RESOURCE, + value=pickle.dumps(VALUE), + ) + db.session.add(entry) + db.session.commit() + return entry + + +def test_delete_id_entry( + app_context: AppContext, + admin: User, + key_value_entry: KeyValueEntry, +) -> None: + from superset.key_value.commands.delete import DeleteKeyValueCommand + from superset.key_value.models import KeyValueEntry + + assert DeleteKeyValueCommand(resource=RESOURCE, key=ID_KEY).run() is True + + +def test_delete_uuid_entry( + app_context: AppContext, + admin: User, + key_value_entry: KeyValueEntry, +) -> None: + from superset.key_value.commands.delete import DeleteKeyValueCommand + from superset.key_value.models import KeyValueEntry + + assert DeleteKeyValueCommand(resource=RESOURCE, key=UUID_KEY).run() is True + + +def test_delete_entry_missing( + app_context: AppContext, + admin: User, + key_value_entry: KeyValueEntry, +) -> None: + from superset.key_value.commands.delete import DeleteKeyValueCommand + from superset.key_value.models import KeyValueEntry + + assert DeleteKeyValueCommand(resource=RESOURCE, key=456).run() is False diff --git a/tests/integration_tests/key_value/commands/fixtures.py b/tests/integration_tests/key_value/commands/fixtures.py new file mode 100644 index 0000000000000..2fd4fde4e1dc3 --- /dev/null +++ b/tests/integration_tests/key_value/commands/fixtures.py @@ -0,0 +1,63 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +from __future__ import annotations + +import pickle +from typing import Generator, TYPE_CHECKING +from uuid import UUID + +import pytest +from flask_appbuilder.security.sqla.models import User +from sqlalchemy.orm import Session + +from superset.extensions import db +from superset.key_value.types import KeyValueResource +from tests.integration_tests.test_app import app + +if TYPE_CHECKING: + from superset.key_value.models import KeyValueEntry + +ID_KEY = 123 +UUID_KEY = UUID("3e7a2ab8-bcaf-49b0-a5df-dfb432f291cc") +RESOURCE = KeyValueResource.APP +VALUE = {"foo": "bar"} + + +@pytest.fixture +def key_value_entry() -> Generator[KeyValueEntry, None, None]: + from superset.key_value.models import KeyValueEntry + + entry = KeyValueEntry( + id=ID_KEY, + uuid=UUID_KEY, + resource=RESOURCE, + value=pickle.dumps(VALUE), + ) + db.session.add(entry) + db.session.commit() + yield entry + db.session.delete(entry) + db.session.commit() + + +@pytest.fixture +def admin() -> User: + with app.app_context() as ctx: + session: Session = ctx.app.appbuilder.get_session + admin = session.query(User).filter_by(username="admin").one() + return admin diff --git a/tests/integration_tests/key_value/commands/get_test.py b/tests/integration_tests/key_value/commands/get_test.py new file mode 100644 index 0000000000000..b1800a4c3b9a3 --- /dev/null +++ b/tests/integration_tests/key_value/commands/get_test.py @@ -0,0 +1,101 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations + +import pickle +import uuid +from datetime import datetime, timedelta +from typing import TYPE_CHECKING + +from flask.ctx import AppContext + +from superset.extensions import db +from tests.integration_tests.key_value.commands.fixtures import ( + ID_KEY, + key_value_entry, + RESOURCE, + UUID_KEY, + VALUE, +) + +if TYPE_CHECKING: + from superset.key_value.models import KeyValueEntry + + +def test_get_id_entry(app_context: AppContext, key_value_entry: KeyValueEntry) -> None: + from superset.key_value.commands.get import GetKeyValueCommand + + value = GetKeyValueCommand(resource=RESOURCE, key=ID_KEY).run() + assert value == VALUE + + +def test_get_uuid_entry( + app_context: AppContext, key_value_entry: KeyValueEntry +) -> None: + from superset.key_value.commands.get import GetKeyValueCommand + + value = GetKeyValueCommand(resource=RESOURCE, key=UUID_KEY).run() + assert value == VALUE + + +def test_get_id_entry_missing( + app_context: AppContext, + key_value_entry: KeyValueEntry, +) -> None: + from superset.key_value.commands.get import GetKeyValueCommand + + value = GetKeyValueCommand(resource=RESOURCE, key=456).run() + assert value is None + + +def test_get_expired_entry(app_context: AppContext) -> None: + from superset.key_value.commands.get import GetKeyValueCommand + from superset.key_value.models import KeyValueEntry + + entry = KeyValueEntry( + id=678, + uuid=uuid.uuid4(), + resource=RESOURCE, + value=pickle.dumps(VALUE), + expires_on=datetime.now() - timedelta(days=1), + ) + db.session.add(entry) + db.session.commit() + value = GetKeyValueCommand(resource=RESOURCE, key=ID_KEY).run() + assert value is None + db.session.delete(entry) + db.session.commit() + + +def test_get_future_expiring_entry(app_context: AppContext) -> None: + from superset.key_value.commands.get import GetKeyValueCommand + from superset.key_value.models import KeyValueEntry + + id_ = 789 + entry = KeyValueEntry( + id=id_, + uuid=uuid.uuid4(), + resource=RESOURCE, + value=pickle.dumps(VALUE), + expires_on=datetime.now() + timedelta(days=1), + ) + db.session.add(entry) + db.session.commit() + value = GetKeyValueCommand(resource=RESOURCE, key=id_).run() + assert value == VALUE + db.session.delete(entry) + db.session.commit() diff --git a/tests/integration_tests/key_value/commands/update_test.py b/tests/integration_tests/key_value/commands/update_test.py new file mode 100644 index 0000000000000..8eb03b4eda9eb --- /dev/null +++ b/tests/integration_tests/key_value/commands/update_test.py @@ -0,0 +1,96 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations + +import pickle +from typing import TYPE_CHECKING +from uuid import UUID + +from flask.ctx import AppContext +from flask_appbuilder.security.sqla.models import User + +from superset.extensions import db +from superset.utils.core import override_user +from tests.integration_tests.key_value.commands.fixtures import ( + admin, + ID_KEY, + key_value_entry, + RESOURCE, + UUID_KEY, +) + +if TYPE_CHECKING: + from superset.key_value.models import KeyValueEntry + + +NEW_VALUE = "new value" + + +def test_update_id_entry( + app_context: AppContext, + admin: User, + key_value_entry: KeyValueEntry, +) -> None: + from superset.key_value.commands.update import UpdateKeyValueCommand + from superset.key_value.models import KeyValueEntry + + with override_user(admin): + key = UpdateKeyValueCommand( + resource=RESOURCE, + key=ID_KEY, + value=NEW_VALUE, + ).run() + assert key is not None + assert key.id == ID_KEY + entry = db.session.query(KeyValueEntry).filter_by(id=ID_KEY).autoflush(False).one() + assert pickle.loads(entry.value) == NEW_VALUE + assert entry.changed_by_fk == admin.id + + +def test_update_uuid_entry( + app_context: AppContext, + admin: User, + key_value_entry: KeyValueEntry, +) -> None: + from superset.key_value.commands.update import UpdateKeyValueCommand + from superset.key_value.models import KeyValueEntry + + with override_user(admin): + key = UpdateKeyValueCommand( + resource=RESOURCE, + key=UUID_KEY, + value=NEW_VALUE, + ).run() + assert key is not None + assert key.uuid == UUID_KEY + entry = ( + db.session.query(KeyValueEntry).filter_by(uuid=UUID_KEY).autoflush(False).one() + ) + assert pickle.loads(entry.value) == NEW_VALUE + assert entry.changed_by_fk == admin.id + + +def test_update_missing_entry(app_context: AppContext, admin: User) -> None: + from superset.key_value.commands.update import UpdateKeyValueCommand + + with override_user(admin): + key = UpdateKeyValueCommand( + resource=RESOURCE, + key=456, + value=NEW_VALUE, + ).run() + assert key is None diff --git a/tests/integration_tests/key_value/commands/upsert_test.py b/tests/integration_tests/key_value/commands/upsert_test.py new file mode 100644 index 0000000000000..e5cd27e3a6cc8 --- /dev/null +++ b/tests/integration_tests/key_value/commands/upsert_test.py @@ -0,0 +1,102 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations + +import pickle +from typing import TYPE_CHECKING +from uuid import UUID + +from flask.ctx import AppContext +from flask_appbuilder.security.sqla.models import User + +from superset.extensions import db +from superset.utils.core import override_user +from tests.integration_tests.key_value.commands.fixtures import ( + admin, + ID_KEY, + key_value_entry, + RESOURCE, + UUID_KEY, +) + +if TYPE_CHECKING: + from superset.key_value.models import KeyValueEntry + + +NEW_VALUE = "new value" + + +def test_upsert_id_entry( + app_context: AppContext, + admin: User, + key_value_entry: KeyValueEntry, +) -> None: + from superset.key_value.commands.upsert import UpsertKeyValueCommand + from superset.key_value.models import KeyValueEntry + + with override_user(admin): + key = UpsertKeyValueCommand( + resource=RESOURCE, + key=ID_KEY, + value=NEW_VALUE, + ).run() + assert key is not None + assert key.id == ID_KEY + entry = ( + db.session.query(KeyValueEntry).filter_by(id=int(ID_KEY)).autoflush(False).one() + ) + assert pickle.loads(entry.value) == NEW_VALUE + assert entry.changed_by_fk == admin.id + + +def test_upsert_uuid_entry( + app_context: AppContext, + admin: User, + key_value_entry: KeyValueEntry, +) -> None: + from superset.key_value.commands.upsert import UpsertKeyValueCommand + from superset.key_value.models import KeyValueEntry + + with override_user(admin): + key = UpsertKeyValueCommand( + resource=RESOURCE, + key=UUID_KEY, + value=NEW_VALUE, + ).run() + assert key is not None + assert key.uuid == UUID_KEY + entry = ( + db.session.query(KeyValueEntry).filter_by(uuid=UUID_KEY).autoflush(False).one() + ) + assert pickle.loads(entry.value) == NEW_VALUE + assert entry.changed_by_fk == admin.id + + +def test_upsert_missing_entry(app_context: AppContext, admin: User) -> None: + from superset.key_value.commands.upsert import UpsertKeyValueCommand + from superset.key_value.models import KeyValueEntry + + with override_user(admin): + key = UpsertKeyValueCommand( + resource=RESOURCE, + key=456, + value=NEW_VALUE, + ).run() + assert key is not None + assert key.id == 456 + db.session.query(KeyValueEntry).filter_by(id=456).delete() + db.session.commit() diff --git a/tests/integration_tests/log_api_tests.py b/tests/integration_tests/log_api_tests.py new file mode 100644 index 0000000000000..83a7f5fd84b31 --- /dev/null +++ b/tests/integration_tests/log_api_tests.py @@ -0,0 +1,335 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# isort:skip_file
+"""Unit tests for Superset"""
+from datetime import datetime, timedelta
+import json
+from typing import Optional
+from unittest.mock import ANY
+
+from flask_appbuilder.security.sqla.models import User
+import prison
+from unittest.mock import patch
+
+from superset import db
+from superset.models.core import Log
+from superset.views.log.api import LogRestApi
+from tests.integration_tests.dashboard_utils import create_dashboard
+from tests.integration_tests.test_app import app
+
+from .base_tests import SupersetTestCase
+
+
+EXPECTED_COLUMNS = [
+    "action",
+    "dashboard_id",
+    "dttm",
+    "duration_ms",
+    "json",
+    "referrer",
+    "slice_id",
+    "user",
+    "user_id",
+]
+
+
+class TestLogApi(SupersetTestCase):
+    def insert_log(
+        self,
+        action: str,
+        user: "User",
+        dashboard_id: Optional[int] = 0,
+        slice_id: Optional[int] = 0,
+        json: Optional[str] = "",
+        duration_ms: Optional[int] = 0,
+    ):
+        log = Log(
+            action=action,
+            user=user,
+            dashboard_id=dashboard_id,
+            slice_id=slice_id,
+            json=json,
+            duration_ms=duration_ms,
+        )
+        db.session.add(log)
+        db.session.commit()
+        return log
+
+    def test_not_enabled(self):
+        with patch.object(LogRestApi, "is_enabled", return_value=False):
+            admin_user = self.get_user("admin")
+            self.insert_log("some_action", admin_user)
+            self.login(username="admin")
+            arguments = {"filters": [{"col": "action", "opr": "sw", "value": "some_"}]}
+            uri = f"api/v1/log/?q={prison.dumps(arguments)}"
+            rv = self.client.get(uri)
+            self.assertEqual(rv.status_code, 404)
+
+    def test_get_list(self):
+        """
+        Log API: Test get list
+        """
+        admin_user = self.get_user("admin")
+        log = self.insert_log("some_action", admin_user)
+        self.login(username="admin")
+        arguments = {"filters": [{"col": "action", "opr": "sw", "value": "some_"}]}
+        uri = f"api/v1/log/?q={prison.dumps(arguments)}"
+        rv = self.client.get(uri)
+        self.assertEqual(rv.status_code, 200)
+        response = json.loads(rv.data.decode("utf-8"))
+        self.assertEqual(list(response["result"][0].keys()), EXPECTED_COLUMNS)
+        self.assertEqual(response["result"][0]["action"], "some_action")
+        self.assertEqual(response["result"][0]["user"], {"username": "admin"})
+        db.session.delete(log)
+        db.session.commit()
+
+    def test_get_list_not_allowed(self):
+        """
+        Log API: Test get list is not allowed for non-admin users
+        """
+        admin_user = self.get_user("admin")
+        log = self.insert_log("action", admin_user)
+        self.login(username="gamma")
+        uri = "api/v1/log/"
+        rv = self.client.get(uri)
+        self.assertEqual(rv.status_code, 403)
+        self.login(username="alpha")
+        rv = self.client.get(uri)
+        self.assertEqual(rv.status_code, 403)
+        db.session.delete(log)
+        db.session.commit()
+
+    def test_get_item(self):
+        """
+        Log API: Test get item
+        """
+        admin_user = self.get_user("admin")
+        log = self.insert_log("some_action", admin_user)
+        self.login(username="admin")
+        uri = f"api/v1/log/{log.id}"
+        rv = self.client.get(uri)
+        self.assertEqual(rv.status_code, 200)
+        response = json.loads(rv.data.decode("utf-8"))
+
+        self.assertEqual(list(response["result"].keys()), EXPECTED_COLUMNS)
+        self.assertEqual(response["result"]["action"], "some_action")
+        self.assertEqual(response["result"]["user"], {"username": "admin"})
+        db.session.delete(log)
+        db.session.commit()
+
+    def test_delete_log(self):
+        """
+        Log API: Test delete (does not exist)
+        """
+        admin_user = self.get_user("admin")
+        log = self.insert_log("action", admin_user)
+        self.login(username="admin")
+        uri = f"api/v1/log/{log.id}"
+        rv = self.client.delete(uri)
+        self.assertEqual(rv.status_code, 405)
+
db.session.delete(log) + db.session.commit() + + def test_update_log(self): + """ + Log API: Test update (does not exist) + """ + admin_user = self.get_user("admin") + log = self.insert_log("action", admin_user) + self.login(username="admin") + + log_data = {"action": "some_action"} + uri = f"api/v1/log/{log.id}" + rv = self.client.put(uri, json=log_data) + self.assertEqual(rv.status_code, 405) + db.session.delete(log) + db.session.commit() + + def test_get_recent_activity_no_broad_access(self): + """ + Log API: Test recent activity not visible for other users without + ENABLE_BROAD_ACTIVITY_ACCESS flag on + """ + admin_user = self.get_user("admin") + self.login(username="admin") + app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] = False + + uri = f"api/v1/log/recent_activity/{admin_user.id + 1}/" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 403) + app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] = True + + def test_get_recent_activity(self): + """ + Log API: Test recent activity endpoint + """ + admin_user = self.get_user("admin") + self.login(username="admin") + dash = create_dashboard("dash_slug", "dash_title", "{}", []) + log1 = self.insert_log("dashboard", admin_user, dashboard_id=dash.id) + log2 = self.insert_log("dashboard", admin_user, dashboard_id=dash.id) + + uri = f"api/v1/log/recent_activity/{admin_user.id}/" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 200) + response = json.loads(rv.data.decode("utf-8")) + + db.session.delete(log1) + db.session.delete(log2) + db.session.delete(dash) + db.session.commit() + + self.assertEqual( + response, + { + "result": [ + { + "action": "dashboard", + "item_type": "dashboard", + "item_url": "/superset/dashboard/dash_slug/", + "item_title": "dash_title", + "time": ANY, + "time_delta_humanized": ANY, + } + ] + }, + ) + + def test_get_recent_activity_actions_filter(self): + """ + Log API: Test recent activity actions argument + """ + admin_user = self.get_user("admin") + self.login(username="admin") + dash = create_dashboard("dash_slug", "dash_title", "{}", []) + log = self.insert_log("dashboard", admin_user, dashboard_id=dash.id) + log2 = self.insert_log("explore", admin_user, dashboard_id=dash.id) + + arguments = {"actions": ["dashboard"]} + uri = f"api/v1/log/recent_activity/{admin_user.id}/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + + db.session.delete(log) + db.session.delete(log2) + db.session.delete(dash) + db.session.commit() + + self.assertEqual(rv.status_code, 200) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(len(response["result"]), 1) + + def test_get_recent_activity_distinct_false(self): + """ + Log API: Test recent activity when distinct is false + """ + db.session.query(Log).delete(synchronize_session=False) + db.session.commit() + admin_user = self.get_user("admin") + self.login(username="admin") + dash = create_dashboard("dash_slug", "dash_title", "{}", []) + log = self.insert_log("dashboard", admin_user, dashboard_id=dash.id) + log2 = self.insert_log("dashboard", admin_user, dashboard_id=dash.id) + + arguments = {"distinct": False} + uri = f"api/v1/log/recent_activity/{admin_user.id}/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + + db.session.delete(log) + db.session.delete(log2) + db.session.delete(dash) + db.session.commit() + self.assertEqual(rv.status_code, 200) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(len(response["result"]), 2) + + def test_get_recent_activity_pagination(self): + """ + Log API: Test recent activity 
pagination arguments + """ + admin_user = self.get_user("admin") + self.login(username="admin") + dash = create_dashboard("dash_slug", "dash_title", "{}", []) + dash2 = create_dashboard("dash2_slug", "dash2_title", "{}", []) + dash3 = create_dashboard("dash3_slug", "dash3_title", "{}", []) + log = self.insert_log("dashboard", admin_user, dashboard_id=dash.id) + log2 = self.insert_log("dashboard", admin_user, dashboard_id=dash2.id) + log3 = self.insert_log("dashboard", admin_user, dashboard_id=dash3.id) + + now = datetime.now() + log3.dttm = now + log2.dttm = now - timedelta(days=1) + log.dttm = now - timedelta(days=2) + + arguments = {"page": 0, "page_size": 2} + uri = f"api/v1/log/recent_activity/{admin_user.id}/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + + self.assertEqual(rv.status_code, 200) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual( + response, + { + "result": [ + { + "action": "dashboard", + "item_type": "dashboard", + "item_url": "/superset/dashboard/dash3_slug/", + "item_title": "dash3_title", + "time": ANY, + "time_delta_humanized": ANY, + }, + { + "action": "dashboard", + "item_type": "dashboard", + "item_url": "/superset/dashboard/dash2_slug/", + "item_title": "dash2_title", + "time": ANY, + "time_delta_humanized": ANY, + }, + ] + }, + ) + + arguments = {"page": 1, "page_size": 2} + uri = f"api/v1/log/recent_activity/{admin_user.id}/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + + db.session.delete(log) + db.session.delete(log2) + db.session.delete(log3) + db.session.delete(dash) + db.session.delete(dash2) + db.session.delete(dash3) + db.session.commit() + + self.assertEqual(rv.status_code, 200) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual( + response, + { + "result": [ + { + "action": "dashboard", + "item_type": "dashboard", + "item_url": "/superset/dashboard/dash_slug/", + "item_title": "dash_title", + "time": ANY, + "time_delta_humanized": ANY, + } + ] + }, + ) diff --git a/tests/integration_tests/log_model_view_tests.py b/tests/integration_tests/log_model_view_tests.py new file mode 100644 index 0000000000000..fa80240a193b0 --- /dev/null +++ b/tests/integration_tests/log_model_view_tests.py @@ -0,0 +1,37 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from unittest.mock import patch + +from superset.views.log.views import LogModelView + +from .base_tests import SupersetTestCase + + +class TestLogModelView(SupersetTestCase): + def test_disabled(self): + with patch.object(LogModelView, "is_enabled", return_value=False): + self.login("admin") + uri = "/logmodelview/list/" + rv = self.client.get(uri) + self.assert404(rv) + + def test_enabled(self): + with patch.object(LogModelView, "is_enabled", return_value=True): + self.login("admin") + uri = "/logmodelview/list/" + rv = self.client.get(uri) + self.assert200(rv) diff --git a/tests/integration_tests/logging_configurator_tests.py b/tests/integration_tests/logging_configurator_tests.py new file mode 100644 index 0000000000000..60e0ded692962 --- /dev/null +++ b/tests/integration_tests/logging_configurator_tests.py @@ -0,0 +1,55 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import logging +import unittest +from unittest.mock import MagicMock + +from superset.utils.logging_configurator import LoggingConfigurator + + +class TestLoggingConfigurator(unittest.TestCase): + def reset_logging(self): + # work around all of the import side-effects in superset + logging.root.manager.loggerDict = {} + logging.root.handlers = [] + + def test_configurator_adding_handler(self): + class MyEventHandler(logging.Handler): + def __init__(self): + super().__init__(level=logging.DEBUG) + self.received = False + + def handle(self, record): + if hasattr(record, "testattr"): + self.received = True + + class MyConfigurator(LoggingConfigurator): + def __init__(self, handler): + self.handler = handler + + def configure_logging(self, app_config, debug_mode): + super().configure_logging(app_config, debug_mode) + logging.getLogger().addHandler(self.handler) + + self.reset_logging() + + handler = MyEventHandler() + cfg = MyConfigurator(handler) + cfg.configure_logging(MagicMock(), True) + + logging.info("test", extra={"testattr": "foo"}) + self.assertTrue(handler.received) diff --git a/tests/integration_tests/migrations/06e1e70058c7_migrate_legacy_area__tests.py b/tests/integration_tests/migrations/06e1e70058c7_migrate_legacy_area__tests.py new file mode 100644 index 0000000000000..f02d069b2bafb --- /dev/null +++ b/tests/integration_tests/migrations/06e1e70058c7_migrate_legacy_area__tests.py @@ -0,0 +1,99 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import json + +from superset.app import SupersetApp +from superset.migrations.shared.migrate_viz import MigrateAreaChart + +area_form_data = """{ + "adhoc_filters": [], + "annotation_layers": [], + "bottom_margin": "auto", + "color_scheme": "lyftColors", + "comparison_type": "values", + "contribution": true, + "datasource": "2__table", + "extra_form_data": {}, + "granularity_sqla": "ds", + "groupby": [ + "gender" + ], + "line_interpolation": "linear", + "metrics": [ + "sum__num" + ], + "order_desc": true, + "rich_tooltip": true, + "rolling_type": "None", + "row_limit": 10000, + "show_brush": "auto", + "show_controls": true, + "show_legend": true, + "slice_id": 165, + "stacked_style": "stack", + "time_grain_sqla": "P1D", + "time_range": "No filter", + "viz_type": "area", + "x_axis_format": "smart_date", + "x_axis_label": "x asix label", + "x_axis_showminmax": false, + "x_ticks_layout": "auto", + "y_axis_bounds": [ + null, + null + ], + "y_axis_format": "SMART_NUMBER" +} +""" + + +def test_area_migrate(app_context: SupersetApp) -> None: + from superset.models.slice import Slice + + slc = Slice( + viz_type=MigrateAreaChart.source_viz_type, + datasource_type="table", + params=area_form_data, + query_context=f'{{"form_data": {area_form_data}}}', + ) + + slc = MigrateAreaChart.upgrade_slice(slc) + assert slc.viz_type == MigrateAreaChart.target_viz_type + # verify form_data + new_form_data = json.loads(slc.params) + assert new_form_data["contributionMode"] == "row" + assert "contribution" not in new_form_data + assert new_form_data["show_extra_controls"] is True + assert new_form_data["stack"] == "Stack" + assert new_form_data["x_axis_title"] == "x asix label" + assert new_form_data["x_axis_title_margin"] == 30 + assert json.dumps(new_form_data["form_data_bak"], sort_keys=True) == json.dumps( + json.loads(area_form_data), sort_keys=True + ) + + # verify query_context + new_query_context = json.loads(slc.query_context) + assert ( + new_query_context["form_data"]["viz_type"] == MigrateAreaChart.target_viz_type + ) + + # downgrade + slc = MigrateAreaChart.downgrade_slice(slc) + assert slc.viz_type == MigrateAreaChart.source_viz_type + assert json.dumps(json.loads(slc.params), sort_keys=True) == json.dumps( + json.loads(area_form_data), sort_keys=True + ) diff --git a/tests/integration_tests/migrations/__init__.py b/tests/integration_tests/migrations/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/migrations/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/migrations/ad07e4fdbaba_rm_time_range_endpoints_from_qc_3__test.py b/tests/integration_tests/migrations/ad07e4fdbaba_rm_time_range_endpoints_from_qc_3__test.py new file mode 100644 index 0000000000000..716be9b1fafa8 --- /dev/null +++ b/tests/integration_tests/migrations/ad07e4fdbaba_rm_time_range_endpoints_from_qc_3__test.py @@ -0,0 +1,138 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import json +from importlib import import_module + +rm_time_range_endpoints_from_qc_3 = import_module( + "superset.migrations.versions." + "2022-04-18_11-20_ad07e4fdbaba_rm_time_range_endpoints_from_qc_3", +) +Slice = rm_time_range_endpoints_from_qc_3.Slice +upgrade_slice = rm_time_range_endpoints_from_qc_3.upgrade_slice + +sample_query_context = { + "datasource": {"id": 27, "type": "table"}, + "force": False, + "queries": [ + { + "time_range": "No filter", + "filters": [], + "extras": { + "time_grain_sqla": "P1D", + "time_range_endpoints": ["inclusive", "exclusive"], + "having": "", + "having_druid": [], + "where": "", + }, + "applied_time_extras": {}, + "columns": ["a", "b"], + "orderby": [], + "annotation_layers": [], + "row_limit": 1000, + "timeseries_limit": 0, + "order_desc": True, + "url_params": {}, + "custom_params": {}, + "custom_form_data": {}, + "post_processing": [], + } + ], + "form_data": { + "viz_type": "table", + "datasource": "27__table", + "slice_id": 545, + "url_params": {}, + "time_grain_sqla": "P1D", + "time_range": "No filter", + "query_mode": "raw", + "groupby": [], + "metrics": [], + "all_columns": ["a", "b"], + "percent_metrics": [], + "adhoc_filters": [], + "order_by_cols": [], + "row_limit": 1000, + "server_page_length": 10, + "include_time": False, + "order_desc": True, + "table_timestamp_format": "smart_date", + "show_cell_bars": True, + "color_pn": True, + "extra_form_data": {}, + "force": False, + "result_format": "json", + "result_type": "full", + }, + "result_format": "json", + "result_type": "full", +} + + +sample_query_context = { + "datasource": {"id": 27, "type": "table"}, + "force": False, + "queries": [ + { + "time_range": "No filter", + "filters": [], + "extras": { + "time_grain_sqla": "P1D", + "time_range_endpoints": ["inclusive", "exclusive"], + "having": "", + "having_druid": [], + "where": "", + }, + "applied_time_extras": {}, + "columns": ["a", "b"], + "orderby": [], + 
"annotation_layers": [], + "row_limit": 1000, + "timeseries_limit": 0, + "order_desc": True, + "url_params": {}, + "custom_params": {}, + "custom_form_data": {}, + "post_processing": [], + } + ], + "form_data": { + "time_range_endpoints": ["inclusive", "exclusive"], + }, + "result_format": "json", + "result_type": "full", +} + + +def test_upgrade(): + slc = Slice(slice_name="FOO", query_context=json.dumps(sample_query_context)) + + upgrade_slice(slc) + + query_context = json.loads(slc.query_context) + queries = query_context.get("queries") + for q in queries: + extras = q.get("extras", {}) + assert "time_range_endpoints" not in extras + + form_data = query_context.get("form_data", {}) + assert "time_range_endpoints" not in form_data + + +def test_upgrade_bad_json(): + slc = Slice(slice_name="FOO", query_context="abc") + + assert None == upgrade_slice(slc) diff --git a/tests/integration_tests/migrations/c747c78868b6_migrating_legacy_treemap__tests.py b/tests/integration_tests/migrations/c747c78868b6_migrating_legacy_treemap__tests.py new file mode 100644 index 0000000000000..3e9ef330924cb --- /dev/null +++ b/tests/integration_tests/migrations/c747c78868b6_migrating_legacy_treemap__tests.py @@ -0,0 +1,91 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+import json + +from superset.app import SupersetApp +from superset.migrations.shared.migrate_viz import MigrateTreeMap + +treemap_form_data = """{ + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": [ + "Edward" + ], + "expressionType": "SIMPLE", + "filterOptionName": "filter_xhbus6irfa_r10k9nwmwy", + "isExtra": false, + "isNew": false, + "operator": "IN", + "operatorId": "IN", + "sqlExpression": null, + "subject": "name" + } + ], + "color_scheme": "bnbColors", + "datasource": "2__table", + "extra_form_data": {}, + "granularity_sqla": "ds", + "groupby": [ + "state", + "gender" + ], + "metrics": [ + "sum__num" + ], + "number_format": ",d", + "order_desc": true, + "row_limit": 10, + "time_range": "No filter", + "timeseries_limit_metric": "sum__num", + "treemap_ratio": 1.618033988749895, + "viz_type": "treemap" +} +""" + + +def test_treemap_migrate(app_context: SupersetApp) -> None: + from superset.models.slice import Slice + + slc = Slice( + viz_type=MigrateTreeMap.source_viz_type, + datasource_type="table", + params=treemap_form_data, + query_context=f'{{"form_data": {treemap_form_data}}}', + ) + + slc = MigrateTreeMap.upgrade_slice(slc) + assert slc.viz_type == MigrateTreeMap.target_viz_type + # verify form_data + new_form_data = json.loads(slc.params) + assert new_form_data["metric"] == "sum__num" + assert new_form_data["viz_type"] == "treemap_v2" + assert "metrics" not in new_form_data + assert json.dumps(new_form_data["form_data_bak"], sort_keys=True) == json.dumps( + json.loads(treemap_form_data), sort_keys=True + ) + + # verify query_context + new_query_context = json.loads(slc.query_context) + assert new_query_context["form_data"]["viz_type"] == "treemap_v2" + + # downgrade + slc = MigrateTreeMap.downgrade_slice(slc) + assert slc.viz_type == MigrateTreeMap.source_viz_type + assert json.dumps(json.loads(slc.params), sort_keys=True) == json.dumps( + json.loads(treemap_form_data), sort_keys=True + ) diff --git a/tests/integration_tests/migrations/f1410ed7ec95_migrate_native_filters_to_new_schema__tests.py b/tests/integration_tests/migrations/f1410ed7ec95_migrate_native_filters_to_new_schema__tests.py new file mode 100644 index 0000000000000..a41bd9595d9a3 --- /dev/null +++ b/tests/integration_tests/migrations/f1410ed7ec95_migrate_native_filters_to_new_schema__tests.py @@ -0,0 +1,98 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from copy import deepcopy +from importlib import import_module + +migrate_native_filters_to_new_schema = import_module( + "superset.migrations.versions." 
+ "2021-04-29_15-32_f1410ed7ec95_migrate_native_filters_to_new_schema", +) +downgrade_dashboard = migrate_native_filters_to_new_schema.downgrade_dashboard +upgrade_dashboard = migrate_native_filters_to_new_schema.upgrade_dashboard + +dashboard_v1 = { + "native_filter_configuration": [ + { + "filterType": "filter_select", + "cascadingFilters": True, + "defaultValue": ["Albania", "Algeria"], + }, + ], + "filter_sets_configuration": [ + { + "nativeFilters": { + "FILTER": { + "filterType": "filter_select", + "cascadingFilters": True, + "defaultValue": ["Albania", "Algeria"], + }, + }, + }, + ], +} + + +dashboard_v2 = { + "native_filter_configuration": [ + { + "filterType": "filter_select", + "cascadingFilters": True, + "defaultDataMask": { + "filterState": { + "value": ["Albania", "Algeria"], + }, + }, + } + ], + "filter_sets_configuration": [ + { + "nativeFilters": { + "FILTER": { + "filterType": "filter_select", + "cascadingFilters": True, + "defaultDataMask": { + "filterState": { + "value": ["Albania", "Algeria"], + }, + }, + }, + }, + }, + ], +} + + +def test_upgrade_dashboard(): + """ + ensure that dashboard upgrade operation produces a correct dashboard object + """ + converted_dashboard = deepcopy(dashboard_v1) + filters, filter_sets = upgrade_dashboard(converted_dashboard) + assert filters == 1 + assert filter_sets == 1 + assert dashboard_v2 == converted_dashboard + + +def test_downgrade_dashboard(): + """ + ensure that dashboard downgrade operation produces a correct dashboard object + """ + converted_dashboard = deepcopy(dashboard_v2) + filters, filter_sets = downgrade_dashboard(converted_dashboard) + assert filters == 1 + assert filter_sets == 1 + assert dashboard_v1 == converted_dashboard diff --git a/tests/integration_tests/migrations/fb13d49b72f9_better_filters__tests.py b/tests/integration_tests/migrations/fb13d49b72f9_better_filters__tests.py new file mode 100644 index 0000000000000..60a6c5e819745 --- /dev/null +++ b/tests/integration_tests/migrations/fb13d49b72f9_better_filters__tests.py @@ -0,0 +1,39 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import json +from importlib import import_module + +better_filters = import_module( + "superset.migrations.versions." 
"2018-12-11_22-03_fb13d49b72f9_better_filters", +) +Slice = better_filters.Slice +upgrade_slice = better_filters.upgrade_slice + + +def test_upgrade_slice(): + slc = Slice( + slice_name="FOO", + viz_type="filter_box", + params=json.dumps(dict(metric="foo", groupby=["bar"])), + ) + upgrade_slice(slc) + params = json.loads(slc.params) + assert "metric" not in params + assert "filter_configs" in params + + cfg = params["filter_configs"][0] + assert cfg.get("metric") == "foo" diff --git a/tests/integration_tests/migrations/fc3a3a8ff221_migrate_filter_sets_to_new_format__tests.py b/tests/integration_tests/migrations/fc3a3a8ff221_migrate_filter_sets_to_new_format__tests.py new file mode 100644 index 0000000000000..8d08214ea5beb --- /dev/null +++ b/tests/integration_tests/migrations/fc3a3a8ff221_migrate_filter_sets_to_new_format__tests.py @@ -0,0 +1,367 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from copy import deepcopy +from importlib import import_module + +migrate_filter_sets_to_new_format = import_module( + "superset.migrations.versions." 
+ "2021-04-12_12-38_fc3a3a8ff221_migrate_filter_sets_to_new_format", +) +downgrade_filter_set = migrate_filter_sets_to_new_format.downgrade_filter_set +upgrade_filter_set = migrate_filter_sets_to_new_format.upgrade_filter_set +upgrade_select_filters = migrate_filter_sets_to_new_format.upgrade_select_filters + +native_filters_v1 = [ + { + "cascadeParentIds": [], + "controlValues": { + "enableEmptyFilter": False, + "inverseSelection": False, + "multiSelect": True, + "sortAscending": True, + }, + "defaultValue": None, + "filterType": "filter_select", + "id": "NATIVE_FILTER-CZpnK0rM-", + "isInstant": True, + "name": "Region", + "scope": {"excluded": [], "rootPath": ["ROOT_ID"]}, + "targets": [{"column": {"name": "region"}, "datasetId": 2}], + }, + { + "cascadeParentIds": [], + "defaultValue": "No filter", + "filterType": "filter_time", + "id": "NATIVE_FILTER-gCMse9C7e", + "isInstant": True, + "name": "Time Range", + "scope": {"excluded": [], "rootPath": ["ROOT_ID"]}, + "targets": [{}], + }, + { + "cascadeParentIds": ["NATIVE_FILTER-CZpnK0rM-"], + "controlValues": { + "defaultToFirstItem": False, + "enableEmptyFilter": False, + "inverseSelection": False, + "multiSelect": True, + "sortAscending": True, + }, + "defaultValue": None, + "filterType": "filter_select", + "id": "NATIVE_FILTER-oQRgQ25Au", + "isInstant": True, + "name": "Country", + "scope": {"excluded": [], "rootPath": ["ROOT_ID"]}, + "targets": [{"column": {"name": "country_name"}, "datasetId": 2}], + }, +] +native_filters_v2 = [ + { + "cascadeParentIds": [], + "controlValues": { + "defaultToFirstItem": False, + "enableEmptyFilter": False, + "inverseSelection": False, + "multiSelect": True, + "sortAscending": True, + }, + "defaultValue": None, + "filterType": "filter_select", + "id": "NATIVE_FILTER-CZpnK0rM-", + "isInstant": True, + "name": "Region", + "scope": {"excluded": [], "rootPath": ["ROOT_ID"]}, + "targets": [{"column": {"name": "region"}, "datasetId": 2}], + }, + { + "cascadeParentIds": [], + "defaultValue": "No filter", + "filterType": "filter_time", + "id": "NATIVE_FILTER-gCMse9C7e", + "isInstant": True, + "name": "Time Range", + "scope": {"excluded": [], "rootPath": ["ROOT_ID"]}, + "targets": [{}], + }, + { + "cascadeParentIds": ["NATIVE_FILTER-CZpnK0rM-"], + "controlValues": { + "defaultToFirstItem": False, + "enableEmptyFilter": False, + "inverseSelection": False, + "multiSelect": True, + "sortAscending": True, + }, + "defaultValue": None, + "filterType": "filter_select", + "id": "NATIVE_FILTER-oQRgQ25Au", + "isInstant": True, + "name": "Country", + "scope": {"excluded": [], "rootPath": ["ROOT_ID"]}, + "targets": [{"column": {"name": "country_name"}, "datasetId": 2}], + }, +] + +filter_sets_v1 = { + "name": "New filter set", + "id": "FILTERS_SET-tt_Ovwy95", + "nativeFilters": { + "NATIVE_FILTER-tx05Ze2Hm": { + "id": "NATIVE_FILTER-tx05Ze2Hm", + "name": "Time range", + "filterType": "filter_time", + "targets": [{}], + "defaultValue": "No filter", + "cascadeParentIds": [], + "scope": {"excluded": [], "rootPath": ["ROOT_ID"]}, + "isInstant": False, + }, + "NATIVE_FILTER-JeZ9HYoTP": { + "cascadeParentIds": [], + "controlValues": { + "enableEmptyFilter": False, + "inverseSelection": False, + "multiSelect": True, + "sortAscending": True, + }, + "defaultValue": None, + "filterType": "filter_select", + "id": "NATIVE_FILTER-JeZ9HYoTP", + "isInstant": False, + "name": "Platform", + "scope": {"excluded": [], "rootPath": ["ROOT_ID"]}, + "targets": [{"column": {"name": "platform"}, "datasetId": 33}], + }, + "NATIVE_FILTER-B2PFYVIUw": 
{ + "cascadeParentIds": [], + "controlValues": { + "enableEmptyFilter": False, + "inverseSelection": False, + "multiSelect": True, + "sortAscending": True, + }, + "defaultValue": None, + "filterType": "filter_select", + "id": "NATIVE_FILTER-B2PFYVIUw", + "isInstant": False, + "name": "Genre", + "scope": {"excluded": [], "rootPath": ["ROOT_ID"]}, + "targets": [{"column": {"name": "genre"}, "datasetId": 33}], + }, + "NATIVE_FILTER-VDLd4Wq-v": { + "cascadeParentIds": [], + "controlValues": { + "enableEmptyFilter": False, + "inverseSelection": False, + "multiSelect": True, + "sortAscending": True, + }, + "defaultValue": None, + "filterType": "filter_select", + "id": "NATIVE_FILTER-VDLd4Wq-v", + "isInstant": False, + "name": "Publisher", + "scope": {"excluded": [], "rootPath": ["ROOT_ID"]}, + "targets": [{"column": {"name": "publisher"}, "datasetId": 33}], + }, + }, + "dataMask": { + "nativeFilters": { + "NATIVE_FILTER-tx05Ze2Hm": { + "extraFormData": {"override_form_data": {"time_range": "No filter"}}, + "currentState": {"value": "No filter"}, + "id": "NATIVE_FILTER-tx05Ze2Hm", + }, + "NATIVE_FILTER-B2PFYVIUw": { + "extraFormData": { + "append_form_data": { + "filters": [ + { + "col": "genre", + "op": "IN", + "val": ["Adventure", "Fighting", "Misc"], + } + ] + } + }, + "currentState": {"value": ["Adventure", "Fighting", "Misc"]}, + "id": "NATIVE_FILTER-B2PFYVIUw", + }, + "NATIVE_FILTER-VDLd4Wq-v": { + "extraFormData": {"append_form_data": {"filters": []}}, + "currentState": {"value": None}, + "id": "NATIVE_FILTER-VDLd4Wq-v", + }, + "NATIVE_FILTER-JeZ9HYoTP": { + "extraFormData": { + "append_form_data": { + "filters": [ + { + "col": "platform", + "op": "IN", + "val": ["GB", "GBA", "PSV", "DS", "3DS"], + } + ] + } + }, + "currentState": {"value": ["GB", "GBA", "PSV", "DS", "3DS"]}, + "id": "NATIVE_FILTER-JeZ9HYoTP", + }, + } + }, +} + +filter_sets_v2 = { + "name": "New filter set", + "id": "FILTERS_SET-tt_Ovwy95", + "nativeFilters": { + "NATIVE_FILTER-tx05Ze2Hm": { + "id": "NATIVE_FILTER-tx05Ze2Hm", + "name": "Time range", + "filterType": "filter_time", + "targets": [{}], + "defaultValue": "No filter", + "cascadeParentIds": [], + "scope": {"excluded": [], "rootPath": ["ROOT_ID"]}, + "isInstant": False, + }, + "NATIVE_FILTER-JeZ9HYoTP": { + "cascadeParentIds": [], + "controlValues": { + "enableEmptyFilter": False, + "inverseSelection": False, + "multiSelect": True, + "sortAscending": True, + "defaultToFirstItem": False, + }, + "defaultValue": None, + "filterType": "filter_select", + "id": "NATIVE_FILTER-JeZ9HYoTP", + "isInstant": False, + "name": "Platform", + "scope": {"excluded": [], "rootPath": ["ROOT_ID"]}, + "targets": [{"column": {"name": "platform"}, "datasetId": 33}], + }, + "NATIVE_FILTER-B2PFYVIUw": { + "cascadeParentIds": [], + "controlValues": { + "enableEmptyFilter": False, + "inverseSelection": False, + "multiSelect": True, + "sortAscending": True, + "defaultToFirstItem": False, + }, + "defaultValue": None, + "filterType": "filter_select", + "id": "NATIVE_FILTER-B2PFYVIUw", + "isInstant": False, + "name": "Genre", + "scope": {"excluded": [], "rootPath": ["ROOT_ID"]}, + "targets": [{"column": {"name": "genre"}, "datasetId": 33}], + }, + "NATIVE_FILTER-VDLd4Wq-v": { + "cascadeParentIds": [], + "controlValues": { + "enableEmptyFilter": False, + "inverseSelection": False, + "multiSelect": True, + "sortAscending": True, + "defaultToFirstItem": False, + }, + "defaultValue": None, + "filterType": "filter_select", + "id": "NATIVE_FILTER-VDLd4Wq-v", + "isInstant": False, + "name": 
"Publisher", + "scope": {"excluded": [], "rootPath": ["ROOT_ID"]}, + "targets": [{"column": {"name": "publisher"}, "datasetId": 33}], + }, + }, + "dataMask": { + "NATIVE_FILTER-tx05Ze2Hm": { + "id": "NATIVE_FILTER-tx05Ze2Hm", + "filterState": {"value": "No filter"}, + "extraFormData": {"time_range": "No filter"}, + }, + "NATIVE_FILTER-B2PFYVIUw": { + "id": "NATIVE_FILTER-B2PFYVIUw", + "filterState": {"value": ["Adventure", "Fighting", "Misc"]}, + "extraFormData": { + "filters": [ + { + "col": "genre", + "op": "IN", + "val": ["Adventure", "Fighting", "Misc"], + } + ] + }, + }, + "NATIVE_FILTER-VDLd4Wq-v": { + "id": "NATIVE_FILTER-VDLd4Wq-v", + "filterState": {"value": None}, + "extraFormData": {"filters": []}, + }, + "NATIVE_FILTER-JeZ9HYoTP": { + "id": "NATIVE_FILTER-JeZ9HYoTP", + "filterState": {"value": ["GB", "GBA", "PSV", "DS", "3DS"]}, + "extraFormData": { + "filters": [ + { + "col": "platform", + "op": "IN", + "val": ["GB", "GBA", "PSV", "DS", "3DS"], + } + ] + }, + }, + }, +} + + +def test_upgrade_select_filters(): + """ + ensure that controlValue.defaultToFirstItem is added if it's missing + """ + converted_filters = deepcopy(native_filters_v1) + upgrade_select_filters(converted_filters) + assert converted_filters == native_filters_v2 + + +def test_upgrade_filter_sets(): + """ + ensure that filter set upgrade operation produces a object that is compatible + with a currently functioning set + """ + converted_filter_set = deepcopy(filter_sets_v1) + upgrade_filter_set(converted_filter_set) + assert converted_filter_set == filter_sets_v2 + + +def test_downgrade_filter_set(): + """ + ensure that the filter set downgrade operation produces an almost identical dict + as the original value + """ + converted_v1_set = deepcopy(filter_sets_v1) + # upgrade the native filter metadata in the comparison fixture, + # as removing the defaultToFirstItem is not necessary + upgrade_select_filters(converted_v1_set["nativeFilters"].values()) + + converted_filter_set = deepcopy(filter_sets_v2) + downgrade_filter_set(converted_filter_set) + assert converted_filter_set == converted_v1_set diff --git a/tests/integration_tests/model_tests.py b/tests/integration_tests/model_tests.py new file mode 100644 index 0000000000000..da6c5e6a3c254 --- /dev/null +++ b/tests/integration_tests/model_tests.py @@ -0,0 +1,648 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# isort:skip_file +import json +from superset.utils.core import DatasourceType +import textwrap +import unittest +from unittest import mock + +from superset import security_manager +from superset.connectors.sqla.models import SqlaTable +from superset.exceptions import SupersetException +from superset.utils.core import override_user +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) + +import pytest +from sqlalchemy.engine.url import make_url +from sqlalchemy.types import DateTime + +import tests.integration_tests.test_app +from superset import app, db as metadata_db +from superset.db_engine_specs.postgres import PostgresEngineSpec +from superset.common.db_query_status import QueryStatus +from superset.models.core import Database +from superset.models.slice import Slice +from superset.utils.database import get_example_database + +from .base_tests import SupersetTestCase +from .fixtures.energy_dashboard import ( + load_energy_table_with_slice, + load_energy_table_data, +) + + +class TestDatabaseModel(SupersetTestCase): + @unittest.skipUnless( + SupersetTestCase.is_module_installed("requests"), "requests not installed" + ) + def test_database_schema_presto(self): + sqlalchemy_uri = "presto://presto.airbnb.io:8080/hive/default" + model = Database(database_name="test_database", sqlalchemy_uri=sqlalchemy_uri) + + with model.get_sqla_engine_with_context() as engine: + db = make_url(engine.url).database + self.assertEqual("hive/default", db) + + with model.get_sqla_engine_with_context(schema="core_db") as engine: + db = make_url(engine.url).database + self.assertEqual("hive/core_db", db) + + sqlalchemy_uri = "presto://presto.airbnb.io:8080/hive" + model = Database(database_name="test_database", sqlalchemy_uri=sqlalchemy_uri) + + with model.get_sqla_engine_with_context() as engine: + db = make_url(engine.url).database + self.assertEqual("hive", db) + + with model.get_sqla_engine_with_context(schema="core_db") as engine: + db = make_url(engine.url).database + self.assertEqual("hive/core_db", db) + + def test_database_schema_postgres(self): + sqlalchemy_uri = "postgresql+psycopg2://postgres.airbnb.io:5439/prod" + model = Database(database_name="test_database", sqlalchemy_uri=sqlalchemy_uri) + + with model.get_sqla_engine_with_context() as engine: + db = make_url(engine.url).database + self.assertEqual("prod", db) + + with model.get_sqla_engine_with_context(schema="foo") as engine: + db = make_url(engine.url).database + self.assertEqual("prod", db) + + @unittest.skipUnless( + SupersetTestCase.is_module_installed("thrift"), "thrift not installed" + ) + @unittest.skipUnless( + SupersetTestCase.is_module_installed("pyhive"), "pyhive not installed" + ) + def test_database_schema_hive(self): + sqlalchemy_uri = "hive://hive@hive.airbnb.io:10000/default?auth=NOSASL" + model = Database(database_name="test_database", sqlalchemy_uri=sqlalchemy_uri) + + with model.get_sqla_engine_with_context() as engine: + db = make_url(engine.url).database + self.assertEqual("default", db) + + with model.get_sqla_engine_with_context(schema="core_db") as engine: + db = make_url(engine.url).database + self.assertEqual("core_db", db) + + @unittest.skipUnless( + SupersetTestCase.is_module_installed("MySQLdb"), "mysqlclient not installed" + ) + def test_database_schema_mysql(self): + sqlalchemy_uri = "mysql://root@localhost/superset" + model = Database(database_name="test_database", sqlalchemy_uri=sqlalchemy_uri) + + with 
model.get_sqla_engine_with_context() as engine: + db = make_url(engine.url).database + self.assertEqual("superset", db) + + with model.get_sqla_engine_with_context(schema="staging") as engine: + db = make_url(engine.url).database + self.assertEqual("staging", db) + + @unittest.skipUnless( + SupersetTestCase.is_module_installed("MySQLdb"), "mysqlclient not installed" + ) + def test_database_impersonate_user(self): + uri = "mysql://root@localhost" + example_user = security_manager.find_user(username="gamma") + model = Database(database_name="test_database", sqlalchemy_uri=uri) + + with override_user(example_user): + model.impersonate_user = True + with model.get_sqla_engine_with_context() as engine: + username = make_url(engine.url).username + self.assertEqual(example_user.username, username) + + model.impersonate_user = False + with model.get_sqla_engine_with_context() as engine: + username = make_url(engine.url).username + self.assertNotEqual(example_user.username, username) + + @mock.patch("superset.models.core.create_engine") + def test_impersonate_user_presto(self, mocked_create_engine): + uri = "presto://localhost" + principal_user = security_manager.find_user(username="gamma") + extra = """ + { + "metadata_params": {}, + "engine_params": { + "connect_args":{ + "protocol": "https", + "username":"original_user", + "password":"original_user_password" + } + }, + "metadata_cache_timeout": {}, + "schemas_allowed_for_file_upload": [] + } + """ + + with override_user(principal_user): + model = Database( + database_name="test_database", sqlalchemy_uri=uri, extra=extra + ) + model.impersonate_user = True + model._get_sqla_engine() + call_args = mocked_create_engine.call_args + + assert str(call_args[0][0]) == "presto://gamma@localhost" + + assert call_args[1]["connect_args"] == { + "protocol": "https", + "username": "original_user", + "password": "original_user_password", + "principal_username": "gamma", + } + + model.impersonate_user = False + model._get_sqla_engine() + call_args = mocked_create_engine.call_args + + assert str(call_args[0][0]) == "presto://localhost" + + assert call_args[1]["connect_args"] == { + "protocol": "https", + "username": "original_user", + "password": "original_user_password", + } + + @mock.patch("superset.models.core.create_engine") + def test_impersonate_user_trino(self, mocked_create_engine): + principal_user = security_manager.find_user(username="gamma") + + with override_user(principal_user): + model = Database( + database_name="test_database", sqlalchemy_uri="trino://localhost" + ) + model.impersonate_user = True + model._get_sqla_engine() + call_args = mocked_create_engine.call_args + + assert str(call_args[0][0]) == "trino://localhost" + assert call_args[1]["connect_args"]["user"] == "gamma" + + model = Database( + database_name="test_database", + sqlalchemy_uri="trino://original_user:original_user_password@localhost", + ) + + model.impersonate_user = True + model._get_sqla_engine() + call_args = mocked_create_engine.call_args + + assert ( + str(call_args[0][0]) + == "trino://original_user:original_user_password@localhost" + ) + assert call_args[1]["connect_args"]["user"] == "gamma" + + @mock.patch("superset.models.core.create_engine") + def test_impersonate_user_hive(self, mocked_create_engine): + uri = "hive://localhost" + principal_user = security_manager.find_user(username="gamma") + extra = """ + { + "metadata_params": {}, + "engine_params": { + "connect_args":{ + "protocol": "https", + "username":"original_user", + "password":"original_user_password" + 
} + }, + "metadata_cache_timeout": {}, + "schemas_allowed_for_file_upload": [] + } + """ + + with override_user(principal_user): + model = Database( + database_name="test_database", sqlalchemy_uri=uri, extra=extra + ) + model.impersonate_user = True + model._get_sqla_engine() + call_args = mocked_create_engine.call_args + + assert str(call_args[0][0]) == "hive://localhost" + + assert call_args[1]["connect_args"] == { + "protocol": "https", + "username": "original_user", + "password": "original_user_password", + "configuration": {"hive.server2.proxy.user": "gamma"}, + } + + model.impersonate_user = False + model._get_sqla_engine() + call_args = mocked_create_engine.call_args + + assert str(call_args[0][0]) == "hive://localhost" + + assert call_args[1]["connect_args"] == { + "protocol": "https", + "username": "original_user", + "password": "original_user_password", + } + + @pytest.mark.usefixtures("load_energy_table_with_slice") + def test_select_star(self): + db = get_example_database() + table_name = "energy_usage" + sql = db.select_star(table_name, show_cols=False, latest_partition=False) + quote = db.inspector.engine.dialect.identifier_preparer.quote_identifier + expected = ( + textwrap.dedent( + f"""\ + SELECT * + FROM {quote(table_name)} + LIMIT 100""" + ) + if db.backend in {"presto", "hive"} + else textwrap.dedent( + f"""\ + SELECT * + FROM {table_name} + LIMIT 100""" + ) + ) + assert expected in sql + sql = db.select_star(table_name, show_cols=True, latest_partition=False) + # TODO(bkyryliuk): unify sql generation + if db.backend == "presto": + assert ( + textwrap.dedent( + """\ + SELECT "source" AS "source", + "target" AS "target", + "value" AS "value" + FROM "energy_usage" + LIMIT 100""" + ) + == sql + ) + elif db.backend == "hive": + assert ( + textwrap.dedent( + """\ + SELECT `source`, + `target`, + `value` + FROM `energy_usage` + LIMIT 100""" + ) + == sql + ) + else: + assert ( + textwrap.dedent( + """\ + SELECT source, + target, + value + FROM energy_usage + LIMIT 100""" + ) + in sql + ) + + def test_select_star_fully_qualified_names(self): + db = get_example_database() + schema = "schema.name" + table_name = "table/name" + sql = db.select_star( + table_name, schema=schema, show_cols=False, latest_partition=False + ) + fully_qualified_names = { + "sqlite": '"schema.name"."table/name"', + "mysql": "`schema.name`.`table/name`", + "postgres": '"schema.name"."table/name"', + } + fully_qualified_name = fully_qualified_names.get(db.db_engine_spec.engine) + if fully_qualified_name: + expected = textwrap.dedent( + f"""\ + SELECT * + FROM {fully_qualified_name} + LIMIT 100""" + ) + assert sql.startswith(expected) + + def test_single_statement(self): + main_db = get_example_database() + + if main_db.backend == "mysql": + df = main_db.get_df("SELECT 1", None) + self.assertEqual(df.iat[0, 0], 1) + + df = main_db.get_df("SELECT 1;", None) + self.assertEqual(df.iat[0, 0], 1) + + def test_multi_statement(self): + main_db = get_example_database() + + if main_db.backend == "mysql": + df = main_db.get_df("USE superset; SELECT 1", None) + self.assertEqual(df.iat[0, 0], 1) + + df = main_db.get_df("USE superset; SELECT ';';", None) + self.assertEqual(df.iat[0, 0], ";") + + @mock.patch("superset.models.core.create_engine") + def test_get_sqla_engine(self, mocked_create_engine): + model = Database( + database_name="test_database", + sqlalchemy_uri="mysql://root@localhost", + ) + model.db_engine_spec.get_dbapi_exception_mapping = mock.Mock( + return_value={Exception: SupersetException} + ) + 
mocked_create_engine.side_effect = Exception() + with self.assertRaises(SupersetException): + model._get_sqla_engine() + + +class TestSqlaTableModel(SupersetTestCase): + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_get_timestamp_expression(self): + tbl = self.get_table(name="birth_names") + ds_col = tbl.get_column("ds") + sqla_literal = ds_col.get_timestamp_expression(None) + assert str(sqla_literal.compile()) == "ds" + + sqla_literal = ds_col.get_timestamp_expression("P1D") + compiled = "{}".format(sqla_literal.compile()) + if tbl.database.backend == "mysql": + assert compiled == "DATE(ds)" + + prev_ds_expr = ds_col.expression + ds_col.expression = "DATE_ADD(ds, 1)" + sqla_literal = ds_col.get_timestamp_expression("P1D") + compiled = "{}".format(sqla_literal.compile()) + if tbl.database.backend == "mysql": + assert compiled == "DATE(DATE_ADD(ds, 1))" + ds_col.expression = prev_ds_expr + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_get_timestamp_expression_epoch(self): + tbl = self.get_table(name="birth_names") + ds_col = tbl.get_column("ds") + + ds_col.expression = None + ds_col.python_date_format = "epoch_s" + sqla_literal = ds_col.get_timestamp_expression(None) + compiled = "{}".format(sqla_literal.compile()) + if tbl.database.backend == "mysql": + self.assertEqual(compiled, "from_unixtime(ds)") + + ds_col.python_date_format = "epoch_s" + sqla_literal = ds_col.get_timestamp_expression("P1D") + compiled = "{}".format(sqla_literal.compile()) + if tbl.database.backend == "mysql": + self.assertEqual(compiled, "DATE(from_unixtime(ds))") + + prev_ds_expr = ds_col.expression + ds_col.expression = "DATE_ADD(ds, 1)" + sqla_literal = ds_col.get_timestamp_expression("P1D") + compiled = "{}".format(sqla_literal.compile()) + if tbl.database.backend == "mysql": + self.assertEqual(compiled, "DATE(from_unixtime(DATE_ADD(ds, 1)))") + ds_col.expression = prev_ds_expr + + def query_with_expr_helper(self, is_timeseries, inner_join=True): + tbl = self.get_table(name="birth_names") + ds_col = tbl.get_column("ds") + ds_col.expression = None + ds_col.python_date_format = None + spec = self.get_database_by_id(tbl.database_id).db_engine_spec + if not spec.allows_joins and inner_join: + # if the db does not support inner joins, we cannot force it so + return None + old_inner_join = spec.allows_joins + spec.allows_joins = inner_join + arbitrary_gby = "state || gender || '_test'" + arbitrary_metric = dict( + label="arbitrary", expressionType="SQL", sqlExpression="SUM(num_boys)" + ) + query_obj = dict( + groupby=[arbitrary_gby, "name"], + metrics=[arbitrary_metric], + filter=[], + is_timeseries=is_timeseries, + columns=[], + granularity="ds", + from_dttm=None, + to_dttm=None, + extras=dict(time_grain_sqla="P1Y"), + series_limit=15 if inner_join and is_timeseries else None, + ) + qr = tbl.query(query_obj) + self.assertEqual(qr.status, QueryStatus.SUCCESS) + sql = qr.query + self.assertIn(arbitrary_gby, sql) + self.assertIn("name", sql) + if inner_join and is_timeseries: + self.assertIn("JOIN", sql.upper()) + else: + self.assertNotIn("JOIN", sql.upper()) + spec.allows_joins = old_inner_join + self.assertFalse(qr.df.empty) + return qr.df + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_query_with_expr_groupby_timeseries(self): + if get_example_database().backend == "presto": + # TODO(bkyryliuk): make it work for presto. 
+ return + + def canonicalize_df(df): + ret = df.sort_values(by=list(df.columns.values), inplace=False) + ret.reset_index(inplace=True, drop=True) + return ret + + df1 = self.query_with_expr_helper(is_timeseries=True, inner_join=True) + name_list1 = canonicalize_df(df1).name.values.tolist() + df2 = self.query_with_expr_helper(is_timeseries=True, inner_join=False) + name_list2 = canonicalize_df(df2).name.values.tolist() + self.assertFalse(df2.empty) + + assert name_list2 == name_list1 + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_query_with_expr_groupby(self): + self.query_with_expr_helper(is_timeseries=False) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_sql_mutator(self): + tbl = self.get_table(name="birth_names") + query_obj = dict( + groupby=[], + metrics=None, + filter=[], + is_timeseries=False, + columns=["name"], + granularity=None, + from_dttm=None, + to_dttm=None, + extras={}, + ) + sql = tbl.get_query_str(query_obj) + self.assertNotIn("-- COMMENT", sql) + + def mutator(*args, **kwargs): + return "-- COMMENT\n" + args[0] + + app.config["SQL_QUERY_MUTATOR"] = mutator + sql = tbl.get_query_str(query_obj) + self.assertIn("-- COMMENT", sql) + + app.config["SQL_QUERY_MUTATOR"] = None + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_sql_mutator_different_params(self): + tbl = self.get_table(name="birth_names") + query_obj = dict( + groupby=[], + metrics=None, + filter=[], + is_timeseries=False, + columns=["name"], + granularity=None, + from_dttm=None, + to_dttm=None, + extras={}, + ) + sql = tbl.get_query_str(query_obj) + self.assertNotIn("-- COMMENT", sql) + + def mutator(sql, database=None, **kwargs): + return "-- COMMENT\n--" + "\n" + str(database) + "\n" + sql + + app.config["SQL_QUERY_MUTATOR"] = mutator + mutated_sql = tbl.get_query_str(query_obj) + self.assertIn("-- COMMENT", mutated_sql) + self.assertIn(tbl.database.name, mutated_sql) + + app.config["SQL_QUERY_MUTATOR"] = None + + def test_query_with_non_existent_metrics(self): + tbl = self.get_table(name="birth_names") + + query_obj = dict( + groupby=[], + metrics=["invalid"], + filter=[], + is_timeseries=False, + columns=["name"], + granularity=None, + from_dttm=None, + to_dttm=None, + extras={}, + ) + + with self.assertRaises(Exception) as context: + tbl.get_query_str(query_obj) + + self.assertIn("Metric 'invalid' does not exist", str(context.exception)) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_data_for_slices_with_no_query_context(self): + tbl = self.get_table(name="birth_names") + slc = ( + metadata_db.session.query(Slice) + .filter_by( + datasource_id=tbl.id, + datasource_type=tbl.type, + slice_name="Genders", + ) + .first() + ) + data_for_slices = tbl.data_for_slices([slc]) + assert len(data_for_slices["metrics"]) == 1 + assert len(data_for_slices["columns"]) == 1 + assert data_for_slices["metrics"][0]["metric_name"] == "sum__num" + assert data_for_slices["columns"][0]["column_name"] == "gender" + assert set(data_for_slices["verbose_map"].keys()) == { + "__timestamp", + "sum__num", + "gender", + } + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_data_for_slices_with_query_context(self): + tbl = self.get_table(name="birth_names") + slc = ( + metadata_db.session.query(Slice) + .filter_by( + datasource_id=tbl.id, + datasource_type=tbl.type, + slice_name="Pivot Table v2", + ) + .first() + ) + data_for_slices = tbl.data_for_slices([slc]) + assert
len(data_for_slices["metrics"]) == 1 + assert len(data_for_slices["columns"]) == 2 + assert data_for_slices["metrics"][0]["metric_name"] == "sum__num" + column_names = [col["column_name"] for col in data_for_slices["columns"]] + assert "name" in column_names + assert "state" in column_names + assert set(data_for_slices["verbose_map"].keys()) == { + "__timestamp", + "sum__num", + "name", + "state", + } + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_data_for_slices_with_adhoc_column(self): + # should perform sqla.model.BaseDatasource.data_for_slices() with adhoc + # column and legacy chart + tbl = self.get_table(name="birth_names") + dashboard = self.get_dash_by_slug("births") + slc = Slice( + slice_name="slice with adhoc column", + datasource_type=DatasourceType.TABLE, + viz_type="table", + params=json.dumps( + { + "adhoc_filters": [], + "granularity_sqla": "ds", + "groupby": [ + "name", + {"label": "adhoc_column", "sqlExpression": "name"}, + ], + "metrics": ["sum__num"], + "time_range": "No filter", + "viz_type": "table", + } + ), + datasource_id=tbl.id, + ) + dashboard.slices.append(slc) + datasource_info = slc.datasource.data_for_slices([slc]) + assert "database" in datasource_info + + # clean up and auto commit + metadata_db.session.delete(slc) diff --git a/tests/integration_tests/queries/__init__.py b/tests/integration_tests/queries/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/queries/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/queries/api_tests.py b/tests/integration_tests/queries/api_tests.py new file mode 100644 index 0000000000000..7abcb31df1eff --- /dev/null +++ b/tests/integration_tests/queries/api_tests.py @@ -0,0 +1,505 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# isort:skip_file +"""Unit tests for Superset""" +from datetime import datetime, timedelta +from unittest import mock +import json +import random +import string + +import pytest +import prison +from sqlalchemy.sql import func + +import tests.integration_tests.test_app +from superset import db, security_manager +from superset.common.db_query_status import QueryStatus +from superset.models.core import Database +from superset.utils.database import get_example_database, get_main_database +from superset.models.sql_lab import Query + +from tests.integration_tests.base_tests import SupersetTestCase + +QUERIES_FIXTURE_COUNT = 10 + + +class TestQueryApi(SupersetTestCase): + def insert_query( + self, + database_id: int, + user_id: int, + client_id: str, + sql: str = "", + select_sql: str = "", + executed_sql: str = "", + limit: int = 100, + progress: int = 100, + rows: int = 100, + tab_name: str = "", + status: str = "success", + changed_on: datetime = datetime(2020, 1, 1), + ) -> Query: + database = db.session.query(Database).get(database_id) + user = db.session.query(security_manager.user_model).get(user_id) + query = Query( + database=database, + user=user, + client_id=client_id, + sql=sql, + select_sql=select_sql, + executed_sql=executed_sql, + limit=limit, + progress=progress, + rows=rows, + tab_name=tab_name, + status=status, + changed_on=changed_on, + ) + db.session.add(query) + db.session.commit() + return query + + @pytest.fixture() + def create_queries(self): + with self.create_app().app_context(): + queries = [] + admin_id = self.get_user("admin").id + alpha_id = self.get_user("alpha").id + example_database_id = get_example_database().id + main_database_id = get_main_database().id + for cx in range(QUERIES_FIXTURE_COUNT - 1): + queries.append( + self.insert_query( + example_database_id, + admin_id, + self.get_random_string(), + sql=f"SELECT col1, col2 from table{cx}", + rows=cx, + status=QueryStatus.SUCCESS + if (cx % 2) == 0 + else QueryStatus.RUNNING, + ) + ) + queries.append( + self.insert_query( + main_database_id, + alpha_id, + self.get_random_string(), + sql=f"SELECT col1, col2 from table{QUERIES_FIXTURE_COUNT}", + rows=QUERIES_FIXTURE_COUNT, + status=QueryStatus.SUCCESS, + ) + ) + + yield queries + + # rollback changes + for query in queries: + db.session.delete(query) + db.session.commit() + + @staticmethod + def get_random_string(length: int = 10): + letters = string.ascii_letters + return "".join(random.choice(letters) for i in range(length)) + + def test_get_query(self): + """ + Query API: Test get query + """ + admin = self.get_user("admin") + client_id = self.get_random_string() + example_db = get_example_database() + query = self.insert_query( + example_db.id, + admin.id, + client_id, + sql="SELECT col1, col2 from table1", + select_sql="SELECT col1, col2 from table1", + executed_sql="SELECT col1, col2 from table1 LIMIT 100", + ) + self.login(username="admin") + uri = f"api/v1/query/{query.id}" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 200) + + expected_result = { + "database": {"id": example_db.id}, + "client_id": client_id, + "end_result_backend_time": None, + "error_message": None, + "executed_sql": "SELECT col1, col2 from table1 LIMIT 100", + "limit": 100, + "progress": 100, + "results_key": None, + "rows": 100, + "schema": None, + "select_as_cta": None, + "select_as_cta_used": False, + "select_sql": "SELECT col1, col2 from table1", + "sql": "SELECT col1, col2 from table1", + "sql_editor_id": None, + "status": "success", + "tab_name": "", + 
"tmp_schema_name": None, + "tmp_table_name": None, + "tracking_url": None, + } + data = json.loads(rv.data.decode("utf-8")) + self.assertIn("changed_on", data["result"]) + for key, value in data["result"].items(): + # We can't assert timestamp + if key not in ( + "changed_on", + "end_time", + "start_running_time", + "start_time", + "id", + ): + self.assertEqual(value, expected_result[key]) + # rollback changes + db.session.delete(query) + db.session.commit() + + def test_get_query_not_found(self): + """ + Query API: Test get query not found + """ + admin = self.get_user("admin") + client_id = self.get_random_string() + query = self.insert_query(get_example_database().id, admin.id, client_id) + max_id = db.session.query(func.max(Query.id)).scalar() + self.login(username="admin") + uri = f"api/v1/query/{max_id + 1}" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 404) + + db.session.delete(query) + db.session.commit() + + def test_get_query_no_data_access(self): + """ + Query API: Test get query without data access + """ + gamma1 = self.create_user( + "gamma_1", "password", "Gamma", email="gamma1@superset.org" + ) + gamma2 = self.create_user( + "gamma_2", "password", "Gamma", email="gamma2@superset.org" + ) + # Add SQLLab role to these gamma users, so they have access to queries + sqllab_role = self.get_role("sql_lab") + gamma1.roles.append(sqllab_role) + gamma2.roles.append(sqllab_role) + + gamma1_client_id = self.get_random_string() + gamma2_client_id = self.get_random_string() + query_gamma1 = self.insert_query( + get_example_database().id, gamma1.id, gamma1_client_id + ) + query_gamma2 = self.insert_query( + get_example_database().id, gamma2.id, gamma2_client_id + ) + + # Gamma1 user, only sees their own queries + self.login(username="gamma_1", password="password") + uri = f"api/v1/query/{query_gamma2.id}" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 404) + uri = f"api/v1/query/{query_gamma1.id}" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 200) + + # Gamma2 user, only sees their own queries + self.logout() + self.login(username="gamma_2", password="password") + uri = f"api/v1/query/{query_gamma1.id}" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 404) + uri = f"api/v1/query/{query_gamma2.id}" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 200) + + # Admin's have the "all query access" permission + self.logout() + self.login(username="admin") + uri = f"api/v1/query/{query_gamma1.id}" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 200) + uri = f"api/v1/query/{query_gamma2.id}" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 200) + + # rollback changes + db.session.delete(query_gamma1) + db.session.delete(query_gamma2) + db.session.delete(gamma1) + db.session.delete(gamma2) + db.session.commit() + + @pytest.mark.usefixtures("create_queries") + def test_get_list_query(self): + """ + Query API: Test get list query + """ + self.login(username="admin") + uri = "api/v1/query/" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == QUERIES_FIXTURE_COUNT + # check expected columns + assert sorted(list(data["result"][0].keys())) == [ + "changed_on", + "database", + "end_time", + "executed_sql", + "id", + "rows", + "schema", + "sql", + "sql_tables", + "start_time", + "status", + "tab_name", + "tmp_table_name", + "tracking_url", + "user", + ] + assert sorted(list(data["result"][0]["user"].keys())) == [ + 
"first_name", + "id", + "last_name", + "username", + ] + assert list(data["result"][0]["database"].keys()) == [ + "database_name", + ] + + @pytest.mark.usefixtures("create_queries") + def test_get_list_query_filter_sql(self): + """ + Query API: Test get list query filter + """ + self.login(username="admin") + arguments = {"filters": [{"col": "sql", "opr": "ct", "value": "table2"}]} + uri = f"api/v1/query/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == 1 + + @pytest.mark.usefixtures("create_queries") + def test_get_list_query_filter_database(self): + """ + Query API: Test get list query filter database + """ + self.login(username="admin") + database_id = get_main_database().id + arguments = { + "filters": [{"col": "database", "opr": "rel_o_m", "value": database_id}] + } + uri = f"api/v1/query/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == 1 + + @pytest.mark.usefixtures("create_queries") + def test_get_list_query_filter_user(self): + """ + Query API: Test get list query filter user + """ + self.login(username="admin") + alpha_id = self.get_user("alpha").id + arguments = {"filters": [{"col": "user", "opr": "rel_o_m", "value": alpha_id}]} + uri = f"api/v1/query/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == 1 + + @pytest.mark.usefixtures("create_queries") + def test_get_list_query_filter_changed_on(self): + """ + Query API: Test get list query filter changed_on + """ + self.login(username="admin") + arguments = { + "filters": [ + {"col": "changed_on", "opr": "lt", "value": "2020-02-01T00:00:00Z"}, + {"col": "changed_on", "opr": "gt", "value": "2019-12-30T00:00:00Z"}, + ] + } + uri = f"api/v1/query/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == QUERIES_FIXTURE_COUNT + + @pytest.mark.usefixtures("create_queries") + def test_get_list_query_order(self): + """ + Query API: Test get list query filter changed_on + """ + self.login(username="admin") + order_columns = [ + "changed_on", + "database.database_name", + "rows", + "schema", + "sql", + "tab_name", + "user.first_name", + ] + + for order_column in order_columns: + arguments = {"order_column": order_column, "order_direction": "asc"} + uri = f"api/v1/query/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + assert rv.status_code == 200 + + def test_get_list_query_no_data_access(self): + """ + Query API: Test get queries no data access + """ + admin = self.get_user("admin") + client_id = self.get_random_string() + query = self.insert_query( + get_example_database().id, + admin.id, + client_id, + sql="SELECT col1, col2 from table1", + ) + + self.login(username="gamma_sqllab") + arguments = {"filters": [{"col": "sql", "opr": "sw", "value": "SELECT col1"}]} + uri = f"api/v1/query/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == 0 + + # rollback changes + db.session.delete(query) + db.session.commit() + + def test_get_updated_since(self): + """ + Query API: Test get queries updated since timestamp + """ + now = datetime.utcnow() + client_id = self.get_random_string() + + admin = 
self.get_user("admin") + example_db = get_example_database() + + old_query = self.insert_query( + example_db.id, + admin.id, + self.get_random_string(), + sql="SELECT col1, col2 from table1", + select_sql="SELECT col1, col2 from table1", + executed_sql="SELECT col1, col2 from table1 LIMIT 100", + changed_on=now - timedelta(days=3), + ) + updated_query = self.insert_query( + example_db.id, + admin.id, + client_id, + sql="SELECT col1, col2 from table1", + select_sql="SELECT col1, col2 from table1", + executed_sql="SELECT col1, col2 from table1 LIMIT 100", + changed_on=now - timedelta(days=1), + ) + + self.login(username="admin") + timestamp = datetime.timestamp(now - timedelta(days=2)) * 1000 + uri = f"api/v1/query/updated_since?q={prison.dumps({'last_updated_ms': timestamp})}" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 200) + + expected_result = updated_query.to_dict() + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual(len(data["result"]), 1) + for key, value in data["result"][0].items(): + # We can't assert timestamp + if key not in ( + "changedOn", + "changed_on", + "end_time", + "start_running_time", + "start_time", + "id", + ): + self.assertEqual(value, expected_result[key]) + # rollback changes + db.session.delete(old_query) + db.session.delete(updated_query) + db.session.commit() + + @mock.patch("superset.sql_lab.cancel_query") + @mock.patch("superset.views.core.db.session") + def test_stop_query_not_found( + self, mock_superset_db_session, mock_sql_lab_cancel_query + ): + """ + Handles stop query when the DB engine spec does not + have a cancel query method (with invalid client_id). + """ + form_data = {"client_id": "foo2"} + query_mock = mock.Mock() + query_mock.return_value = None + self.login(username="admin") + mock_superset_db_session.query().filter_by().one_or_none = query_mock + mock_sql_lab_cancel_query.return_value = True + rv = self.client.post( + "/api/v1/query/stop", + data=json.dumps(form_data), + content_type="application/json", + ) + + assert rv.status_code == 404 + data = json.loads(rv.data.decode("utf-8")) + assert data["message"] == "Query with client_id foo2 not found" + + @mock.patch("superset.sql_lab.cancel_query") + @mock.patch("superset.views.core.db.session") + def test_stop_query(self, mock_superset_db_session, mock_sql_lab_cancel_query): + """ + Handles stop query when the DB engine spec does not + have a cancel query method. + """ + form_data = {"client_id": "foo"} + query_mock = mock.Mock() + query_mock.client_id = "foo" + query_mock.status = QueryStatus.RUNNING + self.login(username="admin") + mock_superset_db_session.query().filter_by().one_or_none().return_value = ( + query_mock + ) + mock_sql_lab_cancel_query.return_value = True + rv = self.client.post( + "/api/v1/query/stop", + data=json.dumps(form_data), + content_type="application/json", + ) + + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["result"] == "OK" diff --git a/tests/integration_tests/queries/saved_queries/__init__.py b/tests/integration_tests/queries/saved_queries/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/queries/saved_queries/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/queries/saved_queries/api_tests.py b/tests/integration_tests/queries/saved_queries/api_tests.py new file mode 100644 index 0000000000000..09929e4d231bd --- /dev/null +++ b/tests/integration_tests/queries/saved_queries/api_tests.py @@ -0,0 +1,804 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# isort:skip_file +"""Unit tests for Superset""" +import json +from io import BytesIO +from typing import Optional +from zipfile import is_zipfile, ZipFile + +import yaml +import pytest +import prison +from sqlalchemy.sql import func, and_ + +import tests.integration_tests.test_app +from superset import db +from superset.models.core import Database +from superset.models.core import FavStar +from superset.models.sql_lab import SavedQuery +from superset.utils.database import get_example_database + +from tests.integration_tests.base_tests import SupersetTestCase +from tests.integration_tests.fixtures.importexport import ( + database_config, + saved_queries_config, + saved_queries_metadata_config, +) + + +SAVED_QUERIES_FIXTURE_COUNT = 10 + + +class TestSavedQueryApi(SupersetTestCase): + def insert_saved_query( + self, + label: str, + sql: str, + db_id: Optional[int] = None, + created_by=None, + schema: Optional[str] = "", + description: Optional[str] = "", + ) -> SavedQuery: + database = None + if db_id: + database = db.session.query(Database).get(db_id) + query = SavedQuery( + database=database, + created_by=created_by, + sql=sql, + label=label, + schema=schema, + description=description, + ) + db.session.add(query) + db.session.commit() + return query + + def insert_default_saved_query( + self, label: str = "saved1", schema: str = "schema1", username: str = "admin" + ) -> SavedQuery: + admin = self.get_user(username) + example_db = get_example_database() + return self.insert_saved_query( + label, + "SELECT col1, col2 from table1", + db_id=example_db.id, + created_by=admin, + schema=schema, + description="cool description", + ) + + @pytest.fixture() + def create_saved_queries(self): + with self.create_app().app_context(): + saved_queries = [] + admin = self.get_user("admin") + for cx in range(SAVED_QUERIES_FIXTURE_COUNT - 1): + saved_queries.append( + 
self.insert_default_saved_query( + label=f"label{cx}", schema=f"schema{cx}" + ) + ) + saved_queries.append( + self.insert_default_saved_query( + label=f"label{SAVED_QUERIES_FIXTURE_COUNT}", + schema=f"schema{SAVED_QUERIES_FIXTURE_COUNT}", + username="gamma_sqllab", + ) + ) + + fav_saved_queries = [] + for cx in range(round(SAVED_QUERIES_FIXTURE_COUNT / 2)): + fav_star = FavStar( + user_id=admin.id, class_name="query", obj_id=saved_queries[cx].id + ) + db.session.add(fav_star) + db.session.commit() + fav_saved_queries.append(fav_star) + + yield saved_queries + + # rollback changes + for saved_query in saved_queries: + db.session.delete(saved_query) + for fav_saved_query in fav_saved_queries: + db.session.delete(fav_saved_query) + db.session.commit() + + @pytest.mark.usefixtures("create_saved_queries") + def test_get_list_saved_query(self): + """ + Saved Query API: Test get list saved query + """ + admin = self.get_user("admin") + saved_queries = ( + db.session.query(SavedQuery).filter(SavedQuery.created_by == admin).all() + ) + + self.login(username="admin") + uri = f"api/v1/saved_query/" + rv = self.get_assert_metric(uri, "get_list") + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == len(saved_queries) + expected_columns = [ + "changed_on_delta_humanized", + "created_on", + "created_by", + "database", + "db_id", + "description", + "id", + "label", + "schema", + "sql", + "sql_tables", + ] + for expected_column in expected_columns: + assert expected_column in data["result"][0] + + @pytest.mark.usefixtures("create_saved_queries") + def test_get_list_saved_query_gamma(self): + """ + Saved Query API: Test get list saved query + """ + user = self.get_user("gamma_sqllab") + saved_queries = ( + db.session.query(SavedQuery).filter(SavedQuery.created_by == user).all() + ) + + self.login(username=user.username) + uri = f"api/v1/saved_query/" + rv = self.get_assert_metric(uri, "get_list") + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == len(saved_queries) + + @pytest.mark.usefixtures("create_saved_queries") + def test_get_list_sort_saved_query(self): + """ + Saved Query API: Test get list and sort saved query + """ + admin = self.get_user("admin") + saved_queries = ( + db.session.query(SavedQuery) + .filter(SavedQuery.created_by == admin) + .order_by(SavedQuery.schema.asc()) + ).all() + self.login(username="admin") + query_string = {"order_column": "schema", "order_direction": "asc"} + uri = f"api/v1/saved_query/?q={prison.dumps(query_string)}" + rv = self.get_assert_metric(uri, "get_list") + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == len(saved_queries) + for i, query in enumerate(saved_queries): + assert query.schema == data["result"][i]["schema"] + + query_string = { + "order_column": "database.database_name", + "order_direction": "asc", + } + uri = f"api/v1/saved_query/?q={prison.dumps(query_string)}" + rv = self.get_assert_metric(uri, "get_list") + assert rv.status_code == 200 + + query_string = { + "order_column": "created_by.first_name", + "order_direction": "asc", + } + uri = f"api/v1/saved_query/?q={prison.dumps(query_string)}" + rv = self.get_assert_metric(uri, "get_list") + assert rv.status_code == 200 + + @pytest.mark.usefixtures("create_saved_queries") + def test_get_list_filter_saved_query(self): + """ + Saved Query API: Test get list and filter saved query + """ + all_queries = ( + 
db.session.query(SavedQuery).filter(SavedQuery.label.ilike("%2%")).all() + ) + self.login(username="admin") + query_string = { + "filters": [{"col": "label", "opr": "ct", "value": "2"}], + } + uri = f"api/v1/saved_query/?q={prison.dumps(query_string)}" + rv = self.get_assert_metric(uri, "get_list") + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == len(all_queries) + + @pytest.mark.usefixtures("create_saved_queries") + def test_get_list_filter_database_saved_query(self): + """ + Saved Query API: Test get list and database saved query + """ + example_db = get_example_database() + admin_user = self.get_user("admin") + + all_db_queries = ( + db.session.query(SavedQuery) + .filter(SavedQuery.db_id == example_db.id) + .filter(SavedQuery.created_by_fk == admin_user.id) + .all() + ) + + self.login(username="admin") + query_string = { + "filters": [{"col": "database", "opr": "rel_o_m", "value": example_db.id}], + } + uri = f"api/v1/saved_query/?q={prison.dumps(query_string)}" + rv = self.get_assert_metric(uri, "get_list") + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == len(all_db_queries) + + @pytest.mark.usefixtures("create_saved_queries") + def test_get_list_filter_schema_saved_query(self): + """ + Saved Query API: Test get list and schema saved query + """ + schema_name = "schema1" + admin_user = self.get_user("admin") + + all_db_queries = ( + db.session.query(SavedQuery) + .filter(SavedQuery.schema == schema_name) + .filter(SavedQuery.created_by_fk == admin_user.id) + .all() + ) + + self.login(username="admin") + query_string = { + "filters": [{"col": "schema", "opr": "eq", "value": schema_name}], + } + uri = f"api/v1/saved_query/?q={prison.dumps(query_string)}" + rv = self.get_assert_metric(uri, "get_list") + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == len(all_db_queries) + + @pytest.mark.usefixtures("create_saved_queries") + def test_get_list_custom_filter_schema_saved_query(self): + """ + Saved Query API: Test get list and custom filter (schema) saved query + """ + self.login(username="admin") + admin = self.get_user("admin") + + all_queries = ( + db.session.query(SavedQuery) + .filter(SavedQuery.created_by == admin) + .filter(SavedQuery.schema.ilike("%2%")) + .all() + ) + query_string = { + "filters": [{"col": "label", "opr": "all_text", "value": "schema2"}], + } + uri = f"api/v1/saved_query/?q={prison.dumps(query_string)}" + rv = self.get_assert_metric(uri, "get_list") + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == len(all_queries) + + @pytest.mark.usefixtures("create_saved_queries") + def test_get_list_custom_filter_label_saved_query(self): + """ + Saved Query API: Test get list and custom filter (label) saved query + """ + self.login(username="admin") + admin = self.get_user("admin") + all_queries = ( + db.session.query(SavedQuery) + .filter(SavedQuery.created_by == admin) + .filter(SavedQuery.label.ilike("%3%")) + .all() + ) + query_string = { + "filters": [{"col": "label", "opr": "all_text", "value": "label3"}], + } + uri = f"api/v1/saved_query/?q={prison.dumps(query_string)}" + rv = self.get_assert_metric(uri, "get_list") + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == len(all_queries) + + @pytest.mark.usefixtures("create_saved_queries") + def test_get_list_custom_filter_sql_saved_query(self): + """ + Saved Query API: 
Test get list and custom filter (sql) saved query + """ + self.login(username="admin") + admin = self.get_user("admin") + all_queries = ( + db.session.query(SavedQuery) + .filter(SavedQuery.created_by == admin) + .filter(SavedQuery.sql.ilike("%table%")) + .all() + ) + query_string = { + "filters": [{"col": "label", "opr": "all_text", "value": "table"}], + } + uri = f"api/v1/saved_query/?q={prison.dumps(query_string)}" + rv = self.get_assert_metric(uri, "get_list") + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == len(all_queries) + + @pytest.mark.usefixtures("create_saved_queries") + def test_get_list_custom_filter_description_saved_query(self): + """ + Saved Query API: Test get list and custom filter (description) saved query + """ + self.login(username="admin") + admin = self.get_user("admin") + all_queries = ( + db.session.query(SavedQuery) + .filter(SavedQuery.created_by == admin) + .filter(SavedQuery.description.ilike("%cool%")) + .all() + ) + query_string = { + "filters": [{"col": "label", "opr": "all_text", "value": "cool"}], + } + uri = f"api/v1/saved_query/?q={prison.dumps(query_string)}" + rv = self.get_assert_metric(uri, "get_list") + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == len(all_queries) + + @pytest.mark.usefixtures("create_saved_queries") + def test_get_saved_query_favorite_filter(self): + """ + SavedQuery API: Test get saved queries favorite filter + """ + admin = self.get_user("admin") + users_favorite_query = db.session.query(FavStar.obj_id).filter( + and_(FavStar.user_id == admin.id, FavStar.class_name == "query") + ) + expected_models = ( + db.session.query(SavedQuery) + .filter(and_(SavedQuery.id.in_(users_favorite_query))) + .order_by(SavedQuery.label.asc()) + .all() + ) + + arguments = { + "filters": [{"col": "id", "opr": "saved_query_is_fav", "value": True}], + "order_column": "label", + "order_direction": "asc", + "keys": ["none"], + "columns": ["label"], + } + self.login(username="admin") + uri = f"api/v1/saved_query/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 200 + assert len(expected_models) == data["count"] + + for i, expected_model in enumerate(expected_models): + assert expected_model.label == data["result"][i]["label"] + + # Test not favorite saves queries + expected_models = ( + db.session.query(SavedQuery) + .filter( + and_( + ~SavedQuery.id.in_(users_favorite_query), + SavedQuery.created_by == admin, + ) + ) + .order_by(SavedQuery.label.asc()) + .all() + ) + arguments["filters"][0]["value"] = False + uri = f"api/v1/saved_query/?q={prison.dumps(arguments)}" + rv = self.client.get(uri) + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 200 + assert len(expected_models) == data["count"] + + def test_info_saved_query(self): + """ + SavedQuery API: Test info + """ + self.login(username="admin") + uri = "api/v1/saved_query/_info" + rv = self.get_assert_metric(uri, "info") + assert rv.status_code == 200 + + def test_info_security_saved_query(self): + """ + SavedQuery API: Test info security + """ + self.login(username="admin") + params = {"keys": ["permissions"]} + uri = f"api/v1/saved_query/_info?q={prison.dumps(params)}" + rv = self.get_assert_metric(uri, "info") + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 200 + assert set(data["permissions"]) == {"can_read", "can_write", "can_export"} + + def 
test_related_saved_query(self): + """ + SavedQuery API: Test related databases + """ + self.login(username="admin") + databases = db.session.query(Database).all() + expected_result = { + "count": len(databases), + "result": [ + {"extra": {}, "text": str(database), "value": database.id} + for database in databases + ], + } + + uri = f"api/v1/saved_query/related/database" + rv = self.client.get(uri) + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data == expected_result + + def test_related_saved_query_not_found(self): + """ + SavedQuery API: Test related user not found + """ + self.login(username="admin") + uri = f"api/v1/saved_query/related/user" + rv = self.client.get(uri) + assert rv.status_code == 404 + + @pytest.mark.usefixtures("create_saved_queries") + def test_distinct_saved_query(self): + """ + SavedQuery API: Test distinct schemas + """ + admin = self.get_user("admin") + saved_queries = ( + db.session.query(SavedQuery).filter(SavedQuery.created_by == admin).all() + ) + + self.login(username="admin") + uri = f"api/v1/saved_query/distinct/schema" + rv = self.client.get(uri) + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + expected_response = { + "count": len(saved_queries), + "result": [ + {"text": f"schema{i}", "value": f"schema{i}"} + for i in range(len(saved_queries)) + ], + } + assert data == expected_response + + def test_get_saved_query_not_allowed(self): + """ + SavedQuery API: Test related user not allowed + """ + self.login(username="admin") + uri = f"api/v1/saved_query/wrong" + rv = self.client.get(uri) + assert rv.status_code == 405 + + @pytest.mark.usefixtures("create_saved_queries") + def test_get_saved_query(self): + """ + Saved Query API: Test get saved query + """ + saved_query = ( + db.session.query(SavedQuery).filter(SavedQuery.label == "label1").all()[0] + ) + self.login(username="admin") + uri = f"api/v1/saved_query/{saved_query.id}" + rv = self.get_assert_metric(uri, "get") + assert rv.status_code == 200 + + expected_result = { + "id": saved_query.id, + "database": {"id": saved_query.database.id, "database_name": "examples"}, + "description": "cool description", + "created_by": { + "first_name": saved_query.created_by.first_name, + "id": saved_query.created_by.id, + "last_name": saved_query.created_by.last_name, + }, + "sql": "SELECT col1, col2 from table1", + "sql_tables": [{"catalog": None, "schema": None, "table": "table1"}], + "schema": "schema1", + "label": "label1", + "template_parameters": None, + } + data = json.loads(rv.data.decode("utf-8")) + self.assertIn("changed_on_delta_humanized", data["result"]) + for key, value in data["result"].items(): + if key not in ("changed_on_delta_humanized",): + assert value == expected_result[key] + + def test_get_saved_query_not_found(self): + """ + Saved Query API: Test get saved query not found + """ + query = self.insert_default_saved_query() + max_id = db.session.query(func.max(SavedQuery.id)).scalar() + self.login(username="admin") + uri = f"api/v1/saved_query/{max_id + 1}" + rv = self.client.get(uri) + assert rv.status_code == 404 + db.session.delete(query) + db.session.commit() + + def test_create_saved_query(self): + """ + Saved Query API: Test create + """ + admin = self.get_user("admin") + example_db = get_example_database() + + post_data = { + "schema": "schema1", + "label": "label1", + "description": "some description", + "sql": "SELECT col1, col2 from table1", + "db_id": example_db.id, + } + + self.login(username="admin") + uri = 
f"api/v1/saved_query/" + rv = self.client.post(uri, json=post_data) + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 201 + + saved_query_id = data.get("id") + model = db.session.query(SavedQuery).get(saved_query_id) + for key in post_data: + assert getattr(model, key) == data["result"][key] + + # Rollback changes + db.session.delete(model) + db.session.commit() + + @pytest.mark.usefixtures("create_saved_queries") + def test_update_saved_query(self): + """ + Saved Query API: Test update + """ + saved_query = ( + db.session.query(SavedQuery).filter(SavedQuery.label == "label1").all()[0] + ) + + put_data = { + "schema": "schema_changed", + "label": "label_changed", + } + + self.login(username="admin") + uri = f"api/v1/saved_query/{saved_query.id}" + rv = self.client.put(uri, json=put_data) + assert rv.status_code == 200 + + model = db.session.query(SavedQuery).get(saved_query.id) + assert model.label == "label_changed" + assert model.schema == "schema_changed" + + @pytest.mark.usefixtures("create_saved_queries") + def test_update_saved_query_not_found(self): + """ + Saved Query API: Test update not found + """ + max_id = db.session.query(func.max(SavedQuery.id)).scalar() + self.login(username="admin") + + put_data = { + "schema": "schema_changed", + "label": "label_changed", + } + + uri = f"api/v1/saved_query/{max_id + 1}" + rv = self.client.put(uri, json=put_data) + assert rv.status_code == 404 + + @pytest.mark.usefixtures("create_saved_queries") + def test_delete_saved_query(self): + """ + Saved Query API: Test delete + """ + saved_query = ( + db.session.query(SavedQuery).filter(SavedQuery.label == "label1").all()[0] + ) + + self.login(username="admin") + uri = f"api/v1/saved_query/{saved_query.id}" + rv = self.client.delete(uri) + assert rv.status_code == 200 + + model = db.session.query(SavedQuery).get(saved_query.id) + assert model is None + + @pytest.mark.usefixtures("create_saved_queries") + def test_delete_saved_query_not_found(self): + """ + Saved Query API: Test delete not found + """ + max_id = db.session.query(func.max(SavedQuery.id)).scalar() + self.login(username="admin") + uri = f"api/v1/saved_query/{max_id + 1}" + rv = self.client.delete(uri) + assert rv.status_code == 404 + + @pytest.mark.usefixtures("create_saved_queries") + def test_delete_bulk_saved_queries(self): + """ + Saved Query API: Test delete bulk + """ + admin = self.get_user("admin") + saved_queries = ( + db.session.query(SavedQuery).filter(SavedQuery.created_by == admin).all() + ) + saved_query_ids = [saved_query.id for saved_query in saved_queries] + + self.login(username="admin") + uri = f"api/v1/saved_query/?q={prison.dumps(saved_query_ids)}" + rv = self.delete_assert_metric(uri, "bulk_delete") + assert rv.status_code == 200 + response = json.loads(rv.data.decode("utf-8")) + expected_response = {"message": f"Deleted {len(saved_query_ids)} saved queries"} + assert response == expected_response + saved_queries = ( + db.session.query(SavedQuery).filter(SavedQuery.created_by == admin).all() + ) + assert saved_queries == [] + + @pytest.mark.usefixtures("create_saved_queries") + def test_delete_one_bulk_saved_queries(self): + """ + Saved Query API: Test delete one in bulk + """ + saved_query = db.session.query(SavedQuery).first() + saved_query_ids = [saved_query.id] + + self.login(username="admin") + uri = f"api/v1/saved_query/?q={prison.dumps(saved_query_ids)}" + rv = self.delete_assert_metric(uri, "bulk_delete") + assert rv.status_code == 200 + response = json.loads(rv.data.decode("utf-8")) 
+ expected_response = {"message": f"Deleted {len(saved_query_ids)} saved query"} + assert response == expected_response + saved_query_ = db.session.query(SavedQuery).get(saved_query_ids[0]) + assert saved_query_ is None + + def test_delete_bulk_saved_query_bad_request(self): + """ + Saved Query API: Test delete bulk bad request + """ + saved_query_ids = [1, "a"] + self.login(username="admin") + uri = f"api/v1/saved_query/?q={prison.dumps(saved_query_ids)}" + rv = self.delete_assert_metric(uri, "bulk_delete") + assert rv.status_code == 400 + + @pytest.mark.usefixtures("create_saved_queries") + def test_delete_bulk_saved_query_not_found(self): + """ + Saved Query API: Test delete bulk not found + """ + max_id = db.session.query(func.max(SavedQuery.id)).scalar() + + saved_query_ids = [max_id + 1, max_id + 2] + self.login(username="admin") + uri = f"api/v1/saved_query/?q={prison.dumps(saved_query_ids)}" + rv = self.delete_assert_metric(uri, "bulk_delete") + assert rv.status_code == 404 + + @pytest.mark.usefixtures("create_saved_queries") + def test_export(self): + """ + Saved Query API: Test export + """ + admin = self.get_user("admin") + sample_query = ( + db.session.query(SavedQuery).filter(SavedQuery.created_by == admin).first() + ) + + self.login(username="admin") + argument = [sample_query.id] + uri = f"api/v1/saved_query/export/?q={prison.dumps(argument)}" + rv = self.client.get(uri) + assert rv.status_code == 200 + buf = BytesIO(rv.data) + assert is_zipfile(buf) + + @pytest.mark.usefixtures("create_saved_queries") + def test_export_not_found(self): + """ + Saved Query API: Test export + """ + max_id = db.session.query(func.max(SavedQuery.id)).scalar() + + self.login(username="admin") + argument = [max_id + 1, max_id + 2] + uri = f"api/v1/saved_query/export/?q={prison.dumps(argument)}" + rv = self.client.get(uri) + assert rv.status_code == 404 + + @pytest.mark.usefixtures("create_saved_queries") + def test_export_not_allowed(self): + """ + Saved Query API: Test export + """ + admin = self.get_user("admin") + sample_query = ( + db.session.query(SavedQuery).filter(SavedQuery.created_by == admin).first() + ) + + self.login(username="gamma_sqllab") + argument = [sample_query.id] + uri = f"api/v1/saved_query/export/?q={prison.dumps(argument)}" + rv = self.client.get(uri) + assert rv.status_code == 404 + + def create_saved_query_import(self): + buf = BytesIO() + with ZipFile(buf, "w") as bundle: + with bundle.open("saved_query_export/metadata.yaml", "w") as fp: + fp.write(yaml.safe_dump(saved_queries_metadata_config).encode()) + with bundle.open( + "saved_query_export/databases/imported_database.yaml", "w" + ) as fp: + fp.write(yaml.safe_dump(database_config).encode()) + with bundle.open( + "saved_query_export/queries/imported_database/public/imported_saved_query.yaml", + "w", + ) as fp: + fp.write(yaml.safe_dump(saved_queries_config).encode()) + buf.seek(0) + return buf + + def test_import_saved_queries(self): + """ + Saved Query API: Test import + """ + self.login(username="admin") + uri = "api/v1/saved_query/import/" + + buf = self.create_saved_query_import() + form_data = { + "formData": (buf, "saved_query.zip"), + } + rv = self.client.post(uri, data=form_data, content_type="multipart/form-data") + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 200 + assert response == {"message": "OK"} + database = ( + db.session.query(Database).filter_by(uuid=database_config["uuid"]).one() + ) + assert database.database_name == "imported_database" + + saved_query = ( + 
db.session.query(SavedQuery) + .filter_by(uuid=saved_queries_config["uuid"]) + .one() + ) + assert saved_query.database == database + + db.session.delete(saved_query) + db.session.delete(database) + db.session.commit() diff --git a/tests/integration_tests/queries/saved_queries/commands_tests.py b/tests/integration_tests/queries/saved_queries/commands_tests.py new file mode 100644 index 0000000000000..bd90419155422 --- /dev/null +++ b/tests/integration_tests/queries/saved_queries/commands_tests.py @@ -0,0 +1,240 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from unittest.mock import patch + +import pytest +import yaml + +from superset import db, security_manager +from superset.commands.exceptions import CommandInvalidError +from superset.commands.importers.exceptions import IncorrectVersionError +from superset.models.core import Database +from superset.models.sql_lab import SavedQuery +from superset.queries.saved_queries.commands.exceptions import SavedQueryNotFoundError +from superset.queries.saved_queries.commands.export import ExportSavedQueriesCommand +from superset.queries.saved_queries.commands.importers.v1 import ( + ImportSavedQueriesCommand, +) +from superset.utils.database import get_example_database +from tests.integration_tests.base_tests import SupersetTestCase +from tests.integration_tests.fixtures.importexport import ( + database_config, + database_metadata_config, + saved_queries_config, + saved_queries_metadata_config, +) + + +class TestExportSavedQueriesCommand(SupersetTestCase): + def setUp(self): + self.example_database = get_example_database() + self.example_query = SavedQuery( + database=self.example_database, + created_by=self.get_user("admin"), + sql="SELECT 42", + label="The answer", + schema="schema1", + description="Answer to the Ultimate Question of Life, the Universe, and Everything", + ) + db.session.add(self.example_query) + db.session.commit() + + def tearDown(self): + db.session.delete(self.example_query) + db.session.commit() + + @patch("superset.queries.saved_queries.filters.g") + def test_export_query_command(self, mock_g): + mock_g.user = security_manager.find_user("admin") + + command = ExportSavedQueriesCommand([self.example_query.id]) + contents = dict(command.run()) + + expected = [ + "metadata.yaml", + "queries/examples/schema1/The_answer.yaml", + "databases/examples.yaml", + ] + assert expected == list(contents.keys()) + + metadata = yaml.safe_load(contents["queries/examples/schema1/The_answer.yaml"]) + assert metadata == { + "schema": "schema1", + "label": "The answer", + "description": "Answer to the Ultimate Question of Life, the Universe, and Everything", + "sql": "SELECT 42", + "uuid": str(self.example_query.uuid), + "version": "1.0.0", + "database_uuid": str(self.example_database.uuid), + } + + 
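# Standalone sketch, not part of this patch: the assertions above rely on
# ExportSavedQueriesCommand.run() yielding (file_path, file_contents) pairs, so
# an export can be bundled the same way the import fixtures build their ZIPs.
# `bundle_export` is a hypothetical helper name, and contents are assumed to be
# plain strings as in the test above.
from io import BytesIO
from zipfile import ZipFile


def bundle_export(contents: dict) -> BytesIO:
    buf = BytesIO()
    with ZipFile(buf, "w") as bundle:
        for path, payload in contents.items():
            with bundle.open(path, "w") as fp:
                fp.write(payload.encode())
    buf.seek(0)
    return buf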
@patch("superset.queries.saved_queries.filters.g") + def test_export_query_command_no_related(self, mock_g): + """ + Test that only the query is exported when export_related=False. + """ + mock_g.user = security_manager.find_user("admin") + + command = ExportSavedQueriesCommand( + [self.example_query.id], export_related=False + ) + contents = dict(command.run()) + + expected = [ + "metadata.yaml", + "queries/examples/schema1/The_answer.yaml", + ] + assert expected == list(contents.keys()) + + @patch("superset.queries.saved_queries.filters.g") + def test_export_query_command_no_access(self, mock_g): + """Test that users can't export datasets they don't have access to""" + mock_g.user = security_manager.find_user("gamma") + + command = ExportSavedQueriesCommand([self.example_query.id]) + contents = command.run() + with self.assertRaises(SavedQueryNotFoundError): + next(contents) + + @patch("superset.queries.saved_queries.filters.g") + def test_export_query_command_invalid_dataset(self, mock_g): + """Test that an error is raised when exporting an invalid dataset""" + mock_g.user = security_manager.find_user("admin") + + command = ExportSavedQueriesCommand([-1]) + contents = command.run() + with self.assertRaises(SavedQueryNotFoundError): + next(contents) + + @patch("superset.queries.saved_queries.filters.g") + def test_export_query_command_key_order(self, mock_g): + """Test that they keys in the YAML have the same order as export_fields""" + mock_g.user = security_manager.find_user("admin") + + command = ExportSavedQueriesCommand([self.example_query.id]) + contents = dict(command.run()) + + metadata = yaml.safe_load(contents["queries/examples/schema1/The_answer.yaml"]) + assert list(metadata.keys()) == [ + "schema", + "label", + "description", + "sql", + "uuid", + "version", + "database_uuid", + ] + + +class TestImportSavedQueriesCommand(SupersetTestCase): + def test_import_v1_saved_queries(self): + """Test that we can import a saved query""" + contents = { + "metadata.yaml": yaml.safe_dump(saved_queries_metadata_config), + "databases/imported_database.yaml": yaml.safe_dump(database_config), + "queries/imported_query.yaml": yaml.safe_dump(saved_queries_config), + } + + command = ImportSavedQueriesCommand(contents) + command.run() + + saved_query = ( + db.session.query(SavedQuery) + .filter_by(uuid=saved_queries_config["uuid"]) + .one() + ) + + assert saved_query.schema == "public" + + database = ( + db.session.query(Database).filter_by(uuid=database_config["uuid"]).one() + ) + + db.session.delete(saved_query) + db.session.delete(database) + db.session.commit() + + def test_import_v1_saved_queries_multiple(self): + """Test that a saved query can be imported multiple times""" + contents = { + "metadata.yaml": yaml.safe_dump(saved_queries_metadata_config), + "databases/imported_database.yaml": yaml.safe_dump(database_config), + "queries/imported_query.yaml": yaml.safe_dump(saved_queries_config), + } + command = ImportSavedQueriesCommand(contents, overwrite=True) + command.run() + command.run() + database = ( + db.session.query(Database).filter_by(uuid=database_config["uuid"]).one() + ) + saved_query = db.session.query(SavedQuery).filter_by(db_id=database.id).all() + assert len(saved_query) == 1 + + db.session.delete(saved_query[0]) + db.session.delete(database) + db.session.commit() + + def test_import_v1_saved_queries_validation(self): + """Test different validations applied when importing a saved query""" + # metadata.yaml must be present + contents = { + "databases/imported_database.yaml": 
yaml.safe_dump(database_config), + "queries/imported_query.yaml": yaml.safe_dump(saved_queries_config), + } + command = ImportSavedQueriesCommand(contents) + with pytest.raises(IncorrectVersionError) as excinfo: + command.run() + assert str(excinfo.value) == "Missing metadata.yaml" + + # version should be 1.0.0 + contents["metadata.yaml"] = yaml.safe_dump( + { + "version": "2.0.0", + "type": "SavedQuery", + "timestamp": "2021-03-30T20:37:54.791187+00:00", + } + ) + command = ImportSavedQueriesCommand(contents) + with pytest.raises(IncorrectVersionError) as excinfo: + command.run() + assert str(excinfo.value) == "Must be equal to 1.0.0." + + # type should be a SavedQuery + contents["metadata.yaml"] = yaml.safe_dump(database_metadata_config) + command = ImportSavedQueriesCommand(contents) + with pytest.raises(CommandInvalidError) as excinfo: + command.run() + assert str(excinfo.value) == "Error importing saved_queries" + assert excinfo.value.normalized_messages() == { + "metadata.yaml": {"type": ["Must be equal to SavedQuery."]} + } + + # must also validate databases + broken_config = database_config.copy() + del broken_config["database_name"] + contents["metadata.yaml"] = yaml.safe_dump(saved_queries_metadata_config) + contents["databases/imported_database.yaml"] = yaml.safe_dump(broken_config) + command = ImportSavedQueriesCommand(contents) + with pytest.raises(CommandInvalidError) as excinfo: + command.run() + assert str(excinfo.value) == "Error importing saved_queries" + assert excinfo.value.normalized_messages() == { + "databases/imported_database.yaml": { + "database_name": ["Missing data for required field."], + } + } diff --git a/tests/integration_tests/query_context_tests.py b/tests/integration_tests/query_context_tests.py new file mode 100644 index 0000000000000..5e5beae345b86 --- /dev/null +++ b/tests/integration_tests/query_context_tests.py @@ -0,0 +1,1102 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+import re +import time +from typing import Any, Dict + +import numpy as np +import pandas as pd +import pytest +from pandas import DateOffset + +from superset import db +from superset.charts.schemas import ChartDataQueryContextSchema +from superset.common.chart_data import ChartDataResultFormat, ChartDataResultType +from superset.common.query_context import QueryContext +from superset.common.query_context_factory import QueryContextFactory +from superset.common.query_object import QueryObject +from superset.connectors.sqla.models import SqlMetric +from superset.datasource.dao import DatasourceDAO +from superset.extensions import cache_manager +from superset.superset_typing import AdhocColumn +from superset.utils.core import ( + AdhocMetricExpressionType, + backend, + DatasourceType, + QueryStatus, +) +from superset.utils.pandas_postprocessing.utils import FLAT_COLUMN_SEPARATOR +from tests.integration_tests.base_tests import SupersetTestCase +from tests.integration_tests.conftest import only_postgresql, only_sqlite +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) +from tests.integration_tests.fixtures.query_context import get_query_context + + +def get_sql_text(payload: Dict[str, Any]) -> str: + payload["result_type"] = ChartDataResultType.QUERY.value + query_context = ChartDataQueryContextSchema().load(payload) + responses = query_context.get_payload() + assert len(responses) == 1 + response = responses["queries"][0] + assert len(response) == 2 + assert response["language"] == "sql" + return response["query"] + + +class TestQueryContext(SupersetTestCase): + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_schema_deserialization(self): + """ + Ensure that the deserialized QueryContext contains all required fields. 
+ """ + + payload = get_query_context("birth_names", add_postprocessing_operations=True) + query_context = ChartDataQueryContextSchema().load(payload) + self.assertEqual(len(query_context.queries), len(payload["queries"])) + + for query_idx, query in enumerate(query_context.queries): + payload_query = payload["queries"][query_idx] + + # check basic properties + self.assertEqual(query.extras, payload_query["extras"]) + self.assertEqual(query.filter, payload_query["filters"]) + self.assertEqual(query.columns, payload_query["columns"]) + + # metrics are mutated during creation + for metric_idx, metric in enumerate(query.metrics): + payload_metric = payload_query["metrics"][metric_idx] + payload_metric = ( + payload_metric + if "expressionType" in payload_metric + else payload_metric["label"] + ) + self.assertEqual(metric, payload_metric) + + self.assertEqual(query.orderby, payload_query["orderby"]) + self.assertEqual(query.time_range, payload_query["time_range"]) + + # check post processing operation properties + for post_proc_idx, post_proc in enumerate(query.post_processing): + payload_post_proc = payload_query["post_processing"][post_proc_idx] + self.assertEqual(post_proc["operation"], payload_post_proc["operation"]) + self.assertEqual(post_proc["options"], payload_post_proc["options"]) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_cache(self): + table_name = "birth_names" + payload = get_query_context( + query_name=table_name, + add_postprocessing_operations=True, + ) + payload["force"] = True + + query_context = ChartDataQueryContextSchema().load(payload) + query_object = query_context.queries[0] + query_cache_key = query_context.query_cache_key(query_object) + + response = query_context.get_payload(cache_query_context=True) + # MUST BE a successful query + query_dump = response["queries"][0] + assert query_dump["status"] == QueryStatus.SUCCESS + + cache_key = response["cache_key"] + assert cache_key is not None + + cached = cache_manager.cache.get(cache_key) + assert cached is not None + + rehydrated_qc = ChartDataQueryContextSchema().load(cached["data"]) + rehydrated_qo = rehydrated_qc.queries[0] + rehydrated_query_cache_key = rehydrated_qc.query_cache_key(rehydrated_qo) + + self.assertEqual(rehydrated_qc.datasource, query_context.datasource) + self.assertEqual(len(rehydrated_qc.queries), 1) + self.assertEqual(query_cache_key, rehydrated_query_cache_key) + self.assertEqual(rehydrated_qc.result_type, query_context.result_type) + self.assertEqual(rehydrated_qc.result_format, query_context.result_format) + self.assertFalse(rehydrated_qc.force) + + def test_query_cache_key_changes_when_datasource_is_updated(self): + self.login(username="admin") + payload = get_query_context("birth_names") + + # construct baseline query_cache_key + query_context = ChartDataQueryContextSchema().load(payload) + query_object = query_context.queries[0] + cache_key_original = query_context.query_cache_key(query_object) + + # make temporary change and revert it to refresh the changed_on property + datasource = DatasourceDAO.get_datasource( + session=db.session, + datasource_type=DatasourceType(payload["datasource"]["type"]), + datasource_id=payload["datasource"]["id"], + ) + description_original = datasource.description + datasource.description = "temporary description" + db.session.commit() + datasource.description = description_original + db.session.commit() + + # create new QueryContext with unchanged attributes, extract new query_cache_key + query_context = 
ChartDataQueryContextSchema().load(payload) + query_object = query_context.queries[0] + cache_key_new = query_context.query_cache_key(query_object) + + # the new cache_key should be different due to updated datasource + self.assertNotEqual(cache_key_original, cache_key_new) + + def test_query_cache_key_changes_when_metric_is_updated(self): + self.login(username="admin") + payload = get_query_context("birth_names") + + # make temporary change and revert it to refresh the changed_on property + datasource = DatasourceDAO.get_datasource( + session=db.session, + datasource_type=DatasourceType(payload["datasource"]["type"]), + datasource_id=payload["datasource"]["id"], + ) + + datasource.metrics.append(SqlMetric(metric_name="foo", expression="select 1;")) + db.session.commit() + + # construct baseline query_cache_key + query_context = ChartDataQueryContextSchema().load(payload) + query_object = query_context.queries[0] + cache_key_original = query_context.query_cache_key(query_object) + + # wait a second since mysql records timestamps in second granularity + time.sleep(1) + + datasource.metrics[0].expression = "select 2;" + db.session.commit() + + # create new QueryContext with unchanged attributes, extract new query_cache_key + query_context = ChartDataQueryContextSchema().load(payload) + query_object = query_context.queries[0] + cache_key_new = query_context.query_cache_key(query_object) + + datasource.metrics = [] + db.session.commit() + + # the new cache_key should be different due to updated datasource + self.assertNotEqual(cache_key_original, cache_key_new) + + def test_query_cache_key_does_not_change_for_non_existent_or_null(self): + self.login(username="admin") + payload = get_query_context("birth_names", add_postprocessing_operations=True) + del payload["queries"][0]["granularity"] + + # construct baseline query_cache_key from query_context with post processing operation + query_context: QueryContext = ChartDataQueryContextSchema().load(payload) + query_object: QueryObject = query_context.queries[0] + cache_key_original = query_context.query_cache_key(query_object) + + payload["queries"][0]["granularity"] = None + query_context = ChartDataQueryContextSchema().load(payload) + query_object = query_context.queries[0] + + assert query_context.query_cache_key(query_object) == cache_key_original + + def test_query_cache_key_changes_when_post_processing_is_updated(self): + self.login(username="admin") + payload = get_query_context("birth_names", add_postprocessing_operations=True) + + # construct baseline query_cache_key from query_context with post processing operation + query_context = ChartDataQueryContextSchema().load(payload) + query_object = query_context.queries[0] + cache_key_original = query_context.query_cache_key(query_object) + + # ensure added None post_processing operation doesn't change query_cache_key + payload["queries"][0]["post_processing"].append(None) + query_context = ChartDataQueryContextSchema().load(payload) + query_object = query_context.queries[0] + cache_key = query_context.query_cache_key(query_object) + self.assertEqual(cache_key_original, cache_key) + + # ensure query without post processing operation is different + payload["queries"][0].pop("post_processing") + query_context = ChartDataQueryContextSchema().load(payload) + query_object = query_context.queries[0] + cache_key = query_context.query_cache_key(query_object) + self.assertNotEqual(cache_key_original, cache_key) + + def test_query_cache_key_changes_when_time_offsets_is_updated(self): + 
self.login(username="admin") + payload = get_query_context("birth_names", add_time_offsets=True) + + query_context = ChartDataQueryContextSchema().load(payload) + query_object = query_context.queries[0] + cache_key_original = query_context.query_cache_key(query_object) + + payload["queries"][0]["time_offsets"].pop() + query_context = ChartDataQueryContextSchema().load(payload) + query_object = query_context.queries[0] + cache_key = query_context.query_cache_key(query_object) + self.assertNotEqual(cache_key_original, cache_key) + + def test_handle_metrics_field(self): + """ + Should support both predefined and adhoc metrics. + """ + self.login(username="admin") + adhoc_metric = { + "expressionType": "SIMPLE", + "column": {"column_name": "num_boys", "type": "BIGINT(20)"}, + "aggregate": "SUM", + "label": "Boys", + "optionName": "metric_11", + } + payload = get_query_context("birth_names") + payload["queries"][0]["metrics"] = ["sum__num", {"label": "abc"}, adhoc_metric] + query_context = ChartDataQueryContextSchema().load(payload) + query_object = query_context.queries[0] + self.assertEqual(query_object.metrics, ["sum__num", "abc", adhoc_metric]) + + def test_convert_deprecated_fields(self): + """ + Ensure that deprecated fields are converted correctly + """ + self.login(username="admin") + payload = get_query_context("birth_names") + columns = payload["queries"][0]["columns"] + payload["queries"][0]["groupby"] = columns + payload["queries"][0]["timeseries_limit"] = 99 + payload["queries"][0]["timeseries_limit_metric"] = "sum__num" + del payload["queries"][0]["columns"] + payload["queries"][0]["granularity_sqla"] = "timecol" + payload["queries"][0]["having_filters"] = [{"col": "a", "op": "==", "val": "b"}] + query_context = ChartDataQueryContextSchema().load(payload) + self.assertEqual(len(query_context.queries), 1) + query_object = query_context.queries[0] + self.assertEqual(query_object.granularity, "timecol") + self.assertEqual(query_object.columns, columns) + self.assertEqual(query_object.series_limit, 99) + self.assertEqual(query_object.series_limit_metric, "sum__num") + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_csv_response_format(self): + """ + Ensure that CSV result format works + """ + self.login(username="admin") + payload = get_query_context("birth_names") + payload["result_format"] = ChartDataResultFormat.CSV.value + payload["queries"][0]["row_limit"] = 10 + query_context: QueryContext = ChartDataQueryContextSchema().load(payload) + responses = query_context.get_payload() + self.assertEqual(len(responses), 1) + data = responses["queries"][0]["data"] + self.assertIn("name,sum__num\n", data) + self.assertEqual(len(data.split("\n")), 12) + + def test_sql_injection_via_groupby(self): + """ + Ensure that calling invalid columns names in groupby are caught + """ + self.login(username="admin") + payload = get_query_context("birth_names") + payload["queries"][0]["groupby"] = ["currentDatabase()"] + query_context = ChartDataQueryContextSchema().load(payload) + query_payload = query_context.get_payload() + assert query_payload["queries"][0].get("error") is not None + + def test_sql_injection_via_columns(self): + """ + Ensure that calling invalid column names in columns are caught + """ + self.login(username="admin") + payload = get_query_context("birth_names") + payload["queries"][0]["groupby"] = [] + payload["queries"][0]["metrics"] = [] + payload["queries"][0]["columns"] = ["*, 'extra'"] + query_context = ChartDataQueryContextSchema().load(payload) + 
query_payload = query_context.get_payload() + assert query_payload["queries"][0].get("error") is not None + + def test_sql_injection_via_metrics(self): + """ + Ensure that calling invalid column names in filters are caught + """ + self.login(username="admin") + payload = get_query_context("birth_names") + payload["queries"][0]["groupby"] = ["name"] + payload["queries"][0]["metrics"] = [ + { + "expressionType": AdhocMetricExpressionType.SIMPLE.value, + "column": {"column_name": "invalid_col"}, + "aggregate": "SUM", + "label": "My Simple Label", + } + ] + query_context = ChartDataQueryContextSchema().load(payload) + query_payload = query_context.get_payload() + assert query_payload["queries"][0].get("error") is not None + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_samples_response_type(self): + """ + Ensure that samples result type works + """ + self.login(username="admin") + payload = get_query_context("birth_names") + payload["result_type"] = ChartDataResultType.SAMPLES.value + payload["queries"][0]["row_limit"] = 5 + query_context = ChartDataQueryContextSchema().load(payload) + responses = query_context.get_payload() + self.assertEqual(len(responses), 1) + data = responses["queries"][0]["data"] + self.assertIsInstance(data, list) + self.assertEqual(len(data), 5) + self.assertNotIn("sum__num", data[0]) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_query_response_type(self): + """ + Ensure that query result type works + """ + self.login(username="admin") + payload = get_query_context("birth_names") + sql_text = get_sql_text(payload) + assert "SELECT" in sql_text + assert re.search(r'[`"\[]?num[`"\]]? IS NOT NULL', sql_text) + assert re.search( + r"""NOT \([`"\[]?name[`"\]]? IS NULL[\s\n]* """ + r"""OR [`"\[]?name[`"\]]? IN \('"abc"'\)\)""", + sql_text, + ) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_handle_sort_by_metrics(self): + """ + Should properly handle sort by metrics in various scenarios. + """ + self.login(username="admin") + + sql_text = get_sql_text(get_query_context("birth_names")) + if backend() == "hive": + # should have no duplicate `SUM(num)` + assert "SUM(num) AS `sum__num`," not in sql_text + assert "SUM(num) AS `sum__num`" in sql_text + # the alias should be in ORDER BY + assert "ORDER BY `sum__num` DESC" in sql_text + else: + assert re.search(r'ORDER BY [`"\[]?sum__num[`"\]]? DESC', sql_text) + + sql_text = get_sql_text( + get_query_context("birth_names:only_orderby_has_metric") + ) + if backend() == "hive": + assert "SUM(num) AS `sum__num`," not in sql_text + assert "SUM(num) AS `sum__num`" in sql_text + assert "ORDER BY `sum__num` DESC" in sql_text + else: + assert re.search( + r'ORDER BY SUM\([`"\[]?num[`"\]]?\) DESC', sql_text, re.IGNORECASE + ) + + sql_text = get_sql_text(get_query_context("birth_names:orderby_dup_alias")) + + # Check SELECT clauses + if backend() == "presto": + # presto cannot have ambiguous alias in order by, so selected column + # alias is renamed. 
+ assert 'sum("num_boys") AS "num_boys__"' in sql_text + else: + assert re.search( + r'SUM\([`"\[]?num_boys[`"\]]?\) AS [`\"\[]?num_boys[`"\]]?', + sql_text, + re.IGNORECASE, + ) + + # Check ORDER BY clauses + if backend() == "hive": + # Hive must add additional SORT BY metrics to SELECT + assert re.search( + r"MAX\(CASE.*END\) AS `MAX\(CASE WHEN...`", + sql_text, + re.IGNORECASE | re.DOTALL, + ) + + # The additional column with the same expression but a different label + # as an existing metric should not be added + assert "sum(`num_girls`) AS `SUM(num_girls)`" not in sql_text + + # Should reference all ORDER BY columns by aliases + assert "ORDER BY `num_girls` DESC," in sql_text + assert "`AVG(num_boys)` DESC," in sql_text + assert "`MAX(CASE WHEN...` ASC" in sql_text + else: + if backend() == "presto": + # since the selected `num_boys` is renamed to `num_boys__` + # it must be references as expression + assert re.search( + r'ORDER BY SUM\([`"\[]?num_girls[`"\]]?\) DESC', + sql_text, + re.IGNORECASE, + ) + else: + # Should reference the adhoc metric by alias when possible + assert re.search( + r'ORDER BY [`"\[]?num_girls[`"\]]? DESC', + sql_text, + re.IGNORECASE, + ) + + # ORDER BY only columns should always be expressions + assert re.search( + r'AVG\([`"\[]?num_boys[`"\]]?\) DESC', + sql_text, + re.IGNORECASE, + ) + assert re.search( + r"MAX\(CASE.*END\) ASC", sql_text, re.IGNORECASE | re.DOTALL + ) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_fetch_values_predicate(self): + """ + Ensure that fetch values predicate is added to query if needed + """ + self.login(username="admin") + + payload = get_query_context("birth_names") + sql_text = get_sql_text(payload) + assert "123 = 123" not in sql_text + + payload["queries"][0]["apply_fetch_values_predicate"] = True + sql_text = get_sql_text(payload) + assert "123 = 123" in sql_text + + def test_query_object_unknown_fields(self): + """ + Ensure that query objects with unknown fields don't raise an Exception and + have an identical cache key as one without the unknown field + """ + self.login(username="admin") + payload = get_query_context("birth_names") + query_context = ChartDataQueryContextSchema().load(payload) + responses = query_context.get_payload() + orig_cache_key = responses["queries"][0]["cache_key"] + payload["queries"][0]["foo"] = "bar" + query_context = ChartDataQueryContextSchema().load(payload) + responses = query_context.get_payload() + new_cache_key = responses["queries"][0]["cache_key"] + self.assertEqual(orig_cache_key, new_cache_key) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_time_offsets_in_query_object(self): + """ + Ensure that time_offsets can generate the correct query + """ + self.login(username="admin") + payload = get_query_context("birth_names") + payload["queries"][0]["metrics"] = ["sum__num"] + payload["queries"][0]["groupby"] = ["name"] + payload["queries"][0]["is_timeseries"] = True + payload["queries"][0]["timeseries_limit"] = 5 + payload["queries"][0]["time_offsets"] = ["1 year ago", "1 year later"] + payload["queries"][0]["time_range"] = "1990 : 1991" + query_context = ChartDataQueryContextSchema().load(payload) + responses = query_context.get_payload() + self.assertEqual( + responses["queries"][0]["colnames"], + [ + "__timestamp", + "name", + "sum__num", + "sum__num__1 year ago", + "sum__num__1 year later", + ], + ) + + sqls = [ + sql for sql in responses["queries"][0]["query"].split(";") if sql.strip() + ] + 
self.assertEqual(len(sqls), 3)
+ # 1 year ago
+ assert re.search(r"1989-01-01.+1990-01-01", sqls[1], re.S)
+ assert re.search(r"1990-01-01.+1991-01-01", sqls[1], re.S)
+
+ # 1 year later
+ assert re.search(r"1991-01-01.+1992-01-01", sqls[2], re.S)
+ assert re.search(r"1990-01-01.+1991-01-01", sqls[2], re.S)
+
+ @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
+ def test_processing_time_offsets_cache(self):
+ """
+ Ensure that time_offsets can generate the correct query
+ """
+ self.login(username="admin")
+ payload = get_query_context("birth_names")
+ payload["queries"][0]["metrics"] = ["sum__num"]
+ # should process an empty dataframe correctly
+ # because "name" is randomly generated, each time_offset slice will be empty
+ payload["queries"][0]["groupby"] = ["name"]
+ payload["queries"][0]["is_timeseries"] = True
+ payload["queries"][0]["timeseries_limit"] = 5
+ payload["queries"][0]["time_offsets"] = []
+ payload["queries"][0]["time_range"] = "1990 : 1991"
+ payload["queries"][0]["granularity"] = "ds"
+ payload["queries"][0]["extras"]["time_grain_sqla"] = "P1Y"
+ query_context = ChartDataQueryContextSchema().load(payload)
+ query_object = query_context.queries[0]
+ query_result = query_context.get_query_result(query_object)
+ # get main query dataframe
+ df = query_result.df
+
+ payload["queries"][0]["time_offsets"] = ["1 year ago", "1 year later"]
+ query_context = ChartDataQueryContextSchema().load(payload)
+ query_object = query_context.queries[0]
+ # query without cache
+ query_context.processing_time_offsets(df, query_object)
+ # query with cache
+ rv = query_context.processing_time_offsets(df, query_object)
+ cache_keys = rv["cache_keys"]
+ cache_keys__1_year_ago = cache_keys[0]
+ cache_keys__1_year_later = cache_keys[1]
+ self.assertIsNotNone(cache_keys__1_year_ago)
+ self.assertIsNotNone(cache_keys__1_year_later)
+ self.assertNotEqual(cache_keys__1_year_ago, cache_keys__1_year_later)
+
+ # swap offsets
+ payload["queries"][0]["time_offsets"] = ["1 year later", "1 year ago"]
+ query_context = ChartDataQueryContextSchema().load(payload)
+ query_object = query_context.queries[0]
+ rv = query_context.processing_time_offsets(df, query_object)
+ cache_keys = rv["cache_keys"]
+ self.assertEqual(cache_keys__1_year_ago, cache_keys[1])
+ self.assertEqual(cache_keys__1_year_later, cache_keys[0])
+
+ # remove all offsets
+ payload["queries"][0]["time_offsets"] = []
+ query_context = ChartDataQueryContextSchema().load(payload)
+ query_object = query_context.queries[0]
+ rv = query_context.processing_time_offsets(
+ df,
+ query_object,
+ )
+ self.assertIs(rv["df"], df)
+ self.assertEqual(rv["queries"], [])
+ self.assertEqual(rv["cache_keys"], [])
+
+ @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
+ def test_time_offsets_sql(self):
+ payload = get_query_context("birth_names")
+ payload["queries"][0]["metrics"] = ["sum__num"]
+ payload["queries"][0]["groupby"] = ["state"]
+ payload["queries"][0]["is_timeseries"] = True
+ payload["queries"][0]["timeseries_limit"] = 5
+ payload["queries"][0]["time_offsets"] = []
+ payload["queries"][0]["time_range"] = "1980 : 1991"
+ payload["queries"][0]["granularity"] = "ds"
+ payload["queries"][0]["extras"]["time_grain_sqla"] = "P1Y"
+ query_context = ChartDataQueryContextSchema().load(payload)
+ query_object = query_context.queries[0]
+ query_result = query_context.get_query_result(query_object)
+ # get main query dataframe
+ df = query_result.df
+
+ # set time_offsets to query_object
+ payload["queries"][0]["time_offsets"] = 
["3 years ago", "3 years later"] + query_context = ChartDataQueryContextSchema().load(payload) + query_object = query_context.queries[0] + time_offsets_obj = query_context.processing_time_offsets(df, query_object) + query_from_1977_to_1988 = time_offsets_obj["queries"][0] + query_from_1983_to_1994 = time_offsets_obj["queries"][1] + + # should generate expected date range in sql + assert "1977-01-01" in query_from_1977_to_1988 + assert "1988-01-01" in query_from_1977_to_1988 + assert "1983-01-01" in query_from_1983_to_1994 + assert "1994-01-01" in query_from_1983_to_1994 + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_time_offsets_accuracy(self): + payload = get_query_context("birth_names") + payload["queries"][0]["metrics"] = ["sum__num"] + payload["queries"][0]["groupby"] = ["state"] + payload["queries"][0]["is_timeseries"] = True + payload["queries"][0]["timeseries_limit"] = 5 + payload["queries"][0]["time_offsets"] = [] + payload["queries"][0]["time_range"] = "1980 : 1991" + payload["queries"][0]["granularity"] = "ds" + payload["queries"][0]["extras"]["time_grain_sqla"] = "P1Y" + query_context = ChartDataQueryContextSchema().load(payload) + query_object = query_context.queries[0] + query_result = query_context.get_query_result(query_object) + # get main query dataframe + df = query_result.df + + # set time_offsets to query_object + payload["queries"][0]["time_offsets"] = ["3 years ago", "3 years later"] + query_context = ChartDataQueryContextSchema().load(payload) + query_object = query_context.queries[0] + time_offsets_obj = query_context.processing_time_offsets(df, query_object) + df_with_offsets = time_offsets_obj["df"] + df_with_offsets = df_with_offsets.set_index(["__timestamp", "state"]) + + # should get correct data when apply "3 years ago" + payload["queries"][0]["time_offsets"] = [] + payload["queries"][0]["time_range"] = "1977 : 1988" + query_context = ChartDataQueryContextSchema().load(payload) + query_object = query_context.queries[0] + query_result = query_context.get_query_result(query_object) + # get df for "3 years ago" + df_3_years_ago = query_result.df + df_3_years_ago["__timestamp"] = df_3_years_ago["__timestamp"] + DateOffset( + years=3 + ) + df_3_years_ago = df_3_years_ago.set_index(["__timestamp", "state"]) + for index, row in df_with_offsets.iterrows(): + if index in df_3_years_ago.index: + assert ( + row["sum__num__3 years ago"] + == df_3_years_ago.loc[index]["sum__num"] + ) + + # should get correct data when apply "3 years later" + payload["queries"][0]["time_offsets"] = [] + payload["queries"][0]["time_range"] = "1983 : 1994" + query_context = ChartDataQueryContextSchema().load(payload) + query_object = query_context.queries[0] + query_result = query_context.get_query_result(query_object) + # get df for "3 years later" + df_3_years_later = query_result.df + df_3_years_later["__timestamp"] = df_3_years_later["__timestamp"] - DateOffset( + years=3 + ) + df_3_years_later = df_3_years_later.set_index(["__timestamp", "state"]) + for index, row in df_with_offsets.iterrows(): + if index in df_3_years_later.index: + assert ( + row["sum__num__3 years later"] + == df_3_years_later.loc[index]["sum__num"] + ) + + +def test_get_label_map(app_context, virtual_dataset_comma_in_column_value): + qc = QueryContextFactory().create( + datasource={ + "type": virtual_dataset_comma_in_column_value.type, + "id": virtual_dataset_comma_in_column_value.id, + }, + queries=[ + { + "columns": ["col1", "col2"], + "metrics": ["count"], + "post_processing": [ 
+ { + "operation": "pivot", + "options": { + "aggregates": {"count": {"operator": "mean"}}, + "columns": ["col2"], + "index": ["col1"], + }, + }, + {"operation": "flatten"}, + ], + } + ], + result_type=ChartDataResultType.FULL, + force=True, + ) + query_object = qc.queries[0] + df = qc.get_df_payload(query_object)["df"] + label_map = qc.get_df_payload(query_object)["label_map"] + assert list(df.columns.values) == [ + "col1", + "count" + FLAT_COLUMN_SEPARATOR + "col2, row1", + "count" + FLAT_COLUMN_SEPARATOR + "col2, row2", + "count" + FLAT_COLUMN_SEPARATOR + "col2, row3", + ] + assert label_map == { + "col1": ["col1"], + "count, col2, row1": ["count", "col2, row1"], + "count, col2, row2": ["count", "col2, row2"], + "count, col2, row3": ["count", "col2, row3"], + } + + +def test_time_column_with_time_grain(app_context, physical_dataset): + column_on_axis: AdhocColumn = { + "label": "I_AM_AN_ORIGINAL_COLUMN", + "sqlExpression": "col5", + "timeGrain": "P1Y", + } + adhoc_column: AdhocColumn = { + "label": "I_AM_A_TRUNC_COLUMN", + "sqlExpression": "col6", + "columnType": "BASE_AXIS", + "timeGrain": "P1Y", + } + qc = QueryContextFactory().create( + datasource={ + "type": physical_dataset.type, + "id": physical_dataset.id, + }, + queries=[ + { + "columns": ["col1", column_on_axis, adhoc_column], + "metrics": ["count"], + "orderby": [["col1", True]], + } + ], + result_type=ChartDataResultType.FULL, + force=True, + ) + query_object = qc.queries[0] + df = qc.get_df_payload(query_object)["df"] + if query_object.datasource.database.backend == "sqlite": + # sqlite returns string as timestamp column + assert df["I_AM_AN_ORIGINAL_COLUMN"][0] == "2000-01-01 00:00:00" + assert df["I_AM_AN_ORIGINAL_COLUMN"][1] == "2000-01-02 00:00:00" + assert df["I_AM_A_TRUNC_COLUMN"][0] == "2002-01-01 00:00:00" + assert df["I_AM_A_TRUNC_COLUMN"][1] == "2002-01-01 00:00:00" + else: + assert df["I_AM_AN_ORIGINAL_COLUMN"][0].strftime("%Y-%m-%d") == "2000-01-01" + assert df["I_AM_AN_ORIGINAL_COLUMN"][1].strftime("%Y-%m-%d") == "2000-01-02" + assert df["I_AM_A_TRUNC_COLUMN"][0].strftime("%Y-%m-%d") == "2002-01-01" + assert df["I_AM_A_TRUNC_COLUMN"][1].strftime("%Y-%m-%d") == "2002-01-01" + + +def test_non_time_column_with_time_grain(app_context, physical_dataset): + qc = QueryContextFactory().create( + datasource={ + "type": physical_dataset.type, + "id": physical_dataset.id, + }, + queries=[ + { + "columns": [ + "col1", + { + "label": "COL2 ALIAS", + "sqlExpression": "col2", + "columnType": "BASE_AXIS", + "timeGrain": "P1Y", + }, + ], + "metrics": ["count"], + "orderby": [["col1", True]], + "row_limit": 1, + } + ], + result_type=ChartDataResultType.FULL, + force=True, + ) + + query_object = qc.queries[0] + df = qc.get_df_payload(query_object)["df"] + assert df["COL2 ALIAS"][0] == "a" + + +def test_special_chars_in_column_name(app_context, physical_dataset): + qc = QueryContextFactory().create( + datasource={ + "type": physical_dataset.type, + "id": physical_dataset.id, + }, + queries=[ + { + "columns": [ + "col1", + "time column with spaces", + { + "label": "I_AM_A_TRUNC_COLUMN", + "sqlExpression": "time column with spaces", + "columnType": "BASE_AXIS", + "timeGrain": "P1Y", + }, + ], + "metrics": ["count"], + "orderby": [["col1", True]], + "row_limit": 1, + } + ], + result_type=ChartDataResultType.FULL, + force=True, + ) + + query_object = qc.queries[0] + df = qc.get_df_payload(query_object)["df"] + if query_object.datasource.database.backend == "sqlite": + # sqlite returns string as timestamp column + assert df["time column 
with spaces"][0] == "2002-01-03 00:00:00" + assert df["I_AM_A_TRUNC_COLUMN"][0] == "2002-01-01 00:00:00" + else: + assert df["time column with spaces"][0].strftime("%Y-%m-%d") == "2002-01-03" + assert df["I_AM_A_TRUNC_COLUMN"][0].strftime("%Y-%m-%d") == "2002-01-01" + + +@only_postgresql +def test_date_adhoc_column(app_context, physical_dataset): + # sql expression returns date type + column_on_axis: AdhocColumn = { + "label": "ADHOC COLUMN", + "sqlExpression": "col6 + interval '20 year'", + "columnType": "BASE_AXIS", + "timeGrain": "P1Y", + } + qc = QueryContextFactory().create( + datasource={ + "type": physical_dataset.type, + "id": physical_dataset.id, + }, + queries=[ + { + "columns": [column_on_axis], + "metrics": ["count"], + } + ], + result_type=ChartDataResultType.FULL, + force=True, + ) + query_object = qc.queries[0] + df = qc.get_df_payload(query_object)["df"] + # ADHOC COLUMN count + # 0 2022-01-01 10 + assert df["ADHOC COLUMN"][0].strftime("%Y-%m-%d") == "2022-01-01" + assert df["count"][0] == 10 + + +@only_postgresql +def test_non_date_adhoc_column(app_context, physical_dataset): + # sql expression returns non-date type + column_on_axis: AdhocColumn = { + "label": "ADHOC COLUMN", + "sqlExpression": "col1 * 10", + "columnType": "BASE_AXIS", + "timeGrain": "P1Y", + } + qc = QueryContextFactory().create( + datasource={ + "type": physical_dataset.type, + "id": physical_dataset.id, + }, + queries=[ + { + "columns": [column_on_axis], + "metrics": ["count"], + "orderby": [ + [ + { + "expressionType": "SQL", + "sqlExpression": '"ADHOC COLUMN"', + }, + True, + ] + ], + } + ], + result_type=ChartDataResultType.FULL, + force=True, + ) + query_object = qc.queries[0] + df = qc.get_df_payload(query_object)["df"] + assert df["ADHOC COLUMN"][0] == 0 + assert df["ADHOC COLUMN"][1] == 10 + + +@only_sqlite +def test_time_grain_and_time_offset_with_base_axis(app_context, physical_dataset): + column_on_axis: AdhocColumn = { + "label": "col6", + "sqlExpression": "col6", + "columnType": "BASE_AXIS", + "timeGrain": "P3M", + } + qc = QueryContextFactory().create( + datasource={ + "type": physical_dataset.type, + "id": physical_dataset.id, + }, + queries=[ + { + "columns": [column_on_axis], + "metrics": [ + { + "label": "SUM(col1)", + "expressionType": "SQL", + "sqlExpression": "SUM(col1)", + } + ], + "time_offsets": ["3 month ago"], + "granularity": "col6", + "time_range": "2002-01 : 2003-01", + } + ], + result_type=ChartDataResultType.FULL, + force=True, + ) + query_object = qc.queries[0] + df = qc.get_df_payload(query_object)["df"] + # todo: MySQL returns integer and float column as object type + """ + col6 SUM(col1) SUM(col1)__3 month ago +0 2002-01-01 3 NaN +1 2002-04-01 12 3.0 +2 2002-07-01 21 12.0 +3 2002-10-01 9 21.0 + """ + assert df.equals( + pd.DataFrame( + data={ + "col6": pd.to_datetime( + ["2002-01-01", "2002-04-01", "2002-07-01", "2002-10-01"] + ), + "SUM(col1)": [3, 12, 21, 9], + "SUM(col1)__3 month ago": [np.nan, 3, 12, 21], + } + ) + ) + + +@only_sqlite +def test_time_grain_and_time_offset_on_legacy_query(app_context, physical_dataset): + qc = QueryContextFactory().create( + datasource={ + "type": physical_dataset.type, + "id": physical_dataset.id, + }, + queries=[ + { + "columns": [], + "extras": { + "time_grain_sqla": "P3M", + }, + "metrics": [ + { + "label": "SUM(col1)", + "expressionType": "SQL", + "sqlExpression": "SUM(col1)", + } + ], + "time_offsets": ["3 month ago"], + "granularity": "col6", + "time_range": "2002-01 : 2003-01", + "is_timeseries": True, + } + ], + 
result_type=ChartDataResultType.FULL, + force=True, + ) + query_object = qc.queries[0] + df = qc.get_df_payload(query_object)["df"] + # todo: MySQL returns integer and float column as object type + """ + __timestamp SUM(col1) SUM(col1)__3 month ago +0 2002-01-01 3 NaN +1 2002-04-01 12 3.0 +2 2002-07-01 21 12.0 +3 2002-10-01 9 21.0 + """ + assert df.equals( + pd.DataFrame( + data={ + "__timestamp": pd.to_datetime( + ["2002-01-01", "2002-04-01", "2002-07-01", "2002-10-01"] + ), + "SUM(col1)": [3, 12, 21, 9], + "SUM(col1)__3 month ago": [np.nan, 3, 12, 21], + } + ) + ) + + +def test_time_offset_with_temporal_range_filter(app_context, physical_dataset): + qc = QueryContextFactory().create( + datasource={ + "type": physical_dataset.type, + "id": physical_dataset.id, + }, + queries=[ + { + "columns": [ + { + "label": "col6", + "sqlExpression": "col6", + "columnType": "BASE_AXIS", + "timeGrain": "P3M", + } + ], + "metrics": [ + { + "label": "SUM(col1)", + "expressionType": "SQL", + "sqlExpression": "SUM(col1)", + } + ], + "time_offsets": ["3 month ago"], + "filters": [ + { + "col": "col6", + "op": "TEMPORAL_RANGE", + "val": "2002-01 : 2003-01", + } + ], + } + ], + result_type=ChartDataResultType.FULL, + force=True, + ) + query_payload = qc.get_df_payload(qc.queries[0]) + df = query_payload["df"] + """ + col6 SUM(col1) SUM(col1)__3 month ago +0 2002-01-01 3 NaN +1 2002-04-01 12 3.0 +2 2002-07-01 21 12.0 +3 2002-10-01 9 21.0 + """ + assert df["SUM(col1)"].to_list() == [3, 12, 21, 9] + # df["SUM(col1)__3 month ago"].dtype is object so have to convert to float first + assert df["SUM(col1)__3 month ago"].astype("float").astype("Int64").to_list() == [ + pd.NA, + 3, + 12, + 21, + ] + + sqls = query_payload["query"].split(";") + """ + SELECT DATE_TRUNC('quarter', col6) AS col6, + SUM(col1) AS "SUM(col1)" + FROM physical_dataset + WHERE col6 >= TO_TIMESTAMP('2002-01-01 00:00:00.000000', 'YYYY-MM-DD HH24:MI:SS.US') + AND col6 < TO_TIMESTAMP('2003-01-01 00:00:00.000000', 'YYYY-MM-DD HH24:MI:SS.US') + GROUP BY DATE_TRUNC('quarter', col6) + LIMIT 10000; + + SELECT DATE_TRUNC('quarter', col6) AS col6, + SUM(col1) AS "SUM(col1)" + FROM physical_dataset + WHERE col6 >= TO_TIMESTAMP('2001-10-01 00:00:00.000000', 'YYYY-MM-DD HH24:MI:SS.US') + AND col6 < TO_TIMESTAMP('2002-10-01 00:00:00.000000', 'YYYY-MM-DD HH24:MI:SS.US') + GROUP BY DATE_TRUNC('quarter', col6) + LIMIT 10000; + """ + assert ( + re.search(r"WHERE col6 >= .*2002-01-01", sqls[0]) + and re.search(r"AND col6 < .*2003-01-01", sqls[0]) + ) is not None + assert ( + re.search(r"WHERE col6 >= .*2001-10-01", sqls[1]) + and re.search(r"AND col6 < .*2002-10-01", sqls[1]) + ) is not None diff --git a/tests/integration_tests/reports/__init__.py b/tests/integration_tests/reports/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/reports/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/reports/alert_tests.py b/tests/integration_tests/reports/alert_tests.py new file mode 100644 index 0000000000000..6c5c41a81ff23 --- /dev/null +++ b/tests/integration_tests/reports/alert_tests.py @@ -0,0 +1,200 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# pylint: disable=invalid-name, unused-argument, import-outside-toplevel +from contextlib import nullcontext +from typing import List, Optional, Tuple, Union + +import pandas as pd +import pytest +from pytest_mock import MockFixture + +from superset.reports.commands.exceptions import AlertQueryError +from superset.reports.models import ReportCreationMethod, ReportScheduleType +from superset.tasks.types import ExecutorType +from superset.utils.database import get_example_database +from tests.integration_tests.test_app import app + + +@pytest.mark.parametrize( + "owner_names,creator_name,config,expected_result", + [ + (["gamma"], None, [ExecutorType.SELENIUM], "admin"), + (["gamma"], None, [ExecutorType.OWNER], "gamma"), + ( + ["alpha", "gamma"], + "gamma", + [ExecutorType.CREATOR_OWNER], + "gamma", + ), + ( + ["alpha", "gamma"], + "alpha", + [ExecutorType.CREATOR_OWNER], + "alpha", + ), + ( + ["alpha", "gamma"], + "admin", + [ExecutorType.CREATOR_OWNER], + AlertQueryError(), + ), + (["gamma"], None, [ExecutorType.CURRENT_USER], AlertQueryError()), + ], +) +def test_execute_query_as_report_executor( + owner_names: List[str], + creator_name: Optional[str], + config: List[ExecutorType], + expected_result: Union[Tuple[ExecutorType, str], Exception], + mocker: MockFixture, + app_context: None, + get_user, +) -> None: + + from superset.reports.commands.alert import AlertCommand + from superset.reports.models import ReportSchedule + + with app.app_context(): + original_config = app.config["ALERT_REPORTS_EXECUTE_AS"] + app.config["ALERT_REPORTS_EXECUTE_AS"] = config + owners = [get_user(owner_name) for owner_name in owner_names] + report_schedule = ReportSchedule( + created_by=get_user(creator_name) if creator_name else None, + owners=owners, + type=ReportScheduleType.ALERT, + description="description", + crontab="0 9 * * *", + creation_method=ReportCreationMethod.ALERTS_REPORTS, + sql="SELECT 1", + grace_period=14400, + working_timeout=3600, + database=get_example_database(), + validator_config_json='{"op": "==", "threshold": 1}', + ) + command = 
AlertCommand(report_schedule=report_schedule) + override_user_mock = mocker.patch( + "superset.reports.commands.alert.override_user" + ) + cm = ( + pytest.raises(type(expected_result)) + if isinstance(expected_result, Exception) + else nullcontext() + ) + with cm: + command.run() + assert override_user_mock.call_args[0][0].username == expected_result + + app.config["ALERT_REPORTS_EXECUTE_AS"] = original_config + + +def test_execute_query_succeeded_no_retry( + mocker: MockFixture, app_context: None +) -> None: + + from superset.reports.commands.alert import AlertCommand + + execute_query_mock = mocker.patch( + "superset.reports.commands.alert.AlertCommand._execute_query", + side_effect=lambda: pd.DataFrame([{"sample_col": 0}]), + ) + + command = AlertCommand(report_schedule=mocker.Mock()) + + command.validate() + + assert execute_query_mock.call_count == 1 + + +def test_execute_query_succeeded_with_retries( + mocker: MockFixture, app_context: None +) -> None: + from superset.reports.commands.alert import AlertCommand, AlertQueryError + + execute_query_mock = mocker.patch( + "superset.reports.commands.alert.AlertCommand._execute_query" + ) + + query_executed_count = 0 + # Should match the value defined in superset_test_config.py + expected_max_retries = 3 + + def _mocked_execute_query() -> pd.DataFrame: + nonlocal query_executed_count + query_executed_count += 1 + + if query_executed_count < expected_max_retries: + raise AlertQueryError() + else: + return pd.DataFrame([{"sample_col": 0}]) + + execute_query_mock.side_effect = _mocked_execute_query + execute_query_mock.__name__ = "mocked_execute_query" + + command = AlertCommand(report_schedule=mocker.Mock()) + + command.validate() + + assert execute_query_mock.call_count == expected_max_retries + + +def test_execute_query_failed_no_retry(mocker: MockFixture, app_context: None) -> None: + from superset.reports.commands.alert import AlertCommand, AlertQueryTimeout + + execute_query_mock = mocker.patch( + "superset.reports.commands.alert.AlertCommand._execute_query" + ) + + def _mocked_execute_query() -> None: + raise AlertQueryTimeout + + execute_query_mock.side_effect = _mocked_execute_query + execute_query_mock.__name__ = "mocked_execute_query" + + command = AlertCommand(report_schedule=mocker.Mock()) + + try: + command.validate() + except AlertQueryTimeout: + pass + + assert execute_query_mock.call_count == 1 + + +def test_execute_query_failed_max_retries( + mocker: MockFixture, app_context: None +) -> None: + from superset.reports.commands.alert import AlertCommand, AlertQueryError + + execute_query_mock = mocker.patch( + "superset.reports.commands.alert.AlertCommand._execute_query" + ) + + def _mocked_execute_query() -> None: + raise AlertQueryError + + execute_query_mock.side_effect = _mocked_execute_query + execute_query_mock.__name__ = "mocked_execute_query" + + command = AlertCommand(report_schedule=mocker.Mock()) + + try: + command.validate() + except AlertQueryError: + pass + + # Should match the value defined in superset_test_config.py + assert execute_query_mock.call_count == 3 diff --git a/tests/integration_tests/reports/api_tests.py b/tests/integration_tests/reports/api_tests.py new file mode 100644 index 0000000000000..22b9be9990b74 --- /dev/null +++ b/tests/integration_tests/reports/api_tests.py @@ -0,0 +1,1670 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# isort:skip_file +"""Unit tests for Superset""" +from datetime import datetime +import json + +import pytz + +import pytest +import prison +from parameterized import parameterized +from sqlalchemy.sql import func + +from superset import db, security_manager +from superset.models.core import Database +from superset.models.slice import Slice +from superset.models.dashboard import Dashboard +from superset.reports.models import ( + ReportSchedule, + ReportCreationMethod, + ReportRecipients, + ReportExecutionLog, + ReportScheduleType, + ReportRecipientType, + ReportState, +) +from superset.utils.database import get_example_database +from tests.integration_tests.base_tests import SupersetTestCase +from tests.integration_tests.conftest import with_feature_flags +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) +from tests.integration_tests.reports.utils import insert_report_schedule + +REPORTS_COUNT = 10 +REPORTS_ROLE_NAME = "reports_role" +REPORTS_GAMMA_USER = "reports_gamma" + + +class TestReportSchedulesApi(SupersetTestCase): + @pytest.fixture() + def gamma_user_with_alerts_role(self): + with self.create_app().app_context(): + user = self.create_user( + REPORTS_GAMMA_USER, + "general", + "Gamma", + email=f"{REPORTS_GAMMA_USER}@superset.org", + ) + + security_manager.add_role(REPORTS_ROLE_NAME) + read_perm = security_manager.find_permission_view_menu( + "can_read", + "ReportSchedule", + ) + write_perm = security_manager.find_permission_view_menu( + "can_write", + "ReportSchedule", + ) + reports_role = security_manager.find_role(REPORTS_ROLE_NAME) + security_manager.add_permission_role(reports_role, read_perm) + security_manager.add_permission_role(reports_role, write_perm) + user.roles.append(reports_role) + + yield user + + # rollback changes (assuming cascade delete) + db.session.delete(reports_role) + db.session.delete(user) + db.session.commit() + + @pytest.fixture() + def create_working_admin_report_schedule(self): + with self.create_app().app_context(): + + admin_user = self.get_user("admin") + chart = db.session.query(Slice).first() + example_db = get_example_database() + + report_schedule = insert_report_schedule( + type=ReportScheduleType.ALERT, + name="name_admin_working", + crontab="* * * * *", + sql="SELECT value from table", + description="Report working", + chart=chart, + database=example_db, + owners=[admin_user], + last_state=ReportState.WORKING, + ) + + yield + + db.session.delete(report_schedule) + db.session.commit() + + @pytest.mark.usefixtures("gamma_user_with_alerts_role") + @pytest.fixture() + def create_working_gamma_report_schedule(self, gamma_user_with_alerts_role): + with self.create_app().app_context(): + + chart = db.session.query(Slice).first() + example_db = get_example_database() + + report_schedule = insert_report_schedule( + type=ReportScheduleType.ALERT, + name="name_gamma_working", + crontab="* * * * 
*", + sql="SELECT value from table", + description="Report working", + chart=chart, + database=example_db, + owners=[gamma_user_with_alerts_role], + last_state=ReportState.WORKING, + ) + + yield + + db.session.delete(report_schedule) + db.session.commit() + + @pytest.mark.usefixtures("gamma_user_with_alerts_role") + @pytest.fixture() + def create_working_shared_report_schedule(self, gamma_user_with_alerts_role): + with self.create_app().app_context(): + + admin_user = self.get_user("admin") + alpha_user = self.get_user("alpha") + chart = db.session.query(Slice).first() + example_db = get_example_database() + + report_schedule = insert_report_schedule( + type=ReportScheduleType.ALERT, + name="name_shared_working", + crontab="* * * * *", + sql="SELECT value from table", + description="Report working", + chart=chart, + database=example_db, + owners=[admin_user, alpha_user, gamma_user_with_alerts_role], + last_state=ReportState.WORKING, + ) + + yield + + db.session.delete(report_schedule) + db.session.commit() + + @pytest.fixture() + def create_report_schedules(self): + with self.create_app().app_context(): + report_schedules = [] + admin_user = self.get_user("admin") + alpha_user = self.get_user("alpha") + chart = db.session.query(Slice).first() + example_db = get_example_database() + for cx in range(REPORTS_COUNT): + recipients = [] + logs = [] + for cy in range(cx): + config_json = {"target": f"target{cy}@email.com"} + recipients.append( + ReportRecipients( + type=ReportRecipientType.EMAIL, + recipient_config_json=json.dumps(config_json), + ) + ) + logs.append( + ReportExecutionLog( + scheduled_dttm=datetime(2020, 1, 1), + state=ReportState.ERROR, + error_message=f"Error {cy}", + ) + ) + report_schedules.append( + insert_report_schedule( + type=ReportScheduleType.ALERT, + name=f"name{cx}", + crontab=f"*/{cx} * * * *", + sql=f"SELECT value from table{cx}", + description=f"Some description {cx}", + chart=chart, + database=example_db, + owners=[admin_user, alpha_user], + recipients=recipients, + logs=logs, + ) + ) + yield report_schedules + + report_schedules = db.session.query(ReportSchedule).all() + # rollback changes (assuming cascade delete) + for report_schedule in report_schedules: + db.session.delete(report_schedule) + db.session.commit() + + @pytest.fixture() + def create_alpha_users(self): + with self.create_app().app_context(): + + users = [ + self.create_user( + "alpha1", "password", "Alpha", email="alpha1@superset.org" + ), + self.create_user( + "alpha2", "password", "Alpha", email="alpha2@superset.org" + ), + ] + + yield users + + # rollback changes (assuming cascade delete) + for user in users: + db.session.delete(user) + db.session.commit() + + @with_feature_flags(ALERT_REPORTS=False) + @pytest.mark.usefixtures("create_report_schedules") + def test_get_report_schedule_disabled(self): + """ + ReportSchedule Api: Test get report schedule 404s when feature is disabled + """ + report_schedule = ( + db.session.query(ReportSchedule) + .filter(ReportSchedule.name == "name1") + .first() + ) + + self.login(username="admin") + uri = f"api/v1/report/{report_schedule.id}" + rv = self.client.get(uri) + assert rv.status_code == 404 + + @pytest.mark.usefixtures("create_report_schedules") + def test_get_report_schedule(self): + """ + ReportSchedule Api: Test get report schedule + """ + report_schedule = ( + db.session.query(ReportSchedule) + .filter(ReportSchedule.name == "name1") + .first() + ) + + self.login(username="admin") + uri = f"api/v1/report/{report_schedule.id}" + rv = 
self.get_assert_metric(uri, "get") + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 200 + expected_result = { + "active": report_schedule.active, + "chart": { + "id": report_schedule.chart.id, + "slice_name": report_schedule.chart.slice_name, + "viz_type": report_schedule.chart.viz_type, + }, + "context_markdown": report_schedule.context_markdown, + "crontab": report_schedule.crontab, + "dashboard": None, + "database": { + "id": report_schedule.database.id, + "database_name": report_schedule.database.database_name, + }, + "description": report_schedule.description, + "grace_period": report_schedule.grace_period, + "id": report_schedule.id, + "last_eval_dttm": report_schedule.last_eval_dttm, + "last_state": report_schedule.last_state, + "last_value": report_schedule.last_value, + "last_value_row_json": report_schedule.last_value_row_json, + "log_retention": report_schedule.log_retention, + "name": report_schedule.name, + "recipients": [ + { + "id": report_schedule.recipients[0].id, + "recipient_config_json": '{"target": "target0@email.com"}', + "type": "Email", + } + ], + "timezone": report_schedule.timezone, + "type": report_schedule.type, + "validator_config_json": report_schedule.validator_config_json, + "validator_type": report_schedule.validator_type, + } + for key in expected_result: + assert data["result"][key] == expected_result[key] + # needed because order may vary + assert {"first_name": "admin", "id": 1, "last_name": "user"} in data["result"][ + "owners" + ] + assert {"first_name": "alpha", "id": 5, "last_name": "user"} in data["result"][ + "owners" + ] + assert len(data["result"]["owners"]) == 2 + + def test_info_report_schedule(self): + """ + ReportSchedule API: Test info + """ + self.login(username="admin") + uri = f"api/v1/report/_info" + rv = self.get_assert_metric(uri, "info") + assert rv.status_code == 200 + + def test_info_security_report(self): + """ + ReportSchedule API: Test info security + """ + self.login(username="admin") + params = {"keys": ["permissions"]} + uri = f"api/v1/report/_info?q={prison.dumps(params)}" + rv = self.get_assert_metric(uri, "info") + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 200 + assert "can_read" in data["permissions"] + assert "can_write" in data["permissions"] + assert len(data["permissions"]) == 2 + + @pytest.mark.usefixtures("create_report_schedules") + def test_get_report_schedule_not_found(self): + """ + ReportSchedule Api: Test get report schedule not found + """ + max_id = db.session.query(func.max(ReportSchedule.id)).scalar() + self.login(username="admin") + uri = f"api/v1/report/{max_id + 1}" + rv = self.get_assert_metric(uri, "get") + assert rv.status_code == 404 + + @pytest.mark.usefixtures("create_report_schedules") + def test_get_list_report_schedule(self): + """ + ReportSchedule Api: Test get list report schedules + """ + self.login(username="admin") + uri = f"api/v1/report/" + rv = self.get_assert_metric(uri, "get_list") + + expected_fields = [ + "active", + "changed_by", + "changed_on", + "changed_on_delta_humanized", + "chart_id", + "created_by", + "created_on", + "creation_method", + "crontab", + "crontab_humanized", + "dashboard_id", + "description", + "extra", + "id", + "last_eval_dttm", + "last_state", + "name", + "owners", + "recipients", + "timezone", + "type", + ] + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == REPORTS_COUNT + data_keys = sorted(list(data["result"][0].keys())) + assert expected_fields == 
data_keys + + # Assert nested fields + expected_owners_fields = ["first_name", "id", "last_name"] + data_keys = sorted(list(data["result"][0]["owners"][0].keys())) + assert expected_owners_fields == data_keys + + expected_recipients_fields = ["id", "type"] + data_keys = sorted(list(data["result"][1]["recipients"][0].keys())) + assert expected_recipients_fields == data_keys + + @parameterized.expand( + [ + ( + "admin", + { + "name_admin_working", + "name_gamma_working", + "name_shared_working", + }, + ), + ( + "alpha", + { + "name_admin_working", + "name_gamma_working", + "name_shared_working", + }, + ), + ( + REPORTS_GAMMA_USER, + { + "name_gamma_working", + "name_shared_working", + }, + ), + ], + ) + @pytest.mark.usefixtures( + "create_working_admin_report_schedule", + "create_working_gamma_report_schedule", + "create_working_shared_report_schedule", + "gamma_user_with_alerts_role", + ) + def test_get_list_report_schedule_perms(self, username, report_names): + """ + ReportSchedule Api: Test get list report schedules for different roles + """ + self.login(username=username) + uri = f"api/v1/report/" + rv = self.get_assert_metric(uri, "get_list") + + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert {report["name"] for report in data["result"]} == report_names + + def test_get_list_report_schedule_gamma(self): + """ + ReportSchedule Api: Test get list report schedules for regular gamma user + """ + self.login(username="gamma") + uri = f"api/v1/report/" + rv = self.client.get(uri) + + assert rv.status_code == 403 + + @pytest.mark.usefixtures("create_report_schedules") + def test_get_list_report_schedule_sorting(self): + """ + ReportSchedule Api: Test sorting on get list report schedules + """ + self.login(username="admin") + uri = "api/v1/report/" + + order_columns = [ + "active", + "created_by.first_name", + "changed_by.first_name", + "changed_on", + "changed_on_delta_humanized", + "created_on", + "crontab", + "description", + "last_eval_dttm", + "name", + "type", + "crontab_humanized", + ] + + for order_column in order_columns: + arguments = {"order_column": order_column, "order_direction": "asc"} + uri = f"api/v1/report/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + assert rv.status_code == 200 + + @pytest.mark.usefixtures("create_report_schedules") + def test_get_list_report_schedule_filter_name(self): + """ + ReportSchedule Api: Test filter name on get list report schedules + """ + self.login(username="admin") + # Test normal contains filter + arguments = { + "columns": ["name"], + "filters": [{"col": "name", "opr": "ct", "value": "2"}], + } + uri = f"api/v1/report/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + + expected_result = { + "name": "name2", + } + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == 1 + assert data["result"][0] == expected_result + + @pytest.mark.usefixtures("create_report_schedules") + def test_get_list_report_schedule_filter_custom(self): + """ + ReportSchedule Api: Test custom filter on get list report schedules + """ + self.login(username="admin") + # Test custom all text filter + arguments = { + "columns": ["name"], + "filters": [{"col": "name", "opr": "report_all_text", "value": "table3"}], + } + uri = f"api/v1/report/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + + expected_result = { + "name": "name3", + } + assert rv.status_code == 200 + data = 
json.loads(rv.data.decode("utf-8")) + assert data["count"] == 1 + assert data["result"][0] == expected_result + + @pytest.mark.usefixtures("create_report_schedules") + def test_get_list_report_schedule_filter_active(self): + """ + ReportSchedule Api: Test active filter on get list report schedules + """ + self.login(username="admin") + arguments = { + "columns": ["name"], + "filters": [{"col": "active", "opr": "eq", "value": True}], + } + uri = f"api/v1/report/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == REPORTS_COUNT + + @pytest.mark.usefixtures("create_report_schedules") + def test_get_list_report_schedule_filter_type(self): + """ + ReportSchedule Api: Test type filter on get list report schedules + """ + self.login(username="admin") + arguments = { + "columns": ["name"], + "filters": [ + {"col": "type", "opr": "eq", "value": ReportScheduleType.ALERT} + ], + } + uri = f"api/v1/report/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == REPORTS_COUNT + + # Test type filter + arguments = { + "columns": ["name"], + "filters": [ + {"col": "type", "opr": "eq", "value": ReportScheduleType.REPORT} + ], + } + uri = f"api/v1/report/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == 0 + + @pytest.mark.usefixtures("create_report_schedules") + def test_get_related_report_schedule(self): + """ + ReportSchedule Api: Test get related report schedule + """ + self.login(username="admin") + related_columns = ["created_by", "chart", "dashboard", "database"] + for related_column in related_columns: + uri = f"api/v1/report/related/{related_column}" + rv = self.client.get(uri) + assert rv.status_code == 200 + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_create_report_schedule(self): + """ + ReportSchedule Api: Test create report schedule + """ + self.login(username="admin") + + chart = db.session.query(Slice).first() + example_db = get_example_database() + report_schedule_data = { + "type": ReportScheduleType.ALERT, + "name": "new3", + "description": "description", + "crontab": "0 9 * * *", + "creation_method": ReportCreationMethod.ALERTS_REPORTS, + "recipients": [ + { + "type": ReportRecipientType.EMAIL, + "recipient_config_json": {"target": "target@superset.org"}, + }, + { + "type": ReportRecipientType.SLACK, + "recipient_config_json": {"target": "channel"}, + }, + ], + "grace_period": 14400, + "working_timeout": 3600, + "chart": chart.id, + "database": example_db.id, + } + uri = "api/v1/report/" + rv = self.post_assert_metric(uri, report_schedule_data, "post") + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 201 + created_model = db.session.query(ReportSchedule).get(data.get("id")) + assert created_model is not None + assert created_model.name == report_schedule_data["name"] + assert created_model.grace_period == report_schedule_data["grace_period"] + assert created_model.working_timeout == report_schedule_data["working_timeout"] + assert created_model.description == report_schedule_data["description"] + assert created_model.crontab == report_schedule_data["crontab"] + assert created_model.chart.id == report_schedule_data["chart"] + assert 
created_model.database.id == report_schedule_data["database"] + assert created_model.creation_method == report_schedule_data["creation_method"] + # Rollback changes + db.session.delete(created_model) + db.session.commit() + + @pytest.mark.usefixtures("create_report_schedules") + def test_create_report_schedule_uniqueness(self): + """ + ReportSchedule Api: Test create report schedule uniqueness + """ + self.login(username="admin") + + chart = db.session.query(Slice).first() + example_db = get_example_database() + report_schedule_data = { + "type": ReportScheduleType.ALERT, + "name": "name3", + "description": "description", + "creation_method": ReportCreationMethod.ALERTS_REPORTS, + "crontab": "0 9 * * *", + "chart": chart.id, + "database": example_db.id, + } + uri = "api/v1/report/" + rv = self.post_assert_metric(uri, report_schedule_data, "post") + assert rv.status_code == 422 + data = json.loads(rv.data.decode("utf-8")) + assert data == {"message": {"name": ['An alert named "name3" already exists']}} + + # Check that uniqueness is composed by name and type + report_schedule_data = { + "type": ReportScheduleType.REPORT, + "name": "name3", + "description": "description", + "crontab": "0 9 * * *", + "creation_method": ReportCreationMethod.ALERTS_REPORTS, + "chart": chart.id, + } + uri = "api/v1/report/" + rv = self.client.post(uri, json=report_schedule_data) + assert rv.status_code == 201 + data = json.loads(rv.data.decode("utf-8")) + + # Rollback changes + created_model = db.session.query(ReportSchedule).get(data.get("id")) + db.session.delete(created_model) + db.session.commit() + + @pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "create_report_schedules" + ) + def test_create_report_schedule_schema(self): + """ + ReportSchedule Api: Test create report schedule schema check + """ + self.login(username="admin") + chart = db.session.query(Slice).first() + dashboard = db.session.query(Dashboard).first() + example_db = get_example_database() + + # Check that a report does not have a database reference + report_schedule_data = { + "type": ReportScheduleType.REPORT, + "name": "name3", + "description": "description", + "creation_method": ReportCreationMethod.ALERTS_REPORTS, + "crontab": "0 9 * * *", + "chart": chart.id, + "database": example_db.id, + } + uri = "api/v1/report/" + rv = self.post_assert_metric(uri, report_schedule_data, "post") + assert rv.status_code == 400 + + # Test that report can be created with null grace period + report_schedule_data = { + "type": ReportScheduleType.ALERT, + "name": "new3", + "description": "description", + "creation_method": ReportCreationMethod.ALERTS_REPORTS, + "crontab": "0 9 * * *", + "recipients": [ + { + "type": ReportRecipientType.EMAIL, + "recipient_config_json": {"target": "target@superset.org"}, + }, + { + "type": ReportRecipientType.SLACK, + "recipient_config_json": {"target": "channel"}, + }, + ], + "working_timeout": 3600, + "chart": chart.id, + "database": example_db.id, + } + uri = "api/v1/report/" + rv = self.post_assert_metric(uri, report_schedule_data, "post") + assert rv.status_code == 201 + + # Test that grace period and working timeout cannot be < 1 + report_schedule_data = { + "type": ReportScheduleType.ALERT, + "name": "new3", + "description": "description", + "creation_method": ReportCreationMethod.ALERTS_REPORTS, + "crontab": "0 9 * * *", + "recipients": [ + { + "type": ReportRecipientType.EMAIL, + "recipient_config_json": {"target": "target@superset.org"}, + }, + { + "type": ReportRecipientType.SLACK, + 
"recipient_config_json": {"target": "channel"}, + }, + ], + "working_timeout": -10, + "chart": chart.id, + "database": example_db.id, + } + uri = "api/v1/report/" + rv = self.post_assert_metric(uri, report_schedule_data, "post") + assert rv.status_code == 400 + + report_schedule_data = { + "type": ReportScheduleType.ALERT, + "name": "new3", + "description": "description", + "creation_method": ReportCreationMethod.ALERTS_REPORTS, + "crontab": "0 9 * * *", + "recipients": [ + { + "type": ReportRecipientType.EMAIL, + "recipient_config_json": {"target": "target@superset.org"}, + }, + { + "type": ReportRecipientType.SLACK, + "recipient_config_json": {"target": "channel"}, + }, + ], + "grace_period": -10, + "working_timeout": 3600, + "chart": chart.id, + "database": example_db.id, + } + uri = "api/v1/report/" + rv = self.post_assert_metric(uri, report_schedule_data, "post") + assert rv.status_code == 400 + + # Test that report can be created with null dashboard + report_schedule_data = { + "type": ReportScheduleType.ALERT, + "name": "new4", + "description": "description", + "creation_method": ReportCreationMethod.ALERTS_REPORTS, + "crontab": "0 9 * * *", + "recipients": [ + { + "type": ReportRecipientType.EMAIL, + "recipient_config_json": {"target": "target@superset.org"}, + }, + { + "type": ReportRecipientType.SLACK, + "recipient_config_json": {"target": "channel"}, + }, + ], + "working_timeout": 3600, + "chart": chart.id, + "dashboard": None, + "database": example_db.id, + } + uri = "api/v1/report/" + rv = self.post_assert_metric(uri, report_schedule_data, "post") + assert rv.status_code == 201 + + # Test that report can be created with null chart + report_schedule_data = { + "type": ReportScheduleType.ALERT, + "name": "new5", + "description": "description", + "creation_method": ReportCreationMethod.ALERTS_REPORTS, + "crontab": "0 9 * * *", + "recipients": [ + { + "type": ReportRecipientType.EMAIL, + "recipient_config_json": {"target": "target@superset.org"}, + }, + { + "type": ReportRecipientType.SLACK, + "recipient_config_json": {"target": "channel"}, + }, + ], + "working_timeout": 3600, + "chart": None, + "dashboard": dashboard.id, + "database": example_db.id, + } + uri = "api/v1/report/" + rv = self.post_assert_metric(uri, report_schedule_data, "post") + assert rv.status_code == 201 + + # Test that report cannot be created with null timezone + report_schedule_data = { + "type": ReportScheduleType.ALERT, + "name": "new5", + "description": "description", + "creation_method": ReportCreationMethod.ALERTS_REPORTS, + "crontab": "0 9 * * *", + "recipients": [ + { + "type": ReportRecipientType.EMAIL, + "recipient_config_json": {"target": "target@superset.org"}, + }, + { + "type": ReportRecipientType.SLACK, + "recipient_config_json": {"target": "channel"}, + }, + ], + "working_timeout": 3600, + "timezone": None, + "dashboard": dashboard.id, + "database": example_db.id, + } + rv = self.post_assert_metric(uri, report_schedule_data, "post") + assert rv.status_code == 400 + data = json.loads(rv.data.decode("utf-8")) + assert data == {"message": {"timezone": ["Field may not be null."]}} + + # Test that report cannot be created with an invalid timezone + report_schedule_data = { + "type": ReportScheduleType.ALERT, + "name": "new5", + "description": "description", + "creation_method": ReportCreationMethod.ALERTS_REPORTS, + "crontab": "0 9 * * *", + "recipients": [ + { + "type": ReportRecipientType.EMAIL, + "recipient_config_json": {"target": "target@superset.org"}, + }, + { + "type": 
ReportRecipientType.SLACK, + "recipient_config_json": {"target": "channel"}, + }, + ], + "working_timeout": 3600, + "timezone": "this is not a timezone", + "dashboard": dashboard.id, + "database": example_db.id, + } + rv = self.post_assert_metric(uri, report_schedule_data, "post") + assert rv.status_code == 400 + data = json.loads(rv.data.decode("utf-8")) + assert data == { + "message": { + "timezone": [f"Must be one of: {', '.join(pytz.all_timezones)}."] + } + } + + # Test that report should reflect the timezone value passed in + report_schedule_data = { + "type": ReportScheduleType.ALERT, + "name": "new6", + "description": "description", + "creation_method": ReportCreationMethod.ALERTS_REPORTS, + "crontab": "0 9 * * *", + "recipients": [ + { + "type": ReportRecipientType.EMAIL, + "recipient_config_json": {"target": "target@superset.org"}, + }, + { + "type": ReportRecipientType.SLACK, + "recipient_config_json": {"target": "channel"}, + }, + ], + "working_timeout": 3600, + "timezone": "America/Los_Angeles", + "dashboard": dashboard.id, + "database": example_db.id, + } + uri = "api/v1/report/" + rv = self.post_assert_metric(uri, report_schedule_data, "post") + data = json.loads(rv.data.decode("utf-8")) + assert data["result"]["timezone"] == "America/Los_Angeles" + assert rv.status_code == 201 + + @pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "create_report_schedules" + ) + def test_unsaved_report_schedule_schema(self): + """ + ReportSchedule Api: Test create report schedule with unsaved chart + """ + self.login(username="admin") + chart = db.session.query(Slice).first() + dashboard = db.session.query(Dashboard).first() + example_db = get_example_database() + + report_schedule_data = { + "type": ReportScheduleType.REPORT, + "name": "name3", + "description": "description", + "creation_method": ReportCreationMethod.CHARTS, + "crontab": "0 9 * * *", + "chart": 0, + } + uri = "api/v1/report/" + rv = self.post_assert_metric(uri, report_schedule_data, "post") + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 422 + assert ( + data["message"]["chart"] + == "Please save your chart first, then try creating a new email report." + ) + + @pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "create_report_schedules" + ) + def test_no_dashboard_report_schedule_schema(self): + """ + ReportSchedule Api: Test create report schedule with no dashboard id + """ + self.login(username="admin") + chart = db.session.query(Slice).first() + dashboard = db.session.query(Dashboard).first() + example_db = get_example_database() + report_schedule_data = { + "type": ReportScheduleType.REPORT, + "name": "name3", + "description": "description", + "creation_method": ReportCreationMethod.DASHBOARDS, + "crontab": "0 9 * * *", + } + uri = "api/v1/report/" + rv = self.post_assert_metric(uri, report_schedule_data, "post") + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 422 + assert ( + data["message"]["dashboard"] + == "Please save your dashboard first, then try creating a new email report." 
+ ) + + @pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "create_report_schedules" + ) + def test_create_multiple_creation_method_report_schedule_charts(self): + """ + ReportSchedule Api: Test create multiple reports with the same creation method + """ + self.login(username="admin") + chart = db.session.query(Slice).first() + dashboard = db.session.query(Dashboard).first() + example_db = get_example_database() + report_schedule_data = { + "type": ReportScheduleType.REPORT, + "name": "name4", + "description": "description", + "creation_method": ReportCreationMethod.CHARTS, + "crontab": "0 9 * * *", + "working_timeout": 3600, + "chart": chart.id, + } + uri = "api/v1/report/" + rv = self.post_assert_metric(uri, report_schedule_data, "post") + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 201 + + # this second time it should receive an error because the chart has an attached report + # with the same creation method from the same user. + report_schedule_data = { + "type": ReportScheduleType.REPORT, + "name": "name5", + "description": "description", + "creation_method": ReportCreationMethod.CHARTS, + "crontab": "0 9 * * *", + "working_timeout": 3600, + "chart": chart.id, + } + uri = "api/v1/report/" + rv = self.post_assert_metric(uri, report_schedule_data, "post") + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 409 + assert data == { + "errors": [ + { + "message": "Resource already has an attached report.", + "error_type": "GENERIC_COMMAND_ERROR", + "level": "warning", + "extra": { + "issue_codes": [ + { + "code": 1010, + "message": "Issue 1010 - Superset encountered an error while running a command.", + } + ] + }, + } + ] + } + + @pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "create_report_schedules" + ) + def test_create_multiple_creation_method_report_schedule_dashboards(self): + """ + ReportSchedule Api: Test create multiple reports with the same creation method + """ + self.login(username="admin") + chart = db.session.query(Slice).first() + dashboard = db.session.query(Dashboard).first() + example_db = get_example_database() + report_schedule_data = { + "type": ReportScheduleType.REPORT, + "name": "name4", + "description": "description", + "creation_method": ReportCreationMethod.DASHBOARDS, + "crontab": "0 9 * * *", + "working_timeout": 3600, + "dashboard": dashboard.id, + } + uri = "api/v1/report/" + rv = self.post_assert_metric(uri, report_schedule_data, "post") + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 201 + + # this second time it should receive an error because the dashboard has an attached report + # with the same creation method from the same user. 
+ report_schedule_data = { + "type": ReportScheduleType.REPORT, + "name": "name5", + "description": "description", + "creation_method": ReportCreationMethod.DASHBOARDS, + "crontab": "0 9 * * *", + "working_timeout": 3600, + "dashboard": dashboard.id, + } + uri = "api/v1/report/" + rv = self.post_assert_metric(uri, report_schedule_data, "post") + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 409 + assert data == { + "errors": [ + { + "message": "Resource already has an attached report.", + "error_type": "GENERIC_COMMAND_ERROR", + "level": "warning", + "extra": { + "issue_codes": [ + { + "code": 1010, + "message": "Issue 1010 - Superset encountered an error while running a command.", + } + ] + }, + } + ] + } + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_create_report_schedule_chart_dash_validation(self): + """ + ReportSchedule Api: Test create report schedule chart and dashboard validation + """ + self.login(username="admin") + + # Test we can submit a chart or a dashboard not both + chart = db.session.query(Slice).first() + dashboard = db.session.query(Dashboard).first() + example_db = get_example_database() + report_schedule_data = { + "type": ReportScheduleType.ALERT, + "name": "new3", + "description": "description", + "crontab": "0 9 * * *", + "creation_method": ReportCreationMethod.ALERTS_REPORTS, + "chart": chart.id, + "dashboard": dashboard.id, + "database": example_db.id, + } + uri = "api/v1/report/" + rv = self.post_assert_metric(uri, report_schedule_data, "post") + assert rv.status_code == 422 + data = json.loads(rv.data.decode("utf-8")) + assert data == {"message": {"chart": "Choose a chart or dashboard not both"}} + + @pytest.mark.usefixtures("create_report_schedules") + def test_create_report_schedule_chart_db_validation(self): + """ + ReportSchedule Api: Test create report schedule chart and database validation + """ + self.login(username="admin") + + # Test database required for alerts + chart = db.session.query(Slice).first() + report_schedule_data = { + "type": ReportScheduleType.ALERT, + "name": "new3", + "description": "description", + "creation_method": ReportCreationMethod.ALERTS_REPORTS, + "crontab": "0 9 * * *", + "chart": chart.id, + } + uri = "api/v1/report/" + rv = self.post_assert_metric(uri, report_schedule_data, "post") + assert rv.status_code == 422 + data = json.loads(rv.data.decode("utf-8")) + assert data == {"message": {"database": "Database is required for alerts"}} + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_create_report_schedule_relations_exist(self): + """ + ReportSchedule Api: Test create report schedule + relations (chart, dash, db) exist + """ + self.login(username="admin") + + # Test chart and database do not exist + chart_max_id = db.session.query(func.max(Slice.id)).scalar() + database_max_id = db.session.query(func.max(Database.id)).scalar() + examples_db = get_example_database() + report_schedule_data = { + "type": ReportScheduleType.ALERT, + "name": "new3", + "description": "description", + "creation_method": ReportCreationMethod.ALERTS_REPORTS, + "crontab": "0 9 * * *", + "chart": chart_max_id + 1, + "database": database_max_id + 1, + } + uri = "api/v1/report/" + rv = self.post_assert_metric(uri, report_schedule_data, "post") + assert rv.status_code == 422 + data = json.loads(rv.data.decode("utf-8")) + assert data == { + "message": { + "chart": "Chart does not exist", + "database": "Database does not exist", + } + } + + # Test dashboard does not 
exist + dashboard_max_id = db.session.query(func.max(Dashboard.id)).scalar() + report_schedule_data = { + "type": ReportScheduleType.ALERT, + "name": "new3", + "description": "description", + "crontab": "0 9 * * *", + "creation_method": ReportCreationMethod.ALERTS_REPORTS, + "dashboard": dashboard_max_id + 1, + "database": examples_db.id, + } + uri = "api/v1/report/" + rv = self.post_assert_metric(uri, report_schedule_data, "post") + assert rv.status_code == 422 + data = json.loads(rv.data.decode("utf-8")) + assert data == {"message": {"dashboard": "Dashboard does not exist"}} + + # @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + # TODO (AAfghahi): I am going to enable this when the report schedule feature is fully finished + # def test_create_report_schedule_no_creation_method(self): + # """ + # ReportSchedule Api: Test create report schedule + # """ + # self.login(username="admin") + + # chart = db.session.query(Slice).first() + # example_db = get_example_database() + # report_schedule_data = { + # "type": ReportScheduleType.ALERT, + # "name": "new3", + # "description": "description", + # "crontab": "0 9 * * *", + # "recipients": [ + # { + # "type": ReportRecipientType.EMAIL, + # "recipient_config_json": {"target": "target@superset.org"}, + # }, + # { + # "type": ReportRecipientType.SLACK, + # "recipient_config_json": {"target": "channel"}, + # }, + # ], + # "grace_period": 14400, + # "working_timeout": 3600, + # "chart": chart.id, + # "database": example_db.id, + # } + # uri = "api/v1/report/" + # rv = self.client.post(uri, json=report_schedule_data) + # response = json.loads(rv.data.decode("utf-8")) + # assert response == { + # "message": {"creation_method": ["Missing data for required field."]} + # } + # assert rv.status_code == 400 + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_create_report_schedule_invalid_creation_method(self): + """ + ReportSchedule API: Test create report schedule + """ + self.login(username="admin") + + chart = db.session.query(Slice).first() + example_db = get_example_database() + report_schedule_data = { + "type": ReportScheduleType.ALERT, + "name": "new3", + "description": "description", + "creation_method": "BAD_CREATION_METHOD", + "crontab": "0 9 * * *", + "recipients": [ + { + "type": ReportRecipientType.EMAIL, + "recipient_config_json": {"target": "target@superset.org"}, + }, + { + "type": ReportRecipientType.SLACK, + "recipient_config_json": {"target": "channel"}, + }, + ], + "grace_period": 14400, + "working_timeout": 3600, + "chart": chart.id, + "database": example_db.id, + } + uri = "api/v1/report/" + rv = self.post_assert_metric(uri, report_schedule_data, "post") + response = json.loads(rv.data.decode("utf-8")) + assert response == { + "message": {"creation_method": ["Invalid enum value BAD_CREATION_METHOD"]} + } + assert rv.status_code == 400 + + @pytest.mark.usefixtures("create_report_schedules") + def test_update_report_schedule(self): + """ + ReportSchedule Api: Test update report schedule + """ + report_schedule = ( + db.session.query(ReportSchedule) + .filter(ReportSchedule.name == "name2") + .one_or_none() + ) + + self.login(username="admin") + chart = db.session.query(Slice).first() + example_db = get_example_database() + report_schedule_data = { + "type": ReportScheduleType.ALERT, + "name": "changed", + "description": "description", + "crontab": "0 10 * * *", + "recipients": [ + { + "type": ReportRecipientType.EMAIL, + "recipient_config_json": {"target": "target@superset.org"}, + } + 
], + "chart": chart.id, + "database": example_db.id, + } + + uri = f"api/v1/report/{report_schedule.id}" + + rv = self.put_assert_metric(uri, report_schedule_data, "put") + assert rv.status_code == 200 + updated_model = db.session.query(ReportSchedule).get(report_schedule.id) + assert updated_model is not None + assert updated_model.name == report_schedule_data["name"] + assert updated_model.description == report_schedule_data["description"] + assert len(updated_model.recipients) == 1 + assert updated_model.crontab == report_schedule_data["crontab"] + assert updated_model.chart_id == report_schedule_data["chart"] + assert updated_model.database_id == report_schedule_data["database"] + + @pytest.mark.usefixtures("create_working_shared_report_schedule") + def test_update_report_schedule_state_working(self): + """ + ReportSchedule Api: Test update state in a working report + """ + report_schedule = ( + db.session.query(ReportSchedule) + .filter(ReportSchedule.name == "name_shared_working") + .one_or_none() + ) + + self.login(username="admin") + report_schedule_data = {"active": False} + uri = f"api/v1/report/{report_schedule.id}" + rv = self.put_assert_metric(uri, report_schedule_data, "put") + assert rv.status_code == 200 + report_schedule = ( + db.session.query(ReportSchedule) + .filter(ReportSchedule.name == "name_shared_working") + .one_or_none() + ) + assert report_schedule.last_state == ReportState.NOOP + + @pytest.mark.usefixtures("create_report_schedules") + def test_update_report_schedule_uniqueness(self): + """ + ReportSchedule Api: Test update report schedule uniqueness + """ + report_schedule = ( + db.session.query(ReportSchedule) + .filter(ReportSchedule.name == "name2") + .one_or_none() + ) + + self.login(username="admin") + report_schedule_data = {"name": "name3", "description": "changed_description"} + uri = f"api/v1/report/{report_schedule.id}" + rv = self.put_assert_metric(uri, report_schedule_data, "put") + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 422 + assert data == {"message": {"name": ['An alert named "name3" already exists']}} + + @pytest.mark.usefixtures("create_report_schedules") + def test_update_report_schedule_not_found(self): + """ + ReportSchedule Api: Test update report schedule not found + """ + max_id = db.session.query(func.max(ReportSchedule.id)).scalar() + + self.login(username="admin") + report_schedule_data = {"name": "changed"} + uri = f"api/v1/report/{max_id + 1}" + rv = self.client.put(uri, json=report_schedule_data) + assert rv.status_code == 404 + + @pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "create_report_schedules" + ) + def test_update_report_schedule_chart_dash_validation(self): + """ + ReportSchedule Api: Test update report schedule chart and dashboard validation + """ + self.login(username="admin") + + report_schedule = ( + db.session.query(ReportSchedule) + .filter(ReportSchedule.name == "name2") + .one_or_none() + ) + # Test we can submit a chart or a dashboard not both + chart = db.session.query(Slice).first() + dashboard = db.session.query(Dashboard).first() + example_db = get_example_database() + report_schedule_data = { + "chart": chart.id, + "dashboard": dashboard.id, + "database": example_db.id, + } + uri = f"api/v1/report/{report_schedule.id}" + rv = self.put_assert_metric(uri, report_schedule_data, "put") + assert rv.status_code == 422 + data = json.loads(rv.data.decode("utf-8")) + assert data == {"message": {"chart": "Choose a chart or dashboard not both"}} + + 
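+    # The tests below exercise the update and delete paths: relation existence is
+    # re-validated on PUT, and non-owner users receive a 403 on both PUT and DELETE.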
@pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "create_report_schedules" + ) + def test_update_report_schedule_relations_exist(self): + """ + ReportSchedule Api: Test update report schedule + relations (chart, dash, db) exist + """ + self.login(username="admin") + + report_schedule = ( + db.session.query(ReportSchedule) + .filter(ReportSchedule.name == "name2") + .one_or_none() + ) + + # Test chart and database do not exist + chart_max_id = db.session.query(func.max(Slice.id)).scalar() + database_max_id = db.session.query(func.max(Database.id)).scalar() + examples_db = get_example_database() + report_schedule_data = { + "type": ReportScheduleType.ALERT, + "name": "new3", + "description": "description", + "crontab": "0 9 * * *", + "chart": chart_max_id + 1, + "database": database_max_id + 1, + } + uri = f"api/v1/report/{report_schedule.id}" + rv = self.put_assert_metric(uri, report_schedule_data, "put") + assert rv.status_code == 422 + data = json.loads(rv.data.decode("utf-8")) + assert data == { + "message": { + "chart": "Chart does not exist", + "database": "Database does not exist", + } + } + + # Test dashboard does not exist + dashboard_max_id = db.session.query(func.max(Dashboard.id)).scalar() + report_schedule_data = { + "type": ReportScheduleType.ALERT, + "name": "new3", + "description": "description", + "crontab": "0 9 * * *", + "dashboard": dashboard_max_id + 1, + "database": examples_db.id, + } + uri = f"api/v1/report/{report_schedule.id}" + rv = self.put_assert_metric(uri, report_schedule_data, "put") + assert rv.status_code == 422 + data = json.loads(rv.data.decode("utf-8")) + assert data == {"message": {"dashboard": "Dashboard does not exist"}} + + @pytest.mark.usefixtures("create_report_schedules") + @pytest.mark.usefixtures("create_alpha_users") + def test_update_report_not_owned(self): + """ + ReportSchedule API: Test update report not owned + """ + report_schedule = ( + db.session.query(ReportSchedule) + .filter(ReportSchedule.name == "name2") + .one_or_none() + ) + + self.login(username="alpha2", password="password") + report_schedule_data = { + "active": False, + } + uri = f"api/v1/report/{report_schedule.id}" + rv = self.put_assert_metric(uri, report_schedule_data, "put") + self.assertEqual(rv.status_code, 403) + + @pytest.mark.usefixtures("create_report_schedules") + def test_delete_report_schedule(self): + """ + ReportSchedule Api: Test delete report schedule + """ + report_schedule = ( + db.session.query(ReportSchedule) + .filter(ReportSchedule.name == "name1") + .one_or_none() + ) + self.login(username="admin") + uri = f"api/v1/report/{report_schedule.id}" + rv = self.delete_assert_metric(uri, "delete") + assert rv.status_code == 200 + deleted_report_schedule = db.session.query(ReportSchedule).get( + report_schedule.id + ) + assert deleted_report_schedule is None + deleted_recipients = ( + db.session.query(ReportRecipients) + .filter(ReportRecipients.report_schedule_id == report_schedule.id) + .all() + ) + assert deleted_recipients == [] + deleted_logs = ( + db.session.query(ReportExecutionLog) + .filter(ReportExecutionLog.report_schedule_id == report_schedule.id) + .all() + ) + assert deleted_logs == [] + + @pytest.mark.usefixtures("create_report_schedules") + def test_delete_report_schedule_not_found(self): + """ + ReportSchedule Api: Test delete report schedule not found + """ + max_id = db.session.query(func.max(ReportSchedule.id)).scalar() + self.login(username="admin") + uri = f"api/v1/report/{max_id + 1}" + rv = 
self.delete_assert_metric(uri, "delete") + assert rv.status_code == 404 + + @pytest.mark.usefixtures("create_report_schedules") + @pytest.mark.usefixtures("create_alpha_users") + def test_delete_report_not_owned(self): + """ + ReportSchedule API: Test delete try not owned + """ + report_schedule = ( + db.session.query(ReportSchedule) + .filter(ReportSchedule.name == "name2") + .one_or_none() + ) + + self.login(username="alpha2", password="password") + uri = f"api/v1/report/{report_schedule.id}" + rv = self.delete_assert_metric(uri, "delete") + self.assertEqual(rv.status_code, 403) + + @pytest.mark.usefixtures("create_report_schedules") + def test_bulk_delete_report_schedule(self): + """ + ReportSchedule Api: Test bulk delete report schedules + """ + query_report_schedules = db.session.query(ReportSchedule) + report_schedules = query_report_schedules.all() + + report_schedules_ids = [ + report_schedule.id for report_schedule in report_schedules + ] + self.login(username="admin") + uri = f"api/v1/report/?q={prison.dumps(report_schedules_ids)}" + rv = self.delete_assert_metric(uri, "bulk_delete") + assert rv.status_code == 200 + deleted_report_schedules = query_report_schedules.all() + assert deleted_report_schedules == [] + response = json.loads(rv.data.decode("utf-8")) + expected_response = { + "message": f"Deleted {len(report_schedules_ids)} report schedules" + } + assert response == expected_response + + @pytest.mark.usefixtures("create_report_schedules") + def test_bulk_delete_report_schedule_not_found(self): + """ + ReportSchedule Api: Test bulk delete report schedule not found + """ + report_schedules = db.session.query(ReportSchedule).all() + report_schedules_ids = [ + report_schedule.id for report_schedule in report_schedules + ] + max_id = db.session.query(func.max(ReportSchedule.id)).scalar() + report_schedules_ids.append(max_id + 1) + self.login(username="admin") + uri = f"api/v1/report/?q={prison.dumps(report_schedules_ids)}" + rv = self.delete_assert_metric(uri, "bulk_delete") + assert rv.status_code == 404 + + @pytest.mark.usefixtures("create_report_schedules") + @pytest.mark.usefixtures("create_alpha_users") + def test_bulk_delete_report_not_owned(self): + """ + ReportSchedule API: Test bulk delete try not owned + """ + report_schedule = ( + db.session.query(ReportSchedule) + .filter(ReportSchedule.name == "name2") + .one_or_none() + ) + report_schedules_ids = [report_schedule.id] + + self.login(username="alpha2", password="password") + uri = f"api/v1/report/?q={prison.dumps(report_schedules_ids)}" + rv = self.delete_assert_metric(uri, "bulk_delete") + self.assertEqual(rv.status_code, 403) + + @pytest.mark.usefixtures("create_report_schedules") + def test_get_list_report_schedule_logs(self): + """ + ReportSchedule Api: Test get list report schedules logs + """ + report_schedule = ( + db.session.query(ReportSchedule) + .filter(ReportSchedule.name == "name3") + .one_or_none() + ) + + self.login(username="admin") + uri = f"api/v1/report/{report_schedule.id}/log/" + rv = self.client.get(uri) + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == 3 + + @pytest.mark.usefixtures("create_report_schedules") + def test_get_list_report_schedule_logs_sorting(self): + """ + ReportSchedule Api: Test get list report schedules logs + """ + report_schedule = ( + db.session.query(ReportSchedule) + .filter(ReportSchedule.name == "name3") + .one_or_none() + ) + + self.login(username="admin") + uri = f"api/v1/report/{report_schedule.id}/log/" + + 
order_columns = [ + "state", + "value", + "error_message", + "end_dttm", + "start_dttm", + "scheduled_dttm", + ] + + for order_column in order_columns: + arguments = {"order_column": order_column, "order_direction": "asc"} + uri = f"api/v1/report/{report_schedule.id}/log/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + if rv.status_code == 400: + raise Exception(json.loads(rv.data.decode("utf-8"))) + assert rv.status_code == 200 + + @pytest.mark.usefixtures("create_report_schedules") + def test_get_list_report_schedule_logs_filters(self): + """ + ReportSchedule Api: Test get list report schedules log filters + """ + report_schedule = ( + db.session.query(ReportSchedule) + .filter(ReportSchedule.name == "name3") + .one_or_none() + ) + + self.login(username="admin") + arguments = { + "columns": ["name"], + "filters": [{"col": "state", "opr": "eq", "value": ReportState.SUCCESS}], + } + uri = f"api/v1/report/{report_schedule.id}/log/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == 0 + + @pytest.mark.usefixtures("create_report_schedules") + def test_report_schedule_logs_no_mutations(self): + """ + ReportSchedule Api: Test assert there's no way to alter logs + """ + report_schedule = ( + db.session.query(ReportSchedule) + .filter(ReportSchedule.name == "name3") + .one_or_none() + ) + + data = {"state": ReportState.ERROR, "error_message": "New error changed"} + + self.login(username="admin") + uri = f"api/v1/report/{report_schedule.id}/log/" + rv = self.client.post(uri, json=data) + assert rv.status_code == 405 + uri = f"api/v1/report/{report_schedule.id}/log/{report_schedule.logs[0].id}" + rv = self.client.put(uri, json=data) + assert rv.status_code == 405 + rv = self.client.delete(uri) + assert rv.status_code == 405 diff --git a/tests/integration_tests/reports/commands/create_dashboard_report_tests.py b/tests/integration_tests/reports/commands/create_dashboard_report_tests.py new file mode 100644 index 0000000000000..81945c18a9abd --- /dev/null +++ b/tests/integration_tests/reports/commands/create_dashboard_report_tests.py @@ -0,0 +1,91 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
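+"""Tests for creating dashboard report schedules (active tab id validation)."""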
+ +import pytest + +from superset import db +from superset.models.dashboard import Dashboard +from superset.reports.commands.create import CreateReportScheduleCommand +from superset.reports.commands.exceptions import ReportScheduleInvalidError +from superset.reports.models import ( + ReportCreationMethod, + ReportRecipientType, + ReportScheduleType, +) +from tests.integration_tests.fixtures.tabbed_dashboard import tabbed_dashboard + +DASHBOARD_REPORT_SCHEDULE_DEFAULTS = { + "type": ReportScheduleType.REPORT, + "description": "description", + "crontab": "0 9 * * *", + "creation_method": ReportCreationMethod.ALERTS_REPORTS, + "recipients": [ + { + "type": ReportRecipientType.EMAIL, + "recipient_config_json": {"target": "target@example.com"}, + }, + ], + "grace_period": 14400, + "working_timeout": 3600, +} + + +@pytest.mark.usefixtures("login_as_admin") +def test_accept_valid_tab_ids(tabbed_dashboard: Dashboard) -> None: + report_schedule = CreateReportScheduleCommand( + { + **DASHBOARD_REPORT_SCHEDULE_DEFAULTS, + "name": "tabbed dashboard report (valid tabs id)", + "dashboard": tabbed_dashboard.id, + "extra": {"dashboard": {"activeTabs": ["TAB-L1AA", "TAB-L2AB"]}}, + } + ).run() + assert report_schedule.extra == { + "dashboard": {"activeTabs": ["TAB-L1AA", "TAB-L2AB"]} + } + db.session.delete(report_schedule) + db.session.commit() + + +@pytest.mark.usefixtures("login_as_admin") +def test_raise_exception_for_invalid_tab_ids(tabbed_dashboard: Dashboard) -> None: + with pytest.raises(ReportScheduleInvalidError) as exc_info: + CreateReportScheduleCommand( + { + **DASHBOARD_REPORT_SCHEDULE_DEFAULTS, + "name": "tabbed dashboard report (invalid tab ids)", + "dashboard": tabbed_dashboard.id, + "extra": {"dashboard": {"activeTabs": ["TAB-INVALID_ID"]}}, + } + ).run() + assert "Invalid tab ids" in str(exc_info.value.normalized_messages()) + + with pytest.raises(ReportScheduleInvalidError) as exc_info: + CreateReportScheduleCommand( + { + **DASHBOARD_REPORT_SCHEDULE_DEFAULTS, + "name": "tabbed dashboard report (invalid tab ids in anchor)", + "dashboard": tabbed_dashboard.id, + "extra": { + "dashboard": { + "activeTabs": ["TAB-L1AA"], + "anchor": "TAB-INVALID_ID", + } + }, + } + ).run() + assert "Invalid tab ids" in str(exc_info.value.normalized_messages()) diff --git a/tests/integration_tests/reports/commands/execute_dashboard_report_tests.py b/tests/integration_tests/reports/commands/execute_dashboard_report_tests.py new file mode 100644 index 0000000000000..0027738fd9807 --- /dev/null +++ b/tests/integration_tests/reports/commands/execute_dashboard_report_tests.py @@ -0,0 +1,113 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
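+"""Tests for executing dashboard report schedules (screenshot capture and email delivery)."""
+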
+from datetime import datetime +from unittest.mock import MagicMock, patch +from uuid import uuid4 + +from flask import current_app + +from superset.dashboards.permalink.commands.create import ( + CreateDashboardPermalinkCommand, +) +from superset.models.dashboard import Dashboard +from superset.reports.commands.execute import AsyncExecuteReportScheduleCommand +from superset.reports.models import ReportSourceFormat +from tests.integration_tests.fixtures.tabbed_dashboard import tabbed_dashboard +from tests.integration_tests.reports.utils import create_dashboard_report + + +@patch("superset.reports.notifications.email.send_email_smtp") +@patch( + "superset.reports.commands.execute.DashboardScreenshot", +) +@patch( + "superset.dashboards.permalink.commands.create.CreateDashboardPermalinkCommand.run" +) +def test_report_for_dashboard_with_tabs( + create_dashboard_permalink_mock: MagicMock, + dashboard_screenshot_mock: MagicMock, + send_email_smtp_mock: MagicMock, + tabbed_dashboard: Dashboard, +) -> None: + create_dashboard_permalink_mock.return_value = "permalink" + dashboard_screenshot_mock.get_screenshot.return_value = b"test-image" + current_app.config["ALERT_REPORTS_NOTIFICATION_DRY_RUN"] = False + + with create_dashboard_report( + dashboard=tabbed_dashboard, + extra={"active_tabs": ["TAB-L1B", "TAB-L2BB"]}, + name="test report tabbed dashboard", + ) as report_schedule: + dashboard: Dashboard = report_schedule.dashboard + AsyncExecuteReportScheduleCommand( + str(uuid4()), report_schedule.id, datetime.utcnow() + ).run() + dashboard_state = report_schedule.extra.get("dashboard", {}) + permalink_key = CreateDashboardPermalinkCommand( + dashboard.id, dashboard_state + ).run() + + assert dashboard_screenshot_mock.call_count == 1 + (url, digest) = dashboard_screenshot_mock.call_args.args + assert url.endswith(f"/superset/dashboard/p/{permalink_key}/") + assert digest == dashboard.digest + assert send_email_smtp_mock.call_count == 1 + assert len(send_email_smtp_mock.call_args.kwargs["images"]) == 1 + + +@patch("superset.reports.notifications.email.send_email_smtp") +@patch( + "superset.reports.commands.execute.DashboardScreenshot", +) +@patch( + "superset.dashboards.permalink.commands.create.CreateDashboardPermalinkCommand.run" +) +def test_report_with_header_data( + create_dashboard_permalink_mock: MagicMock, + dashboard_screenshot_mock: MagicMock, + send_email_smtp_mock: MagicMock, + tabbed_dashboard: Dashboard, +) -> None: + create_dashboard_permalink_mock.return_value = "permalink" + dashboard_screenshot_mock.get_screenshot.return_value = b"test-image" + current_app.config["ALERT_REPORTS_NOTIFICATION_DRY_RUN"] = False + + with create_dashboard_report( + dashboard=tabbed_dashboard, + extra={"active_tabs": ["TAB-L1B"]}, + name="test report tabbed dashboard", + ) as report_schedule: + dashboard: Dashboard = report_schedule.dashboard + AsyncExecuteReportScheduleCommand( + str(uuid4()), report_schedule.id, datetime.utcnow() + ).run() + dashboard_state = report_schedule.extra.get("dashboard", {}) + permalink_key = CreateDashboardPermalinkCommand( + dashboard.id, dashboard_state + ).run() + + assert dashboard_screenshot_mock.call_count == 1 + (url, digest) = dashboard_screenshot_mock.call_args.args + assert url.endswith(f"/superset/dashboard/p/{permalink_key}/") + assert digest == dashboard.digest + assert send_email_smtp_mock.call_count == 1 + header_data = send_email_smtp_mock.call_args.kwargs["header_data"] + assert header_data.get("dashboard_id") == dashboard.id + assert 
header_data.get("notification_format") == report_schedule.report_format + assert header_data.get("notification_source") == ReportSourceFormat.DASHBOARD + assert header_data.get("notification_type") == report_schedule.type + assert len(send_email_smtp_mock.call_args.kwargs["header_data"]) == 6 diff --git a/tests/integration_tests/reports/commands_tests.py b/tests/integration_tests/reports/commands_tests.py new file mode 100644 index 0000000000000..8d6a76c14f67e --- /dev/null +++ b/tests/integration_tests/reports/commands_tests.py @@ -0,0 +1,2036 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import json +from contextlib import contextmanager +from datetime import datetime, timedelta, timezone +from typing import List, Optional +from unittest.mock import call, Mock, patch +from uuid import uuid4 + +import pytest +from flask import current_app +from flask_appbuilder.security.sqla.models import User +from flask_sqlalchemy import BaseQuery +from freezegun import freeze_time +from slack_sdk.errors import ( + BotUserAccessError, + SlackApiError, + SlackClientConfigurationError, + SlackClientError, + SlackClientNotConnectedError, + SlackObjectFormationError, + SlackRequestError, + SlackTokenRotationError, +) +from sqlalchemy.sql import func + +from superset import db +from superset.exceptions import SupersetException +from superset.models.core import Database +from superset.models.dashboard import Dashboard +from superset.models.slice import Slice +from superset.reports.commands.exceptions import ( + AlertQueryError, + AlertQueryInvalidTypeError, + AlertQueryMultipleColumnsError, + AlertQueryMultipleRowsError, + ReportScheduleClientErrorsException, + ReportScheduleCsvFailedError, + ReportScheduleCsvTimeout, + ReportScheduleNotFoundError, + ReportSchedulePreviousWorkingError, + ReportScheduleScreenshotFailedError, + ReportScheduleScreenshotTimeout, + ReportScheduleSystemErrorsException, + ReportScheduleWorkingTimeoutError, +) +from superset.reports.commands.execute import ( + AsyncExecuteReportScheduleCommand, + BaseReportState, +) +from superset.reports.commands.log_prune import AsyncPruneReportScheduleLogCommand +from superset.reports.models import ( + ReportDataFormat, + ReportExecutionLog, + ReportSchedule, + ReportScheduleType, + ReportScheduleValidatorType, + ReportState, +) +from superset.reports.notifications.exceptions import ( + NotificationError, + NotificationParamException, +) +from superset.tasks.types import ExecutorType +from superset.utils.database import get_example_database +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) +from tests.integration_tests.fixtures.world_bank_dashboard import ( + load_world_bank_dashboard_with_slices_module_scope, + 
load_world_bank_data, +) +from tests.integration_tests.reports.utils import ( + cleanup_report_schedule, + create_report_notification, + CSV_FILE, + DEFAULT_OWNER_EMAIL, + SCREENSHOT_FILE, + TEST_ID, +) +from tests.integration_tests.test_app import app + +pytestmark = pytest.mark.usefixtures( + "load_world_bank_dashboard_with_slices_module_scope" +) + + +def get_target_from_report_schedule(report_schedule: ReportSchedule) -> List[str]: + return [ + json.loads(recipient.recipient_config_json)["target"] + for recipient in report_schedule.recipients + ] + + +def get_error_logs_query(report_schedule: ReportSchedule) -> BaseQuery: + return ( + db.session.query(ReportExecutionLog) + .filter( + ReportExecutionLog.report_schedule == report_schedule, + ReportExecutionLog.state == ReportState.ERROR, + ) + .order_by(ReportExecutionLog.end_dttm.desc()) + ) + + +def get_notification_error_sent_count(report_schedule: ReportSchedule) -> int: + logs = get_error_logs_query(report_schedule).all() + notification_sent_logs = [ + log.error_message + for log in logs + if log.error_message == "Notification sent with error" + ] + return len(notification_sent_logs) + + +def assert_log(state: str, error_message: Optional[str] = None): + db.session.commit() + logs = db.session.query(ReportExecutionLog).all() + + if state == ReportState.ERROR: + # On error we send an email + assert len(logs) == 3 + else: + assert len(logs) == 2 + log_states = [log.state for log in logs] + assert ReportState.WORKING in log_states + assert state in log_states + assert error_message in [log.error_message for log in logs] + + for log in logs: + if log.state == ReportState.WORKING: + assert log.value is None + assert log.value_row_json is None + + +@contextmanager +def create_test_table_context(database: Database): + with database.get_sqla_engine_with_context() as engine: + engine.execute("CREATE TABLE test_table AS SELECT 1 as first, 2 as second") + engine.execute("INSERT INTO test_table (first, second) VALUES (1, 2)") + engine.execute("INSERT INTO test_table (first, second) VALUES (3, 4)") + + yield db.session + with database.get_sqla_engine_with_context() as engine: + engine.execute("DROP TABLE test_table") + + +@pytest.fixture() +def create_report_email_chart(): + with app.app_context(): + chart = db.session.query(Slice).first() + report_schedule = create_report_notification( + email_target="target@email.com", chart=chart + ) + yield report_schedule + + cleanup_report_schedule(report_schedule) + + +@pytest.fixture() +def create_report_email_chart_alpha_owner(get_user): + with app.app_context(): + owners = [get_user("alpha")] + chart = db.session.query(Slice).first() + report_schedule = create_report_notification( + email_target="target@email.com", chart=chart, owners=owners + ) + yield report_schedule + + cleanup_report_schedule(report_schedule) + + +@pytest.fixture() +def create_report_email_chart_force_screenshot(): + with app.app_context(): + chart = db.session.query(Slice).first() + report_schedule = create_report_notification( + email_target="target@email.com", chart=chart, force_screenshot=True + ) + yield report_schedule + + cleanup_report_schedule(report_schedule) + + +@pytest.fixture() +def create_report_email_chart_with_csv(): + with app.app_context(): + chart = db.session.query(Slice).first() + chart.query_context = '{"mock": "query_context"}' + report_schedule = create_report_notification( + email_target="target@email.com", + chart=chart, + report_format=ReportDataFormat.DATA, + ) + yield report_schedule + 
cleanup_report_schedule(report_schedule) + + +@pytest.fixture() +def create_report_email_chart_with_text(): + with app.app_context(): + chart = db.session.query(Slice).first() + chart.query_context = '{"mock": "query_context"}' + report_schedule = create_report_notification( + email_target="target@email.com", + chart=chart, + report_format=ReportDataFormat.TEXT, + ) + yield report_schedule + cleanup_report_schedule(report_schedule) + + +@pytest.fixture() +def create_report_email_chart_with_csv_no_query_context(): + with app.app_context(): + chart = db.session.query(Slice).first() + chart.query_context = None + report_schedule = create_report_notification( + email_target="target@email.com", + chart=chart, + report_format=ReportDataFormat.DATA, + name="report_csv_no_query_context", + ) + yield report_schedule + cleanup_report_schedule(report_schedule) + + +@pytest.fixture() +def create_report_email_dashboard(): + with app.app_context(): + dashboard = db.session.query(Dashboard).first() + report_schedule = create_report_notification( + email_target="target@email.com", dashboard=dashboard + ) + yield report_schedule + + cleanup_report_schedule(report_schedule) + + +@pytest.fixture() +def create_report_email_dashboard_force_screenshot(): + with app.app_context(): + dashboard = db.session.query(Dashboard).first() + report_schedule = create_report_notification( + email_target="target@email.com", dashboard=dashboard, force_screenshot=True + ) + yield report_schedule + + cleanup_report_schedule(report_schedule) + + +@pytest.fixture() +def create_report_slack_chart(): + with app.app_context(): + chart = db.session.query(Slice).first() + report_schedule = create_report_notification( + slack_channel="slack_channel", chart=chart + ) + yield report_schedule + + cleanup_report_schedule(report_schedule) + + +@pytest.fixture() +def create_report_slack_chart_with_csv(): + with app.app_context(): + chart = db.session.query(Slice).first() + chart.query_context = '{"mock": "query_context"}' + report_schedule = create_report_notification( + slack_channel="slack_channel", + chart=chart, + report_format=ReportDataFormat.DATA, + ) + yield report_schedule + + cleanup_report_schedule(report_schedule) + + +@pytest.fixture() +def create_report_slack_chart_with_text(): + with app.app_context(): + chart = db.session.query(Slice).first() + chart.query_context = '{"mock": "query_context"}' + report_schedule = create_report_notification( + slack_channel="slack_channel", + chart=chart, + report_format=ReportDataFormat.TEXT, + ) + yield report_schedule + + cleanup_report_schedule(report_schedule) + + +@pytest.fixture() +def create_report_slack_chart_working(): + with app.app_context(): + chart = db.session.query(Slice).first() + report_schedule = create_report_notification( + slack_channel="slack_channel", chart=chart + ) + report_schedule.last_state = ReportState.WORKING + report_schedule.last_eval_dttm = datetime(2020, 1, 1, 0, 0) + report_schedule.last_value = None + report_schedule.last_value_row_json = None + db.session.commit() + log = ReportExecutionLog( + scheduled_dttm=report_schedule.last_eval_dttm, + start_dttm=report_schedule.last_eval_dttm, + end_dttm=report_schedule.last_eval_dttm, + value=report_schedule.last_value, + value_row_json=report_schedule.last_value_row_json, + state=ReportState.WORKING, + report_schedule=report_schedule, + uuid=uuid4(), + ) + db.session.add(log) + db.session.commit() + + yield report_schedule + + cleanup_report_schedule(report_schedule) + + +@pytest.fixture() +def 
create_alert_slack_chart_success(): + with app.app_context(): + chart = db.session.query(Slice).first() + report_schedule = create_report_notification( + slack_channel="slack_channel", + chart=chart, + report_type=ReportScheduleType.ALERT, + ) + report_schedule.last_state = ReportState.SUCCESS + report_schedule.last_eval_dttm = datetime(2020, 1, 1, 0, 0) + + log = ReportExecutionLog( + report_schedule=report_schedule, + state=ReportState.SUCCESS, + start_dttm=report_schedule.last_eval_dttm, + end_dttm=report_schedule.last_eval_dttm, + scheduled_dttm=report_schedule.last_eval_dttm, + ) + db.session.add(log) + db.session.commit() + yield report_schedule + + cleanup_report_schedule(report_schedule) + + +@pytest.fixture( + params=[ + "alert1", + ] +) +def create_alert_slack_chart_grace(request): + param_config = { + "alert1": { + "sql": "SELECT count(*) from test_table", + "validator_type": ReportScheduleValidatorType.OPERATOR, + "validator_config_json": '{"op": "<", "threshold": 10}', + }, + } + with app.app_context(): + chart = db.session.query(Slice).first() + example_database = get_example_database() + with create_test_table_context(example_database): + report_schedule = create_report_notification( + slack_channel="slack_channel", + chart=chart, + report_type=ReportScheduleType.ALERT, + database=example_database, + sql=param_config[request.param]["sql"], + validator_type=param_config[request.param]["validator_type"], + validator_config_json=param_config[request.param][ + "validator_config_json" + ], + ) + report_schedule.last_state = ReportState.GRACE + report_schedule.last_eval_dttm = datetime(2020, 1, 1, 0, 0) + + log = ReportExecutionLog( + report_schedule=report_schedule, + state=ReportState.SUCCESS, + start_dttm=report_schedule.last_eval_dttm, + end_dttm=report_schedule.last_eval_dttm, + scheduled_dttm=report_schedule.last_eval_dttm, + ) + db.session.add(log) + db.session.commit() + yield report_schedule + + cleanup_report_schedule(report_schedule) + + +@pytest.fixture( + params=[ + "alert1", + "alert2", + "alert3", + "alert4", + "alert5", + "alert6", + "alert7", + "alert8", + ] +) +def create_alert_email_chart(request): + param_config = { + "alert1": { + "sql": "SELECT 10 as metric", + "validator_type": ReportScheduleValidatorType.OPERATOR, + "validator_config_json": '{"op": ">", "threshold": 9}', + }, + "alert2": { + "sql": "SELECT 10 as metric", + "validator_type": ReportScheduleValidatorType.OPERATOR, + "validator_config_json": '{"op": ">=", "threshold": 10}', + }, + "alert3": { + "sql": "SELECT 10 as metric", + "validator_type": ReportScheduleValidatorType.OPERATOR, + "validator_config_json": '{"op": "<", "threshold": 11}', + }, + "alert4": { + "sql": "SELECT 10 as metric", + "validator_type": ReportScheduleValidatorType.OPERATOR, + "validator_config_json": '{"op": "<=", "threshold": 10}', + }, + "alert5": { + "sql": "SELECT 10 as metric", + "validator_type": ReportScheduleValidatorType.OPERATOR, + "validator_config_json": '{"op": "!=", "threshold": 11}', + }, + "alert6": { + "sql": "SELECT 'something' as metric", + "validator_type": ReportScheduleValidatorType.NOT_NULL, + "validator_config_json": "{}", + }, + "alert7": { + "sql": "SELECT {{ 5 + 5 }} as metric", + "validator_type": ReportScheduleValidatorType.OPERATOR, + "validator_config_json": '{"op": "!=", "threshold": 11}', + }, + "alert8": { + "sql": "SELECT 55 as metric", + "validator_type": ReportScheduleValidatorType.OPERATOR, + "validator_config_json": '{"op": ">", "threshold": 54.999}', + }, + } + with 
app.app_context(): + chart = db.session.query(Slice).first() + example_database = get_example_database() + with create_test_table_context(example_database): + + report_schedule = create_report_notification( + email_target="target@email.com", + chart=chart, + report_type=ReportScheduleType.ALERT, + database=example_database, + sql=param_config[request.param]["sql"], + validator_type=param_config[request.param]["validator_type"], + validator_config_json=param_config[request.param][ + "validator_config_json" + ], + force_screenshot=True, + ) + yield report_schedule + + cleanup_report_schedule(report_schedule) + + +@pytest.fixture( + params=[ + "alert1", + "alert2", + "alert3", + "alert4", + "alert5", + "alert6", + "alert7", + "alert8", + "alert9", + ] +) +def create_no_alert_email_chart(request): + param_config = { + "alert1": { + "sql": "SELECT 10 as metric", + "validator_type": ReportScheduleValidatorType.OPERATOR, + "validator_config_json": '{"op": "<", "threshold": 10}', + }, + "alert2": { + "sql": "SELECT 10 as metric", + "validator_type": ReportScheduleValidatorType.OPERATOR, + "validator_config_json": '{"op": ">=", "threshold": 11}', + }, + "alert3": { + "sql": "SELECT 10 as metric", + "validator_type": ReportScheduleValidatorType.OPERATOR, + "validator_config_json": '{"op": "<", "threshold": 10}', + }, + "alert4": { + "sql": "SELECT 10 as metric", + "validator_type": ReportScheduleValidatorType.OPERATOR, + "validator_config_json": '{"op": "<=", "threshold": 9}', + }, + "alert5": { + "sql": "SELECT 10 as metric", + "validator_type": ReportScheduleValidatorType.OPERATOR, + "validator_config_json": '{"op": "!=", "threshold": 10}', + }, + "alert6": { + "sql": "SELECT first from test_table where 1=0", + "validator_type": ReportScheduleValidatorType.NOT_NULL, + "validator_config_json": "{}", + }, + "alert7": { + "sql": "SELECT first from test_table where 1=0", + "validator_type": ReportScheduleValidatorType.OPERATOR, + "validator_config_json": '{"op": ">", "threshold": 0}', + }, + "alert8": { + "sql": "SELECT Null as metric", + "validator_type": ReportScheduleValidatorType.NOT_NULL, + "validator_config_json": "{}", + }, + "alert9": { + "sql": "SELECT Null as metric", + "validator_type": ReportScheduleValidatorType.OPERATOR, + "validator_config_json": '{"op": ">", "threshold": 0}', + }, + } + with app.app_context(): + chart = db.session.query(Slice).first() + example_database = get_example_database() + with create_test_table_context(example_database): + + report_schedule = create_report_notification( + email_target="target@email.com", + chart=chart, + report_type=ReportScheduleType.ALERT, + database=example_database, + sql=param_config[request.param]["sql"], + validator_type=param_config[request.param]["validator_type"], + validator_config_json=param_config[request.param][ + "validator_config_json" + ], + ) + yield report_schedule + + cleanup_report_schedule(report_schedule) + + +@pytest.fixture(params=["alert1", "alert2"]) +def create_mul_alert_email_chart(request): + param_config = { + "alert1": { + "sql": "SELECT first, second from test_table", + "validator_type": ReportScheduleValidatorType.OPERATOR, + "validator_config_json": '{"op": "<", "threshold": 10}', + }, + "alert2": { + "sql": "SELECT first from test_table", + "validator_type": ReportScheduleValidatorType.OPERATOR, + "validator_config_json": '{"op": "<", "threshold": 10}', + }, + } + with app.app_context(): + chart = db.session.query(Slice).first() + example_database = get_example_database() + with 
create_test_table_context(example_database): + + report_schedule = create_report_notification( + email_target="target@email.com", + chart=chart, + report_type=ReportScheduleType.ALERT, + database=example_database, + sql=param_config[request.param]["sql"], + validator_type=param_config[request.param]["validator_type"], + validator_config_json=param_config[request.param][ + "validator_config_json" + ], + ) + yield report_schedule + + cleanup_report_schedule(report_schedule) + + +@pytest.fixture(params=["alert1", "alert2"]) +def create_invalid_sql_alert_email_chart(request): + param_config = { + "alert1": { + "sql": "SELECT 'string' ", + "validator_type": ReportScheduleValidatorType.OPERATOR, + "validator_config_json": '{"op": "<", "threshold": 10}', + }, + "alert2": { + "sql": "SELECT first from foo_table", + "validator_type": ReportScheduleValidatorType.OPERATOR, + "validator_config_json": '{"op": "<", "threshold": 10}', + }, + } + with app.app_context(): + chart = db.session.query(Slice).first() + example_database = get_example_database() + with create_test_table_context(example_database): + + report_schedule = create_report_notification( + email_target="target@email.com", + chart=chart, + report_type=ReportScheduleType.ALERT, + database=example_database, + sql=param_config[request.param]["sql"], + validator_type=param_config[request.param]["validator_type"], + validator_config_json=param_config[request.param][ + "validator_config_json" + ], + grace_period=60 * 60, + ) + yield report_schedule + + cleanup_report_schedule(report_schedule) + + +@pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "create_report_email_chart" +) +@patch("superset.reports.notifications.email.send_email_smtp") +@patch("superset.utils.screenshots.ChartScreenshot.get_screenshot") +def test_email_chart_report_schedule( + screenshot_mock, + email_mock, + create_report_email_chart, +): + """ + ExecuteReport Command: Test chart email report schedule with screenshot + """ + # setup screenshot mock + screenshot_mock.return_value = SCREENSHOT_FILE + + with freeze_time("2020-01-01T00:00:00Z"): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_report_email_chart.id, datetime.utcnow() + ).run() + + notification_targets = get_target_from_report_schedule( + create_report_email_chart + ) + # assert that the link sent is correct + assert ( + 'Explore in Superset' in email_mock.call_args[0][2] + ) + # Assert the email smtp address + assert email_mock.call_args[0][0] == notification_targets[0] + # Assert the email inline screenshot + smtp_images = email_mock.call_args[1]["images"] + assert smtp_images[list(smtp_images.keys())[0]] == SCREENSHOT_FILE + # Assert logs are correct + assert_log(ReportState.SUCCESS) + + +@pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "create_report_email_chart_alpha_owner" +) +@patch("superset.reports.notifications.email.send_email_smtp") +@patch("superset.utils.screenshots.ChartScreenshot.get_screenshot") +def test_email_chart_report_schedule_alpha_owner( + screenshot_mock, + email_mock, + create_report_email_chart_alpha_owner, +): + """ + ExecuteReport Command: Test chart email report schedule with screenshot + executed as the chart owner + """ + config_key = "ALERT_REPORTS_EXECUTE_AS" + original_config_value = app.config[config_key] + app.config[config_key] = [ExecutorType.OWNER] + + # setup screenshot mock + username = "" + + def _screenshot_side_effect(user: User) -> Optional[bytes]: + nonlocal username + username = user.username + + return 
SCREENSHOT_FILE + + screenshot_mock.side_effect = _screenshot_side_effect + + with freeze_time("2020-01-01T00:00:00Z"): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_report_email_chart_alpha_owner.id, datetime.utcnow() + ).run() + + notification_targets = get_target_from_report_schedule( + create_report_email_chart_alpha_owner + ) + # assert that the screenshot is executed as the chart owner + assert username == "alpha" + + # assert that the link sent is correct + assert ( + 'Explore in Superset' in email_mock.call_args[0][2] + ) + # Assert the email smtp address + assert email_mock.call_args[0][0] == notification_targets[0] + # Assert the email inline screenshot + smtp_images = email_mock.call_args[1]["images"] + assert smtp_images[list(smtp_images.keys())[0]] == SCREENSHOT_FILE + # Assert logs are correct + assert_log(ReportState.SUCCESS) + + app.config[config_key] = original_config_value + + +@pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", + "create_report_email_chart_force_screenshot", +) +@patch("superset.reports.notifications.email.send_email_smtp") +@patch("superset.utils.screenshots.ChartScreenshot.get_screenshot") +def test_email_chart_report_schedule_force_screenshot( + screenshot_mock, + email_mock, + create_report_email_chart_force_screenshot, +): + """ + ExecuteReport Command: Test chart email report schedule with screenshot + + In this test ``force_screenshot`` is true, and the screenshot URL should + reflect that. + """ + # setup screenshot mock + screenshot_mock.return_value = SCREENSHOT_FILE + + with freeze_time("2020-01-01T00:00:00Z"): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_report_email_chart_force_screenshot.id, datetime.utcnow() + ).run() + + notification_targets = get_target_from_report_schedule( + create_report_email_chart_force_screenshot + ) + # assert that the link sent is correct + assert ( + 'Explore in Superset' in email_mock.call_args[0][2] + ) + # Assert the email smtp address + assert email_mock.call_args[0][0] == notification_targets[0] + # Assert the email inline screenshot + smtp_images = email_mock.call_args[1]["images"] + assert smtp_images[list(smtp_images.keys())[0]] == SCREENSHOT_FILE + # Assert logs are correct + assert_log(ReportState.SUCCESS) + + +@pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "create_alert_email_chart" +) +@patch("superset.reports.notifications.email.send_email_smtp") +@patch("superset.utils.screenshots.ChartScreenshot.get_screenshot") +def test_email_chart_alert_schedule( + screenshot_mock, + email_mock, + create_alert_email_chart, +): + """ + ExecuteReport Command: Test chart email alert schedule with screenshot + """ + # setup screenshot mock + screenshot_mock.return_value = SCREENSHOT_FILE + + with freeze_time("2020-01-01T00:00:00Z"): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_alert_email_chart.id, datetime.utcnow() + ).run() + + notification_targets = get_target_from_report_schedule(create_alert_email_chart) + # assert that the link sent is correct + assert ( + 'Explore in Superset' in email_mock.call_args[0][2] + ) + # Assert the email smtp address + assert email_mock.call_args[0][0] == notification_targets[0] + # Assert the email inline screenshot + smtp_images = email_mock.call_args[1]["images"] + assert smtp_images[list(smtp_images.keys())[0]] == SCREENSHOT_FILE + # Assert logs are correct + assert_log(ReportState.SUCCESS) + + +@pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "create_report_email_chart" +) 
+@patch("superset.reports.notifications.email.send_email_smtp") +@patch("superset.utils.screenshots.ChartScreenshot.get_screenshot") +def test_email_chart_report_dry_run( + screenshot_mock, + email_mock, + create_report_email_chart, +): + """ + ExecuteReport Command: Test chart email report schedule dry run + """ + # setup screenshot mock + screenshot_mock.return_value = SCREENSHOT_FILE + app.config["ALERT_REPORTS_NOTIFICATION_DRY_RUN"] = True + with freeze_time("2020-01-01T00:00:00Z"): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_report_email_chart.id, datetime.utcnow() + ).run() + + email_mock.assert_not_called() + app.config["ALERT_REPORTS_NOTIFICATION_DRY_RUN"] = False + + +@pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "create_report_email_chart_with_csv" +) +@patch("superset.utils.csv.urllib.request.urlopen") +@patch("superset.utils.csv.urllib.request.OpenerDirector.open") +@patch("superset.reports.notifications.email.send_email_smtp") +@patch("superset.utils.csv.get_chart_csv_data") +def test_email_chart_report_schedule_with_csv( + csv_mock, + email_mock, + mock_open, + mock_urlopen, + create_report_email_chart_with_csv, +): + """ + ExecuteReport Command: Test chart email report schedule with CSV + """ + # setup csv mock + response = Mock() + mock_open.return_value = response + mock_urlopen.return_value = response + mock_urlopen.return_value.getcode.return_value = 200 + response.read.return_value = CSV_FILE + + with freeze_time("2020-01-01T00:00:00Z"): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_report_email_chart_with_csv.id, datetime.utcnow() + ).run() + + notification_targets = get_target_from_report_schedule( + create_report_email_chart_with_csv + ) + # assert that the link sent is correct + assert ( + 'Explore in Superset' in email_mock.call_args[0][2] + ) + # Assert the email smtp address + assert email_mock.call_args[0][0] == notification_targets[0] + # Assert the email csv file + smtp_images = email_mock.call_args[1]["data"] + assert smtp_images[list(smtp_images.keys())[0]] == CSV_FILE + # Assert logs are correct + assert_log(ReportState.SUCCESS) + + +@pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", + "create_report_email_chart_with_csv_no_query_context", +) +@patch("superset.utils.csv.urllib.request.urlopen") +@patch("superset.utils.csv.urllib.request.OpenerDirector.open") +@patch("superset.reports.notifications.email.send_email_smtp") +@patch("superset.utils.csv.get_chart_csv_data") +@patch("superset.utils.screenshots.ChartScreenshot.get_screenshot") +def test_email_chart_report_schedule_with_csv_no_query_context( + screenshot_mock, + csv_mock, + email_mock, + mock_open, + mock_urlopen, + create_report_email_chart_with_csv_no_query_context, +): + """ + ExecuteReport Command: Test chart email report schedule with CSV (no query context) + """ + # setup screenshot mock + screenshot_mock.return_value = SCREENSHOT_FILE + + # setup csv mock + response = Mock() + mock_open.return_value = response + mock_urlopen.return_value = response + mock_urlopen.return_value.getcode.return_value = 200 + response.read.return_value = CSV_FILE + + with freeze_time("2020-01-01T00:00:00Z"): + AsyncExecuteReportScheduleCommand( + TEST_ID, + create_report_email_chart_with_csv_no_query_context.id, + datetime.utcnow(), + ).run() + + # verify that when query context is null we request a screenshot + screenshot_mock.assert_called_once() + + +@pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", 
"create_report_email_chart_with_text" +) +@patch("superset.utils.csv.urllib.request.urlopen") +@patch("superset.utils.csv.urllib.request.OpenerDirector.open") +@patch("superset.reports.notifications.email.send_email_smtp") +@patch("superset.utils.csv.get_chart_dataframe") +def test_email_chart_report_schedule_with_text( + dataframe_mock, + email_mock, + mock_open, + mock_urlopen, + create_report_email_chart_with_text, +): + """ + ExecuteReport Command: Test chart email report schedule with text + """ + # setup dataframe mock + response = Mock() + mock_open.return_value = response + mock_urlopen.return_value = response + mock_urlopen.return_value.getcode.return_value = 200 + + # test without date type. + response.read.return_value = json.dumps( + { + "result": [ + { + "data": { + "t1": {0: "c11", 1: "c21"}, + "t2": {0: "c12", 1: "c22"}, + "t3__sum": {0: "c13", 1: "c23"}, + }, + "colnames": [("t1",), ("t2",), ("t3__sum",)], + "indexnames": [(0,), (1,)], + "coltypes": [1, 1], + }, + ], + } + ).encode("utf-8") + + with freeze_time("2020-01-01T00:00:00Z"): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_report_email_chart_with_text.id, datetime.utcnow() + ).run() + + # assert that the data is embedded correctly + table_html = """ + + + + + + + + + + + + + + + + + + + + + + +
t1t2t3__sum
0c11c12c13
1c21c22c23
""" + assert table_html in email_mock.call_args[0][2] + + # Assert logs are correct + assert_log(ReportState.SUCCESS) + + # test with date type. + dt = datetime(2022, 1, 1).replace(tzinfo=timezone.utc) + ts = datetime.timestamp(dt) * 1000 + response.read.return_value = json.dumps( + { + "result": [ + { + "data": { + "t1": {0: "c11", 1: "c21"}, + "t2__date": {0: ts, 1: ts}, + "t3__sum": {0: "c13", 1: "c23"}, + }, + "colnames": [("t1",), ("t2__date",), ("t3__sum",)], + "indexnames": [(0,), (1,)], + "coltypes": [1, 2], + }, + ], + } + ).encode("utf-8") + + with freeze_time("2020-01-01T00:00:00Z"): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_report_email_chart_with_text.id, datetime.utcnow() + ).run() + + # assert that the data is embedded correctly + table_html = """ + + + + + + + + + + + + + + + + + + + + + + +
t1t2__datet3__sum
0c112022-01-01c13
1c212022-01-01c23
""" + + assert table_html in email_mock.call_args[0][2] + + +@pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "create_report_email_dashboard" +) +@patch("superset.reports.notifications.email.send_email_smtp") +@patch("superset.utils.screenshots.DashboardScreenshot.get_screenshot") +def test_email_dashboard_report_schedule( + screenshot_mock, email_mock, create_report_email_dashboard +): + """ + ExecuteReport Command: Test dashboard email report schedule + """ + # setup screenshot mock + screenshot_mock.return_value = SCREENSHOT_FILE + + with freeze_time("2020-01-01T00:00:00Z"): + with patch.object(current_app.config["STATS_LOGGER"], "gauge") as statsd_mock: + + AsyncExecuteReportScheduleCommand( + TEST_ID, create_report_email_dashboard.id, datetime.utcnow() + ).run() + + notification_targets = get_target_from_report_schedule( + create_report_email_dashboard + ) + # Assert the email smtp address + assert email_mock.call_args[0][0] == notification_targets[0] + # Assert the email inline screenshot + smtp_images = email_mock.call_args[1]["images"] + assert smtp_images[list(smtp_images.keys())[0]] == SCREENSHOT_FILE + # Assert logs are correct + assert_log(ReportState.SUCCESS) + statsd_mock.assert_called_once_with("reports.email.send.ok", 1) + + +@pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", + "create_report_email_dashboard_force_screenshot", +) +@patch("superset.reports.notifications.email.send_email_smtp") +@patch("superset.utils.screenshots.DashboardScreenshot.get_screenshot") +def test_email_dashboard_report_schedule_force_screenshot( + screenshot_mock, email_mock, create_report_email_dashboard_force_screenshot +): + """ + ExecuteReport Command: Test dashboard email report schedule + """ + # setup screenshot mock + screenshot_mock.return_value = SCREENSHOT_FILE + + with freeze_time("2020-01-01T00:00:00Z"): + AsyncExecuteReportScheduleCommand( + TEST_ID, + create_report_email_dashboard_force_screenshot.id, + datetime.utcnow(), + ).run() + + notification_targets = get_target_from_report_schedule( + create_report_email_dashboard_force_screenshot + ) + + # Assert the email smtp address + assert email_mock.call_args[0][0] == notification_targets[0] + # Assert the email inline screenshot + smtp_images = email_mock.call_args[1]["images"] + assert smtp_images[list(smtp_images.keys())[0]] == SCREENSHOT_FILE + # Assert logs are correct + assert_log(ReportState.SUCCESS) + + +@pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "create_report_slack_chart" +) +@patch("superset.reports.notifications.slack.WebClient.files_upload") +@patch("superset.utils.screenshots.ChartScreenshot.get_screenshot") +def test_slack_chart_report_schedule( + screenshot_mock, + file_upload_mock, + create_report_slack_chart, +): + """ + ExecuteReport Command: Test chart slack report schedule + """ + # setup screenshot mock + screenshot_mock.return_value = SCREENSHOT_FILE + + with freeze_time("2020-01-01T00:00:00Z"): + with patch.object(current_app.config["STATS_LOGGER"], "gauge") as statsd_mock: + + AsyncExecuteReportScheduleCommand( + TEST_ID, create_report_slack_chart.id, datetime.utcnow() + ).run() + + notification_targets = get_target_from_report_schedule( + create_report_slack_chart + ) + + assert file_upload_mock.call_args[1]["channels"] == notification_targets[0] + assert file_upload_mock.call_args[1]["file"] == SCREENSHOT_FILE + + # Assert logs are correct + assert_log(ReportState.SUCCESS) + statsd_mock.assert_called_once_with("reports.slack.send.ok", 1) + + 
+@pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "create_report_slack_chart" +) +@patch("superset.reports.notifications.slack.WebClient") +@patch("superset.utils.screenshots.ChartScreenshot.get_screenshot") +def test_slack_chart_report_schedule_with_errors( + screenshot_mock, + web_client_mock, + create_report_slack_chart, +): + """ + ExecuteReport Command: Test that all slack errors will + properly log something + """ + # setup screenshot mock + screenshot_mock.return_value = SCREENSHOT_FILE + + slack_errors = [ + BotUserAccessError(), + SlackRequestError(), + SlackClientConfigurationError(), + SlackObjectFormationError(), + SlackTokenRotationError(api_error="foo"), + SlackClientNotConnectedError(), + SlackClientError(), + SlackApiError(message="foo", response="bar"), + ] + + for idx, er in enumerate(slack_errors): + web_client_mock.side_effect = er + + with pytest.raises(ReportScheduleClientErrorsException): + + AsyncExecuteReportScheduleCommand( + TEST_ID, create_report_slack_chart.id, datetime.utcnow() + ).run() + + db.session.commit() + + # Assert errors are being logged + + # Only one notification log is sent because it's in grace period + # for the rest of the reports + notification_logs_count = get_notification_error_sent_count( + create_report_slack_chart + ) + error_logs = get_error_logs_query(create_report_slack_chart) + + # check that we have two logs for each error + assert error_logs.count() == (len(slack_errors) + notification_logs_count) * 2 + + # check that each error has a message + assert len([log.error_message for log in error_logs]) == error_logs.count() + + +@pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "create_report_slack_chart_with_csv" +) +@patch("superset.reports.notifications.slack.WebClient.files_upload") +@patch("superset.utils.csv.urllib.request.urlopen") +@patch("superset.utils.csv.urllib.request.OpenerDirector.open") +@patch("superset.utils.csv.get_chart_csv_data") +def test_slack_chart_report_schedule_with_csv( + csv_mock, + mock_open, + mock_urlopen, + file_upload_mock, + create_report_slack_chart_with_csv, +): + """ + ExecuteReport Command: Test chart slack report schedule with CSV + """ + # setup csv mock + response = Mock() + mock_open.return_value = response + mock_urlopen.return_value = response + mock_urlopen.return_value.getcode.return_value = 200 + response.read.return_value = CSV_FILE + + with freeze_time("2020-01-01T00:00:00Z"): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_report_slack_chart_with_csv.id, datetime.utcnow() + ).run() + + notification_targets = get_target_from_report_schedule( + create_report_slack_chart_with_csv + ) + assert file_upload_mock.call_args[1]["channels"] == notification_targets[0] + assert file_upload_mock.call_args[1]["file"] == CSV_FILE + + # Assert logs are correct + assert_log(ReportState.SUCCESS) + + +@pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "create_report_slack_chart_with_text" +) +@patch("superset.reports.notifications.slack.WebClient.chat_postMessage") +@patch("superset.utils.csv.urllib.request.urlopen") +@patch("superset.utils.csv.urllib.request.OpenerDirector.open") +@patch("superset.utils.csv.get_chart_dataframe") +def test_slack_chart_report_schedule_with_text( + dataframe_mock, + mock_open, + mock_urlopen, + post_message_mock, + create_report_slack_chart_with_text, +): + """ + ExecuteReport Command: Test chart slack report schedule with text + """ + # setup dataframe mock + response = Mock() + mock_open.return_value = 
response + mock_urlopen.return_value = response + mock_urlopen.return_value.getcode.return_value = 200 + response.read.return_value = json.dumps( + { + "result": [ + { + "data": { + "t1": {0: "c11", 1: "c21"}, + "t2": {0: "c12", 1: "c22"}, + "t3__sum": {0: "c13", 1: "c23"}, + }, + "colnames": [("t1",), ("t2",), ("t3__sum",)], + "indexnames": [(0,), (1,)], + }, + ], + } + ).encode("utf-8") + + with freeze_time("2020-01-01T00:00:00Z"): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_report_slack_chart_with_text.id, datetime.utcnow() + ).run() + + table_markdown = """| | t1 | t2 | t3__sum | +|---:|:-----|:-----|:----------| +| 0 | c11 | c12 | c13 | +| 1 | c21 | c22 | c23 |""" + assert table_markdown in post_message_mock.call_args[1]["text"] + assert ( + f"" + in post_message_mock.call_args[1]["text"] + ) + + # Assert logs are correct + assert_log(ReportState.SUCCESS) + + +@pytest.mark.usefixtures("create_report_slack_chart") +def test_report_schedule_not_found(create_report_slack_chart): + """ + ExecuteReport Command: Test report schedule not found + """ + max_id = db.session.query(func.max(ReportSchedule.id)).scalar() + with pytest.raises(ReportScheduleNotFoundError): + AsyncExecuteReportScheduleCommand(TEST_ID, max_id + 1, datetime.utcnow()).run() + + +@pytest.mark.usefixtures("create_report_slack_chart_working") +def test_report_schedule_working(create_report_slack_chart_working): + """ + ExecuteReport Command: Test report schedule still working + """ + # setup screenshot mock + with freeze_time("2020-01-01T00:00:00Z"): + with pytest.raises(ReportSchedulePreviousWorkingError): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_report_slack_chart_working.id, datetime.utcnow() + ).run() + + assert_log( + ReportState.WORKING, + error_message=ReportSchedulePreviousWorkingError.message, + ) + assert create_report_slack_chart_working.last_state == ReportState.WORKING + + +@pytest.mark.usefixtures("create_report_slack_chart_working") +def test_report_schedule_working_timeout(create_report_slack_chart_working): + """ + ExecuteReport Command: Test report schedule still working but should timed out + """ + current_time = create_report_slack_chart_working.last_eval_dttm + timedelta( + seconds=create_report_slack_chart_working.working_timeout + 1 + ) + with freeze_time(current_time): + + with pytest.raises(ReportScheduleWorkingTimeoutError): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_report_slack_chart_working.id, datetime.utcnow() + ).run() + + # Only needed for MySQL, understand why + db.session.commit() + logs = db.session.query(ReportExecutionLog).all() + # Two logs, first is created by fixture + assert len(logs) == 2 + assert ReportScheduleWorkingTimeoutError.message in [ + log.error_message for log in logs + ] + assert create_report_slack_chart_working.last_state == ReportState.ERROR + + +@pytest.mark.usefixtures("create_alert_slack_chart_success") +def test_report_schedule_success_grace(create_alert_slack_chart_success): + """ + ExecuteReport Command: Test report schedule on success to grace + """ + # set current time to within the grace period + current_time = create_alert_slack_chart_success.last_eval_dttm + timedelta( + seconds=create_alert_slack_chart_success.grace_period - 10 + ) + + with freeze_time(current_time): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_alert_slack_chart_success.id, datetime.utcnow() + ).run() + + db.session.commit() + assert create_alert_slack_chart_success.last_state == ReportState.GRACE + + 
+@pytest.mark.usefixtures("create_alert_slack_chart_grace") +@patch("superset.reports.notifications.slack.WebClient.files_upload") +@patch("superset.utils.screenshots.ChartScreenshot.get_screenshot") +def test_report_schedule_success_grace_end( + screenshot_mock, file_upload_mock, create_alert_slack_chart_grace +): + """ + ExecuteReport Command: Test report schedule on grace to noop + """ + + screenshot_mock.return_value = SCREENSHOT_FILE + + # set current time to after the grace period + current_time = create_alert_slack_chart_grace.last_eval_dttm + timedelta( + seconds=create_alert_slack_chart_grace.grace_period + 1 + ) + + with freeze_time(current_time): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_alert_slack_chart_grace.id, datetime.utcnow() + ).run() + + db.session.commit() + assert create_alert_slack_chart_grace.last_state == ReportState.SUCCESS + + +@pytest.mark.usefixtures("create_alert_email_chart") +@patch("superset.reports.notifications.email.send_email_smtp") +@patch("superset.utils.screenshots.ChartScreenshot.get_screenshot") +def test_alert_limit_is_applied( + screenshot_mock, + email_mock, + create_alert_email_chart, +): + """ + ExecuteReport Command: Test that all alerts apply a SQL limit to stmts + """ + screenshot_mock.return_value = SCREENSHOT_FILE + + with patch.object( + create_alert_email_chart.database.db_engine_spec, "execute", return_value=None + ) as execute_mock: + with patch.object( + create_alert_email_chart.database.db_engine_spec, + "fetch_data", + return_value=None, + ) as fetch_data_mock: + AsyncExecuteReportScheduleCommand( + TEST_ID, create_alert_email_chart.id, datetime.utcnow() + ).run() + assert "LIMIT 2" in execute_mock.call_args[0][1] + + +@pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "create_report_email_dashboard" +) +@patch("superset.reports.notifications.email.send_email_smtp") +@patch("superset.utils.screenshots.DashboardScreenshot.get_screenshot") +def test_email_dashboard_report_fails( + screenshot_mock, email_mock, create_report_email_dashboard +): + """ + ExecuteReport Command: Test dashboard email report schedule notification fails + """ + # setup screenshot mock + from smtplib import SMTPException + + screenshot_mock.return_value = SCREENSHOT_FILE + email_mock.side_effect = SMTPException("Could not connect to SMTP XPTO") + + with pytest.raises(ReportScheduleSystemErrorsException): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_report_email_dashboard.id, datetime.utcnow() + ).run() + + assert_log(ReportState.ERROR, error_message="Could not connect to SMTP XPTO") + + +@pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "create_report_email_dashboard" +) +@patch("superset.reports.notifications.email.send_email_smtp") +@patch("superset.utils.screenshots.DashboardScreenshot.get_screenshot") +def test_email_dashboard_report_fails_uncaught_exception( + screenshot_mock, email_mock, create_report_email_dashboard +): + """ + ExecuteReport Command: Test dashboard email report schedule notification fails + and logs with uncaught exception + """ + # setup screenshot mock + from smtplib import SMTPException + + screenshot_mock.return_value = SCREENSHOT_FILE + email_mock.side_effect = Exception("Uncaught exception") + + with pytest.raises(Exception): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_report_email_dashboard.id, datetime.utcnow() + ).run() + + assert_log(ReportState.ERROR, error_message="Uncaught exception") + + +@pytest.mark.usefixtures( + 
"load_birth_names_dashboard_with_slices", "create_alert_email_chart" +) +@patch("superset.reports.notifications.email.send_email_smtp") +@patch("superset.utils.screenshots.ChartScreenshot.get_screenshot") +@patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + ALERTS_ATTACH_REPORTS=True, +) +def test_slack_chart_alert( + screenshot_mock, + email_mock, + create_alert_email_chart, +): + """ + ExecuteReport Command: Test chart slack alert + """ + # setup screenshot mock + screenshot_mock.return_value = SCREENSHOT_FILE + + with freeze_time("2020-01-01T00:00:00Z"): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_alert_email_chart.id, datetime.utcnow() + ).run() + + notification_targets = get_target_from_report_schedule(create_alert_email_chart) + # Assert the email smtp address + assert email_mock.call_args[0][0] == notification_targets[0] + # Assert the email inline screenshot + smtp_images = email_mock.call_args[1]["images"] + assert smtp_images[list(smtp_images.keys())[0]] == SCREENSHOT_FILE + # Assert logs are correct + assert_log(ReportState.SUCCESS) + + +@pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "create_alert_email_chart" +) +@patch("superset.reports.notifications.email.send_email_smtp") +@patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + ALERTS_ATTACH_REPORTS=False, +) +def test_slack_chart_alert_no_attachment(email_mock, create_alert_email_chart): + """ + ExecuteReport Command: Test chart slack alert + """ + # setup screenshot mock + + with freeze_time("2020-01-01T00:00:00Z"): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_alert_email_chart.id, datetime.utcnow() + ).run() + + notification_targets = get_target_from_report_schedule(create_alert_email_chart) + # Assert the email smtp address + assert email_mock.call_args[0][0] == notification_targets[0] + # Assert the there is no attached image + assert email_mock.call_args[1]["images"] == {} + # Assert logs are correct + assert_log(ReportState.SUCCESS) + + +@pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "create_report_slack_chart" +) +@patch("superset.reports.notifications.slack.WebClient") +@patch("superset.utils.screenshots.ChartScreenshot.get_screenshot") +def test_slack_token_callable_chart_report( + screenshot_mock, + slack_client_mock_class, + create_report_slack_chart, +): + """ + ExecuteReport Command: Test chart slack alert (slack token callable) + """ + slack_client_mock_class.return_value = Mock() + app.config["SLACK_API_TOKEN"] = Mock(return_value="cool_code") + # setup screenshot mock + screenshot_mock.return_value = SCREENSHOT_FILE + + with freeze_time("2020-01-01T00:00:00Z"): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_report_slack_chart.id, datetime.utcnow() + ).run() + app.config["SLACK_API_TOKEN"].assert_called_once() + assert slack_client_mock_class.called_with(token="cool_code", proxy="") + assert_log(ReportState.SUCCESS) + + +@pytest.mark.usefixtures("create_no_alert_email_chart") +def test_email_chart_no_alert(create_no_alert_email_chart): + """ + ExecuteReport Command: Test chart email no alert + """ + with freeze_time("2020-01-01T00:00:00Z"): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_no_alert_email_chart.id, datetime.utcnow() + ).run() + assert_log(ReportState.NOOP) + + +@pytest.mark.usefixtures("create_mul_alert_email_chart") +def test_email_mul_alert(create_mul_alert_email_chart): + """ + ExecuteReport Command: Test chart email multiple rows + """ + with 
freeze_time("2020-01-01T00:00:00Z"): + with pytest.raises( + (AlertQueryMultipleRowsError, AlertQueryMultipleColumnsError) + ): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_mul_alert_email_chart.id, datetime.utcnow() + ).run() + + +@pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "create_alert_email_chart" +) +@patch("superset.reports.notifications.email.send_email_smtp") +def test_soft_timeout_alert(email_mock, create_alert_email_chart): + """ + ExecuteReport Command: Test soft timeout on alert queries + """ + from celery.exceptions import SoftTimeLimitExceeded + + from superset.reports.commands.exceptions import AlertQueryTimeout + + with patch.object( + create_alert_email_chart.database.db_engine_spec, "execute", return_value=None + ) as execute_mock: + execute_mock.side_effect = SoftTimeLimitExceeded() + with pytest.raises(AlertQueryTimeout): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_alert_email_chart.id, datetime.utcnow() + ).run() + + notification_targets = get_target_from_report_schedule(create_alert_email_chart) + # Assert the email smtp address, asserts a notification was sent with the error + assert email_mock.call_args[0][0] == DEFAULT_OWNER_EMAIL + + assert_log( + ReportState.ERROR, error_message="A timeout occurred while executing the query." + ) + + +@pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "create_alert_email_chart" +) +@patch("superset.reports.notifications.email.send_email_smtp") +@patch("superset.utils.screenshots.ChartScreenshot.get_screenshot") +@patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + ALERTS_ATTACH_REPORTS=True, +) +def test_soft_timeout_screenshot(screenshot_mock, email_mock, create_alert_email_chart): + """ + ExecuteReport Command: Test soft timeout on screenshot + """ + from celery.exceptions import SoftTimeLimitExceeded + + screenshot_mock.side_effect = SoftTimeLimitExceeded() + with pytest.raises(ReportScheduleScreenshotTimeout): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_alert_email_chart.id, datetime.utcnow() + ).run() + + # Assert the email smtp address, asserts a notification was sent with the error + assert email_mock.call_args[0][0] == DEFAULT_OWNER_EMAIL + + assert_log( + ReportState.ERROR, error_message="A timeout occurred while taking a screenshot." 
+ ) + + +@pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "create_report_email_chart_with_csv" +) +@patch("superset.utils.csv.urllib.request.urlopen") +@patch("superset.utils.csv.urllib.request.OpenerDirector.open") +@patch("superset.reports.notifications.email.send_email_smtp") +@patch("superset.utils.csv.get_chart_csv_data") +def test_soft_timeout_csv( + csv_mock, + email_mock, + mock_open, + mock_urlopen, + create_report_email_chart_with_csv, +): + """ + ExecuteReport Command: Test fail on generating csv + """ + from celery.exceptions import SoftTimeLimitExceeded + + response = Mock() + mock_open.return_value = response + mock_urlopen.return_value = response + mock_urlopen.return_value.getcode.side_effect = SoftTimeLimitExceeded() + + with pytest.raises(ReportScheduleCsvTimeout): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_report_email_chart_with_csv.id, datetime.utcnow() + ).run() + + notification_targets = get_target_from_report_schedule( + create_report_email_chart_with_csv + ) + # Assert the email smtp address, asserts a notification was sent with the error + assert email_mock.call_args[0][0] == DEFAULT_OWNER_EMAIL + + assert_log( + ReportState.ERROR, + error_message="A timeout occurred while generating a csv.", + ) + + +@pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "create_report_email_chart_with_csv" +) +@patch("superset.utils.csv.urllib.request.urlopen") +@patch("superset.utils.csv.urllib.request.OpenerDirector.open") +@patch("superset.reports.notifications.email.send_email_smtp") +@patch("superset.utils.csv.get_chart_csv_data") +def test_generate_no_csv( + csv_mock, + email_mock, + mock_open, + mock_urlopen, + create_report_email_chart_with_csv, +): + """ + ExecuteReport Command: Test fail on generating csv + """ + response = Mock() + mock_open.return_value = response + mock_urlopen.return_value = response + mock_urlopen.return_value.getcode.return_value = 200 + response.read.return_value = None + + with pytest.raises(ReportScheduleCsvFailedError): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_report_email_chart_with_csv.id, datetime.utcnow() + ).run() + + notification_targets = get_target_from_report_schedule( + create_report_email_chart_with_csv + ) + # Assert the email smtp address, asserts a notification was sent with the error + assert email_mock.call_args[0][0] == DEFAULT_OWNER_EMAIL + + assert_log( + ReportState.ERROR, + error_message="Report Schedule execution failed when generating a csv.", + ) + + +@pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "create_report_email_chart" +) +@patch("superset.reports.notifications.email.send_email_smtp") +@patch("superset.utils.screenshots.ChartScreenshot.get_screenshot") +def test_fail_screenshot(screenshot_mock, email_mock, create_report_email_chart): + """ + ExecuteReport Command: Test soft timeout on screenshot + """ + from celery.exceptions import SoftTimeLimitExceeded + + from superset.reports.commands.exceptions import AlertQueryTimeout + + screenshot_mock.side_effect = Exception("Unexpected error") + with pytest.raises(ReportScheduleScreenshotFailedError): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_report_email_chart.id, datetime.utcnow() + ).run() + + notification_targets = get_target_from_report_schedule(create_report_email_chart) + # Assert the email smtp address, asserts a notification was sent with the error + assert email_mock.call_args[0][0] == DEFAULT_OWNER_EMAIL + + assert_log( + ReportState.ERROR, error_message="Failed 
taking a screenshot Unexpected error" + ) + + +@pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "create_report_email_chart_with_csv" +) +@patch("superset.reports.notifications.email.send_email_smtp") +@patch("superset.utils.csv.urllib.request.urlopen") +@patch("superset.utils.csv.urllib.request.OpenerDirector.open") +@patch("superset.utils.csv.get_chart_csv_data") +def test_fail_csv( + csv_mock, mock_open, mock_urlopen, email_mock, create_report_email_chart_with_csv +): + """ + ExecuteReport Command: Test error on csv + """ + + response = Mock() + mock_open.return_value = response + mock_urlopen.return_value = response + mock_urlopen.return_value.getcode.return_value = 500 + + with pytest.raises(ReportScheduleCsvFailedError): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_report_email_chart_with_csv.id, datetime.utcnow() + ).run() + + get_target_from_report_schedule(create_report_email_chart_with_csv) + # Assert the email smtp address, asserts a notification was sent with the error + assert email_mock.call_args[0][0] == DEFAULT_OWNER_EMAIL + + assert_log( + ReportState.ERROR, error_message="Failed generating csv " + ) + + +@pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "create_alert_email_chart" +) +@patch("superset.reports.notifications.email.send_email_smtp") +@patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + ALERTS_ATTACH_REPORTS=False, +) +def test_email_disable_screenshot(email_mock, create_alert_email_chart): + """ + ExecuteReport Command: Test soft timeout on screenshot + """ + + AsyncExecuteReportScheduleCommand( + TEST_ID, create_alert_email_chart.id, datetime.utcnow() + ).run() + + notification_targets = get_target_from_report_schedule(create_alert_email_chart) + # Assert the email smtp address, asserts a notification was sent with the error + assert email_mock.call_args[0][0] == notification_targets[0] + # Assert the there is no attached image + assert email_mock.call_args[1]["images"] == {} + + assert_log(ReportState.SUCCESS) + + +@pytest.mark.usefixtures("create_invalid_sql_alert_email_chart") +@patch("superset.reports.notifications.email.send_email_smtp") +def test_invalid_sql_alert(email_mock, create_invalid_sql_alert_email_chart): + """ + ExecuteReport Command: Test alert with invalid SQL statements + """ + with freeze_time("2020-01-01T00:00:00Z"): + with pytest.raises((AlertQueryError, AlertQueryInvalidTypeError)): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_invalid_sql_alert_email_chart.id, datetime.utcnow() + ).run() + + # Assert the email smtp address, asserts a notification was sent with the error + assert email_mock.call_args[0][0] == DEFAULT_OWNER_EMAIL + assert_log(ReportState.ERROR) + + +@pytest.mark.usefixtures("create_invalid_sql_alert_email_chart") +@patch("superset.reports.notifications.email.send_email_smtp") +def test_grace_period_error(email_mock, create_invalid_sql_alert_email_chart): + """ + ExecuteReport Command: Test alert grace period on error + """ + with freeze_time("2020-01-01T00:00:00Z"): + with pytest.raises((AlertQueryError, AlertQueryInvalidTypeError)): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_invalid_sql_alert_email_chart.id, datetime.utcnow() + ).run() + + # Only needed for MySQL, understand why + db.session.commit() + + # Assert the email smtp address, asserts a notification was sent with the error + assert email_mock.call_args[0][0] == DEFAULT_OWNER_EMAIL + assert ( + get_notification_error_sent_count(create_invalid_sql_alert_email_chart) == 
1 + ) + + with freeze_time("2020-01-01T00:30:00Z"): + with pytest.raises((AlertQueryError, AlertQueryInvalidTypeError)): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_invalid_sql_alert_email_chart.id, datetime.utcnow() + ).run() + db.session.commit() + assert ( + get_notification_error_sent_count(create_invalid_sql_alert_email_chart) == 1 + ) + + # Grace period ends, assert a notification was sent + with freeze_time("2020-01-01T01:30:00Z"): + with pytest.raises((AlertQueryError, AlertQueryInvalidTypeError)): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_invalid_sql_alert_email_chart.id, datetime.utcnow() + ).run() + db.session.commit() + assert ( + get_notification_error_sent_count(create_invalid_sql_alert_email_chart) == 2 + ) + + +@pytest.mark.usefixtures("create_invalid_sql_alert_email_chart") +@patch("superset.reports.notifications.email.send_email_smtp") +@patch("superset.utils.screenshots.ChartScreenshot.get_screenshot") +def test_grace_period_error_flap( + screenshot_mock, + email_mock, + create_invalid_sql_alert_email_chart, +): + """ + ExecuteReport Command: Test alert grace period on error + """ + with freeze_time("2020-01-01T00:00:00Z"): + with pytest.raises((AlertQueryError, AlertQueryInvalidTypeError)): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_invalid_sql_alert_email_chart.id, datetime.utcnow() + ).run() + db.session.commit() + # Assert we have 1 notification sent on the log + assert ( + get_notification_error_sent_count(create_invalid_sql_alert_email_chart) == 1 + ) + + with freeze_time("2020-01-01T00:30:00Z"): + with pytest.raises((AlertQueryError, AlertQueryInvalidTypeError)): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_invalid_sql_alert_email_chart.id, datetime.utcnow() + ).run() + db.session.commit() + assert ( + get_notification_error_sent_count(create_invalid_sql_alert_email_chart) == 1 + ) + + # Change report_schedule to valid + create_invalid_sql_alert_email_chart.sql = "SELECT 1 AS metric" + create_invalid_sql_alert_email_chart.grace_period = 0 + db.session.merge(create_invalid_sql_alert_email_chart) + db.session.commit() + + with freeze_time("2020-01-01T00:31:00Z"): + # One success + AsyncExecuteReportScheduleCommand( + TEST_ID, create_invalid_sql_alert_email_chart.id, datetime.utcnow() + ).run() + # Grace period ends + AsyncExecuteReportScheduleCommand( + TEST_ID, create_invalid_sql_alert_email_chart.id, datetime.utcnow() + ).run() + + db.session.commit() + + create_invalid_sql_alert_email_chart.sql = "SELECT 'first'" + create_invalid_sql_alert_email_chart.grace_period = 10 + db.session.merge(create_invalid_sql_alert_email_chart) + db.session.commit() + + # assert that after a success, when back to error we send the error notification + # again + with freeze_time("2020-01-01T00:32:00Z"): + with pytest.raises((AlertQueryError, AlertQueryInvalidTypeError)): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_invalid_sql_alert_email_chart.id, datetime.utcnow() + ).run() + db.session.commit() + assert ( + get_notification_error_sent_count(create_invalid_sql_alert_email_chart) == 2 + ) + + +@pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "create_report_email_dashboard" +) +@patch("superset.reports.dao.ReportScheduleDAO.bulk_delete_logs") +def test_prune_log_soft_time_out(bulk_delete_logs, create_report_email_dashboard): + from celery.exceptions import SoftTimeLimitExceeded + + bulk_delete_logs.side_effect = SoftTimeLimitExceeded() + with pytest.raises(SoftTimeLimitExceeded) as excinfo: + 
AsyncPruneReportScheduleLogCommand().run() + assert str(excinfo.value) == "SoftTimeLimitExceeded()" + + +@patch("superset.reports.commands.execute.logger") +@patch("superset.reports.commands.execute.create_notification") +def test__send_with_client_errors(notification_mock, logger_mock): + notification_content = "I am some content" + recipients = ["test@foo.com"] + notification_mock.return_value.send.side_effect = NotificationParamException() + with pytest.raises(ReportScheduleClientErrorsException) as excinfo: + BaseReportState._send(BaseReportState, notification_content, recipients) + + assert excinfo.errisinstance(SupersetException) + logger_mock.warning.assert_called_with( + ( + "SupersetError(message='', error_type=, level=, extra=None)" + ) + ) + + +@patch("superset.reports.commands.execute.logger") +@patch("superset.reports.commands.execute.create_notification") +def test__send_with_multiple_errors(notification_mock, logger_mock): + notification_content = "I am some content" + recipients = ["test@foo.com", "test2@bar.com"] + notification_mock.return_value.send.side_effect = [ + NotificationParamException(), + NotificationError(), + ] + # it raises the error with a 500 status if present + with pytest.raises(ReportScheduleSystemErrorsException) as excinfo: + BaseReportState._send(BaseReportState, notification_content, recipients) + + assert excinfo.errisinstance(SupersetException) + # it logs both errors as warnings + logger_mock.warning.assert_has_calls( + [ + call( + "SupersetError(message='', error_type=, level=, extra=None)" + ), + call( + "SupersetError(message='', error_type=, level=, extra=None)" + ), + ] + ) + + +@patch("superset.reports.commands.execute.logger") +@patch("superset.reports.commands.execute.create_notification") +def test__send_with_server_errors(notification_mock, logger_mock): + + notification_content = "I am some content" + recipients = ["test@foo.com"] + notification_mock.return_value.send.side_effect = NotificationError() + with pytest.raises(ReportScheduleSystemErrorsException) as excinfo: + BaseReportState._send(BaseReportState, notification_content, recipients) + + assert excinfo.errisinstance(SupersetException) + # it logs the error + logger_mock.warning.assert_called_with( + ( + "SupersetError(message='', error_type=, level=, extra=None)" + ) + ) diff --git a/tests/integration_tests/reports/scheduler_tests.py b/tests/integration_tests/reports/scheduler_tests.py new file mode 100644 index 0000000000000..3dd6e72941e2e --- /dev/null +++ b/tests/integration_tests/reports/scheduler_tests.py @@ -0,0 +1,213 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +from random import randint +from typing import List +from unittest.mock import patch + +import pytest +from flask_appbuilder.security.sqla.models import User +from freezegun import freeze_time +from freezegun.api import FakeDatetime # type: ignore + +from superset.extensions import db +from superset.reports.models import ReportScheduleType +from superset.tasks.scheduler import execute, scheduler +from tests.integration_tests.reports.utils import insert_report_schedule +from tests.integration_tests.test_app import app + + +@pytest.fixture +def owners(get_user) -> List[User]: + return [get_user("admin")] + + +@pytest.mark.usefixtures("owners") +@patch("superset.tasks.scheduler.execute.apply_async") +def test_scheduler_celery_timeout_ny(execute_mock, owners): + """ + Reports scheduler: Test scheduler setting celery soft and hard timeout + """ + with app.app_context(): + + report_schedule = insert_report_schedule( + type=ReportScheduleType.ALERT, + name="report", + crontab="0 4 * * *", + timezone="America/New_York", + owners=owners, + ) + + with freeze_time("2020-01-01T09:00:00Z"): + scheduler() + assert execute_mock.call_args[1]["soft_time_limit"] == 3601 + assert execute_mock.call_args[1]["time_limit"] == 3610 + db.session.delete(report_schedule) + db.session.commit() + + +@pytest.mark.usefixtures("owners") +@patch("superset.tasks.scheduler.execute.apply_async") +def test_scheduler_celery_no_timeout_ny(execute_mock, owners): + """ + Reports scheduler: Test scheduler setting celery soft and hard timeout + """ + with app.app_context(): + app.config["ALERT_REPORTS_WORKING_TIME_OUT_KILL"] = False + report_schedule = insert_report_schedule( + type=ReportScheduleType.ALERT, + name="report", + crontab="0 4 * * *", + timezone="America/New_York", + owners=owners, + ) + + with freeze_time("2020-01-01T09:00:00Z"): + scheduler() + assert execute_mock.call_args[1] == {"eta": FakeDatetime(2020, 1, 1, 9, 0)} + db.session.delete(report_schedule) + db.session.commit() + app.config["ALERT_REPORTS_WORKING_TIME_OUT_KILL"] = True + + +@pytest.mark.usefixtures("owners") +@patch("superset.tasks.scheduler.execute.apply_async") +def test_scheduler_celery_timeout_utc(execute_mock, owners): + """ + Reports scheduler: Test scheduler setting celery soft and hard timeout + """ + with app.app_context(): + + report_schedule = insert_report_schedule( + type=ReportScheduleType.ALERT, + name="report", + crontab="0 9 * * *", + timezone="UTC", + owners=owners, + ) + + with freeze_time("2020-01-01T09:00:00Z"): + scheduler() + assert execute_mock.call_args[1]["soft_time_limit"] == 3601 + assert execute_mock.call_args[1]["time_limit"] == 3610 + db.session.delete(report_schedule) + db.session.commit() + + +@pytest.mark.usefixtures("owners") +@patch("superset.tasks.scheduler.execute.apply_async") +def test_scheduler_celery_no_timeout_utc(execute_mock, owners): + """ + Reports scheduler: Test scheduler setting celery soft and hard timeout + """ + with app.app_context(): + app.config["ALERT_REPORTS_WORKING_TIME_OUT_KILL"] = False + report_schedule = insert_report_schedule( + type=ReportScheduleType.ALERT, + name="report", + crontab="0 9 * * *", + timezone="UTC", + owners=owners, + ) + + with freeze_time("2020-01-01T09:00:00Z"): + scheduler() + assert execute_mock.call_args[1] == {"eta": FakeDatetime(2020, 1, 1, 9, 0)} + db.session.delete(report_schedule) + db.session.commit() + app.config["ALERT_REPORTS_WORKING_TIME_OUT_KILL"] = True + + +@pytest.mark.usefixtures("owners") +@patch("superset.tasks.scheduler.is_feature_enabled") 
+@patch("superset.tasks.scheduler.execute.apply_async") +def test_scheduler_feature_flag_off(execute_mock, is_feature_enabled, owners): + """ + Reports scheduler: Test scheduler with feature flag off + """ + with app.app_context(): + is_feature_enabled.return_value = False + report_schedule = insert_report_schedule( + type=ReportScheduleType.ALERT, + name="report", + crontab="0 9 * * *", + timezone="UTC", + owners=owners, + ) + + with freeze_time("2020-01-01T09:00:00Z"): + scheduler() + execute_mock.assert_not_called() + db.session.delete(report_schedule) + db.session.commit() + + +@pytest.mark.usefixtures("owners") +@patch("superset.reports.commands.execute.AsyncExecuteReportScheduleCommand.__init__") +@patch("superset.reports.commands.execute.AsyncExecuteReportScheduleCommand.run") +@patch("superset.tasks.scheduler.execute.update_state") +def test_execute_task(update_state_mock, command_mock, init_mock, owners): + from superset.reports.commands.exceptions import ReportScheduleUnexpectedError + + with app.app_context(): + report_schedule = insert_report_schedule( + type=ReportScheduleType.ALERT, + name=f"report-{randint(0,1000)}", + crontab="0 4 * * *", + timezone="America/New_York", + owners=owners, + ) + init_mock.return_value = None + command_mock.side_effect = ReportScheduleUnexpectedError("Unexpected error") + with freeze_time("2020-01-01T09:00:00Z"): + execute(report_schedule.id, "2020-01-01T09:00:00Z") + update_state_mock.assert_called_with(state="FAILURE") + + db.session.delete(report_schedule) + db.session.commit() + + +@pytest.mark.usefixtures("owners") +@patch("superset.reports.commands.execute.AsyncExecuteReportScheduleCommand.__init__") +@patch("superset.reports.commands.execute.AsyncExecuteReportScheduleCommand.run") +@patch("superset.tasks.scheduler.execute.update_state") +@patch("superset.utils.log.logger") +def test_execute_task_with_command_exception( + logger_mock, update_state_mock, command_mock, init_mock, owners +): + from superset.commands.exceptions import CommandException + + with app.app_context(): + report_schedule = insert_report_schedule( + type=ReportScheduleType.ALERT, + name=f"report-{randint(0,1000)}", + crontab="0 4 * * *", + timezone="America/New_York", + owners=owners, + ) + init_mock.return_value = None + command_mock.side_effect = CommandException("Unexpected error") + with freeze_time("2020-01-01T09:00:00Z"): + execute(report_schedule.id, "2020-01-01T09:00:00Z") + update_state_mock.assert_called_with(state="FAILURE") + logger_mock.exception.assert_called_with( + "A downstream exception occurred while generating a report: None. Unexpected error", + exc_info=True, + ) + + db.session.delete(report_schedule) + db.session.commit() diff --git a/tests/integration_tests/reports/utils.py b/tests/integration_tests/reports/utils.py new file mode 100644 index 0000000000000..3801beb1a328e --- /dev/null +++ b/tests/integration_tests/reports/utils.py @@ -0,0 +1,201 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import json +from contextlib import contextmanager +from typing import Any, Dict, List, Optional +from uuid import uuid4 + +from flask_appbuilder.security.sqla.models import User + +from superset import db, security_manager +from superset.models.core import Database +from superset.models.dashboard import Dashboard +from superset.models.slice import Slice +from superset.reports.models import ( + ReportDataFormat, + ReportExecutionLog, + ReportRecipients, + ReportRecipientType, + ReportSchedule, + ReportScheduleType, + ReportState, +) +from superset.utils.core import override_user +from tests.integration_tests.test_app import app +from tests.integration_tests.utils import read_fixture + +TEST_ID = str(uuid4()) +CSV_FILE = read_fixture("trends.csv") +SCREENSHOT_FILE = read_fixture("sample.png") +DEFAULT_OWNER_EMAIL = "admin@fab.org" + + +def insert_report_schedule( + type: str, + name: str, + crontab: str, + owners: List[User], + timezone: Optional[str] = None, + sql: Optional[str] = None, + description: Optional[str] = None, + chart: Optional[Slice] = None, + dashboard: Optional[Dashboard] = None, + database: Optional[Database] = None, + validator_type: Optional[str] = None, + validator_config_json: Optional[str] = None, + log_retention: Optional[int] = None, + last_state: Optional[ReportState] = None, + grace_period: Optional[int] = None, + recipients: Optional[List[ReportRecipients]] = None, + report_format: Optional[ReportDataFormat] = None, + logs: Optional[List[ReportExecutionLog]] = None, + extra: Optional[Dict[Any, Any]] = None, + force_screenshot: bool = False, +) -> ReportSchedule: + owners = owners or [] + recipients = recipients or [] + logs = logs or [] + last_state = last_state or ReportState.NOOP + + with override_user(owners[0]): + report_schedule = ReportSchedule( + type=type, + name=name, + crontab=crontab, + timezone=timezone, + sql=sql, + description=description, + chart=chart, + dashboard=dashboard, + database=database, + owners=owners, + validator_type=validator_type, + validator_config_json=validator_config_json, + log_retention=log_retention, + grace_period=grace_period, + recipients=recipients, + logs=logs, + last_state=last_state, + report_format=report_format, + extra=extra, + force_screenshot=force_screenshot, + ) + db.session.add(report_schedule) + db.session.commit() + return report_schedule + + +def create_report_notification( + email_target: Optional[str] = None, + slack_channel: Optional[str] = None, + chart: Optional[Slice] = None, + dashboard: Optional[Dashboard] = None, + database: Optional[Database] = None, + sql: Optional[str] = None, + report_type: ReportScheduleType = ReportScheduleType.REPORT, + validator_type: Optional[str] = None, + validator_config_json: Optional[str] = None, + grace_period: Optional[int] = None, + report_format: Optional[ReportDataFormat] = None, + name: Optional[str] = None, + extra: Optional[Dict[str, Any]] = None, + force_screenshot: bool = False, + owners: Optional[List[User]] = None, +) -> ReportSchedule: + if not owners: + owners = [ + ( + db.session.query(security_manager.user_model) + 
.filter_by(email=DEFAULT_OWNER_EMAIL) + .one_or_none() + ) + ] + + if slack_channel: + recipient = ReportRecipients( + type=ReportRecipientType.SLACK, + recipient_config_json=json.dumps( + { + "target": slack_channel, + } + ), + ) + else: + recipient = ReportRecipients( + type=ReportRecipientType.EMAIL, + recipient_config_json=json.dumps({"target": email_target}), + ) + + if name is None: + name = "report_with_csv" if report_format else "report" + + report_schedule = insert_report_schedule( + report_type, + name=name, + crontab="0 9 * * *", + description="Daily report", + sql=sql, + chart=chart, + dashboard=dashboard, + database=database, + recipients=[recipient], + owners=owners, + validator_type=validator_type, + validator_config_json=validator_config_json, + grace_period=grace_period, + report_format=report_format or ReportDataFormat.VISUALIZATION, + extra=extra, + force_screenshot=force_screenshot, + ) + return report_schedule + + +def cleanup_report_schedule(report_schedule: ReportSchedule) -> None: + db.session.query(ReportExecutionLog).filter( + ReportExecutionLog.report_schedule == report_schedule + ).delete() + db.session.query(ReportRecipients).filter( + ReportRecipients.report_schedule == report_schedule + ).delete() + + db.session.delete(report_schedule) + db.session.commit() + + +@contextmanager +def create_dashboard_report(dashboard, extra, **kwargs): + report_schedule = create_report_notification( + email_target="target@example.com", + dashboard=dashboard, + extra={ + "dashboard": extra, + }, + **kwargs + ) + error = None + + try: + yield report_schedule + except Exception as ex: # pylint: disable=broad-except + error = ex + + # make sure to clean up in case of yield exceptions + cleanup_report_schedule(report_schedule) + + if error: + raise error diff --git a/tests/integration_tests/result_set_tests.py b/tests/integration_tests/result_set_tests.py new file mode 100644 index 0000000000000..18135c486dbea --- /dev/null +++ b/tests/integration_tests/result_set_tests.py @@ -0,0 +1,250 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
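+"""Tests for SupersetResultSet: column name de-duplication, type inference, and conversion to pandas records."""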
+# isort:skip_file +from datetime import datetime + +import tests.integration_tests.test_app +from superset.dataframe import df_to_records +from superset.db_engine_specs import BaseEngineSpec +from superset.result_set import dedup, SupersetResultSet + +from .base_tests import SupersetTestCase + + +class TestSupersetResultSet(SupersetTestCase): + def test_dedup(self): + self.assertEqual(dedup(["foo", "bar"]), ["foo", "bar"]) + self.assertEqual( + dedup(["foo", "bar", "foo", "bar", "Foo"]), + ["foo", "bar", "foo__1", "bar__1", "Foo"], + ) + self.assertEqual( + dedup(["foo", "bar", "bar", "bar", "Bar"]), + ["foo", "bar", "bar__1", "bar__2", "Bar"], + ) + self.assertEqual( + dedup(["foo", "bar", "bar", "bar", "Bar"], case_sensitive=False), + ["foo", "bar", "bar__1", "bar__2", "Bar__3"], + ) + + def test_get_columns_basic(self): + data = [("a1", "b1", "c1"), ("a2", "b2", "c2")] + cursor_descr = (("a", "string"), ("b", "string"), ("c", "string")) + results = SupersetResultSet(data, cursor_descr, BaseEngineSpec) + self.assertEqual( + results.columns, + [ + {"is_dttm": False, "type": "STRING", "name": "a"}, + {"is_dttm": False, "type": "STRING", "name": "b"}, + {"is_dttm": False, "type": "STRING", "name": "c"}, + ], + ) + + def test_get_columns_with_int(self): + data = [("a1", 1), ("a2", 2)] + cursor_descr = (("a", "string"), ("b", "int")) + results = SupersetResultSet(data, cursor_descr, BaseEngineSpec) + self.assertEqual( + results.columns, + [ + {"is_dttm": False, "type": "STRING", "name": "a"}, + {"is_dttm": False, "type": "INT", "name": "b"}, + ], + ) + + def test_get_columns_type_inference(self): + data = [ + (1.2, 1, "foo", datetime(2018, 10, 19, 23, 39, 16, 660000), True), + (3.14, 2, "bar", datetime(2019, 10, 19, 23, 39, 16, 660000), False), + ] + cursor_descr = (("a", None), ("b", None), ("c", None), ("d", None), ("e", None)) + results = SupersetResultSet(data, cursor_descr, BaseEngineSpec) + self.assertEqual( + results.columns, + [ + {"is_dttm": False, "type": "FLOAT", "name": "a"}, + {"is_dttm": False, "type": "INT", "name": "b"}, + {"is_dttm": False, "type": "STRING", "name": "c"}, + {"is_dttm": True, "type": "DATETIME", "name": "d"}, + {"is_dttm": False, "type": "BOOL", "name": "e"}, + ], + ) + + def test_is_date(self): + data = [("a", 1), ("a", 2)] + cursor_descr = (("a", "string"), ("a", "string")) + results = SupersetResultSet(data, cursor_descr, BaseEngineSpec) + self.assertEqual(results.is_temporal("DATE"), True) + self.assertEqual(results.is_temporal("DATETIME"), True) + self.assertEqual(results.is_temporal("TIME"), True) + self.assertEqual(results.is_temporal("TIMESTAMP"), True) + self.assertEqual(results.is_temporal("STRING"), False) + self.assertEqual(results.is_temporal(""), False) + self.assertEqual(results.is_temporal(None), False) + + def test_dedup_with_data(self): + data = [("a", 1), ("a", 2)] + cursor_descr = (("a", "string"), ("a", "string")) + results = SupersetResultSet(data, cursor_descr, BaseEngineSpec) + column_names = [col["name"] for col in results.columns] + self.assertListEqual(column_names, ["a", "a__1"]) + + def test_int64_with_missing_data(self): + data = [(None,), (1239162456494753670,), (None,), (None,), (None,), (None,)] + cursor_descr = [("user_id", "bigint", None, None, None, None, True)] + results = SupersetResultSet(data, cursor_descr, BaseEngineSpec) + self.assertEqual(results.columns[0]["type"], "BIGINT") + + def test_data_as_list_of_lists(self): + data = [[1, "a"], [2, "b"]] + cursor_descr = [ + ("user_id", "INT", None, None, None, None, True), 
+ ("username", "STRING", None, None, None, None, True), + ] + results = SupersetResultSet(data, cursor_descr, BaseEngineSpec) + df = results.to_pandas_df() + self.assertEqual( + df_to_records(df), + [{"user_id": 1, "username": "a"}, {"user_id": 2, "username": "b"}], + ) + + def test_nullable_bool(self): + data = [(None,), (True,), (None,), (None,), (None,), (None,)] + cursor_descr = [("is_test", "bool", None, None, None, None, True)] + results = SupersetResultSet(data, cursor_descr, BaseEngineSpec) + self.assertEqual(results.columns[0]["type"], "BOOL") + df = results.to_pandas_df() + self.assertEqual( + df_to_records(df), + [ + {"is_test": None}, + {"is_test": True}, + {"is_test": None}, + {"is_test": None}, + {"is_test": None}, + {"is_test": None}, + ], + ) + + def test_nested_types(self): + data = [ + ( + 4, + [{"table_name": "unicode_test", "database_id": 1}], + [1, 2, 3], + {"chart_name": "scatter"}, + ), + ( + 3, + [{"table_name": "birth_names", "database_id": 1}], + [4, 5, 6], + {"chart_name": "plot"}, + ), + ] + cursor_descr = [("id",), ("dict_arr",), ("num_arr",), ("map_col",)] + results = SupersetResultSet(data, cursor_descr, BaseEngineSpec) + self.assertEqual(results.columns[0]["type"], "INT") + self.assertEqual(results.columns[1]["type"], "STRING") + self.assertEqual(results.columns[2]["type"], "STRING") + self.assertEqual(results.columns[3]["type"], "STRING") + df = results.to_pandas_df() + self.assertEqual( + df_to_records(df), + [ + { + "id": 4, + "dict_arr": '[{"table_name": "unicode_test", "database_id": 1}]', + "num_arr": "[1, 2, 3]", + "map_col": "{'chart_name': 'scatter'}", + }, + { + "id": 3, + "dict_arr": '[{"table_name": "birth_names", "database_id": 1}]', + "num_arr": "[4, 5, 6]", + "map_col": "{'chart_name': 'plot'}", + }, + ], + ) + + def test_single_column_multidim_nested_types(self): + data = [ + ( + [ + "test", + [ + [ + "foo", + 123456, + [ + [["test"], 3432546, 7657658766], + [["fake"], 656756765, 324324324324], + ], + ] + ], + ["test2", 43, 765765765], + None, + None, + ], + ) + ] + cursor_descr = [("metadata",)] + results = SupersetResultSet(data, cursor_descr, BaseEngineSpec) + self.assertEqual(results.columns[0]["type"], "STRING") + df = results.to_pandas_df() + self.assertEqual( + df_to_records(df), + [ + { + "metadata": '["test", [["foo", 123456, [[["test"], 3432546, 7657658766], [["fake"], 656756765, 324324324324]]]], ["test2", 43, 765765765], null, null]' + } + ], + ) + + def test_nested_list_types(self): + data = [([{"TestKey": [123456, "foo"]}],)] + cursor_descr = [("metadata",)] + results = SupersetResultSet(data, cursor_descr, BaseEngineSpec) + self.assertEqual(results.columns[0]["type"], "STRING") + df = results.to_pandas_df() + self.assertEqual( + df_to_records(df), [{"metadata": '[{"TestKey": [123456, "foo"]}]'}] + ) + + def test_empty_datetime(self): + data = [(None,)] + cursor_descr = [("ds", "timestamp", None, None, None, None, True)] + results = SupersetResultSet(data, cursor_descr, BaseEngineSpec) + self.assertEqual(results.columns[0]["type"], "TIMESTAMP") + + def test_no_type_coercion(self): + data = [("a", 1), ("b", 2)] + cursor_descr = [ + ("one", "varchar", None, None, None, None, True), + ("two", "int", None, None, None, None, True), + ] + results = SupersetResultSet(data, cursor_descr, BaseEngineSpec) + self.assertEqual(results.columns[0]["type"], "VARCHAR") + self.assertEqual(results.columns[1]["type"], "INT") + + def test_empty_data(self): + data = [] + cursor_descr = [ + ("emptyone", "varchar", None, None, None, None, True), + 
("emptytwo", "int", None, None, None, None, True), + ] + results = SupersetResultSet(data, cursor_descr, BaseEngineSpec) + self.assertEqual(results.columns, []) diff --git a/tests/integration_tests/security/__init__.py b/tests/integration_tests/security/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/security/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/security/analytics_db_safety_tests.py b/tests/integration_tests/security/analytics_db_safety_tests.py new file mode 100644 index 0000000000000..f6518fe93564d --- /dev/null +++ b/tests/integration_tests/security/analytics_db_safety_tests.py @@ -0,0 +1,43 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import pytest +from sqlalchemy.engine.url import make_url + +from superset.exceptions import SupersetSecurityException +from superset.security.analytics_db_safety import check_sqlalchemy_uri +from tests.integration_tests.base_tests import SupersetTestCase + + +class TestDBConnections(SupersetTestCase): + def test_check_sqlalchemy_uri_ok(self): + check_sqlalchemy_uri(make_url("postgres://user:password@test.com")) + + def test_check_sqlalchemy_url_sqlite(self): + with pytest.raises(SupersetSecurityException) as excinfo: + check_sqlalchemy_uri(make_url("sqlite:///home/superset/bad.db")) + assert ( + str(excinfo.value) + == "SQLiteDialect_pysqlite cannot be used as a data source for security reasons." + ) + + with pytest.raises(SupersetSecurityException) as excinfo: + check_sqlalchemy_uri(make_url("shillelagh:///home/superset/bad.db")) + assert ( + str(excinfo.value) + == "shillelagh cannot be used as a data source for security reasons." 
+ ) diff --git a/tests/integration_tests/security/api_tests.py b/tests/integration_tests/security/api_tests.py new file mode 100644 index 0000000000000..9a5a085c81c34 --- /dev/null +++ b/tests/integration_tests/security/api_tests.py @@ -0,0 +1,126 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# isort:skip_file +"""Tests for security api methods""" +import json + +import jwt +import pytest + +from flask_wtf.csrf import generate_csrf +from superset import db +from superset.embedded.dao import EmbeddedDAO +from superset.models.dashboard import Dashboard +from superset.utils.urls import get_url_host +from tests.integration_tests.base_tests import SupersetTestCase +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) + + +class TestSecurityCsrfApi(SupersetTestCase): + resource_name = "security" + + def _assert_get_csrf_token(self): + uri = f"api/v1/{self.resource_name}/csrf_token/" + response = self.client.get(uri) + self.assert200(response) + data = json.loads(response.data.decode("utf-8")) + self.assertEqual(generate_csrf(), data["result"]) + + def test_get_csrf_token(self): + """ + Security API: Test get CSRF token + """ + self.login(username="admin") + self._assert_get_csrf_token() + + def test_get_csrf_token_gamma(self): + """ + Security API: Test get CSRF token by gamma + """ + self.login(username="gamma") + self._assert_get_csrf_token() + + def test_get_csrf_unauthorized(self): + """ + Security API: Test get CSRF no login + """ + self.logout() + uri = f"api/v1/{self.resource_name}/csrf_token/" + response = self.client.get(uri) + self.assert401(response) + + +class TestSecurityGuestTokenApi(SupersetTestCase): + uri = f"api/v1/security/guest_token/" + + def test_post_guest_token_unauthenticated(self): + """ + Security API: Cannot create a guest token without authentication + """ + self.logout() + response = self.client.post(self.uri) + self.assert401(response) + + def test_post_guest_token_unauthorized(self): + """ + Security API: Cannot create a guest token without authorization + """ + self.login(username="gamma") + response = self.client.post(self.uri) + self.assert403(response) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_post_guest_token_authorized(self): + self.dash = db.session.query(Dashboard).filter_by(slug="births").first() + self.embedded = EmbeddedDAO.upsert(self.dash, []) + self.login(username="admin") + user = {"username": "bob", "first_name": "Bob", "last_name": "Also Bob"} + resource = {"type": "dashboard", "id": str(self.embedded.uuid)} + rls_rule = {"dataset": 1, "clause": "1=1"} + params = {"user": user, "resources": [resource], "rls": [rls_rule]} + + response = self.client.post( + self.uri, 
data=json.dumps(params), content_type="application/json" + ) + + self.assert200(response) + token = json.loads(response.data)["token"] + decoded_token = jwt.decode( + token, + self.app.config["GUEST_TOKEN_JWT_SECRET"], + audience=get_url_host(), + algorithms=["HS256"], + ) + self.assertEqual(user, decoded_token["user"]) + self.assertEqual(resource, decoded_token["resources"][0]) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_post_guest_token_bad_resources(self): + self.login(username="admin") + user = {"username": "bob", "first_name": "Bob", "last_name": "Also Bob"} + resource = {"type": "dashboard", "id": "bad-id"} + rls_rule = {"dataset": 1, "clause": "1=1"} + params = {"user": user, "resources": [resource], "rls": [rls_rule]} + + response = self.client.post( + self.uri, data=json.dumps(params), content_type="application/json" + ) + + self.assert400(response) diff --git a/tests/integration_tests/security/guest_token_security_tests.py b/tests/integration_tests/security/guest_token_security_tests.py new file mode 100644 index 0000000000000..78bd8bde86f51 --- /dev/null +++ b/tests/integration_tests/security/guest_token_security_tests.py @@ -0,0 +1,206 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
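+# Covers guest-token (embedded dashboard) access checks in the security manager.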
+"""Unit tests for Superset""" +from unittest import mock + +import pytest +from flask import g + +from superset import db, security_manager +from superset.dashboards.commands.exceptions import DashboardAccessDeniedError +from superset.embedded.dao import EmbeddedDAO +from superset.exceptions import SupersetSecurityException +from superset.models.dashboard import Dashboard +from superset.security.guest_token import GuestTokenResourceType +from superset.sql_parse import Table +from tests.integration_tests.base_tests import SupersetTestCase +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) + + +@mock.patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + EMBEDDED_SUPERSET=True, +) +class TestGuestUserSecurity(SupersetTestCase): + def authorized_guest(self): + return security_manager.get_guest_user_from_token( + {"user": {}, "resources": [{"type": "dashboard", "id": "some-uuid"}]} + ) + + def test_is_guest_user__regular_user(self): + is_guest = security_manager.is_guest_user(security_manager.find_user("admin")) + self.assertFalse(is_guest) + + def test_is_guest_user__anonymous(self): + is_guest = security_manager.is_guest_user(security_manager.get_anonymous_user()) + self.assertFalse(is_guest) + + def test_is_guest_user__guest_user(self): + is_guest = security_manager.is_guest_user(self.authorized_guest()) + self.assertTrue(is_guest) + + @mock.patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + EMBEDDED_SUPERSET=False, + ) + def test_is_guest_user__flag_off(self): + is_guest = security_manager.is_guest_user(self.authorized_guest()) + self.assertFalse(is_guest) + + def test_get_guest_user__regular_user(self): + g.user = security_manager.find_user("admin") + guest_user = security_manager.get_current_guest_user_if_guest() + self.assertIsNone(guest_user) + + def test_get_guest_user__anonymous_user(self): + g.user = security_manager.get_anonymous_user() + guest_user = security_manager.get_current_guest_user_if_guest() + self.assertIsNone(guest_user) + + def test_get_guest_user__guest_user(self): + g.user = self.authorized_guest() + guest_user = security_manager.get_current_guest_user_if_guest() + self.assertEqual(guest_user, g.user) + + def test_get_guest_user_roles_explicit(self): + guest = self.authorized_guest() + roles = security_manager.get_user_roles(guest) + self.assertEqual(guest.roles, roles) + + def test_get_guest_user_roles_implicit(self): + guest = self.authorized_guest() + g.user = guest + + roles = security_manager.get_user_roles() + self.assertEqual(guest.roles, roles) + + +@mock.patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + EMBEDDED_SUPERSET=True, +) +@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") +class TestGuestUserDashboardAccess(SupersetTestCase): + def setUp(self) -> None: + self.dash = db.session.query(Dashboard).filter_by(slug="births").first() + self.embedded = EmbeddedDAO.upsert(self.dash, []) + self.authorized_guest = security_manager.get_guest_user_from_token( + { + "user": {}, + "resources": [{"type": "dashboard", "id": str(self.embedded.uuid)}], + } + ) + self.unauthorized_guest = security_manager.get_guest_user_from_token( + { + "user": {}, + "resources": [ + {"type": "dashboard", "id": "06383667-3e02-4e5e-843f-44e9c5896b6c"} + ], + } + ) + + def test_has_guest_access__regular_user(self): + g.user = security_manager.find_user("admin") + has_guest_access = 
security_manager.has_guest_access(self.dash) + self.assertFalse(has_guest_access) + + def test_has_guest_access__anonymous_user(self): + g.user = security_manager.get_anonymous_user() + has_guest_access = security_manager.has_guest_access(self.dash) + self.assertFalse(has_guest_access) + + def test_has_guest_access__authorized_guest_user(self): + g.user = self.authorized_guest + has_guest_access = security_manager.has_guest_access(self.dash) + self.assertTrue(has_guest_access) + + def test_has_guest_access__authorized_guest_user__non_zero_resource_index(self): + # set up a user who has authorized access, plus another resource + guest = self.authorized_guest + guest.resources = [ + {"type": "dashboard", "id": "not-a-real-id"} + ] + guest.resources + g.user = guest + + has_guest_access = security_manager.has_guest_access(self.dash) + self.assertTrue(has_guest_access) + + def test_has_guest_access__unauthorized_guest_user__different_resource_id(self): + g.user = security_manager.get_guest_user_from_token( + { + "user": {}, + "resources": [{"type": "dashboard", "id": "not-a-real-id"}], + } + ) + has_guest_access = security_manager.has_guest_access(self.dash) + self.assertFalse(has_guest_access) + + def test_has_guest_access__unauthorized_guest_user__different_resource_type(self): + g.user = security_manager.get_guest_user_from_token( + {"user": {}, "resources": [{"type": "dirt", "id": self.embedded.uuid}]} + ) + has_guest_access = security_manager.has_guest_access(self.dash) + self.assertFalse(has_guest_access) + + def test_chart_raise_for_access_as_guest(self): + chart = self.dash.slices[0] + g.user = self.authorized_guest + + security_manager.raise_for_access(viz=chart) + + def test_chart_raise_for_access_as_unauthorized_guest(self): + chart = self.dash.slices[0] + g.user = self.unauthorized_guest + + with self.assertRaises(SupersetSecurityException): + security_manager.raise_for_access(viz=chart) + + def test_dataset_raise_for_access_as_guest(self): + dataset = self.dash.slices[0].datasource + g.user = self.authorized_guest + + security_manager.raise_for_access(datasource=dataset) + + def test_dataset_raise_for_access_as_unauthorized_guest(self): + dataset = self.dash.slices[0].datasource + g.user = self.unauthorized_guest + + with self.assertRaises(SupersetSecurityException): + security_manager.raise_for_access(datasource=dataset) + + def test_guest_token_does_not_grant_access_to_underlying_table(self): + sqla_table = self.dash.slices[0].table + table = Table(table=sqla_table.table_name) + + g.user = self.authorized_guest + + with self.assertRaises(Exception): + security_manager.raise_for_access(table=table, database=sqla_table.database) + + def test_raise_for_dashboard_access_as_guest(self): + g.user = self.authorized_guest + + security_manager.raise_for_dashboard_access(self.dash) + + def test_raise_for_dashboard_access_as_unauthorized_guest(self): + g.user = self.unauthorized_guest + + with self.assertRaises(DashboardAccessDeniedError): + security_manager.raise_for_dashboard_access(self.dash) diff --git a/tests/integration_tests/security/migrate_roles_tests.py b/tests/integration_tests/security/migrate_roles_tests.py new file mode 100644 index 0000000000000..a541f00952773 --- /dev/null +++ b/tests/integration_tests/security/migrate_roles_tests.py @@ -0,0 +1,280 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +"""Unit tests for alerting in Superset""" +import json +import logging +from contextlib import contextmanager +from unittest.mock import patch + +import pytest +from flask_appbuilder.security.sqla.models import Role + +from superset.extensions import db, security_manager +from superset.migrations.shared.security_converge import ( + add_pvms, + migrate_roles, + Pvm, + PvmMigrationMapType, +) +from tests.integration_tests.test_app import app + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + + +@contextmanager +def create_old_role(pvm_map: PvmMigrationMapType, external_pvms): + with app.app_context(): + pvms = [] + for old_pvm, new_pvms in pvm_map.items(): + pvms.append( + security_manager.add_permission_view_menu( + old_pvm.permission, old_pvm.view + ) + ) + for external_pvm in external_pvms: + pvms.append( + security_manager.find_permission_view_menu( + external_pvm.permission, external_pvm.view + ) + ) + + new_role = Role(name="Dummy Role", permissions=pvms) + db.session.add(new_role) + db.session.commit() + + yield new_role + + new_role = ( + db.session.query(Role).filter(Role.name == "Dummy Role").one_or_none() + ) + new_role.permissions = [] + db.session.merge(new_role) + for old_pvm, new_pvms in pvm_map.items(): + security_manager.del_permission_view_menu(old_pvm.permission, old_pvm.view) + for new_pvm in new_pvms: + security_manager.del_permission_view_menu( + new_pvm.permission, new_pvm.view + ) + + db.session.delete(new_role) + db.session.commit() + + +@pytest.mark.parametrize( + "description, new_pvms, pvm_map, external_pvms, deleted_views, deleted_permissions", + [ + ( + "Many to one readonly", + {"NewDummy": ("can_read",)}, + { + Pvm("DummyView", "can_list"): (Pvm("NewDummy", "can_read"),), + Pvm("DummyView", "can_show"): (Pvm("NewDummy", "can_read"),), + }, + (), + ("DummyView",), + (), + ), + ( + "Many to one with new permission", + {"NewDummy": ("can_new_perm", "can_write")}, + { + Pvm("DummyView", "can_list"): (Pvm("NewDummy", "can_new_perm"),), + Pvm("DummyView", "can_show"): (Pvm("NewDummy", "can_write"),), + }, + (), + ("DummyView",), + (), + ), + ( + "Many to one with multiple permissions", + { + "NewDummy": ( + "can_read", + "can_write", + ) + }, + { + Pvm("DummyView", "can_list"): (Pvm("NewDummy", "can_read"),), + Pvm("DummyView", "can_show"): (Pvm("NewDummy", "can_read"),), + Pvm("DummyView", "can_add"): (Pvm("NewDummy", "can_write"),), + Pvm("DummyView", "can_delete"): (Pvm("NewDummy", "can_write"),), + }, + (), + ("DummyView",), + (), + ), + ( + "Many to one with multiple views", + { + "NewDummy": ( + "can_read", + "can_write", + ) + }, + { + Pvm("DummyView", "can_list"): (Pvm("NewDummy", "can_read"),), + Pvm("DummyView", "can_show"): (Pvm("NewDummy", "can_read"),), + Pvm("DummyView", "can_add"): (Pvm("NewDummy", "can_write"),), + Pvm("DummyView", "can_delete"): (Pvm("NewDummy", "can_write"),), + Pvm("DummySecondView", "can_list"): (Pvm("NewDummy", "can_read"),), + 
Pvm("DummySecondView", "can_show"): (Pvm("NewDummy", "can_read"),), + Pvm("DummySecondView", "can_add"): (Pvm("NewDummy", "can_write"),), + Pvm("DummySecondView", "can_delete"): (Pvm("NewDummy", "can_write"),), + }, + (), + ("DummyView", "DummySecondView"), + (), + ), + ( + "Many to one with existing permission-view (pvm)", + { + "NewDummy": ( + "can_read", + "can_write", + ) + }, + { + Pvm("DummyView", "can_list"): (Pvm("NewDummy", "can_read"),), + Pvm("DummyView", "can_add"): (Pvm("NewDummy", "can_write"),), + }, + (Pvm("UserDBModelView", "can_list"),), + ("DummyView",), + (), + ), + ( + "Many to one with existing multiple permission-view (pvm)", + { + "NewDummy": ( + "can_read", + "can_write", + ) + }, + { + Pvm("DummyView", "can_list"): (Pvm("NewDummy", "can_read"),), + Pvm("DummyView", "can_add"): (Pvm("NewDummy", "can_write"),), + Pvm("DummySecondView", "can_list"): (Pvm("NewDummy", "can_read"),), + Pvm("DummySecondView", "can_add"): (Pvm("NewDummy", "can_write"),), + }, + ( + Pvm("UserDBModelView", "can_list"), + Pvm("UserDBModelView", "can_add"), + ), + ("DummyView",), + (), + ), + ( + "Many to one with old permission that gets deleted", + { + "NewDummy": ( + "can_read", + "can_write", + ) + }, + { + Pvm("DummyView", "can_new_perm"): (Pvm("NewDummy", "can_read"),), + Pvm("DummyView", "can_add"): (Pvm("NewDummy", "can_write"),), + }, + (), + ("DummyView",), + ("can_new_perm",), + ), + ( + "Many to Many (normally should be a downgrade)", + { + "DummyView": ( + "can_list", + "can_show", + "can_add", + ) + }, + { + Pvm("NewDummy", "can_read"): ( + Pvm("DummyView", "can_list"), + Pvm("DummyView", "can_show"), + ), + Pvm("NewDummy", "can_write"): (Pvm("DummyView", "can_add"),), + }, + (), + ("NewDummy",), + (), + ), + ( + "Many to Many delete old permissions", + { + "DummyView": ( + "can_list", + "can_show", + "can_add", + ) + }, + { + Pvm("NewDummy", "can_new_perm1"): ( + Pvm("DummyView", "can_list"), + Pvm("DummyView", "can_show"), + ), + Pvm( + "NewDummy", + "can_new_perm2", + ): (Pvm("DummyView", "can_add"),), + }, + (), + ("NewDummy",), + ("can_new_perm1", "can_new_perm2"), + ), + ], +) +def test_migrate_role( + description, new_pvms, pvm_map, external_pvms, deleted_views, deleted_permissions +): + """ + Permission migration: generic tests + """ + logger.info(description) + with create_old_role(pvm_map, external_pvms) as old_role: + role_name = old_role.name + session = db.session + + # Run migrations + add_pvms(session, new_pvms) + migrate_roles(session, pvm_map) + + role = db.session.query(Role).filter(Role.name == role_name).one_or_none() + for old_pvm, new_pvms in pvm_map.items(): + old_pvm_model = security_manager.find_permission_view_menu( + old_pvm.permission, old_pvm.view + ) + assert old_pvm_model is None + new_pvm_model = security_manager.find_permission_view_menu( + new_pvms[0].permission, new_pvms[0].view + ) + assert new_pvm_model is not None + assert new_pvm_model in role.permissions + # assert deleted view menus + for deleted_view in deleted_views: + assert security_manager.find_view_menu(deleted_view) is None + # assert deleted permissions + for deleted_permission in deleted_permissions: + assert security_manager.find_permission(deleted_permission) is None + # assert externals are still there + for external_pvm in external_pvms: + assert ( + security_manager.find_permission_view_menu( + external_pvm.permission, external_pvm.view + ) + is not None + ) diff --git a/tests/integration_tests/security/row_level_security_tests.py 
b/tests/integration_tests/security/row_level_security_tests.py new file mode 100644 index 0000000000000..ebd95cae39bd7 --- /dev/null +++ b/tests/integration_tests/security/row_level_security_tests.py @@ -0,0 +1,408 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# isort:skip_file +import re +from typing import Any, Dict, List, Optional +from unittest import mock + +import pytest +from flask import g + +from superset import db, security_manager +from superset.connectors.sqla.models import RowLevelSecurityFilter, SqlaTable +from superset.security.guest_token import ( + GuestTokenResourceType, + GuestUser, +) +from ..base_tests import SupersetTestCase +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) +from tests.integration_tests.fixtures.energy_dashboard import ( + load_energy_table_with_slice, + load_energy_table_data, +) +from tests.integration_tests.fixtures.unicode_dashboard import ( + load_unicode_dashboard_with_slice, + load_unicode_data, +) + + +class TestRowLevelSecurity(SupersetTestCase): + """ + Testing Row Level Security + """ + + rls_entry = None + query_obj: Dict[str, Any] = dict( + groupby=[], + metrics=None, + filter=[], + is_timeseries=False, + columns=["value"], + granularity=None, + from_dttm=None, + to_dttm=None, + extras={}, + ) + NAME_AB_ROLE = "NameAB" + NAME_Q_ROLE = "NameQ" + NAMES_A_REGEX = re.compile(r"name like 'A%'") + NAMES_B_REGEX = re.compile(r"name like 'B%'") + NAMES_Q_REGEX = re.compile(r"name like 'Q%'") + BASE_FILTER_REGEX = re.compile(r"gender = 'boy'") + + def setUp(self): + session = db.session + + # Create roles + self.role_ab = security_manager.add_role(self.NAME_AB_ROLE) + self.role_q = security_manager.add_role(self.NAME_Q_ROLE) + gamma_user = security_manager.find_user(username="gamma") + gamma_user.roles.append(self.role_ab) + gamma_user.roles.append(self.role_q) + self.create_user_with_roles("NoRlsRoleUser", ["Gamma"]) + session.commit() + + # Create regular RowLevelSecurityFilter (energy_usage, unicode_test) + self.rls_entry1 = RowLevelSecurityFilter() + self.rls_entry1.name = "rls_entry1" + self.rls_entry1.tables.extend( + session.query(SqlaTable) + .filter(SqlaTable.table_name.in_(["energy_usage", "unicode_test"])) + .all() + ) + self.rls_entry1.filter_type = "Regular" + self.rls_entry1.clause = "value > {{ cache_key_wrapper(1) }}" + self.rls_entry1.group_key = None + self.rls_entry1.roles.append(security_manager.find_role("Gamma")) + self.rls_entry1.roles.append(security_manager.find_role("Alpha")) + db.session.add(self.rls_entry1) + + # Create regular RowLevelSecurityFilter (birth_names name starts with A or B) + self.rls_entry2 = RowLevelSecurityFilter() + self.rls_entry2.name = "rls_entry2" + 
self.rls_entry2.tables.extend( + session.query(SqlaTable) + .filter(SqlaTable.table_name.in_(["birth_names"])) + .all() + ) + self.rls_entry2.filter_type = "Regular" + self.rls_entry2.clause = "name like 'A%' or name like 'B%'" + self.rls_entry2.group_key = "name" + self.rls_entry2.roles.append(security_manager.find_role("NameAB")) + db.session.add(self.rls_entry2) + + # Create Regular RowLevelSecurityFilter (birth_names name starts with Q) + self.rls_entry3 = RowLevelSecurityFilter() + self.rls_entry3.name = "rls_entry3" + self.rls_entry3.tables.extend( + session.query(SqlaTable) + .filter(SqlaTable.table_name.in_(["birth_names"])) + .all() + ) + self.rls_entry3.filter_type = "Regular" + self.rls_entry3.clause = "name like 'Q%'" + self.rls_entry3.group_key = "name" + self.rls_entry3.roles.append(security_manager.find_role("NameQ")) + db.session.add(self.rls_entry3) + + # Create Base RowLevelSecurityFilter (birth_names boys) + self.rls_entry4 = RowLevelSecurityFilter() + self.rls_entry4.name = "rls_entry4" + self.rls_entry4.tables.extend( + session.query(SqlaTable) + .filter(SqlaTable.table_name.in_(["birth_names"])) + .all() + ) + self.rls_entry4.filter_type = "Base" + self.rls_entry4.clause = "gender = 'boy'" + self.rls_entry4.group_key = "gender" + self.rls_entry4.roles.append(security_manager.find_role("Admin")) + db.session.add(self.rls_entry4) + + db.session.commit() + + def tearDown(self): + session = db.session + session.delete(self.rls_entry1) + session.delete(self.rls_entry2) + session.delete(self.rls_entry3) + session.delete(self.rls_entry4) + session.delete(security_manager.find_role("NameAB")) + session.delete(security_manager.find_role("NameQ")) + session.delete(self.get_user("NoRlsRoleUser")) + session.commit() + + @pytest.fixture() + def create_dataset(self): + with self.create_app().app_context(): + + dataset = SqlaTable(database_id=1, schema=None, table_name="table1") + db.session.add(dataset) + db.session.flush() + db.session.commit() + + yield dataset + + # rollback changes (assuming cascade delete) + db.session.delete(dataset) + db.session.commit() + + def _get_test_dataset(self): + return ( + db.session.query(SqlaTable).filter(SqlaTable.table_name == "table1") + ).one_or_none() + + @pytest.mark.usefixtures("create_dataset") + def test_model_view_rls_add_success(self): + self.login(username="admin") + test_dataset = self._get_test_dataset() + rv = self.client.post( + "/rowlevelsecurityfiltersmodelview/add", + data=dict( + name="rls1", + description="Some description", + filter_type="Regular", + tables=[test_dataset.id], + roles=[security_manager.find_role("Alpha").id], + group_key="group_key_1", + clause="client_id=1", + ), + follow_redirects=True, + ) + self.assertEqual(rv.status_code, 200) + rls1 = ( + db.session.query(RowLevelSecurityFilter).filter_by(name="rls1") + ).one_or_none() + assert rls1 is not None + + # Revert data changes + db.session.delete(rls1) + db.session.commit() + + @pytest.mark.usefixtures("create_dataset") + def test_model_view_rls_add_name_unique(self): + self.login(username="admin") + test_dataset = self._get_test_dataset() + rv = self.client.post( + "/rowlevelsecurityfiltersmodelview/add", + data=dict( + name="rls_entry1", + description="Some description", + filter_type="Regular", + tables=[test_dataset.id], + roles=[security_manager.find_role("Alpha").id], + group_key="group_key_1", + clause="client_id=1", + ), + follow_redirects=True, + ) + self.assertEqual(rv.status_code, 200) + data = rv.data.decode("utf-8") + assert "Already exists." 
in data + + @pytest.mark.usefixtures("create_dataset") + def test_model_view_rls_add_tables_required(self): + self.login(username="admin") + rv = self.client.post( + "/rowlevelsecurityfiltersmodelview/add", + data=dict( + name="rls1", + description="Some description", + filter_type="Regular", + tables=[], + roles=[security_manager.find_role("Alpha").id], + group_key="group_key_1", + clause="client_id=1", + ), + follow_redirects=True, + ) + self.assertEqual(rv.status_code, 200) + data = rv.data.decode("utf-8") + assert "This field is required." in data + + @pytest.mark.usefixtures("load_energy_table_with_slice") + def test_rls_filter_alters_energy_query(self): + g.user = self.get_user(username="alpha") + tbl = self.get_table(name="energy_usage") + sql = tbl.get_query_str(self.query_obj) + assert tbl.get_extra_cache_keys(self.query_obj) == [1] + assert "value > 1" in sql + + @pytest.mark.usefixtures("load_energy_table_with_slice") + def test_rls_filter_doesnt_alter_energy_query(self): + g.user = self.get_user( + username="admin" + ) # self.login() doesn't actually set the user + tbl = self.get_table(name="energy_usage") + sql = tbl.get_query_str(self.query_obj) + assert tbl.get_extra_cache_keys(self.query_obj) == [] + assert "value > 1" not in sql + + @pytest.mark.usefixtures("load_unicode_dashboard_with_slice") + def test_multiple_table_filter_alters_another_tables_query(self): + g.user = self.get_user( + username="alpha" + ) # self.login() doesn't actually set the user + tbl = self.get_table(name="unicode_test") + sql = tbl.get_query_str(self.query_obj) + assert tbl.get_extra_cache_keys(self.query_obj) == [1] + assert "value > 1" in sql + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_rls_filter_alters_gamma_birth_names_query(self): + g.user = self.get_user(username="gamma") + tbl = self.get_table(name="birth_names") + sql = tbl.get_query_str(self.query_obj) + + # establish that the filters are grouped together correctly with + # ANDs, ORs and parens in the correct place + assert ( + "WHERE ((name like 'A%'\n or name like 'B%')\n OR (name like 'Q%'))\n AND (gender = 'boy');" + in sql + ) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_rls_filter_alters_no_role_user_birth_names_query(self): + g.user = self.get_user(username="NoRlsRoleUser") + tbl = self.get_table(name="birth_names") + sql = tbl.get_query_str(self.query_obj) + + # gamma's filters should not be present query + assert not self.NAMES_A_REGEX.search(sql) + assert not self.NAMES_B_REGEX.search(sql) + assert not self.NAMES_Q_REGEX.search(sql) + # base query should be present + assert self.BASE_FILTER_REGEX.search(sql) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_rls_filter_doesnt_alter_admin_birth_names_query(self): + g.user = self.get_user(username="admin") + tbl = self.get_table(name="birth_names") + sql = tbl.get_query_str(self.query_obj) + + # no filters are applied for admin user + assert not self.NAMES_A_REGEX.search(sql) + assert not self.NAMES_B_REGEX.search(sql) + assert not self.NAMES_Q_REGEX.search(sql) + assert not self.BASE_FILTER_REGEX.search(sql) + + +RLS_ALICE_REGEX = re.compile(r"name = 'Alice'") +RLS_GENDER_REGEX = re.compile(r"AND \(gender = 'girl'\)") + + +@mock.patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + EMBEDDED_SUPERSET=True, +) +class GuestTokenRowLevelSecurityTests(SupersetTestCase): + query_obj: Dict[str, Any] = dict( + groupby=[], + metrics=None, + filter=[], + 
is_timeseries=False, + columns=["value"], + granularity=None, + from_dttm=None, + to_dttm=None, + extras={}, + ) + + def default_rls_rule(self): + return { + "dataset": self.get_table(name="birth_names").id, + "clause": "name = 'Alice'", + } + + def guest_user_with_rls(self, rules: Optional[List[Any]] = None) -> GuestUser: + if rules is None: + rules = [self.default_rls_rule()] + return security_manager.get_guest_user_from_token( + { + "user": {}, + "resources": [{"type": GuestTokenResourceType.DASHBOARD.value}], + "rls_rules": rules, + } + ) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_rls_filter_alters_query(self): + g.user = self.guest_user_with_rls() + tbl = self.get_table(name="birth_names") + sql = tbl.get_query_str(self.query_obj) + + self.assertRegex(sql, RLS_ALICE_REGEX) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_rls_filter_does_not_alter_unrelated_query(self): + g.user = self.guest_user_with_rls( + rules=[ + { + "dataset": self.get_table(name="birth_names").id + 1, + "clause": "name = 'Alice'", + } + ] + ) + tbl = self.get_table(name="birth_names") + sql = tbl.get_query_str(self.query_obj) + + self.assertNotRegex(sql, RLS_ALICE_REGEX) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_multiple_rls_filters_are_unionized(self): + g.user = self.guest_user_with_rls( + rules=[ + self.default_rls_rule(), + { + "dataset": self.get_table(name="birth_names").id, + "clause": "gender = 'girl'", + }, + ] + ) + tbl = self.get_table(name="birth_names") + sql = tbl.get_query_str(self.query_obj) + + self.assertRegex(sql, RLS_ALICE_REGEX) + self.assertRegex(sql, RLS_GENDER_REGEX) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + @pytest.mark.usefixtures("load_energy_table_with_slice") + def test_rls_filter_for_all_datasets(self): + births = self.get_table(name="birth_names") + energy = self.get_table(name="energy_usage") + guest = self.guest_user_with_rls(rules=[{"clause": "name = 'Alice'"}]) + guest.resources.append({"type": "dashboard", "id": energy.id}) + g.user = guest + births_sql = births.get_query_str(self.query_obj) + energy_sql = energy.get_query_str(self.query_obj) + + self.assertRegex(births_sql, RLS_ALICE_REGEX) + self.assertRegex(energy_sql, RLS_ALICE_REGEX) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_dataset_id_can_be_string(self): + dataset = self.get_table(name="birth_names") + str_id = str(dataset.id) + g.user = self.guest_user_with_rls( + rules=[{"dataset": str_id, "clause": "name = 'Alice'"}] + ) + sql = dataset.get_query_str(self.query_obj) + + self.assertRegex(sql, RLS_ALICE_REGEX) diff --git a/tests/integration_tests/security_tests.py b/tests/integration_tests/security_tests.py new file mode 100644 index 0000000000000..c65f5a6dd88d2 --- /dev/null +++ b/tests/integration_tests/security_tests.py @@ -0,0 +1,2032 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# isort:skip_file +import inspect +import time +import unittest +from collections import namedtuple +from unittest import mock +from unittest.mock import Mock, patch, call, ANY +from typing import Any + +import jwt +import prison +import pytest + +from flask import current_app +from flask_appbuilder.security.sqla.models import Role +from superset.datasource.dao import DatasourceDAO +from superset.models.dashboard import Dashboard +from superset import app, appbuilder, db, security_manager, viz +from superset.connectors.sqla.models import SqlaTable +from superset.errors import ErrorLevel, SupersetError, SupersetErrorType +from superset.exceptions import SupersetSecurityException +from superset.models.core import Database +from superset.models.slice import Slice +from superset.sql_parse import Table +from superset.utils.core import ( + DatasourceType, + backend, + get_example_default_schema, +) +from superset.utils.database import get_example_database +from superset.utils.urls import get_url_host +from superset.views.access_requests import AccessRequestsModelView + +from .base_tests import SupersetTestCase +from tests.integration_tests.fixtures.public_role import ( + public_role_like_gamma, + public_role_like_test_role, +) +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) +from tests.integration_tests.fixtures.world_bank_dashboard import ( + load_world_bank_dashboard_with_slices, + load_world_bank_data, +) + +NEW_SECURITY_CONVERGE_VIEWS = ( + "Annotation", + "Database", + "Dataset", + "Dashboard", + "CssTemplate", + "Chart", + "Query", + "SavedQuery", +) + + +def get_perm_tuples(role_name): + perm_set = set() + for perm in security_manager.find_role(role_name).permissions: + perm_set.add((perm.permission.name, perm.view_menu.name)) + return perm_set + + +SCHEMA_ACCESS_ROLE = "schema_access_role" + + +def create_schema_perm(view_menu_name: str) -> None: + permission = "schema_access" + security_manager.add_permission_view_menu(permission, view_menu_name) + perm_view = security_manager.find_permission_view_menu(permission, view_menu_name) + security_manager.add_permission_role( + security_manager.find_role(SCHEMA_ACCESS_ROLE), perm_view + ) + return None + + +def delete_schema_perm(view_menu_name: str) -> None: + pv = security_manager.find_permission_view_menu("schema_access", "[examples].[2]") + security_manager.del_permission_role( + security_manager.find_role(SCHEMA_ACCESS_ROLE), pv + ) + security_manager.del_permission_view_menu("schema_access", "[examples].[2]") + return None + + +class TestRolePermission(SupersetTestCase): + """Testing export role permissions.""" + + def setUp(self): + schema = get_example_default_schema() + session = db.session + security_manager.add_role(SCHEMA_ACCESS_ROLE) + session.commit() + + ds = ( + db.session.query(SqlaTable) + .filter_by(table_name="wb_health_population", schema=schema) + .first() + ) + ds.schema = "temp_schema" + ds.schema_perm = ds.get_schema_perm() + + ds_slices = ( + session.query(Slice) + .filter_by(datasource_type=DatasourceType.TABLE) + 
.filter_by(datasource_id=ds.id) + .all() + ) + for s in ds_slices: + s.schema_perm = ds.schema_perm + create_schema_perm("[examples].[temp_schema]") + gamma_user = security_manager.find_user(username="gamma") + gamma_user.roles.append(security_manager.find_role(SCHEMA_ACCESS_ROLE)) + session.commit() + + def tearDown(self): + session = db.session + ds = ( + session.query(SqlaTable) + .filter_by(table_name="wb_health_population", schema="temp_schema") + .first() + ) + schema_perm = ds.schema_perm + ds.schema = get_example_default_schema() + ds.schema_perm = None + ds_slices = ( + session.query(Slice) + .filter_by(datasource_type=DatasourceType.TABLE) + .filter_by(datasource_id=ds.id) + .all() + ) + for s in ds_slices: + s.schema_perm = None + + delete_schema_perm(schema_perm) + session.delete(security_manager.find_role(SCHEMA_ACCESS_ROLE)) + session.commit() + + def test_after_insert_dataset(self): + security_manager.on_view_menu_after_insert = Mock() + security_manager.on_permission_view_after_insert = Mock() + + session = db.session + tmp_db1 = Database(database_name="tmp_db1", sqlalchemy_uri="sqlite://") + session.add(tmp_db1) + + table = SqlaTable( + schema="tmp_schema", + table_name="tmp_perm_table", + database=tmp_db1, + ) + session.add(table) + session.commit() + + table = session.query(SqlaTable).filter_by(table_name="tmp_perm_table").one() + self.assertEqual(table.perm, f"[tmp_db1].[tmp_perm_table](id:{table.id})") + + pvm_dataset = security_manager.find_permission_view_menu( + "datasource_access", table.perm + ) + pvm_schema = security_manager.find_permission_view_menu( + "schema_access", table.schema_perm + ) + + # Assert dataset permission is created and local perms are ok + self.assertIsNotNone(pvm_dataset) + self.assertEqual(table.perm, f"[tmp_db1].[tmp_perm_table](id:{table.id})") + self.assertEqual(table.schema_perm, "[tmp_db1].[tmp_schema]") + self.assertIsNotNone(pvm_schema) + + # assert on permission hooks + call_args = security_manager.on_permission_view_after_insert.call_args + assert call_args.args[2].id == pvm_schema.id + + security_manager.on_permission_view_after_insert.assert_has_calls( + [ + call(ANY, ANY, ANY), + call(ANY, ANY, ANY), + ] + ) + + # Cleanup + session.delete(table) + session.delete(tmp_db1) + session.commit() + + def test_after_insert_dataset_rollback(self): + session = db.session + tmp_db1 = Database(database_name="tmp_db1", sqlalchemy_uri="sqlite://") + session.add(tmp_db1) + session.commit() + + table = SqlaTable( + schema="tmp_schema", + table_name="tmp_table", + database=tmp_db1, + ) + session.add(table) + session.flush() + + pvm_dataset = security_manager.find_permission_view_menu( + "datasource_access", f"[tmp_db1].[tmp_table](id:{table.id})" + ) + self.assertIsNotNone(pvm_dataset) + table_id = table.id + session.rollback() + + table = session.query(SqlaTable).filter_by(table_name="tmp_table").one_or_none() + self.assertIsNone(table) + pvm_dataset = security_manager.find_permission_view_menu( + "datasource_access", f"[tmp_db1].[tmp_table](id:{table_id})" + ) + self.assertIsNone(pvm_dataset) + + session.delete(tmp_db1) + session.commit() + + def test_after_insert_dataset_table_none(self): + session = db.session + table = SqlaTable( + schema="tmp_schema", + table_name="tmp_perm_table", + # Setting database_id instead of database will skip permission creation + database_id=get_example_database().id, + ) + session.add(table) + session.commit() + + stored_table = ( + session.query(SqlaTable).filter_by(table_name="tmp_perm_table").one() + ) + # 
Assert permission is created + self.assertIsNotNone( + security_manager.find_permission_view_menu( + "datasource_access", stored_table.perm + ) + ) + # Assert no bogus permission is created + self.assertIsNone( + security_manager.find_permission_view_menu( + "datasource_access", f"[None].[tmp_perm_table](id:{stored_table.id})" + ) + ) + + # Cleanup + session.delete(table) + session.commit() + + def test_after_insert_database(self): + security_manager.on_permission_view_after_insert = Mock() + + session = db.session + tmp_db1 = Database(database_name="tmp_db1", sqlalchemy_uri="sqlite://") + session.add(tmp_db1) + + tmp_db1 = session.query(Database).filter_by(database_name="tmp_db1").one() + self.assertEqual(tmp_db1.perm, f"[tmp_db1].(id:{tmp_db1.id})") + tmp_db1_pvm = security_manager.find_permission_view_menu( + "database_access", tmp_db1.perm + ) + self.assertIsNotNone(tmp_db1_pvm) + + # Assert the hook is called + security_manager.on_permission_view_after_insert.assert_has_calls( + [ + call(ANY, ANY, ANY), + ] + ) + call_args = security_manager.on_permission_view_after_insert.call_args + assert call_args.args[2].id == tmp_db1_pvm.id + session.delete(tmp_db1) + session.commit() + + def test_after_insert_database_rollback(self): + session = db.session + tmp_db1 = Database(database_name="tmp_db1", sqlalchemy_uri="sqlite://") + session.add(tmp_db1) + session.flush() + + pvm_database = security_manager.find_permission_view_menu( + "database_access", f"[tmp_db1].(id:{tmp_db1.id})" + ) + self.assertIsNotNone(pvm_database) + session.rollback() + + pvm_database = security_manager.find_permission_view_menu( + "database_access", f"[tmp_db1](id:{tmp_db1.id})" + ) + self.assertIsNone(pvm_database) + + def test_after_update_database__perm_database_access(self): + security_manager.on_view_menu_after_update = Mock() + + session = db.session + tmp_db1 = Database(database_name="tmp_db1", sqlalchemy_uri="sqlite://") + session.add(tmp_db1) + session.commit() + tmp_db1 = session.query(Database).filter_by(database_name="tmp_db1").one() + + self.assertIsNotNone( + security_manager.find_permission_view_menu("database_access", tmp_db1.perm) + ) + + tmp_db1.database_name = "tmp_db2" + session.commit() + + # Assert that the old permission was updated + self.assertIsNone( + security_manager.find_permission_view_menu( + "database_access", f"[tmp_db1].(id:{tmp_db1.id})" + ) + ) + # Assert that the db permission was updated + self.assertIsNotNone( + security_manager.find_permission_view_menu( + "database_access", f"[tmp_db2].(id:{tmp_db1.id})" + ) + ) + + # Assert the hook is called + tmp_db1_view_menu = security_manager.find_view_menu( + f"[tmp_db2].(id:{tmp_db1.id})" + ) + security_manager.on_view_menu_after_update.assert_has_calls( + [ + call(ANY, ANY, tmp_db1_view_menu), + ] + ) + + session.delete(tmp_db1) + session.commit() + + def test_after_update_database_rollback(self): + session = db.session + tmp_db1 = Database(database_name="tmp_db1", sqlalchemy_uri="sqlite://") + session.add(tmp_db1) + session.commit() + tmp_db1 = session.query(Database).filter_by(database_name="tmp_db1").one() + + self.assertIsNotNone( + security_manager.find_permission_view_menu("database_access", tmp_db1.perm) + ) + + tmp_db1.database_name = "tmp_db2" + session.flush() + + # Assert that the old permission was updated + self.assertIsNone( + security_manager.find_permission_view_menu( + "database_access", f"[tmp_db1].(id:{tmp_db1.id})" + ) + ) + # Assert that the db permission was updated + self.assertIsNotNone( + 
security_manager.find_permission_view_menu( + "database_access", f"[tmp_db2].(id:{tmp_db1.id})" + ) + ) + + session.rollback() + self.assertIsNotNone( + security_manager.find_permission_view_menu( + "database_access", f"[tmp_db1].(id:{tmp_db1.id})" + ) + ) + # Assert that the db permission was updated + self.assertIsNone( + security_manager.find_permission_view_menu( + "database_access", f"[tmp_db2].(id:{tmp_db1.id})" + ) + ) + + session.delete(tmp_db1) + session.commit() + + def test_after_update_database__perm_database_access_exists(self): + security_manager.on_permission_view_after_delete = Mock() + + session = db.session + # Add a bogus existing permission before the change + + tmp_db1 = Database(database_name="tmp_db1", sqlalchemy_uri="sqlite://") + session.add(tmp_db1) + session.commit() + tmp_db1 = session.query(Database).filter_by(database_name="tmp_db1").one() + security_manager.add_permission_view_menu( + "database_access", f"[tmp_db2].(id:{tmp_db1.id})" + ) + + self.assertIsNotNone( + security_manager.find_permission_view_menu("database_access", tmp_db1.perm) + ) + + tmp_db1.database_name = "tmp_db2" + session.commit() + + # Assert that the old permission was updated + self.assertIsNone( + security_manager.find_permission_view_menu( + "database_access", f"[tmp_db1].(id:{tmp_db1.id})" + ) + ) + # Assert that the db permission was updated + self.assertIsNotNone( + security_manager.find_permission_view_menu( + "database_access", f"[tmp_db2].(id:{tmp_db1.id})" + ) + ) + + security_manager.on_permission_view_after_delete.assert_has_calls( + [ + call(ANY, ANY, ANY), + ] + ) + + session.delete(tmp_db1) + session.commit() + + def test_after_update_database__perm_datasource_access(self): + security_manager.on_view_menu_after_update = Mock() + + session = db.session + tmp_db1 = Database(database_name="tmp_db1", sqlalchemy_uri="sqlite://") + session.add(tmp_db1) + session.commit() + + table1 = SqlaTable( + schema="tmp_schema", + table_name="tmp_table1", + database=tmp_db1, + ) + session.add(table1) + table2 = SqlaTable( + schema="tmp_schema", + table_name="tmp_table2", + database=tmp_db1, + ) + session.add(table2) + session.commit() + slice1 = Slice( + datasource_id=table1.id, + datasource_type=DatasourceType.TABLE, + datasource_name="tmp_table1", + slice_name="tmp_slice1", + ) + session.add(slice1) + session.commit() + slice1 = session.query(Slice).filter_by(slice_name="tmp_slice1").one() + table1 = session.query(SqlaTable).filter_by(table_name="tmp_table1").one() + table2 = session.query(SqlaTable).filter_by(table_name="tmp_table2").one() + + # assert initial perms + self.assertIsNotNone( + security_manager.find_permission_view_menu( + "datasource_access", f"[tmp_db1].[tmp_table1](id:{table1.id})" + ) + ) + self.assertIsNotNone( + security_manager.find_permission_view_menu( + "datasource_access", f"[tmp_db1].[tmp_table2](id:{table2.id})" + ) + ) + self.assertEqual(slice1.perm, f"[tmp_db1].[tmp_table1](id:{table1.id})") + self.assertEqual(table1.perm, f"[tmp_db1].[tmp_table1](id:{table1.id})") + self.assertEqual(table2.perm, f"[tmp_db1].[tmp_table2](id:{table2.id})") + + # Refresh and update the database name + tmp_db1 = session.query(Database).filter_by(database_name="tmp_db1").one() + tmp_db1.database_name = "tmp_db2" + session.commit() + + # Assert that the old permissions were updated + self.assertIsNone( + security_manager.find_permission_view_menu( + "datasource_access", f"[tmp_db1].[tmp_table1](id:{table1.id})" + ) + ) + self.assertIsNone( + 
security_manager.find_permission_view_menu( + "datasource_access", f"[tmp_db1].[tmp_table2](id:{table2.id})" + ) + ) + + # Assert that the db permission was updated + self.assertIsNotNone( + security_manager.find_permission_view_menu( + "datasource_access", f"[tmp_db2].[tmp_table1](id:{table1.id})" + ) + ) + self.assertIsNotNone( + security_manager.find_permission_view_menu( + "datasource_access", f"[tmp_db2].[tmp_table2](id:{table2.id})" + ) + ) + self.assertEqual(slice1.perm, f"[tmp_db2].[tmp_table1](id:{table1.id})") + self.assertEqual(table1.perm, f"[tmp_db2].[tmp_table1](id:{table1.id})") + self.assertEqual(table2.perm, f"[tmp_db2].[tmp_table2](id:{table2.id})") + + # Assert hooks are called + tmp_db1_view_menu = security_manager.find_view_menu( + f"[tmp_db2].(id:{tmp_db1.id})" + ) + table1_view_menu = security_manager.find_view_menu( + f"[tmp_db2].[tmp_table1](id:{table1.id})" + ) + table2_view_menu = security_manager.find_view_menu( + f"[tmp_db2].[tmp_table2](id:{table2.id})" + ) + security_manager.on_view_menu_after_update.assert_has_calls( + [ + call(ANY, ANY, tmp_db1_view_menu), + call(ANY, ANY, table1_view_menu), + call(ANY, ANY, table2_view_menu), + ] + ) + + session.delete(slice1) + session.delete(table1) + session.delete(table2) + session.delete(tmp_db1) + session.commit() + + def test_after_delete_database(self): + session = db.session + tmp_db1 = Database(database_name="tmp_db1", sqlalchemy_uri="sqlite://") + session.add(tmp_db1) + session.commit() + tmp_db1 = session.query(Database).filter_by(database_name="tmp_db1").one() + + database_pvm = security_manager.find_permission_view_menu( + "database_access", tmp_db1.perm + ) + self.assertIsNotNone(database_pvm) + role1 = Role(name="tmp_role1") + role1.permissions.append(database_pvm) + session.add(role1) + session.commit() + + session.delete(tmp_db1) + session.commit() + + # Assert that PVM is removed from Role + role1 = security_manager.find_role("tmp_role1") + self.assertEqual(role1.permissions, []) + + # Assert that the old permission was updated + self.assertIsNone( + security_manager.find_permission_view_menu( + "database_access", f"[tmp_db1].(id:{tmp_db1.id})" + ) + ) + + # Cleanup + session.delete(role1) + session.commit() + + def test_after_delete_database_rollback(self): + session = db.session + tmp_db1 = Database(database_name="tmp_db1", sqlalchemy_uri="sqlite://") + session.add(tmp_db1) + session.commit() + tmp_db1 = session.query(Database).filter_by(database_name="tmp_db1").one() + + database_pvm = security_manager.find_permission_view_menu( + "database_access", tmp_db1.perm + ) + self.assertIsNotNone(database_pvm) + role1 = Role(name="tmp_role1") + role1.permissions.append(database_pvm) + session.add(role1) + session.commit() + + session.delete(tmp_db1) + session.flush() + + role1 = security_manager.find_role("tmp_role1") + self.assertEqual(role1.permissions, []) + + self.assertIsNone( + security_manager.find_permission_view_menu( + "database_access", f"[tmp_db1].(id:{tmp_db1.id})" + ) + ) + + session.rollback() + + # Test a rollback reverts everything + database_pvm = security_manager.find_permission_view_menu( + "database_access", f"[tmp_db1].(id:{tmp_db1.id})" + ) + + role1 = security_manager.find_role("tmp_role1") + self.assertEqual(role1.permissions, [database_pvm]) + + # Cleanup + session.delete(role1) + session.delete(tmp_db1) + session.commit() + + def test_after_delete_dataset(self): + security_manager.on_permission_view_after_delete = Mock() + + session = db.session + tmp_db = 
Database(database_name="tmp_db", sqlalchemy_uri="sqlite://") + session.add(tmp_db) + session.commit() + + table1 = SqlaTable( + schema="tmp_schema", + table_name="tmp_table1", + database=tmp_db, + ) + session.add(table1) + session.commit() + + table1_pvm = security_manager.find_permission_view_menu( + "datasource_access", f"[tmp_db].[tmp_table1](id:{table1.id})" + ) + self.assertIsNotNone(table1_pvm) + + role1 = Role(name="tmp_role1") + role1.permissions.append(table1_pvm) + session.add(role1) + session.commit() + + # refresh + table1 = session.query(SqlaTable).filter_by(table_name="tmp_table1").one() + + # Test delete + session.delete(table1) + session.commit() + + role1 = security_manager.find_role("tmp_role1") + self.assertEqual(role1.permissions, []) + + table1_pvm = security_manager.find_permission_view_menu( + "datasource_access", f"[tmp_db].[tmp_table1](id:{table1.id})" + ) + self.assertIsNone(table1_pvm) + table1_view_menu = security_manager.find_view_menu( + f"[tmp_db].[tmp_table1](id:{table1.id})" + ) + self.assertIsNone(table1_view_menu) + + # Assert the hook is called + security_manager.on_permission_view_after_delete.assert_has_calls( + [ + call(ANY, ANY, ANY), + ] + ) + + # cleanup + session.delete(role1) + session.delete(tmp_db) + session.commit() + + def test_after_delete_dataset_rollback(self): + session = db.session + tmp_db = Database(database_name="tmp_db", sqlalchemy_uri="sqlite://") + session.add(tmp_db) + session.commit() + + table1 = SqlaTable( + schema="tmp_schema", + table_name="tmp_table1", + database=tmp_db, + ) + session.add(table1) + session.commit() + + table1_pvm = security_manager.find_permission_view_menu( + "datasource_access", f"[tmp_db].[tmp_table1](id:{table1.id})" + ) + self.assertIsNotNone(table1_pvm) + + role1 = Role(name="tmp_role1") + role1.permissions.append(table1_pvm) + session.add(role1) + session.commit() + + # refresh + table1 = session.query(SqlaTable).filter_by(table_name="tmp_table1").one() + + # Test delete, permissions are correctly deleted + session.delete(table1) + session.flush() + + role1 = security_manager.find_role("tmp_role1") + self.assertEqual(role1.permissions, []) + + table1_pvm = security_manager.find_permission_view_menu( + "datasource_access", f"[tmp_db].[tmp_table1](id:{table1.id})" + ) + self.assertIsNone(table1_pvm) + + # Test rollback, permissions exist everything is correctly rollback + session.rollback() + role1 = security_manager.find_role("tmp_role1") + table1_pvm = security_manager.find_permission_view_menu( + "datasource_access", f"[tmp_db].[tmp_table1](id:{table1.id})" + ) + self.assertIsNotNone(table1_pvm) + self.assertEqual(role1.permissions, [table1_pvm]) + + # cleanup + session.delete(table1) + session.delete(role1) + session.delete(tmp_db) + session.commit() + + def test_after_update_dataset__name_changes(self): + security_manager.on_view_menu_after_update = Mock() + + session = db.session + tmp_db = Database(database_name="tmp_db", sqlalchemy_uri="sqlite://") + session.add(tmp_db) + session.commit() + + table1 = SqlaTable( + schema="tmp_schema", + table_name="tmp_table1", + database=tmp_db, + ) + session.add(table1) + session.commit() + + slice1 = Slice( + datasource_id=table1.id, + datasource_type=DatasourceType.TABLE, + datasource_name="tmp_table1", + slice_name="tmp_slice1", + ) + session.add(slice1) + session.commit() + + table1_pvm = security_manager.find_permission_view_menu( + "datasource_access", f"[tmp_db].[tmp_table1](id:{table1.id})" + ) + self.assertIsNotNone(table1_pvm) + + # refresh + table1 
= session.query(SqlaTable).filter_by(table_name="tmp_table1").one() + # Test update + table1.table_name = "tmp_table1_changed" + session.commit() + + # Test old permission does not exist + old_table1_pvm = security_manager.find_permission_view_menu( + "datasource_access", f"[tmp_db].[tmp_table1](id:{table1.id})" + ) + self.assertIsNone(old_table1_pvm) + + # Test new permission exist + new_table1_pvm = security_manager.find_permission_view_menu( + "datasource_access", f"[tmp_db].[tmp_table1_changed](id:{table1.id})" + ) + self.assertIsNotNone(new_table1_pvm) + + # test dataset permission changed + changed_table1 = ( + session.query(SqlaTable).filter_by(table_name="tmp_table1_changed").one() + ) + self.assertEqual( + changed_table1.perm, f"[tmp_db].[tmp_table1_changed](id:{table1.id})" + ) + + # Test Chart permission changed + slice1 = session.query(Slice).filter_by(slice_name="tmp_slice1").one() + self.assertEqual(slice1.perm, f"[tmp_db].[tmp_table1_changed](id:{table1.id})") + + # Assert hook is called + view_menu_dataset = security_manager.find_view_menu( + f"[tmp_db].[tmp_table1_changed](id:{table1.id})" + ) + security_manager.on_view_menu_after_update.assert_has_calls( + [ + call(ANY, ANY, view_menu_dataset), + ] + ) + # cleanup + session.delete(slice1) + session.delete(table1) + session.delete(tmp_db) + session.commit() + + def test_after_update_dataset_rollback(self): + session = db.session + tmp_db = Database(database_name="tmp_db", sqlalchemy_uri="sqlite://") + session.add(tmp_db) + session.commit() + + table1 = SqlaTable( + schema="tmp_schema", + table_name="tmp_table1", + database=tmp_db, + ) + session.add(table1) + session.commit() + + slice1 = Slice( + datasource_id=table1.id, + datasource_type=DatasourceType.TABLE, + datasource_name="tmp_table1", + slice_name="tmp_slice1", + ) + session.add(slice1) + session.commit() + + # refresh + table1 = session.query(SqlaTable).filter_by(table_name="tmp_table1").one() + # Test update + table1.table_name = "tmp_table1_changed" + session.flush() + + # Test old permission does not exist + old_table1_pvm = security_manager.find_permission_view_menu( + "datasource_access", f"[tmp_db].[tmp_table1](id:{table1.id})" + ) + self.assertIsNone(old_table1_pvm) + + # Test new permission exist + new_table1_pvm = security_manager.find_permission_view_menu( + "datasource_access", f"[tmp_db].[tmp_table1_changed](id:{table1.id})" + ) + self.assertIsNotNone(new_table1_pvm) + + # Test rollback + session.rollback() + + old_table1_pvm = security_manager.find_permission_view_menu( + "datasource_access", f"[tmp_db].[tmp_table1](id:{table1.id})" + ) + self.assertIsNotNone(old_table1_pvm) + + # cleanup + session.delete(slice1) + session.delete(table1) + session.delete(tmp_db) + session.commit() + + def test_after_update_dataset__db_changes(self): + session = db.session + tmp_db1 = Database(database_name="tmp_db1", sqlalchemy_uri="sqlite://") + tmp_db2 = Database(database_name="tmp_db2", sqlalchemy_uri="sqlite://") + session.add(tmp_db1) + session.add(tmp_db2) + session.commit() + + table1 = SqlaTable( + schema="tmp_schema", + table_name="tmp_table1", + database=tmp_db1, + ) + session.add(table1) + session.commit() + + slice1 = Slice( + datasource_id=table1.id, + datasource_type=DatasourceType.TABLE, + datasource_name="tmp_table1", + slice_name="tmp_slice1", + ) + session.add(slice1) + session.commit() + + table1_pvm = security_manager.find_permission_view_menu( + "datasource_access", f"[tmp_db1].[tmp_table1](id:{table1.id})" + ) + self.assertIsNotNone(table1_pvm) + 
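+ # The rest of this test moves the dataset from tmp_db1 to tmp_db2 and verifies the
+ # rename cascades: the old "[tmp_db1].[tmp_table1](id:...)" datasource_access
+ # permission is dropped, a "[tmp_db2].[tmp_table1](id:...)" one is created, and the
+ # dependent chart picks up the new perm and schema_perm.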
+ # refresh + table1 = session.query(SqlaTable).filter_by(table_name="tmp_table1").one() + # Test update + table1.database = tmp_db2 + session.commit() + + # Test old permission does not exist + table1_pvm = security_manager.find_permission_view_menu( + "datasource_access", f"[tmp_db1].[tmp_table1](id:{table1.id})" + ) + self.assertIsNone(table1_pvm) + + # Test new permission exist + table1_pvm = security_manager.find_permission_view_menu( + "datasource_access", f"[tmp_db2].[tmp_table1](id:{table1.id})" + ) + self.assertIsNotNone(table1_pvm) + + # test dataset permission and schema permission changed + changed_table1 = ( + session.query(SqlaTable).filter_by(table_name="tmp_table1").one() + ) + self.assertEqual(changed_table1.perm, f"[tmp_db2].[tmp_table1](id:{table1.id})") + self.assertEqual(changed_table1.schema_perm, f"[tmp_db2].[tmp_schema]") + + # Test Chart permission changed + slice1 = session.query(Slice).filter_by(slice_name="tmp_slice1").one() + self.assertEqual(slice1.perm, f"[tmp_db2].[tmp_table1](id:{table1.id})") + self.assertEqual(slice1.schema_perm, f"[tmp_db2].[tmp_schema]") + + # cleanup + session.delete(slice1) + session.delete(table1) + session.delete(tmp_db1) + session.delete(tmp_db2) + session.commit() + + def test_after_update_dataset__schema_changes(self): + session = db.session + tmp_db1 = Database(database_name="tmp_db1", sqlalchemy_uri="sqlite://") + session.add(tmp_db1) + session.commit() + + table1 = SqlaTable( + schema="tmp_schema", + table_name="tmp_table1", + database=tmp_db1, + ) + session.add(table1) + session.commit() + + slice1 = Slice( + datasource_id=table1.id, + datasource_type=DatasourceType.TABLE, + datasource_name="tmp_table1", + slice_name="tmp_slice1", + ) + session.add(slice1) + session.commit() + + table1_pvm = security_manager.find_permission_view_menu( + "datasource_access", f"[tmp_db1].[tmp_table1](id:{table1.id})" + ) + self.assertIsNotNone(table1_pvm) + + # refresh + table1 = session.query(SqlaTable).filter_by(table_name="tmp_table1").one() + # Test update + table1.schema = "tmp_schema_changed" + session.commit() + + # Test permission still exists + table1_pvm = security_manager.find_permission_view_menu( + "datasource_access", f"[tmp_db1].[tmp_table1](id:{table1.id})" + ) + self.assertIsNotNone(table1_pvm) + + # test dataset schema permission changed + changed_table1 = ( + session.query(SqlaTable).filter_by(table_name="tmp_table1").one() + ) + self.assertEqual(changed_table1.perm, f"[tmp_db1].[tmp_table1](id:{table1.id})") + self.assertEqual(changed_table1.schema_perm, f"[tmp_db1].[tmp_schema_changed]") + + # Test Chart schema permission changed + slice1 = session.query(Slice).filter_by(slice_name="tmp_slice1").one() + self.assertEqual(slice1.perm, f"[tmp_db1].[tmp_table1](id:{table1.id})") + self.assertEqual(slice1.schema_perm, f"[tmp_db1].[tmp_schema_changed]") + + # cleanup + session.delete(slice1) + session.delete(table1) + session.delete(tmp_db1) + session.commit() + + def test_after_update_dataset__schema_none(self): + session = db.session + tmp_db1 = Database(database_name="tmp_db1", sqlalchemy_uri="sqlite://") + session.add(tmp_db1) + session.commit() + + table1 = SqlaTable( + schema="tmp_schema", + table_name="tmp_table1", + database=tmp_db1, + ) + session.add(table1) + session.commit() + + slice1 = Slice( + datasource_id=table1.id, + datasource_type=DatasourceType.TABLE, + datasource_name="tmp_table1", + slice_name="tmp_slice1", + ) + session.add(slice1) + session.commit() + + table1_pvm = 
security_manager.find_permission_view_menu( + "datasource_access", f"[tmp_db1].[tmp_table1](id:{table1.id})" + ) + self.assertIsNotNone(table1_pvm) + + # refresh + table1 = session.query(SqlaTable).filter_by(table_name="tmp_table1").one() + # Test update + table1.schema = None + session.commit() + + # refresh + table1 = session.query(SqlaTable).filter_by(table_name="tmp_table1").one() + + self.assertEqual(table1.perm, f"[tmp_db1].[tmp_table1](id:{table1.id})") + self.assertIsNone(table1.schema_perm) + + # cleanup + session.delete(slice1) + session.delete(table1) + session.delete(tmp_db1) + session.commit() + + def test_after_update_dataset__name_db_changes(self): + session = db.session + tmp_db1 = Database(database_name="tmp_db1", sqlalchemy_uri="sqlite://") + tmp_db2 = Database(database_name="tmp_db2", sqlalchemy_uri="sqlite://") + session.add(tmp_db1) + session.add(tmp_db2) + session.commit() + + table1 = SqlaTable( + schema="tmp_schema", + table_name="tmp_table1", + database=tmp_db1, + ) + session.add(table1) + session.commit() + + slice1 = Slice( + datasource_id=table1.id, + datasource_type=DatasourceType.TABLE, + datasource_name="tmp_table1", + slice_name="tmp_slice1", + ) + session.add(slice1) + session.commit() + + table1_pvm = security_manager.find_permission_view_menu( + "datasource_access", f"[tmp_db1].[tmp_table1](id:{table1.id})" + ) + self.assertIsNotNone(table1_pvm) + + # refresh + table1 = session.query(SqlaTable).filter_by(table_name="tmp_table1").one() + # Test update + table1.table_name = "tmp_table1_changed" + table1.database = tmp_db2 + session.commit() + + # Test old permission does not exist + table1_pvm = security_manager.find_permission_view_menu( + "datasource_access", f"[tmp_db1].[tmp_table1](id:{table1.id})" + ) + self.assertIsNone(table1_pvm) + + # Test new permission exist + table1_pvm = security_manager.find_permission_view_menu( + "datasource_access", f"[tmp_db2].[tmp_table1_changed](id:{table1.id})" + ) + self.assertIsNotNone(table1_pvm) + + # test dataset permission and schema permission changed + changed_table1 = ( + session.query(SqlaTable).filter_by(table_name="tmp_table1_changed").one() + ) + self.assertEqual( + changed_table1.perm, f"[tmp_db2].[tmp_table1_changed](id:{table1.id})" + ) + self.assertEqual(changed_table1.schema_perm, f"[tmp_db2].[tmp_schema]") + + # Test Chart permission changed + slice1 = session.query(Slice).filter_by(slice_name="tmp_slice1").one() + self.assertEqual(slice1.perm, f"[tmp_db2].[tmp_table1_changed](id:{table1.id})") + self.assertEqual(slice1.schema_perm, f"[tmp_db2].[tmp_schema]") + + # cleanup + session.delete(slice1) + session.delete(table1) + session.delete(tmp_db1) + session.delete(tmp_db2) + session.commit() + + def test_hybrid_perm_database(self): + database = Database(database_name="tmp_database3", sqlalchemy_uri="sqlite://") + + db.session.add(database) + + id_ = ( + db.session.query(Database.id) + .filter_by(database_name="tmp_database3") + .scalar() + ) + + record = ( + db.session.query(Database) + .filter_by(perm=f"[tmp_database3].(id:{id_})") + .one() + ) + + self.assertEqual(record.get_perm(), record.perm) + self.assertEqual(record.id, id_) + self.assertEqual(record.database_name, "tmp_database3") + db.session.delete(database) + db.session.commit() + + def test_set_perm_slice(self): + session = db.session + database = Database(database_name="tmp_database", sqlalchemy_uri="sqlite://") + table = SqlaTable(table_name="tmp_perm_table", database=database) + session.add(database) + session.add(table) + 
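+ # Committing fires the dataset after-insert handlers exercised earlier in this class,
+ # which populate table.perm (and schema_perm, once a schema is set) for the assertions below.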
session.commit() + + # no schema permission + slice = Slice( + datasource_id=table.id, + datasource_type=DatasourceType.TABLE, + datasource_name="tmp_perm_table", + slice_name="slice_name", + ) + session.add(slice) + session.commit() + + slice = session.query(Slice).filter_by(slice_name="slice_name").one() + self.assertEqual(slice.perm, table.perm) + self.assertEqual(slice.perm, f"[tmp_database].[tmp_perm_table](id:{table.id})") + self.assertEqual(slice.schema_perm, table.schema_perm) + self.assertIsNone(slice.schema_perm) + + table.schema = "tmp_perm_schema" + table.table_name = "tmp_perm_table_v2" + session.commit() + table = session.query(SqlaTable).filter_by(table_name="tmp_perm_table_v2").one() + self.assertEqual(slice.perm, table.perm) + self.assertEqual( + slice.perm, f"[tmp_database].[tmp_perm_table_v2](id:{table.id})" + ) + self.assertEqual( + table.perm, f"[tmp_database].[tmp_perm_table_v2](id:{table.id})" + ) + self.assertEqual(slice.schema_perm, table.schema_perm) + self.assertEqual(slice.schema_perm, "[tmp_database].[tmp_perm_schema]") + + session.delete(slice) + session.delete(table) + session.delete(database) + + session.commit() + + @patch("superset.utils.core.g") + @patch("superset.security.manager.g") + def test_schemas_accessible_by_user_admin(self, mock_sm_g, mock_g): + mock_g.user = mock_sm_g.user = security_manager.find_user("admin") + with self.client.application.test_request_context(): + database = get_example_database() + schemas = security_manager.get_schemas_accessible_by_user( + database, ["1", "2", "3"] + ) + self.assertEqual(schemas, ["1", "2", "3"]) # no changes + + @patch("superset.utils.core.g") + @patch("superset.security.manager.g") + def test_schemas_accessible_by_user_schema_access(self, mock_sm_g, mock_g): + # User has schema access to the schema 1 + create_schema_perm("[examples].[1]") + mock_g.user = mock_sm_g.user = security_manager.find_user("gamma") + with self.client.application.test_request_context(): + database = get_example_database() + schemas = security_manager.get_schemas_accessible_by_user( + database, ["1", "2", "3"] + ) + # temp_schema is not passed in the params + self.assertEqual(schemas, ["1"]) + delete_schema_perm("[examples].[1]") + + @patch("superset.utils.core.g") + @patch("superset.security.manager.g") + def test_schemas_accessible_by_user_datasource_access(self, mock_sm_g, mock_g): + # User has schema access to the datasource temp_schema.wb_health_population in examples DB. + mock_g.user = mock_sm_g.user = security_manager.find_user("gamma") + with self.client.application.test_request_context(): + database = get_example_database() + schemas = security_manager.get_schemas_accessible_by_user( + database, ["temp_schema", "2", "3"] + ) + self.assertEqual(schemas, ["temp_schema"]) + + @patch("superset.utils.core.g") + @patch("superset.security.manager.g") + def test_schemas_accessible_by_user_datasource_and_schema_access( + self, mock_sm_g, mock_g + ): + # User has schema access to the datasource temp_schema.wb_health_population in examples DB. 
+ create_schema_perm("[examples].[2]") + mock_g.user = mock_sm_g.user = security_manager.find_user("gamma") + with self.client.application.test_request_context(): + database = get_example_database() + schemas = security_manager.get_schemas_accessible_by_user( + database, ["temp_schema", "2", "3"] + ) + self.assertEqual(schemas, ["temp_schema", "2"]) + vm = security_manager.find_permission_view_menu( + "schema_access", "[examples].[2]" + ) + self.assertIsNotNone(vm) + delete_schema_perm("[examples].[2]") + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def test_gamma_user_schema_access_to_dashboards(self): + dash = db.session.query(Dashboard).filter_by(slug="world_health").first() + dash.published = True + db.session.commit() + + self.login(username="gamma") + data = str(self.client.get("api/v1/dashboard/").data) + self.assertIn("/superset/dashboard/world_health/", data) + self.assertNotIn("/superset/dashboard/births/", data) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + @pytest.mark.usefixtures("public_role_like_gamma") + def test_public_sync_role_data_perms(self): + """ + Security: Tests if the sync role method preserves data access permissions + if they already exist on a public role. + Also check that non data access permissions are removed + """ + table = db.session.query(SqlaTable).filter_by(table_name="birth_names").one() + self.grant_public_access_to_table(table) + public_role = security_manager.get_public_role() + unwanted_pvm = security_manager.find_permission_view_menu( + "menu_access", "Security" + ) + public_role.permissions.append(unwanted_pvm) + db.session.commit() + + security_manager.sync_role_definitions() + public_role = security_manager.get_public_role() + public_role_resource_names = [ + permission.view_menu.name for permission in public_role.permissions + ] + + assert table.get_perm() in public_role_resource_names + assert "Security" not in public_role_resource_names + + # Cleanup + self.revoke_public_access_to_table(table) + + @pytest.mark.usefixtures("public_role_like_test_role") + def test_public_sync_role_builtin_perms(self): + """ + Security: Tests public role creation based on a builtin role + """ + public_role = security_manager.get_public_role() + public_role_resource_names = [ + [permission.view_menu.name, permission.permission.name] + for permission in public_role.permissions + ] + for pvm in current_app.config["FAB_ROLES"]["TestRole"]: + assert pvm in public_role_resource_names + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def test_sqllab_gamma_user_schema_access_to_sqllab(self): + session = db.session + example_db = session.query(Database).filter_by(database_name="examples").one() + example_db.expose_in_sqllab = True + session.commit() + + arguments = { + "keys": ["none"], + "columns": ["expose_in_sqllab"], + "filters": [{"col": "expose_in_sqllab", "opr": "eq", "value": True}], + "order_columns": "database_name", + "order_direction": "asc", + "page": 0, + "page_size": -1, + } + NEW_FLASK_GET_SQL_DBS_REQUEST = f"/api/v1/database/?q={prison.dumps(arguments)}" + self.login(username="gamma") + databases_json = self.client.get(NEW_FLASK_GET_SQL_DBS_REQUEST).json + self.assertEqual(databases_json["count"], 1) + self.logout() + + def assert_can_read(self, view_menu, permissions_set): + if view_menu in NEW_SECURITY_CONVERGE_VIEWS: + self.assertIn(("can_read", view_menu), permissions_set) + else: + self.assertIn(("can_list", view_menu), permissions_set) + + def assert_can_write(self, 
view_menu, permissions_set): + if view_menu in NEW_SECURITY_CONVERGE_VIEWS: + self.assertIn(("can_write", view_menu), permissions_set) + else: + self.assertIn(("can_add", view_menu), permissions_set) + self.assertIn(("can_delete", view_menu), permissions_set) + self.assertIn(("can_edit", view_menu), permissions_set) + + def assert_cannot_write(self, view_menu, permissions_set): + if view_menu in NEW_SECURITY_CONVERGE_VIEWS: + self.assertNotIn(("can_write", view_menu), permissions_set) + else: + self.assertNotIn(("can_add", view_menu), permissions_set) + self.assertNotIn(("can_delete", view_menu), permissions_set) + self.assertNotIn(("can_edit", view_menu), permissions_set) + self.assertNotIn(("can_save", view_menu), permissions_set) + + def assert_can_all(self, view_menu, permissions_set): + self.assert_can_read(view_menu, permissions_set) + self.assert_can_write(view_menu, permissions_set) + + def assert_can_menu(self, view_menu, permissions_set): + self.assertIn(("menu_access", view_menu), permissions_set) + + def assert_cannot_menu(self, view_menu, permissions_set): + self.assertNotIn(("menu_access", view_menu), permissions_set) + + def assert_cannot_gamma(self, perm_set): + self.assert_cannot_write("Annotation", perm_set) + self.assert_cannot_write("CssTemplate", perm_set) + self.assert_cannot_menu("SQL Lab", perm_set) + self.assert_cannot_menu("CSS Templates", perm_set) + self.assert_cannot_menu("Annotation Layers", perm_set) + self.assert_cannot_menu("Manage", perm_set) + self.assert_cannot_menu("Queries", perm_set) + self.assert_cannot_menu("Import dashboards", perm_set) + self.assert_cannot_menu("Upload a CSV", perm_set) + self.assert_cannot_menu("ReportSchedule", perm_set) + self.assert_cannot_menu("Alerts & Report", perm_set) + + def assert_can_gamma(self, perm_set): + self.assert_can_read("CssTemplate", perm_set) + self.assert_can_read("Dataset", perm_set) + + # make sure that user can create slices and dashboards + self.assert_can_all("Dashboard", perm_set) + self.assert_can_all("Chart", perm_set) + self.assertIn(("can_add_slices", "Superset"), perm_set) + self.assertIn(("can_copy_dash", "Superset"), perm_set) + self.assertIn(("can_created_dashboards", "Superset"), perm_set) + self.assertIn(("can_created_slices", "Superset"), perm_set) + self.assertIn(("can_csv", "Superset"), perm_set) + self.assertIn(("can_dashboard", "Superset"), perm_set) + self.assertIn(("can_explore", "Superset"), perm_set) + self.assertIn(("can_share_chart", "Superset"), perm_set) + self.assertIn(("can_share_dashboard", "Superset"), perm_set) + self.assertIn(("can_explore_json", "Superset"), perm_set) + self.assertIn(("can_fave_dashboards", "Superset"), perm_set) + self.assertIn(("can_fave_slices", "Superset"), perm_set) + self.assertIn(("can_save_dash", "Superset"), perm_set) + self.assertIn(("can_slice", "Superset"), perm_set) + self.assertIn(("can_explore_json", "Superset"), perm_set) + self.assertIn(("can_userinfo", "UserDBModelView"), perm_set) + self.assert_can_menu("Databases", perm_set) + self.assert_can_menu("Datasets", perm_set) + self.assert_can_menu("Data", perm_set) + self.assert_can_menu("Charts", perm_set) + self.assert_can_menu("Dashboards", perm_set) + + def assert_can_alpha(self, perm_set): + self.assert_can_all("Annotation", perm_set) + self.assert_can_all("CssTemplate", perm_set) + self.assert_can_all("Dataset", perm_set) + self.assert_can_read("Database", perm_set) + self.assertIn(("can_import_dashboards", "Superset"), perm_set) + self.assertIn(("can_this_form_post", 
"CsvToDatabaseView"), perm_set) + self.assertIn(("can_this_form_get", "CsvToDatabaseView"), perm_set) + self.assert_can_menu("Manage", perm_set) + self.assert_can_menu("Annotation Layers", perm_set) + self.assert_can_menu("CSS Templates", perm_set) + self.assertIn(("all_datasource_access", "all_datasource_access"), perm_set) + + def assert_cannot_alpha(self, perm_set): + if app.config["ENABLE_ACCESS_REQUEST"]: + self.assert_cannot_write("AccessRequestsModelView", perm_set) + self.assert_can_all("AccessRequestsModelView", perm_set) + self.assert_cannot_write("Queries", perm_set) + self.assert_cannot_write("RoleModelView", perm_set) + self.assert_cannot_write("UserDBModelView", perm_set) + self.assert_cannot_write("Database", perm_set) + + def assert_can_admin(self, perm_set): + self.assert_can_all("Database", perm_set) + self.assert_can_all("RoleModelView", perm_set) + self.assert_can_all("UserDBModelView", perm_set) + + self.assertIn(("all_database_access", "all_database_access"), perm_set) + self.assertIn(("can_override_role_permissions", "Superset"), perm_set) + self.assertIn(("can_override_role_permissions", "Superset"), perm_set) + self.assertIn(("can_approve", "Superset"), perm_set) + + self.assert_can_menu("Security", perm_set) + self.assert_can_menu("List Users", perm_set) + self.assert_can_menu("List Roles", perm_set) + + def test_is_admin_only(self): + self.assertFalse( + security_manager._is_admin_only( + security_manager.find_permission_view_menu("can_read", "Dataset") + ) + ) + self.assertFalse( + security_manager._is_admin_only( + security_manager.find_permission_view_menu( + "all_datasource_access", "all_datasource_access" + ) + ) + ) + + log_permissions = ["can_read"] + for log_permission in log_permissions: + self.assertTrue( + security_manager._is_admin_only( + security_manager.find_permission_view_menu(log_permission, "Log") + ) + ) + + if app.config["ENABLE_ACCESS_REQUEST"]: + self.assertTrue( + security_manager._is_admin_only( + security_manager.find_permission_view_menu( + "can_list", "AccessRequestsModelView" + ) + ) + ) + self.assertTrue( + security_manager._is_admin_only( + security_manager.find_permission_view_menu( + "can_edit", "UserDBModelView" + ) + ) + ) + self.assertTrue( + security_manager._is_admin_only( + security_manager.find_permission_view_menu("can_approve", "Superset") + ) + ) + + @unittest.skipUnless( + SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed" + ) + def test_is_alpha_only(self): + self.assertFalse( + security_manager._is_alpha_only( + security_manager.find_permission_view_menu("can_read", "Dataset") + ) + ) + + self.assertTrue( + security_manager._is_alpha_only( + security_manager.find_permission_view_menu("can_write", "Dataset") + ) + ) + self.assertTrue( + security_manager._is_alpha_only( + security_manager.find_permission_view_menu( + "all_datasource_access", "all_datasource_access" + ) + ) + ) + self.assertTrue( + security_manager._is_alpha_only( + security_manager.find_permission_view_menu( + "all_database_access", "all_database_access" + ) + ) + ) + + def test_is_gamma_pvm(self): + self.assertTrue( + security_manager._is_gamma_pvm( + security_manager.find_permission_view_menu("can_read", "Dataset") + ) + ) + + def test_gamma_permissions_basic(self): + self.assert_can_gamma(get_perm_tuples("Gamma")) + self.assert_cannot_alpha(get_perm_tuples("Gamma")) + self.assert_cannot_gamma(get_perm_tuples("Gamma")) + + @pytest.mark.usefixtures("public_role_like_gamma") + def test_public_permissions_basic(self): + 
self.assert_can_gamma(get_perm_tuples("Public")) + + @unittest.skipUnless( + SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed" + ) + def test_alpha_permissions(self): + alpha_perm_tuples = get_perm_tuples("Alpha") + self.assert_can_gamma(alpha_perm_tuples) + self.assert_can_alpha(alpha_perm_tuples) + self.assert_cannot_alpha(alpha_perm_tuples) + self.assertNotIn(("can_this_form_get", "UserInfoEditView"), alpha_perm_tuples) + self.assertNotIn(("can_this_form_post", "UserInfoEditView"), alpha_perm_tuples) + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def test_admin_permissions(self): + if backend() == "hive": + return + + self.assert_can_gamma(get_perm_tuples("Admin")) + self.assert_can_alpha(get_perm_tuples("Admin")) + self.assert_can_admin(get_perm_tuples("Admin")) + + def test_sql_lab_permissions(self): + sql_lab_set = get_perm_tuples("sql_lab") + self.assertIn(("can_csv", "Superset"), sql_lab_set) + self.assertIn(("can_read", "Database"), sql_lab_set) + self.assertIn(("can_read", "SavedQuery"), sql_lab_set) + self.assertIn(("can_sql_json", "Superset"), sql_lab_set) + self.assertIn(("can_sqllab_viz", "Superset"), sql_lab_set) + self.assertIn(("can_sqllab_table_viz", "Superset"), sql_lab_set) + self.assertIn(("can_sqllab", "Superset"), sql_lab_set) + + self.assertIn(("menu_access", "SQL Lab"), sql_lab_set) + self.assertIn(("menu_access", "SQL Editor"), sql_lab_set) + self.assertIn(("menu_access", "Saved Queries"), sql_lab_set) + self.assertIn(("menu_access", "Query Search"), sql_lab_set) + + self.assert_cannot_alpha(sql_lab_set) + + def test_granter_permissions(self): + granter_set = get_perm_tuples("granter") + self.assertIn(("can_override_role_permissions", "Superset"), granter_set) + self.assertIn(("can_approve", "Superset"), granter_set) + + self.assert_cannot_alpha(granter_set) + + def test_gamma_permissions(self): + gamma_perm_set = set() + for perm in security_manager.find_role("Gamma").permissions: + gamma_perm_set.add((perm.permission.name, perm.view_menu.name)) + + # check read only perms + + # make sure that user can create slices and dashboards + self.assert_can_all("Dashboard", gamma_perm_set) + self.assert_can_read("Dataset", gamma_perm_set) + self.assert_can_read("Annotation", gamma_perm_set) + self.assert_can_read("CssTemplate", gamma_perm_set) + + # make sure that user can create slices and dashboards + self.assert_can_all("Chart", gamma_perm_set) + + self.assert_cannot_write("UserDBModelView", gamma_perm_set) + self.assert_cannot_write("RoleModelView", gamma_perm_set) + + self.assertIn(("can_add_slices", "Superset"), gamma_perm_set) + self.assertIn(("can_copy_dash", "Superset"), gamma_perm_set) + self.assertIn(("can_created_dashboards", "Superset"), gamma_perm_set) + self.assertIn(("can_created_slices", "Superset"), gamma_perm_set) + self.assertIn(("can_csv", "Superset"), gamma_perm_set) + self.assertIn(("can_dashboard", "Superset"), gamma_perm_set) + self.assertIn(("can_explore", "Superset"), gamma_perm_set) + self.assertIn(("can_share_chart", "Superset"), gamma_perm_set) + self.assertIn(("can_share_dashboard", "Superset"), gamma_perm_set) + self.assertIn(("can_explore_json", "Superset"), gamma_perm_set) + self.assertIn(("can_fave_dashboards", "Superset"), gamma_perm_set) + self.assertIn(("can_fave_slices", "Superset"), gamma_perm_set) + self.assertIn(("can_save_dash", "Superset"), gamma_perm_set) + self.assertIn(("can_slice", "Superset"), gamma_perm_set) + self.assertIn(("can_userinfo", "UserDBModelView"), gamma_perm_set) 
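    # A minimal sketch of the permission-string convention asserted throughout this
    # class, inferred only from the expected values above; the helper names are
    # illustrative and not part of the security manager's API.
    def _database_perm(database_name: str, database_id: int) -> str:
        return f"[{database_name}].(id:{database_id})"

    def _dataset_perm(database_name: str, table_name: str, table_id: int) -> str:
        return f"[{database_name}].[{table_name}](id:{table_id})"

    def _schema_perm(database_name: str, schema_name: str) -> str:
        return f"[{database_name}].[{schema_name}]"

    # e.g. _dataset_perm("tmp_db1", "tmp_table1", 42) -> "[tmp_db1].[tmp_table1](id:42)"
    #      _schema_perm("tmp_db1", "tmp_schema")      -> "[tmp_db1].[tmp_schema]"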
+ + def test_views_are_secured(self): + """Preventing the addition of unsecured views without has_access decorator""" + # These FAB views are secured in their body as opposed to by decorators + method_allowlist = ("action", "action_post") + # List of redirect & other benign views + views_allowlist = [ + ["MyIndexView", "index"], + ["UtilView", "back"], + ["LocaleView", "index"], + ["AuthDBView", "login"], + ["AuthDBView", "logout"], + ["CurrentUserRestApi", "get_me"], + ["CurrentUserRestApi", "get_my_roles"], + # TODO (embedded) remove Dashboard:embedded after uuids have been shipped + ["Dashboard", "embedded"], + ["EmbeddedView", "embedded"], + ["R", "index"], + ["Superset", "log"], + ["Superset", "theme"], + ["Superset", "welcome"], + ["SecurityApi", "login"], + ["SecurityApi", "refresh"], + ["SupersetIndexView", "index"], + ] + unsecured_views = [] + for view_class in appbuilder.baseviews: + class_name = view_class.__class__.__name__ + for name, value in inspect.getmembers( + view_class, predicate=inspect.ismethod + ): + if ( + name not in method_allowlist + and [class_name, name] not in views_allowlist + and hasattr(value, "_urls") + and not hasattr(value, "_permission_name") + ): + unsecured_views.append((class_name, name)) + if unsecured_views: + view_str = "\n".join([str(v) for v in unsecured_views]) + raise Exception(f"Some views are not secured:\n{view_str}") + + +class TestSecurityManager(SupersetTestCase): + """ + Testing the Security Manager. + """ + + @patch("superset.security.SupersetSecurityManager.raise_for_access") + def test_can_access_datasource(self, mock_raise_for_access): + datasource = self.get_datasource_mock() + + mock_raise_for_access.return_value = None + self.assertTrue(security_manager.can_access_datasource(datasource=datasource)) + + mock_raise_for_access.side_effect = SupersetSecurityException( + SupersetError( + "dummy", + SupersetErrorType.DATASOURCE_SECURITY_ACCESS_ERROR, + ErrorLevel.ERROR, + ) + ) + + self.assertFalse(security_manager.can_access_datasource(datasource=datasource)) + + @patch("superset.security.SupersetSecurityManager.raise_for_access") + def test_can_access_table(self, mock_raise_for_access): + database = get_example_database() + table = Table("bar", "foo") + + mock_raise_for_access.return_value = None + self.assertTrue(security_manager.can_access_table(database, table)) + + mock_raise_for_access.side_effect = SupersetSecurityException( + SupersetError( + "dummy", SupersetErrorType.TABLE_SECURITY_ACCESS_ERROR, ErrorLevel.ERROR + ) + ) + + self.assertFalse(security_manager.can_access_table(database, table)) + + @patch("superset.security.SupersetSecurityManager.is_owner") + @patch("superset.security.SupersetSecurityManager.can_access") + @patch("superset.security.SupersetSecurityManager.can_access_schema") + def test_raise_for_access_datasource( + self, mock_can_access_schema, mock_can_access, mock_is_owner + ): + datasource = self.get_datasource_mock() + + mock_can_access_schema.return_value = True + security_manager.raise_for_access(datasource=datasource) + + mock_can_access.return_value = False + mock_can_access_schema.return_value = False + mock_is_owner.return_value = False + + with self.assertRaises(SupersetSecurityException): + security_manager.raise_for_access(datasource=datasource) + + @patch("superset.security.SupersetSecurityManager.is_owner") + @patch("superset.security.SupersetSecurityManager.can_access") + def test_raise_for_access_query(self, mock_can_access, mock_is_owner): + query = Mock( + database=get_example_database(), 
schema="bar", sql="SELECT * FROM foo" + ) + + mock_can_access.return_value = True + security_manager.raise_for_access(query=query) + + mock_can_access.return_value = False + mock_is_owner.return_value = False + + with self.assertRaises(SupersetSecurityException): + security_manager.raise_for_access(query=query) + + @patch("superset.security.SupersetSecurityManager.is_owner") + @patch("superset.security.SupersetSecurityManager.can_access") + @patch("superset.security.SupersetSecurityManager.can_access_schema") + def test_raise_for_access_query_context( + self, mock_can_access_schema, mock_can_access, mock_is_owner + ): + query_context = Mock(datasource=self.get_datasource_mock()) + + mock_can_access_schema.return_value = True + security_manager.raise_for_access(query_context=query_context) + + mock_can_access.return_value = False + mock_can_access_schema.return_value = False + mock_is_owner.return_value = False + + with self.assertRaises(SupersetSecurityException): + security_manager.raise_for_access(query_context=query_context) + + @patch("superset.security.SupersetSecurityManager.can_access") + def test_raise_for_access_table(self, mock_can_access): + database = get_example_database() + table = Table("bar", "foo") + + mock_can_access.return_value = True + security_manager.raise_for_access(database=database, table=table) + + mock_can_access.return_value = False + + with self.assertRaises(SupersetSecurityException): + security_manager.raise_for_access(database=database, table=table) + + @patch("superset.security.SupersetSecurityManager.is_owner") + @patch("superset.security.SupersetSecurityManager.can_access") + @patch("superset.security.SupersetSecurityManager.can_access_schema") + def test_raise_for_access_viz( + self, mock_can_access_schema, mock_can_access, mock_is_owner + ): + test_viz = viz.TableViz(self.get_datasource_mock(), form_data={}) + + mock_can_access_schema.return_value = True + security_manager.raise_for_access(viz=test_viz) + + mock_can_access.return_value = False + mock_can_access_schema.return_value = False + mock_is_owner.return_value = False + + with self.assertRaises(SupersetSecurityException): + security_manager.raise_for_access(viz=test_viz) + + @patch("superset.security.manager.g") + def test_get_user_roles(self, mock_g): + admin = security_manager.find_user("admin") + mock_g.user = admin + roles = security_manager.get_user_roles() + self.assertEqual(admin.roles, roles) + + @patch("superset.security.manager.g") + def test_get_anonymous_roles(self, mock_g): + mock_g.user = security_manager.get_anonymous_user() + roles = security_manager.get_user_roles() + self.assertEqual([security_manager.get_public_role()], roles) + + +class TestAccessRequestEndpoints(SupersetTestCase): + def test_access_request_disabled(self): + with patch.object(AccessRequestsModelView, "is_enabled", return_value=False): + self.login("admin") + uri = "/accessrequestsmodelview/list/" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 404) + + def test_access_request_enabled(self): + with patch.object(AccessRequestsModelView, "is_enabled", return_value=True): + self.login("admin") + uri = "/accessrequestsmodelview/list/" + rv = self.client.get(uri) + self.assertLess(rv.status_code, 400) + + +class TestDatasources(SupersetTestCase): + @patch("superset.security.manager.g") + @patch("superset.security.SupersetSecurityManager.can_access_database") + @patch("superset.security.SupersetSecurityManager.get_session") + def test_get_user_datasources_admin( + self, mock_get_session, 
mock_can_access_database, mock_g + ): + Datasource = namedtuple("Datasource", ["database", "schema", "name"]) + mock_g.user = security_manager.find_user("admin") + mock_can_access_database.return_value = True + mock_get_session.query.return_value.filter.return_value.all.return_value = [] + + with mock.patch.object( + SqlaTable, "get_all_datasources" + ) as mock_get_all_datasources: + mock_get_all_datasources.return_value = [ + Datasource("database1", "schema1", "table1"), + Datasource("database1", "schema1", "table2"), + Datasource("database2", None, "table1"), + ] + + datasources = security_manager.get_user_datasources() + + assert sorted(datasources) == [ + Datasource("database1", "schema1", "table1"), + Datasource("database1", "schema1", "table2"), + Datasource("database2", None, "table1"), + ] + + @patch("superset.security.manager.g") + @patch("superset.security.SupersetSecurityManager.can_access_database") + @patch("superset.security.SupersetSecurityManager.get_session") + def test_get_user_datasources_gamma( + self, mock_get_session, mock_can_access_database, mock_g + ): + Datasource = namedtuple("Datasource", ["database", "schema", "name"]) + mock_g.user = security_manager.find_user("gamma") + mock_can_access_database.return_value = False + mock_get_session.query.return_value.filter.return_value.all.return_value = [] + + with mock.patch.object( + SqlaTable, "get_all_datasources" + ) as mock_get_all_datasources: + mock_get_all_datasources.return_value = [ + Datasource("database1", "schema1", "table1"), + Datasource("database1", "schema1", "table2"), + Datasource("database2", None, "table1"), + ] + + datasources = security_manager.get_user_datasources() + + assert datasources == [] + + @patch("superset.security.manager.g") + @patch("superset.security.SupersetSecurityManager.can_access_database") + @patch("superset.security.SupersetSecurityManager.get_session") + def test_get_user_datasources_gamma_with_schema( + self, mock_get_session, mock_can_access_database, mock_g + ): + Datasource = namedtuple("Datasource", ["database", "schema", "name"]) + mock_g.user = security_manager.find_user("gamma") + mock_can_access_database.return_value = False + + mock_get_session.query.return_value.filter.return_value.all.return_value = [ + Datasource("database1", "schema1", "table1"), + Datasource("database1", "schema1", "table2"), + ] + + with mock.patch.object( + SqlaTable, "get_all_datasources" + ) as mock_get_all_datasources: + mock_get_all_datasources.return_value = [ + Datasource("database1", "schema1", "table1"), + Datasource("database1", "schema1", "table2"), + Datasource("database2", None, "table1"), + ] + + datasources = security_manager.get_user_datasources() + + assert sorted(datasources) == [ + Datasource("database1", "schema1", "table1"), + Datasource("database1", "schema1", "table2"), + ] + + +class FakeRequest: + headers: Any = {} + form: Any = {} + + +class TestGuestTokens(SupersetTestCase): + def create_guest_token(self): + user = {"username": "test_guest"} + resources = [{"some": "resource"}] + rls = [{"dataset": 1, "clause": "access = 1"}] + return security_manager.create_guest_access_token(user, resources, rls) + + @patch("superset.security.SupersetSecurityManager._get_current_epoch_time") + def test_create_guest_access_token(self, get_time_mock): + now = time.time() + get_time_mock.return_value = now # so we know what it should = + + user = {"username": "test_guest"} + resources = [{"some": "resource"}] + rls = [{"dataset": 1, "clause": "access = 1"}] + token = 
security_manager.create_guest_access_token(user, resources, rls) + aud = get_url_host() + # unfortunately we cannot mock time in the jwt lib + decoded_token = jwt.decode( + token, + self.app.config["GUEST_TOKEN_JWT_SECRET"], + algorithms=[self.app.config["GUEST_TOKEN_JWT_ALGO"]], + audience=aud, + ) + + self.assertEqual(user, decoded_token["user"]) + self.assertEqual(resources, decoded_token["resources"]) + self.assertEqual(now, decoded_token["iat"]) + self.assertEqual(aud, decoded_token["aud"]) + self.assertEqual("guest", decoded_token["type"]) + self.assertEqual( + now + (self.app.config["GUEST_TOKEN_JWT_EXP_SECONDS"]), + decoded_token["exp"], + ) + + def test_get_guest_user(self): + token = self.create_guest_token() + fake_request = FakeRequest() + fake_request.headers[current_app.config["GUEST_TOKEN_HEADER_NAME"]] = token + + guest_user = security_manager.get_guest_user_from_request(fake_request) + + self.assertIsNotNone(guest_user) + self.assertEqual("test_guest", guest_user.username) + + def test_get_guest_user_with_request_form(self): + token = self.create_guest_token() + fake_request = FakeRequest() + fake_request.headers[current_app.config["GUEST_TOKEN_HEADER_NAME"]] = None + fake_request.form["guest_token"] = token + + guest_user = security_manager.get_guest_user_from_request(fake_request) + + self.assertIsNotNone(guest_user) + self.assertEqual("test_guest", guest_user.username) + + @patch("superset.security.SupersetSecurityManager._get_current_epoch_time") + def test_get_guest_user_expired_token(self, get_time_mock): + # make a just-expired token + get_time_mock.return_value = ( + time.time() - (self.app.config["GUEST_TOKEN_JWT_EXP_SECONDS"] * 1000) - 1 + ) + token = self.create_guest_token() + fake_request = FakeRequest() + fake_request.headers[current_app.config["GUEST_TOKEN_HEADER_NAME"]] = token + + guest_user = security_manager.get_guest_user_from_request(fake_request) + + self.assertIsNone(guest_user) + + def test_get_guest_user_no_user(self): + user = None + resources = [{"type": "dashboard", "id": 1}] + rls = {} + token = security_manager.create_guest_access_token(user, resources, rls) + fake_request = FakeRequest() + fake_request.headers[current_app.config["GUEST_TOKEN_HEADER_NAME"]] = token + guest_user = security_manager.get_guest_user_from_request(fake_request) + + self.assertIsNone(guest_user) + self.assertRaisesRegex(ValueError, "Guest token does not contain a user claim") + + def test_get_guest_user_no_resource(self): + user = {"username": "test_guest"} + resources = [] + rls = {} + token = security_manager.create_guest_access_token(user, resources, rls) + fake_request = FakeRequest() + fake_request.headers[current_app.config["GUEST_TOKEN_HEADER_NAME"]] = token + security_manager.get_guest_user_from_request(fake_request) + + self.assertRaisesRegex( + ValueError, "Guest token does not contain a resources claim" + ) + + def test_get_guest_user_not_guest_type(self): + now = time.time() + user = {"username": "test_guest"} + resources = [{"some": "resource"}] + aud = get_url_host() + + claims = { + "user": user, + "resources": resources, + "rls_rules": [], + # standard jwt claims: + "aud": aud, + "iat": now, # issued at + "type": "not_guest", + } + token = jwt.encode( + claims, + self.app.config["GUEST_TOKEN_JWT_SECRET"], + algorithm=self.app.config["GUEST_TOKEN_JWT_ALGO"], + ) + fake_request = FakeRequest() + fake_request.headers[current_app.config["GUEST_TOKEN_HEADER_NAME"]] = token + guest_user = security_manager.get_guest_user_from_request(fake_request) + + 
self.assertIsNone(guest_user) + self.assertRaisesRegex(ValueError, "This is not a guest token.") + + def test_get_guest_user_bad_audience(self): + now = time.time() + user = {"username": "test_guest"} + resources = [{"some": "resource"}] + aud = get_url_host() + + claims = { + "user": user, + "resources": resources, + "rls_rules": [], + # standard jwt claims: + "aud": "bad_audience", + "iat": now, # issued at + "type": "guest", + } + token = jwt.encode( + claims, + self.app.config["GUEST_TOKEN_JWT_SECRET"], + algorithm=self.app.config["GUEST_TOKEN_JWT_ALGO"], + ) + fake_request = FakeRequest() + fake_request.headers[current_app.config["GUEST_TOKEN_HEADER_NAME"]] = token + guest_user = security_manager.get_guest_user_from_request(fake_request) + + self.assertRaisesRegex(jwt.exceptions.InvalidAudienceError, "Invalid audience") + self.assertIsNone(guest_user) + + @patch("superset.security.SupersetSecurityManager._get_current_epoch_time") + def test_create_guest_access_token_callable_audience(self, get_time_mock): + now = time.time() + get_time_mock.return_value = now + app.config["GUEST_TOKEN_JWT_AUDIENCE"] = Mock(return_value="cool_code") + + user = {"username": "test_guest"} + resources = [{"some": "resource"}] + rls = [{"dataset": 1, "clause": "access = 1"}] + token = security_manager.create_guest_access_token(user, resources, rls) + + decoded_token = jwt.decode( + token, + self.app.config["GUEST_TOKEN_JWT_SECRET"], + algorithms=[self.app.config["GUEST_TOKEN_JWT_ALGO"]], + audience="cool_code", + ) + app.config["GUEST_TOKEN_JWT_AUDIENCE"].assert_called_once() + self.assertEqual("cool_code", decoded_token["aud"]) + self.assertEqual("guest", decoded_token["type"]) + app.config["GUEST_TOKEN_JWT_AUDIENCE"] = None diff --git a/tests/integration_tests/sql_lab/__init__.py b/tests/integration_tests/sql_lab/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/sql_lab/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/sql_lab/api_tests.py b/tests/integration_tests/sql_lab/api_tests.py new file mode 100644 index 0000000000000..93beb380f0db6 --- /dev/null +++ b/tests/integration_tests/sql_lab/api_tests.py @@ -0,0 +1,215 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# isort:skip_file +"""Unit tests for Superset""" +import datetime +import json +import random +import csv +import pandas as pd +import io + +import pytest +import prison +from sqlalchemy.sql import func +from unittest import mock + +from tests.integration_tests.test_app import app +from superset import db, sql_lab +from superset.common.db_query_status import QueryStatus +from superset.models.core import Database +from superset.utils.database import get_example_database, get_main_database +from superset.utils import core as utils +from superset.models.sql_lab import Query + +from tests.integration_tests.base_tests import SupersetTestCase + +QUERIES_FIXTURE_COUNT = 10 + + +class TestSqlLabApi(SupersetTestCase): + @mock.patch("superset.sqllab.commands.results.results_backend_use_msgpack", False) + def test_execute_required_params(self): + self.login() + client_id = "{}".format(random.getrandbits(64))[:10] + + data = {"client_id": client_id} + rv = self.client.post( + "/api/v1/sqllab/execute/", + json=data, + ) + failed_resp = { + "message": { + "sql": ["Missing data for required field."], + "database_id": ["Missing data for required field."], + } + } + resp_data = json.loads(rv.data.decode("utf-8")) + self.assertDictEqual(resp_data, failed_resp) + self.assertEqual(rv.status_code, 400) + + data = {"sql": "SELECT 1", "client_id": client_id} + rv = self.client.post( + "/api/v1/sqllab/execute/", + json=data, + ) + failed_resp = {"message": {"database_id": ["Missing data for required field."]}} + resp_data = json.loads(rv.data.decode("utf-8")) + self.assertDictEqual(resp_data, failed_resp) + self.assertEqual(rv.status_code, 400) + + data = {"database_id": 1, "client_id": client_id} + rv = self.client.post( + "/api/v1/sqllab/execute/", + json=data, + ) + failed_resp = {"message": {"sql": ["Missing data for required field."]}} + resp_data = json.loads(rv.data.decode("utf-8")) + self.assertDictEqual(resp_data, failed_resp) + self.assertEqual(rv.status_code, 400) + + @mock.patch("superset.sqllab.commands.results.results_backend_use_msgpack", False) + def test_execute_valid_request(self) -> None: + from superset import sql_lab as core + + core.results_backend = mock.Mock() + core.results_backend.get.return_value = {} + + self.login() + client_id = "{}".format(random.getrandbits(64))[:10] + + data = {"sql": "SELECT 1", "database_id": 1, "client_id": client_id} + rv = self.client.post( + "/api/v1/sqllab/execute/", + json=data, + ) + resp_data = json.loads(rv.data.decode("utf-8")) + self.assertEqual(resp_data.get("status"), "success") + self.assertEqual(rv.status_code, 200) + + @mock.patch( + "tests.integration_tests.superset_test_custom_template_processors.datetime" + ) + @mock.patch("superset.sqllab.api.get_sql_results") + def test_execute_custom_templated(self, sql_lab_mock, mock_dt) -> None: + mock_dt.utcnow = mock.Mock(return_value=datetime.datetime(1970, 1, 1)) + self.login() + sql = "SELECT '$DATE()' as test" + resp = { + "status": QueryStatus.SUCCESS, + "query": {"rows": 1}, + "data": [{"test": "'1970-01-01'"}], + } + sql_lab_mock.return_value = resp + + dbobj = 
self.create_fake_db_for_macros() + json_payload = dict(database_id=dbobj.id, sql=sql) + self.get_json_resp( + "/api/v1/sqllab/execute/", raise_on_error=False, json_=json_payload + ) + assert sql_lab_mock.called + self.assertEqual(sql_lab_mock.call_args[0][1], "SELECT '1970-01-01' as test") + + self.delete_fake_db_for_macros() + + @mock.patch("superset.sqllab.commands.results.results_backend_use_msgpack", False) + def test_get_results_with_display_limit(self): + from superset.sqllab.commands import results as command + + command.results_backend = mock.Mock() + self.login() + + data = [{"col_0": i} for i in range(100)] + payload = { + "status": QueryStatus.SUCCESS, + "query": {"rows": 100}, + "data": data, + } + # limit results to 1 + expected_key = {"status": "success", "query": {"rows": 100}, "data": data} + limited_data = data[:1] + expected_limited = { + "status": "success", + "query": {"rows": 100}, + "data": limited_data, + "displayLimitReached": True, + } + + query_mock = mock.Mock() + query_mock.sql = "SELECT *" + query_mock.database = 1 + query_mock.schema = "superset" + + # do not apply msgpack serialization + use_msgpack = app.config["RESULTS_BACKEND_USE_MSGPACK"] + app.config["RESULTS_BACKEND_USE_MSGPACK"] = False + serialized_payload = sql_lab._serialize_payload(payload, False) + compressed = utils.zlib_compress(serialized_payload) + command.results_backend.get.return_value = compressed + + with mock.patch("superset.sqllab.commands.results.db") as mock_superset_db: + mock_superset_db.session.query().filter_by().one_or_none.return_value = ( + query_mock + ) + # get all results + arguments = {"key": "key"} + result_key = json.loads( + self.get_resp(f"/api/v1/sqllab/results/?q={prison.dumps(arguments)}") + ) + arguments = {"key": "key", "rows": 1} + result_limited = json.loads( + self.get_resp(f"/api/v1/sqllab/results/?q={prison.dumps(arguments)}") + ) + + self.assertEqual(result_key, expected_key) + self.assertEqual(result_limited, expected_limited) + + app.config["RESULTS_BACKEND_USE_MSGPACK"] = use_msgpack + + @mock.patch("superset.models.sql_lab.Query.raise_for_access", lambda _: None) + @mock.patch("superset.models.core.Database.get_df") + def test_export_results(self, get_df_mock: mock.Mock) -> None: + self.login() + + database = get_example_database() + query_obj = Query( + client_id="test", + database=database, + tab_name="test_tab", + sql_editor_id="test_editor_id", + sql="select * from bar", + select_sql=None, + executed_sql="select * from bar limit 2", + limit=100, + select_as_cta=False, + rows=104, + error_message="none", + results_key="test_abc", + ) + + db.session.add(query_obj) + db.session.commit() + + get_df_mock.return_value = pd.DataFrame({"foo": [1, 2, 3]}) + + resp = self.get_resp("/api/v1/sqllab/export/test/") + data = csv.reader(io.StringIO(resp)) + expected_data = csv.reader(io.StringIO("foo\n1\n2")) + + self.assertEqual(list(expected_data), list(data)) + db.session.delete(query_obj) + db.session.commit() diff --git a/tests/integration_tests/sql_lab/commands_tests.py b/tests/integration_tests/sql_lab/commands_tests.py new file mode 100644 index 0000000000000..cf0aebf001e01 --- /dev/null +++ b/tests/integration_tests/sql_lab/commands_tests.py @@ -0,0 +1,293 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from unittest import mock, skip +from unittest.mock import Mock, patch + +import pandas as pd +import pytest + +from superset import db, sql_lab +from superset.common.db_query_status import QueryStatus +from superset.errors import ErrorLevel, SupersetErrorType +from superset.exceptions import ( + SerializationError, + SupersetError, + SupersetErrorException, + SupersetSecurityException, +) +from superset.models.core import Database +from superset.models.sql_lab import Query +from superset.sqllab.commands import export, results +from superset.sqllab.limiting_factor import LimitingFactor +from superset.utils import core as utils +from superset.utils.database import get_example_database +from tests.integration_tests.base_tests import SupersetTestCase + + +class TestSqlResultExportCommand(SupersetTestCase): + @pytest.fixture() + def create_database_and_query(self): + with self.create_app().app_context(): + database = get_example_database() + query_obj = Query( + client_id="test", + database=database, + tab_name="test_tab", + sql_editor_id="test_editor_id", + sql="select * from bar", + select_sql="select * from bar", + executed_sql="select * from bar", + limit=100, + select_as_cta=False, + rows=104, + error_message="none", + results_key="abc_query", + ) + + db.session.add(query_obj) + db.session.commit() + + yield + + db.session.delete(query_obj) + db.session.commit() + + @pytest.mark.usefixtures("create_database_and_query") + def test_validation_query_not_found(self) -> None: + command = export.SqlResultExportCommand("asdf") + + with pytest.raises(SupersetErrorException) as ex_info: + command.run() + assert ex_info.value.error.error_type == SupersetErrorType.RESULTS_BACKEND_ERROR + + @pytest.mark.usefixtures("create_database_and_query") + def test_validation_invalid_access(self) -> None: + command = export.SqlResultExportCommand("test") + + with mock.patch( + "superset.security_manager.raise_for_access", + side_effect=SupersetSecurityException( + SupersetError( + "dummy", + SupersetErrorType.DATASOURCE_SECURITY_ACCESS_ERROR, + ErrorLevel.ERROR, + ) + ), + ): + with pytest.raises(SupersetErrorException) as ex_info: + command.run() + assert ( + ex_info.value.error.error_type + == SupersetErrorType.QUERY_SECURITY_ACCESS_ERROR + ) + + @pytest.mark.usefixtures("create_database_and_query") + @patch("superset.models.sql_lab.Query.raise_for_access", lambda _: None) + @patch("superset.models.core.Database.get_df") + def test_run_no_results_backend_select_sql(self, get_df_mock: Mock) -> None: + command = export.SqlResultExportCommand("test") + + get_df_mock.return_value = pd.DataFrame({"foo": [1, 2, 3]}) + result = command.run() + + assert result["data"] == "foo\n1\n2\n3\n" + assert result["count"] == 3 + assert result["query"].client_id == "test" + + @pytest.mark.usefixtures("create_database_and_query") + @patch("superset.models.sql_lab.Query.raise_for_access", lambda _: None) + @patch("superset.models.core.Database.get_df") + def 
test_run_no_results_backend_executed_sql(self, get_df_mock: Mock) -> None: + query_obj = db.session.query(Query).filter_by(client_id="test").one() + query_obj.executed_sql = "select * from bar limit 2" + query_obj.select_sql = None + db.session.commit() + + command = export.SqlResultExportCommand("test") + + get_df_mock.return_value = pd.DataFrame({"foo": [1, 2, 3]}) + result = command.run() + + assert result["data"] == "foo\n1\n2\n" + assert result["count"] == 2 + assert result["query"].client_id == "test" + + @pytest.mark.usefixtures("create_database_and_query") + @patch("superset.models.sql_lab.Query.raise_for_access", lambda _: None) + @patch("superset.models.core.Database.get_df") + def test_run_no_results_backend_executed_sql_limiting_factor( + self, get_df_mock: Mock + ) -> None: + query_obj = db.session.query(Query).filter_by(results_key="abc_query").one() + query_obj.executed_sql = "select * from bar limit 2" + query_obj.select_sql = None + query_obj.limiting_factor = LimitingFactor.DROPDOWN + db.session.commit() + + command = export.SqlResultExportCommand("test") + + get_df_mock.return_value = pd.DataFrame({"foo": [1, 2, 3]}) + + result = command.run() + + assert result["data"] == "foo\n1\n" + assert result["count"] == 1 + assert result["query"].client_id == "test" + + @pytest.mark.usefixtures("create_database_and_query") + @patch("superset.models.sql_lab.Query.raise_for_access", lambda _: None) + @patch("superset.sqllab.commands.export.results_backend_use_msgpack", False) + def test_run_with_results_backend(self) -> None: + command = export.SqlResultExportCommand("test") + + data = [{"foo": i} for i in range(5)] + payload = { + "columns": [{"name": "foo"}], + "data": data, + } + serialized_payload = sql_lab._serialize_payload(payload, False) + compressed = utils.zlib_compress(serialized_payload) + + export.results_backend = mock.Mock() + export.results_backend.get.return_value = compressed + + result = command.run() + + assert result["data"] == "foo\n0\n1\n2\n3\n4\n" + assert result["count"] == 5 + assert result["query"].client_id == "test" + + +class TestSqlExecutionResultsCommand(SupersetTestCase): + @pytest.fixture() + def create_database_and_query(self): + with self.create_app().app_context(): + database = get_example_database() + query_obj = Query( + client_id="test", + database=database, + tab_name="test_tab", + sql_editor_id="test_editor_id", + sql="select * from bar", + select_sql="select * from bar", + executed_sql="select * from bar", + limit=100, + select_as_cta=False, + rows=104, + error_message="none", + results_key="abc_query", + ) + + db.session.add(query_obj) + db.session.commit() + + yield + + db.session.delete(query_obj) + db.session.commit() + + @patch("superset.sqllab.commands.results.results_backend_use_msgpack", False) + @patch("superset.sqllab.commands.results.results_backend", None) + def test_validation_no_results_backend(self) -> None: + command = results.SqlExecutionResultsCommand("test", 1000) + + with pytest.raises(SupersetErrorException) as ex_info: + command.run() + assert ( + ex_info.value.error.error_type + == SupersetErrorType.RESULTS_BACKEND_NOT_CONFIGURED_ERROR + ) + + @patch("superset.sqllab.commands.results.results_backend_use_msgpack", False) + def test_validation_data_cannot_be_retrieved(self) -> None: + results.results_backend = mock.Mock() + results.results_backend.get.return_value = None + + command = results.SqlExecutionResultsCommand("test", 1000) + + with pytest.raises(SupersetErrorException) as ex_info: + command.run() + assert 
ex_info.value.error.error_type == SupersetErrorType.RESULTS_BACKEND_ERROR + + @patch("superset.sqllab.commands.results.results_backend_use_msgpack", False) + def test_validation_data_not_found(self) -> None: + data = [{"col_0": i} for i in range(100)] + payload = { + "status": QueryStatus.SUCCESS, + "query": {"rows": 100}, + "data": data, + } + serialized_payload = sql_lab._serialize_payload(payload, False) + compressed = utils.zlib_compress(serialized_payload) + + results.results_backend = mock.Mock() + results.results_backend.get.return_value = compressed + + command = results.SqlExecutionResultsCommand("test", 1000) + + with pytest.raises(SupersetErrorException) as ex_info: + command.run() + assert ex_info.value.error.error_type == SupersetErrorType.RESULTS_BACKEND_ERROR + + @pytest.mark.usefixtures("create_database_and_query") + @patch("superset.sqllab.commands.results.results_backend_use_msgpack", False) + def test_validation_query_not_found(self) -> None: + data = [{"col_0": i} for i in range(104)] + payload = { + "status": QueryStatus.SUCCESS, + "query": {"rows": 104}, + "data": data, + } + serialized_payload = sql_lab._serialize_payload(payload, False) + compressed = utils.zlib_compress(serialized_payload) + + results.results_backend = mock.Mock() + results.results_backend.get.return_value = compressed + + with mock.patch( + "superset.views.utils._deserialize_results_payload", + side_effect=SerializationError(), + ): + with pytest.raises(SupersetErrorException) as ex_info: + command = results.SqlExecutionResultsCommand("test_other", 1000) + command.run() + assert ( + ex_info.value.error.error_type + == SupersetErrorType.RESULTS_BACKEND_ERROR + ) + + @pytest.mark.usefixtures("create_database_and_query") + @patch("superset.sqllab.commands.results.results_backend_use_msgpack", False) + def test_run_succeeds(self) -> None: + data = [{"col_0": i} for i in range(104)] + payload = { + "status": QueryStatus.SUCCESS, + "query": {"rows": 104}, + "data": data, + } + serialized_payload = sql_lab._serialize_payload(payload, False) + compressed = utils.zlib_compress(serialized_payload) + + results.results_backend = mock.Mock() + results.results_backend.get.return_value = compressed + + command = results.SqlExecutionResultsCommand("abc_query", 1000) + result = command.run() + + assert result.get("status") == "success" + assert result["query"].get("rows") == 104 + assert result.get("data") == data diff --git a/tests/integration_tests/sql_lab/conftest.py b/tests/integration_tests/sql_lab/conftest.py new file mode 100644 index 0000000000000..8b4a0e63a5aac --- /dev/null +++ b/tests/integration_tests/sql_lab/conftest.py @@ -0,0 +1,71 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
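For context, the results-backend mocks in the tests above hand the command a zlib-compressed, JSON-serialized payload, which the code under test is expected to decompress and deserialize before applying any row limit. Below is a minimal sketch of that round trip using only the standard library; the helper names are illustrative stand-ins, not Superset's own sql_lab._serialize_payload / utils.zlib_compress.

import json
import zlib

def serialize_payload(payload: dict) -> bytes:
    # JSON-serialize then compress, roughly what the tests build before
    # assigning it to results_backend.get.return_value
    return zlib.compress(json.dumps(payload).encode("utf-8"))

def deserialize_payload(blob: bytes) -> dict:
    # The command under test reverses the process before limiting rows
    return json.loads(zlib.decompress(blob).decode("utf-8"))

payload = {
    "status": "success",
    "query": {"rows": 3},
    "data": [{"col_0": i} for i in range(3)],
}
assert deserialize_payload(serialize_payload(payload)) == payload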
+from typing import Callable, ContextManager + +import pytest +from flask_appbuilder.security.sqla import models as ab_models + +from superset import db +from superset.models.sql_lab import Query +from superset.utils.core import shortid +from superset.utils.database import get_example_database + + +def force_async_run(allow_run_async: bool): + example_db = get_example_database() + orig_allow_run_async = example_db.allow_run_async + + example_db.allow_run_async = allow_run_async + db.session.commit() + + yield example_db + + example_db.allow_run_async = orig_allow_run_async + db.session.commit() + + +@pytest.fixture +def non_async_example_db(app_context): + gen = force_async_run(False) + yield next(gen) + try: + next(gen) + except StopIteration: + pass + + +@pytest.fixture +def async_example_db(app_context): + gen = force_async_run(True) + yield next(gen) + try: + next(gen) + except StopIteration: + pass + + +@pytest.fixture +def example_query(get_or_create_user: Callable[..., ContextManager[ab_models.User]]): + with get_or_create_user("sqllab-test-user") as user: + query = Query( + client_id=shortid()[:10], database=get_example_database(), user=user + ) + db.session.add(query) + db.session.commit() + yield query + db.session.delete(query) + db.session.commit() diff --git a/tests/integration_tests/sql_lab/test_execute_sql_statements.py b/tests/integration_tests/sql_lab/test_execute_sql_statements.py new file mode 100644 index 0000000000000..48fcfe31f03cb --- /dev/null +++ b/tests/integration_tests/sql_lab/test_execute_sql_statements.py @@ -0,0 +1,56 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from superset import app, db +from superset.common.db_query_status import QueryStatus +from superset.models.core import Database +from superset.models.sql_lab import Query +from superset.sql_lab import execute_sql_statements +from superset.utils.dates import now_as_float + + +def test_non_async_execute(non_async_example_db: Database, example_query: Query): + """Test query.tracking_url is attached for Presto and Hive queries""" + result = execute_sql_statements( + example_query.id, + "select 1 as foo;", + store_results=False, + return_results=True, + session=db.session, + start_time=now_as_float(), + expand_data=True, + log_params=dict(), + ) + assert result + assert result["query_id"] == example_query.id + assert result["status"] == QueryStatus.SUCCESS + assert result["data"] == [{"foo": 1}] + + # should attach apply tracking URL for Presto & Hive + if non_async_example_db.db_engine_spec.engine == "presto": + assert example_query.tracking_url + assert "/ui/query.html?" 
in example_query.tracking_url + + app.config["TRACKING_URL_TRANSFORMER"] = lambda url, query: url.replace( + "/ui/query.html?", f"/{query.client_id}/" + ) + assert f"/{example_query.client_id}/" in example_query.tracking_url + + app.config["TRACKING_URL_TRANSFORMER"] = lambda url: url + "&foo=bar" + assert example_query.tracking_url.endswith("&foo=bar") + + if non_async_example_db.db_engine_spec.engine_name == "hive": + assert example_query.tracking_url_raw diff --git a/tests/integration_tests/sql_validator_tests.py b/tests/integration_tests/sql_validator_tests.py new file mode 100644 index 0000000000000..d2f6e7108d42a --- /dev/null +++ b/tests/integration_tests/sql_validator_tests.py @@ -0,0 +1,285 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# isort:skip_file +"""Unit tests for Sql Lab""" +import unittest +from unittest.mock import MagicMock, patch + +import pytest +from pyhive.exc import DatabaseError + +from superset import app +from superset.sql_validators import SQLValidationAnnotation +from superset.sql_validators.base import BaseSQLValidator +from superset.sql_validators.postgres import PostgreSQLValidator +from superset.sql_validators.presto_db import ( + PrestoDBSQLValidator, + PrestoSQLValidationError, +) +from superset.utils.database import get_example_database + +from .base_tests import SupersetTestCase + +PRESTO_SQL_VALIDATORS_BY_ENGINE = { + "presto": "PrestoDBSQLValidator", + "sqlite": "PrestoDBSQLValidator", + "postgresql": "PrestoDBSQLValidator", + "mysql": "PrestoDBSQLValidator", +} + + +class TestSqlValidatorEndpoint(SupersetTestCase): + """Testing for Sql Lab querytext validation endpoint""" + + def tearDown(self): + self.logout() + + @patch.dict( + "superset.config.SQL_VALIDATORS_BY_ENGINE", + {}, + clear=True, + ) + def test_validate_sql_endpoint_noconfig(self): + """Assert that validate_sql_json errors out when no validators are + configured for any db""" + self.login("admin") + + resp = self.validate_sql( + "SELECT * FROM birth_names", client_id="1", raise_on_error=False + ) + self.assertIn("error", resp) + self.assertIn("no SQL validator is configured", resp["error"]) + + @patch("superset.views.core.get_validator_by_name") + @patch.dict( + "superset.config.SQL_VALIDATORS_BY_ENGINE", + PRESTO_SQL_VALIDATORS_BY_ENGINE, + clear=True, + ) + def test_validate_sql_endpoint_mocked(self, get_validator_by_name): + """Assert that, with a mocked validator, annotations make it back out + from the validate_sql_json endpoint as a list of json dictionaries""" + if get_example_database().backend == "hive": + pytest.skip("Hive validator is not implemented") + self.login("admin") + + validator = MagicMock() + get_validator_by_name.return_value = validator + validator.validate.return_value = [ + SQLValidationAnnotation( + message="I don't know 
what I expected, but it wasn't this", + line_number=4, + start_column=12, + end_column=42, + ) + ] + + resp = self.validate_sql( + "SELECT * FROM somewhere_over_the_rainbow", + client_id="1", + raise_on_error=False, + ) + + self.assertEqual(1, len(resp)) + self.assertIn("expected,", resp[0]["message"]) + + @patch("superset.views.core.get_validator_by_name") + @patch.dict( + "superset.config.SQL_VALIDATORS_BY_ENGINE", + PRESTO_SQL_VALIDATORS_BY_ENGINE, + clear=True, + ) + def test_validate_sql_endpoint_mocked_params(self, get_validator_by_name): + """Assert that, with a mocked validator, annotations make it back out + from the validate_sql_json endpoint as a list of json dictionaries""" + if get_example_database().backend == "hive": + pytest.skip("Hive validator is not implemented") + self.login("admin") + + validator = MagicMock() + get_validator_by_name.return_value = validator + validator.validate.return_value = [ + SQLValidationAnnotation( + message="This worked", + line_number=4, + start_column=12, + end_column=42, + ) + ] + + resp = self.validate_sql( + "SELECT * FROM somewhere_over_the_rainbow", + client_id="1", + raise_on_error=False, + template_params="null", + ) + + self.assertEqual(1, len(resp)) + self.assertNotIn("error,", resp[0]["message"]) + + @patch("superset.views.core.get_validator_by_name") + @patch.dict( + "superset.config.SQL_VALIDATORS_BY_ENGINE", + PRESTO_SQL_VALIDATORS_BY_ENGINE, + clear=True, + ) + def test_validate_sql_endpoint_failure(self, get_validator_by_name): + """Assert that validate_sql_json errors out when the selected validator + raises an unexpected exception""" + self.login("admin") + + validator = MagicMock() + get_validator_by_name.return_value = validator + validator.validate.side_effect = Exception("Kaboom!") + + resp = self.validate_sql( + "SELECT * FROM birth_names", client_id="1", raise_on_error=False + ) + # TODO(bkyryliuk): properly handle hive error + if get_example_database().backend == "hive": + assert resp["error"] == "no SQL validator is configured for hive" + else: + self.assertIn("error", resp) + self.assertIn("Kaboom!", resp["error"]) + + +class TestBaseValidator(SupersetTestCase): + """Testing for the base sql validator""" + + def setUp(self): + self.validator = BaseSQLValidator + + def test_validator_excepts(self): + with self.assertRaises(NotImplementedError): + self.validator.validate(None, None, None) + + +class TestPrestoValidator(SupersetTestCase): + """Testing for the prestodb sql validator""" + + def setUp(self): + self.validator = PrestoDBSQLValidator + self.database = MagicMock() + self.database_engine = ( + self.database.get_sqla_engine_with_context.return_value.__enter__.return_value + ) + self.database_conn = self.database_engine.raw_connection.return_value + self.database_cursor = self.database_conn.cursor.return_value + self.database_cursor.poll.return_value = None + + def tearDown(self): + self.logout() + + PRESTO_ERROR_TEMPLATE = { + "errorLocation": {"lineNumber": 10, "columnNumber": 20}, + "message": "your query isn't how I like it", + } + + @patch("superset.utils.core.g") + def test_validator_success(self, flask_g): + flask_g.user.username = "nobody" + sql = "SELECT 1 FROM default.notarealtable" + schema = "default" + + errors = self.validator.validate(sql, schema, self.database) + + self.assertEqual([], errors) + + @patch("superset.utils.core.g") + def test_validator_db_error(self, flask_g): + flask_g.user.username = "nobody" + sql = "SELECT 1 FROM default.notarealtable" + schema = "default" + + fetch_fn = 
self.database.db_engine_spec.fetch_data + fetch_fn.side_effect = DatabaseError("dummy db error") + + with self.assertRaises(PrestoSQLValidationError): + self.validator.validate(sql, schema, self.database) + + @patch("superset.utils.core.g") + def test_validator_unexpected_error(self, flask_g): + flask_g.user.username = "nobody" + sql = "SELECT 1 FROM default.notarealtable" + schema = "default" + + fetch_fn = self.database.db_engine_spec.fetch_data + fetch_fn.side_effect = Exception("a mysterious failure") + + with self.assertRaises(Exception): + self.validator.validate(sql, schema, self.database) + + @patch("superset.utils.core.g") + def test_validator_query_error(self, flask_g): + flask_g.user.username = "nobody" + sql = "SELECT 1 FROM default.notarealtable" + schema = "default" + + fetch_fn = self.database.db_engine_spec.fetch_data + fetch_fn.side_effect = DatabaseError(self.PRESTO_ERROR_TEMPLATE) + + errors = self.validator.validate(sql, schema, self.database) + + self.assertEqual(1, len(errors)) + + @patch.dict( + "superset.config.SQL_VALIDATORS_BY_ENGINE", + {}, + clear=True, + ) + def test_validate_sql_endpoint(self): + self.login("admin") + # NB this is effectively an integration test -- when there's a default + # validator for sqlite, this test will fail because the validator + # will no longer error out. + resp = self.validate_sql( + "SELECT * FROM birth_names", client_id="1", raise_on_error=False + ) + self.assertIn("error", resp) + self.assertIn("no SQL validator is configured", resp["error"]) + + +class TestPostgreSQLValidator(SupersetTestCase): + def test_valid_syntax(self): + if get_example_database().backend != "postgresql": + return + + mock_database = MagicMock() + annotations = PostgreSQLValidator.validate( + sql='SELECT 1, "col" FROM "table"', schema="", database=mock_database + ) + assert annotations == [] + + def test_invalid_syntax(self): + if get_example_database().backend != "postgresql": + return + + mock_database = MagicMock() + annotations = PostgreSQLValidator.validate( + sql='SELECT 1, "col"\nFROOM "table"', schema="", database=mock_database + ) + + assert len(annotations) == 1 + annotation = annotations[0] + assert annotation.line_number == 2 + assert annotation.start_column is None + assert annotation.end_column is None + assert annotation.message == 'ERROR: syntax error at or near """' + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/integration_tests/sqla_models_tests.py b/tests/integration_tests/sqla_models_tests.py new file mode 100644 index 0000000000000..4003913516fee --- /dev/null +++ b/tests/integration_tests/sqla_models_tests.py @@ -0,0 +1,888 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
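The Presto validator tests above lean on MagicMock auto-creating every attribute in the engine/connection/cursor chain, so only the leaf behaviours (poll, fetch_data) need to be set explicitly. A minimal sketch of that pattern follows, with illustrative names and a plain RuntimeError standing in for pyhive's DatabaseError; it is not the validator itself.

from unittest.mock import MagicMock

database = MagicMock()
# Each attribute access or call returns another MagicMock, so the chain
# mirrors: with database.get_sqla_engine_with_context() as engine: ...
engine = database.get_sqla_engine_with_context.return_value.__enter__.return_value
cursor = engine.raw_connection.return_value.cursor.return_value
cursor.poll.return_value = None  # "nothing left to poll" is treated as success

# Simulating a failing fetch: a side_effect raises instead of returning data
database.db_engine_spec.fetch_data.side_effect = RuntimeError("dummy db error")
try:
    database.db_engine_spec.fetch_data(cursor)
except RuntimeError as ex:
    assert "dummy db error" in str(ex)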
+# isort:skip_file +import re +from datetime import datetime +from typing import Any, Dict, List, NamedTuple, Optional, Pattern, Tuple, Union +from unittest.mock import patch +import pytest + +import numpy as np +import pandas as pd +from flask import Flask +from pytest_mock import MockFixture +from sqlalchemy.sql import text +from sqlalchemy.sql.elements import TextClause + +from superset import db +from superset.connectors.sqla.models import SqlaTable, TableColumn, SqlMetric +from superset.constants import EMPTY_STRING, NULL_STRING +from superset.db_engine_specs.bigquery import BigQueryEngineSpec +from superset.db_engine_specs.druid import DruidEngineSpec +from superset.exceptions import QueryObjectValidationError, SupersetSecurityException +from superset.models.core import Database +from superset.utils.core import ( + AdhocMetricExpressionType, + FilterOperator, + GenericDataType, +) +from superset.utils.database import get_example_database +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) +from tests.integration_tests.test_app import app + +from .base_tests import SupersetTestCase +from .conftest import only_postgresql + +VIRTUAL_TABLE_INT_TYPES: Dict[str, Pattern[str]] = { + "hive": re.compile(r"^INT_TYPE$"), + "mysql": re.compile("^LONGLONG$"), + "postgresql": re.compile(r"^INTEGER$"), + "presto": re.compile(r"^INTEGER$"), + "sqlite": re.compile(r"^INT$"), +} + +VIRTUAL_TABLE_STRING_TYPES: Dict[str, Pattern[str]] = { + "hive": re.compile(r"^STRING_TYPE$"), + "mysql": re.compile(r"^VAR_STRING$"), + "postgresql": re.compile(r"^STRING$"), + "presto": re.compile(r"^VARCHAR*"), + "sqlite": re.compile(r"^STRING$"), +} + + +class FilterTestCase(NamedTuple): + column: str + operator: str + value: Union[float, int, List[Any], str] + expected: Union[str, List[str]] + + +class TestDatabaseModel(SupersetTestCase): + def test_is_time_druid_time_col(self): + """Druid has a special __time column""" + + database = Database(database_name="druid_db", sqlalchemy_uri="druid://db") + tbl = SqlaTable(table_name="druid_tbl", database=database) + col = TableColumn(column_name="__time", type="INTEGER", table=tbl) + self.assertEqual(col.is_dttm, None) + DruidEngineSpec.alter_new_orm_column(col) + self.assertEqual(col.is_dttm, True) + + col = TableColumn(column_name="__not_time", type="INTEGER", table=tbl) + self.assertEqual(col.is_temporal, False) + + def test_temporal_varchar(self): + """Ensure a column with is_dttm set to true evaluates to is_temporal == True""" + + database = get_example_database() + tbl = SqlaTable(table_name="test_tbl", database=database) + col = TableColumn(column_name="ds", type="VARCHAR", table=tbl) + # by default, VARCHAR should not be assumed to be temporal + assert col.is_temporal is False + # changing to `is_dttm = True`, calling `is_temporal` should return True + col.is_dttm = True + assert col.is_temporal is True + + def test_db_column_types(self): + test_cases: Dict[str, GenericDataType] = { + # string + "CHAR": GenericDataType.STRING, + "VARCHAR": GenericDataType.STRING, + "NVARCHAR": GenericDataType.STRING, + "STRING": GenericDataType.STRING, + "TEXT": GenericDataType.STRING, + "NTEXT": GenericDataType.STRING, + # numeric + "INTEGER": GenericDataType.NUMERIC, + "BIGINT": GenericDataType.NUMERIC, + "DECIMAL": GenericDataType.NUMERIC, + # temporal + "DATE": GenericDataType.TEMPORAL, + "DATETIME": GenericDataType.TEMPORAL, + "TIME": GenericDataType.TEMPORAL, + "TIMESTAMP": 
GenericDataType.TEMPORAL, + } + + tbl = SqlaTable(table_name="col_type_test_tbl", database=get_example_database()) + for str_type, db_col_type in test_cases.items(): + col = TableColumn(column_name="foo", type=str_type, table=tbl) + self.assertEqual(col.is_temporal, db_col_type == GenericDataType.TEMPORAL) + self.assertEqual(col.is_numeric, db_col_type == GenericDataType.NUMERIC) + self.assertEqual(col.is_string, db_col_type == GenericDataType.STRING) + + for str_type, db_col_type in test_cases.items(): + col = TableColumn(column_name="foo", type=str_type, table=tbl, is_dttm=True) + self.assertTrue(col.is_temporal) + + @patch("superset.jinja_context.g") + def test_extra_cache_keys(self, flask_g): + flask_g.user.username = "abc" + base_query_obj = { + "granularity": None, + "from_dttm": None, + "to_dttm": None, + "groupby": ["user"], + "metrics": [], + "is_timeseries": False, + "filter": [], + } + + # Table with Jinja callable. + table1 = SqlaTable( + table_name="test_has_extra_cache_keys_table", + sql="SELECT '{{ current_username() }}' as user", + database=get_example_database(), + ) + + query_obj = dict(**base_query_obj, extras={}) + extra_cache_keys = table1.get_extra_cache_keys(query_obj) + self.assertTrue(table1.has_extra_cache_key_calls(query_obj)) + assert extra_cache_keys == ["abc"] + + # Table with Jinja callable disabled. + table2 = SqlaTable( + table_name="test_has_extra_cache_keys_disabled_table", + sql="SELECT '{{ current_username(False) }}' as user", + database=get_example_database(), + ) + query_obj = dict(**base_query_obj, extras={}) + extra_cache_keys = table2.get_extra_cache_keys(query_obj) + self.assertTrue(table2.has_extra_cache_key_calls(query_obj)) + self.assertListEqual(extra_cache_keys, []) + + # Table with no Jinja callable. + query = "SELECT 'abc' as user" + table3 = SqlaTable( + table_name="test_has_no_extra_cache_keys_table", + sql=query, + database=get_example_database(), + ) + + query_obj = dict(**base_query_obj, extras={"where": "(user != 'abc')"}) + extra_cache_keys = table3.get_extra_cache_keys(query_obj) + self.assertFalse(table3.has_extra_cache_key_calls(query_obj)) + self.assertListEqual(extra_cache_keys, []) + + # With Jinja callable in SQL expression. 
+ query_obj = dict( + **base_query_obj, extras={"where": "(user != '{{ current_username() }}')"} + ) + extra_cache_keys = table3.get_extra_cache_keys(query_obj) + self.assertTrue(table3.has_extra_cache_key_calls(query_obj)) + assert extra_cache_keys == ["abc"] + + # Cleanup + for table in [table1, table2, table3]: + db.session.delete(table) + db.session.commit() + + @patch("superset.jinja_context.g") + def test_jinja_metrics_and_calc_columns(self, flask_g): + flask_g.user.username = "abc" + base_query_obj = { + "granularity": None, + "from_dttm": None, + "to_dttm": None, + "columns": [ + "user", + "expr", + { + "hasCustomLabel": True, + "label": "adhoc_column", + "sqlExpression": "'{{ 'foo_' + time_grain }}'", + }, + ], + "metrics": [ + { + "hasCustomLabel": True, + "label": "adhoc_metric", + "expressionType": AdhocMetricExpressionType.SQL, + "sqlExpression": "SUM(case when user = '{{ 'user_' + " + "current_username() }}' then 1 else 0 end)", + }, + "count_timegrain", + ], + "is_timeseries": False, + "filter": [], + "extras": {"time_grain_sqla": "P1D"}, + } + + table = SqlaTable( + table_name="test_has_jinja_metric_and_expr", + sql="SELECT '{{ 'user_' + current_username() }}' as user, " + "'{{ 'xyz_' + time_grain }}' as time_grain", + database=get_example_database(), + ) + TableColumn( + column_name="expr", + expression="case when '{{ current_username() }}' = 'abc' " + "then 'yes' else 'no' end", + type="VARCHAR(100)", + table=table, + ) + SqlMetric( + metric_name="count_timegrain", + expression="count('{{ 'bar_' + time_grain }}')", + table=table, + ) + db.session.commit() + + sqla_query = table.get_sqla_query(**base_query_obj) + query = table.database.compile_sqla_query(sqla_query.sqla_query) + # assert virtual dataset + assert "SELECT 'user_abc' as user, 'xyz_P1D' as time_grain" in query + # assert dataset calculated column + assert "case when 'abc' = 'abc' then 'yes' else 'no' end AS expr" in query + # assert adhoc column + assert "'foo_P1D'" in query + # assert dataset saved metric + assert "count('bar_P1D')" in query + # assert adhoc metric + assert "SUM(case when user = 'user_abc' then 1 else 0 end)" in query + # Cleanup + db.session.delete(table) + db.session.commit() + + def test_adhoc_metrics_and_calc_columns(self): + base_query_obj = { + "granularity": None, + "from_dttm": None, + "to_dttm": None, + "groupby": ["user", "expr"], + "metrics": [ + { + "expressionType": AdhocMetricExpressionType.SQL, + "sqlExpression": "(SELECT (SELECT * from birth_names) " + "from test_validate_adhoc_sql)", + "label": "adhoc_metrics", + } + ], + "is_timeseries": False, + "filter": [], + } + + table = SqlaTable( + table_name="test_validate_adhoc_sql", database=get_example_database() + ) + db.session.commit() + + with pytest.raises(QueryObjectValidationError): + table.get_sqla_query(**base_query_obj) + # Cleanup + db.session.delete(table) + db.session.commit() + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_where_operators(self): + filters: Tuple[FilterTestCase, ...] 
= ( + FilterTestCase("num", FilterOperator.IS_NULL, "", "IS NULL"), + FilterTestCase("num", FilterOperator.IS_NOT_NULL, "", "IS NOT NULL"), + # Some db backends translate true/false to 1/0 + FilterTestCase("num", FilterOperator.IS_TRUE, "", ["IS 1", "IS true"]), + FilterTestCase("num", FilterOperator.IS_FALSE, "", ["IS 0", "IS false"]), + FilterTestCase("num", FilterOperator.GREATER_THAN, 0, "> 0"), + FilterTestCase("num", FilterOperator.GREATER_THAN_OR_EQUALS, 0, ">= 0"), + FilterTestCase("num", FilterOperator.LESS_THAN, 0, "< 0"), + FilterTestCase("num", FilterOperator.LESS_THAN_OR_EQUALS, 0, "<= 0"), + FilterTestCase("num", FilterOperator.EQUALS, 0, "= 0"), + FilterTestCase("num", FilterOperator.NOT_EQUALS, 0, "!= 0"), + FilterTestCase("num", FilterOperator.IN, ["1", "2"], "IN (1, 2)"), + FilterTestCase("num", FilterOperator.NOT_IN, ["1", "2"], "NOT IN (1, 2)"), + FilterTestCase( + "ds", FilterOperator.TEMPORAL_RANGE, "2020 : 2021", "2020-01-01" + ), + ) + table = self.get_table(name="birth_names") + for filter_ in filters: + query_obj = { + "granularity": None, + "from_dttm": None, + "to_dttm": None, + "groupby": ["gender"], + "metrics": ["count"], + "is_timeseries": False, + "filter": [ + { + "col": filter_.column, + "op": filter_.operator, + "val": filter_.value, + } + ], + "extras": {}, + } + sqla_query = table.get_sqla_query(**query_obj) + sql = table.database.compile_sqla_query(sqla_query.sqla_query) + if isinstance(filter_.expected, list): + self.assertTrue( + any([candidate in sql for candidate in filter_.expected]) + ) + else: + self.assertIn(filter_.expected, sql) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_boolean_type_where_operators(self): + table = self.get_table(name="birth_names") + db.session.add( + TableColumn( + column_name="boolean_gender", + expression="case when gender = 'boy' then True else False end", + type="BOOLEAN", + table=table, + ) + ) + query_obj = { + "granularity": None, + "from_dttm": None, + "to_dttm": None, + "groupby": ["boolean_gender"], + "metrics": ["count"], + "is_timeseries": False, + "filter": [ + { + "col": "boolean_gender", + "op": FilterOperator.IN, + "val": ["true", "false"], + } + ], + "extras": {}, + } + sqla_query = table.get_sqla_query(**query_obj) + sql = table.database.compile_sqla_query(sqla_query.sqla_query) + dialect = table.database.get_dialect() + operand = "(true, false)" + # override native_boolean=False behavior in MySQLCompiler + # https://github.com/sqlalchemy/sqlalchemy/blob/master/lib/sqlalchemy/dialects/mysql/base.py + if not dialect.supports_native_boolean and dialect.name != "mysql": + operand = "(1, 0)" + self.assertIn(f"IN {operand}", sql) + + def test_incorrect_jinja_syntax_raises_correct_exception(self): + query_obj = { + "granularity": None, + "from_dttm": None, + "to_dttm": None, + "groupby": ["user"], + "metrics": [], + "is_timeseries": False, + "filter": [], + "extras": {}, + } + + # Table with Jinja callable. 
+ table = SqlaTable( + table_name="test_table", + sql="SELECT '{{ abcd xyz + 1 ASDF }}' as user", + database=get_example_database(), + ) + # TODO(villebro): make it work with presto + if get_example_database().backend != "presto": + with pytest.raises(QueryObjectValidationError): + table.get_sqla_query(**query_obj) + + def test_query_format_strip_trailing_semicolon(self): + query_obj = { + "granularity": None, + "from_dttm": None, + "to_dttm": None, + "groupby": ["user"], + "metrics": [], + "is_timeseries": False, + "filter": [], + "extras": {}, + } + + table = SqlaTable( + table_name="another_test_table", + sql="SELECT * from test_table;", + database=get_example_database(), + ) + sqlaq = table.get_sqla_query(**query_obj) + sql = table.database.compile_sqla_query(sqlaq.sqla_query) + assert sql[-1] != ";" + + def test_multiple_sql_statements_raises_exception(self): + base_query_obj = { + "granularity": None, + "from_dttm": None, + "to_dttm": None, + "groupby": ["grp"], + "metrics": [], + "is_timeseries": False, + "filter": [], + } + + table = SqlaTable( + table_name="test_has_extra_cache_keys_table", + sql="SELECT 'foo' as grp, 1 as num; SELECT 'bar' as grp, 2 as num", + database=get_example_database(), + ) + + query_obj = dict(**base_query_obj, extras={}) + with pytest.raises(QueryObjectValidationError): + table.get_sqla_query(**query_obj) + + def test_dml_statement_raises_exception(self): + base_query_obj = { + "granularity": None, + "from_dttm": None, + "to_dttm": None, + "groupby": ["grp"], + "metrics": [], + "is_timeseries": False, + "filter": [], + } + + table = SqlaTable( + table_name="test_has_extra_cache_keys_table", + sql="DELETE FROM foo", + database=get_example_database(), + ) + + query_obj = dict(**base_query_obj, extras={}) + with pytest.raises(QueryObjectValidationError): + table.get_sqla_query(**query_obj) + + def test_fetch_metadata_for_updated_virtual_table(self): + table = SqlaTable( + table_name="updated_sql_table", + database=get_example_database(), + sql="select 123 as intcol, 'abc' as strcol, 'abc' as mycase", + ) + TableColumn(column_name="intcol", type="FLOAT", table=table) + TableColumn(column_name="oldcol", type="INT", table=table) + TableColumn( + column_name="expr", + expression="case when 1 then 1 else 0 end", + type="INT", + table=table, + ) + TableColumn( + column_name="mycase", + expression="case when 1 then 1 else 0 end", + type="INT", + table=table, + ) + + # make sure the columns have been mapped properly + assert len(table.columns) == 4 + with db.session.no_autoflush: + table.fetch_metadata(commit=False) + + # assert that the removed column has been dropped and + # the physical and calculated columns are present + assert {col.column_name for col in table.columns} == { + "intcol", + "strcol", + "mycase", + "expr", + } + cols: Dict[str, TableColumn] = {col.column_name: col for col in table.columns} + # assert that the type for intcol has been updated (asserting CI types) + backend = table.database.backend + assert VIRTUAL_TABLE_INT_TYPES[backend].match(cols["intcol"].type) + # assert that the expression has been replaced with the new physical column + assert cols["mycase"].expression == "" + assert VIRTUAL_TABLE_STRING_TYPES[backend].match(cols["mycase"].type) + assert cols["expr"].expression == "case when 1 then 1 else 0 end" + + @patch("superset.models.core.Database.db_engine_spec", BigQueryEngineSpec) + def test_labels_expected_on_mutated_query(self): + query_obj = { + "granularity": None, + "from_dttm": None, + "to_dttm": None, + "groupby": ["user"], + 
"metrics": [ + { + "expressionType": "SIMPLE", + "column": {"column_name": "user"}, + "aggregate": "COUNT_DISTINCT", + "label": "COUNT_DISTINCT(user)", + } + ], + "is_timeseries": False, + "filter": [], + "extras": {}, + } + + database = Database(database_name="testdb", sqlalchemy_uri="sqlite://") + table = SqlaTable(table_name="bq_table", database=database) + db.session.add(database) + db.session.add(table) + db.session.commit() + sqlaq = table.get_sqla_query(**query_obj) + assert sqlaq.labels_expected == ["user", "COUNT_DISTINCT(user)"] + sql = table.database.compile_sqla_query(sqlaq.sqla_query) + assert "COUNT_DISTINCT_user__00db1" in sql + db.session.delete(table) + db.session.delete(database) + db.session.commit() + + +@pytest.fixture +def text_column_table(): + with app.app_context(): + table = SqlaTable( + table_name="text_column_table", + sql=( + "SELECT 'foo' as foo " + "UNION SELECT '' " + "UNION SELECT NULL " + "UNION SELECT 'null' " + "UNION SELECT '\"text in double quotes\"' " + "UNION SELECT '''text in single quotes''' " + "UNION SELECT 'double quotes \" in text' " + "UNION SELECT 'single quotes '' in text' " + ), + database=get_example_database(), + ) + TableColumn(column_name="foo", type="VARCHAR(255)", table=table) + SqlMetric(metric_name="count", expression="count(*)", table=table) + yield table + + +def test_values_for_column_on_text_column(text_column_table): + # null value, empty string and text should be retrieved + with_null = text_column_table.values_for_column(column_name="foo", limit=10000) + assert None in with_null + assert len(with_null) == 8 + + +def test_filter_on_text_column(text_column_table): + table = text_column_table + # null value should be replaced + result_object = table.query( + { + "metrics": ["count"], + "filter": [{"col": "foo", "val": [NULL_STRING], "op": "IN"}], + "is_timeseries": False, + } + ) + assert result_object.df["count"][0] == 1 + + # also accept None value + result_object = table.query( + { + "metrics": ["count"], + "filter": [{"col": "foo", "val": [None], "op": "IN"}], + "is_timeseries": False, + } + ) + assert result_object.df["count"][0] == 1 + + # empty string should be replaced + result_object = table.query( + { + "metrics": ["count"], + "filter": [{"col": "foo", "val": [EMPTY_STRING], "op": "IN"}], + "is_timeseries": False, + } + ) + assert result_object.df["count"][0] == 1 + + # also accept "" string + result_object = table.query( + { + "metrics": ["count"], + "filter": [{"col": "foo", "val": [""], "op": "IN"}], + "is_timeseries": False, + } + ) + assert result_object.df["count"][0] == 1 + + # both replaced + result_object = table.query( + { + "metrics": ["count"], + "filter": [ + { + "col": "foo", + "val": [EMPTY_STRING, NULL_STRING, "null", "foo"], + "op": "IN", + } + ], + "is_timeseries": False, + } + ) + assert result_object.df["count"][0] == 4 + + # should filter text in double quotes + result_object = table.query( + { + "metrics": ["count"], + "filter": [ + { + "col": "foo", + "val": ['"text in double quotes"'], + "op": "IN", + } + ], + "is_timeseries": False, + } + ) + assert result_object.df["count"][0] == 1 + + # should filter text in single quotes + result_object = table.query( + { + "metrics": ["count"], + "filter": [ + { + "col": "foo", + "val": ["'text in single quotes'"], + "op": "IN", + } + ], + "is_timeseries": False, + } + ) + assert result_object.df["count"][0] == 1 + + # should filter text with double quote + result_object = table.query( + { + "metrics": ["count"], + "filter": [ + { + "col": "foo", + "val": 
['double quotes " in text'], + "op": "IN", + } + ], + "is_timeseries": False, + } + ) + assert result_object.df["count"][0] == 1 + + # should filter text with single quote + result_object = table.query( + { + "metrics": ["count"], + "filter": [ + { + "col": "foo", + "val": ["single quotes ' in text"], + "op": "IN", + } + ], + "is_timeseries": False, + } + ) + assert result_object.df["count"][0] == 1 + + +@only_postgresql +def test_should_generate_closed_and_open_time_filter_range(login_as_admin): + table = SqlaTable( + table_name="temporal_column_table", + sql=( + "SELECT '2021-12-31'::timestamp as datetime_col " + "UNION SELECT '2022-01-01'::timestamp " + "UNION SELECT '2022-03-10'::timestamp " + "UNION SELECT '2023-01-01'::timestamp " + "UNION SELECT '2023-03-10'::timestamp " + ), + database=get_example_database(), + ) + TableColumn( + column_name="datetime_col", + type="TIMESTAMP", + table=table, + is_dttm=True, + ) + SqlMetric(metric_name="count", expression="count(*)", table=table) + result_object = table.query( + { + "metrics": ["count"], + "is_timeseries": False, + "filter": [], + "from_dttm": datetime(2022, 1, 1), + "to_dttm": datetime(2023, 1, 1), + "granularity": "datetime_col", + } + ) + """ >>> result_object.query + SELECT count(*) AS count + FROM + (SELECT '2021-12-31'::timestamp as datetime_col + UNION SELECT '2022-01-01'::timestamp + UNION SELECT '2022-03-10'::timestamp + UNION SELECT '2023-01-01'::timestamp + UNION SELECT '2023-03-10'::timestamp) AS virtual_table + WHERE datetime_col >= TO_TIMESTAMP('2022-01-01 00:00:00.000000', 'YYYY-MM-DD HH24:MI:SS.US') + AND datetime_col < TO_TIMESTAMP('2023-01-01 00:00:00.000000', 'YYYY-MM-DD HH24:MI:SS.US') + """ + assert result_object.df.iloc[0]["count"] == 2 + + +def test_none_operand_in_filter(login_as_admin, physical_dataset): + expected_results = [ + { + "operator": FilterOperator.EQUALS.value, + "count": 10, + "sql_should_contain": "COL4 IS NULL", + }, + { + "operator": FilterOperator.NOT_EQUALS.value, + "count": 0, + "sql_should_contain": "COL4 IS NOT NULL", + }, + ] + for expected in expected_results: + result = physical_dataset.query( + { + "metrics": ["count"], + "filter": [{"col": "col4", "val": None, "op": expected["operator"]}], + "is_timeseries": False, + } + ) + assert result.df["count"][0] == expected["count"] + assert expected["sql_should_contain"] in result.query.upper() + + with pytest.raises(QueryObjectValidationError): + for flt in [ + FilterOperator.GREATER_THAN, + FilterOperator.LESS_THAN, + FilterOperator.GREATER_THAN_OR_EQUALS, + FilterOperator.LESS_THAN_OR_EQUALS, + FilterOperator.LIKE, + FilterOperator.ILIKE, + ]: + physical_dataset.query( + { + "metrics": ["count"], + "filter": [{"col": "col4", "val": None, "op": flt.value}], + "is_timeseries": False, + } + ) + + +@pytest.mark.parametrize( + "row,dimension,result", + [ + (pd.Series({"foo": "abc"}), "foo", "abc"), + (pd.Series({"bar": True}), "bar", True), + (pd.Series({"baz": 123}), "baz", 123), + (pd.Series({"baz": np.int16(123)}), "baz", 123), + (pd.Series({"baz": np.uint32(123)}), "baz", 123), + (pd.Series({"baz": np.int64(123)}), "baz", 123), + (pd.Series({"qux": 123.456}), "qux", 123.456), + (pd.Series({"qux": np.float32(123.456)}), "qux", 123.45600128173828), + (pd.Series({"qux": np.float64(123.456)}), "qux", 123.456), + (pd.Series({"quux": "2021-01-01"}), "quux", "2021-01-01"), + ( + pd.Series({"quuz": "2021-01-01T00:00:00"}), + "quuz", + text("TIME_PARSE('2021-01-01T00:00:00')"), + ), + ], +) +def test__normalize_prequery_result_type( + app_context: 
Flask, + mocker: MockFixture, + row: pd.Series, + dimension: str, + result: Any, +) -> None: + def _convert_dttm( + target_type: str, dttm: datetime, db_extra: Optional[Dict[str, Any]] = None + ) -> Optional[str]: + if target_type.upper() == "TIMESTAMP": + return f"""TIME_PARSE('{dttm.isoformat(timespec="seconds")}')""" + + return None + + table = SqlaTable(table_name="foobar", database=get_example_database()) + mocker.patch.object(table.db_engine_spec, "convert_dttm", new=_convert_dttm) + + columns_by_name = { + "foo": TableColumn( + column_name="foo", + is_dttm=False, + table=table, + type="STRING", + ), + "bar": TableColumn( + column_name="bar", + is_dttm=False, + table=table, + type="BOOLEAN", + ), + "baz": TableColumn( + column_name="baz", + is_dttm=False, + table=table, + type="INTEGER", + ), + "qux": TableColumn( + column_name="qux", + is_dttm=False, + table=table, + type="FLOAT", + ), + "quux": TableColumn( + column_name="quuz", + is_dttm=True, + table=table, + type="STRING", + ), + "quuz": TableColumn( + column_name="quux", + is_dttm=True, + table=table, + type="TIMESTAMP", + ), + } + + normalized = table._normalize_prequery_result_type( + row, + dimension, + columns_by_name, + ) + + assert type(normalized) == type(result) + + if isinstance(normalized, TextClause): + assert str(normalized) == str(result) + else: + assert normalized == result + + +def test__temporal_range_operator_in_adhoc_filter(app_context, physical_dataset): + result = physical_dataset.query( + { + "columns": ["col1", "col2"], + "filter": [ + { + "col": "col5", + "val": "2000-01-05 : 2000-01-06", + "op": FilterOperator.TEMPORAL_RANGE.value, + }, + { + "col": "col6", + "val": "2002-05-11 : 2002-05-12", + "op": FilterOperator.TEMPORAL_RANGE.value, + }, + ], + "is_timeseries": False, + } + ) + df = pd.DataFrame(index=[0], data={"col1": 4, "col2": "e"}) + assert df.equals(result.df) diff --git a/tests/integration_tests/sqllab_tests.py b/tests/integration_tests/sqllab_tests.py new file mode 100644 index 0000000000000..19e397e8f6961 --- /dev/null +++ b/tests/integration_tests/sqllab_tests.py @@ -0,0 +1,1027 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# isort:skip_file +"""Unit tests for Sql Lab""" +import json +from datetime import datetime, timedelta +from math import ceil, floor + +import pytest +from celery.exceptions import SoftTimeLimitExceeded +from parameterized import parameterized +from random import random +from unittest import mock +import prison + +from freezegun import freeze_time +from superset import db, security_manager +from superset.connectors.sqla.models import SqlaTable +from superset.db_engine_specs import BaseEngineSpec +from superset.db_engine_specs.hive import HiveEngineSpec +from superset.db_engine_specs.presto import PrestoEngineSpec +from superset.errors import ErrorLevel, SupersetError, SupersetErrorType +from superset.exceptions import SupersetErrorException +from superset.models.sql_lab import Query, SavedQuery +from superset.result_set import SupersetResultSet +from superset.sqllab.limiting_factor import LimitingFactor +from superset.sql_lab import ( + cancel_query, + execute_sql_statements, + apply_limit_if_exists, +) +from superset.sql_parse import CtasMethod +from superset.utils.core import ( + backend, + datetime_to_epoch, +) +from superset.utils.database import get_example_database, get_main_database + +from .base_tests import SupersetTestCase +from .conftest import CTAS_SCHEMA_NAME +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) + +QUERY_1 = "SELECT * FROM birth_names LIMIT 1" +QUERY_2 = "SELECT * FROM NO_TABLE" +QUERY_3 = "SELECT * FROM birth_names LIMIT 10" + + +@pytest.mark.sql_json_flow +class TestSqlLab(SupersetTestCase): + """Tests for Sql Lab""" + + @pytest.mark.usefixtures("load_birth_names_data") + def run_some_queries(self): + db.session.query(Query).delete() + db.session.commit() + self.run_sql(QUERY_1, client_id="client_id_1", username="admin") + self.run_sql(QUERY_2, client_id="client_id_2", username="admin") + self.run_sql(QUERY_3, client_id="client_id_3", username="gamma_sqllab") + self.logout() + + def tearDown(self): + self.logout() + db.session.query(Query).delete() + db.session.commit() + db.session.close() + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_sql_json(self): + examples_db = get_example_database() + engine_name = examples_db.db_engine_spec.engine_name + + self.login("admin") + + data = self.run_sql("SELECT * FROM birth_names LIMIT 10", "1") + self.assertLess(0, len(data["data"])) + + data = self.run_sql("SELECT * FROM nonexistent_table", "2") + if backend() == "presto": + assert ( + data["errors"][0]["error_type"] + == SupersetErrorType.TABLE_DOES_NOT_EXIST_ERROR + ) + assert data["errors"][0]["level"] == ErrorLevel.ERROR + assert data["errors"][0]["extra"] == { + "engine_name": "Presto", + "issue_codes": [ + { + "code": 1003, + "message": "Issue 1003 - There is a syntax error in the SQL query.
Perhaps there was a misspelling or a typo.", + }, + { + "code": 1005, + "message": "Issue 1005 - The table was deleted or renamed in the database.", + }, + ], + } + else: + assert ( + data["errors"][0]["error_type"] + == SupersetErrorType.GENERIC_DB_ENGINE_ERROR + ) + assert data["errors"][0]["level"] == ErrorLevel.ERROR + assert data["errors"][0]["extra"] == { + "issue_codes": [ + { + "code": 1002, + "message": "Issue 1002 - The database returned an unexpected error.", + } + ], + "engine_name": engine_name, + } + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_sql_json_dml_disallowed(self): + self.login("admin") + + data = self.run_sql("DELETE FROM birth_names", "1") + assert data == { + "errors": [ + { + "message": "Only SELECT statements are allowed against this database.", + "error_type": SupersetErrorType.DML_NOT_ALLOWED_ERROR, + "level": ErrorLevel.ERROR, + "extra": { + "issue_codes": [ + { + "code": 1022, + "message": "Issue 1022 - Database does not allow data manipulation.", + } + ] + }, + } + ] + } + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_sql_json_to_saved_query_info(self): + """ + SQLLab: Test SQLLab query execution info propagation to saved queries + """ + self.login("admin") + + sql_statement = "SELECT * FROM birth_names LIMIT 10" + examples_db_id = get_example_database().id + saved_query = SavedQuery(db_id=examples_db_id, sql=sql_statement) + db.session.add(saved_query) + db.session.commit() + + with freeze_time(datetime.now().isoformat(timespec="seconds")): + self.run_sql(sql_statement, "1", username="admin") + saved_query_ = ( + db.session.query(SavedQuery) + .filter( + SavedQuery.db_id == examples_db_id, SavedQuery.sql == sql_statement + ) + .one_or_none() + ) + assert saved_query_.rows is not None + assert saved_query_.last_run == datetime.now() + # Rollback changes + db.session.delete(saved_query_) + db.session.commit() + + @parameterized.expand([CtasMethod.TABLE, CtasMethod.VIEW]) + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_sql_json_cta_dynamic_db(self, ctas_method): + examples_db = get_example_database() + if examples_db.backend == "sqlite": + # sqlite doesn't support database creation + return + + with mock.patch( + "superset.sqllab.sqllab_execution_context.get_cta_schema_name", + lambda d, u, s, sql: f"{u.username}_database", + ): + old_allow_ctas = examples_db.allow_ctas + examples_db.allow_ctas = True # enable cta + + self.login("admin") + tmp_table_name = f"test_target_{ctas_method.lower()}" + self.run_sql( + "SELECT * FROM birth_names", + "1", + database_name="examples", + tmp_table_name=tmp_table_name, + select_as_cta=True, + ctas_method=ctas_method, + ) + + # assertions + db.session.commit() + examples_db = get_example_database() + with examples_db.get_sqla_engine_with_context() as engine: + data = engine.execute( + f"SELECT * FROM admin_database.{tmp_table_name}" + ).fetchall() + names_count = engine.execute( + f"SELECT COUNT(*) FROM birth_names" + ).first() + self.assertEqual( + names_count[0], len(data) + ) # SQL_MAX_ROW not applied due to the SQLLAB_CTAS_NO_LIMIT set to True + + # cleanup + engine.execute(f"DROP {ctas_method} admin_database.{tmp_table_name}") + examples_db.allow_ctas = old_allow_ctas + db.session.commit() + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_multi_sql(self): + self.login("admin") + + multi_sql = """ + SELECT * FROM birth_names LIMIT 1; + SELECT * FROM birth_names LIMIT 2; + """ + data = 
self.run_sql(multi_sql, "2234") + self.assertLess(0, len(data["data"])) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_explain(self): + self.login("admin") + + data = self.run_sql("EXPLAIN SELECT * FROM birth_names", "1") + self.assertLess(0, len(data["data"])) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_sql_json_has_access(self): + examples_db = get_example_database() + examples_db_permission_view = security_manager.add_permission_view_menu( + "database_access", examples_db.perm + ) + astronaut = security_manager.add_role("ExampleDBAccess") + security_manager.add_permission_role(astronaut, examples_db_permission_view) + # Gamma user, with sqllab and db permission + self.create_user_with_roles("Gagarin", ["ExampleDBAccess", "Gamma", "sql_lab"]) + + data = self.run_sql(QUERY_1, "1", username="Gagarin") + db.session.query(Query).delete() + db.session.commit() + self.assertLess(0, len(data["data"])) + + def test_sqllab_has_access(self): + for username in ("admin", "gamma_sqllab"): + self.login(username) + for endpoint in ("/superset/sqllab/", "/superset/sqllab/history/"): + resp = self.client.get(endpoint) + self.assertEqual(200, resp.status_code) + + self.logout() + + def test_sqllab_no_access(self): + self.login("gamma") + for endpoint in ("/superset/sqllab/", "/superset/sqllab/history/"): + resp = self.client.get(endpoint) + # Redirects to the main page + self.assertEqual(302, resp.status_code) + + def test_sql_json_schema_access(self): + examples_db = get_example_database() + db_backend = examples_db.backend + if db_backend == "sqlite": + # sqlite doesn't support database creation + return + + sqllab_test_db_schema_permission_view = ( + security_manager.add_permission_view_menu( + "schema_access", f"[{examples_db.name}].[{CTAS_SCHEMA_NAME}]" + ) + ) + schema_perm_role = security_manager.add_role("SchemaPermission") + security_manager.add_permission_role( + schema_perm_role, sqllab_test_db_schema_permission_view + ) + self.create_user_with_roles( + "SchemaUser", ["SchemaPermission", "Gamma", "sql_lab"] + ) + + with examples_db.get_sqla_engine_with_context() as engine: + engine.execute( + f"CREATE TABLE IF NOT EXISTS {CTAS_SCHEMA_NAME}.test_table AS SELECT 1 as c1, 2 as c2" + ) + + data = self.run_sql( + f"SELECT * FROM {CTAS_SCHEMA_NAME}.test_table", "3", username="SchemaUser" + ) + self.assertEqual(1, len(data["data"])) + + data = self.run_sql( + f"SELECT * FROM {CTAS_SCHEMA_NAME}.test_table", + "4", + username="SchemaUser", + schema=CTAS_SCHEMA_NAME, + ) + self.assertEqual(1, len(data["data"])) + + # postgres needs a schema as a part of the table name. 
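+ # so the unqualified SELECT below is only attempted on MySQL, where the schema can be passed separately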
+ if db_backend == "mysql": + data = self.run_sql( + "SELECT * FROM test_table", + "5", + username="SchemaUser", + schema=CTAS_SCHEMA_NAME, + ) + self.assertEqual(1, len(data["data"])) + + db.session.query(Query).delete() + with get_example_database().get_sqla_engine_with_context() as engine: + engine.execute(f"DROP TABLE IF EXISTS {CTAS_SCHEMA_NAME}.test_table") + db.session.commit() + + def test_queries_endpoint(self): + self.run_some_queries() + + # Not logged in, should error out + resp = self.client.get("/superset/queries/0") + # Redirects to the login page + self.assertEqual(401, resp.status_code) + + # Admin sees queries + self.login("admin") + data = self.get_json_resp("/superset/queries/0") + self.assertEqual(2, len(data)) + data = self.get_json_resp("/superset/queries/0.0") + self.assertEqual(2, len(data)) + + # Run 2 more queries + self.run_sql("SELECT * FROM birth_names LIMIT 1", client_id="client_id_4") + self.run_sql("SELECT * FROM birth_names LIMIT 2", client_id="client_id_5") + self.login("admin") + data = self.get_json_resp("/superset/queries/0") + self.assertEqual(4, len(data)) + + now = datetime.now() + timedelta(days=1) + query = ( + db.session.query(Query) + .filter_by(sql="SELECT * FROM birth_names LIMIT 1") + .first() + ) + query.changed_on = now + db.session.commit() + + data = self.get_json_resp( + "/superset/queries/{}".format(float(datetime_to_epoch(now)) - 1000) + ) + self.assertEqual(1, len(data)) + + self.logout() + resp = self.client.get("/superset/queries/0") + # Redirects to the login page + self.assertEqual(401, resp.status_code) + + def test_search_query_on_db_id(self): + self.run_some_queries() + self.login("admin") + examples_dbid = get_example_database().id + + # Test search queries on database Id + data = self.get_json_resp( + f"/superset/search_queries?database_id={examples_dbid}" + ) + self.assertEqual(3, len(data)) + db_ids = [k["dbId"] for k in data] + self.assertEqual([examples_dbid for i in range(3)], db_ids) + + resp = self.get_resp("/superset/search_queries?database_id=-1") + data = json.loads(resp) + self.assertEqual(0, len(data)) + + def test_search_query_on_user(self): + self.run_some_queries() + self.login("admin") + + # Test search queries on user Id + user_id = security_manager.find_user("admin").id + data = self.get_json_resp("/superset/search_queries?user_id={}".format(user_id)) + self.assertEqual(2, len(data)) + user_ids = {k["userId"] for k in data} + self.assertEqual(set([user_id]), user_ids) + + user_id = security_manager.find_user("gamma_sqllab").id + resp = self.get_resp("/superset/search_queries?user_id={}".format(user_id)) + data = json.loads(resp) + self.assertEqual(1, len(data)) + self.assertEqual(data[0]["userId"], user_id) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_search_query_on_status(self): + self.run_some_queries() + self.login("admin") + # Test search queries on status + resp = self.get_resp("/superset/search_queries?status=success") + data = json.loads(resp) + self.assertEqual(2, len(data)) + states = [k["state"] for k in data] + self.assertEqual(["success", "success"], states) + + resp = self.get_resp("/superset/search_queries?status=failed") + data = json.loads(resp) + self.assertEqual(1, len(data)) + self.assertEqual(data[0]["state"], "failed") + + def test_search_query_on_text(self): + self.run_some_queries() + self.login("admin") + url = "/superset/search_queries?search_text=birth" + data = self.get_json_resp(url) + self.assertEqual(2, len(data)) + self.assertIn("birth", 
data[0]["sql"]) + + def test_search_query_filter_by_time(self): + self.run_some_queries() + self.login("admin") + from_time = floor( + (db.session.query(Query).filter_by(sql=QUERY_1).one()).start_time + ) + to_time = ceil( + (db.session.query(Query).filter_by(sql=QUERY_2).one()).start_time + ) + url = f"/superset/search_queries?from={from_time}&to={to_time}" + assert len(self.client.get(url).json) == 2 + + def test_search_query_only_owned(self) -> None: + """ + Test a search query with a user that does not have can_access_all_queries. + """ + # Test search_queries for Alpha user + self.run_some_queries() + self.login("gamma_sqllab") + + user_id = security_manager.find_user("gamma_sqllab").id + data = self.get_json_resp("/superset/search_queries") + + self.assertEqual(1, len(data)) + user_ids = {k["userId"] for k in data} + self.assertEqual(set([user_id]), user_ids) + + def test_alias_duplicate(self): + self.run_sql( + "SELECT name as col, gender as col FROM birth_names LIMIT 10", + client_id="2e2df3", + username="admin", + raise_on_error=True, + ) + + def test_ps_conversion_no_dict(self): + cols = [["string_col", "string"], ["int_col", "int"], ["float_col", "float"]] + data = [["a", 4, 4.0]] + results = SupersetResultSet(data, cols, BaseEngineSpec) + + self.assertEqual(len(data), results.size) + self.assertEqual(len(cols), len(results.columns)) + + def test_pa_conversion_tuple(self): + cols = ["string_col", "int_col", "list_col", "float_col"] + data = [("Text", 111, [123], 1.0)] + results = SupersetResultSet(data, cols, BaseEngineSpec) + + self.assertEqual(len(data), results.size) + self.assertEqual(len(cols), len(results.columns)) + + def test_pa_conversion_dict(self): + cols = ["string_col", "dict_col", "int_col"] + data = [["a", {"c1": 1, "c2": 2, "c3": 3}, 4]] + results = SupersetResultSet(data, cols, BaseEngineSpec) + + self.assertEqual(len(data), results.size) + self.assertEqual(len(cols), len(results.columns)) + + def test_sqllab_viz(self): + self.login("admin") + examples_dbid = get_example_database().id + payload = { + "chartType": "dist_bar", + "datasourceName": f"test_viz_flow_table_{random()}", + "schema": "superset", + "columns": [ + {"is_dttm": False, "type": "STRING", "name": f"viz_type_{random()}"}, + {"is_dttm": False, "type": "OBJECT", "name": f"ccount_{random()}"}, + ], + "sql": """\ + SELECT * + FROM birth_names + LIMIT 10""", + "dbId": examples_dbid, + } + data = {"data": json.dumps(payload)} + resp = self.get_json_resp("/superset/sqllab_viz/", data=data) + self.assertIn("table_id", resp) + + # ensure owner is set correctly + table_id = resp["table_id"] + table = db.session.query(SqlaTable).filter_by(id=table_id).one() + self.assertEqual([owner.username for owner in table.owners], ["admin"]) + view_menu = security_manager.find_view_menu(table.get_perm()) + assert view_menu is not None + + # Cleanup + db.session.delete(table) + db.session.commit() + + def test_sqllab_viz_bad_payload(self): + self.login("admin") + payload = { + "chartType": "dist_bar", + "schema": "superset", + "columns": [ + {"is_dttm": False, "type": "STRING", "name": f"viz_type_{random()}"}, + {"is_dttm": False, "type": "OBJECT", "name": f"ccount_{random()}"}, + ], + "sql": """\ + SELECT * + FROM birth_names + LIMIT 10""", + } + data = {"data": json.dumps(payload)} + url = "/superset/sqllab_viz/" + response = self.client.post(url, data=data, follow_redirects=True) + assert response.status_code == 400 + + def test_sqllab_table_viz(self): + self.login("admin") + examples_db = get_example_database() + 
with examples_db.get_sqla_engine_with_context() as engine: + engine.execute("DROP TABLE IF EXISTS test_sqllab_table_viz") + engine.execute("CREATE TABLE test_sqllab_table_viz AS SELECT 2 as col") + + examples_dbid = examples_db.id + + payload = { + "datasourceName": "test_sqllab_table_viz", + "columns": [], + "dbId": examples_dbid, + } + + data = {"data": json.dumps(payload)} + resp = self.get_json_resp("/superset/get_or_create_table/", data=data) + self.assertIn("table_id", resp) + + # ensure owner is set correctly + table_id = resp["table_id"] + table = db.session.query(SqlaTable).filter_by(id=table_id).one() + self.assertEqual([owner.username for owner in table.owners], ["admin"]) + db.session.delete(table) + + with get_example_database().get_sqla_engine_with_context() as engine: + engine.execute("DROP TABLE test_sqllab_table_viz") + db.session.commit() + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_sql_limit(self): + self.login("admin") + test_limit = 1 + data = self.run_sql("SELECT * FROM birth_names", client_id="sql_limit_1") + self.assertGreater(len(data["data"]), test_limit) + data = self.run_sql( + "SELECT * FROM birth_names", client_id="sql_limit_2", query_limit=test_limit + ) + self.assertEqual(len(data["data"]), test_limit) + + data = self.run_sql( + "SELECT * FROM birth_names LIMIT {}".format(test_limit), + client_id="sql_limit_3", + query_limit=test_limit + 1, + ) + self.assertEqual(len(data["data"]), test_limit) + self.assertEqual(data["query"]["limitingFactor"], LimitingFactor.QUERY) + + data = self.run_sql( + "SELECT * FROM birth_names LIMIT {}".format(test_limit + 1), + client_id="sql_limit_4", + query_limit=test_limit, + ) + self.assertEqual(len(data["data"]), test_limit) + self.assertEqual(data["query"]["limitingFactor"], LimitingFactor.DROPDOWN) + + data = self.run_sql( + "SELECT * FROM birth_names LIMIT {}".format(test_limit), + client_id="sql_limit_5", + query_limit=test_limit, + ) + self.assertEqual(len(data["data"]), test_limit) + self.assertEqual( + data["query"]["limitingFactor"], LimitingFactor.QUERY_AND_DROPDOWN + ) + + data = self.run_sql( + "SELECT * FROM birth_names", + client_id="sql_limit_6", + query_limit=10000, + ) + self.assertEqual(len(data["data"]), 1200) + self.assertEqual(data["query"]["limitingFactor"], LimitingFactor.NOT_LIMITED) + + data = self.run_sql( + "SELECT * FROM birth_names", + client_id="sql_limit_7", + query_limit=1200, + ) + self.assertEqual(len(data["data"]), 1200) + self.assertEqual(data["query"]["limitingFactor"], LimitingFactor.NOT_LIMITED) + + def test_query_api_filter(self) -> None: + """ + Test query api without can_only_access_owned_queries perm added to + Admin and make sure all queries show up. + """ + self.run_some_queries() + self.login(username="admin") + + url = "/api/v1/query/" + data = self.get_json_resp(url) + admin = security_manager.find_user("admin") + gamma_sqllab = security_manager.find_user("gamma_sqllab") + self.assertEqual(3, len(data["result"])) + user_queries = [result.get("user").get("username") for result in data["result"]] + assert admin.username in user_queries + assert gamma_sqllab.username in user_queries + + def test_query_api_can_access_all_queries(self) -> None: + """ + Test query api with can_access_all_queries perm added to + gamma and make sure all queries show up. 
+ """ + session = db.session + + # Add all_query_access perm to Gamma user + all_queries_view = security_manager.find_permission_view_menu( + "all_query_access", "all_query_access" + ) + + security_manager.add_permission_role( + security_manager.find_role("gamma_sqllab"), all_queries_view + ) + session.commit() + + # Test search_queries for Admin user + self.run_some_queries() + self.login("gamma_sqllab") + url = "/api/v1/query/" + data = self.get_json_resp(url) + self.assertEqual(3, len(data["result"])) + + # Remove all_query_access from gamma sqllab + all_queries_view = security_manager.find_permission_view_menu( + "all_query_access", "all_query_access" + ) + security_manager.del_permission_role( + security_manager.find_role("gamma_sqllab"), all_queries_view + ) + + session.commit() + + def test_query_admin_can_access_all_queries(self) -> None: + """ + Test query api with all_query_access perm added to + Admin and make sure only Admin queries show up. This is the default + """ + # Test search_queries for Admin user + self.run_some_queries() + self.login("admin") + + url = "/api/v1/query/" + data = self.get_json_resp(url) + self.assertEqual(3, len(data["result"])) + + def test_api_database(self): + self.login("admin") + self.create_fake_db() + get_example_database() + get_main_database() + + arguments = { + "keys": [], + "filters": [{"col": "expose_in_sqllab", "opr": "eq", "value": True}], + "order_column": "database_name", + "order_direction": "asc", + "page": 0, + "page_size": -1, + } + url = f"api/v1/database/?q={prison.dumps(arguments)}" + + self.assertEqual( + {"examples", "fake_db_100", "main"}, + {r.get("database_name") for r in self.get_json_resp(url)["result"]}, + ) + self.delete_fake_db() + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + @mock.patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + {"ENABLE_TEMPLATE_PROCESSING": True}, + clear=True, + ) + def test_sql_json_parameter_error(self): + self.login("admin") + + data = self.run_sql( + "SELECT * FROM birth_names WHERE state = '{{ state }}' LIMIT 10", + "1", + template_params=json.dumps({"state": "CA"}), + ) + assert data["status"] == "success" + + data = self.run_sql( + "SELECT * FROM birth_names WHERE state = '{{ stat }}' LIMIT 10", + "2", + template_params=json.dumps({"state": "CA"}), + ) + assert data["errors"][0]["error_type"] == "MISSING_TEMPLATE_PARAMS_ERROR" + assert data["errors"][0]["extra"] == { + "issue_codes": [ + { + "code": 1006, + "message": "Issue 1006 - One or more parameters specified in the query are missing.", + } + ], + "template_parameters": {"state": "CA"}, + "undefined_parameters": ["stat"], + } + + @mock.patch("superset.sql_lab.get_query") + @mock.patch("superset.sql_lab.execute_sql_statement") + def test_execute_sql_statements(self, mock_execute_sql_statement, mock_get_query): + sql = """ + -- comment + SET @value = 42; + SELECT @value AS foo; + -- comment + """ + mock_session = mock.MagicMock() + mock_query = mock.MagicMock() + mock_query.database.allow_run_async = False + mock_cursor = mock.MagicMock() + mock_query.database.get_raw_connection().__enter__().cursor.return_value = ( + mock_cursor + ) + mock_query.database.db_engine_spec.run_multiple_statements_as_one = False + mock_get_query.return_value = mock_query + + execute_sql_statements( + query_id=1, + rendered_query=sql, + return_results=True, + store_results=False, + session=mock_session, + start_time=None, + expand_data=False, + log_params=None, + ) + 
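+ # with run_multiple_statements_as_one disabled, each statement should be executed separately with comments stripped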
mock_execute_sql_statement.assert_has_calls( + [ + mock.call( + "SET @value = 42", + mock_query, + mock_session, + mock_cursor, + None, + False, + ), + mock.call( + "SELECT @value AS foo", + mock_query, + mock_session, + mock_cursor, + None, + False, + ), + ] + ) + + @mock.patch("superset.sql_lab.results_backend", None) + @mock.patch("superset.sql_lab.get_query") + @mock.patch("superset.sql_lab.execute_sql_statement") + def test_execute_sql_statements_no_results_backend( + self, mock_execute_sql_statement, mock_get_query + ): + sql = """ + -- comment + SET @value = 42; + SELECT @value AS foo; + -- comment + """ + mock_session = mock.MagicMock() + mock_query = mock.MagicMock() + mock_query.database.allow_run_async = True + mock_cursor = mock.MagicMock() + mock_query.database.get_raw_connection().__enter__().cursor.return_value = ( + mock_cursor + ) + mock_query.database.db_engine_spec.run_multiple_statements_as_one = False + mock_get_query.return_value = mock_query + + with pytest.raises(SupersetErrorException) as excinfo: + execute_sql_statements( + query_id=1, + rendered_query=sql, + return_results=True, + store_results=False, + session=mock_session, + start_time=None, + expand_data=False, + log_params=None, + ) + + assert excinfo.value.error == SupersetError( + message="Results backend is not configured.", + error_type=SupersetErrorType.RESULTS_BACKEND_NOT_CONFIGURED_ERROR, + level=ErrorLevel.ERROR, + extra={ + "issue_codes": [ + { + "code": 1021, + "message": ( + "Issue 1021 - Results backend needed for asynchronous " + "queries is not configured." + ), + } + ] + }, + ) + + @mock.patch("superset.sql_lab.get_query") + @mock.patch("superset.sql_lab.execute_sql_statement") + def test_execute_sql_statements_ctas( + self, mock_execute_sql_statement, mock_get_query + ): + sql = """ + -- comment + SET @value = 42; + SELECT @value AS foo; + -- comment + """ + mock_session = mock.MagicMock() + mock_query = mock.MagicMock() + mock_query.database.allow_run_async = False + mock_cursor = mock.MagicMock() + mock_query.database.get_raw_connection().__enter__().cursor.return_value = ( + mock_cursor + ) + mock_query.database.db_engine_spec.run_multiple_statements_as_one = False + mock_get_query.return_value = mock_query + + # set the query to CTAS + mock_query.select_as_cta = True + mock_query.ctas_method = CtasMethod.TABLE + + execute_sql_statements( + query_id=1, + rendered_query=sql, + return_results=True, + store_results=False, + session=mock_session, + start_time=None, + expand_data=False, + log_params=None, + ) + mock_execute_sql_statement.assert_has_calls( + [ + mock.call( + "SET @value = 42", + mock_query, + mock_session, + mock_cursor, + None, + False, + ), + mock.call( + "SELECT @value AS foo", + mock_query, + mock_session, + mock_cursor, + None, + True, # apply_ctas + ), + ] + ) + + # try invalid CTAS + sql = "DROP TABLE my_table" + with pytest.raises(SupersetErrorException) as excinfo: + execute_sql_statements( + query_id=1, + rendered_query=sql, + return_results=True, + store_results=False, + session=mock_session, + start_time=None, + expand_data=False, + log_params=None, + ) + assert excinfo.value.error == SupersetError( + message="CTAS (create table as select) can only be run with a query where the last statement is a SELECT. Please make sure your query has a SELECT as its last statement. 
Then, try running your query again.", + error_type=SupersetErrorType.INVALID_CTAS_QUERY_ERROR, + level=ErrorLevel.ERROR, + extra={ + "issue_codes": [ + { + "code": 1023, + "message": "Issue 1023 - The CTAS (create table as select) doesn't have a SELECT statement at the end. Please make sure your query has a SELECT as its last statement. Then, try running your query again.", + } + ] + }, + ) + + # try invalid CVAS + mock_query.ctas_method = CtasMethod.VIEW + sql = """ + -- comment + SET @value = 42; + SELECT @value AS foo; + -- comment + """ + with pytest.raises(SupersetErrorException) as excinfo: + execute_sql_statements( + query_id=1, + rendered_query=sql, + return_results=True, + store_results=False, + session=mock_session, + start_time=None, + expand_data=False, + log_params=None, + ) + assert excinfo.value.error == SupersetError( + message="CVAS (create view as select) can only be run with a query with a single SELECT statement. Please make sure your query has only a SELECT statement. Then, try running your query again.", + error_type=SupersetErrorType.INVALID_CVAS_QUERY_ERROR, + level=ErrorLevel.ERROR, + extra={ + "issue_codes": [ + { + "code": 1024, + "message": "Issue 1024 - CVAS (create view as select) query has more than one statement.", + }, + { + "code": 1025, + "message": "Issue 1025 - CVAS (create view as select) query is not a SELECT statement.", + }, + ] + }, + ) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_sql_json_soft_timeout(self): + examples_db = get_example_database() + if examples_db.backend == "sqlite": + return + + self.login("admin") + + with mock.patch.object( + examples_db.db_engine_spec, "handle_cursor" + ) as handle_cursor: + handle_cursor.side_effect = SoftTimeLimitExceeded() + data = self.run_sql("SELECT * FROM birth_names LIMIT 1", "1") + + assert data == { + "errors": [ + { + "message": ( + "The query was killed after 21600 seconds. It might be too complex, " + "or the database might be under heavy load." 
+ ), + "error_type": SupersetErrorType.SQLLAB_TIMEOUT_ERROR, + "level": ErrorLevel.ERROR, + "extra": { + "issue_codes": [ + { + "code": 1026, + "message": "Issue 1026 - Query is too complex and takes too long to run.", + }, + { + "code": 1027, + "message": "Issue 1027 - The database is currently running too many queries.", + }, + ] + }, + } + ] + } + + def test_apply_limit_if_exists_when_incremented_limit_is_none(self): + sql = """ + SET @value = 42; + SELECT @value AS foo; + """ + database = get_example_database() + mock_query = mock.MagicMock() + mock_query.limit = 300 + final_sql = apply_limit_if_exists(database, None, mock_query, sql) + + assert final_sql == sql + + def test_apply_limit_if_exists_when_increased_limit(self): + sql = """ + SET @value = 42; + SELECT @value AS foo; + """ + database = get_example_database() + mock_query = mock.MagicMock() + mock_query.limit = 300 + final_sql = apply_limit_if_exists(database, 1000, mock_query, sql) + assert "LIMIT 1000" in final_sql + + +@pytest.mark.parametrize("spec", [HiveEngineSpec, PrestoEngineSpec]) +def test_cancel_query_implicit(spec: BaseEngineSpec) -> None: + query = mock.MagicMock() + query.database.db_engine_spec = spec + assert cancel_query(query) diff --git a/tests/integration_tests/stats_logger_tests.py b/tests/integration_tests/stats_logger_tests.py new file mode 100644 index 0000000000000..adf7cc1db6f8d --- /dev/null +++ b/tests/integration_tests/stats_logger_tests.py @@ -0,0 +1,49 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
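+# The tests below check that StatsdStatsLogger forwards incr, decr, gauge and timing calls to the underlying StatsD client.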
+"""Unit tests for Superset""" +from unittest import TestCase +from unittest.mock import Mock, patch + +from superset.stats_logger import StatsdStatsLogger + + +class TestStatsdStatsLogger(TestCase): + def verify_client_calls(self, logger, client): + logger.incr("foo1") + client.incr.assert_called_once() + client.incr.assert_called_with("foo1") + logger.decr("foo2") + client.decr.assert_called_once() + client.decr.assert_called_with("foo2") + logger.gauge("foo3", 2.21) + client.gauge.assert_called_once() + client.gauge.assert_called_with("foo3", 2.21) + logger.timing("foo4", 1.234) + client.timing.assert_called_once() + client.timing.assert_called_with("foo4", 1.234) + + def test_init_with_statsd_client(self): + client = Mock() + stats_logger = StatsdStatsLogger(statsd_client=client) + self.verify_client_calls(stats_logger, client) + + def test_init_with_params(self): + with patch("superset.stats_logger.StatsClient") as MockStatsdClient: + mock_client = MockStatsdClient.return_value + + stats_logger = StatsdStatsLogger() + self.verify_client_calls(stats_logger, mock_client) diff --git a/tests/integration_tests/strategy_tests.py b/tests/integration_tests/strategy_tests.py new file mode 100644 index 0000000000000..e54ae865e3c15 --- /dev/null +++ b/tests/integration_tests/strategy_tests.py @@ -0,0 +1,153 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# isort:skip_file +"""Unit tests for Superset cache warmup""" +import datetime +import json +from unittest.mock import MagicMock +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) + +from sqlalchemy import String, Date, Float + +import pytest +import pandas as pd + +from superset.models.slice import Slice +from superset.utils.database import get_example_database + +from superset import db + +from superset.models.core import Log +from superset.tags.models import get_tag, ObjectTypes, TaggedObject, TagTypes +from superset.tasks.cache import ( + DashboardTagsStrategy, + TopNDashboardsStrategy, +) +from superset.utils.urls import get_url_host + +from .base_tests import SupersetTestCase +from .dashboard_utils import create_dashboard, create_slice, create_table_metadata +from .fixtures.unicode_dashboard import ( + load_unicode_dashboard_with_slice, + load_unicode_data, +) + + +mock_positions = { + "DASHBOARD_VERSION_KEY": "v2", + "DASHBOARD_CHART_TYPE-1": { + "type": "CHART", + "id": "DASHBOARD_CHART_TYPE-1", + "children": [], + "meta": {"width": 4, "height": 50, "chartId": 1}, + }, + "DASHBOARD_CHART_TYPE-2": { + "type": "CHART", + "id": "DASHBOARD_CHART_TYPE-2", + "children": [], + "meta": {"width": 4, "height": 50, "chartId": 2}, + }, +} + + +class TestCacheWarmUp(SupersetTestCase): + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_top_n_dashboards_strategy(self): + # create a top visited dashboard + db.session.query(Log).delete() + self.login(username="admin") + dash = self.get_dash_by_slug("births") + for _ in range(10): + self.client.get(f"/superset/dashboard/{dash.id}/") + + strategy = TopNDashboardsStrategy(1) + result = sorted(strategy.get_urls()) + expected = sorted( + [ + f"{get_url_host()}superset/warm_up_cache/?slice_id={slc.id}&dashboard_id={dash.id}" + for slc in dash.slices + ] + ) + self.assertEqual(result, expected) + + def reset_tag(self, tag): + """Remove associated object from tag, used to reset tests""" + if tag.objects: + for o in tag.objects: + db.session.delete(o) + db.session.commit() + + @pytest.mark.usefixtures( + "load_unicode_dashboard_with_slice", "load_birth_names_dashboard_with_slices" + ) + def test_dashboard_tags(self): + tag1 = get_tag("tag1", db.session, TagTypes.custom) + # delete first to make test idempotent + self.reset_tag(tag1) + + strategy = DashboardTagsStrategy(["tag1"]) + result = sorted(strategy.get_urls()) + expected = [] + self.assertEqual(result, expected) + + # tag dashboard 'births' with `tag1` + tag1 = get_tag("tag1", db.session, TagTypes.custom) + dash = self.get_dash_by_slug("births") + tag1_urls = sorted( + [ + f"{get_url_host()}superset/warm_up_cache/?slice_id={slc.id}" + for slc in dash.slices + ] + ) + tagged_object = TaggedObject( + tag_id=tag1.id, object_id=dash.id, object_type=ObjectTypes.dashboard + ) + db.session.add(tagged_object) + db.session.commit() + + self.assertEqual(sorted(strategy.get_urls()), tag1_urls) + + strategy = DashboardTagsStrategy(["tag2"]) + tag2 = get_tag("tag2", db.session, TagTypes.custom) + self.reset_tag(tag2) + + result = sorted(strategy.get_urls()) + expected = [] + self.assertEqual(result, expected) + + # tag first slice + dash = self.get_dash_by_slug("unicode-test") + slc = dash.slices[0] + tag2_urls = [f"{get_url_host()}superset/warm_up_cache/?slice_id={slc.id}"] + object_id = slc.id + tagged_object = TaggedObject( + tag_id=tag2.id, object_id=object_id, object_type=ObjectTypes.chart + 
) + db.session.add(tagged_object) + db.session.commit() + + result = sorted(strategy.get_urls()) + self.assertEqual(result, tag2_urls) + + strategy = DashboardTagsStrategy(["tag1", "tag2"]) + + result = sorted(strategy.get_urls()) + expected = sorted(tag1_urls + tag2_urls) + self.assertEqual(result, expected) diff --git a/tests/integration_tests/superset_test_config.py b/tests/integration_tests/superset_test_config.py new file mode 100644 index 0000000000000..19c2cc000f545 --- /dev/null +++ b/tests/integration_tests/superset_test_config.py @@ -0,0 +1,146 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# type: ignore +import logging +import math +from copy import copy +from datetime import timedelta + +from superset.config import * +from tests.integration_tests.superset_test_custom_template_processors import ( + CustomPrestoTemplateProcessor, +) + +logging.getLogger("flask_appbuilder.baseviews").setLevel(logging.WARNING) +logging.getLogger("flask_appbuilder.base").setLevel(logging.WARNING) +logging.getLogger("flask_appbuilder.api").setLevel(logging.WARNING) +logging.getLogger("flask_appbuilder.security.sqla.manager").setLevel(logging.WARNING) +logging.getLogger("sqlalchemy.engine.Engine").setLevel(logging.WARNING) + +AUTH_USER_REGISTRATION_ROLE = "alpha" +SQLALCHEMY_DATABASE_URI = "sqlite:///" + os.path.join( + DATA_DIR, "unittests.integration_tests.db" +) +DEBUG = False +SUPERSET_WEBSERVER_PORT = 8081 +SILENCE_FAB = False +# Allowing SQLALCHEMY_DATABASE_URI and SQLALCHEMY_EXAMPLES_URI to be defined as env vars for +# continuous integration +if "SUPERSET__SQLALCHEMY_DATABASE_URI" in os.environ: + SQLALCHEMY_DATABASE_URI = os.environ["SUPERSET__SQLALCHEMY_DATABASE_URI"] + +SQLALCHEMY_EXAMPLES_URI = SQLALCHEMY_DATABASE_URI +if "SUPERSET__SQLALCHEMY_EXAMPLES_URI" in os.environ: + SQLALCHEMY_EXAMPLES_URI = os.environ["SUPERSET__SQLALCHEMY_EXAMPLES_URI"] + +if "UPLOAD_FOLDER" in os.environ: + UPLOAD_FOLDER = os.environ["UPLOAD_FOLDER"] + +if "sqlite" in SQLALCHEMY_DATABASE_URI: + logger.warning( + "SQLite Database support for metadata databases will be " + "removed in a future version of Superset." + ) + +# Speeding up the tests.
+PRESTO_POLL_INTERVAL = 0.1 +HIVE_POLL_INTERVAL = 0.1 + +SQL_MAX_ROW = 10000 +SQLLAB_CTAS_NO_LIMIT = True # SQL_MAX_ROW will not take effect for CTAS queries +FEATURE_FLAGS = { + **FEATURE_FLAGS, + "foo": "bar", + "KV_STORE": True, + "SHARE_QUERIES_VIA_KV_STORE": True, + "ENABLE_TEMPLATE_PROCESSING": True, + "ALERT_REPORTS": True, + "DASHBOARD_NATIVE_FILTERS": True, + "DRILL_TO_DETAIL": True, + "HORIZONTAL_FILTER_BAR": True, +} + +WEBDRIVER_BASEURL = "http://0.0.0.0:8081/" + + +def GET_FEATURE_FLAGS_FUNC(ff): + ff_copy = copy(ff) + ff_copy["super"] = "set" + return ff_copy + + +TESTING = True +WTF_CSRF_ENABLED = False + +FAB_ROLES = {"TestRole": [["Security", "menu_access"], ["List Users", "menu_access"]]} + +AUTH_ROLE_PUBLIC = "Public" +EMAIL_NOTIFICATIONS = False +REDIS_HOST = os.environ.get("REDIS_HOST", "localhost") +REDIS_PORT = os.environ.get("REDIS_PORT", "6379") +REDIS_CELERY_DB = os.environ.get("REDIS_CELERY_DB", 2) +REDIS_RESULTS_DB = os.environ.get("REDIS_RESULTS_DB", 3) +REDIS_CACHE_DB = os.environ.get("REDIS_CACHE_DB", 4) + + +CACHE_CONFIG = { + "CACHE_TYPE": "RedisCache", + "CACHE_DEFAULT_TIMEOUT": int(timedelta(minutes=1).total_seconds()), + "CACHE_KEY_PREFIX": "superset_cache", + "CACHE_REDIS_URL": f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_CACHE_DB}", +} + +DATA_CACHE_CONFIG = { + **CACHE_CONFIG, + "CACHE_DEFAULT_TIMEOUT": int(timedelta(seconds=30).total_seconds()), + "CACHE_KEY_PREFIX": "superset_data_cache", +} + +FILTER_STATE_CACHE_CONFIG = { + "CACHE_TYPE": "SimpleCache", + "CACHE_THRESHOLD": math.inf, + "CACHE_DEFAULT_TIMEOUT": int(timedelta(minutes=10).total_seconds()), +} + +EXPLORE_FORM_DATA_CACHE_CONFIG = { + "CACHE_TYPE": "SimpleCache", + "CACHE_THRESHOLD": math.inf, + "CACHE_DEFAULT_TIMEOUT": int(timedelta(minutes=10).total_seconds()), +} + +GLOBAL_ASYNC_QUERIES_JWT_SECRET = "test-secret-change-me-test-secret-change-me" + +ALERT_REPORTS_WORKING_TIME_OUT_KILL = True + +ALERT_REPORTS_QUERY_EXECUTION_MAX_TRIES = 3 + + +class CeleryConfig(object): + BROKER_URL = f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_CELERY_DB}" + CELERY_IMPORTS = ("superset.sql_lab",) + CELERY_RESULT_BACKEND = f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_RESULTS_DB}" + CELERY_ANNOTATIONS = {"sql_lab.add": {"rate_limit": "10/s"}} + CONCURRENCY = 1 + + +CELERY_CONFIG = CeleryConfig + +CUSTOM_TEMPLATE_PROCESSORS = { + CustomPrestoTemplateProcessor.engine: CustomPrestoTemplateProcessor +} + +PRESERVE_CONTEXT_ON_EXCEPTION = False diff --git a/tests/integration_tests/superset_test_config_sqllab_backend_persist_off.py b/tests/integration_tests/superset_test_config_sqllab_backend_persist_off.py new file mode 100644 index 0000000000000..9f6dd2ead1fa2 --- /dev/null +++ b/tests/integration_tests/superset_test_config_sqllab_backend_persist_off.py @@ -0,0 +1,24 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied.
See the License for the +# specific language governing permissions and limitations +# under the License. +# flake8: noqa +# type: ignore +import os +from copy import copy + +from .superset_test_config import * + +FEATURE_FLAGS = {"SQLLAB_BACKEND_PERSISTENCE": False} diff --git a/tests/integration_tests/superset_test_config_thumbnails.py b/tests/integration_tests/superset_test_config_thumbnails.py new file mode 100644 index 0000000000000..9f621efabbf4d --- /dev/null +++ b/tests/integration_tests/superset_test_config_thumbnails.py @@ -0,0 +1,88 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# type: ignore +from copy import copy + +from superset.config import * + +AUTH_USER_REGISTRATION_ROLE = "alpha" +SQLALCHEMY_DATABASE_URI = "sqlite:///" + os.path.join( + DATA_DIR, "unittests.integration_tests.db" +) +DEBUG = True +SUPERSET_WEBSERVER_PORT = 8081 + +# Allowing SQLALCHEMY_DATABASE_URI to be defined as an env var for +# continuous integration +if "SUPERSET__SQLALCHEMY_DATABASE_URI" in os.environ: + SQLALCHEMY_DATABASE_URI = os.environ["SUPERSET__SQLALCHEMY_DATABASE_URI"] + +if "sqlite" in SQLALCHEMY_DATABASE_URI: + logger.warning( + "SQLite Database support for metadata databases will be removed \ + in a future version of Superset." 
+ ) + +SQL_SELECT_AS_CTA = True +SQL_MAX_ROW = 666 + + +def GET_FEATURE_FLAGS_FUNC(ff): + ff_copy = copy(ff) + ff_copy["super"] = "set" + return ff_copy + + +TESTING = True +WTF_CSRF_ENABLED = False +PUBLIC_ROLE_LIKE = "Gamma" +AUTH_ROLE_PUBLIC = "Public" +EMAIL_NOTIFICATIONS = False + +CACHE_CONFIG = {"CACHE_TYPE": "SimpleCache"} + +REDIS_HOST = os.environ.get("REDIS_HOST", "localhost") +REDIS_PORT = os.environ.get("REDIS_PORT", "6379") +REDIS_CELERY_DB = os.environ.get("REDIS_CELERY_DB", 2) +REDIS_RESULTS_DB = os.environ.get("REDIS_RESULTS_DB", 3) + + +class CeleryConfig(object): + BROKER_URL = f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_CELERY_DB}" + CELERY_IMPORTS = ("superset.sql_lab", "superset.tasks.thumbnails") + CELERY_ANNOTATIONS = {"sql_lab.add": {"rate_limit": "10/s"}} + CONCURRENCY = 1 + + +CELERY_CONFIG = CeleryConfig + +FEATURE_FLAGS = { + "foo": "bar", + "KV_STORE": False, + "SHARE_QUERIES_VIA_KV_STORE": False, + "THUMBNAILS": True, + "THUMBNAILS_SQLA_LISTENERS": False, +} + +THUMBNAIL_CACHE_CONFIG = { + "CACHE_TYPE": "RedisCache", + "CACHE_DEFAULT_TIMEOUT": 10000, + "CACHE_KEY_PREFIX": "superset_thumbnails_", + "CACHE_REDIS_HOST": REDIS_HOST, + "CACHE_REDIS_PORT": REDIS_PORT, + "CACHE_REDIS_DB": REDIS_CELERY_DB, +} diff --git a/tests/integration_tests/superset_test_custom_template_processors.py b/tests/integration_tests/superset_test_custom_template_processors.py new file mode 100644 index 0000000000000..124c73931f4e5 --- /dev/null +++ b/tests/integration_tests/superset_test_custom_template_processors.py @@ -0,0 +1,59 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import re +from datetime import datetime, timedelta +from functools import partial +from typing import Any, Dict, SupportsInt + +from superset.jinja_context import PrestoTemplateProcessor + + +def DATE( + ts: datetime, day_offset: SupportsInt = 0, hour_offset: SupportsInt = 0 +) -> str: + """Current day as a string""" + day_offset, hour_offset = int(day_offset), int(hour_offset) + offset_day = (ts + timedelta(days=day_offset, hours=hour_offset)).date() + return str(offset_day) + + +class CustomPrestoTemplateProcessor(PrestoTemplateProcessor): + """A custom presto template processor for test.""" + + engine = "db_for_macros_testing" + + def process_template(self, sql: str, **kwargs) -> str: + """Processes a sql template with $ style macro using regex.""" + # Add custom macros functions. + macros = {"DATE": partial(DATE, datetime.utcnow())} # type: Dict[str, Any] + # Update with macros defined in context and kwargs. 
+ macros.update(self._context) + macros.update(kwargs) + + def replacer(match): + """Expands $ style macros with corresponding function calls.""" + macro_name, args_str = match.groups() + args = [a.strip() for a in args_str.split(",")] + if args == [""]: + args = [] + f = macros[macro_name[1:]] + return f(*args) + + macro_names = ["$" + name for name in macros.keys()] + pattern = r"(%s)\s*\(([^()]*)\)" % "|".join(map(re.escape, macro_names)) + return re.sub(pattern, replacer, sql) diff --git a/tests/integration_tests/tagging_tests.py b/tests/integration_tests/tagging_tests.py new file mode 100644 index 0000000000000..4ee10041d2c53 --- /dev/null +++ b/tests/integration_tests/tagging_tests.py @@ -0,0 +1,309 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from unittest import mock + +import pytest + +from superset.connectors.sqla.models import SqlaTable +from superset.extensions import db +from superset.models.core import FavStar +from superset.models.dashboard import Dashboard +from superset.models.slice import Slice +from superset.models.sql_lab import SavedQuery +from superset.tags.models import TaggedObject +from superset.utils.core import DatasourceType +from superset.utils.database import get_main_database +from tests.integration_tests.base_tests import SupersetTestCase +from tests.integration_tests.conftest import with_feature_flags +from tests.integration_tests.fixtures.tags import with_tagging_system_feature + + +class TestTagging(SupersetTestCase): + def query_tagged_object_table(self): + query = db.session.query(TaggedObject).all() + return query + + def clear_tagged_object_table(self): + db.session.query(TaggedObject).delete() + db.session.commit() + + @with_feature_flags(TAGGING_SYSTEM=False) + def test_tag_view_disabled(self): + self.login("admin") + response = self.client.get("/tagview/tags/suggestions/") + self.assertEqual(404, response.status_code) + + @with_feature_flags(TAGGING_SYSTEM=True) + def test_tag_view_enabled(self): + self.login("admin") + response = self.client.get("/tagview/tags/suggestions/") + self.assertNotEqual(404, response.status_code) + + @pytest.mark.usefixtures("with_tagging_system_feature") + def test_dataset_tagging(self): + """ + Test to make sure that when a new dataset is created, + a corresponding tag in the tagged_objects table + is created + """ + + # Remove all existing rows in the tagged_object table + self.clear_tagged_object_table() + + # Test to make sure nothing is in the tagged_object table + self.assertEqual([], self.query_tagged_object_table()) + + # Create a dataset and add it to the db + test_dataset = SqlaTable( + table_name="foo", + schema=None, + owners=[], + database=get_main_database(), + sql=None, + extra='{"certification": 1}', + ) + db.session.add(test_dataset) + db.session.commit() + + # 
Test to make sure that a dataset tag was added to the tagged_object table + tags = self.query_tagged_object_table() + self.assertEqual(1, len(tags)) + self.assertEqual("ObjectTypes.dataset", str(tags[0].object_type)) + self.assertEqual(test_dataset.id, tags[0].object_id) + + # Cleanup the db + db.session.delete(test_dataset) + db.session.commit() + + # Test to make sure the tag is deleted when the associated object is deleted + self.assertEqual([], self.query_tagged_object_table()) + + @pytest.mark.usefixtures("with_tagging_system_feature") + def test_chart_tagging(self): + """ + Test to make sure that when a new chart is created, + a corresponding tag in the tagged_objects table + is created + """ + + # Remove all existing rows in the tagged_object table + self.clear_tagged_object_table() + + # Test to make sure nothing is in the tagged_object table + self.assertEqual([], self.query_tagged_object_table()) + + # Create a chart and add it to the db + test_chart = Slice( + slice_name="test_chart", + datasource_type=DatasourceType.TABLE, + viz_type="bubble", + datasource_id=1, + id=1, + ) + db.session.add(test_chart) + db.session.commit() + + # Test to make sure that a chart tag was added to the tagged_object table + tags = self.query_tagged_object_table() + self.assertEqual(1, len(tags)) + self.assertEqual("ObjectTypes.chart", str(tags[0].object_type)) + self.assertEqual(test_chart.id, tags[0].object_id) + + # Cleanup the db + db.session.delete(test_chart) + db.session.commit() + + # Test to make sure the tag is deleted when the associated object is deleted + self.assertEqual([], self.query_tagged_object_table()) + + @pytest.mark.usefixtures("with_tagging_system_feature") + def test_dashboard_tagging(self): + """ + Test to make sure that when a new dashboard is created, + a corresponding tag in the tagged_objects table + is created + """ + + # Remove all existing rows in the tagged_object table + self.clear_tagged_object_table() + + # Test to make sure nothing is in the tagged_object table + self.assertEqual([], self.query_tagged_object_table()) + + # Create a dashboard and add it to the db + test_dashboard = Dashboard() + test_dashboard.dashboard_title = "test_dashboard" + test_dashboard.slug = "test_slug" + test_dashboard.slices = [] + test_dashboard.published = True + + db.session.add(test_dashboard) + db.session.commit() + + # Test to make sure that a dashboard tag was added to the tagged_object table + tags = self.query_tagged_object_table() + self.assertEqual(1, len(tags)) + self.assertEqual("ObjectTypes.dashboard", str(tags[0].object_type)) + self.assertEqual(test_dashboard.id, tags[0].object_id) + + # Cleanup the db + db.session.delete(test_dashboard) + db.session.commit() + + # Test to make sure the tag is deleted when the associated object is deleted + self.assertEqual([], self.query_tagged_object_table()) + + @pytest.mark.usefixtures("with_tagging_system_feature") + def test_saved_query_tagging(self): + """ + Test to make sure that when a new saved query is + created, a corresponding tag in the tagged_objects + table is created + """ + + # Remove all existing rows in the tagged_object table + self.clear_tagged_object_table() + + # Test to make sure nothing is in the tagged_object table + self.assertEqual([], self.query_tagged_object_table()) + + # Create a saved query and add it to the db + test_saved_query = SavedQuery(id=1, label="test saved query") + db.session.add(test_saved_query) + db.session.commit() + + # Test to make sure that a saved query tag was added to the 
tagged_object table + tags = self.query_tagged_object_table() + + self.assertEqual(2, len(tags)) + + self.assertEqual("ObjectTypes.query", str(tags[0].object_type)) + self.assertEqual("owner:None", str(tags[0].tag.name)) + self.assertEqual("TagTypes.owner", str(tags[0].tag.type)) + self.assertEqual(test_saved_query.id, tags[0].object_id) + + self.assertEqual("ObjectTypes.query", str(tags[1].object_type)) + self.assertEqual("type:query", str(tags[1].tag.name)) + self.assertEqual("TagTypes.type", str(tags[1].tag.type)) + self.assertEqual(test_saved_query.id, tags[1].object_id) + + # Cleanup the db + db.session.delete(test_saved_query) + db.session.commit() + + # Test to make sure the tag is deleted when the associated object is deleted + self.assertEqual([], self.query_tagged_object_table()) + + @pytest.mark.usefixtures("with_tagging_system_feature") + def test_favorite_tagging(self): + """ + Test to make sure that when a new favorite object is + created, a corresponding tag in the tagged_objects + table is created + """ + + # Remove all existing rows in the tagged_object table + self.clear_tagged_object_table() + + # Test to make sure nothing is in the tagged_object table + self.assertEqual([], self.query_tagged_object_table()) + + # Create a favorited object and add it to the db + test_saved_query = FavStar(user_id=1, class_name="slice", obj_id=1) + db.session.add(test_saved_query) + db.session.commit() + + # Test to make sure that a favorited object tag was added to the tagged_object table + tags = self.query_tagged_object_table() + self.assertEqual(1, len(tags)) + self.assertEqual("ObjectTypes.chart", str(tags[0].object_type)) + self.assertEqual(test_saved_query.obj_id, tags[0].object_id) + + # Cleanup the db + db.session.delete(test_saved_query) + db.session.commit() + + # Test to make sure the tag is deleted when the associated object is deleted + self.assertEqual([], self.query_tagged_object_table()) + + @with_feature_flags(TAGGING_SYSTEM=False) + def test_tagging_system(self): + """ + Test to make sure that when the TAGGING_SYSTEM + feature flag is false, that no tags are created + """ + + # Remove all existing rows in the tagged_object table + self.clear_tagged_object_table() + + # Test to make sure nothing is in the tagged_object table + self.assertEqual([], self.query_tagged_object_table()) + + # Create a dataset and add it to the db + test_dataset = SqlaTable( + table_name="foo", + schema=None, + owners=[], + database=get_main_database(), + sql=None, + extra='{"certification": 1}', + ) + + # Create a chart and add it to the db + test_chart = Slice( + slice_name="test_chart", + datasource_type=DatasourceType.TABLE, + viz_type="bubble", + datasource_id=1, + id=1, + ) + + # Create a dashboard and add it to the db + test_dashboard = Dashboard() + test_dashboard.dashboard_title = "test_dashboard" + test_dashboard.slug = "test_slug" + test_dashboard.slices = [] + test_dashboard.published = True + + # Create a saved query and add it to the db + test_saved_query = SavedQuery(id=1, label="test saved query") + + # Create a favorited object and add it to the db + test_favorited_object = FavStar(user_id=1, class_name="slice", obj_id=1) + + db.session.add(test_dataset) + db.session.add(test_chart) + db.session.add(test_dashboard) + db.session.add(test_saved_query) + db.session.add(test_favorited_object) + db.session.commit() + + # Test to make sure that no tags were added to the tagged_object table + tags = self.query_tagged_object_table() + self.assertEqual(0, len(tags)) + + # Cleanup the db 
+ db.session.delete(test_dataset) + db.session.delete(test_chart) + db.session.delete(test_dashboard) + db.session.delete(test_saved_query) + db.session.delete(test_favorited_object) + db.session.commit() + + # Test to make sure all the tags are deleted when the associated objects are deleted + self.assertEqual([], self.query_tagged_object_table()) diff --git a/tests/integration_tests/tasks/__init__.py b/tests/integration_tests/tasks/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/tasks/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/tasks/async_queries_tests.py b/tests/integration_tests/tasks/async_queries_tests.py new file mode 100644 index 0000000000000..20d0f39eea0f4 --- /dev/null +++ b/tests/integration_tests/tasks/async_queries_tests.py @@ -0,0 +1,190 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+"""Unit tests for async query celery jobs in Superset""" +from unittest import mock +from uuid import uuid4 + +import pytest +from celery.exceptions import SoftTimeLimitExceeded +from flask import g + +from superset.charts.commands.exceptions import ChartDataQueryFailedError +from superset.charts.data.commands.get_data_command import ChartDataCommand +from superset.exceptions import SupersetException +from superset.extensions import async_query_manager, security_manager +from superset.tasks import async_queries +from superset.tasks.async_queries import ( + load_chart_data_into_cache, + load_explore_json_into_cache, +) +from superset.utils.core import get_user_id +from tests.integration_tests.base_tests import SupersetTestCase +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) +from tests.integration_tests.fixtures.query_context import get_query_context +from tests.integration_tests.test_app import app + + +class TestAsyncQueries(SupersetTestCase): + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + @mock.patch.object(async_query_manager, "update_job") + @mock.patch.object(async_queries, "set_form_data") + def test_load_chart_data_into_cache(self, mock_set_form_data, mock_update_job): + async_query_manager.init_app(app) + query_context = get_query_context("birth_names") + user = security_manager.find_user("gamma") + job_metadata = { + "channel_id": str(uuid4()), + "job_id": str(uuid4()), + "user_id": user.id, + "status": "pending", + "errors": [], + } + + load_chart_data_into_cache(job_metadata, query_context) + mock_set_form_data.assert_called_once_with(query_context) + mock_update_job.assert_called_once_with( + job_metadata, "done", result_url=mock.ANY + ) + + @mock.patch.object( + ChartDataCommand, "run", side_effect=ChartDataQueryFailedError("Error: foo") + ) + @mock.patch.object(async_query_manager, "update_job") + def test_load_chart_data_into_cache_error(self, mock_update_job, mock_run_command): + async_query_manager.init_app(app) + query_context = get_query_context("birth_names") + user = security_manager.find_user("gamma") + job_metadata = { + "channel_id": str(uuid4()), + "job_id": str(uuid4()), + "user_id": user.id, + "status": "pending", + "errors": [], + } + with pytest.raises(ChartDataQueryFailedError): + load_chart_data_into_cache(job_metadata, query_context) + + mock_run_command.assert_called_once_with(cache=True) + errors = [{"message": "Error: foo"}] + mock_update_job.assert_called_once_with(job_metadata, "error", errors=errors) + + @mock.patch.object(ChartDataCommand, "run") + @mock.patch.object(async_query_manager, "update_job") + def test_soft_timeout_load_chart_data_into_cache( + self, mock_update_job, mock_run_command + ): + async_query_manager.init_app(app) + user = security_manager.find_user("gamma") + form_data = {} + job_metadata = { + "channel_id": str(uuid4()), + "job_id": str(uuid4()), + "user_id": user.id, + "status": "pending", + "errors": [], + } + errors = ["A timeout occurred while loading chart data"] + + with pytest.raises(SoftTimeLimitExceeded): + with mock.patch.object( + async_queries, + "set_form_data", + ) as set_form_data: + set_form_data.side_effect = SoftTimeLimitExceeded() + load_chart_data_into_cache(job_metadata, form_data) + set_form_data.assert_called_once_with(form_data, "error", errors=errors) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + @mock.patch.object(async_query_manager, "update_job") + def 
test_load_explore_json_into_cache(self, mock_update_job): + async_query_manager.init_app(app) + table = self.get_table(name="birth_names") + user = security_manager.find_user("gamma") + form_data = { + "datasource": f"{table.id}__table", + "viz_type": "dist_bar", + "granularity_sqla": "ds", + "time_range": "No filter", + "metrics": ["count"], + "adhoc_filters": [], + "groupby": ["gender"], + "row_limit": 100, + } + job_metadata = { + "channel_id": str(uuid4()), + "job_id": str(uuid4()), + "user_id": user.id, + "status": "pending", + "errors": [], + } + + load_explore_json_into_cache(job_metadata, form_data) + mock_update_job.assert_called_once_with( + job_metadata, "done", result_url=mock.ANY + ) + + @mock.patch.object(async_query_manager, "update_job") + @mock.patch.object(async_queries, "set_form_data") + def test_load_explore_json_into_cache_error( + self, mock_set_form_data, mock_update_job + ): + async_query_manager.init_app(app) + user = security_manager.find_user("gamma") + form_data = {} + job_metadata = { + "channel_id": str(uuid4()), + "job_id": str(uuid4()), + "user_id": user.id, + "status": "pending", + "errors": [], + } + + with pytest.raises(SupersetException): + load_explore_json_into_cache(job_metadata, form_data) + + mock_set_form_data.assert_called_once_with(form_data) + errors = ["The dataset associated with this chart no longer exists"] + mock_update_job.assert_called_once_with(job_metadata, "error", errors=errors) + + @mock.patch.object(ChartDataCommand, "run") + @mock.patch.object(async_query_manager, "update_job") + def test_soft_timeout_load_explore_json_into_cache( + self, mock_update_job, mock_run_command + ): + async_query_manager.init_app(app) + user = security_manager.find_user("gamma") + form_data = {} + job_metadata = { + "channel_id": str(uuid4()), + "job_id": str(uuid4()), + "user_id": user.id, + "status": "pending", + "errors": [], + } + errors = ["A timeout occurred while loading explore json, error"] + + with pytest.raises(SoftTimeLimitExceeded): + with mock.patch.object( + async_queries, + "set_form_data", + ) as set_form_data: + set_form_data.side_effect = SoftTimeLimitExceeded() + load_explore_json_into_cache(job_metadata, form_data) + set_form_data.assert_called_once_with(form_data, "error", errors=errors) diff --git a/tests/integration_tests/test_app.py b/tests/integration_tests/test_app.py new file mode 100644 index 0000000000000..fb7b47b67cb96 --- /dev/null +++ b/tests/integration_tests/test_app.py @@ -0,0 +1,36 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from typing import TYPE_CHECKING + +from superset.app import create_app + +if TYPE_CHECKING: + from typing import Any + + from flask.testing import FlaskClient + +app = create_app() + + +def login( + client: "FlaskClient[Any]", username: str = "admin", password: str = "general" +): + resp = client.post( + "/login/", + data=dict(username=username, password=password), + ).get_data(as_text=True) + assert "User confirmation needed" not in resp diff --git a/tests/integration_tests/test_jinja_context.py b/tests/integration_tests/test_jinja_context.py new file mode 100644 index 0000000000000..8c2db6920dcef --- /dev/null +++ b/tests/integration_tests/test_jinja_context.py @@ -0,0 +1,207 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from datetime import datetime +from unittest import mock + +import pytest +from flask.ctx import AppContext +from pytest_mock import MockFixture + +import superset.utils.database +from superset.exceptions import SupersetTemplateException +from superset.jinja_context import get_template_processor + + +def test_process_template(app_context: AppContext) -> None: + maindb = superset.utils.database.get_example_database() + template = "SELECT '{{ 1+1 }}'" + tp = get_template_processor(database=maindb) + assert tp.process_template(template) == "SELECT '2'" + + +def test_get_template_kwarg(app_context: AppContext) -> None: + maindb = superset.utils.database.get_example_database() + template = "{{ foo }}" + tp = get_template_processor(database=maindb, foo="bar") + assert tp.process_template(template) == "bar" + + +def test_template_kwarg(app_context: AppContext) -> None: + maindb = superset.utils.database.get_example_database() + template = "{{ foo }}" + tp = get_template_processor(database=maindb) + assert tp.process_template(template, foo="bar") == "bar" + + +def test_get_template_kwarg_dict(app_context: AppContext) -> None: + maindb = superset.utils.database.get_example_database() + template = "{{ foo.bar }}" + tp = get_template_processor(database=maindb, foo={"bar": "baz"}) + assert tp.process_template(template) == "baz" + + +def test_template_kwarg_dict(app_context: AppContext) -> None: + maindb = superset.utils.database.get_example_database() + template = "{{ foo.bar }}" + tp = get_template_processor(database=maindb) + assert tp.process_template(template, foo={"bar": "baz"}) == "baz" + + +def test_get_template_kwarg_lambda(app_context: AppContext) -> None: + maindb = superset.utils.database.get_example_database() + template = "{{ foo() }}" + tp = get_template_processor(database=maindb, foo=lambda: "bar") + with pytest.raises(SupersetTemplateException): + tp.process_template(template) + + +def test_template_kwarg_lambda(app_context: AppContext) -> None: + maindb = superset.utils.database.get_example_database() + template = "{{ foo() 
}}" + tp = get_template_processor(database=maindb) + with pytest.raises(SupersetTemplateException): + tp.process_template(template, foo=lambda: "bar") + + +def test_get_template_kwarg_module(app_context: AppContext) -> None: + maindb = superset.utils.database.get_example_database() + template = "{{ dt(2017, 1, 1).isoformat() }}" + tp = get_template_processor(database=maindb, dt=datetime) + with pytest.raises(SupersetTemplateException): + tp.process_template(template) + + +def test_template_kwarg_module(app_context: AppContext) -> None: + maindb = superset.utils.database.get_example_database() + template = "{{ dt(2017, 1, 1).isoformat() }}" + tp = get_template_processor(database=maindb) + with pytest.raises(SupersetTemplateException): + tp.process_template(template, dt=datetime) + + +def test_get_template_kwarg_nested_module(app_context: AppContext) -> None: + maindb = superset.utils.database.get_example_database() + template = "{{ foo.dt }}" + tp = get_template_processor(database=maindb, foo={"dt": datetime}) + with pytest.raises(SupersetTemplateException): + tp.process_template(template) + + +def test_template_kwarg_nested_module(app_context: AppContext) -> None: + maindb = superset.utils.database.get_example_database() + template = "{{ foo.dt }}" + tp = get_template_processor(database=maindb) + with pytest.raises(SupersetTemplateException): + tp.process_template(template, foo={"bar": datetime}) + + +def test_template_hive(app_context: AppContext, mocker: MockFixture) -> None: + lp_mock = mocker.patch( + "superset.jinja_context.HiveTemplateProcessor.latest_partition" + ) + lp_mock.return_value = "the_latest" + db = mock.Mock() + db.backend = "hive" + template = "{{ hive.latest_partition('my_table') }}" + tp = get_template_processor(database=db) + assert tp.process_template(template) == "the_latest" + + +def test_template_trino(app_context: AppContext, mocker: MockFixture) -> None: + lp_mock = mocker.patch( + "superset.jinja_context.TrinoTemplateProcessor.latest_partition" + ) + lp_mock.return_value = "the_latest" + db = mock.Mock() + db.backend = "trino" + template = "{{ trino.latest_partition('my_table') }}" + tp = get_template_processor(database=db) + assert tp.process_template(template) == "the_latest" + + # Backwards compatibility if migrating from Presto. 
+ template = "{{ presto.latest_partition('my_table') }}" + tp = get_template_processor(database=db) + assert tp.process_template(template) == "the_latest" + + +def test_template_context_addons(app_context: AppContext, mocker: MockFixture) -> None: + addons_mock = mocker.patch("superset.jinja_context.context_addons") + addons_mock.return_value = {"datetime": datetime} + maindb = superset.utils.database.get_example_database() + template = "SELECT '{{ datetime(2017, 1, 1).isoformat() }}'" + tp = get_template_processor(database=maindb) + assert tp.process_template(template) == "SELECT '2017-01-01T00:00:00'" + + +def test_custom_process_template(app_context: AppContext, mocker: MockFixture) -> None: + """Test macro defined in custom template processor works.""" + + mock_dt = mocker.patch( + "tests.integration_tests.superset_test_custom_template_processors.datetime" + ) + mock_dt.utcnow = mock.Mock(return_value=datetime(1970, 1, 1)) + db = mock.Mock() + db.backend = "db_for_macros_testing" + tp = get_template_processor(database=db) + + template = "SELECT '$DATE()'" + assert tp.process_template(template) == f"SELECT '1970-01-01'" + + template = "SELECT '$DATE(1, 2)'" + assert tp.process_template(template) == "SELECT '1970-01-02'" + + +def test_custom_get_template_kwarg(app_context: AppContext) -> None: + """Test macro passed as kwargs when getting template processor + works in custom template processor.""" + db = mock.Mock() + db.backend = "db_for_macros_testing" + template = "$foo()" + tp = get_template_processor(database=db, foo=lambda: "bar") + assert tp.process_template(template) == "bar" + + +def test_custom_template_kwarg(app_context: AppContext) -> None: + """Test macro passed as kwargs when processing template + works in custom template processor.""" + db = mock.Mock() + db.backend = "db_for_macros_testing" + template = "$foo()" + tp = get_template_processor(database=db) + assert tp.process_template(template, foo=lambda: "bar") == "bar" + + +def test_custom_template_processors_overwrite(app_context: AppContext) -> None: + """Test template processor for presto gets overwritten by custom one.""" + db = mock.Mock() + db.backend = "db_for_macros_testing" + tp = get_template_processor(database=db) + + template = "SELECT '{{ datetime(2017, 1, 1).isoformat() }}'" + assert tp.process_template(template) == template + + template = "SELECT '{{ DATE(1, 2) }}'" + assert tp.process_template(template) == template + + +def test_custom_template_processors_ignored(app_context: AppContext) -> None: + """Test custom template processor is ignored for a difference backend + database.""" + maindb = superset.utils.database.get_example_database() + template = "SELECT '$DATE()'" + tp = get_template_processor(database=maindb) + assert tp.process_template(template) == template diff --git a/tests/integration_tests/thumbnails_tests.py b/tests/integration_tests/thumbnails_tests.py new file mode 100644 index 0000000000000..efa0d73cb49f0 --- /dev/null +++ b/tests/integration_tests/thumbnails_tests.py @@ -0,0 +1,416 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# from superset import db +# from superset.models.dashboard import Dashboard + +import json +import urllib.request +from io import BytesIO +from typing import Tuple +from unittest import skipUnless +from unittest.mock import ANY, call, MagicMock, patch + +import pytest +from flask_testing import LiveServerTestCase +from sqlalchemy.sql import func + +from superset import db, is_feature_enabled, security_manager +from superset.extensions import machine_auth_provider_factory +from superset.models.dashboard import Dashboard +from superset.models.slice import Slice +from superset.tasks.types import ExecutorType +from superset.utils.screenshots import ChartScreenshot, DashboardScreenshot +from superset.utils.urls import get_url_path +from superset.utils.webdriver import find_unexpected_errors, WebDriverProxy +from tests.integration_tests.conftest import with_feature_flags +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) +from tests.integration_tests.test_app import app + +from .base_tests import SupersetTestCase + +CHART_URL = "/api/v1/chart/" +DASHBOARD_URL = "/api/v1/dashboard/" + + +class TestThumbnailsSeleniumLive(LiveServerTestCase): + def create_app(self): + return app + + def url_open_auth(self, username: str, url: str): + admin_user = security_manager.find_user(username=username) + cookies = machine_auth_provider_factory.instance.get_auth_cookies(admin_user) + opener = urllib.request.build_opener() + opener.addheaders.append(("Cookie", f"session={cookies['session']}")) + return opener.open(f"{self.get_server_url()}/{url}") + + @skipUnless((is_feature_enabled("THUMBNAILS")), "Thumbnails feature") + def test_get_async_dashboard_screenshot(self): + """ + Thumbnails: Simple get async dashboard screenshot + """ + with patch("superset.dashboards.api.DashboardRestApi.get") as mock_get: + rv = self.client.get(DASHBOARD_URL) + resp = json.loads(rv.data.decode("utf-8")) + thumbnail_url = resp["result"][0]["thumbnail_url"] + + response = self.url_open_auth( + "admin", + thumbnail_url, + ) + self.assertEqual(response.getcode(), 202) + + +class TestWebDriverScreenshotErrorDetector(SupersetTestCase): + @patch("superset.utils.webdriver.WebDriverWait") + @patch("superset.utils.webdriver.firefox") + @patch("superset.utils.webdriver.find_unexpected_errors") + def test_not_call_find_unexpected_errors_if_feature_disabled( + self, mock_find_unexpected_errors, mock_firefox, mock_webdriver_wait + ): + webdriver_proxy = WebDriverProxy("firefox") + user = security_manager.get_user_by_username( + app.config["THUMBNAIL_SELENIUM_USER"] + ) + url = get_url_path("Superset.dashboard", dashboard_id_or_slug=1) + webdriver_proxy.get_screenshot(url, "grid-container", user=user) + + assert not mock_find_unexpected_errors.called + + @patch("superset.utils.webdriver.WebDriverWait") + @patch("superset.utils.webdriver.firefox") + @patch("superset.utils.webdriver.find_unexpected_errors") + def test_call_find_unexpected_errors_if_feature_enabled( + self, mock_find_unexpected_errors, mock_firefox, mock_webdriver_wait + ): + 
app.config["SCREENSHOT_REPLACE_UNEXPECTED_ERRORS"] = True + webdriver_proxy = WebDriverProxy("firefox") + user = security_manager.get_user_by_username( + app.config["THUMBNAIL_SELENIUM_USER"] + ) + url = get_url_path("Superset.dashboard", dashboard_id_or_slug=1) + webdriver_proxy.get_screenshot(url, "grid-container", user=user) + + assert mock_find_unexpected_errors.called + + app.config["SCREENSHOT_REPLACE_UNEXPECTED_ERRORS"] = False + + def test_find_unexpected_errors_no_alert(self): + webdriver = MagicMock() + + webdriver.find_elements.return_value = [] + + unexpected_errors = find_unexpected_errors(driver=webdriver) + assert len(unexpected_errors) == 0 + + assert "alert" in webdriver.find_elements.call_args_list[0][0][1] + + @patch("superset.utils.webdriver.WebDriverWait") + def test_find_unexpected_errors(self, mock_webdriver_wait): + webdriver = MagicMock() + alert_div = MagicMock() + + webdriver.find_elements.return_value = [alert_div] + alert_div.find_elements.return_value = MagicMock() + + unexpected_errors = find_unexpected_errors(driver=webdriver) + assert len(unexpected_errors) == 1 + + # attempt to find alerts + assert "alert" in webdriver.find_elements.call_args_list[0][0][1] + # attempt to click on "See more" buttons + assert "button" in alert_div.find_element.call_args_list[0][0][1] + # Wait for error modal to show up and to hide + assert 2 == len(mock_webdriver_wait.call_args_list) + # replace the text in alert div, eg, "unexpected errors" + assert alert_div == webdriver.execute_script.call_args_list[0][0][1] + + +class TestWebDriverProxy(SupersetTestCase): + @patch("superset.utils.webdriver.WebDriverWait") + @patch("superset.utils.webdriver.firefox") + @patch("superset.utils.webdriver.sleep") + def test_screenshot_selenium_headstart( + self, mock_sleep, mock_webdriver, mock_webdriver_wait + ): + webdriver = WebDriverProxy("firefox") + user = security_manager.get_user_by_username( + app.config["THUMBNAIL_SELENIUM_USER"] + ) + url = get_url_path("Superset.slice", slice_id=1, standalone="true") + app.config["SCREENSHOT_SELENIUM_HEADSTART"] = 5 + webdriver.get_screenshot(url, "chart-container", user=user) + assert mock_sleep.call_args_list[0] == call(5) + + @patch("superset.utils.webdriver.WebDriverWait") + @patch("superset.utils.webdriver.firefox") + def test_screenshot_selenium_locate_wait(self, mock_webdriver, mock_webdriver_wait): + app.config["SCREENSHOT_LOCATE_WAIT"] = 15 + webdriver = WebDriverProxy("firefox") + user = security_manager.get_user_by_username( + app.config["THUMBNAIL_SELENIUM_USER"] + ) + url = get_url_path("Superset.slice", slice_id=1, standalone="true") + webdriver.get_screenshot(url, "chart-container", user=user) + assert mock_webdriver_wait.call_args_list[0] == call(ANY, 15) + + @patch("superset.utils.webdriver.WebDriverWait") + @patch("superset.utils.webdriver.firefox") + def test_screenshot_selenium_load_wait(self, mock_webdriver, mock_webdriver_wait): + app.config["SCREENSHOT_LOAD_WAIT"] = 15 + webdriver = WebDriverProxy("firefox") + user = security_manager.get_user_by_username( + app.config["THUMBNAIL_SELENIUM_USER"] + ) + url = get_url_path("Superset.slice", slice_id=1, standalone="true") + webdriver.get_screenshot(url, "chart-container", user=user) + assert mock_webdriver_wait.call_args_list[2] == call(ANY, 15) + + @patch("superset.utils.webdriver.WebDriverWait") + @patch("superset.utils.webdriver.firefox") + @patch("superset.utils.webdriver.sleep") + def test_screenshot_selenium_animation_wait( + self, mock_sleep, mock_webdriver, 
mock_webdriver_wait + ): + webdriver = WebDriverProxy("firefox") + user = security_manager.get_user_by_username( + app.config["THUMBNAIL_SELENIUM_USER"] + ) + url = get_url_path("Superset.slice", slice_id=1, standalone="true") + app.config["SCREENSHOT_SELENIUM_ANIMATION_WAIT"] = 4 + webdriver.get_screenshot(url, "chart-container", user=user) + assert mock_sleep.call_args_list[1] == call(4) + + +class TestThumbnails(SupersetTestCase): + + mock_image = b"bytes mock image" + digest_return_value = "foo_bar" + digest_hash = "5c7d96a3dd7a87850a2ef34087565a6e" + + def _get_id_and_thumbnail_url(self, url: str) -> Tuple[int, str]: + rv = self.client.get(url) + resp = json.loads(rv.data.decode("utf-8")) + obj = resp["result"][0] + return obj["id"], obj["thumbnail_url"] + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + @with_feature_flags(THUMBNAILS=False) + def test_dashboard_thumbnail_disabled(self): + """ + Thumbnails: Dashboard thumbnail disabled + """ + self.login(username="admin") + _, thumbnail_url = self._get_id_and_thumbnail_url(DASHBOARD_URL) + rv = self.client.get(thumbnail_url) + self.assertEqual(rv.status_code, 404) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + @with_feature_flags(THUMBNAILS=False) + def test_chart_thumbnail_disabled(self): + """ + Thumbnails: Chart thumbnail disabled + """ + self.login(username="admin") + _, thumbnail_url = self._get_id_and_thumbnail_url(CHART_URL) + rv = self.client.get(thumbnail_url) + self.assertEqual(rv.status_code, 404) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + @with_feature_flags(THUMBNAILS=True) + def test_get_async_dashboard_screenshot_as_selenium(self): + """ + Thumbnails: Simple get async dashboard screenshot as selenium user + """ + self.login(username="alpha") + with patch( + "superset.thumbnails.digest._adjust_string_for_executor" + ) as mock_adjust_string: + mock_adjust_string.return_value = self.digest_return_value + _, thumbnail_url = self._get_id_and_thumbnail_url(DASHBOARD_URL) + assert self.digest_hash in thumbnail_url + assert mock_adjust_string.call_args[0][1] == ExecutorType.SELENIUM + assert mock_adjust_string.call_args[0][2] == "admin" + + rv = self.client.get(thumbnail_url) + self.assertEqual(rv.status_code, 202) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + @with_feature_flags(THUMBNAILS=True) + def test_get_async_dashboard_screenshot_as_current_user(self): + """ + Thumbnails: Simple get async dashboard screenshot as current user + """ + username = "alpha" + self.login(username=username) + with patch.dict( + "superset.thumbnails.digest.current_app.config", + { + "THUMBNAIL_EXECUTE_AS": [ExecutorType.CURRENT_USER], + }, + ), patch( + "superset.thumbnails.digest._adjust_string_for_executor" + ) as mock_adjust_string: + mock_adjust_string.return_value = self.digest_return_value + _, thumbnail_url = self._get_id_and_thumbnail_url(DASHBOARD_URL) + assert self.digest_hash in thumbnail_url + assert mock_adjust_string.call_args[0][1] == ExecutorType.CURRENT_USER + assert mock_adjust_string.call_args[0][2] == username + + rv = self.client.get(thumbnail_url) + self.assertEqual(rv.status_code, 202) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + @with_feature_flags(THUMBNAILS=True) + def test_get_async_dashboard_notfound(self): + """ + Thumbnails: Simple get async dashboard not found + """ + max_id = db.session.query(func.max(Dashboard.id)).scalar() + self.login(username="admin") + uri = 
f"api/v1/dashboard/{max_id + 1}/thumbnail/1234/" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 404) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + @skipUnless((is_feature_enabled("THUMBNAILS")), "Thumbnails feature") + def test_get_async_dashboard_not_allowed(self): + """ + Thumbnails: Simple get async dashboard not allowed + """ + self.login(username="gamma") + _, thumbnail_url = self._get_id_and_thumbnail_url(DASHBOARD_URL) + rv = self.client.get(thumbnail_url) + self.assertEqual(rv.status_code, 404) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + @with_feature_flags(THUMBNAILS=True) + def test_get_async_chart_screenshot_as_selenium(self): + """ + Thumbnails: Simple get async chart screenshot as selenium user + """ + self.login(username="alpha") + with patch( + "superset.thumbnails.digest._adjust_string_for_executor" + ) as mock_adjust_string: + mock_adjust_string.return_value = self.digest_return_value + _, thumbnail_url = self._get_id_and_thumbnail_url(CHART_URL) + assert self.digest_hash in thumbnail_url + assert mock_adjust_string.call_args[0][1] == ExecutorType.SELENIUM + assert mock_adjust_string.call_args[0][2] == "admin" + + rv = self.client.get(thumbnail_url) + self.assertEqual(rv.status_code, 202) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + @with_feature_flags(THUMBNAILS=True) + def test_get_async_chart_screenshot_as_current_user(self): + """ + Thumbnails: Simple get async chart screenshot as current user + """ + username = "alpha" + self.login(username=username) + with patch.dict( + "superset.thumbnails.digest.current_app.config", + { + "THUMBNAIL_EXECUTE_AS": [ExecutorType.CURRENT_USER], + }, + ), patch( + "superset.thumbnails.digest._adjust_string_for_executor" + ) as mock_adjust_string: + mock_adjust_string.return_value = self.digest_return_value + _, thumbnail_url = self._get_id_and_thumbnail_url(CHART_URL) + assert self.digest_hash in thumbnail_url + assert mock_adjust_string.call_args[0][1] == ExecutorType.CURRENT_USER + assert mock_adjust_string.call_args[0][2] == username + + rv = self.client.get(thumbnail_url) + self.assertEqual(rv.status_code, 202) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + @with_feature_flags(THUMBNAILS=True) + def test_get_async_chart_notfound(self): + """ + Thumbnails: Simple get async chart not found + """ + max_id = db.session.query(func.max(Slice.id)).scalar() + self.login(username="admin") + uri = f"api/v1/chart/{max_id + 1}/thumbnail/1234/" + rv = self.client.get(uri) + self.assertEqual(rv.status_code, 404) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + @with_feature_flags(THUMBNAILS=True) + def test_get_cached_chart_wrong_digest(self): + """ + Thumbnails: Simple get chart with wrong digest + """ + with patch.object( + ChartScreenshot, "get_from_cache", return_value=BytesIO(self.mock_image) + ): + self.login(username="admin") + id_, thumbnail_url = self._get_id_and_thumbnail_url(CHART_URL) + rv = self.client.get(f"api/v1/chart/{id_}/thumbnail/1234/") + self.assertEqual(rv.status_code, 302) + self.assertEqual(rv.headers["Location"], thumbnail_url) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + @with_feature_flags(THUMBNAILS=True) + def test_get_cached_dashboard_screenshot(self): + """ + Thumbnails: Simple get cached dashboard screenshot + """ + with patch.object( + DashboardScreenshot, "get_from_cache", return_value=BytesIO(self.mock_image) + ): + 
self.login(username="admin") + _, thumbnail_url = self._get_id_and_thumbnail_url(DASHBOARD_URL) + rv = self.client.get(thumbnail_url) + self.assertEqual(rv.status_code, 200) + self.assertEqual(rv.data, self.mock_image) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + @with_feature_flags(THUMBNAILS=True) + def test_get_cached_chart_screenshot(self): + """ + Thumbnails: Simple get cached chart screenshot + """ + with patch.object( + ChartScreenshot, "get_from_cache", return_value=BytesIO(self.mock_image) + ): + self.login(username="admin") + id_, thumbnail_url = self._get_id_and_thumbnail_url(CHART_URL) + rv = self.client.get(thumbnail_url) + self.assertEqual(rv.status_code, 200) + self.assertEqual(rv.data, self.mock_image) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + @with_feature_flags(THUMBNAILS=True) + def test_get_cached_dashboard_wrong_digest(self): + """ + Thumbnails: Simple get dashboard with wrong digest + """ + with patch.object( + DashboardScreenshot, "get_from_cache", return_value=BytesIO(self.mock_image) + ): + self.login(username="admin") + id_, thumbnail_url = self._get_id_and_thumbnail_url(DASHBOARD_URL) + rv = self.client.get(f"api/v1/dashboard/{id_}/thumbnail/1234/") + self.assertEqual(rv.status_code, 302) + self.assertEqual(rv.headers["Location"], thumbnail_url) diff --git a/tests/integration_tests/users/__init__.py b/tests/integration_tests/users/__init__.py new file mode 100644 index 0000000000000..fd9417fe5c1e9 --- /dev/null +++ b/tests/integration_tests/users/__init__.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/users/api_tests.py b/tests/integration_tests/users/api_tests.py new file mode 100644 index 0000000000000..f4c897b6a0ca1 --- /dev/null +++ b/tests/integration_tests/users/api_tests.py @@ -0,0 +1,64 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# type: ignore +"""Unit tests for Superset""" +import json +from unittest.mock import patch + +from superset import security_manager +from tests.integration_tests.base_tests import SupersetTestCase + +meUri = "/api/v1/me/" + + +class TestCurrentUserApi(SupersetTestCase): + def test_get_me_logged_in(self): + self.login(username="admin") + + rv = self.client.get(meUri) + + self.assertEqual(200, rv.status_code) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual("admin", response["result"]["username"]) + self.assertEqual(True, response["result"]["is_active"]) + self.assertEqual(False, response["result"]["is_anonymous"]) + + def test_get_me_with_roles(self): + self.login(username="admin") + + rv = self.client.get(meUri + "roles/") + self.assertEqual(200, rv.status_code) + response = json.loads(rv.data.decode("utf-8")) + roles = list(response["result"]["roles"].keys()) + self.assertEqual("Admin", roles.pop()) + + @patch("superset.security.manager.g") + def test_get_my_roles_anonymous(self, mock_g): + mock_g.user = security_manager.get_anonymous_user + rv = self.client.get(meUri + "roles/") + self.assertEqual(401, rv.status_code) + + def test_get_me_unauthorized(self): + self.logout() + rv = self.client.get(meUri) + self.assertEqual(401, rv.status_code) + + @patch("superset.security.manager.g") + def test_get_me_anonymous(self, mock_g): + mock_g.user = security_manager.get_anonymous_user + rv = self.client.get(meUri) + self.assertEqual(401, rv.status_code) diff --git a/tests/integration_tests/utils/__init__.py b/tests/integration_tests/utils/__init__.py new file mode 100644 index 0000000000000..d493d5b634640 --- /dev/null +++ b/tests/integration_tests/utils/__init__.py @@ -0,0 +1,29 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import json +from os import path + +FIXTURES_DIR = "tests/integration_tests/fixtures" + + +def read_fixture(fixture_file_name): + with open(path.join(FIXTURES_DIR, fixture_file_name), "rb") as fixture_file: + return fixture_file.read() + + +def load_fixture(fixture_file_name): + return json.loads(read_fixture(fixture_file_name)) diff --git a/tests/integration_tests/utils/cache_manager_tests.py b/tests/integration_tests/utils/cache_manager_tests.py new file mode 100644 index 0000000000000..c5d4b390f9c90 --- /dev/null +++ b/tests/integration_tests/utils/cache_manager_tests.py @@ -0,0 +1,49 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import pytest
+
+from superset.extensions import cache_manager
+from superset.utils.core import backend, DatasourceType
+from tests.integration_tests.base_tests import SupersetTestCase
+
+
+class UtilsCacheManagerTests(SupersetTestCase):
+    def test_get_set_explore_form_data_cache(self):
+        key = "12345"
+        data = {"foo": "bar", "datasource_type": "query"}
+        cache_manager.explore_form_data_cache.set(key, data)
+        assert cache_manager.explore_form_data_cache.get(key) == data
+
+    def test_get_same_context_twice(self):
+        key = "12345"
+        data = {"foo": "bar", "datasource_type": "query"}
+        cache_manager.explore_form_data_cache.set(key, data)
+        assert cache_manager.explore_form_data_cache.get(key) == data
+        assert cache_manager.explore_form_data_cache.get(key) == data
+
+    def test_get_set_explore_form_data_cache_no_datasource_type(self):
+        key = "12345"
+        data = {"foo": "bar"}
+        cache_manager.explore_form_data_cache.set(key, data)
+        # datasource_type should be added because it is not present
+        assert cache_manager.explore_form_data_cache.get(key) == {
+            "datasource_type": DatasourceType.TABLE,
+            **data,
+        }
+
+    def test_get_explore_form_data_cache_invalid_key(self):
+        assert cache_manager.explore_form_data_cache.get("foo") is None
diff --git a/tests/integration_tests/utils/core_tests.py b/tests/integration_tests/utils/core_tests.py
new file mode 100644
index 0000000000000..29b94d6d37eef
--- /dev/null
+++ b/tests/integration_tests/utils/core_tests.py
@@ -0,0 +1,84 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import pytest
+
+from superset.utils.core import form_data_to_adhoc, simple_filter_to_adhoc
+
+
+def test_simple_filter_to_adhoc_generates_deterministic_values():
+    input_1 = {
+        "op": "IS NOT NULL",
+        "col": "LATITUDE",
+        "val": "",
+    }
+
+    input_2 = {**input_1, "col": "LONGITUDE"}
+
+    # The result is the same when given the same input
+    assert simple_filter_to_adhoc(input_1) == simple_filter_to_adhoc(input_1)
+    assert simple_filter_to_adhoc(input_1) == {
+        "clause": "WHERE",
+        "expressionType": "SIMPLE",
+        "comparator": "",
+        "operator": "IS NOT NULL",
+        "subject": "LATITUDE",
+        "filterOptionName": "6ac89d498115da22396f80a765cffc70",
+    }
+
+    # The result is different when given different input
+    assert simple_filter_to_adhoc(input_1) != simple_filter_to_adhoc(input_2)
+    assert simple_filter_to_adhoc(input_2) == {
+        "clause": "WHERE",
+        "expressionType": "SIMPLE",
+        "comparator": "",
+        "operator": "IS NOT NULL",
+        "subject": "LONGITUDE",
+        "filterOptionName": "9c984bd3714883ca859948354ce26ab9",
+    }
+
+
+def test_form_data_to_adhoc_generates_deterministic_values():
+    form_data = {"where": "1 = 1", "having": "count(*) > 1"}
+
+    # The result is the same when given the same input
+    assert form_data_to_adhoc(form_data, "where") == form_data_to_adhoc(
+        form_data, "where"
+    )
+    assert form_data_to_adhoc(form_data, "where") == {
+        "clause": "WHERE",
+        "expressionType": "SQL",
+        "sqlExpression": "1 = 1",
+        "filterOptionName": "99fe79985afbddea4492626dc6a87b74",
+    }
+
+    # The result is different when given different input
+    assert form_data_to_adhoc(form_data, "where") != form_data_to_adhoc(
+        form_data, "having"
+    )
+    assert form_data_to_adhoc(form_data, "having") == {
+        "clause": "HAVING",
+        "expressionType": "SQL",
+        "sqlExpression": "count(*) > 1",
+        "filterOptionName": "1da11f6b709c3190daeabb84f77fc8c2",
+    }
+
+
+def test_form_data_to_adhoc_incorrect_clause_type():
+    form_data = {"where": "1 = 1", "having": "count(*) > 1"}
+
+    with pytest.raises(ValueError):
+        form_data_to_adhoc(form_data, "foobar")
diff --git a/tests/integration_tests/utils/csv_tests.py b/tests/integration_tests/utils/csv_tests.py
new file mode 100644
index 0000000000000..e514efb1d2108
--- /dev/null
+++ b/tests/integration_tests/utils/csv_tests.py
@@ -0,0 +1,83 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import io
+
+import pandas as pd
+import pyarrow as pa
+import pytest
+
+from superset.utils import csv
+
+
+def test_escape_value():
+    result = csv.escape_value("value")
+    assert result == "value"
+
+    result = csv.escape_value("-10")
+    assert result == "-10"
+
+    result = csv.escape_value("@value")
+    assert result == "'@value"
+
+    result = csv.escape_value("+value")
+    assert result == "'+value"
+
+    result = csv.escape_value("-value")
+    assert result == "'-value"
+
+    result = csv.escape_value("=value")
+    assert result == "'=value"
+
+    result = csv.escape_value("|value")
+    assert result == "'\\|value"
+
+    result = csv.escape_value("%value")
+    assert result == "'%value"
+
+    result = csv.escape_value("=cmd|' /C calc'!A0")
+    assert result == "'=cmd\\|' /C calc'!A0"
+
+    result = csv.escape_value('""=10+2')
+    assert result == '\'""=10+2'
+
+    result = csv.escape_value(" =10+2")
+    assert result == "' =10+2"
+
+
+def test_df_to_escaped_csv():
+    csv_rows = [
+        ["col_a", "=func()"],
+        ["-10", "=cmd|' /C calc'!A0"],
+        ["a", '""=b'],
+        [" =a", "b"],
+    ]
+    csv_str = "\n".join([",".join(row) for row in csv_rows])
+
+    df = pd.read_csv(io.StringIO(csv_str))
+
+    escaped_csv_str = csv.df_to_escaped_csv(df, encoding="utf8", index=False)
+    escaped_csv_rows = [row.split(",") for row in escaped_csv_str.strip().split("\n")]
+
+    assert escaped_csv_rows == [
+        ["col_a", "'=func()"],
+        ["-10", "'=cmd\\|' /C calc'!A0"],
+        ["a", "'=b"],  # pandas seems to be removing the leading ""
+        ["' =a", "b"],
+    ]
+
+    df = pa.array([1, None]).to_pandas(integer_object_nulls=True).to_frame()
+    assert csv.df_to_escaped_csv(df, encoding="utf8", index=False) == '0\n1\n""\n'
diff --git a/tests/integration_tests/utils/encrypt_tests.py b/tests/integration_tests/utils/encrypt_tests.py
new file mode 100644
index 0000000000000..2199783529b88
--- /dev/null
+++ b/tests/integration_tests/utils/encrypt_tests.py
@@ -0,0 +1,63 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from typing import Any, Dict, List, Optional + +from sqlalchemy import String, TypeDecorator +from sqlalchemy_utils import EncryptedType +from sqlalchemy_utils.types.encrypted.encrypted_type import StringEncryptedType + +from superset.extensions import encrypted_field_factory +from superset.utils.encrypt import AbstractEncryptedFieldAdapter, SQLAlchemyUtilsAdapter +from tests.integration_tests.base_tests import SupersetTestCase + + +class CustomEncFieldAdapter(AbstractEncryptedFieldAdapter): + def create( + self, + app_config: Optional[Dict[str, Any]], + *args: List[Any], + **kwargs: Optional[Dict[str, Any]] + ) -> TypeDecorator: + if app_config: + return StringEncryptedType(*args, app_config["SECRET_KEY"], **kwargs) + else: + raise Exception("Missing app_config kwarg") + + +class EncryptedFieldTest(SupersetTestCase): + def setUp(self) -> None: + self.app.config[ + "SQLALCHEMY_ENCRYPTED_FIELD_TYPE_ADAPTER" + ] = SQLAlchemyUtilsAdapter + encrypted_field_factory.init_app(self.app) + + super().setUp() + + def test_create_field(self): + field = encrypted_field_factory.create(String(1024)) + self.assertTrue(isinstance(field, EncryptedType)) + self.assertEqual(self.app.config["SECRET_KEY"], field.key) + + def test_custom_adapter(self): + self.app.config[ + "SQLALCHEMY_ENCRYPTED_FIELD_TYPE_ADAPTER" + ] = CustomEncFieldAdapter + encrypted_field_factory.init_app(self.app) + field = encrypted_field_factory.create(String(1024)) + self.assertTrue(isinstance(field, StringEncryptedType)) + self.assertFalse(isinstance(field, EncryptedType)) + self.assertEqual(self.app.config["SECRET_KEY"], field.key) diff --git a/tests/integration_tests/utils/get_dashboards.py b/tests/integration_tests/utils/get_dashboards.py new file mode 100644 index 0000000000000..03260fb94d07f --- /dev/null +++ b/tests/integration_tests/utils/get_dashboards.py @@ -0,0 +1,28 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from typing import List + +from flask_appbuilder import SQLA + +from superset.models.dashboard import Dashboard + + +def get_dashboards_ids(db: SQLA, dashboard_slugs: List[str]) -> List[int]: + result = ( + db.session.query(Dashboard.id).filter(Dashboard.slug.in_(dashboard_slugs)).all() + ) + return [row[0] for row in result] diff --git a/tests/integration_tests/utils/hashing_tests.py b/tests/integration_tests/utils/hashing_tests.py new file mode 100644 index 0000000000000..406d383d7cfdd --- /dev/null +++ b/tests/integration_tests/utils/hashing_tests.py @@ -0,0 +1,96 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import datetime +import math +from typing import Any + +import pytest + +from superset.utils.hashing import md5_sha_from_dict, md5_sha_from_str + + +def test_basic_md5_sha(): + obj = { + "product": "Coffee", + "company": "Gobias Industries", + "price_in_cents": 4000, + } + + serialized_obj = ( + '{"company": "Gobias Industries", "price_in_cents": 4000, "product": "Coffee"}' + ) + + assert md5_sha_from_str(serialized_obj) == md5_sha_from_dict(obj) + assert md5_sha_from_str(serialized_obj) == "35f22273cd6a6798b04f8ddef51135e3" + + +def test_sort_order_md5_sha(): + obj_1 = { + "product": "Coffee", + "price_in_cents": 4000, + "company": "Gobias Industries", + } + + obj_2 = { + "product": "Coffee", + "company": "Gobias Industries", + "price_in_cents": 4000, + } + + assert md5_sha_from_dict(obj_1) == md5_sha_from_dict(obj_2) + assert md5_sha_from_dict(obj_1) == "35f22273cd6a6798b04f8ddef51135e3" + + +def test_custom_default_md5_sha(): + def custom_datetime_serializer(obj: Any): + if isinstance(obj, datetime.datetime): + return "" + + obj = { + "product": "Coffee", + "company": "Gobias Industries", + "datetime": datetime.datetime.now(), + } + + serialized_obj = '{"company": "Gobias Industries", "datetime": "", "product": "Coffee"}' + + assert md5_sha_from_str(serialized_obj) == md5_sha_from_dict( + obj, default=custom_datetime_serializer + ) + assert md5_sha_from_str(serialized_obj) == "dc280121213aabcaeb8087aef268fd0d" + + +def test_ignore_nan_md5_sha(): + obj = { + "product": "Coffee", + "company": "Gobias Industries", + "price": math.nan, + } + + serialized_obj = ( + '{"company": "Gobias Industries", "price": NaN, "product": "Coffee"}' + ) + + assert md5_sha_from_str(serialized_obj) == md5_sha_from_dict(obj) + assert md5_sha_from_str(serialized_obj) == "5d129d1dffebc0bacc734366476d586d" + + serialized_obj = ( + '{"company": "Gobias Industries", "price": null, "product": "Coffee"}' + ) + + assert md5_sha_from_str(serialized_obj) == md5_sha_from_dict(obj, ignore_nan=True) + assert md5_sha_from_str(serialized_obj) == "40e87d61f6add03816bccdeac5713b9f" diff --git a/tests/integration_tests/utils/machine_auth_tests.py b/tests/integration_tests/utils/machine_auth_tests.py new file mode 100644 index 0000000000000..0dc8d4a1249f6 --- /dev/null +++ b/tests/integration_tests/utils/machine_auth_tests.py @@ -0,0 +1,56 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from unittest.mock import call, Mock, patch + +from superset.extensions import machine_auth_provider_factory +from tests.integration_tests.base_tests import SupersetTestCase + + +class MachineAuthProviderTests(SupersetTestCase): + def test_get_auth_cookies(self): + user = self.get_user("admin") + auth_cookies = machine_auth_provider_factory.instance.get_auth_cookies(user) + self.assertIsNotNone(auth_cookies["session"]) + + @patch("superset.utils.machine_auth.MachineAuthProvider.get_auth_cookies") + def test_auth_driver_user(self, get_auth_cookies): + user = self.get_user("admin") + driver = Mock() + get_auth_cookies.return_value = { + "session": "session_val", + "other_cookie": "other_val", + } + machine_auth_provider_factory.instance.authenticate_webdriver(driver, user) + driver.add_cookie.assert_has_calls( + [ + call({"name": "session", "value": "session_val"}), + call({"name": "other_cookie", "value": "other_val"}), + ] + ) + + @patch("superset.utils.machine_auth.request") + def test_auth_driver_request(self, request): + driver = Mock() + request.cookies = {"session": "session_val", "other_cookie": "other_val"} + machine_auth_provider_factory.instance.authenticate_webdriver(driver, None) + driver.add_cookie.assert_has_calls( + [ + call({"name": "session", "value": "session_val"}), + call({"name": "other_cookie", "value": "other_val"}), + ] + ) diff --git a/tests/integration_tests/utils/public_interfaces_test.py b/tests/integration_tests/utils/public_interfaces_test.py new file mode 100644 index 0000000000000..7b5d6712464df --- /dev/null +++ b/tests/integration_tests/utils/public_interfaces_test.py @@ -0,0 +1,103 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from typing import Any, Callable, Dict + +import pytest + +from superset.utils.public_interfaces import compute_hash, get_warning_message + +# These are public interfaces exposed by Superset. Make sure +# to only change the interfaces and update the hashes in new +# major versions of Superset. 
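+# Maps each public callable to its expected compute_hash value; the mapping is
+# currently empty, so the parametrized test below collects no cases.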
+hashes: Dict[Callable[..., Any], str] = {} + + +@pytest.mark.parametrize("interface,expected_hash", list(hashes.items())) +def test_public_interfaces(interface, expected_hash): + """Test that public interfaces have not been accidentally changed.""" + current_hash = compute_hash(interface) + assert current_hash == expected_hash, get_warning_message(interface, current_hash) + + +def test_func_hash(): + """Test that changing a function signature changes its hash.""" + + def some_function(a, b): + return a + b + + original_hash = compute_hash(some_function) + + # pylint: disable=function-redefined + def some_function(a, b, c): + return a + b + c + + assert original_hash != compute_hash(some_function) + + +def test_class_hash(): + """Test that changing a class changes its hash.""" + + # pylint: disable=too-few-public-methods, invalid-name + class SomeClass: + def __init__(self, a, b): + self.a = a + self.b = b + + def add(self): + return self.a + self.b + + original_hash = compute_hash(SomeClass) + + # changing the __init__ should change the hash + # pylint: disable=function-redefined, too-few-public-methods, invalid-name + class SomeClass: + def __init__(self, a, b, c): + self.a = a + self.b = b + self.c = c + + def add(self): + return self.a + self.b + + assert original_hash != compute_hash(SomeClass) + + # renaming a public method should change the hash + # pylint: disable=function-redefined, too-few-public-methods, invalid-name + class SomeClass: + def __init__(self, a, b): + self.a = a + self.b = b + + def sum(self): + return self.a + self.b + + assert original_hash != compute_hash(SomeClass) + + # adding a private method should not change the hash + # pylint: disable=function-redefined, too-few-public-methods, invalid-name + class SomeClass: + def __init__(self, a, b): + self.a = a + self.b = b + + def add(self): + return self._sum() + + def _sum(self): + return self.a + self.b + + assert original_hash == compute_hash(SomeClass) diff --git a/tests/integration_tests/utils_tests.py b/tests/integration_tests/utils_tests.py new file mode 100644 index 0000000000000..e27ad6ec3c5e9 --- /dev/null +++ b/tests/integration_tests/utils_tests.py @@ -0,0 +1,1134 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# isort:skip_file +import uuid +from datetime import date, datetime, time, timedelta +from decimal import Decimal +import json +import os +import re +from typing import Any, Tuple, List, Optional +from unittest.mock import Mock, patch + +from superset.databases.commands.exceptions import DatabaseInvalidError +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) + +import numpy as np +import pandas as pd +import pytest +from flask import Flask, g +import marshmallow +from sqlalchemy.exc import ArgumentError + +import tests.integration_tests.test_app +from superset import app, db, security_manager +from superset.constants import NO_TIME_RANGE +from superset.exceptions import CertificateException, SupersetException +from superset.models.core import Database, Log +from superset.models.dashboard import Dashboard +from superset.models.slice import Slice +from superset.utils.core import ( + base_json_conv, + cast_to_num, + convert_legacy_filters_into_adhoc, + create_ssl_cert_file, + DTTM_ALIAS, + extract_dataframe_dtypes, + format_timedelta, + GenericDataType, + get_form_data_token, + get_iterable, + get_email_address_list, + get_stacktrace, + json_int_dttm_ser, + json_iso_dttm_ser, + JSONEncodedDict, + merge_extra_filters, + merge_extra_form_data, + merge_request_params, + normalize_dttm_col, + parse_ssl_cert, + parse_js_uri_path_item, + split, + validate_json, + zlib_compress, + zlib_decompress, + DateColumn, +) +from superset.utils.database import get_or_create_db +from superset.utils import schema +from superset.utils.hashing import md5_sha_from_str +from superset.views.utils import build_extra_filters, get_form_data +from tests.integration_tests.base_tests import SupersetTestCase +from tests.integration_tests.fixtures.world_bank_dashboard import ( + load_world_bank_dashboard_with_slices, + load_world_bank_data, +) + +from .fixtures.certificates import ssl_certificate + + +class TestUtils(SupersetTestCase): + def test_json_int_dttm_ser(self): + dttm = datetime(2020, 1, 1) + ts = 1577836800000.0 + assert json_int_dttm_ser(dttm) == ts + assert json_int_dttm_ser(date(2020, 1, 1)) == ts + assert json_int_dttm_ser(datetime(1970, 1, 1)) == 0 + assert json_int_dttm_ser(date(1970, 1, 1)) == 0 + assert json_int_dttm_ser(dttm + timedelta(milliseconds=1)) == (ts + 1) + assert json_int_dttm_ser(np.int64(1)) == 1 + + with self.assertRaises(TypeError): + json_int_dttm_ser(np.datetime64()) + + def test_json_iso_dttm_ser(self): + dttm = datetime(2020, 1, 1) + dt = date(2020, 1, 1) + t = time() + assert json_iso_dttm_ser(dttm) == dttm.isoformat() + assert json_iso_dttm_ser(dt) == dt.isoformat() + assert json_iso_dttm_ser(t) == t.isoformat() + assert json_iso_dttm_ser(np.int64(1)) == 1 + + assert ( + json_iso_dttm_ser(np.datetime64(), pessimistic=True) + == "Unserializable []" + ) + + with self.assertRaises(TypeError): + json_iso_dttm_ser(np.datetime64()) + + def test_base_json_conv(self): + assert isinstance(base_json_conv(np.bool_(1)), bool) + assert isinstance(base_json_conv(np.int64(1)), int) + assert isinstance(base_json_conv(np.array([1, 2, 3])), list) + assert base_json_conv(np.array(None)) is None + assert isinstance(base_json_conv(set([1])), list) + assert isinstance(base_json_conv(Decimal("1.0")), float) + assert isinstance(base_json_conv(uuid.uuid4()), str) + assert isinstance(base_json_conv(time()), str) + assert isinstance(base_json_conv(timedelta(0)), str) + assert isinstance(base_json_conv(bytes()), str) + 
assert base_json_conv(bytes("", encoding="utf-16")) == "[bytes]" + + with pytest.raises(TypeError): + base_json_conv(np.datetime64()) + + def test_zlib_compression(self): + json_str = '{"test": 1}' + blob = zlib_compress(json_str) + got_str = zlib_decompress(blob) + self.assertEqual(json_str, got_str) + + def test_merge_extra_filters(self): + # does nothing if no extra filters + form_data = {"A": 1, "B": 2, "c": "test"} + expected = {**form_data, "adhoc_filters": [], "applied_time_extras": {}} + merge_extra_filters(form_data) + self.assertEqual(form_data, expected) + # empty extra_filters + form_data = {"A": 1, "B": 2, "c": "test", "extra_filters": []} + expected = { + "A": 1, + "B": 2, + "c": "test", + "adhoc_filters": [], + "applied_time_extras": {}, + } + merge_extra_filters(form_data) + self.assertEqual(form_data, expected) + # copy over extra filters into empty filters + form_data = { + "extra_filters": [ + {"col": "a", "op": "in", "val": "someval"}, + {"col": "B", "op": "==", "val": ["c1", "c2"]}, + ] + } + expected = { + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": "someval", + "expressionType": "SIMPLE", + "filterOptionName": "90cfb3c34852eb3bc741b0cc20053b46", + "isExtra": True, + "operator": "in", + "subject": "a", + }, + { + "clause": "WHERE", + "comparator": ["c1", "c2"], + "expressionType": "SIMPLE", + "filterOptionName": "6c178d069965f1c02640661280415d96", + "isExtra": True, + "operator": "==", + "subject": "B", + }, + ], + "applied_time_extras": {}, + } + merge_extra_filters(form_data) + self.assertEqual(form_data, expected) + # adds extra filters to existing filters + form_data = { + "extra_filters": [ + {"col": "a", "op": "in", "val": "someval"}, + {"col": "B", "op": "==", "val": ["c1", "c2"]}, + ], + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": ["G1", "g2"], + "expressionType": "SIMPLE", + "operator": "!=", + "subject": "D", + } + ], + } + expected = { + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": ["G1", "g2"], + "expressionType": "SIMPLE", + "operator": "!=", + "subject": "D", + }, + { + "clause": "WHERE", + "comparator": "someval", + "expressionType": "SIMPLE", + "filterOptionName": "90cfb3c34852eb3bc741b0cc20053b46", + "isExtra": True, + "operator": "in", + "subject": "a", + }, + { + "clause": "WHERE", + "comparator": ["c1", "c2"], + "expressionType": "SIMPLE", + "filterOptionName": "6c178d069965f1c02640661280415d96", + "isExtra": True, + "operator": "==", + "subject": "B", + }, + ], + "applied_time_extras": {}, + } + merge_extra_filters(form_data) + self.assertEqual(form_data, expected) + # adds extra filters to existing filters and sets time options + form_data = { + "extra_filters": [ + {"col": "__time_range", "op": "in", "val": "1 year ago :"}, + {"col": "__time_col", "op": "in", "val": "birth_year"}, + {"col": "__time_grain", "op": "in", "val": "years"}, + {"col": "A", "op": "like", "val": "hello"}, + {"col": "__granularity", "op": "in", "val": "90 seconds"}, + ] + } + expected = { + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": "hello", + "expressionType": "SIMPLE", + "filterOptionName": "e3cbdd92a2ae23ca92c6d7fca42e36a6", + "isExtra": True, + "operator": "like", + "subject": "A", + } + ], + "time_range": "1 year ago :", + "granularity_sqla": "birth_year", + "time_grain_sqla": "years", + "granularity": "90 seconds", + "applied_time_extras": { + "__time_range": "1 year ago :", + "__time_col": "birth_year", + "__time_grain": "years", + "__granularity": "90 seconds", + }, + } + 
merge_extra_filters(form_data) + self.assertEqual(form_data, expected) + + def test_merge_extra_filters_ignores_empty_filters(self): + form_data = { + "extra_filters": [ + {"col": "a", "op": "in", "val": ""}, + {"col": "B", "op": "==", "val": []}, + ] + } + expected = {"adhoc_filters": [], "applied_time_extras": {}} + merge_extra_filters(form_data) + self.assertEqual(form_data, expected) + + def test_merge_extra_filters_ignores_nones(self): + form_data = { + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": "", + "expressionType": "SIMPLE", + "operator": "in", + "subject": None, + } + ], + "extra_filters": [{"col": "B", "op": "==", "val": []}], + } + expected = { + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": "", + "expressionType": "SIMPLE", + "operator": "in", + "subject": None, + } + ], + "applied_time_extras": {}, + } + merge_extra_filters(form_data) + self.assertEqual(form_data, expected) + + def test_merge_extra_filters_ignores_equal_filters(self): + form_data = { + "extra_filters": [ + {"col": "a", "op": "in", "val": "someval"}, + {"col": "B", "op": "==", "val": ["c1", "c2"]}, + {"col": "c", "op": "in", "val": ["c1", 1, None]}, + ], + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": "someval", + "expressionType": "SIMPLE", + "operator": "in", + "subject": "a", + }, + { + "clause": "WHERE", + "comparator": ["c1", "c2"], + "expressionType": "SIMPLE", + "operator": "==", + "subject": "B", + }, + { + "clause": "WHERE", + "comparator": ["c1", 1, None], + "expressionType": "SIMPLE", + "operator": "in", + "subject": "c", + }, + ], + } + expected = { + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": "someval", + "expressionType": "SIMPLE", + "operator": "in", + "subject": "a", + }, + { + "clause": "WHERE", + "comparator": ["c1", "c2"], + "expressionType": "SIMPLE", + "operator": "==", + "subject": "B", + }, + { + "clause": "WHERE", + "comparator": ["c1", 1, None], + "expressionType": "SIMPLE", + "operator": "in", + "subject": "c", + }, + ], + "applied_time_extras": {}, + } + merge_extra_filters(form_data) + self.assertEqual(form_data, expected) + + def test_merge_extra_filters_merges_different_val_types(self): + form_data = { + "extra_filters": [ + {"col": "a", "op": "in", "val": ["g1", "g2"]}, + {"col": "B", "op": "==", "val": ["c1", "c2"]}, + ], + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": "someval", + "expressionType": "SIMPLE", + "operator": "in", + "subject": "a", + }, + { + "clause": "WHERE", + "comparator": ["c1", "c2"], + "expressionType": "SIMPLE", + "operator": "==", + "subject": "B", + }, + ], + } + expected = { + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": "someval", + "expressionType": "SIMPLE", + "operator": "in", + "subject": "a", + }, + { + "clause": "WHERE", + "comparator": ["c1", "c2"], + "expressionType": "SIMPLE", + "operator": "==", + "subject": "B", + }, + { + "clause": "WHERE", + "comparator": ["g1", "g2"], + "expressionType": "SIMPLE", + "filterOptionName": "c11969c994b40a83a4ae7d48ff1ea28e", + "isExtra": True, + "operator": "in", + "subject": "a", + }, + ], + "applied_time_extras": {}, + } + merge_extra_filters(form_data) + self.assertEqual(form_data, expected) + form_data = { + "extra_filters": [ + {"col": "a", "op": "in", "val": "someval"}, + {"col": "B", "op": "==", "val": ["c1", "c2"]}, + ], + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": ["g1", "g2"], + "expressionType": "SIMPLE", + "operator": "in", + "subject": "a", + }, + { + "clause": "WHERE", + "comparator": 
["c1", "c2"], + "expressionType": "SIMPLE", + "operator": "==", + "subject": "B", + }, + ], + } + expected = { + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": ["g1", "g2"], + "expressionType": "SIMPLE", + "operator": "in", + "subject": "a", + }, + { + "clause": "WHERE", + "comparator": ["c1", "c2"], + "expressionType": "SIMPLE", + "operator": "==", + "subject": "B", + }, + { + "clause": "WHERE", + "comparator": "someval", + "expressionType": "SIMPLE", + "filterOptionName": "90cfb3c34852eb3bc741b0cc20053b46", + "isExtra": True, + "operator": "in", + "subject": "a", + }, + ], + "applied_time_extras": {}, + } + merge_extra_filters(form_data) + self.assertEqual(form_data, expected) + + def test_merge_extra_filters_adds_unequal_lists(self): + form_data = { + "extra_filters": [ + {"col": "a", "op": "in", "val": ["g1", "g2", "g3"]}, + {"col": "B", "op": "==", "val": ["c1", "c2", "c3"]}, + ], + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": ["g1", "g2"], + "expressionType": "SIMPLE", + "operator": "in", + "subject": "a", + }, + { + "clause": "WHERE", + "comparator": ["c1", "c2"], + "expressionType": "SIMPLE", + "operator": "==", + "subject": "B", + }, + ], + } + expected = { + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": ["g1", "g2"], + "expressionType": "SIMPLE", + "operator": "in", + "subject": "a", + }, + { + "clause": "WHERE", + "comparator": ["c1", "c2"], + "expressionType": "SIMPLE", + "operator": "==", + "subject": "B", + }, + { + "clause": "WHERE", + "comparator": ["g1", "g2", "g3"], + "expressionType": "SIMPLE", + "filterOptionName": "21cbb68af7b17e62b3b2f75e2190bfd7", + "isExtra": True, + "operator": "in", + "subject": "a", + }, + { + "clause": "WHERE", + "comparator": ["c1", "c2", "c3"], + "expressionType": "SIMPLE", + "filterOptionName": "0a8dcb928f1f4bba97643c6e68d672f1", + "isExtra": True, + "operator": "==", + "subject": "B", + }, + ], + "applied_time_extras": {}, + } + merge_extra_filters(form_data) + self.assertEqual(form_data, expected) + + def test_merge_extra_filters_when_applied_time_extras_predefined(self): + form_data = {"applied_time_extras": {"__time_range": "Last week"}} + merge_extra_filters(form_data) + + self.assertEqual( + form_data, + { + "applied_time_extras": {"__time_range": "Last week"}, + "adhoc_filters": [], + }, + ) + + def test_merge_request_params_when_url_params_undefined(self): + form_data = {"since": "2000", "until": "now"} + url_params = {"form_data": form_data, "dashboard_ids": "(1,2,3,4,5)"} + merge_request_params(form_data, url_params) + self.assertIn("url_params", form_data.keys()) + self.assertIn("dashboard_ids", form_data["url_params"]) + self.assertNotIn("form_data", form_data.keys()) + + def test_merge_request_params_when_url_params_predefined(self): + form_data = { + "since": "2000", + "until": "now", + "url_params": {"abc": "123", "dashboard_ids": "(1,2,3)"}, + } + url_params = {"form_data": form_data, "dashboard_ids": "(1,2,3,4,5)"} + merge_request_params(form_data, url_params) + self.assertIn("url_params", form_data.keys()) + self.assertIn("abc", form_data["url_params"]) + self.assertEqual( + url_params["dashboard_ids"], form_data["url_params"]["dashboard_ids"] + ) + + def test_format_timedelta(self): + self.assertEqual(format_timedelta(timedelta(0)), "0:00:00") + self.assertEqual(format_timedelta(timedelta(days=1)), "1 day, 0:00:00") + self.assertEqual(format_timedelta(timedelta(minutes=-6)), "-0:06:00") + self.assertEqual( + format_timedelta(timedelta(0) - timedelta(days=1, hours=5, minutes=6)), + 
"-1 day, 5:06:00", + ) + self.assertEqual( + format_timedelta(timedelta(0) - timedelta(days=16, hours=4, minutes=3)), + "-16 days, 4:03:00", + ) + + def test_json_encoded_obj(self): + obj = {"a": 5, "b": ["a", "g", 5]} + val = '{"a": 5, "b": ["a", "g", 5]}' + jsonObj = JSONEncodedDict() + resp = jsonObj.process_bind_param(obj, "dialect") + self.assertIn('"a": 5', resp) + self.assertIn('"b": ["a", "g", 5]', resp) + self.assertEqual(jsonObj.process_result_value(val, "dialect"), obj) + + def test_validate_json(self): + valid = '{"a": 5, "b": [1, 5, ["g", "h"]]}' + self.assertIsNone(validate_json(valid)) + invalid = '{"a": 5, "b": [1, 5, ["g", "h]]}' + with self.assertRaises(SupersetException): + validate_json(invalid) + + def test_convert_legacy_filters_into_adhoc_where(self): + form_data = {"where": "a = 1"} + expected = { + "adhoc_filters": [ + { + "clause": "WHERE", + "expressionType": "SQL", + "filterOptionName": "46fb6d7891e23596e42ae38da94a57e0", + "sqlExpression": "a = 1", + } + ] + } + convert_legacy_filters_into_adhoc(form_data) + self.assertEqual(form_data, expected) + + def test_convert_legacy_filters_into_adhoc_filters(self): + form_data = {"filters": [{"col": "a", "op": "in", "val": "someval"}]} + expected = { + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": "someval", + "expressionType": "SIMPLE", + "filterOptionName": "135c7ee246666b840a3d7a9c3a30cf38", + "operator": "in", + "subject": "a", + } + ] + } + convert_legacy_filters_into_adhoc(form_data) + self.assertEqual(form_data, expected) + + def test_convert_legacy_filters_into_adhoc_having(self): + form_data = {"having": "COUNT(1) = 1"} + expected = { + "adhoc_filters": [ + { + "clause": "HAVING", + "expressionType": "SQL", + "filterOptionName": "683f1c26466ab912f75a00842e0f2f7b", + "sqlExpression": "COUNT(1) = 1", + } + ] + } + convert_legacy_filters_into_adhoc(form_data) + self.assertEqual(form_data, expected) + + def test_convert_legacy_filters_into_adhoc_having_filters(self): + form_data = {"having_filters": [{"col": "COUNT(1)", "op": "==", "val": 1}]} + expected = { + "adhoc_filters": [ + { + "clause": "HAVING", + "comparator": 1, + "expressionType": "SIMPLE", + "filterOptionName": "967d0fb409f6d9c7a6c03a46cf933c9c", + "operator": "==", + "subject": "COUNT(1)", + } + ] + } + convert_legacy_filters_into_adhoc(form_data) + self.assertEqual(form_data, expected) + + def test_convert_legacy_filters_into_adhoc_present_and_empty(self): + form_data = {"adhoc_filters": [], "where": "a = 1"} + expected = { + "adhoc_filters": [ + { + "clause": "WHERE", + "expressionType": "SQL", + "filterOptionName": "46fb6d7891e23596e42ae38da94a57e0", + "sqlExpression": "a = 1", + } + ] + } + convert_legacy_filters_into_adhoc(form_data) + self.assertEqual(form_data, expected) + + def test_convert_legacy_filters_into_adhoc_present_and_nonempty(self): + form_data = { + "adhoc_filters": [ + {"clause": "WHERE", "expressionType": "SQL", "sqlExpression": "a = 1"} + ], + "filters": [{"col": "a", "op": "in", "val": "someval"}], + "having": "COUNT(1) = 1", + "having_filters": [{"col": "COUNT(1)", "op": "==", "val": 1}], + } + expected = { + "adhoc_filters": [ + {"clause": "WHERE", "expressionType": "SQL", "sqlExpression": "a = 1"} + ] + } + convert_legacy_filters_into_adhoc(form_data) + self.assertEqual(form_data, expected) + + def test_parse_js_uri_path_items_eval_undefined(self): + self.assertIsNone(parse_js_uri_path_item("undefined", eval_undefined=True)) + self.assertIsNone(parse_js_uri_path_item("null", eval_undefined=True)) + 
self.assertEqual("undefined", parse_js_uri_path_item("undefined")) + self.assertEqual("null", parse_js_uri_path_item("null")) + + def test_parse_js_uri_path_items_unquote(self): + self.assertEqual("slashed/name", parse_js_uri_path_item("slashed%2fname")) + self.assertEqual( + "slashed%2fname", parse_js_uri_path_item("slashed%2fname", unquote=False) + ) + + def test_parse_js_uri_path_items_item_optional(self): + self.assertIsNone(parse_js_uri_path_item(None)) + self.assertIsNotNone(parse_js_uri_path_item("item")) + + def test_get_stacktrace(self): + with app.app_context(): + app.config["SHOW_STACKTRACE"] = True + try: + raise Exception("NONONO!") + except Exception: + stacktrace = get_stacktrace() + self.assertIn("NONONO", stacktrace) + + app.config["SHOW_STACKTRACE"] = False + try: + raise Exception("NONONO!") + except Exception: + stacktrace = get_stacktrace() + assert stacktrace is None + + def test_split(self): + self.assertEqual(list(split("a b")), ["a", "b"]) + self.assertEqual(list(split("a,b", delimiter=",")), ["a", "b"]) + self.assertEqual(list(split("a,(b,a)", delimiter=",")), ["a", "(b,a)"]) + self.assertEqual( + list(split('a,(b,a),"foo , bar"', delimiter=",")), + ["a", "(b,a)", '"foo , bar"'], + ) + self.assertEqual( + list(split("a,'b,c'", delimiter=",", quote="'")), ["a", "'b,c'"] + ) + self.assertEqual(list(split('a "b c"')), ["a", '"b c"']) + self.assertEqual(list(split(r'a "b \" c"')), ["a", r'"b \" c"']) + + def test_get_or_create_db(self): + get_or_create_db("test_db", "sqlite:///superset.db") + database = db.session.query(Database).filter_by(database_name="test_db").one() + self.assertIsNotNone(database) + self.assertEqual(database.sqlalchemy_uri, "sqlite:///superset.db") + self.assertIsNotNone( + security_manager.find_permission_view_menu("database_access", database.perm) + ) + # Test change URI + get_or_create_db("test_db", "sqlite:///changed.db") + database = db.session.query(Database).filter_by(database_name="test_db").one() + self.assertEqual(database.sqlalchemy_uri, "sqlite:///changed.db") + db.session.delete(database) + db.session.commit() + + def test_get_or_create_db_invalid_uri(self): + with self.assertRaises(DatabaseInvalidError): + get_or_create_db("test_db", "yoursql:superset.db/()") + + def test_get_iterable(self): + self.assertListEqual(get_iterable(123), [123]) + self.assertListEqual(get_iterable([123]), [123]) + self.assertListEqual(get_iterable("foo"), ["foo"]) + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def test_build_extra_filters(self): + world_health = db.session.query(Dashboard).filter_by(slug="world_health").one() + layout = json.loads(world_health.position_json) + filter_ = db.session.query(Slice).filter_by(slice_name="Region Filter").one() + world = db.session.query(Slice).filter_by(slice_name="World's Population").one() + box_plot = db.session.query(Slice).filter_by(slice_name="Box plot").one() + treemap = db.session.query(Slice).filter_by(slice_name="Treemap").one() + + filter_scopes = { + str(filter_.id): { + "region": {"scope": ["ROOT_ID"], "immune": [treemap.id]}, + "country_name": { + "scope": ["ROOT_ID"], + "immune": [treemap.id, box_plot.id], + }, + } + } + + default_filters = { + str(filter_.id): { + "region": ["North America"], + "country_name": ["United States"], + } + } + + # immune to all filters + assert ( + build_extra_filters(layout, filter_scopes, default_filters, treemap.id) + == [] + ) + + # in scope + assert build_extra_filters( + layout, filter_scopes, default_filters, world.id + ) == [ + {"col": 
"region", "op": "==", "val": "North America"}, + {"col": "country_name", "op": "in", "val": ["United States"]}, + ] + + assert build_extra_filters( + layout, filter_scopes, default_filters, box_plot.id + ) == [{"col": "region", "op": "==", "val": "North America"}] + + def test_merge_extra_filters_with_no_extras(self): + form_data = { + "time_range": "Last 10 days", + } + merge_extra_form_data(form_data) + self.assertEqual( + form_data, + { + "time_range": "Last 10 days", + "adhoc_filters": [], + }, + ) + + def test_merge_extra_filters_with_unset_legacy_time_range(self): + """ + Make sure native filter is applied if filter box time range is unset. + """ + form_data = { + "time_range": "Last 10 days", + "extra_filters": [ + {"col": "__time_range", "op": "==", "val": NO_TIME_RANGE}, + ], + "extra_form_data": {"time_range": "Last year"}, + } + merge_extra_filters(form_data) + self.assertEqual( + form_data, + { + "time_range": "Last year", + "applied_time_extras": {}, + "adhoc_filters": [], + }, + ) + + def test_merge_extra_filters_with_conflicting_time_ranges(self): + """ + Make sure filter box takes precedence if both native filter and filter box + time ranges are set. + """ + form_data = { + "time_range": "Last 10 days", + "extra_filters": [{"col": "__time_range", "op": "==", "val": "Last week"}], + "extra_form_data": { + "time_range": "Last year", + }, + } + merge_extra_filters(form_data) + self.assertEqual( + form_data, + { + "time_range": "Last week", + "applied_time_extras": {"__time_range": "Last week"}, + "adhoc_filters": [], + }, + ) + + def test_merge_extra_filters_with_extras(self): + form_data = { + "time_range": "Last 10 days", + "extra_form_data": { + "filters": [{"col": "foo", "op": "IN", "val": ["bar"]}], + "adhoc_filters": [ + { + "expressionType": "SQL", + "clause": "WHERE", + "sqlExpression": "1 = 0", + } + ], + "time_range": "Last 100 years", + "time_grain_sqla": "PT1M", + "relative_start": "now", + }, + } + merge_extra_form_data(form_data) + adhoc_filters = form_data["adhoc_filters"] + assert adhoc_filters[0] == { + "clause": "WHERE", + "expressionType": "SQL", + "isExtra": True, + "sqlExpression": "1 = 0", + } + converted_filter = adhoc_filters[1] + del converted_filter["filterOptionName"] + assert converted_filter == { + "clause": "WHERE", + "comparator": ["bar"], + "expressionType": "SIMPLE", + "isExtra": True, + "operator": "IN", + "subject": "foo", + } + assert form_data["time_range"] == "Last 100 years" + assert form_data["time_grain_sqla"] == "PT1M" + assert form_data["extras"]["relative_start"] == "now" + + def test_ssl_certificate_parse(self): + parsed_certificate = parse_ssl_cert(ssl_certificate) + self.assertEqual(parsed_certificate.serial_number, 12355228710836649848) + + def test_ssl_certificate_file_creation(self): + path = create_ssl_cert_file(ssl_certificate) + expected_filename = md5_sha_from_str(ssl_certificate) + self.assertIn(expected_filename, path) + self.assertTrue(os.path.exists(path)) + + def test_get_email_address_list(self): + self.assertEqual(get_email_address_list("a@a"), ["a@a"]) + self.assertEqual(get_email_address_list(" a@a "), ["a@a"]) + self.assertEqual(get_email_address_list("a@a\n"), ["a@a"]) + self.assertEqual(get_email_address_list(",a@a;"), ["a@a"]) + self.assertEqual( + get_email_address_list(",a@a; b@b c@c a-c@c; d@d, f@f"), + ["a@a", "b@b", "c@c", "a-c@c", "d@d", "f@f"], + ) + + def test_get_form_data_default(self) -> None: + with app.test_request_context(): + form_data, slc = get_form_data() + self.assertEqual(slc, None) + + def 
test_get_form_data_request_args(self) -> None: + with app.test_request_context( + query_string={"form_data": json.dumps({"foo": "bar"})} + ): + form_data, slc = get_form_data() + self.assertEqual(form_data, {"foo": "bar"}) + self.assertEqual(slc, None) + + def test_get_form_data_request_form(self) -> None: + with app.test_request_context(data={"form_data": json.dumps({"foo": "bar"})}): + form_data, slc = get_form_data() + self.assertEqual(form_data, {"foo": "bar"}) + self.assertEqual(slc, None) + + def test_get_form_data_request_form_with_queries(self) -> None: + # the CSV export uses for requests, even when sending requests to + # /api/v1/chart/data + with app.test_request_context( + data={ + "form_data": json.dumps({"queries": [{"url_params": {"foo": "bar"}}]}) + } + ): + form_data, slc = get_form_data() + self.assertEqual(form_data, {"url_params": {"foo": "bar"}}) + self.assertEqual(slc, None) + + def test_get_form_data_request_args_and_form(self) -> None: + with app.test_request_context( + data={"form_data": json.dumps({"foo": "bar"})}, + query_string={"form_data": json.dumps({"baz": "bar"})}, + ): + form_data, slc = get_form_data() + self.assertEqual(form_data, {"baz": "bar", "foo": "bar"}) + self.assertEqual(slc, None) + + def test_get_form_data_globals(self) -> None: + with app.test_request_context(): + g.form_data = {"foo": "bar"} + form_data, slc = get_form_data() + delattr(g, "form_data") + self.assertEqual(form_data, {"foo": "bar"}) + self.assertEqual(slc, None) + + def test_get_form_data_corrupted_json(self) -> None: + with app.test_request_context( + data={"form_data": "{x: '2324'}"}, + query_string={"form_data": '{"baz": "bar"'}, + ): + form_data, slc = get_form_data() + self.assertEqual(form_data, {}) + self.assertEqual(slc, None) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_log_this(self) -> None: + # TODO: Add additional scenarios. 
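+        # Hit explore_json for the "Girls" slice and assert that a Log row is
+        # recorded with the originating dashboard_id and the submitted form_data.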
+ self.login(username="admin") + slc = self.get_slice("Girls", db.session) + dashboard_id = 1 + + assert slc.viz is not None + resp = self.get_json_resp( + f"/superset/explore_json/{slc.datasource_type}/{slc.datasource_id}/" + + f'?form_data={{"slice_id": {slc.id}}}&dashboard_id={dashboard_id}', + {"form_data": json.dumps(slc.viz.form_data)}, + ) + + record = ( + db.session.query(Log) + .filter_by(action="explore_json", slice_id=slc.id) + .order_by(Log.dttm.desc()) + .first() + ) + + self.assertEqual(record.dashboard_id, dashboard_id) + self.assertEqual(json.loads(record.json)["dashboard_id"], str(dashboard_id)) + self.assertEqual(json.loads(record.json)["form_data"]["slice_id"], slc.id) + + self.assertEqual( + json.loads(record.json)["form_data"]["viz_type"], + slc.viz.form_data["viz_type"], + ) + + def test_schema_validate_json(self): + valid = '{"a": 5, "b": [1, 5, ["g", "h"]]}' + self.assertIsNone(schema.validate_json(valid)) + invalid = '{"a": 5, "b": [1, 5, ["g", "h]]}' + self.assertRaises(marshmallow.ValidationError, schema.validate_json, invalid) + + def test_schema_one_of_case_insensitive(self): + validator = schema.OneOfCaseInsensitive(choices=[1, 2, 3, "FoO", "BAR", "baz"]) + self.assertEqual(1, validator(1)) + self.assertEqual(2, validator(2)) + self.assertEqual("FoO", validator("FoO")) + self.assertEqual("FOO", validator("FOO")) + self.assertEqual("bar", validator("bar")) + self.assertEqual("BaZ", validator("BaZ")) + self.assertRaises(marshmallow.ValidationError, validator, "qwerty") + self.assertRaises(marshmallow.ValidationError, validator, 4) + + def test_cast_to_num(self) -> None: + assert cast_to_num("5") == 5 + assert cast_to_num("5.2") == 5.2 + assert cast_to_num(10) == 10 + assert cast_to_num(10.1) == 10.1 + assert cast_to_num(None) is None + assert cast_to_num("this is not a string") is None + + def test_get_form_data_token(self): + assert get_form_data_token({"token": "token_abcdefg1"}) == "token_abcdefg1" + generated_token = get_form_data_token({}) + assert re.match(r"^token_[a-z0-9]{8}$", generated_token) is not None + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_extract_dataframe_dtypes(self): + slc = self.get_slice("Girls", db.session) + cols: Tuple[Tuple[str, GenericDataType, List[Any]], ...] 
= ( + ("dt", GenericDataType.TEMPORAL, [date(2021, 2, 4), date(2021, 2, 4)]), + ( + "dttm", + GenericDataType.TEMPORAL, + [datetime(2021, 2, 4, 1, 1, 1), datetime(2021, 2, 4, 1, 1, 1)], + ), + ("str", GenericDataType.STRING, ["foo", "foo"]), + ("int", GenericDataType.NUMERIC, [1, 1]), + ("float", GenericDataType.NUMERIC, [0.5, 0.5]), + ("mixed-int-float", GenericDataType.NUMERIC, [0.5, 1.0]), + ("bool", GenericDataType.BOOLEAN, [True, False]), + ("mixed-str-int", GenericDataType.STRING, ["abc", 1.0]), + ("obj", GenericDataType.STRING, [{"a": 1}, {"a": 1}]), + ("dt_null", GenericDataType.TEMPORAL, [None, date(2021, 2, 4)]), + ( + "dttm_null", + GenericDataType.TEMPORAL, + [None, datetime(2021, 2, 4, 1, 1, 1)], + ), + ("str_null", GenericDataType.STRING, [None, "foo"]), + ("int_null", GenericDataType.NUMERIC, [None, 1]), + ("float_null", GenericDataType.NUMERIC, [None, 0.5]), + ("bool_null", GenericDataType.BOOLEAN, [None, False]), + ("obj_null", GenericDataType.STRING, [None, {"a": 1}]), + # Non-timestamp columns should be identified as temporal if + # `is_dttm` is set to `True` in the underlying datasource + ("ds", GenericDataType.TEMPORAL, [None, {"ds": "2017-01-01"}]), + ) + + df = pd.DataFrame(data={col[0]: col[2] for col in cols}) + assert extract_dataframe_dtypes(df, slc.datasource) == [col[1] for col in cols] + + def test_normalize_dttm_col(self): + def normalize_col( + df: pd.DataFrame, + timestamp_format: Optional[str], + offset: int, + time_shift: Optional[str], + ) -> pd.DataFrame: + df = df.copy() + normalize_dttm_col( + df, + tuple( + [ + DateColumn.get_legacy_time_column( + timestamp_format=timestamp_format, + offset=offset, + time_shift=time_shift, + ) + ] + ), + ) + return df + + ts = pd.Timestamp(2021, 2, 15, 19, 0, 0, 0) + df = pd.DataFrame([{"__timestamp": ts, "a": 1}]) + + # test regular (non-numeric) format + assert normalize_col(df, None, 0, None)[DTTM_ALIAS][0] == ts + assert normalize_col(df, "epoch_ms", 0, None)[DTTM_ALIAS][0] == ts + assert normalize_col(df, "epoch_s", 0, None)[DTTM_ALIAS][0] == ts + + # test offset + assert normalize_col(df, None, 1, None)[DTTM_ALIAS][0] == pd.Timestamp( + 2021, 2, 15, 20, 0, 0, 0 + ) + + # test offset and timedelta + assert normalize_col(df, None, 1, "30 minutes")[DTTM_ALIAS][0] == pd.Timestamp( + 2021, 2, 15, 20, 30, 0, 0 + ) + + # test numeric epoch_s format + df = pd.DataFrame([{"__timestamp": ts.timestamp(), "a": 1}]) + assert normalize_col(df, "epoch_s", 0, None)[DTTM_ALIAS][0] == ts + + # test numeric epoch_ms format + df = pd.DataFrame([{"__timestamp": ts.timestamp() * 1000, "a": 1}]) + assert normalize_col(df, "epoch_ms", 0, None)[DTTM_ALIAS][0] == ts + + # test that out of bounds timestamps are coerced to None instead of + # erroring out + df = pd.DataFrame([{"__timestamp": "1677-09-21 00:00:00", "a": 1}]) + assert pd.isnull(normalize_col(df, None, 0, None)[DTTM_ALIAS][0]) diff --git a/tests/integration_tests/viz_tests.py b/tests/integration_tests/viz_tests.py new file mode 100644 index 0000000000000..137e2a474c344 --- /dev/null +++ b/tests/integration_tests/viz_tests.py @@ -0,0 +1,1540 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# isort:skip_file +from datetime import date, datetime, timezone +import logging +from math import nan +from unittest.mock import Mock, patch +from typing import Any, Dict, List, Set + +import numpy as np +import pandas as pd +import pytest + +import tests.integration_tests.test_app +import superset.viz as viz +from superset import app +from superset.constants import NULL_STRING +from superset.exceptions import QueryObjectValidationError, SpatialException +from superset.utils.core import DTTM_ALIAS + +from .base_tests import SupersetTestCase +from .utils import load_fixture + +logger = logging.getLogger(__name__) + + +class TestBaseViz(SupersetTestCase): + def test_constructor_exception_no_datasource(self): + form_data = {} + datasource = None + with self.assertRaises(Exception): + viz.BaseViz(datasource, form_data) + + def test_process_metrics(self): + # test TableViz metrics in correct order + form_data = { + "url_params": {}, + "row_limit": 500, + "metric": "sum__SP_POP_TOTL", + "entity": "country_code", + "secondary_metric": "sum__SP_POP_TOTL", + "granularity_sqla": "year", + "page_length": 0, + "all_columns": [], + "viz_type": "table", + "since": "2014-01-01", + "until": "2014-01-02", + "metrics": ["sum__SP_POP_TOTL", "SUM(SE_PRM_NENR_MA)", "SUM(SP_URB_TOTL)"], + "country_fieldtype": "cca3", + "percent_metrics": ["count"], + "slice_id": 74, + "time_grain_sqla": None, + "order_by_cols": [], + "groupby": ["country_name"], + "compare_lag": "10", + "limit": "25", + "datasource": "2__table", + "table_timestamp_format": "%Y-%m-%d %H:%M:%S", + "markup_type": "markdown", + "where": "", + "compare_suffix": "o10Y", + } + datasource = Mock() + datasource.type = "table" + test_viz = viz.BaseViz(datasource, form_data) + expect_metric_labels = [ + "sum__SP_POP_TOTL", + "SUM(SE_PRM_NENR_MA)", + "SUM(SP_URB_TOTL)", + "count", + ] + self.assertEqual(test_viz.metric_labels, expect_metric_labels) + self.assertEqual(test_viz.all_metrics, expect_metric_labels) + + def test_get_df_returns_empty_df(self): + form_data = {"dummy": 123} + query_obj = {"granularity": "day"} + datasource = self.get_datasource_mock() + test_viz = viz.BaseViz(datasource, form_data) + result = test_viz.get_df(query_obj) + self.assertEqual(type(result), pd.DataFrame) + self.assertTrue(result.empty) + + def test_get_df_handles_dttm_col(self): + form_data = {"dummy": 123} + query_obj = {"granularity": "day"} + results = Mock() + results.query = Mock() + results.status = Mock() + results.error_message = Mock() + datasource = Mock() + datasource.type = "table" + datasource.query = Mock(return_value=results) + mock_dttm_col = Mock() + datasource.get_column = Mock(return_value=mock_dttm_col) + + test_viz = viz.BaseViz(datasource, form_data) + test_viz.df_metrics_to_num = Mock() + test_viz.get_fillna_for_columns = Mock(return_value=0) + + results.df = pd.DataFrame(data={DTTM_ALIAS: ["1960-01-01 05:00:00"]}) + datasource.offset = 0 + mock_dttm_col = Mock() + datasource.get_column = Mock(return_value=mock_dttm_col) + mock_dttm_col.python_date_format = "epoch_ms" + result = test_viz.get_df(query_obj) + import logging + + 
logger.info(result) + pd.testing.assert_series_equal( + result[DTTM_ALIAS], pd.Series([datetime(1960, 1, 1, 5, 0)], name=DTTM_ALIAS) + ) + + mock_dttm_col.python_date_format = None + result = test_viz.get_df(query_obj) + pd.testing.assert_series_equal( + result[DTTM_ALIAS], pd.Series([datetime(1960, 1, 1, 5, 0)], name=DTTM_ALIAS) + ) + + datasource.offset = 1 + result = test_viz.get_df(query_obj) + pd.testing.assert_series_equal( + result[DTTM_ALIAS], pd.Series([datetime(1960, 1, 1, 6, 0)], name=DTTM_ALIAS) + ) + + datasource.offset = 0 + results.df = pd.DataFrame(data={DTTM_ALIAS: ["1960-01-01"]}) + mock_dttm_col.python_date_format = "%Y-%m-%d" + result = test_viz.get_df(query_obj) + pd.testing.assert_series_equal( + result[DTTM_ALIAS], pd.Series([datetime(1960, 1, 1, 0, 0)], name=DTTM_ALIAS) + ) + + def test_cache_timeout(self): + datasource = self.get_datasource_mock() + datasource.cache_timeout = 0 + test_viz = viz.BaseViz(datasource, form_data={}) + self.assertEqual(0, test_viz.cache_timeout) + + datasource.cache_timeout = 156 + test_viz = viz.BaseViz(datasource, form_data={}) + self.assertEqual(156, test_viz.cache_timeout) + + datasource.cache_timeout = None + datasource.database.cache_timeout = 0 + self.assertEqual(0, test_viz.cache_timeout) + + datasource.database.cache_timeout = 1666 + self.assertEqual(1666, test_viz.cache_timeout) + + datasource.database.cache_timeout = None + test_viz = viz.BaseViz(datasource, form_data={}) + self.assertEqual( + app.config["DATA_CACHE_CONFIG"]["CACHE_DEFAULT_TIMEOUT"], + test_viz.cache_timeout, + ) + + data_cache_timeout = app.config["DATA_CACHE_CONFIG"]["CACHE_DEFAULT_TIMEOUT"] + app.config["DATA_CACHE_CONFIG"]["CACHE_DEFAULT_TIMEOUT"] = None + datasource.database.cache_timeout = None + test_viz = viz.BaseViz(datasource, form_data={}) + self.assertEqual(app.config["CACHE_DEFAULT_TIMEOUT"], test_viz.cache_timeout) + # restore DATA_CACHE_CONFIG timeout + app.config["DATA_CACHE_CONFIG"]["CACHE_DEFAULT_TIMEOUT"] = data_cache_timeout + + +class TestTableViz(SupersetTestCase): + def test_get_data_applies_percentage(self): + form_data = { + "groupby": ["groupA", "groupB"], + "metrics": [ + { + "expressionType": "SIMPLE", + "aggregate": "SUM", + "label": "SUM(value1)", + "column": {"column_name": "value1", "type": "DOUBLE"}, + }, + "count", + "avg__C", + ], + "percent_metrics": [ + { + "expressionType": "SIMPLE", + "aggregate": "SUM", + "label": "SUM(value1)", + "column": {"column_name": "value1", "type": "DOUBLE"}, + }, + "avg__B", + ], + } + datasource = self.get_datasource_mock() + + df = pd.DataFrame( + { + "SUM(value1)": [15, 20, 25, 40], + "avg__B": [10, 20, 5, 15], + "avg__C": [11, 22, 33, 44], + "count": [6, 7, 8, 9], + "groupA": ["A", "B", "C", "C"], + "groupB": ["x", "x", "y", "z"], + } + ) + + test_viz = viz.TableViz(datasource, form_data) + data = test_viz.get_data(df) + # Check method correctly transforms data and computes percents + self.assertEqual( + [ + "groupA", + "groupB", + "SUM(value1)", + "count", + "avg__C", + "%SUM(value1)", + "%avg__B", + ], + list(data["columns"]), + ) + expected = [ + { + "groupA": "A", + "groupB": "x", + "SUM(value1)": 15, + "count": 6, + "avg__C": 11, + "%SUM(value1)": 0.15, + "%avg__B": 0.2, + }, + { + "groupA": "B", + "groupB": "x", + "SUM(value1)": 20, + "count": 7, + "avg__C": 22, + "%SUM(value1)": 0.2, + "%avg__B": 0.4, + }, + { + "groupA": "C", + "groupB": "y", + "SUM(value1)": 25, + "count": 8, + "avg__C": 33, + "%SUM(value1)": 0.25, + "%avg__B": 0.1, + }, + { + "groupA": "C", + "groupB": "z", + 
"SUM(value1)": 40, + "count": 9, + "avg__C": 44, + "%SUM(value1)": 0.4, + "%avg__B": 0.3, + }, + ] + self.assertEqual(expected, data["records"]) + + def test_parse_adhoc_filters(self): + form_data = { + "metrics": [ + { + "expressionType": "SIMPLE", + "aggregate": "SUM", + "label": "SUM(value1)", + "column": {"column_name": "value1", "type": "DOUBLE"}, + } + ], + "adhoc_filters": [ + { + "expressionType": "SIMPLE", + "clause": "WHERE", + "subject": "value2", + "operator": ">", + "comparator": "100", + }, + { + "expressionType": "SQL", + "clause": "HAVING", + "sqlExpression": "SUM(value1) > 5", + }, + { + "expressionType": "SQL", + "clause": "WHERE", + "sqlExpression": "value3 in ('North America')", + }, + ], + } + datasource = self.get_datasource_mock() + test_viz = viz.TableViz(datasource, form_data) + query_obj = test_viz.query_obj() + self.assertEqual( + [{"col": "value2", "val": "100", "op": ">"}], query_obj["filter"] + ) + self.assertEqual("(value3 in ('North America'))", query_obj["extras"]["where"]) + self.assertEqual("(SUM(value1) > 5)", query_obj["extras"]["having"]) + + def test_adhoc_filters_overwrite_legacy_filters(self): + form_data = { + "metrics": [ + { + "expressionType": "SIMPLE", + "aggregate": "SUM", + "label": "SUM(value1)", + "column": {"column_name": "value1", "type": "DOUBLE"}, + } + ], + "adhoc_filters": [ + { + "expressionType": "SIMPLE", + "clause": "WHERE", + "subject": "value2", + "operator": ">", + "comparator": "100", + }, + { + "expressionType": "SQL", + "clause": "WHERE", + "sqlExpression": "value3 in ('North America')", + }, + ], + "having": "SUM(value1) > 5", + } + datasource = self.get_datasource_mock() + test_viz = viz.TableViz(datasource, form_data) + query_obj = test_viz.query_obj() + self.assertEqual( + [{"col": "value2", "val": "100", "op": ">"}], query_obj["filter"] + ) + self.assertEqual("(value3 in ('North America'))", query_obj["extras"]["where"]) + self.assertEqual("", query_obj["extras"]["having"]) + + def test_query_obj_merges_percent_metrics(self): + datasource = self.get_datasource_mock() + form_data = { + "metrics": ["sum__A", "count", "avg__C"], + "percent_metrics": ["sum__A", "avg__B", "max__Y"], + } + test_viz = viz.TableViz(datasource, form_data) + query_obj = test_viz.query_obj() + self.assertEqual( + ["sum__A", "count", "avg__C", "avg__B", "max__Y"], query_obj["metrics"] + ) + + def test_query_obj_throws_columns_and_metrics(self): + datasource = self.get_datasource_mock() + form_data = {"all_columns": ["A", "B"], "metrics": ["x", "y"]} + with self.assertRaises(Exception): + test_viz = viz.TableViz(datasource, form_data) + test_viz.query_obj() + del form_data["metrics"] + form_data["groupby"] = ["B", "C"] + with self.assertRaises(Exception): + test_viz = viz.TableViz(datasource, form_data) + test_viz.query_obj() + + @patch("superset.viz.BaseViz.query_obj") + def test_query_obj_merges_all_columns(self, super_query_obj): + datasource = self.get_datasource_mock() + form_data = { + "all_columns": ["colA", "colB", "colC"], + "order_by_cols": ['["colA", "colB"]', '["colC"]'], + } + super_query_obj.return_value = { + "columns": ["colD", "colC"], + "groupby": ["colA", "colB"], + } + test_viz = viz.TableViz(datasource, form_data) + query_obj = test_viz.query_obj() + self.assertEqual(form_data["all_columns"], query_obj["columns"]) + self.assertEqual([], query_obj["groupby"]) + self.assertEqual([["colA", "colB"], ["colC"]], query_obj["orderby"]) + + def test_query_obj_uses_sortby(self): + datasource = self.get_datasource_mock() + form_data = { + 
"metrics": ["colA", "colB"], + "order_desc": False, + } + + def run_test(metric): + form_data["timeseries_limit_metric"] = metric + test_viz = viz.TableViz(datasource, form_data) + query_obj = test_viz.query_obj() + self.assertEqual(["colA", "colB", metric], query_obj["metrics"]) + self.assertEqual([(metric, True)], query_obj["orderby"]) + + run_test("simple_metric") + run_test( + { + "label": "adhoc_metric", + "expressionType": "SIMPLE", + "aggregate": "SUM", + "column": { + "column_name": "sort_column", + }, + } + ) + + def test_should_be_timeseries_raises_when_no_granularity(self): + datasource = self.get_datasource_mock() + form_data = {"include_time": True} + with self.assertRaises(Exception): + test_viz = viz.TableViz(datasource, form_data) + test_viz.should_be_timeseries() + + def test_adhoc_metric_with_sortby(self): + metrics = [ + { + "expressionType": "SIMPLE", + "aggregate": "SUM", + "label": "sum_value", + "column": {"column_name": "value1", "type": "DOUBLE"}, + } + ] + form_data = { + "metrics": metrics, + "timeseries_limit_metric": { + "expressionType": "SIMPLE", + "aggregate": "SUM", + "label": "SUM(value1)", + "column": {"column_name": "value1", "type": "DOUBLE"}, + }, + "order_desc": False, + } + + df = pd.DataFrame({"SUM(value1)": [15], "sum_value": [15]}) + datasource = self.get_datasource_mock() + test_viz = viz.TableViz(datasource, form_data) + data = test_viz.get_data(df) + self.assertEqual(["sum_value"], data["columns"]) + + +class TestDistBarViz(SupersetTestCase): + def test_groupby_nulls(self): + form_data = { + "metrics": ["votes"], + "adhoc_filters": [], + "groupby": ["toppings"], + "columns": [], + "order_desc": True, + } + datasource = self.get_datasource_mock() + df = pd.DataFrame( + { + "toppings": ["cheese", "pepperoni", "anchovies", None], + "votes": [3, 5, 1, 2], + } + ) + test_viz = viz.DistributionBarViz(datasource, form_data) + data = test_viz.get_data(df)[0] + self.assertEqual("votes", data["key"]) + expected_values = [ + {"x": "pepperoni", "y": 5}, + {"x": "cheese", "y": 3}, + {"x": NULL_STRING, "y": 2}, + {"x": "anchovies", "y": 1}, + ] + self.assertEqual(expected_values, data["values"]) + + def test_groupby_nans(self): + form_data = { + "metrics": ["count"], + "adhoc_filters": [], + "groupby": ["beds"], + "columns": [], + "order_desc": True, + } + datasource = self.get_datasource_mock() + df = pd.DataFrame({"beds": [0, 1, nan, 2], "count": [30, 42, 3, 29]}) + test_viz = viz.DistributionBarViz(datasource, form_data) + data = test_viz.get_data(df)[0] + self.assertEqual("count", data["key"]) + expected_values = [ + {"x": "1.0", "y": 42}, + {"x": "0.0", "y": 30}, + {"x": "2.0", "y": 29}, + {"x": NULL_STRING, "y": 3}, + ] + + self.assertEqual(expected_values, data["values"]) + + def test_column_nulls(self): + form_data = { + "metrics": ["votes"], + "adhoc_filters": [], + "groupby": ["toppings"], + "columns": ["role"], + "order_desc": True, + } + datasource = self.get_datasource_mock() + df = pd.DataFrame( + { + "toppings": ["cheese", "pepperoni", "cheese", "pepperoni"], + "role": ["engineer", "engineer", None, None], + "votes": [3, 5, 1, 2], + } + ) + test_viz = viz.DistributionBarViz(datasource, form_data) + data = test_viz.get_data(df) + expected = [ + { + "key": NULL_STRING, + "values": [{"x": "pepperoni", "y": 2}, {"x": "cheese", "y": 1}], + }, + { + "key": "engineer", + "values": [{"x": "pepperoni", "y": 5}, {"x": "cheese", "y": 3}], + }, + ] + self.assertEqual(expected, data) + + def test_column_metrics_in_order(self): + form_data = { + "metrics": 
["z_column", "votes", "a_column"], + "adhoc_filters": [], + "groupby": ["toppings"], + "columns": [], + "order_desc": True, + } + datasource = self.get_datasource_mock() + df = pd.DataFrame( + { + "toppings": ["cheese", "pepperoni", "cheese", "pepperoni"], + "role": ["engineer", "engineer", None, None], + "votes": [3, 5, 1, 2], + "a_column": [3, 5, 1, 2], + "z_column": [3, 5, 1, 2], + } + ) + test_viz = viz.DistributionBarViz(datasource, form_data) + data = test_viz.get_data(df) + + expected = [ + { + "key": "z_column", + "values": [{"x": "pepperoni", "y": 3.5}, {"x": "cheese", "y": 2.0}], + }, + { + "key": "votes", + "values": [{"x": "pepperoni", "y": 3.5}, {"x": "cheese", "y": 2.0}], + }, + { + "key": "a_column", + "values": [{"x": "pepperoni", "y": 3.5}, {"x": "cheese", "y": 2.0}], + }, + ] + + self.assertEqual(expected, data) + + def test_column_metrics_in_order_with_breakdowns(self): + form_data = { + "metrics": ["z_column", "votes", "a_column"], + "adhoc_filters": [], + "groupby": ["toppings"], + "columns": ["role"], + "order_desc": True, + } + datasource = self.get_datasource_mock() + df = pd.DataFrame( + { + "toppings": ["cheese", "pepperoni", "cheese", "pepperoni"], + "role": ["engineer", "engineer", None, None], + "votes": [3, 5, 1, 2], + "a_column": [3, 5, 1, 2], + "z_column": [3, 5, 1, 2], + } + ) + test_viz = viz.DistributionBarViz(datasource, form_data) + data = test_viz.get_data(df) + + expected = [ + { + "key": f"z_column, {NULL_STRING}", + "values": [{"x": "pepperoni", "y": 2}, {"x": "cheese", "y": 1}], + }, + { + "key": "z_column, engineer", + "values": [{"x": "pepperoni", "y": 5}, {"x": "cheese", "y": 3}], + }, + { + "key": f"votes, {NULL_STRING}", + "values": [{"x": "pepperoni", "y": 2}, {"x": "cheese", "y": 1}], + }, + { + "key": "votes, engineer", + "values": [{"x": "pepperoni", "y": 5}, {"x": "cheese", "y": 3}], + }, + { + "key": f"a_column, {NULL_STRING}", + "values": [{"x": "pepperoni", "y": 2}, {"x": "cheese", "y": 1}], + }, + { + "key": "a_column, engineer", + "values": [{"x": "pepperoni", "y": 5}, {"x": "cheese", "y": 3}], + }, + ] + + self.assertEqual(expected, data) + + +class TestPairedTTest(SupersetTestCase): + def test_get_data_transforms_dataframe(self): + form_data = { + "groupby": ["groupA", "groupB", "groupC"], + "metrics": ["metric1", "metric2", "metric3"], + } + datasource = self.get_datasource_mock() + # Test data + raw = {} + raw[DTTM_ALIAS] = [100, 200, 300, 100, 200, 300, 100, 200, 300] + raw["groupA"] = ["a1", "a1", "a1", "b1", "b1", "b1", "c1", "c1", "c1"] + raw["groupB"] = ["a2", "a2", "a2", "b2", "b2", "b2", "c2", "c2", "c2"] + raw["groupC"] = ["a3", "a3", "a3", "b3", "b3", "b3", "c3", "c3", "c3"] + raw["metric1"] = [1, 2, 3, 4, 5, 6, 7, 8, 9] + raw["metric2"] = [10, 20, 30, 40, 50, 60, 70, 80, 90] + raw["metric3"] = [100, 200, 300, 400, 500, 600, 700, 800, 900] + df = pd.DataFrame(raw) + pairedTTestViz = viz.viz_types["paired_ttest"](datasource, form_data) + data = pairedTTestViz.get_data(df) + # Check method correctly transforms data + expected = { + "metric1": [ + { + "values": [ + {"x": 100, "y": 1}, + {"x": 200, "y": 2}, + {"x": 300, "y": 3}, + ], + "group": ("a1", "a2", "a3"), + }, + { + "values": [ + {"x": 100, "y": 4}, + {"x": 200, "y": 5}, + {"x": 300, "y": 6}, + ], + "group": ("b1", "b2", "b3"), + }, + { + "values": [ + {"x": 100, "y": 7}, + {"x": 200, "y": 8}, + {"x": 300, "y": 9}, + ], + "group": ("c1", "c2", "c3"), + }, + ], + "metric2": [ + { + "values": [ + {"x": 100, "y": 10}, + {"x": 200, "y": 20}, + {"x": 300, "y": 30}, + ], 
+ "group": ("a1", "a2", "a3"), + }, + { + "values": [ + {"x": 100, "y": 40}, + {"x": 200, "y": 50}, + {"x": 300, "y": 60}, + ], + "group": ("b1", "b2", "b3"), + }, + { + "values": [ + {"x": 100, "y": 70}, + {"x": 200, "y": 80}, + {"x": 300, "y": 90}, + ], + "group": ("c1", "c2", "c3"), + }, + ], + "metric3": [ + { + "values": [ + {"x": 100, "y": 100}, + {"x": 200, "y": 200}, + {"x": 300, "y": 300}, + ], + "group": ("a1", "a2", "a3"), + }, + { + "values": [ + {"x": 100, "y": 400}, + {"x": 200, "y": 500}, + {"x": 300, "y": 600}, + ], + "group": ("b1", "b2", "b3"), + }, + { + "values": [ + {"x": 100, "y": 700}, + {"x": 200, "y": 800}, + {"x": 300, "y": 900}, + ], + "group": ("c1", "c2", "c3"), + }, + ], + } + self.assertEqual(data, expected) + + def test_get_data_empty_null_keys(self): + form_data = {"groupby": [], "metrics": [""]} + datasource = self.get_datasource_mock() + # Test data + raw = {} + raw[DTTM_ALIAS] = [100, 200, 300] + raw[""] = [1, 2, 3] + raw[None] = [10, 20, 30] + + df = pd.DataFrame(raw) + pairedTTestViz = viz.viz_types["paired_ttest"](datasource, form_data) + data = pairedTTestViz.get_data(df) + # Check method correctly transforms data + expected = { + "N/A": [ + { + "values": [ + {"x": 100, "y": 1}, + {"x": 200, "y": 2}, + {"x": 300, "y": 3}, + ], + "group": "All", + } + ], + } + self.assertEqual(data, expected) + + form_data = {"groupby": [], "metrics": [None]} + with self.assertRaises(ValueError): + viz.viz_types["paired_ttest"](datasource, form_data) + + +class TestPartitionViz(SupersetTestCase): + @patch("superset.viz.BaseViz.query_obj") + def test_query_obj_time_series_option(self, super_query_obj): + datasource = self.get_datasource_mock() + form_data = {} + test_viz = viz.PartitionViz(datasource, form_data) + super_query_obj.return_value = {} + query_obj = test_viz.query_obj() + self.assertFalse(query_obj["is_timeseries"]) + test_viz.form_data["time_series_option"] = "agg_sum" + query_obj = test_viz.query_obj() + self.assertTrue(query_obj["is_timeseries"]) + + def test_levels_for_computes_levels(self): + raw = {} + raw[DTTM_ALIAS] = [100, 200, 300, 100, 200, 300, 100, 200, 300] + raw["groupA"] = ["a1", "a1", "a1", "b1", "b1", "b1", "c1", "c1", "c1"] + raw["groupB"] = ["a2", "a2", "a2", "b2", "b2", "b2", "c2", "c2", "c2"] + raw["groupC"] = ["a3", "a3", "a3", "b3", "b3", "b3", "c3", "c3", "c3"] + raw["metric1"] = [1, 2, 3, 4, 5, 6, 7, 8, 9] + raw["metric2"] = [10, 20, 30, 40, 50, 60, 70, 80, 90] + raw["metric3"] = [100, 200, 300, 400, 500, 600, 700, 800, 900] + df = pd.DataFrame(raw) + groups = ["groupA", "groupB", "groupC"] + time_op = "agg_sum" + test_viz = viz.PartitionViz(Mock(), {}) + levels = test_viz.levels_for(time_op, groups, df) + self.assertEqual(4, len(levels)) + expected = {DTTM_ALIAS: 1800, "metric1": 45, "metric2": 450, "metric3": 4500} + self.assertEqual(expected, levels[0].to_dict()) + expected = { + DTTM_ALIAS: {"a1": 600, "b1": 600, "c1": 600}, + "metric1": {"a1": 6, "b1": 15, "c1": 24}, + "metric2": {"a1": 60, "b1": 150, "c1": 240}, + "metric3": {"a1": 600, "b1": 1500, "c1": 2400}, + } + self.assertEqual(expected, levels[1].to_dict()) + self.assertEqual(["groupA", "groupB"], levels[2].index.names) + self.assertEqual(["groupA", "groupB", "groupC"], levels[3].index.names) + time_op = "agg_mean" + levels = test_viz.levels_for(time_op, groups, df) + self.assertEqual(4, len(levels)) + expected = { + DTTM_ALIAS: 200.0, + "metric1": 5.0, + "metric2": 50.0, + "metric3": 500.0, + } + self.assertEqual(expected, levels[0].to_dict()) + expected = { + 
DTTM_ALIAS: {"a1": 200, "c1": 200, "b1": 200}, + "metric1": {"a1": 2, "b1": 5, "c1": 8}, + "metric2": {"a1": 20, "b1": 50, "c1": 80}, + "metric3": {"a1": 200, "b1": 500, "c1": 800}, + } + self.assertEqual(expected, levels[1].to_dict()) + self.assertEqual(["groupA", "groupB"], levels[2].index.names) + self.assertEqual(["groupA", "groupB", "groupC"], levels[3].index.names) + + def test_levels_for_diff_computes_difference(self): + raw = {} + raw[DTTM_ALIAS] = [100, 200, 300, 100, 200, 300, 100, 200, 300] + raw["groupA"] = ["a1", "a1", "a1", "b1", "b1", "b1", "c1", "c1", "c1"] + raw["groupB"] = ["a2", "a2", "a2", "b2", "b2", "b2", "c2", "c2", "c2"] + raw["groupC"] = ["a3", "a3", "a3", "b3", "b3", "b3", "c3", "c3", "c3"] + raw["metric1"] = [1, 2, 3, 4, 5, 6, 7, 8, 9] + raw["metric2"] = [10, 20, 30, 40, 50, 60, 70, 80, 90] + raw["metric3"] = [100, 200, 300, 400, 500, 600, 700, 800, 900] + df = pd.DataFrame(raw) + groups = ["groupA", "groupB", "groupC"] + test_viz = viz.PartitionViz(Mock(), {}) + time_op = "point_diff" + levels = test_viz.levels_for_diff(time_op, groups, df) + expected = {"metric1": 6, "metric2": 60, "metric3": 600} + self.assertEqual(expected, levels[0].to_dict()) + expected = { + "metric1": {"a1": 2, "b1": 2, "c1": 2}, + "metric2": {"a1": 20, "b1": 20, "c1": 20}, + "metric3": {"a1": 200, "b1": 200, "c1": 200}, + } + self.assertEqual(expected, levels[1].to_dict()) + self.assertEqual(4, len(levels)) + self.assertEqual(["groupA", "groupB", "groupC"], levels[3].index.names) + + def test_levels_for_time_calls_process_data_and_drops_cols(self): + raw = {} + raw[DTTM_ALIAS] = [100, 200, 300, 100, 200, 300, 100, 200, 300] + raw["groupA"] = ["a1", "a1", "a1", "b1", "b1", "b1", "c1", "c1", "c1"] + raw["groupB"] = ["a2", "a2", "a2", "b2", "b2", "b2", "c2", "c2", "c2"] + raw["groupC"] = ["a3", "a3", "a3", "b3", "b3", "b3", "c3", "c3", "c3"] + raw["metric1"] = [1, 2, 3, 4, 5, 6, 7, 8, 9] + raw["metric2"] = [10, 20, 30, 40, 50, 60, 70, 80, 90] + raw["metric3"] = [100, 200, 300, 400, 500, 600, 700, 800, 900] + df = pd.DataFrame(raw) + groups = ["groupA", "groupB", "groupC"] + test_viz = viz.PartitionViz(Mock(), {"groupby": groups}) + + def return_args(df_drop, aggregate): + return df_drop + + test_viz.process_data = Mock(side_effect=return_args) + levels = test_viz.levels_for_time(groups, df) + self.assertEqual(4, len(levels)) + cols = [DTTM_ALIAS, "metric1", "metric2", "metric3"] + self.assertEqual(sorted(cols), sorted(levels[0].columns.tolist())) + cols += ["groupA"] + self.assertEqual(sorted(cols), sorted(levels[1].columns.tolist())) + cols += ["groupB"] + self.assertEqual(sorted(cols), sorted(levels[2].columns.tolist())) + cols += ["groupC"] + self.assertEqual(sorted(cols), sorted(levels[3].columns.tolist())) + self.assertEqual(4, len(test_viz.process_data.mock_calls)) + + def test_nest_values_returns_hierarchy(self): + raw = {} + raw["groupA"] = ["a1", "a1", "a1", "b1", "b1", "b1", "c1", "c1", "c1"] + raw["groupB"] = ["a2", "a2", "a2", "b2", "b2", "b2", "c2", "c2", "c2"] + raw["groupC"] = ["a3", "a3", "a3", "b3", "b3", "b3", "c3", "c3", "c3"] + raw["metric1"] = [1, 2, 3, 4, 5, 6, 7, 8, 9] + raw["metric2"] = [10, 20, 30, 40, 50, 60, 70, 80, 90] + raw["metric3"] = [100, 200, 300, 400, 500, 600, 700, 800, 900] + df = pd.DataFrame(raw) + test_viz = viz.PartitionViz(Mock(), {}) + groups = ["groupA", "groupB", "groupC"] + levels = test_viz.levels_for("agg_sum", groups, df) + nest = test_viz.nest_values(levels) + self.assertEqual(3, len(nest)) + for i in range(0, 3): + self.assertEqual("metric" 
+ str(i + 1), nest[i]["name"]) + self.assertEqual(3, len(nest[0]["children"])) + self.assertEqual(1, len(nest[0]["children"][0]["children"])) + self.assertEqual(1, len(nest[0]["children"][0]["children"][0]["children"])) + + def test_nest_procs_returns_hierarchy(self): + raw = {} + raw[DTTM_ALIAS] = [100, 200, 300, 100, 200, 300, 100, 200, 300] + raw["groupA"] = ["a1", "a1", "a1", "b1", "b1", "b1", "c1", "c1", "c1"] + raw["groupB"] = ["a2", "a2", "a2", "b2", "b2", "b2", "c2", "c2", "c2"] + raw["groupC"] = ["a3", "a3", "a3", "b3", "b3", "b3", "c3", "c3", "c3"] + raw["metric1"] = [1, 2, 3, 4, 5, 6, 7, 8, 9] + raw["metric2"] = [10, 20, 30, 40, 50, 60, 70, 80, 90] + raw["metric3"] = [100, 200, 300, 400, 500, 600, 700, 800, 900] + df = pd.DataFrame(raw) + test_viz = viz.PartitionViz(Mock(), {}) + groups = ["groupA", "groupB", "groupC"] + metrics = ["metric1", "metric2", "metric3"] + procs = {} + for i in range(0, 4): + df_drop = df.drop(groups[i:], 1) + pivot = df_drop.pivot_table( + index=DTTM_ALIAS, columns=groups[:i], values=metrics + ) + procs[i] = pivot + nest = test_viz.nest_procs(procs) + self.assertEqual(3, len(nest)) + for i in range(0, 3): + self.assertEqual("metric" + str(i + 1), nest[i]["name"]) + self.assertEqual(None, nest[i].get("val")) + self.assertEqual(3, len(nest[0]["children"])) + self.assertEqual(3, len(nest[0]["children"][0]["children"])) + self.assertEqual(1, len(nest[0]["children"][0]["children"][0]["children"])) + self.assertEqual( + 1, len(nest[0]["children"][0]["children"][0]["children"][0]["children"]) + ) + + def test_get_data_calls_correct_method(self): + raw = {} + raw[DTTM_ALIAS] = [100, 200, 300, 100, 200, 300, 100, 200, 300] + raw["groupA"] = ["a1", "a1", "a1", "b1", "b1", "b1", "c1", "c1", "c1"] + raw["groupB"] = ["a2", "a2", "a2", "b2", "b2", "b2", "c2", "c2", "c2"] + raw["groupC"] = ["a3", "a3", "a3", "b3", "b3", "b3", "c3", "c3", "c3"] + raw["metric1"] = [1, 2, 3, 4, 5, 6, 7, 8, 9] + raw["metric2"] = [10, 20, 30, 40, 50, 60, 70, 80, 90] + raw["metric3"] = [100, 200, 300, 400, 500, 600, 700, 800, 900] + df = pd.DataFrame(raw) + test_viz = viz.PartitionViz(Mock(), {}) + with self.assertRaises(ValueError): + test_viz.get_data(df) + test_viz.levels_for = Mock(return_value=1) + test_viz.nest_values = Mock(return_value=1) + test_viz.form_data["groupby"] = ["groups"] + test_viz.form_data["time_series_option"] = "not_time" + test_viz.get_data(df) + self.assertEqual("agg_sum", test_viz.levels_for.mock_calls[0][1][0]) + test_viz.form_data["time_series_option"] = "agg_sum" + test_viz.get_data(df) + self.assertEqual("agg_sum", test_viz.levels_for.mock_calls[1][1][0]) + test_viz.form_data["time_series_option"] = "agg_mean" + test_viz.get_data(df) + self.assertEqual("agg_mean", test_viz.levels_for.mock_calls[2][1][0]) + test_viz.form_data["time_series_option"] = "point_diff" + test_viz.levels_for_diff = Mock(return_value=1) + test_viz.get_data(df) + self.assertEqual("point_diff", test_viz.levels_for_diff.mock_calls[0][1][0]) + test_viz.form_data["time_series_option"] = "point_percent" + test_viz.get_data(df) + self.assertEqual("point_percent", test_viz.levels_for_diff.mock_calls[1][1][0]) + test_viz.form_data["time_series_option"] = "point_factor" + test_viz.get_data(df) + self.assertEqual("point_factor", test_viz.levels_for_diff.mock_calls[2][1][0]) + test_viz.levels_for_time = Mock(return_value=1) + test_viz.nest_procs = Mock(return_value=1) + test_viz.form_data["time_series_option"] = "adv_anal" + test_viz.get_data(df) + self.assertEqual(1, 
len(test_viz.levels_for_time.mock_calls)) + self.assertEqual(1, len(test_viz.nest_procs.mock_calls)) + test_viz.form_data["time_series_option"] = "time_series" + test_viz.get_data(df) + self.assertEqual("agg_sum", test_viz.levels_for.mock_calls[3][1][0]) + self.assertEqual(7, len(test_viz.nest_values.mock_calls)) + + +class TestRoseVis(SupersetTestCase): + def test_rose_vis_get_data(self): + raw = {} + t1 = pd.Timestamp("2000") + t2 = pd.Timestamp("2002") + t3 = pd.Timestamp("2004") + raw[DTTM_ALIAS] = [t1, t2, t3, t1, t2, t3, t1, t2, t3] + raw["groupA"] = ["a1", "a1", "a1", "b1", "b1", "b1", "c1", "c1", "c1"] + raw["groupB"] = ["a2", "a2", "a2", "b2", "b2", "b2", "c2", "c2", "c2"] + raw["groupC"] = ["a3", "a3", "a3", "b3", "b3", "b3", "c3", "c3", "c3"] + raw["metric1"] = [1, 2, 3, 4, 5, 6, 7, 8, 9] + df = pd.DataFrame(raw) + fd = {"metrics": ["metric1"], "groupby": ["groupA"]} + test_viz = viz.RoseViz(Mock(), fd) + test_viz.metrics = fd["metrics"] + res = test_viz.get_data(df) + expected = { + 946684800000000000: [ + {"time": t1, "value": 1, "key": ("a1",), "name": ("a1",)}, + {"time": t1, "value": 4, "key": ("b1",), "name": ("b1",)}, + {"time": t1, "value": 7, "key": ("c1",), "name": ("c1",)}, + ], + 1009843200000000000: [ + {"time": t2, "value": 2, "key": ("a1",), "name": ("a1",)}, + {"time": t2, "value": 5, "key": ("b1",), "name": ("b1",)}, + {"time": t2, "value": 8, "key": ("c1",), "name": ("c1",)}, + ], + 1072915200000000000: [ + {"time": t3, "value": 3, "key": ("a1",), "name": ("a1",)}, + {"time": t3, "value": 6, "key": ("b1",), "name": ("b1",)}, + {"time": t3, "value": 9, "key": ("c1",), "name": ("c1",)}, + ], + } + self.assertEqual(expected, res) + + +class TestTimeSeriesTableViz(SupersetTestCase): + def test_get_data_metrics(self): + form_data = {"metrics": ["sum__A", "count"], "groupby": []} + datasource = self.get_datasource_mock() + raw = {} + t1 = pd.Timestamp("2000") + t2 = pd.Timestamp("2002") + raw[DTTM_ALIAS] = [t1, t2] + raw["sum__A"] = [15, 20] + raw["count"] = [6, 7] + df = pd.DataFrame(raw) + test_viz = viz.TimeTableViz(datasource, form_data) + data = test_viz.get_data(df) + # Check method correctly transforms data + self.assertEqual(set(["count", "sum__A"]), set(data["columns"])) + time_format = "%Y-%m-%d %H:%M:%S" + expected = { + t1.strftime(time_format): {"sum__A": 15, "count": 6}, + t2.strftime(time_format): {"sum__A": 20, "count": 7}, + } + self.assertEqual(expected, data["records"]) + + def test_get_data_group_by(self): + form_data = {"metrics": ["sum__A"], "groupby": ["groupby1"]} + datasource = self.get_datasource_mock() + raw = {} + t1 = pd.Timestamp("2000") + t2 = pd.Timestamp("2002") + raw[DTTM_ALIAS] = [t1, t1, t1, t2, t2, t2] + raw["sum__A"] = [15, 20, 25, 30, 35, 40] + raw["groupby1"] = ["a1", "a2", "a3", "a1", "a2", "a3"] + df = pd.DataFrame(raw) + test_viz = viz.TimeTableViz(datasource, form_data) + data = test_viz.get_data(df) + # Check method correctly transforms data + self.assertEqual(set(["a1", "a2", "a3"]), set(data["columns"])) + time_format = "%Y-%m-%d %H:%M:%S" + expected = { + t1.strftime(time_format): {"a1": 15, "a2": 20, "a3": 25}, + t2.strftime(time_format): {"a1": 30, "a2": 35, "a3": 40}, + } + self.assertEqual(expected, data["records"]) + + @patch("superset.viz.BaseViz.query_obj") + def test_query_obj_throws_metrics_and_groupby(self, super_query_obj): + datasource = self.get_datasource_mock() + form_data = {"groupby": ["a"]} + super_query_obj.return_value = {} + test_viz = viz.TimeTableViz(datasource, form_data) + with 
self.assertRaises(Exception): + test_viz.query_obj() + form_data["metrics"] = ["x", "y"] + test_viz = viz.TimeTableViz(datasource, form_data) + with self.assertRaises(Exception): + test_viz.query_obj() + + def test_query_obj_order_by(self): + test_viz = viz.TimeTableViz( + self.get_datasource_mock(), {"metrics": ["sum__A", "count"], "groupby": []} + ) + query_obj = test_viz.query_obj() + self.assertEqual(query_obj["orderby"], [("sum__A", False)]) + + +class TestBaseDeckGLViz(SupersetTestCase): + def test_get_metrics(self): + form_data = load_fixture("deck_path_form_data.json") + datasource = self.get_datasource_mock() + test_viz_deckgl = viz.BaseDeckGLViz(datasource, form_data) + result = test_viz_deckgl.get_metrics() + assert result == [form_data.get("size")] + + form_data = {} + test_viz_deckgl = viz.BaseDeckGLViz(datasource, form_data) + result = test_viz_deckgl.get_metrics() + assert result == [] + + def test_scatterviz_get_metrics(self): + form_data = load_fixture("deck_path_form_data.json") + datasource = self.get_datasource_mock() + + form_data = {} + test_viz_deckgl = viz.DeckScatterViz(datasource, form_data) + test_viz_deckgl.point_radius_fixed = {"type": "metric", "value": "int"} + result = test_viz_deckgl.get_metrics() + assert result == ["int"] + + form_data = {} + test_viz_deckgl = viz.DeckScatterViz(datasource, form_data) + test_viz_deckgl.point_radius_fixed = {} + result = test_viz_deckgl.get_metrics() + assert result == [] + + def test_get_js_columns(self): + form_data = load_fixture("deck_path_form_data.json") + datasource = self.get_datasource_mock() + mock_d = {"a": "dummy1", "b": "dummy2", "c": "dummy3"} + test_viz_deckgl = viz.BaseDeckGLViz(datasource, form_data) + result = test_viz_deckgl.get_js_columns(mock_d) + + assert result == {"color": None} + + def test_get_properties(self): + mock_d = {} + form_data = load_fixture("deck_path_form_data.json") + datasource = self.get_datasource_mock() + test_viz_deckgl = viz.BaseDeckGLViz(datasource, form_data) + + with self.assertRaises(NotImplementedError) as context: + test_viz_deckgl.get_properties(mock_d) + + self.assertTrue("" in str(context.exception)) + + def test_process_spatial_query_obj(self): + form_data = load_fixture("deck_path_form_data.json") + datasource = self.get_datasource_mock() + mock_key = "spatial_key" + mock_gb = [] + test_viz_deckgl = viz.BaseDeckGLViz(datasource, form_data) + + with self.assertRaises(ValueError) as context: + test_viz_deckgl.process_spatial_query_obj(mock_key, mock_gb) + + self.assertTrue("Bad spatial key" in str(context.exception)) + + test_form_data = { + "latlong_key": {"type": "latlong", "lonCol": "lon", "latCol": "lat"}, + "delimited_key": {"type": "delimited", "lonlatCol": "lonlat"}, + "geohash_key": {"type": "geohash", "geohashCol": "geo"}, + } + + datasource = self.get_datasource_mock() + expected_results = { + "latlong_key": ["lon", "lat"], + "delimited_key": ["lonlat"], + "geohash_key": ["geo"], + } + for mock_key in ["latlong_key", "delimited_key", "geohash_key"]: + mock_gb = [] + test_viz_deckgl = viz.BaseDeckGLViz(datasource, test_form_data) + test_viz_deckgl.process_spatial_query_obj(mock_key, mock_gb) + assert expected_results.get(mock_key) == mock_gb + + def test_geojson_query_obj(self): + form_data = load_fixture("deck_geojson_form_data.json") + datasource = self.get_datasource_mock() + test_viz_deckgl = viz.DeckGeoJson(datasource, form_data) + results = test_viz_deckgl.query_obj() + + assert results["metrics"] == [] + assert results["groupby"] == [] + assert 
results["columns"] == ["test_col"] + + def test_parse_coordinates(self): + form_data = load_fixture("deck_path_form_data.json") + datasource = self.get_datasource_mock() + viz_instance = viz.BaseDeckGLViz(datasource, form_data) + + coord = viz_instance.parse_coordinates("1.23, 3.21") + self.assertEqual(coord, (1.23, 3.21)) + + coord = viz_instance.parse_coordinates("1.23 3.21") + self.assertEqual(coord, (1.23, 3.21)) + + self.assertEqual(viz_instance.parse_coordinates(None), None) + + self.assertEqual(viz_instance.parse_coordinates(""), None) + + def test_parse_coordinates_raises(self): + form_data = load_fixture("deck_path_form_data.json") + datasource = self.get_datasource_mock() + test_viz_deckgl = viz.BaseDeckGLViz(datasource, form_data) + + with self.assertRaises(SpatialException): + test_viz_deckgl.parse_coordinates("NULL") + + with self.assertRaises(SpatialException): + test_viz_deckgl.parse_coordinates("fldkjsalkj,fdlaskjfjadlksj") + + def test_filter_nulls(self): + test_form_data = { + "latlong_key": {"type": "latlong", "lonCol": "lon", "latCol": "lat"}, + "delimited_key": {"type": "delimited", "lonlatCol": "lonlat"}, + "geohash_key": {"type": "geohash", "geohashCol": "geo"}, + } + + datasource = self.get_datasource_mock() + expected_results = { + "latlong_key": [ + { + "clause": "WHERE", + "expressionType": "SIMPLE", + "filterOptionName": "c7f171cf3204bcbf456acfeac5cd9afd", + "comparator": "", + "operator": "IS NOT NULL", + "subject": "lat", + }, + { + "clause": "WHERE", + "expressionType": "SIMPLE", + "filterOptionName": "52634073fbb8ae0a3aa59ad48abac55e", + "comparator": "", + "operator": "IS NOT NULL", + "subject": "lon", + }, + ], + "delimited_key": [ + { + "clause": "WHERE", + "expressionType": "SIMPLE", + "filterOptionName": "cae5c925c140593743da08499e6fb207", + "comparator": "", + "operator": "IS NOT NULL", + "subject": "lonlat", + } + ], + "geohash_key": [ + { + "clause": "WHERE", + "expressionType": "SIMPLE", + "filterOptionName": "d84f55222d8e414e888fa5f990b341d2", + "comparator": "", + "operator": "IS NOT NULL", + "subject": "geo", + } + ], + } + for mock_key in ["latlong_key", "delimited_key", "geohash_key"]: + test_viz_deckgl = viz.BaseDeckGLViz(datasource, test_form_data.copy()) + test_viz_deckgl.spatial_control_keys = [mock_key] + test_viz_deckgl.add_null_filters() + adhoc_filters = test_viz_deckgl.form_data["adhoc_filters"] + assert expected_results.get(mock_key) == adhoc_filters + + +class TestTimeSeriesViz(SupersetTestCase): + def test_timeseries_unicode_data(self): + datasource = self.get_datasource_mock() + form_data = {"groupby": ["name"], "metrics": ["sum__payout"]} + raw = {} + raw["name"] = [ + "Real Madrid C.F.🇺🇸🇬🇧", + "Real Madrid C.F.🇺🇸🇬🇧", + "Real Madrid Basket", + "Real Madrid Basket", + ] + raw["__timestamp"] = [ + "2018-02-20T00:00:00", + "2018-03-09T00:00:00", + "2018-02-20T00:00:00", + "2018-03-09T00:00:00", + ] + raw["sum__payout"] = [2, 2, 4, 4] + df = pd.DataFrame(raw) + + test_viz = viz.NVD3TimeSeriesViz(datasource, form_data) + viz_data = {} + viz_data = test_viz.get_data(df) + expected = [ + { + "values": [ + {"y": 4, "x": "2018-02-20T00:00:00"}, + {"y": 4, "x": "2018-03-09T00:00:00"}, + ], + "key": ("Real Madrid Basket",), + }, + { + "values": [ + {"y": 2, "x": "2018-02-20T00:00:00"}, + {"y": 2, "x": "2018-03-09T00:00:00"}, + ], + "key": ("Real Madrid C.F.\U0001f1fa\U0001f1f8\U0001f1ec\U0001f1e7",), + }, + ] + self.assertEqual(expected, viz_data) + + def test_process_data_resample(self): + datasource = self.get_datasource_mock() + + df = 
pd.DataFrame( + { + "__timestamp": pd.to_datetime( + ["2019-01-01", "2019-01-02", "2019-01-05", "2019-01-07"] + ), + "y": [1.0, 2.0, 5.0, 7.0], + } + ) + + self.assertEqual( + viz.NVD3TimeSeriesViz( + datasource, + {"metrics": ["y"], "resample_method": "sum", "resample_rule": "1D"}, + ) + .process_data(df)["y"] + .tolist(), + [1.0, 2.0, 0.0, 0.0, 5.0, 0.0, 7.0], + ) + + np.testing.assert_equal( + viz.NVD3TimeSeriesViz( + datasource, + {"metrics": ["y"], "resample_method": "asfreq", "resample_rule": "1D"}, + ) + .process_data(df)["y"] + .tolist(), + [1.0, 2.0, np.nan, np.nan, 5.0, np.nan, 7.0], + ) + + def test_apply_rolling(self): + datasource = self.get_datasource_mock() + df = pd.DataFrame( + index=pd.to_datetime( + ["2019-01-01", "2019-01-02", "2019-01-05", "2019-01-07"] + ), + data={"y": [1.0, 2.0, 3.0, 4.0]}, + ) + self.assertEqual( + viz.BigNumberViz( + datasource, + { + "metrics": ["y"], + "rolling_type": "cumsum", + "rolling_periods": 0, + "min_periods": 0, + }, + ) + .apply_rolling(df)["y"] + .tolist(), + [1.0, 3.0, 6.0, 10.0], + ) + self.assertEqual( + viz.BigNumberViz( + datasource, + { + "metrics": ["y"], + "rolling_type": "sum", + "rolling_periods": 2, + "min_periods": 0, + }, + ) + .apply_rolling(df)["y"] + .tolist(), + [1.0, 3.0, 5.0, 7.0], + ) + self.assertEqual( + viz.BigNumberViz( + datasource, + { + "metrics": ["y"], + "rolling_type": "mean", + "rolling_periods": 10, + "min_periods": 0, + }, + ) + .apply_rolling(df)["y"] + .tolist(), + [1.0, 1.5, 2.0, 2.5], + ) + + def test_apply_rolling_without_data(self): + datasource = self.get_datasource_mock() + df = pd.DataFrame( + index=pd.to_datetime( + ["2019-01-01", "2019-01-02", "2019-01-05", "2019-01-07"] + ), + data={"y": [1.0, 2.0, 3.0, 4.0]}, + ) + test_viz = viz.BigNumberViz( + datasource, + { + "metrics": ["y"], + "rolling_type": "cumsum", + "rolling_periods": 4, + "min_periods": 4, + }, + ) + with pytest.raises(QueryObjectValidationError): + test_viz.apply_rolling(df) + + +class TestBigNumberViz(SupersetTestCase): + def test_get_data(self): + datasource = self.get_datasource_mock() + df = pd.DataFrame( + data={ + DTTM_ALIAS: pd.to_datetime( + ["2019-01-01", "2019-01-02", "2019-01-05", "2019-01-07"] + ), + "y": [1.0, 2.0, 3.0, 4.0], + } + ) + data = viz.BigNumberViz(datasource, {"metrics": ["y"]}).get_data(df) + self.assertEqual(data[2], {DTTM_ALIAS: pd.Timestamp("2019-01-05"), "y": 3}) + + def test_get_data_with_none(self): + datasource = self.get_datasource_mock() + df = pd.DataFrame( + data={ + DTTM_ALIAS: pd.to_datetime( + ["2019-01-01", "2019-01-02", "2019-01-05", "2019-01-07"] + ), + "y": [1.0, 2.0, None, 4.0], + } + ) + data = viz.BigNumberViz(datasource, {"metrics": ["y"]}).get_data(df) + assert np.isnan(data[2]["y"]) + + +class TestPivotTableViz(SupersetTestCase): + df = pd.DataFrame( + data={ + "intcol": [1, 2, 3, None], + "floatcol": [0.1, 0.2, 0.3, None], + "strcol": ["a", "b", "c", None], + } + ) + + def test_get_aggfunc_numeric(self): + # is a sum function + func = viz.PivotTableViz.get_aggfunc("intcol", self.df, {}) + assert hasattr(func, "__call__") + assert func(self.df["intcol"]) == 6 + + assert ( + viz.PivotTableViz.get_aggfunc("intcol", self.df, {"pandas_aggfunc": "min"}) + == "min" + ) + assert ( + viz.PivotTableViz.get_aggfunc( + "floatcol", self.df, {"pandas_aggfunc": "max"} + ) + == "max" + ) + + def test_get_aggfunc_non_numeric(self): + assert viz.PivotTableViz.get_aggfunc("strcol", self.df, {}) == "max" + assert ( + viz.PivotTableViz.get_aggfunc("strcol", self.df, {"pandas_aggfunc": "sum"}) + == 
"max" + ) + assert ( + viz.PivotTableViz.get_aggfunc("strcol", self.df, {"pandas_aggfunc": "min"}) + == "min" + ) + + def test_format_datetime_from_pd_timestamp(self): + tstamp = pd.Timestamp(datetime(2020, 9, 3, tzinfo=timezone.utc)) + assert ( + viz.PivotTableViz._format_datetime(tstamp) == "__timestamp:1599091200000.0" + ) + + def test_format_datetime_from_datetime(self): + tstamp = datetime(2020, 9, 3, tzinfo=timezone.utc) + assert ( + viz.PivotTableViz._format_datetime(tstamp) == "__timestamp:1599091200000.0" + ) + + def test_format_datetime_from_date(self): + tstamp = date(2020, 9, 3) + assert ( + viz.PivotTableViz._format_datetime(tstamp) == "__timestamp:1599091200000.0" + ) + + def test_format_datetime_from_string(self): + tstamp = "2020-09-03T00:00:00" + assert ( + viz.PivotTableViz._format_datetime(tstamp) == "__timestamp:1599091200000.0" + ) + + def test_format_datetime_from_invalid_string(self): + tstamp = "abracadabra" + assert viz.PivotTableViz._format_datetime(tstamp) == tstamp + + def test_format_datetime_from_int(self): + assert viz.PivotTableViz._format_datetime(123) == 123 + assert viz.PivotTableViz._format_datetime(123.0) == 123.0 + + +class TestFilterBoxViz(SupersetTestCase): + def test_get_data(self): + form_data = { + "filter_configs": [ + {"column": "value1", "metric": "metric1"}, + {"column": "value2", "metric": "metric2", "asc": True}, + {"column": "value3"}, + {"column": "value4", "asc": True}, + {"column": "value5"}, + {"column": "value6"}, + ], + } + datasource = self.get_datasource_mock() + test_viz = viz.FilterBoxViz(datasource, form_data) + test_viz.dataframes = { + "value1": pd.DataFrame( + data=[ + {"value1": "v1", "metric1": 1}, + {"value1": "v2", "metric1": 2}, + ] + ), + "value2": pd.DataFrame( + data=[ + {"value2": "v3", "metric2": 3}, + {"value2": "v4", "metric2": 4}, + ] + ), + "value3": pd.DataFrame( + data=[ + {"value3": "v5"}, + {"value3": "v6"}, + ] + ), + "value4": pd.DataFrame( + data=[ + {"value4": "v7"}, + {"value4": "v8"}, + ] + ), + "value5": pd.DataFrame(), + } + + df = pd.DataFrame() + data = test_viz.get_data(df) + expected = { + "value1": [ + {"id": "v2", "text": "v2", "metric": 2}, + {"id": "v1", "text": "v1", "metric": 1}, + ], + "value2": [ + {"id": "v3", "text": "v3", "metric": 3}, + {"id": "v4", "text": "v4", "metric": 4}, + ], + "value3": [ + {"id": "v6", "text": "v6"}, + {"id": "v5", "text": "v5"}, + ], + "value4": [ + {"id": "v7", "text": "v7"}, + {"id": "v8", "text": "v8"}, + ], + "value5": [], + "value6": [], + } + self.assertEqual(expected, data) diff --git a/tests/unit_tests/__init__.py b/tests/unit_tests/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/tests/unit_tests/advanced_data_type/__init__.py b/tests/unit_tests/advanced_data_type/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/advanced_data_type/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/advanced_data_type/types_tests.py b/tests/unit_tests/advanced_data_type/types_tests.py new file mode 100644 index 0000000000000..189b9e1aab22d --- /dev/null +++ b/tests/unit_tests/advanced_data_type/types_tests.py @@ -0,0 +1,515 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
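+# Note on the numeric fixtures in this module: the CIDR tests express IPv4
+# addresses as packed 32-bit integers, so "1.1.1.1" corresponds to
+# 1*256**3 + 1*256**2 + 1*256 + 1 = 16843009 and "2.2.2.2" to 33686018.
+# Those are the constants asserted in the translate_type and
+# translate_filter cases below.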
+# isort:skip_file +"""Unit tests for Superset""" + +import sqlalchemy +from sqlalchemy import Column, Integer +from superset.advanced_data_type.types import ( + AdvancedDataTypeRequest, + AdvancedDataTypeResponse, +) +from superset.utils.core import FilterOperator, FilterStringOperators + +from superset.advanced_data_type.plugins.internet_address import internet_address +from superset.advanced_data_type.plugins.internet_port import internet_port as port + + +# To run the unit tests below, use the following command in the root Superset folder: +# tox -e py38 -- tests/unit_tests/advanced_data_type/types_tests.py + + +def test_ip_func_valid_ip(): + """Test to see if the cidr_func behaves as expected when a valid IP is passed in""" + cidr_request: AdvancedDataTypeRequest = { + "advanced_data_type": "cidr", + "values": ["1.1.1.1"], + } + cidr_response: AdvancedDataTypeResponse = { + "values": [16843009], + "error_message": "", + "display_value": "16843009", + "valid_filter_operators": [ + FilterStringOperators.EQUALS, + FilterStringOperators.GREATER_THAN_OR_EQUAL, + FilterStringOperators.GREATER_THAN, + FilterStringOperators.IN, + FilterStringOperators.LESS_THAN, + FilterStringOperators.LESS_THAN_OR_EQUAL, + ], + } + + assert internet_address.translate_type(cidr_request) == cidr_response + + +def test_cidr_func_invalid_ip(): + """Test to see if the cidr_func behaves as expected when an invalid IP is passed in""" + cidr_request: AdvancedDataTypeRequest = { + "advanced_data_type": "cidr", + "values": ["abc"], + } + cidr_response: AdvancedDataTypeResponse = { + "values": [], + "error_message": "'abc' does not appear to be an IPv4 or IPv6 network", + "display_value": "", + "valid_filter_operators": [ + FilterStringOperators.EQUALS, + FilterStringOperators.GREATER_THAN_OR_EQUAL, + FilterStringOperators.GREATER_THAN, + FilterStringOperators.IN, + FilterStringOperators.LESS_THAN, + FilterStringOperators.LESS_THAN_OR_EQUAL, + ], + } + + assert internet_address.translate_type(cidr_request) == cidr_response + + +def test_port_translation_func_valid_port_number(): + """Test to see if the port_translation_func behaves as expected when a valid port number + is passed in""" + port_request: AdvancedDataTypeRequest = { + "advanced_data_type": "port", + "values": ["80"], + } + port_response: AdvancedDataTypeResponse = { + "values": [[80]], + "error_message": "", + "display_value": "[80]", + "valid_filter_operators": [ + FilterStringOperators.EQUALS, + FilterStringOperators.GREATER_THAN_OR_EQUAL, + FilterStringOperators.GREATER_THAN, + FilterStringOperators.IN, + FilterStringOperators.LESS_THAN, + FilterStringOperators.LESS_THAN_OR_EQUAL, + ], + } + + assert port.translate_type(port_request) == port_response + + +def test_port_translation_func_valid_port_name(): + """Test to see if the port_translation_func behaves as expected when a valid port name + is passed in""" + port_request: AdvancedDataTypeRequest = { + "advanced_data_type": "port", + "values": ["https"], + } + port_response: AdvancedDataTypeResponse = { + "values": [[443]], + "error_message": "", + "display_value": "[443]", + "valid_filter_operators": [ + FilterStringOperators.EQUALS, + FilterStringOperators.GREATER_THAN_OR_EQUAL, + FilterStringOperators.GREATER_THAN, + FilterStringOperators.IN, + FilterStringOperators.LESS_THAN, + FilterStringOperators.LESS_THAN_OR_EQUAL, + ], + } + + assert port.translate_type(port_request) == port_response + + +def test_port_translation_func_invalid_port_name(): + """Test to see if the port_translation_func behaves 
as expected when an invalid port name + is passed in""" + port_request: AdvancedDataTypeRequest = { + "advanced_data_type": "port", + "values": ["abc"], + } + port_response: AdvancedDataTypeResponse = { + "values": [], + "error_message": "'abc' does not appear to be a port name or number", + "display_value": "", + "valid_filter_operators": [ + FilterStringOperators.EQUALS, + FilterStringOperators.GREATER_THAN_OR_EQUAL, + FilterStringOperators.GREATER_THAN, + FilterStringOperators.IN, + FilterStringOperators.LESS_THAN, + FilterStringOperators.LESS_THAN_OR_EQUAL, + ], + } + + assert port.translate_type(port_request) == port_response + + +def test_port_translation_func_invalid_port_number(): + """Test to see if the port_translation_func behaves as expected when an invalid port + number is passed in""" + port_request: AdvancedDataTypeRequest = { + "advanced_data_type": "port", + "values": ["123456789"], + } + port_response: AdvancedDataTypeResponse = { + "values": [], + "error_message": "'123456789' does not appear to be a port name or number", + "display_value": "", + "valid_filter_operators": [ + FilterStringOperators.EQUALS, + FilterStringOperators.GREATER_THAN_OR_EQUAL, + FilterStringOperators.GREATER_THAN, + FilterStringOperators.IN, + FilterStringOperators.LESS_THAN, + FilterStringOperators.LESS_THAN_OR_EQUAL, + ], + } + + assert port.translate_type(port_request) == port_response + + +def test_cidr_translate_filter_func_equals(): + """Test to see if the cidr_translate_filter_func behaves as expected when the EQUALS + operator is used""" + + input_column = Column("user_ip", Integer) + input_operation = FilterOperator.EQUALS + input_values = [16843009] + + cidr_translate_filter_response = input_column == input_values[0] + + assert internet_address.translate_filter( + input_column, input_operation, input_values + ).compare(cidr_translate_filter_response) + + +def test_cidr_translate_filter_func_not_equals(): + """Test to see if the cidr_translate_filter_func behaves as expected when the NOT_EQUALS + operator is used""" + + input_column = Column("user_ip", Integer) + input_operation = FilterOperator.NOT_EQUALS + input_values = [16843009] + + cidr_translate_filter_response = input_column != input_values[0] + + assert internet_address.translate_filter( + input_column, input_operation, input_values + ).compare(cidr_translate_filter_response) + + +def test_cidr_translate_filter_func_greater_than_or_equals(): + """Test to see if the cidr_translate_filter_func behaves as expected when the + GREATER_THAN_OR_EQUALS operator is used""" + + input_column = Column("user_ip", Integer) + input_operation = FilterOperator.GREATER_THAN_OR_EQUALS + input_values = [16843009] + + cidr_translate_filter_response: sqlalchemy.sql.expression.BinaryExpression = ( + input_column >= input_values[0] + ) + + assert internet_address.translate_filter( + input_column, input_operation, input_values + ).compare(cidr_translate_filter_response) + + +def test_cidr_translate_filter_func_greater_than(): + """Test to see if the cidr_translate_filter_func behaves as expected when the + GREATER_THAN operator is used""" + + input_column = Column("user_ip", Integer) + input_operation = FilterOperator.GREATER_THAN + input_values = [16843009] + + cidr_translate_filter_response: sqlalchemy.sql.expression.BinaryExpression = ( + input_column > input_values[0] + ) + + assert internet_address.translate_filter( + input_column, input_operation, input_values + ).compare(cidr_translate_filter_response) + + +def 
test_cidr_translate_filter_func_less_than(): + """Test to see if the cidr_translate_filter_func behaves as expected when the LESS_THAN + operator is used""" + + input_column = Column("user_ip", Integer) + input_operation = FilterOperator.LESS_THAN + input_values = [16843009] + + cidr_translate_filter_response: sqlalchemy.sql.expression.BinaryExpression = ( + input_column < input_values[0] + ) + + assert internet_address.translate_filter( + input_column, input_operation, input_values + ).compare(cidr_translate_filter_response) + + +def test_cidr_translate_filter_func_less_than_or_equals(): + """Test to see if the cidr_translate_filter_func behaves as expected when the + LESS_THAN_OR_EQUALS operator is used""" + + input_column = Column("user_ip", Integer) + input_operation = FilterOperator.LESS_THAN_OR_EQUALS + input_values = [16843009] + + cidr_translate_filter_response: sqlalchemy.sql.expression.BinaryExpression = ( + input_column <= input_values[0] + ) + + assert internet_address.translate_filter( + input_column, input_operation, input_values + ).compare(cidr_translate_filter_response) + + +def test_cidr_translate_filter_func_in_single(): + """Test to see if the cidr_translate_filter_func behaves as expected when the IN operator + is used with a single IP""" + + input_column = Column("user_ip", Integer) + input_operation = FilterOperator.IN + input_values = [16843009] + + cidr_translate_filter_response: sqlalchemy.sql.expression.BinaryExpression = ( + input_column.in_(input_values) + ) + + assert internet_address.translate_filter( + input_column, input_operation, input_values + ).compare(cidr_translate_filter_response) + + +def test_cidr_translate_filter_func_in_double(): + """Test to see if the cidr_translate_filter_func behaves as expected when the IN operator + is used with two IPs""" + + input_column = Column("user_ip", Integer) + input_operation = FilterOperator.IN + input_values = [{"start": 16843009, "end": 33686018}] + + input_condition = input_column.in_([]) + + cidr_translate_filter_response: sqlalchemy.sql.expression.BinaryExpression = ( + input_condition | ((input_column <= 33686018) & (input_column >= 16843009)) + ) + + assert internet_address.translate_filter( + input_column, input_operation, input_values + ).compare(cidr_translate_filter_response) + + +def test_cidr_translate_filter_func_not_in_single(): + """Test to see if the cidr_translate_filter_func behaves as expected when the NOT_IN + operator is used with a single IP""" + + input_column = Column("user_ip", Integer) + input_operation = FilterOperator.NOT_IN + input_values = [16843009] + + cidr_translate_filter_response: sqlalchemy.sql.expression.BinaryExpression = ~( + input_column.in_(input_values) + ) + + assert internet_address.translate_filter( + input_column, input_operation, input_values + ).compare(cidr_translate_filter_response) + + +def test_cidr_translate_filter_func_not_in_double(): + """Test to see if the cidr_translate_filter_func behaves as expected when the NOT_IN + operator is used with two IPs""" + + input_column = Column("user_ip", Integer) + input_operation = FilterOperator.NOT_IN + input_values = [{"start": 16843009, "end": 33686018}] + + input_condition = ~(input_column.in_([])) + + cidr_translate_filter_response: sqlalchemy.sql.expression.BinaryExpression = ( + input_condition & (input_column > 33686018) & (input_column < 16843009) + ) + + assert internet_address.translate_filter( + input_column, input_operation, input_values + ).compare(cidr_translate_filter_response) + + +def
test_port_translate_filter_func_equals(): + """Test to see if the port_translate_filter_func behaves as expected when the EQUALS + operator is used""" + + input_column = Column("user_ip", Integer) + input_operation = FilterOperator.EQUALS + input_values = [[443]] + + port_translate_filter_response: sqlalchemy.sql.expression.BinaryExpression = ( + input_column.in_(input_values[0]) + ) + + assert port.translate_filter(input_column, input_operation, input_values).compare( + port_translate_filter_response + ) + + +def test_port_translate_filter_func_not_equals(): + """Test to see if the port_translate_filter_func behaves as expected when the NOT_EQUALS + operator is used""" + + input_column = Column("user_ip", Integer) + input_operation = FilterOperator.NOT_EQUALS + input_values = [[443]] + + port_translate_filter_response: sqlalchemy.sql.expression.BinaryExpression = ~( + input_column.in_(input_values[0]) + ) + + assert port.translate_filter(input_column, input_operation, input_values).compare( + port_translate_filter_response + ) + + +def test_port_translate_filter_func_greater_than_or_equals(): + """Test to see if the port_translate_filter_func behaves as expected when the + GREATER_THAN_OR_EQUALS operator is used""" + + input_column = Column("user_ip", Integer) + input_operation = FilterOperator.GREATER_THAN_OR_EQUALS + input_values = [[443]] + + port_translate_filter_response: sqlalchemy.sql.expression.BinaryExpression = ( + input_column >= input_values[0][0] + ) + + assert port.translate_filter(input_column, input_operation, input_values).compare( + port_translate_filter_response + ) + + +def test_port_translate_filter_func_greater_than(): + """Test to see if the port_translate_filter_func behaves as expected when the + GREATER_THAN operator is used""" + + input_column = Column("user_ip", Integer) + input_operation = FilterOperator.GREATER_THAN + input_values = [[443]] + + port_translate_filter_response: sqlalchemy.sql.expression.BinaryExpression = ( + input_column > input_values[0][0] + ) + + assert port.translate_filter(input_column, input_operation, input_values).compare( + port_translate_filter_response + ) + + +def test_port_translate_filter_func_less_than_or_equals(): + """Test to see if the port_translate_filter_func behaves as expected when the + LESS_THAN_OR_EQUALS operator is used""" + + input_column = Column("user_ip", Integer) + input_operation = FilterOperator.LESS_THAN_OR_EQUALS + input_values = [[443]] + + port_translate_filter_response: sqlalchemy.sql.expression.BinaryExpression = ( + input_column <= input_values[0][0] + ) + + assert port.translate_filter(input_column, input_operation, input_values).compare( + port_translate_filter_response + ) + + +def test_port_translate_filter_func_less_than(): + """Test to see if the port_translate_filter_func behaves as expected when the LESS_THAN + operator is used""" + + input_column = Column("user_ip", Integer) + input_operation = FilterOperator.LESS_THAN + input_values = [[443]] + + port_translate_filter_response: sqlalchemy.sql.expression.BinaryExpression = ( + input_column < input_values[0][0] + ) + + assert port.translate_filter(input_column, input_operation, input_values).compare( + port_translate_filter_response + ) + + +def test_port_translate_filter_func_in_single(): + """Test to see if the port_translate_filter_func behaves as expected when the IN operator + is used with a single port""" + + input_column = Column("user_ip", Integer) + input_operation = FilterOperator.IN + input_values = [[443]] + + 
port_translate_filter_response: sqlalchemy.sql.expression.BinaryExpression = ( + input_column.in_(input_values[0]) + ) + + assert port.translate_filter(input_column, input_operation, input_values).compare( + port_translate_filter_response + ) + + +def test_port_translate_filter_func_in_double(): + """Test to see if the port_translate_filter_func behaves as expected when the IN operator + is used with two ports""" + + input_column = Column("user_ip", Integer) + input_operation = FilterOperator.IN + input_values = [[443, 80]] + + port_translate_filter_response: sqlalchemy.sql.expression.BinaryExpression = ( + input_column.in_(input_values[0]) + ) + + assert port.translate_filter(input_column, input_operation, input_values).compare( + port_translate_filter_response + ) + + +def test_port_translate_filter_func_not_in_single(): + """Test to see if the port_translate_filter_func behaves as expected when the NOT_IN + operator is used with a single port""" + + input_column = Column("user_ip", Integer) + input_operation = FilterOperator.NOT_IN + input_values = [[443]] + + port_translate_filter_response: sqlalchemy.sql.expression.BinaryExpression = ~( + input_column.in_(input_values[0]) + ) + + assert port.translate_filter(input_column, input_operation, input_values).compare( + port_translate_filter_response + ) + + +def test_port_translate_filter_func_not_in_double(): + """Test to see if the port_translate_filter_func behaves as expected when the NOT_IN + operator is used with two ports""" + + input_column = Column("user_ip", Integer) + input_operation = FilterOperator.NOT_IN + input_values = [[443, 80]] + + port_translate_filter_response: sqlalchemy.sql.expression.BinaryExpression = ~( + input_column.in_(input_values[0]) + ) + + assert port.translate_filter(input_column, input_operation, input_values).compare( + port_translate_filter_response + ) diff --git a/tests/unit_tests/annotation_layers/fixtures.py b/tests/unit_tests/annotation_layers/fixtures.py new file mode 100644 index 0000000000000..fd07774fd7f9a --- /dev/null +++ b/tests/unit_tests/annotation_layers/fixtures.py @@ -0,0 +1,24 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# isort:skip_file +import dateutil.parser + + +START_STR = "2019-01-02T03:04:05.678900" +END_STR = "2020-01-02T03:04:05.678900" +START_DTTM = dateutil.parser.parse(START_STR) +END_DTTM = dateutil.parser.parse(END_STR) diff --git a/tests/unit_tests/annotation_layers/schema_tests.py b/tests/unit_tests/annotation_layers/schema_tests.py new file mode 100644 index 0000000000000..b8efcbb0033f9 --- /dev/null +++ b/tests/unit_tests/annotation_layers/schema_tests.py @@ -0,0 +1,157 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import pytest +from marshmallow.exceptions import ValidationError + +from superset.annotation_layers.annotations.schemas import ( + AnnotationPostSchema, + AnnotationPutSchema, +) +from superset.annotation_layers.schemas import ( + AnnotationLayerPostSchema, + AnnotationLayerPutSchema, +) +from tests.unit_tests.annotation_layers.fixtures import ( + END_DTTM, + END_STR, + START_DTTM, + START_STR, +) + + +def test_annotation_layer_post_schema_with_name() -> None: + result = AnnotationLayerPostSchema().load({"name": "foo"}) + assert result["name"] == "foo" + assert "descr" not in result + + +def test_annotation_layer_post_schema_with_name_and_descr() -> None: + result = AnnotationLayerPostSchema().load({"name": "foo", "descr": "bar"}) + assert result["name"] == "foo" + assert result["descr"] == "bar" + + +def test_annotation_layer_post_schema_with_null_name() -> None: + with pytest.raises(ValidationError): + AnnotationLayerPostSchema().load({"name": None}) + + +def test_annotation_layer_post_schema_empty() -> None: + with pytest.raises(ValidationError): + AnnotationLayerPostSchema().load({}) + + +def test_annotation_layer_put_schema_empty() -> None: + result = AnnotationLayerPutSchema().load({}) + assert result == {} + + +def test_annotation_layer_put_schema_with_null_name() -> None: + with pytest.raises(ValidationError): + AnnotationLayerPutSchema().load({"name": None}) + + +def test_annotation_layer_put_schema_with_null_descr() -> None: + with pytest.raises(ValidationError): + AnnotationLayerPutSchema().load({"descr": None}) + + +def test_annotation_post_schema_basic() -> None: + result = AnnotationPostSchema().load( + {"short_descr": "foo", "start_dttm": START_STR, "end_dttm": END_STR} + ) + assert result["short_descr"] == "foo" + assert result["start_dttm"] == START_DTTM + assert result["end_dttm"] == END_DTTM + + +def test_annotation_post_schema_full() -> None: + result = AnnotationPostSchema().load( + { + "short_descr": "foo", + "long_descr": "bar", + "start_dttm": START_STR, + "end_dttm": END_STR, + "json_metadata": '{"abc": 123}', + } + ) + assert result["short_descr"] == "foo" + assert result["long_descr"] == "bar" + assert result["start_dttm"] == START_DTTM + assert result["end_dttm"] == END_DTTM + assert result["json_metadata"] == '{"abc": 123}' + + +def test_annotation_post_schema_short_descr_null() -> None: + with pytest.raises(ValidationError): + AnnotationPostSchema().load( + {"short_descr": None, "start_dttm": START_STR, "end_dttm": END_STR} + ) + + +def test_annotation_post_schema_start_dttm_null() -> None: + with pytest.raises(ValidationError): + result = AnnotationPostSchema().load( + {"short_descr": "foo", "start_dttm": None, "end_dttm": END_STR} + ) + + +def test_annotation_post_schema_end_dttm_null() -> None: + with pytest.raises(ValidationError): + AnnotationPostSchema().load( + 
{"short_descr": "foo", "start_dttm": START_STR, "end_dttm": None} + ) + + +def test_annotation_put_schema_empty() -> None: + result = AnnotationPutSchema().load({}) + assert result == {} + + +def test_annotation_put_schema_short_descr_null() -> None: + with pytest.raises(ValidationError): + AnnotationPutSchema().load({"short_descr": None}) + + +def test_annotation_put_schema_start_dttm_null() -> None: + with pytest.raises(ValidationError): + AnnotationPutSchema().load({"start_dttm": None}) + + +def test_annotation_put_schema_end_dttm_null() -> None: + with pytest.raises(ValidationError): + AnnotationPutSchema().load({"end_dttm": None}) + + +def test_annotation_put_schema_json_metadata() -> None: + result = AnnotationPutSchema().load({"json_metadata": '{"abc": 123}'}) + assert result["json_metadata"] == '{"abc": 123}' + + +def test_annotation_put_schema_json_metadata_null() -> None: + result = AnnotationPutSchema().load({"json_metadata": None}) + assert result["json_metadata"] is None + + +def test_annotation_put_schema_json_metadata_empty() -> None: + result = AnnotationPutSchema().load({"json_metadata": ""}) + assert result["json_metadata"] == "" + + +def test_annotation_put_schema_json_metadata_invalid() -> None: + with pytest.raises(ValidationError): + AnnotationPutSchema().load({"json_metadata": "foo bar"}) diff --git a/tests/unit_tests/charts/__init__.py b/tests/unit_tests/charts/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/charts/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/charts/commands/__init__.py b/tests/unit_tests/charts/commands/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/charts/commands/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/tests/unit_tests/charts/commands/importers/__init__.py b/tests/unit_tests/charts/commands/importers/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/charts/commands/importers/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/charts/commands/importers/v1/__init__.py b/tests/unit_tests/charts/commands/importers/v1/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/charts/commands/importers/v1/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/charts/commands/importers/v1/import_test.py b/tests/unit_tests/charts/commands/importers/v1/import_test.py new file mode 100644 index 0000000000000..e29fd70fb8a70 --- /dev/null +++ b/tests/unit_tests/charts/commands/importers/v1/import_test.py @@ -0,0 +1,69 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# pylint: disable=unused-argument, import-outside-toplevel, unused-import, invalid-name + +import copy + +from sqlalchemy.orm.session import Session + + +def test_import_chart(session: Session) -> None: + """ + Test importing a chart. 
+ """ + from superset.charts.commands.importers.v1.utils import import_chart + from superset.connectors.sqla.models import SqlaTable + from superset.models.core import Database + from superset.models.slice import Slice + from tests.integration_tests.fixtures.importexport import chart_config + + engine = session.get_bind() + Slice.metadata.create_all(engine) # pylint: disable=no-member + + config = copy.deepcopy(chart_config) + config["datasource_id"] = 1 + config["datasource_type"] = "table" + + chart = import_chart(session, config) + assert chart.slice_name == "Deck Path" + assert chart.viz_type == "deck_path" + assert chart.is_managed_externally is False + assert chart.external_url is None + + +def test_import_chart_managed_externally(session: Session) -> None: + """ + Test importing a chart that is managed externally. + """ + from superset.charts.commands.importers.v1.utils import import_chart + from superset.connectors.sqla.models import SqlaTable + from superset.models.core import Database + from superset.models.slice import Slice + from tests.integration_tests.fixtures.importexport import chart_config + + engine = session.get_bind() + Slice.metadata.create_all(engine) # pylint: disable=no-member + + config = copy.deepcopy(chart_config) + config["datasource_id"] = 1 + config["datasource_type"] = "table" + config["is_managed_externally"] = True + config["external_url"] = "https://example.org/my_chart" + + chart = import_chart(session, config) + assert chart.is_managed_externally is True + assert chart.external_url == "https://example.org/my_chart" diff --git a/tests/unit_tests/charts/dao/__init__.py b/tests/unit_tests/charts/dao/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/charts/dao/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/charts/dao/dao_tests.py b/tests/unit_tests/charts/dao/dao_tests.py new file mode 100644 index 0000000000000..15310712a5f8a --- /dev/null +++ b/tests/unit_tests/charts/dao/dao_tests.py @@ -0,0 +1,67 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. + +from typing import Iterator + +import pytest +from sqlalchemy.orm.session import Session + +from superset.utils.core import DatasourceType + + +@pytest.fixture +def session_with_data(session: Session) -> Iterator[Session]: + from superset.models.slice import Slice + + engine = session.get_bind() + Slice.metadata.create_all(engine) # pylint: disable=no-member + + slice_obj = Slice( + id=1, + datasource_id=1, + datasource_type=DatasourceType.TABLE, + datasource_name="tmp_perm_table", + slice_name="slice_name", + ) + + session.add(slice_obj) + session.commit() + yield session + session.rollback() + + +def test_slice_find_by_id_skip_base_filter(session_with_data: Session) -> None: + from superset.charts.dao import ChartDAO + from superset.models.slice import Slice + + result = ChartDAO.find_by_id(1, session=session_with_data, skip_base_filter=True) + + assert result + assert 1 == result.id + assert "slice_name" == result.slice_name + assert isinstance(result, Slice) + + +def test_datasource_find_by_id_skip_base_filter_not_found( + session_with_data: Session, +) -> None: + from superset.charts.dao import ChartDAO + + result = ChartDAO.find_by_id( + 125326326, session=session_with_data, skip_base_filter=True + ) + assert result is None diff --git a/tests/unit_tests/charts/test_post_processing.py b/tests/unit_tests/charts/test_post_processing.py new file mode 100644 index 0000000000000..be28aba922037 --- /dev/null +++ b/tests/unit_tests/charts/test_post_processing.py @@ -0,0 +1,2031 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import json + +import pandas as pd +from flask_babel import lazy_gettext as _ +from numpy import True_ +from pytest import raises +from sqlalchemy.orm.session import Session + +from superset.charts.post_processing import apply_post_process, pivot_df, table +from superset.common.chart_data import ChartDataResultFormat +from superset.utils.core import GenericDataType + + +def test_pivot_df_no_cols_no_rows_single_metric(): + """ + Pivot table when no cols/rows and 1 metric are selected. 
+ """ + # when no cols/rows are selected there are no groupbys in the query, + # and the data has only the metric(s) + df = pd.DataFrame.from_dict({"SUM(num)": {0: 80679663}}) + assert ( + df.to_markdown() + == """ +| | SUM(num) | +|---:|------------:| +| 0 | 8.06797e+07 | + """.strip() + ) + + pivoted = pivot_df( + df, + rows=[], + columns=[], + metrics=["SUM(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=False, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == f""" +| | ('SUM(num)',) | +|:-----------------|----------------:| +| ('{_("Total")} (Sum)',) | 8.06797e+07 | + """.strip() + ) + + # transpose_pivot and combine_metrics do nothing in this case + pivoted = pivot_df( + df, + rows=[], + columns=[], + metrics=["SUM(num)"], + aggfunc="Sum", + transpose_pivot=True, + combine_metrics=True, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == f""" +| | ('SUM(num)',) | +|:-----------------|----------------:| +| ('{_("Total")} (Sum)',) | 8.06797e+07 | + """.strip() + ) + + # apply_metrics_on_rows will pivot the table, moving the metrics + # to rows + pivoted = pivot_df( + df, + rows=[], + columns=[], + metrics=["SUM(num)"], + aggfunc="Sum", + transpose_pivot=True, + combine_metrics=True, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=True, + ) + assert ( + pivoted.to_markdown() + == f""" +| | ('{_("Total")} (Sum)',) | +|:--------------|-------------------:| +| ('SUM(num)',) | 8.06797e+07 | + """.strip() + ) + + # showing totals + pivoted = pivot_df( + df, + rows=[], + columns=[], + metrics=["SUM(num)"], + aggfunc="Sum", + transpose_pivot=True, + combine_metrics=True, + show_rows_total=True, + show_columns_total=True, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == f""" +| | ('SUM(num)',) | ('Total (Sum)',) | +|:-----------------|----------------:|-------------------:| +| ('{_("Total")} (Sum)',) | 8.06797e+07 | 8.06797e+07 | + """.strip() + ) + + +def test_pivot_df_no_cols_no_rows_two_metrics(): + """ + Pivot table when no cols/rows and 2 metrics are selected. 
+ """ + # when no cols/rows are selected there are no groupbys in the query, + # and the data has only the metrics + df = pd.DataFrame.from_dict({"SUM(num)": {0: 80679663}, "MAX(num)": {0: 37296}}) + assert ( + df.to_markdown() + == """ +| | SUM(num) | MAX(num) | +|---:|------------:|-----------:| +| 0 | 8.06797e+07 | 37296 | + """.strip() + ) + + pivoted = pivot_df( + df, + rows=[], + columns=[], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=False, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == f""" +| | ('SUM(num)',) | ('MAX(num)',) | +|:-----------------|----------------:|----------------:| +| ('{_("Total")} (Sum)',) | 8.06797e+07 | 37296 | + """.strip() + ) + + # transpose_pivot and combine_metrics do nothing in this case + pivoted = pivot_df( + df, + rows=[], + columns=[], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=True, + combine_metrics=True, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('SUM(num)',) | ('MAX(num)',) | +|:-----------------|----------------:|----------------:| +| ('Total (Sum)',) | 8.06797e+07 | 37296 | + """.strip() + ) + + # apply_metrics_on_rows will pivot the table, moving the metrics + # to rows + pivoted = pivot_df( + df, + rows=[], + columns=[], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=True, + combine_metrics=True, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=True, + ) + assert ( + pivoted.to_markdown() + == f""" +| | ('{_("Total")} (Sum)',) | +|:--------------|-------------------:| +| ('SUM(num)',) | 8.06797e+07 | +| ('MAX(num)',) | 37296 | + """.strip() + ) + + # when showing totals we only add a column, since adding a row + # would be redundant + pivoted = pivot_df( + df, + rows=[], + columns=[], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=True, + combine_metrics=True, + show_rows_total=True, + show_columns_total=True, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == f""" +| | ('SUM(num)',) | ('MAX(num)',) | ('{_("Total")} (Sum)',) | +|:-----------------|----------------:|----------------:|-------------------:| +| ('{_("Total")} (Sum)',) | 8.06797e+07 | 37296 | 8.0717e+07 | + """.strip() + ) + + +def test_pivot_df_single_row_two_metrics(): + """ + Pivot table when a single column and 2 metrics are selected. 
+ """ + df = pd.DataFrame.from_dict( + { + "gender": {0: "girl", 1: "boy"}, + "SUM(num)": {0: 118065, 1: 47123}, + "MAX(num)": {0: 2588, 1: 1280}, + } + ) + assert ( + df.to_markdown() + == """ +| | gender | SUM(num) | MAX(num) | +|---:|:---------|-----------:|-----------:| +| 0 | girl | 118065 | 2588 | +| 1 | boy | 47123 | 1280 | + """.strip() + ) + + pivoted = pivot_df( + df, + rows=["gender"], + columns=[], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=False, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('SUM(num)',) | ('MAX(num)',) | +|:----------|----------------:|----------------:| +| ('boy',) | 47123 | 1280 | +| ('girl',) | 118065 | 2588 | + """.strip() + ) + + # transpose_pivot + pivoted = pivot_df( + df, + rows=["gender"], + columns=[], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=True, + combine_metrics=False, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == f""" +| | ('SUM(num)', 'boy') | ('SUM(num)', 'girl') | ('MAX(num)', 'boy') | ('MAX(num)', 'girl') | +|:-----------------|----------------------:|-----------------------:|----------------------:|-----------------------:| +| ('{_("Total")} (Sum)',) | 47123 | 118065 | 1280 | 2588 | + """.strip() + ) + + # combine_metrics does nothing in this case + pivoted = pivot_df( + df, + rows=["gender"], + columns=[], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=True, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('SUM(num)',) | ('MAX(num)',) | +|:----------|----------------:|----------------:| +| ('boy',) | 47123 | 1280 | +| ('girl',) | 118065 | 2588 | + """.strip() + ) + + # show totals + pivoted = pivot_df( + df, + rows=["gender"], + columns=[], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=False, + show_rows_total=True, + show_columns_total=True, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == f""" +| | ('SUM(num)',) | ('MAX(num)',) | ('{_("Total")} (Sum)',) | +|:-----------------|----------------:|----------------:|-------------------:| +| ('boy',) | 47123 | 1280 | 48403 | +| ('girl',) | 118065 | 2588 | 120653 | +| ('{_("Total")} (Sum)',) | 165188 | 3868 | 169056 | + """.strip() + ) + + # apply_metrics_on_rows + pivoted = pivot_df( + df, + rows=["gender"], + columns=[], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=False, + show_rows_total=True, + show_columns_total=True, + apply_metrics_on_rows=True, + ) + assert ( + pivoted.to_markdown() + == f""" +| | ('{_("Total")} (Sum)',) | +|:-------------------------|-------------------:| +| ('SUM(num)', 'boy') | 47123 | +| ('SUM(num)', 'girl') | 118065 | +| ('SUM(num)', 'Subtotal') | 165188 | +| ('MAX(num)', 'boy') | 1280 | +| ('MAX(num)', 'girl') | 2588 | +| ('MAX(num)', 'Subtotal') | 3868 | +| ('{_("Total")} (Sum)', '') | 169056 | + """.strip() + ) + + # apply_metrics_on_rows with combine_metrics + pivoted = pivot_df( + df, + rows=["gender"], + columns=[], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=True, + show_rows_total=True, + show_columns_total=True, + apply_metrics_on_rows=True, + ) + assert ( + pivoted.to_markdown() + == f""" 
+| | ('{_("Total")} (Sum)',) | +|:---------------------|-------------------:| +| ('boy', 'SUM(num)') | 47123 | +| ('boy', 'MAX(num)') | 1280 | +| ('boy', 'Subtotal') | 48403 | +| ('girl', 'SUM(num)') | 118065 | +| ('girl', 'MAX(num)') | 2588 | +| ('girl', 'Subtotal') | 120653 | +| ('{_("Total")} (Sum)', '') | 169056 | + """.strip() + ) + + +def test_pivot_df_complex(): + """ + Pivot table when a column, rows and 2 metrics are selected. + """ + df = pd.DataFrame.from_dict( + { + "state": { + 0: "CA", + 1: "CA", + 2: "CA", + 3: "FL", + 4: "CA", + 5: "CA", + 6: "FL", + 7: "FL", + 8: "FL", + 9: "CA", + 10: "FL", + 11: "FL", + }, + "gender": { + 0: "girl", + 1: "boy", + 2: "girl", + 3: "girl", + 4: "girl", + 5: "girl", + 6: "boy", + 7: "girl", + 8: "girl", + 9: "boy", + 10: "boy", + 11: "girl", + }, + "name": { + 0: "Amy", + 1: "Edward", + 2: "Sophia", + 3: "Amy", + 4: "Cindy", + 5: "Dawn", + 6: "Edward", + 7: "Sophia", + 8: "Dawn", + 9: "Tony", + 10: "Tony", + 11: "Cindy", + }, + "SUM(num)": { + 0: 45426, + 1: 31290, + 2: 18859, + 3: 14740, + 4: 14149, + 5: 11403, + 6: 9395, + 7: 7181, + 8: 5089, + 9: 3765, + 10: 2673, + 11: 1218, + }, + "MAX(num)": { + 0: 2227, + 1: 1280, + 2: 2588, + 3: 854, + 4: 842, + 5: 1157, + 6: 389, + 7: 1187, + 8: 461, + 9: 598, + 10: 247, + 11: 217, + }, + } + ) + assert ( + df.to_markdown() + == """ +| | state | gender | name | SUM(num) | MAX(num) | +|---:|:--------|:---------|:-------|-----------:|-----------:| +| 0 | CA | girl | Amy | 45426 | 2227 | +| 1 | CA | boy | Edward | 31290 | 1280 | +| 2 | CA | girl | Sophia | 18859 | 2588 | +| 3 | FL | girl | Amy | 14740 | 854 | +| 4 | CA | girl | Cindy | 14149 | 842 | +| 5 | CA | girl | Dawn | 11403 | 1157 | +| 6 | FL | boy | Edward | 9395 | 389 | +| 7 | FL | girl | Sophia | 7181 | 1187 | +| 8 | FL | girl | Dawn | 5089 | 461 | +| 9 | CA | boy | Tony | 3765 | 598 | +| 10 | FL | boy | Tony | 2673 | 247 | +| 11 | FL | girl | Cindy | 1218 | 217 | + """.strip() + ) + + pivoted = pivot_df( + df, + rows=["gender", "name"], + columns=["state"], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=False, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('SUM(num)', 'CA') | ('SUM(num)', 'FL') | ('MAX(num)', 'CA') | ('MAX(num)', 'FL') | +|:-------------------|---------------------:|---------------------:|---------------------:|---------------------:| +| ('boy', 'Edward') | 31290 | 9395 | 1280 | 389 | +| ('boy', 'Tony') | 3765 | 2673 | 598 | 247 | +| ('girl', 'Amy') | 45426 | 14740 | 2227 | 854 | +| ('girl', 'Cindy') | 14149 | 1218 | 842 | 217 | +| ('girl', 'Dawn') | 11403 | 5089 | 1157 | 461 | +| ('girl', 'Sophia') | 18859 | 7181 | 2588 | 1187 | + """.strip() + ) + + # transpose_pivot + pivoted = pivot_df( + df, + rows=["gender", "name"], + columns=["state"], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=True, + combine_metrics=False, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('SUM(num)', 'boy', 'Edward') | ('SUM(num)', 'boy', 'Tony') | ('SUM(num)', 'girl', 'Amy') | ('SUM(num)', 'girl', 'Cindy') | ('SUM(num)', 'girl', 'Dawn') | ('SUM(num)', 'girl', 'Sophia') | ('MAX(num)', 'boy', 'Edward') | ('MAX(num)', 'boy', 'Tony') | ('MAX(num)', 'girl', 'Amy') | ('MAX(num)', 'girl', 'Cindy') | ('MAX(num)', 'girl', 'Dawn') | ('MAX(num)', 'girl', 'Sophia') | 
+|:--------|--------------------------------:|------------------------------:|------------------------------:|--------------------------------:|-------------------------------:|---------------------------------:|--------------------------------:|------------------------------:|------------------------------:|--------------------------------:|-------------------------------:|---------------------------------:| +| ('CA',) | 31290 | 3765 | 45426 | 14149 | 11403 | 18859 | 1280 | 598 | 2227 | 842 | 1157 | 2588 | +| ('FL',) | 9395 | 2673 | 14740 | 1218 | 5089 | 7181 | 389 | 247 | 854 | 217 | 461 | 1187 | + """.strip() + ) + + # combine_metrics + pivoted = pivot_df( + df, + rows=["gender", "name"], + columns=["state"], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=True, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('CA', 'SUM(num)') | ('CA', 'MAX(num)') | ('FL', 'SUM(num)') | ('FL', 'MAX(num)') | +|:-------------------|---------------------:|---------------------:|---------------------:|---------------------:| +| ('boy', 'Edward') | 31290 | 1280 | 9395 | 389 | +| ('boy', 'Tony') | 3765 | 598 | 2673 | 247 | +| ('girl', 'Amy') | 45426 | 2227 | 14740 | 854 | +| ('girl', 'Cindy') | 14149 | 842 | 1218 | 217 | +| ('girl', 'Dawn') | 11403 | 1157 | 5089 | 461 | +| ('girl', 'Sophia') | 18859 | 2588 | 7181 | 1187 | + """.strip() + ) + + # show totals + pivoted = pivot_df( + df, + rows=["gender", "name"], + columns=["state"], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=False, + show_rows_total=True, + show_columns_total=True, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('SUM(num)', 'CA') | ('SUM(num)', 'FL') | ('SUM(num)', 'Subtotal') | ('MAX(num)', 'CA') | ('MAX(num)', 'FL') | ('MAX(num)', 'Subtotal') | ('Total (Sum)', '') | +|:---------------------|---------------------:|---------------------:|---------------------------:|---------------------:|---------------------:|---------------------------:|----------------------:| +| ('boy', 'Edward') | 31290 | 9395 | 40685 | 1280 | 389 | 1669 | 42354 | +| ('boy', 'Tony') | 3765 | 2673 | 6438 | 598 | 247 | 845 | 7283 | +| ('boy', 'Subtotal') | 35055 | 12068 | 47123 | 1878 | 636 | 2514 | 49637 | +| ('girl', 'Amy') | 45426 | 14740 | 60166 | 2227 | 854 | 3081 | 63247 | +| ('girl', 'Cindy') | 14149 | 1218 | 15367 | 842 | 217 | 1059 | 16426 | +| ('girl', 'Dawn') | 11403 | 5089 | 16492 | 1157 | 461 | 1618 | 18110 | +| ('girl', 'Sophia') | 18859 | 7181 | 26040 | 2588 | 1187 | 3775 | 29815 | +| ('girl', 'Subtotal') | 89837 | 28228 | 118065 | 6814 | 2719 | 9533 | 127598 | +| ('Total (Sum)', '') | 124892 | 40296 | 165188 | 8692 | 3355 | 12047 | 177235 | + """.strip() + ) + + # apply_metrics_on_rows + pivoted = pivot_df( + df, + rows=["gender", "name"], + columns=["state"], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=False, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=True, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('CA',) | ('FL',) | +|:-------------------------------|----------:|----------:| +| ('SUM(num)', 'boy', 'Edward') | 31290 | 9395 | +| ('SUM(num)', 'boy', 'Tony') | 3765 | 2673 | +| ('SUM(num)', 'girl', 'Amy') | 45426 | 14740 | +| ('SUM(num)', 'girl', 'Cindy') | 14149 | 1218 | +| ('SUM(num)', 'girl', 'Dawn') | 11403 | 5089 | +| ('SUM(num)', 
'girl', 'Sophia') | 18859 | 7181 | +| ('MAX(num)', 'boy', 'Edward') | 1280 | 389 | +| ('MAX(num)', 'boy', 'Tony') | 598 | 247 | +| ('MAX(num)', 'girl', 'Amy') | 2227 | 854 | +| ('MAX(num)', 'girl', 'Cindy') | 842 | 217 | +| ('MAX(num)', 'girl', 'Dawn') | 1157 | 461 | +| ('MAX(num)', 'girl', 'Sophia') | 2588 | 1187 | + """.strip() + ) + + # apply_metrics_on_rows with combine_metrics + pivoted = pivot_df( + df, + rows=["gender", "name"], + columns=["state"], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=True, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=True, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('CA',) | ('FL',) | +|:-------------------------------|----------:|----------:| +| ('boy', 'Edward', 'SUM(num)') | 31290 | 9395 | +| ('boy', 'Edward', 'MAX(num)') | 1280 | 389 | +| ('boy', 'Tony', 'SUM(num)') | 3765 | 2673 | +| ('boy', 'Tony', 'MAX(num)') | 598 | 247 | +| ('girl', 'Amy', 'SUM(num)') | 45426 | 14740 | +| ('girl', 'Amy', 'MAX(num)') | 2227 | 854 | +| ('girl', 'Cindy', 'SUM(num)') | 14149 | 1218 | +| ('girl', 'Cindy', 'MAX(num)') | 842 | 217 | +| ('girl', 'Dawn', 'SUM(num)') | 11403 | 5089 | +| ('girl', 'Dawn', 'MAX(num)') | 1157 | 461 | +| ('girl', 'Sophia', 'SUM(num)') | 18859 | 7181 | +| ('girl', 'Sophia', 'MAX(num)') | 2588 | 1187 | + """.strip() + ) + + # everything + pivoted = pivot_df( + df, + rows=["gender", "name"], + columns=["state"], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=True, + combine_metrics=True, + show_rows_total=True, + show_columns_total=True, + apply_metrics_on_rows=True, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('boy', 'Edward') | ('boy', 'Tony') | ('boy', 'Subtotal') | ('girl', 'Amy') | ('girl', 'Cindy') | ('girl', 'Dawn') | ('girl', 'Sophia') | ('girl', 'Subtotal') | ('Total (Sum)', '') | +|:--------------------|--------------------:|------------------:|----------------------:|------------------:|--------------------:|-------------------:|---------------------:|-----------------------:|----------------------:| +| ('CA', 'SUM(num)') | 31290 | 3765 | 35055 | 45426 | 14149 | 11403 | 18859 | 89837 | 124892 | +| ('CA', 'MAX(num)') | 1280 | 598 | 1878 | 2227 | 842 | 1157 | 2588 | 6814 | 8692 | +| ('CA', 'Subtotal') | 32570 | 4363 | 36933 | 47653 | 14991 | 12560 | 21447 | 96651 | 133584 | +| ('FL', 'SUM(num)') | 9395 | 2673 | 12068 | 14740 | 1218 | 5089 | 7181 | 28228 | 40296 | +| ('FL', 'MAX(num)') | 389 | 247 | 636 | 854 | 217 | 461 | 1187 | 2719 | 3355 | +| ('FL', 'Subtotal') | 9784 | 2920 | 12704 | 15594 | 1435 | 5550 | 8368 | 30947 | 43651 | +| ('Total (Sum)', '') | 42354 | 7283 | 49637 | 63247 | 16426 | 18110 | 29815 | 127598 | 177235 | + """.strip() + ) + + # fraction + pivoted = pivot_df( + df, + rows=["gender", "name"], + columns=["state"], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum as Fraction of Columns", + transpose_pivot=False, + combine_metrics=False, + show_rows_total=False, + show_columns_total=True, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('SUM(num)', 'CA') | ('SUM(num)', 'FL') | ('MAX(num)', 'CA') | ('MAX(num)', 'FL') | +|:-------------------------------------------|---------------------:|---------------------:|---------------------:|---------------------:| +| ('boy', 'Edward') | 0.250536 | 0.23315 | 0.147262 | 0.115946 | +| ('boy', 'Tony') | 0.030146 | 0.0663341 | 0.0687989 | 0.0736215 | +| ('boy', 'Subtotal') | 0.280683 | 0.299484 | 0.216061 | 0.189568 | +| 
('girl', 'Amy') | 0.363722 | 0.365793 | 0.256213 | 0.254545 | +| ('girl', 'Cindy') | 0.11329 | 0.0302263 | 0.0968707 | 0.0646796 | +| ('girl', 'Dawn') | 0.0913029 | 0.12629 | 0.133111 | 0.137407 | +| ('girl', 'Sophia') | 0.151002 | 0.178206 | 0.297745 | 0.3538 | +| ('girl', 'Subtotal') | 0.719317 | 0.700516 | 0.783939 | 0.810432 | +| ('Total (Sum as Fraction of Columns)', '') | 1 | 1 | 1 | 1 | + """.strip() + ) + + +def test_pivot_df_multi_column(): + """ + Pivot table when 2 columns, no rows and 2 metrics are selected. + """ + df = pd.DataFrame.from_dict( + { + "state": { + 0: "CA", + 1: "CA", + 2: "CA", + 3: "FL", + 4: "CA", + 5: "CA", + 6: "FL", + 7: "FL", + 8: "FL", + 9: "CA", + 10: "FL", + 11: "FL", + }, + "gender": { + 0: "girl", + 1: "boy", + 2: "girl", + 3: "girl", + 4: "girl", + 5: "girl", + 6: "boy", + 7: "girl", + 8: "girl", + 9: "boy", + 10: "boy", + 11: "girl", + }, + "SUM(num)": { + 0: 45426, + 1: 31290, + 2: 18859, + 3: 14740, + 4: 14149, + 5: 11403, + 6: 9395, + 7: 7181, + 8: 5089, + 9: 3765, + 10: 2673, + 11: 1218, + }, + "MAX(num)": { + 0: 2227, + 1: 1280, + 2: 2588, + 3: 854, + 4: 842, + 5: 1157, + 6: 389, + 7: 1187, + 8: 461, + 9: 598, + 10: 247, + 11: 217, + }, + } + ) + + pivoted = pivot_df( + df, + rows=None, + columns=["state", "gender"], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=False, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('SUM(num)', 'boy') | ('SUM(num)', 'girl') | ('MAX(num)', 'boy') | ('MAX(num)', 'girl') | +|:-----------------|----------------------:|-----------------------:|----------------------:|-----------------------:| +| ('CA',) | 35055 | 89837 | 1878 | 6814 | +| ('Total (Sum)',) | 12068 | 28228 | 636 | 2719 | + """.strip() + ) + + # transpose_pivot + pivoted = pivot_df( + df, + rows=None, + columns=["state", "gender"], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=True, + combine_metrics=False, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('SUM(num)',) | ('MAX(num)',) | +|:---------------|----------------:|----------------:| +| ('CA', 'boy') | 35055 | 1878 | +| ('CA', 'girl') | 89837 | 6814 | +| ('FL', 'boy') | 12068 | 636 | +| ('FL', 'girl') | 28228 | 2719 | + """.strip() + ) + + # combine_metrics + pivoted = pivot_df( + df, + rows=None, + columns=["state", "gender"], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=True, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('boy', 'SUM(num)') | ('boy', 'MAX(num)') | ('girl', 'SUM(num)') | ('girl', 'MAX(num)') | +|:-----------------|----------------------:|----------------------:|-----------------------:|-----------------------:| +| ('CA',) | 35055 | 1878 | 89837 | 6814 | +| ('Total (Sum)',) | 12068 | 636 | 28228 | 2719 | + """.strip() + ) + + # show totals + pivoted = pivot_df( + df, + rows=None, + columns=["state", "gender"], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=False, + show_rows_total=True, + show_columns_total=True, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('SUM(num)', 'boy') | ('SUM(num)', 'girl') | ('SUM(num)', 'Subtotal') | ('MAX(num)', 'boy') | ('MAX(num)', 'girl') | ('MAX(num)', 'Subtotal') | 
('Total (Sum)', '') | +|:-----------------|----------------------:|-----------------------:|---------------------------:|----------------------:|-----------------------:|---------------------------:|----------------------:| +| ('CA',) | 35055 | 89837 | 124892 | 1878 | 6814 | 8692 | 133584 | +| ('Total (Sum)',) | 12068 | 28228 | 40296 | 636 | 2719 | 3355 | 43651 | + + """.strip() + ) + + # apply_metrics_on_rows + pivoted = pivot_df( + df, + rows=None, + columns=["state", "gender"], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=False, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=True, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('CA', 'boy') | ('CA', 'girl') | ('FL', 'boy') | ('FL', 'girl') | +|:--------------|----------------:|-----------------:|----------------:|-----------------:| +| ('SUM(num)',) | 35055 | 89837 | 12068 | 28228 | +| ('MAX(num)',) | 1878 | 6814 | 636 | 2719 | + """.strip() + ) + + # apply_metrics_on_rows with combine_metrics + pivoted = pivot_df( + df, + rows=None, + columns=["state", "gender"], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=True, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=True, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('CA', 'boy') | ('CA', 'girl') | ('FL', 'boy') | ('FL', 'girl') | +|:--------------|----------------:|-----------------:|----------------:|-----------------:| +| ('SUM(num)',) | 35055 | 89837 | 12068 | 28228 | +| ('MAX(num)',) | 1878 | 6814 | 636 | 2719 | + """.strip() + ) + + # everything + pivoted = pivot_df( + df, + rows=None, + columns=["state", "gender"], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=True, + combine_metrics=True, + show_rows_total=True, + show_columns_total=True, + apply_metrics_on_rows=True, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('CA',) | ('Total (Sum)',) | +|:---------------------|----------:|-------------------:| +| ('boy', 'SUM(num)') | 35055 | 12068 | +| ('boy', 'MAX(num)') | 1878 | 636 | +| ('boy', 'Subtotal') | 36933 | 12704 | +| ('girl', 'SUM(num)') | 89837 | 28228 | +| ('girl', 'MAX(num)') | 6814 | 2719 | +| ('girl', 'Subtotal') | 96651 | 30947 | +| ('Total (Sum)', '') | 133584 | 43651 | + """.strip() + ) + + # fraction + pivoted = pivot_df( + df, + rows=None, + columns=["state", "gender"], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum as Fraction of Columns", + transpose_pivot=False, + combine_metrics=False, + show_rows_total=False, + show_columns_total=True, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('SUM(num)', 'boy') | ('SUM(num)', 'girl') | ('MAX(num)', 'boy') | ('MAX(num)', 'girl') | +|:----------------------------------------|----------------------:|-----------------------:|----------------------:|-----------------------:| +| ('CA',) | 0.743904 | 0.760911 | 0.747017 | 0.71478 | +| ('Total (Sum as Fraction of Columns)',) | 0.256096 | 0.239089 | 0.252983 | 0.28522 | + """.strip() + ) + + +def test_pivot_df_complex_null_values(): + """ + Pivot table when a column, rows and 2 metrics are selected. 
+ """ + df = pd.DataFrame.from_dict( + { + "state": { + 0: None, + 1: None, + 2: None, + 3: None, + 4: None, + 5: None, + 6: None, + 7: None, + 8: None, + 9: None, + 10: None, + 11: None, + }, + "gender": { + 0: "girl", + 1: "boy", + 2: "girl", + 3: "girl", + 4: "girl", + 5: "girl", + 6: "boy", + 7: "girl", + 8: "girl", + 9: "boy", + 10: "boy", + 11: "girl", + }, + "name": { + 0: "Amy", + 1: "Edward", + 2: "Sophia", + 3: "Amy", + 4: "Cindy", + 5: "Dawn", + 6: "Edward", + 7: "Sophia", + 8: "Dawn", + 9: "Tony", + 10: "Tony", + 11: "Cindy", + }, + "SUM(num)": { + 0: 45426, + 1: 31290, + 2: 18859, + 3: 14740, + 4: 14149, + 5: 11403, + 6: 9395, + 7: 7181, + 8: 5089, + 9: 3765, + 10: 2673, + 11: 1218, + }, + "MAX(num)": { + 0: 2227, + 1: 1280, + 2: 2588, + 3: 854, + 4: 842, + 5: 1157, + 6: 389, + 7: 1187, + 8: 461, + 9: 598, + 10: 247, + 11: 217, + }, + } + ) + assert ( + df.to_markdown() + == """ +| | state | gender | name | SUM(num) | MAX(num) | +|---:|:--------|:---------|:-------|-----------:|-----------:| +| 0 | | girl | Amy | 45426 | 2227 | +| 1 | | boy | Edward | 31290 | 1280 | +| 2 | | girl | Sophia | 18859 | 2588 | +| 3 | | girl | Amy | 14740 | 854 | +| 4 | | girl | Cindy | 14149 | 842 | +| 5 | | girl | Dawn | 11403 | 1157 | +| 6 | | boy | Edward | 9395 | 389 | +| 7 | | girl | Sophia | 7181 | 1187 | +| 8 | | girl | Dawn | 5089 | 461 | +| 9 | | boy | Tony | 3765 | 598 | +| 10 | | boy | Tony | 2673 | 247 | +| 11 | | girl | Cindy | 1218 | 217 | + """.strip() + ) + + pivoted = pivot_df( + df, + rows=["gender", "name"], + columns=["state"], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=False, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('SUM(num)', 'NULL') | ('MAX(num)', 'NULL') | +|:-------------------|-----------------------:|-----------------------:| +| ('boy', 'Edward') | 40685 | 1669 | +| ('boy', 'Tony') | 6438 | 845 | +| ('girl', 'Amy') | 60166 | 3081 | +| ('girl', 'Cindy') | 15367 | 1059 | +| ('girl', 'Dawn') | 16492 | 1618 | +| ('girl', 'Sophia') | 26040 | 3775 | + + """.strip() + ) + + # transpose_pivot + pivoted = pivot_df( + df, + rows=["gender", "name"], + columns=["state"], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=True, + combine_metrics=False, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('SUM(num)', 'boy', 'Edward') | ('SUM(num)', 'boy', 'Tony') | ('SUM(num)', 'girl', 'Amy') | ('SUM(num)', 'girl', 'Cindy') | ('SUM(num)', 'girl', 'Dawn') | ('SUM(num)', 'girl', 'Sophia') | ('MAX(num)', 'boy', 'Edward') | ('MAX(num)', 'boy', 'Tony') | ('MAX(num)', 'girl', 'Amy') | ('MAX(num)', 'girl', 'Cindy') | ('MAX(num)', 'girl', 'Dawn') | ('MAX(num)', 'girl', 'Sophia') | +|:----------|--------------------------------:|------------------------------:|------------------------------:|--------------------------------:|-------------------------------:|---------------------------------:|--------------------------------:|------------------------------:|------------------------------:|--------------------------------:|-------------------------------:|---------------------------------:| +| ('NULL',) | 40685 | 6438 | 60166 | 15367 | 16492 | 26040 | 1669 | 845 | 3081 | 1059 | 1618 | 3775 | + """.strip() + ) + + # combine_metrics + pivoted = pivot_df( + df, + rows=["gender", "name"], + columns=["state"], + metrics=["SUM(num)", "MAX(num)"], + 
aggfunc="Sum", + transpose_pivot=False, + combine_metrics=True, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('NULL', 'SUM(num)') | ('NULL', 'MAX(num)') | +|:-------------------|-----------------------:|-----------------------:| +| ('boy', 'Edward') | 40685 | 1669 | +| ('boy', 'Tony') | 6438 | 845 | +| ('girl', 'Amy') | 60166 | 3081 | +| ('girl', 'Cindy') | 15367 | 1059 | +| ('girl', 'Dawn') | 16492 | 1618 | +| ('girl', 'Sophia') | 26040 | 3775 | + """.strip() + ) + + # show totals + pivoted = pivot_df( + df, + rows=["gender", "name"], + columns=["state"], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=False, + show_rows_total=True, + show_columns_total=True, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('SUM(num)', 'NULL') | ('SUM(num)', 'Subtotal') | ('MAX(num)', 'NULL') | ('MAX(num)', 'Subtotal') | ('Total (Sum)', '') | +|:---------------------|-----------------------:|---------------------------:|-----------------------:|---------------------------:|----------------------:| +| ('boy', 'Edward') | 40685 | 40685 | 1669 | 1669 | 42354 | +| ('boy', 'Tony') | 6438 | 6438 | 845 | 845 | 7283 | +| ('boy', 'Subtotal') | 47123 | 47123 | 2514 | 2514 | 49637 | +| ('girl', 'Amy') | 60166 | 60166 | 3081 | 3081 | 63247 | +| ('girl', 'Cindy') | 15367 | 15367 | 1059 | 1059 | 16426 | +| ('girl', 'Dawn') | 16492 | 16492 | 1618 | 1618 | 18110 | +| ('girl', 'Sophia') | 26040 | 26040 | 3775 | 3775 | 29815 | +| ('girl', 'Subtotal') | 118065 | 118065 | 9533 | 9533 | 127598 | +| ('Total (Sum)', '') | 165188 | 165188 | 12047 | 12047 | 177235 | + """.strip() + ) + + # apply_metrics_on_rows + pivoted = pivot_df( + df, + rows=["gender", "name"], + columns=["state"], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=False, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=True, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('NULL',) | +|:-------------------------------|------------:| +| ('SUM(num)', 'boy', 'Edward') | 40685 | +| ('SUM(num)', 'boy', 'Tony') | 6438 | +| ('SUM(num)', 'girl', 'Amy') | 60166 | +| ('SUM(num)', 'girl', 'Cindy') | 15367 | +| ('SUM(num)', 'girl', 'Dawn') | 16492 | +| ('SUM(num)', 'girl', 'Sophia') | 26040 | +| ('MAX(num)', 'boy', 'Edward') | 1669 | +| ('MAX(num)', 'boy', 'Tony') | 845 | +| ('MAX(num)', 'girl', 'Amy') | 3081 | +| ('MAX(num)', 'girl', 'Cindy') | 1059 | +| ('MAX(num)', 'girl', 'Dawn') | 1618 | +| ('MAX(num)', 'girl', 'Sophia') | 3775 | + """.strip() + ) + + # apply_metrics_on_rows with combine_metrics + pivoted = pivot_df( + df, + rows=["gender", "name"], + columns=["state"], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=True, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=True, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('NULL',) | +|:-------------------------------|------------:| +| ('boy', 'Edward', 'SUM(num)') | 40685 | +| ('boy', 'Edward', 'MAX(num)') | 1669 | +| ('boy', 'Tony', 'SUM(num)') | 6438 | +| ('boy', 'Tony', 'MAX(num)') | 845 | +| ('girl', 'Amy', 'SUM(num)') | 60166 | +| ('girl', 'Amy', 'MAX(num)') | 3081 | +| ('girl', 'Cindy', 'SUM(num)') | 15367 | +| ('girl', 'Cindy', 'MAX(num)') | 1059 | +| ('girl', 'Dawn', 'SUM(num)') | 16492 | +| ('girl', 'Dawn', 'MAX(num)') | 1618 | +| ('girl', 'Sophia', 'SUM(num)') | 26040 | +| 
('girl', 'Sophia', 'MAX(num)') | 3775 | + """.strip() + ) + + # everything + pivoted = pivot_df( + df, + rows=["gender", "name"], + columns=["state"], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=True, + combine_metrics=True, + show_rows_total=True, + show_columns_total=True, + apply_metrics_on_rows=True, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('boy', 'Edward') | ('boy', 'Tony') | ('boy', 'Subtotal') | ('girl', 'Amy') | ('girl', 'Cindy') | ('girl', 'Dawn') | ('girl', 'Sophia') | ('girl', 'Subtotal') | ('Total (Sum)', '') | +|:---------------------|--------------------:|------------------:|----------------------:|------------------:|--------------------:|-------------------:|---------------------:|-----------------------:|----------------------:| +| ('NULL', 'SUM(num)') | 40685 | 6438 | 47123 | 60166 | 15367 | 16492 | 26040 | 118065 | 165188 | +| ('NULL', 'MAX(num)') | 1669 | 845 | 2514 | 3081 | 1059 | 1618 | 3775 | 9533 | 12047 | +| ('NULL', 'Subtotal') | 42354 | 7283 | 49637 | 63247 | 16426 | 18110 | 29815 | 127598 | 177235 | +| ('Total (Sum)', '') | 42354 | 7283 | 49637 | 63247 | 16426 | 18110 | 29815 | 127598 | 177235 | + """.strip() + ) + + # fraction + pivoted = pivot_df( + df, + rows=["gender", "name"], + columns=["state"], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum as Fraction of Columns", + transpose_pivot=False, + combine_metrics=False, + show_rows_total=False, + show_columns_total=True, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('SUM(num)', 'NULL') | ('MAX(num)', 'NULL') | +|:-------------------------------------------|-----------------------:|-----------------------:| +| ('boy', 'Edward') | 0.246295 | 0.138541 | +| ('boy', 'Tony') | 0.0389738 | 0.0701419 | +| ('boy', 'Subtotal') | 0.285269 | 0.208683 | +| ('girl', 'Amy') | 0.364227 | 0.255748 | +| ('girl', 'Cindy') | 0.0930273 | 0.0879057 | +| ('girl', 'Dawn') | 0.0998378 | 0.134307 | +| ('girl', 'Sophia') | 0.157639 | 0.313356 | +| ('girl', 'Subtotal') | 0.714731 | 0.791317 | +| ('Total (Sum as Fraction of Columns)', '') | 1 | 1 | + """.strip() + ) + + +def test_table(): + """ + Test that the table reports honor `d3NumberFormat`. + """ + df = pd.DataFrame.from_dict({"count": {0: 80679663}}) + form_data = { + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": "NULL", + "expressionType": "SIMPLE", + "filterOptionName": "filter_ameaka2efjv_rfv1et5nwng", + "isExtra": False, + "isNew": False, + "operator": "!=", + "sqlExpression": None, + "subject": "lang_at_home", + } + ], + "all_columns": [], + "color_pn": True, + "column_config": {"count": {"d3NumberFormat": ",d"}}, + "conditional_formatting": [], + "datasource": "8__table", + "extra_form_data": {}, + "granularity_sqla": "time_start", + "groupby": ["lang_at_home"], + "metrics": ["count"], + "order_by_cols": [], + "order_desc": True, + "percent_metrics": [], + "query_mode": "aggregate", + "row_limit": "15", + "server_page_length": 10, + "show_cell_bars": True, + "table_timestamp_format": "smart_date", + "time_grain_sqla": "P1D", + "time_range": "No filter", + "url_params": {}, + "viz_type": "table", + } + formatted = table(df, form_data) + assert ( + formatted.to_markdown() + == """ +| | count | +|---:|:-----------| +| 0 | 80,679,663 | + """.strip() + ) + + +def test_apply_post_process_no_form_invalid_viz_type(): + """ + Test with invalid viz type. 
It should just return the result + """ + result = {"foo": "bar"} + form_data = {"viz_type": "baz"} + assert apply_post_process(result, form_data) == result + + +def test_apply_post_process_without_result_format(): + """ + A query without result_format should raise an exception + """ + result = {"queries": [{"result_format": "foo"}]} + form_data = {"viz_type": "pivot_table"} + + with raises(Exception) as ex: + apply_post_process(result, form_data) + + assert ex.match("Result format foo not supported") == True + + +def test_apply_post_process_json_format(): + """ + It should be able to process json results + """ + + result = { + "queries": [ + { + "result_format": ChartDataResultFormat.JSON, + "data": { + "result": [ + { + "data": [{"COUNT(is_software_dev)": 4725}], + "colnames": ["COUNT(is_software_dev)"], + "coltypes": [0], + } + ] + }, + } + ] + } + form_data = { + "datasource": "19__table", + "viz_type": "pivot_table_v2", + "slice_id": 69, + "url_params": {}, + "granularity_sqla": "time_start", + "time_grain_sqla": "P1D", + "time_range": "No filter", + "groupbyColumns": [], + "groupbyRows": [], + "metrics": [ + { + "aggregate": "COUNT", + "column": { + "column_name": "is_software_dev", + "description": None, + "expression": None, + "filterable": True, + "groupby": True, + "id": 1463, + "is_dttm": False, + "python_date_format": None, + "type": "DOUBLE PRECISION", + "verbose_name": None, + }, + "expressionType": "SIMPLE", + "hasCustomLabel": False, + "isNew": False, + "label": "COUNT(is_software_dev)", + "optionName": "metric_9i1kctig9yr_sizo6ihd2o", + "sqlExpression": None, + } + ], + "metricsLayout": "COLUMNS", + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": "Currently A Developer", + "expressionType": "SIMPLE", + "filterOptionName": "filter_fvi0jg9aii_2lekqrhy7qk", + "isExtra": False, + "isNew": False, + "operator": "==", + "sqlExpression": None, + "subject": "developer_type", + } + ], + "row_limit": 10000, + "order_desc": True, + "aggregateFunction": "Sum", + "valueFormat": "SMART_NUMBER", + "date_format": "smart_date", + "rowOrder": "key_a_to_z", + "colOrder": "key_a_to_z", + "extra_form_data": {}, + "force": False, + "result_format": "json", + "result_type": "results", + } + + assert apply_post_process(result, form_data) == { + "queries": [ + { + "result_format": ChartDataResultFormat.JSON, + "data": { + "result": { + "Total (Sum)": { + "data": [{"COUNT(is_software_dev)": 4725}], + "colnames": ["COUNT(is_software_dev)"], + "coltypes": [0], + } + } + }, + "colnames": [("result",)], + "indexnames": [("Total (Sum)",)], + "coltypes": [GenericDataType.STRING], + "rowcount": 1, + } + ] + } + + +def test_apply_post_process_csv_format(): + """ + It should be able to process csv results + """ + + result = { + "queries": [ + { + "result_format": ChartDataResultFormat.CSV, + "data": """ +COUNT(is_software_dev) +4725 +""", + } + ] + } + form_data = { + "datasource": "19__table", + "viz_type": "pivot_table_v2", + "slice_id": 69, + "url_params": {}, + "granularity_sqla": "time_start", + "time_grain_sqla": "P1D", + "time_range": "No filter", + "groupbyColumns": [], + "groupbyRows": [], + "metrics": [ + { + "aggregate": "COUNT", + "column": { + "column_name": "is_software_dev", + "description": None, + "expression": None, + "filterable": True, + "groupby": True, + "id": 1463, + "is_dttm": False, + "python_date_format": None, + "type": "DOUBLE PRECISION", + "verbose_name": None, + }, + "expressionType": "SIMPLE", + "hasCustomLabel": False, + "isNew": False, + "label": 
"COUNT(is_software_dev)", + "optionName": "metric_9i1kctig9yr_sizo6ihd2o", + "sqlExpression": None, + } + ], + "metricsLayout": "COLUMNS", + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": "Currently A Developer", + "expressionType": "SIMPLE", + "filterOptionName": "filter_fvi0jg9aii_2lekqrhy7qk", + "isExtra": False, + "isNew": False, + "operator": "==", + "sqlExpression": None, + "subject": "developer_type", + } + ], + "row_limit": 10000, + "order_desc": True, + "aggregateFunction": "Sum", + "valueFormat": "SMART_NUMBER", + "date_format": "smart_date", + "rowOrder": "key_a_to_z", + "colOrder": "key_a_to_z", + "extra_form_data": {}, + "force": False, + "result_format": "json", + "result_type": "results", + } + + assert apply_post_process(result, form_data) == { + "queries": [ + { + "result_format": ChartDataResultFormat.CSV, + "data": ",COUNT(is_software_dev)\nTotal (Sum),4725\n", + "colnames": [("COUNT(is_software_dev)",)], + "indexnames": [("Total (Sum)",)], + "coltypes": [GenericDataType.NUMERIC], + "rowcount": 1, + } + ] + } + + +def test_apply_post_process_csv_format_empty_string(): + """ + It should be able to process csv results with no data + """ + + result = {"queries": [{"result_format": ChartDataResultFormat.CSV, "data": ""}]} + form_data = { + "datasource": "19__table", + "viz_type": "pivot_table_v2", + "slice_id": 69, + "url_params": {}, + "granularity_sqla": "time_start", + "time_grain_sqla": "P1D", + "time_range": "No filter", + "groupbyColumns": [], + "groupbyRows": [], + "metrics": [ + { + "aggregate": "COUNT", + "column": { + "column_name": "is_software_dev", + "description": None, + "expression": None, + "filterable": True, + "groupby": True, + "id": 1463, + "is_dttm": False, + "python_date_format": None, + "type": "DOUBLE PRECISION", + "verbose_name": None, + }, + "expressionType": "SIMPLE", + "hasCustomLabel": False, + "isNew": False, + "label": "COUNT(is_software_dev)", + "optionName": "metric_9i1kctig9yr_sizo6ihd2o", + "sqlExpression": None, + } + ], + "metricsLayout": "COLUMNS", + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": "Currently A Developer", + "expressionType": "SIMPLE", + "filterOptionName": "filter_fvi0jg9aii_2lekqrhy7qk", + "isExtra": False, + "isNew": False, + "operator": "==", + "sqlExpression": None, + "subject": "developer_type", + } + ], + "row_limit": 10000, + "order_desc": True, + "aggregateFunction": "Sum", + "valueFormat": "SMART_NUMBER", + "date_format": "smart_date", + "rowOrder": "key_a_to_z", + "colOrder": "key_a_to_z", + "extra_form_data": {}, + "force": False, + "result_format": "json", + "result_type": "results", + } + + assert apply_post_process(result, form_data) == { + "queries": [{"result_format": ChartDataResultFormat.CSV, "data": ""}] + } + + +def test_apply_post_process_csv_format_no_data(): + """ + It should be able to process csv results with no data + """ + + result = {"queries": [{"result_format": ChartDataResultFormat.CSV, "data": None}]} + form_data = { + "datasource": "19__table", + "viz_type": "pivot_table_v2", + "slice_id": 69, + "url_params": {}, + "granularity_sqla": "time_start", + "time_grain_sqla": "P1D", + "time_range": "No filter", + "groupbyColumns": [], + "groupbyRows": [], + "metrics": [ + { + "aggregate": "COUNT", + "column": { + "column_name": "is_software_dev", + "description": None, + "expression": None, + "filterable": True, + "groupby": True, + "id": 1463, + "is_dttm": False, + "python_date_format": None, + "type": "DOUBLE PRECISION", + "verbose_name": None, + }, + 
"expressionType": "SIMPLE", + "hasCustomLabel": False, + "isNew": False, + "label": "COUNT(is_software_dev)", + "optionName": "metric_9i1kctig9yr_sizo6ihd2o", + "sqlExpression": None, + } + ], + "metricsLayout": "COLUMNS", + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": "Currently A Developer", + "expressionType": "SIMPLE", + "filterOptionName": "filter_fvi0jg9aii_2lekqrhy7qk", + "isExtra": False, + "isNew": False, + "operator": "==", + "sqlExpression": None, + "subject": "developer_type", + } + ], + "row_limit": 10000, + "order_desc": True, + "aggregateFunction": "Sum", + "valueFormat": "SMART_NUMBER", + "date_format": "smart_date", + "rowOrder": "key_a_to_z", + "colOrder": "key_a_to_z", + "extra_form_data": {}, + "force": False, + "result_format": "json", + "result_type": "results", + } + + assert apply_post_process(result, form_data) == { + "queries": [{"result_format": ChartDataResultFormat.CSV, "data": None}] + } + + +def test_apply_post_process_csv_format_no_data_multiple_queries(): + """ + It should be able to process csv results multiple queries if one query has no data + """ + + result = { + "queries": [ + {"result_format": ChartDataResultFormat.CSV, "data": ""}, + { + "result_format": ChartDataResultFormat.CSV, + "data": """ +COUNT(is_software_dev) +4725 +""", + }, + ] + } + form_data = { + "datasource": "19__table", + "viz_type": "pivot_table_v2", + "slice_id": 69, + "url_params": {}, + "granularity_sqla": "time_start", + "time_grain_sqla": "P1D", + "time_range": "No filter", + "groupbyColumns": [], + "groupbyRows": [], + "metrics": [ + { + "aggregate": "COUNT", + "column": { + "column_name": "is_software_dev", + "description": None, + "expression": None, + "filterable": True, + "groupby": True, + "id": 1463, + "is_dttm": False, + "python_date_format": None, + "type": "DOUBLE PRECISION", + "verbose_name": None, + }, + "expressionType": "SIMPLE", + "hasCustomLabel": False, + "isNew": False, + "label": "COUNT(is_software_dev)", + "optionName": "metric_9i1kctig9yr_sizo6ihd2o", + "sqlExpression": None, + } + ], + "metricsLayout": "COLUMNS", + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": "Currently A Developer", + "expressionType": "SIMPLE", + "filterOptionName": "filter_fvi0jg9aii_2lekqrhy7qk", + "isExtra": False, + "isNew": False, + "operator": "==", + "sqlExpression": None, + "subject": "developer_type", + } + ], + "row_limit": 10000, + "order_desc": True, + "aggregateFunction": "Sum", + "valueFormat": "SMART_NUMBER", + "date_format": "smart_date", + "rowOrder": "key_a_to_z", + "colOrder": "key_a_to_z", + "extra_form_data": {}, + "force": False, + "result_format": "json", + "result_type": "results", + } + + assert apply_post_process(result, form_data) == { + "queries": [ + {"result_format": ChartDataResultFormat.CSV, "data": ""}, + { + "result_format": ChartDataResultFormat.CSV, + "data": ",COUNT(is_software_dev)\nTotal (Sum),4725\n", + "colnames": [("COUNT(is_software_dev)",)], + "indexnames": [("Total (Sum)",)], + "coltypes": [GenericDataType.NUMERIC], + "rowcount": 1, + }, + ] + } + + +def test_apply_post_process_json_format_empty_string(): + """ + It should be able to process json results with no data + """ + + result = {"queries": [{"result_format": ChartDataResultFormat.JSON, "data": ""}]} + form_data = { + "datasource": "19__table", + "viz_type": "pivot_table_v2", + "slice_id": 69, + "url_params": {}, + "granularity_sqla": "time_start", + "time_grain_sqla": "P1D", + "time_range": "No filter", + "groupbyColumns": [], + "groupbyRows": [], + 
"metrics": [ + { + "aggregate": "COUNT", + "column": { + "column_name": "is_software_dev", + "description": None, + "expression": None, + "filterable": True, + "groupby": True, + "id": 1463, + "is_dttm": False, + "python_date_format": None, + "type": "DOUBLE PRECISION", + "verbose_name": None, + }, + "expressionType": "SIMPLE", + "hasCustomLabel": False, + "isNew": False, + "label": "COUNT(is_software_dev)", + "optionName": "metric_9i1kctig9yr_sizo6ihd2o", + "sqlExpression": None, + } + ], + "metricsLayout": "COLUMNS", + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": "Currently A Developer", + "expressionType": "SIMPLE", + "filterOptionName": "filter_fvi0jg9aii_2lekqrhy7qk", + "isExtra": False, + "isNew": False, + "operator": "==", + "sqlExpression": None, + "subject": "developer_type", + } + ], + "row_limit": 10000, + "order_desc": True, + "aggregateFunction": "Sum", + "valueFormat": "SMART_NUMBER", + "date_format": "smart_date", + "rowOrder": "key_a_to_z", + "colOrder": "key_a_to_z", + "extra_form_data": {}, + "force": False, + "result_format": "json", + "result_type": "results", + } + + assert apply_post_process(result, form_data) == { + "queries": [{"result_format": ChartDataResultFormat.JSON, "data": ""}] + } + + +def test_apply_post_process_json_format_data_is_none(): + """ + It should be able to process json results with no data + """ + + result = {"queries": [{"result_format": ChartDataResultFormat.JSON, "data": None}]} + form_data = { + "datasource": "19__table", + "viz_type": "pivot_table_v2", + "slice_id": 69, + "url_params": {}, + "granularity_sqla": "time_start", + "time_grain_sqla": "P1D", + "time_range": "No filter", + "groupbyColumns": [], + "groupbyRows": [], + "metrics": [ + { + "aggregate": "COUNT", + "column": { + "column_name": "is_software_dev", + "description": None, + "expression": None, + "filterable": True, + "groupby": True, + "id": 1463, + "is_dttm": False, + "python_date_format": None, + "type": "DOUBLE PRECISION", + "verbose_name": None, + }, + "expressionType": "SIMPLE", + "hasCustomLabel": False, + "isNew": False, + "label": "COUNT(is_software_dev)", + "optionName": "metric_9i1kctig9yr_sizo6ihd2o", + "sqlExpression": None, + } + ], + "metricsLayout": "COLUMNS", + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": "Currently A Developer", + "expressionType": "SIMPLE", + "filterOptionName": "filter_fvi0jg9aii_2lekqrhy7qk", + "isExtra": False, + "isNew": False, + "operator": "==", + "sqlExpression": None, + "subject": "developer_type", + } + ], + "row_limit": 10000, + "order_desc": True, + "aggregateFunction": "Sum", + "valueFormat": "SMART_NUMBER", + "date_format": "smart_date", + "rowOrder": "key_a_to_z", + "colOrder": "key_a_to_z", + "extra_form_data": {}, + "force": False, + "result_format": "json", + "result_type": "results", + } + + assert apply_post_process(result, form_data) == { + "queries": [{"result_format": ChartDataResultFormat.JSON, "data": None}] + } + + +def test_apply_post_process_verbose_map(session: Session): + from superset.connectors.sqla.models import SqlaTable, SqlMetric + from superset.models.core import Database + + engine = session.get_bind() + SqlaTable.metadata.create_all(engine) # pylint: disable=no-member + db = Database(database_name="my_database", sqlalchemy_uri="sqlite://") + sqla_table = SqlaTable( + table_name="my_sqla_table", + columns=[], + metrics=[ + SqlMetric( + metric_name="count", + verbose_name="COUNT(*)", + metric_type="count", + expression="COUNT(*)", + ) + ], + database=db, + ) + + result = { + 
"queries": [ + { + "result_format": ChartDataResultFormat.JSON, + "data": [{"count": 4725}], + } + ] + } + form_data = { + "datasource": "19__table", + "viz_type": "pivot_table_v2", + "slice_id": 69, + "url_params": {}, + "granularity_sqla": "time_start", + "time_grain_sqla": "P1D", + "time_range": "No filter", + "groupbyColumns": [], + "groupbyRows": [], + "metrics": ["COUNT(*)"], + "metricsLayout": "COLUMNS", + "row_limit": 10000, + "order_desc": True, + "valueFormat": "SMART_NUMBER", + "date_format": "smart_date", + "rowOrder": "key_a_to_z", + "colOrder": "key_a_to_z", + "extra_form_data": {}, + "force": False, + "result_format": "json", + "result_type": "results", + } + + assert apply_post_process(result, form_data, datasource=sqla_table) == { + "queries": [ + { + "result_format": ChartDataResultFormat.JSON, + "data": {"COUNT(*)": {"Total (Sum)": 4725}}, + "colnames": [("COUNT(*)",)], + "indexnames": [("Total (Sum)",)], + "coltypes": [GenericDataType.NUMERIC], + "rowcount": 1, + } + ] + } diff --git a/tests/unit_tests/columns/__init__.py b/tests/unit_tests/columns/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/columns/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/columns/test_models.py b/tests/unit_tests/columns/test_models.py new file mode 100644 index 0000000000000..068557e7a6a7f --- /dev/null +++ b/tests/unit_tests/columns/test_models.py @@ -0,0 +1,57 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# pylint: disable=import-outside-toplevel, unused-argument + +from sqlalchemy.orm.session import Session + + +def test_column_model(session: Session) -> None: + """ + Test basic attributes of a ``Column``. 
+ """ + from superset.columns.models import Column + + engine = session.get_bind() + Column.metadata.create_all(engine) # pylint: disable=no-member + + column = Column( + name="ds", + type="TIMESTAMP", + expression="ds", + ) + + session.add(column) + session.flush() + + assert column.id == 1 + assert column.uuid is not None + + assert column.name == "ds" + assert column.type == "TIMESTAMP" + assert column.expression == "ds" + + # test that default values are set correctly + assert column.description is None + assert column.warning_text is None + assert column.unit is None + assert column.is_temporal is False + assert column.is_spatial is False + assert column.is_partition is False + assert column.is_aggregation is False + assert column.is_additive is False + assert column.is_increase_desired is True diff --git a/tests/unit_tests/commands/__init__.py b/tests/unit_tests/commands/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/commands/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/commands/export_test.py b/tests/unit_tests/commands/export_test.py new file mode 100644 index 0000000000000..24fa491664042 --- /dev/null +++ b/tests/unit_tests/commands/export_test.py @@ -0,0 +1,94 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# pylint: disable=invalid-name, unused-argument, import-outside-toplevel + +from freezegun import freeze_time +from pytest_mock import MockFixture + + +def test_export_assets_command(mocker: MockFixture) -> None: + """ + Test that all assets are exported correctly. 
+ """ + from superset.commands.export.assets import ExportAssetsCommand + + ExportDatabasesCommand = mocker.patch( + "superset.commands.export.assets.ExportDatabasesCommand" + ) + ExportDatabasesCommand.return_value.run.return_value = [ + ( + "metadata.yaml", + "version: 1.0.0\ntype: Database\ntimestamp: '2022-01-01T00:00:00+00:00'\n", + ), + ("databases/example.yaml", ""), + ] + ExportDatasetsCommand = mocker.patch( + "superset.commands.export.assets.ExportDatasetsCommand" + ) + ExportDatasetsCommand.return_value.run.return_value = [ + ( + "metadata.yaml", + "version: 1.0.0\ntype: Dataset\ntimestamp: '2022-01-01T00:00:00+00:00'\n", + ), + ("datasets/example/dataset.yaml", ""), + ] + ExportChartsCommand = mocker.patch( + "superset.commands.export.assets.ExportChartsCommand" + ) + ExportChartsCommand.return_value.run.return_value = [ + ( + "metadata.yaml", + "version: 1.0.0\ntype: Slice\ntimestamp: '2022-01-01T00:00:00+00:00'\n", + ), + ("charts/pie.yaml", ""), + ] + ExportDashboardsCommand = mocker.patch( + "superset.commands.export.assets.ExportDashboardsCommand" + ) + ExportDashboardsCommand.return_value.run.return_value = [ + ( + "metadata.yaml", + "version: 1.0.0\ntype: Dashboard\ntimestamp: '2022-01-01T00:00:00+00:00'\n", + ), + ("dashboards/sales.yaml", ""), + ] + ExportSavedQueriesCommand = mocker.patch( + "superset.commands.export.assets.ExportSavedQueriesCommand" + ) + ExportSavedQueriesCommand.return_value.run.return_value = [ + ( + "metadata.yaml", + "version: 1.0.0\ntype: SavedQuery\ntimestamp: '2022-01-01T00:00:00+00:00'\n", + ), + ("queries/example/metric.yaml", ""), + ] + + with freeze_time("2022-01-01T00:00:00Z"): + command = ExportAssetsCommand() + output = list(command.run()) + + assert output == [ + ( + "metadata.yaml", + "version: 1.0.0\ntype: assets\ntimestamp: '2022-01-01T00:00:00+00:00'\n", + ), + ("databases/example.yaml", ""), + ("datasets/example/dataset.yaml", ""), + ("charts/pie.yaml", ""), + ("dashboards/sales.yaml", ""), + ("queries/example/metric.yaml", ""), + ] diff --git a/tests/unit_tests/commands/importers/__init__.py b/tests/unit_tests/commands/importers/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/commands/importers/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/commands/importers/v1/__init__.py b/tests/unit_tests/commands/importers/v1/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/commands/importers/v1/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/commands/importers/v1/assets_test.py b/tests/unit_tests/commands/importers/v1/assets_test.py new file mode 100644 index 0000000000000..1a345ff2b913b --- /dev/null +++ b/tests/unit_tests/commands/importers/v1/assets_test.py @@ -0,0 +1,131 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import copy + +from sqlalchemy.orm.session import Session +from sqlalchemy.sql import select + +from tests.unit_tests.fixtures.assets_configs import ( + charts_config_1, + charts_config_2, + dashboards_config_1, + dashboards_config_2, + databases_config, + datasets_config, +) + + +def test_import_new_assets(session: Session) -> None: + """ + Test that all new assets are imported correctly. + """ + from superset.commands.importers.v1.assets import ImportAssetsCommand + from superset.models.dashboard import dashboard_slices + from superset.models.slice import Slice + + engine = session.get_bind() + Slice.metadata.create_all(engine) # pylint: disable=no-member + configs = { + **copy.deepcopy(databases_config), + **copy.deepcopy(datasets_config), + **copy.deepcopy(charts_config_1), + **copy.deepcopy(dashboards_config_1), + } + expected_number_of_dashboards = len(dashboards_config_1) + expected_number_of_charts = len(charts_config_1) + + ImportAssetsCommand._import(session, configs) + dashboard_ids = session.scalars( + select(dashboard_slices.c.dashboard_id).distinct() + ).all() + chart_ids = session.scalars(select(dashboard_slices.c.slice_id)).all() + + assert len(chart_ids) == expected_number_of_charts + assert len(dashboard_ids) == expected_number_of_dashboards + + +def test_import_adds_dashboard_charts(session: Session) -> None: + """ + Test that existing dashboards are updated with new charts. 
+ """ + from superset.commands.importers.v1.assets import ImportAssetsCommand + from superset.models.dashboard import dashboard_slices + from superset.models.slice import Slice + + engine = session.get_bind() + Slice.metadata.create_all(engine) # pylint: disable=no-member + base_configs = { + **copy.deepcopy(databases_config), + **copy.deepcopy(datasets_config), + **copy.deepcopy(charts_config_2), + **copy.deepcopy(dashboards_config_2), + } + new_configs = { + **copy.deepcopy(databases_config), + **copy.deepcopy(datasets_config), + **copy.deepcopy(charts_config_1), + **copy.deepcopy(dashboards_config_1), + } + expected_number_of_dashboards = len(dashboards_config_1) + expected_number_of_charts = len(charts_config_1) + + ImportAssetsCommand._import(session, base_configs) + ImportAssetsCommand._import(session, new_configs) + dashboard_ids = session.scalars( + select(dashboard_slices.c.dashboard_id).distinct() + ).all() + chart_ids = session.scalars(select(dashboard_slices.c.slice_id)).all() + + assert len(chart_ids) == expected_number_of_charts + assert len(dashboard_ids) == expected_number_of_dashboards + + +def test_import_removes_dashboard_charts(session: Session) -> None: + """ + Test that existing dashboards are updated without old charts. + """ + from superset.commands.importers.v1.assets import ImportAssetsCommand + from superset.models.dashboard import dashboard_slices + from superset.models.slice import Slice + + engine = session.get_bind() + Slice.metadata.create_all(engine) # pylint: disable=no-member + base_configs = { + **copy.deepcopy(databases_config), + **copy.deepcopy(datasets_config), + **copy.deepcopy(charts_config_1), + **copy.deepcopy(dashboards_config_1), + } + new_configs = { + **copy.deepcopy(databases_config), + **copy.deepcopy(datasets_config), + **copy.deepcopy(charts_config_2), + **copy.deepcopy(dashboards_config_2), + } + expected_number_of_dashboards = len(dashboards_config_2) + expected_number_of_charts = len(charts_config_2) + + ImportAssetsCommand._import(session, base_configs) + ImportAssetsCommand._import(session, new_configs) + dashboard_ids = session.scalars( + select(dashboard_slices.c.dashboard_id).distinct() + ).all() + chart_ids = session.scalars(select(dashboard_slices.c.slice_id)).all() + + assert len(chart_ids) == expected_number_of_charts + assert len(dashboard_ids) == expected_number_of_dashboards diff --git a/tests/unit_tests/common/__init__.py b/tests/unit_tests/common/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/common/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/tests/unit_tests/common/test_dataframe_utils.py b/tests/unit_tests/common/test_dataframe_utils.py new file mode 100644 index 0000000000000..01fa4224c3c83 --- /dev/null +++ b/tests/unit_tests/common/test_dataframe_utils.py @@ -0,0 +1,50 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import datetime + +import pandas as pd + +from superset.common.utils import dataframe_utils + + +def test_is_datetime_series(): + assert not dataframe_utils.is_datetime_series(None) + assert not dataframe_utils.is_datetime_series(pd.DataFrame({"foo": [1]})) + assert not dataframe_utils.is_datetime_series(pd.Series([1, 2, 3])) + assert not dataframe_utils.is_datetime_series(pd.Series(["1", "2", "3"])) + assert not dataframe_utils.is_datetime_series(pd.Series()) + assert not dataframe_utils.is_datetime_series(pd.Series([None, None])) + assert dataframe_utils.is_datetime_series( + pd.Series([datetime.date(2018, 1, 1), datetime.date(2018, 1, 2), None]) + ) + assert dataframe_utils.is_datetime_series( + pd.Series([datetime.date(2018, 1, 1), datetime.date(2018, 1, 2)]) + ) + assert dataframe_utils.is_datetime_series( + pd.Series([datetime.datetime(2018, 1, 1), datetime.datetime(2018, 1, 2), None]) + ) + assert dataframe_utils.is_datetime_series( + pd.Series([datetime.datetime(2018, 1, 1), datetime.datetime(2018, 1, 2)]) + ) + assert dataframe_utils.is_datetime_series( + pd.date_range(datetime.date(2018, 1, 1), datetime.date(2018, 2, 1)).to_series() + ) + assert dataframe_utils.is_datetime_series( + pd.date_range( + datetime.datetime(2018, 1, 1), datetime.datetime(2018, 2, 1) + ).to_series() + ) diff --git a/tests/unit_tests/common/test_query_object_factory.py b/tests/unit_tests/common/test_query_object_factory.py new file mode 100644 index 0000000000000..4fd906f648ee9 --- /dev/null +++ b/tests/unit_tests/common/test_query_object_factory.py @@ -0,0 +1,114 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
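[Editor's note] For context on the ``is_datetime_series`` assertions in ``test_dataframe_utils.py`` above: a minimal reference implementation consistent with those assertions might look like the sketch below. This is illustrative only and is not the actual ``superset.common.utils.dataframe_utils`` code.

import datetime
from typing import Any

import pandas as pd


def is_datetime_series_sketch(series: Any) -> bool:
    # Reject non-Series inputs, empty series, and all-null series.
    if not isinstance(series, pd.Series) or series.empty or series.isnull().all():
        return False
    # Native datetime64 series (e.g. pd.date_range(...).to_series()) qualify.
    if pd.api.types.is_datetime64_any_dtype(series):
        return True
    # Object series qualify when every non-null value is a date or datetime.
    return all(
        isinstance(value, (datetime.date, datetime.datetime))
        for value in series.dropna()
    )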
+from typing import Any, Dict, Optional +from unittest.mock import Mock, patch + +from pytest import fixture, mark + +from superset.common.query_object_factory import QueryObjectFactory +from tests.common.query_context_generator import QueryContextGenerator + + +def create_app_config() -> Dict[str, Any]: + return { + "ROW_LIMIT": 5000, + "DEFAULT_RELATIVE_START_TIME": "today", + "DEFAULT_RELATIVE_END_TIME": "today", + "SAMPLES_ROW_LIMIT": 1000, + "SQL_MAX_ROW": 100000, + } + + +@fixture +def app_config() -> Dict[str, Any]: + return create_app_config().copy() + + +@fixture +def session_factory() -> Mock: + return Mock() + + +@fixture +def connector_registry() -> Mock: + return Mock(spec=["get_datasource"]) + + +def apply_max_row_limit(limit: int, max_limit: Optional[int] = None) -> int: + if max_limit is None: + max_limit = create_app_config()["SQL_MAX_ROW"] + if limit != 0: + return min(max_limit, limit) + return max_limit + + +@fixture +def query_object_factory( + app_config: Dict[str, Any], connector_registry: Mock, session_factory: Mock +) -> QueryObjectFactory: + import superset.common.query_object_factory as mod + + mod.apply_max_row_limit = apply_max_row_limit + return QueryObjectFactory(app_config, connector_registry, session_factory) + + +@fixture +def raw_query_context() -> Dict[str, Any]: + return QueryContextGenerator().generate("birth_names") + + +class TestQueryObjectFactory: + def test_query_context_limit_and_offset_defaults( + self, + query_object_factory: QueryObjectFactory, + raw_query_context: Dict[str, Any], + ): + raw_query_object = raw_query_context["queries"][0] + raw_query_object.pop("row_limit", None) + raw_query_object.pop("row_offset", None) + query_object = query_object_factory.create( + raw_query_context["result_type"], **raw_query_object + ) + assert query_object.row_limit == 5000 + assert query_object.row_offset == 0 + + def test_query_context_limit( + self, + query_object_factory: QueryObjectFactory, + raw_query_context: Dict[str, Any], + ): + raw_query_object = raw_query_context["queries"][0] + raw_query_object["row_limit"] = 100 + raw_query_object["row_offset"] = 200 + query_object = query_object_factory.create( + raw_query_context["result_type"], **raw_query_object + ) + + assert query_object.row_limit == 100 + assert query_object.row_offset == 200 + + def test_query_context_null_post_processing_op( + self, + query_object_factory: QueryObjectFactory, + raw_query_context: Dict[str, Any], + ): + raw_query_object = raw_query_context["queries"][0] + raw_query_object["post_processing"] = [None] + query_object = query_object_factory.create( + raw_query_context["result_type"], **raw_query_object + ) + assert query_object.post_processing == [] diff --git a/tests/unit_tests/common/test_time_range_utils.py b/tests/unit_tests/common/test_time_range_utils.py new file mode 100644 index 0000000000000..bde1bd4befc0a --- /dev/null +++ b/tests/unit_tests/common/test_time_range_utils.py @@ -0,0 +1,94 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from datetime import datetime +from unittest import mock + +import pytest + +from superset.common.utils.time_range_utils import ( + get_since_until_from_query_object, + get_since_until_from_time_range, +) + + +def test__get_since_until_from_time_range(): + assert get_since_until_from_time_range(time_range="2001 : 2002") == ( + datetime(2001, 1, 1), + datetime(2002, 1, 1), + ) + assert get_since_until_from_time_range( + time_range="2001 : 2002", time_shift="8 hours ago" + ) == ( + datetime(2000, 12, 31, 16, 0, 0), + datetime(2001, 12, 31, 16, 0, 0), + ) + with mock.patch( + "superset.utils.date_parser.EvalDateTruncFunc.eval", + return_value=datetime(2000, 1, 1, 0, 0, 0), + ): + assert ( + get_since_until_from_time_range( + time_range="Last year", + extras={ + "relative_end": "2100", + }, + ) + )[1] == datetime(2100, 1, 1, 0, 0) + with mock.patch( + "superset.utils.date_parser.EvalDateTruncFunc.eval", + return_value=datetime(2000, 1, 1, 0, 0, 0), + ): + assert ( + get_since_until_from_time_range( + time_range="Next year", + extras={ + "relative_start": "2000", + }, + ) + )[0] == datetime(2000, 1, 1, 0, 0) + + +@pytest.mark.query_object( + { + "time_range": "2001 : 2002", + "time_shift": "8 hours ago", + } +) +def test__since_until_from_time_range(dummy_query_object): + assert get_since_until_from_query_object(dummy_query_object) == ( + datetime(2000, 12, 31, 16, 0, 0), + datetime(2001, 12, 31, 16, 0, 0), + ) + + +@pytest.mark.query_object( + { + "filters": [{"col": "dttm", "op": "TEMPORAL_RANGE", "val": "2001 : 2002"}], + "columns": [ + { + "columnType": "BASE_AXIS", + "label": "dttm", + "sqlExpression": "dttm", + } + ], + } +) +def test__since_until_from_adhoc_filters(dummy_query_object): + assert get_since_until_from_query_object(dummy_query_object) == ( + datetime(2001, 1, 1, 0, 0, 0), + datetime(2002, 1, 1, 0, 0, 0), + ) diff --git a/tests/unit_tests/config_test.py b/tests/unit_tests/config_test.py new file mode 100644 index 0000000000000..021193a6cd36e --- /dev/null +++ b/tests/unit_tests/config_test.py @@ -0,0 +1,330 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
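[Editor's note] The ``pytest.mark.query_object`` and ``pytest.mark.result_type`` markers used by ``test_time_range_utils.py`` above are custom markers consumed by the ``dummy_query_object`` fixture added in ``tests/unit_tests/conftest.py`` later in this diff. Custom markers normally need to be registered somewhere, otherwise pytest emits ``PytestUnknownMarkWarning``; a hypothetical registration, shown only for reference, would be:

# Hypothetical conftest.py hook; the repository may already register these
# markers elsewhere (e.g. in setup.cfg or pytest.ini).
def pytest_configure(config) -> None:
    config.addinivalue_line(
        "markers", "query_object: raw query_object payload for dummy_query_object"
    )
    config.addinivalue_line(
        "markers", "result_type: ChartDataResultType passed to dummy_query_object"
    )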
+# pylint: disable=import-outside-toplevel, unused-argument, redefined-outer-name, invalid-name + +from functools import partial +from typing import Any, Dict, TYPE_CHECKING + +import pytest +from pytest_mock import MockerFixture +from sqlalchemy.orm.session import Session + +if TYPE_CHECKING: + from superset.connectors.sqla.models import SqlaTable + +FULL_DTTM_DEFAULTS_EXAMPLE = { + "main_dttm_col": "id", + "dttm_columns": { + "dttm": { + "python_date_format": "epoch_s", + "expression": "CAST(dttm as INTEGER)", + }, + "id": {"python_date_format": "epoch_ms"}, + "month": { + "python_date_format": "%Y-%m-%d", + "expression": ( + "CASE WHEN length(month) = 7 THEN month || '-01' ELSE month END" + ), + }, + }, +} + + +def apply_dttm_defaults(table: "SqlaTable", dttm_defaults: Dict[str, Any]) -> None: + """Applies dttm defaults to the table, mutates in place.""" + for dbcol in table.columns: + # Set is_dttm is column is listed in dttm_columns. + if dbcol.column_name in dttm_defaults.get("dttm_columns", {}): + dbcol.is_dttm = True + + # Skip non dttm columns. + if dbcol.column_name not in dttm_defaults.get("dttm_columns", {}): + continue + + # Set table main_dttm_col. + if dbcol.column_name == dttm_defaults.get("main_dttm_col"): + table.main_dttm_col = dbcol.column_name + + # Apply defaults if empty. + dttm_column_defaults = dttm_defaults.get("dttm_columns", {}).get( + dbcol.column_name, {} + ) + dbcol.is_dttm = True + if ( + not dbcol.python_date_format + and "python_date_format" in dttm_column_defaults + ): + dbcol.python_date_format = dttm_column_defaults["python_date_format"] + if not dbcol.expression and "expression" in dttm_column_defaults: + dbcol.expression = dttm_column_defaults["expression"] + + +@pytest.fixture +def test_table(session: Session) -> "SqlaTable": + """ + Fixture that generates an in-memory table. + """ + from superset.connectors.sqla.models import SqlaTable, TableColumn + from superset.models.core import Database + + engine = session.get_bind() + SqlaTable.metadata.create_all(engine) # pylint: disable=no-member + + columns = [ + TableColumn(column_name="ds", is_dttm=1, type="TIMESTAMP"), + TableColumn(column_name="event_time", is_dttm=1, type="TIMESTAMP"), + TableColumn(column_name="id", type="INTEGER"), + TableColumn(column_name="dttm", type="INTEGER"), + TableColumn(column_name="duration_ms", type="INTEGER"), + ] + + return SqlaTable( + table_name="test_table", + columns=columns, + metrics=[], + main_dttm_col=None, + database=Database(database_name="my_database", sqlalchemy_uri="sqlite://"), + ) + + +def test_main_dttm_col(mocker: MockerFixture, test_table: "SqlaTable") -> None: + """ + Test the ``SQLA_TABLE_MUTATOR`` config. 
+ """ + dttm_defaults = { + "main_dttm_col": "event_time", + "dttm_columns": {"ds": {}, "event_time": {}}, + } + mocker.patch( + "superset.connectors.sqla.models.config", + new={ + "SQLA_TABLE_MUTATOR": partial( + apply_dttm_defaults, + dttm_defaults=dttm_defaults, + ) + }, + ) + mocker.patch( + "superset.connectors.sqla.models.get_physical_table_metadata", + return_value=[ + {"name": "ds", "type": "TIMESTAMP", "is_dttm": True}, + {"name": "event_time", "type": "TIMESTAMP", "is_dttm": True}, + {"name": "id", "type": "INTEGER", "is_dttm": False}, + ], + ) + + assert test_table.main_dttm_col is None + test_table.fetch_metadata() + assert test_table.main_dttm_col == "event_time" + + +def test_main_dttm_col_nonexistent( + mocker: MockerFixture, + test_table: "SqlaTable", +) -> None: + """ + Test the ``SQLA_TABLE_MUTATOR`` config when main datetime column doesn't exist. + """ + dttm_defaults = { + "main_dttm_col": "nonexistent", + } + mocker.patch( + "superset.connectors.sqla.models.config", + new={ + "SQLA_TABLE_MUTATOR": partial( + apply_dttm_defaults, + dttm_defaults=dttm_defaults, + ) + }, + ) + mocker.patch( + "superset.connectors.sqla.models.get_physical_table_metadata", + return_value=[ + {"name": "ds", "type": "TIMESTAMP", "is_dttm": True}, + {"name": "event_time", "type": "TIMESTAMP", "is_dttm": True}, + {"name": "id", "type": "INTEGER", "is_dttm": False}, + ], + ) + + assert test_table.main_dttm_col is None + test_table.fetch_metadata() + # fall back to ds + assert test_table.main_dttm_col == "ds" + + +def test_main_dttm_col_nondttm( + mocker: MockerFixture, + test_table: "SqlaTable", +) -> None: + """ + Test the ``SQLA_TABLE_MUTATOR`` config when main datetime column has wrong type. + """ + dttm_defaults = { + "main_dttm_col": "id", + } + mocker.patch( + "superset.connectors.sqla.models.config", + new={ + "SQLA_TABLE_MUTATOR": partial( + apply_dttm_defaults, + dttm_defaults=dttm_defaults, + ) + }, + ) + mocker.patch( + "superset.connectors.sqla.models.get_physical_table_metadata", + return_value=[ + {"name": "ds", "type": "TIMESTAMP", "is_dttm": True}, + {"name": "event_time", "type": "TIMESTAMP", "is_dttm": True}, + {"name": "id", "type": "INTEGER", "is_dttm": False}, + ], + ) + + assert test_table.main_dttm_col is None + test_table.fetch_metadata() + # fall back to ds + assert test_table.main_dttm_col == "ds" + + +def test_python_date_format_by_column_name( + mocker: MockerFixture, + test_table: "SqlaTable", +) -> None: + """ + Test the ``SQLA_TABLE_MUTATOR`` setting for "python_date_format". 
+ """ + table_defaults = { + "dttm_columns": { + "id": {"python_date_format": "epoch_ms"}, + "dttm": {"python_date_format": "epoch_s"}, + "duration_ms": {"python_date_format": "invalid"}, + }, + } + mocker.patch( + "superset.connectors.sqla.models.config", + new={ + "SQLA_TABLE_MUTATOR": partial( + apply_dttm_defaults, + dttm_defaults=table_defaults, + ) + }, + ) + mocker.patch( + "superset.connectors.sqla.models.get_physical_table_metadata", + return_value=[ + {"name": "id", "type": "INTEGER", "is_dttm": False}, + {"name": "dttm", "type": "INTEGER", "is_dttm": False}, + {"name": "duration_ms", "type": "INTEGER", "is_dttm": False}, + ], + ) + + test_table.fetch_metadata() + + id_col = [c for c in test_table.columns if c.column_name == "id"][0] + assert id_col.is_dttm + assert id_col.python_date_format == "epoch_ms" + + dttm_col = [c for c in test_table.columns if c.column_name == "dttm"][0] + assert dttm_col.is_dttm + assert dttm_col.python_date_format == "epoch_s" + + duration_ms_col = [c for c in test_table.columns if c.column_name == "duration_ms"][ + 0 + ] + assert duration_ms_col.is_dttm + assert duration_ms_col.python_date_format == "invalid" + + +def test_expression_by_column_name( + mocker: MockerFixture, + test_table: "SqlaTable", +) -> None: + """ + Test the ``SQLA_TABLE_MUTATOR`` setting for expression. + """ + table_defaults = { + "dttm_columns": { + "dttm": {"expression": "CAST(dttm as INTEGER)"}, + "duration_ms": {"expression": "CAST(duration_ms as DOUBLE)"}, + }, + } + mocker.patch( + "superset.connectors.sqla.models.config", + new={ + "SQLA_TABLE_MUTATOR": partial( + apply_dttm_defaults, + dttm_defaults=table_defaults, + ) + }, + ) + mocker.patch( + "superset.connectors.sqla.models.get_physical_table_metadata", + return_value=[ + {"name": "dttm", "type": "INTEGER", "is_dttm": False}, + {"name": "duration_ms", "type": "INTEGER", "is_dttm": False}, + ], + ) + + test_table.fetch_metadata() + + dttm_col = [c for c in test_table.columns if c.column_name == "dttm"][0] + assert dttm_col.is_dttm + assert dttm_col.expression == "CAST(dttm as INTEGER)" + + duration_ms_col = [c for c in test_table.columns if c.column_name == "duration_ms"][ + 0 + ] + assert duration_ms_col.is_dttm + assert duration_ms_col.expression == "CAST(duration_ms as DOUBLE)" + + +def test_full_setting( + mocker: MockerFixture, + test_table: "SqlaTable", +) -> None: + """ + Test the ``SQLA_TABLE_MUTATOR`` with full settings. 
+ """ + mocker.patch( + "superset.connectors.sqla.models.config", + new={ + "SQLA_TABLE_MUTATOR": partial( + apply_dttm_defaults, + dttm_defaults=FULL_DTTM_DEFAULTS_EXAMPLE, + ) + }, + ) + mocker.patch( + "superset.connectors.sqla.models.get_physical_table_metadata", + return_value=[ + {"name": "id", "type": "INTEGER", "is_dttm": False}, + {"name": "dttm", "type": "INTEGER", "is_dttm": False}, + {"name": "duration_ms", "type": "INTEGER", "is_dttm": False}, + ], + ) + + test_table.fetch_metadata() + + id_col = [c for c in test_table.columns if c.column_name == "id"][0] + assert id_col.is_dttm + assert id_col.python_date_format == "epoch_ms" + assert id_col.expression == "" + + dttm_col = [c for c in test_table.columns if c.column_name == "dttm"][0] + assert dttm_col.is_dttm + assert dttm_col.python_date_format == "epoch_s" + assert dttm_col.expression == "CAST(dttm as INTEGER)" diff --git a/tests/unit_tests/conftest.py b/tests/unit_tests/conftest.py new file mode 100644 index 0000000000000..6740a8b6e280b --- /dev/null +++ b/tests/unit_tests/conftest.py @@ -0,0 +1,165 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# pylint: disable=redefined-outer-name, import-outside-toplevel + +import importlib +import os +import unittest.mock +from typing import Any, Callable, Iterator + +import pytest +from _pytest.fixtures import SubRequest +from pytest_mock import MockFixture +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker +from sqlalchemy.orm.session import Session + +from superset import security_manager +from superset.app import SupersetApp +from superset.common.chart_data import ChartDataResultType +from superset.common.query_object_factory import QueryObjectFactory +from superset.extensions import appbuilder +from superset.initialization import SupersetAppInitializer + + +@pytest.fixture +def get_session(mocker: MockFixture) -> Callable[[], Session]: + """ + Create an in-memory SQLite session to test models. 
+ """ + engine = create_engine("sqlite://") + + def get_session(): + Session_ = sessionmaker(bind=engine) # pylint: disable=invalid-name + in_memory_session = Session_() + + # flask calls session.remove() + in_memory_session.remove = lambda: None + + # patch session + get_session = mocker.patch( + "superset.security.SupersetSecurityManager.get_session", + ) + get_session.return_value = in_memory_session + # FAB calls get_session.get_bind() to get a handler to the engine + get_session.get_bind.return_value = engine + # Allow for queries on security manager + get_session.query = in_memory_session.query + + mocker.patch("superset.db.session", in_memory_session) + return in_memory_session + + return get_session + + +@pytest.fixture +def session(get_session) -> Iterator[Session]: + yield get_session() + + +@pytest.fixture(scope="module") +def app(request: SubRequest) -> Iterator[SupersetApp]: + """ + A fixture that generates a Superset app. + """ + app = SupersetApp(__name__) + + app.config.from_object("superset.config") + app.config["SQLALCHEMY_DATABASE_URI"] = ( + os.environ.get("SUPERSET__SQLALCHEMY_DATABASE_URI") or "sqlite://" + ) + app.config["WTF_CSRF_ENABLED"] = False + app.config["PREVENT_UNSAFE_DB_CONNECTIONS"] = False + app.config["TESTING"] = True + + # loop over extra configs passed in by tests + if request and hasattr(request, "param"): + for key, val in request.param.items(): + app.config[key] = val + + # ``superset.extensions.appbuilder`` is a singleton, and won't rebuild the + # routes when this fixture is called multiple times; we need to clear the + # registered views to ensure the initialization can happen more than once. + appbuilder.baseviews = [] + + app_initializer = SupersetAppInitializer(app) + app_initializer.init_app() + + # reload base views to ensure error handlers are applied to the app + with app.app_context(): + import superset.views.base + + importlib.reload(superset.views.base) + + yield app + + +@pytest.fixture +def client(app: SupersetApp) -> Any: + with app.test_client() as client: + yield client + + +@pytest.fixture(autouse=True) +def app_context(app: SupersetApp) -> Iterator[None]: + """ + A fixture that yields and application context. + """ + with app.app_context(): + yield + + +@pytest.fixture +def full_api_access(mocker: MockFixture) -> Iterator[None]: + """ + Allow full access to the API. + + TODO (betodealmeida): we should replace this with user-fixtures, eg, ``admin`` or + ``gamma``, so that we have granular access to the APIs. 
+ """ + mocker.patch( + "flask_appbuilder.security.decorators.verify_jwt_in_request", + return_value=True, + ) + mocker.patch.object(security_manager, "has_access", return_value=True) + mocker.patch.object(security_manager, "can_access_all_databases", return_value=True) + + yield + + +@pytest.fixture +def dummy_query_object(request, app_context): + query_obj_marker = request.node.get_closest_marker("query_object") + result_type_marker = request.node.get_closest_marker("result_type") + + if query_obj_marker is None: + query_object = {} + else: + query_object = query_obj_marker.args[0] + + if result_type_marker is None: + result_type = ChartDataResultType.FULL + else: + result_type = result_type_marker.args[0] + + yield QueryObjectFactory( + app_configurations={ + "ROW_LIMIT": 100, + }, + _datasource_dao=unittest.mock.Mock(), + session_maker=unittest.mock.Mock(), + ).create(parent_result_type=result_type, **query_object) diff --git a/tests/unit_tests/core_tests.py b/tests/unit_tests/core_tests.py new file mode 100644 index 0000000000000..1473f18382950 --- /dev/null +++ b/tests/unit_tests/core_tests.py @@ -0,0 +1,235 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from copy import deepcopy + +import pytest + +from superset.utils.core import ( + AdhocColumn, + AdhocMetric, + ExtraFiltersReasonType, + ExtraFiltersTimeColumnType, + GenericDataType, + get_column_name, + get_column_names, + get_metric_name, + get_metric_names, + get_time_filter_status, + is_adhoc_metric, +) +from tests.unit_tests.fixtures.datasets import get_dataset_mock + +STR_METRIC = "my_metric" +SIMPLE_SUM_ADHOC_METRIC: AdhocMetric = { + "aggregate": "SUM", + "column": { + "column_name": "my_col", + "type": "INT", + "type_generic": GenericDataType.NUMERIC, + }, + "expressionType": "SIMPLE", + "label": "my SUM", +} +SQL_ADHOC_METRIC: AdhocMetric = { + "expressionType": "SQL", + "label": "my_sql", + "sqlExpression": "SUM(my_col)", +} +STR_COLUMN = "my_column" +SQL_ADHOC_COLUMN: AdhocColumn = { + "hasCustomLabel": True, + "label": "My Adhoc Column", + "sqlExpression": "case when foo = 1 then 'foo' else 'bar' end", +} + + +def test_get_metric_name_saved_metric(): + assert get_metric_name(STR_METRIC) == "my_metric" + assert get_metric_name(STR_METRIC, {STR_METRIC: "My Metric"}) == "My Metric" + + +def test_get_metric_name_adhoc(): + metric = deepcopy(SIMPLE_SUM_ADHOC_METRIC) + assert get_metric_name(metric) == "my SUM" + assert get_metric_name(metric, {"my SUM": "My Irrelevant Mapping"}) == "my SUM" + del metric["label"] + assert get_metric_name(metric) == "SUM(my_col)" + metric["label"] = "" + assert get_metric_name(metric) == "SUM(my_col)" + del metric["aggregate"] + assert get_metric_name(metric) == "my_col" + metric["aggregate"] = "" + assert get_metric_name(metric) == "my_col" + assert get_metric_name(metric, {"my_col": "My Irrelevant Mapping"}) == "my_col" + + metric = deepcopy(SQL_ADHOC_METRIC) + assert get_metric_name(metric) == "my_sql" + assert get_metric_name(metric, {"my_sql": "My Irrelevant Mapping"}) == "my_sql" + del metric["label"] + assert get_metric_name(metric) == "SUM(my_col)" + metric["label"] = "" + assert get_metric_name(metric) == "SUM(my_col)" + + +def test_get_metric_name_invalid_metric(): + metric = deepcopy(SIMPLE_SUM_ADHOC_METRIC) + del metric["label"] + del metric["column"] + with pytest.raises(ValueError): + get_metric_name(metric) + + metric = deepcopy(SIMPLE_SUM_ADHOC_METRIC) + del metric["label"] + metric["expressionType"] = "FOO" + with pytest.raises(ValueError): + get_metric_name(metric) + + metric = deepcopy(SQL_ADHOC_METRIC) + del metric["label"] + metric["expressionType"] = "FOO" + with pytest.raises(ValueError): + get_metric_name(metric) + + metric = deepcopy(SQL_ADHOC_METRIC) + del metric["expressionType"] + with pytest.raises(ValueError): + get_metric_name(metric) + + with pytest.raises(ValueError): + get_metric_name(None) + with pytest.raises(ValueError): + get_metric_name(0) + with pytest.raises(ValueError): + get_metric_name({}) + + +def test_get_metric_names(): + assert get_metric_names( + [STR_METRIC, SIMPLE_SUM_ADHOC_METRIC, SQL_ADHOC_METRIC] + ) == ["my_metric", "my SUM", "my_sql"] + assert get_metric_names( + [STR_METRIC, SIMPLE_SUM_ADHOC_METRIC, SQL_ADHOC_METRIC], + {STR_METRIC: "My Metric"}, + ) == ["My Metric", "my SUM", "my_sql"] + + +def test_get_column_name_physical_column(): + assert get_column_name(STR_COLUMN) == "my_column" + assert get_metric_name(STR_COLUMN, {STR_COLUMN: "My Column"}) == "My Column" + + +def test_get_column_name_adhoc(): + column = deepcopy(SQL_ADHOC_COLUMN) + assert get_column_name(column) == "My Adhoc Column" + assert ( + get_column_name(column, {"My Adhoc Column": "My Irrelevant Mapping"}) + == "My Adhoc 
Column" + ) + del column["label"] + assert get_column_name(column) == "case when foo = 1 then 'foo' else 'bar' end" + column["label"] = "" + assert get_column_name(column) == "case when foo = 1 then 'foo' else 'bar' end" + + +def test_get_column_names(): + assert get_column_names([STR_COLUMN, SQL_ADHOC_COLUMN]) == [ + "my_column", + "My Adhoc Column", + ] + assert get_column_names( + [STR_COLUMN, SQL_ADHOC_COLUMN], + {"my_column": "My Column"}, + ) == ["My Column", "My Adhoc Column"] + + +def test_get_column_name_invalid_metric(): + column = deepcopy(SQL_ADHOC_COLUMN) + del column["label"] + del column["sqlExpression"] + with pytest.raises(ValueError): + get_column_name(column) + + +def test_is_adhoc_metric(): + assert is_adhoc_metric(STR_METRIC) is False + assert is_adhoc_metric(SIMPLE_SUM_ADHOC_METRIC) is True + assert is_adhoc_metric(SQL_ADHOC_METRIC) is True + + +def test_get_time_filter_status_time_col(): + dataset = get_dataset_mock() + + assert get_time_filter_status( + dataset, {ExtraFiltersTimeColumnType.TIME_COL: "ds"} + ) == ([{"column": ExtraFiltersTimeColumnType.TIME_COL}], []) + + +def test_get_time_filter_status_time_range(): + dataset = get_dataset_mock() + + assert get_time_filter_status( + dataset, {ExtraFiltersTimeColumnType.TIME_RANGE: "1 year ago"} + ) == ([{"column": ExtraFiltersTimeColumnType.TIME_RANGE}], []) + + +def test_get_time_filter_status_time_grain(): + dataset = get_dataset_mock() + + assert get_time_filter_status( + dataset, {ExtraFiltersTimeColumnType.TIME_GRAIN: "PT1M"} + ) == ([{"column": ExtraFiltersTimeColumnType.TIME_GRAIN}], []) + + +def test_get_time_filter_status_no_temporal_col(): + dataset = get_dataset_mock() + dataset.columns[0].is_dttm = False + + assert get_time_filter_status( + dataset, {ExtraFiltersTimeColumnType.TIME_COL: "foobar"} + ) == ( + [], + [ + { + "reason": ExtraFiltersReasonType.COL_NOT_IN_DATASOURCE, + "column": ExtraFiltersTimeColumnType.TIME_COL, + } + ], + ) + + assert get_time_filter_status( + dataset, {ExtraFiltersTimeColumnType.TIME_RANGE: "1 year ago"} + ) == ( + [], + [ + { + "reason": ExtraFiltersReasonType.NO_TEMPORAL_COLUMN, + "column": ExtraFiltersTimeColumnType.TIME_RANGE, + } + ], + ) + + assert get_time_filter_status( + dataset, {ExtraFiltersTimeColumnType.TIME_GRAIN: "PT1M"} + ) == ( + [], + [ + { + "reason": ExtraFiltersReasonType.NO_TEMPORAL_COLUMN, + "column": ExtraFiltersTimeColumnType.TIME_GRAIN, + } + ], + ) diff --git a/tests/unit_tests/dao/queries_test.py b/tests/unit_tests/dao/queries_test.py new file mode 100644 index 0000000000000..62eeff31065e0 --- /dev/null +++ b/tests/unit_tests/dao/queries_test.py @@ -0,0 +1,274 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+import json +from datetime import datetime, timedelta +from typing import Any, Iterator + +import pytest +from pytest_mock import MockFixture +from sqlalchemy.orm.session import Session + +from superset.exceptions import QueryNotFoundException, SupersetCancelQueryException + + +def test_query_dao_save_metadata(session: Session) -> None: + from superset.models.core import Database + from superset.models.sql_lab import Query + + engine = session.get_bind() + Query.metadata.create_all(engine) # pylint: disable=no-member + + db = Database(database_name="my_database", sqlalchemy_uri="sqlite://") + + query_obj = Query( + client_id="foo", + database=db, + tab_name="test_tab", + sql_editor_id="test_editor_id", + sql="select * from bar", + select_sql="select * from bar", + executed_sql="select * from bar", + limit=100, + select_as_cta=False, + rows=100, + error_message="none", + results_key="abc", + ) + + session.add(db) + session.add(query_obj) + + from superset.queries.dao import QueryDAO + + query = session.query(Query).one() + QueryDAO.save_metadata(query=query, payload={"columns": []}) + assert query.extra.get("columns", None) == [] + + +def test_query_dao_get_queries_changed_after(session: Session) -> None: + from superset.models.core import Database + from superset.models.sql_lab import Query + + engine = session.get_bind() + Query.metadata.create_all(engine) # pylint: disable=no-member + + db = Database(database_name="my_database", sqlalchemy_uri="sqlite://") + + now = datetime.utcnow() + + old_query_obj = Query( + client_id="foo", + database=db, + tab_name="test_tab", + sql_editor_id="test_editor_id", + sql="select * from bar", + select_sql="select * from bar", + executed_sql="select * from bar", + limit=100, + select_as_cta=False, + rows=100, + error_message="none", + results_key="abc", + changed_on=now - timedelta(days=3), + ) + + updated_query_obj = Query( + client_id="updated_foo", + database=db, + tab_name="test_tab", + sql_editor_id="test_editor_id", + sql="select * from foo", + select_sql="select * from foo", + executed_sql="select * from foo", + limit=100, + select_as_cta=False, + rows=100, + error_message="none", + results_key="abc", + changed_on=now - timedelta(days=1), + ) + + session.add(db) + session.add(old_query_obj) + session.add(updated_query_obj) + + from superset.queries.dao import QueryDAO + + timestamp = datetime.timestamp(now - timedelta(days=2)) * 1000 + result = QueryDAO.get_queries_changed_after(timestamp) + assert len(result) == 1 + assert result[0].client_id == "updated_foo" + + +def test_query_dao_stop_query_not_found( + mocker: MockFixture, app: Any, session: Session +) -> None: + from superset.common.db_query_status import QueryStatus + from superset.models.core import Database + from superset.models.sql_lab import Query + + engine = session.get_bind() + Query.metadata.create_all(engine) # pylint: disable=no-member + + db = Database(database_name="my_database", sqlalchemy_uri="sqlite://") + + query_obj = Query( + client_id="foo", + database=db, + tab_name="test_tab", + sql_editor_id="test_editor_id", + sql="select * from bar", + select_sql="select * from bar", + executed_sql="select * from bar", + limit=100, + select_as_cta=False, + rows=100, + error_message="none", + results_key="abc", + status=QueryStatus.RUNNING, + ) + + session.add(db) + session.add(query_obj) + + mocker.patch("superset.sql_lab.cancel_query", return_value=False) + + from superset.queries.dao import QueryDAO + + with pytest.raises(QueryNotFoundException): + QueryDAO.stop_query("foo2") + + 
query = session.query(Query).one() + assert query.status == QueryStatus.RUNNING + + +def test_query_dao_stop_query_not_running( + mocker: MockFixture, app: Any, session: Session +) -> None: + from superset.common.db_query_status import QueryStatus + from superset.models.core import Database + from superset.models.sql_lab import Query + + engine = session.get_bind() + Query.metadata.create_all(engine) # pylint: disable=no-member + + db = Database(database_name="my_database", sqlalchemy_uri="sqlite://") + + query_obj = Query( + client_id="foo", + database=db, + tab_name="test_tab", + sql_editor_id="test_editor_id", + sql="select * from bar", + select_sql="select * from bar", + executed_sql="select * from bar", + limit=100, + select_as_cta=False, + rows=100, + error_message="none", + results_key="abc", + status=QueryStatus.FAILED, + ) + + session.add(db) + session.add(query_obj) + + from superset.queries.dao import QueryDAO + + QueryDAO.stop_query(query_obj.client_id) + query = session.query(Query).one() + assert query.status == QueryStatus.FAILED + + +def test_query_dao_stop_query_failed( + mocker: MockFixture, app: Any, session: Session +) -> None: + from superset.common.db_query_status import QueryStatus + from superset.models.core import Database + from superset.models.sql_lab import Query + + engine = session.get_bind() + Query.metadata.create_all(engine) # pylint: disable=no-member + + db = Database(database_name="my_database", sqlalchemy_uri="sqlite://") + + query_obj = Query( + client_id="foo", + database=db, + tab_name="test_tab", + sql_editor_id="test_editor_id", + sql="select * from bar", + select_sql="select * from bar", + executed_sql="select * from bar", + limit=100, + select_as_cta=False, + rows=100, + error_message="none", + results_key="abc", + status=QueryStatus.RUNNING, + ) + + session.add(db) + session.add(query_obj) + + mocker.patch("superset.sql_lab.cancel_query", return_value=False) + + from superset.queries.dao import QueryDAO + + with pytest.raises(SupersetCancelQueryException): + QueryDAO.stop_query(query_obj.client_id) + + query = session.query(Query).one() + assert query.status == QueryStatus.RUNNING + + +def test_query_dao_stop_query(mocker: MockFixture, app: Any, session: Session) -> None: + from superset.common.db_query_status import QueryStatus + from superset.models.core import Database + from superset.models.sql_lab import Query + + engine = session.get_bind() + Query.metadata.create_all(engine) # pylint: disable=no-member + + db = Database(database_name="my_database", sqlalchemy_uri="sqlite://") + + query_obj = Query( + client_id="foo", + database=db, + tab_name="test_tab", + sql_editor_id="test_editor_id", + sql="select * from bar", + select_sql="select * from bar", + executed_sql="select * from bar", + limit=100, + select_as_cta=False, + rows=100, + error_message="none", + results_key="abc", + status=QueryStatus.RUNNING, + ) + + session.add(db) + session.add(query_obj) + + mocker.patch("superset.sql_lab.cancel_query", return_value=True) + + from superset.queries.dao import QueryDAO + + QueryDAO.stop_query(query_obj.client_id) + query = session.query(Query).one() + assert query.status == QueryStatus.STOPPED diff --git a/tests/unit_tests/dashboards/__init__.py b/tests/unit_tests/dashboards/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/dashboards/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/dashboards/commands/__init__.py b/tests/unit_tests/dashboards/commands/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/dashboards/commands/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/dashboards/commands/importers/__init__.py b/tests/unit_tests/dashboards/commands/importers/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/dashboards/commands/importers/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/dashboards/commands/importers/v1/__init__.py b/tests/unit_tests/dashboards/commands/importers/v1/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/dashboards/commands/importers/v1/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/dashboards/commands/importers/v1/import_test.py b/tests/unit_tests/dashboards/commands/importers/v1/import_test.py new file mode 100644 index 0000000000000..08f681d916b3c --- /dev/null +++ b/tests/unit_tests/dashboards/commands/importers/v1/import_test.py @@ -0,0 +1,65 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# pylint: disable=unused-argument, import-outside-toplevel, unused-import, invalid-name + +import copy + +from sqlalchemy.orm.session import Session + + +def test_import_dashboard(session: Session) -> None: + """ + Test importing a dashboard. + """ + from superset.connectors.sqla.models import SqlaTable + from superset.dashboards.commands.importers.v1.utils import import_dashboard + from superset.models.core import Database + from superset.models.slice import Slice + from tests.integration_tests.fixtures.importexport import dashboard_config + + engine = session.get_bind() + Slice.metadata.create_all(engine) # pylint: disable=no-member + + config = copy.deepcopy(dashboard_config) + + dashboard = import_dashboard(session, config) + assert dashboard.dashboard_title == "Test dash" + assert dashboard.description is None + assert dashboard.is_managed_externally is False + assert dashboard.external_url is None + + +def test_import_dashboard_managed_externally(session: Session) -> None: + """ + Test importing a dashboard that is managed externally. 
+ """ + from superset.connectors.sqla.models import SqlaTable + from superset.dashboards.commands.importers.v1.utils import import_dashboard + from superset.models.core import Database + from superset.models.slice import Slice + from tests.integration_tests.fixtures.importexport import dashboard_config + + engine = session.get_bind() + Slice.metadata.create_all(engine) # pylint: disable=no-member + + config = copy.deepcopy(dashboard_config) + config["is_managed_externally"] = True + config["external_url"] = "https://example.org/my_dashboard" + + dashboard = import_dashboard(session, config) + assert dashboard.is_managed_externally is True + assert dashboard.external_url == "https://example.org/my_dashboard" diff --git a/tests/unit_tests/dashboards/commands/importers/v1/utils_test.py b/tests/unit_tests/dashboards/commands/importers/v1/utils_test.py new file mode 100644 index 0000000000000..0392acb31596a --- /dev/null +++ b/tests/unit_tests/dashboards/commands/importers/v1/utils_test.py @@ -0,0 +1,123 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# pylint: disable=import-outside-toplevel, unused-argument + +from typing import Any, Dict + + +def test_update_id_refs_immune_missing( # pylint: disable=invalid-name + app_context: None, +): + """ + Test that missing immune charts are ignored. + + A chart might be removed from a dashboard but still remain in the list of charts + immune to filters. The missing chart ID should be simply ignored when the + dashboard is imported. 
+ """ + from superset.dashboards.commands.importers.v1.utils import update_id_refs + + config = { + "position": { + "CHART1": { + "id": "CHART1", + "meta": { + "chartId": 101, + "uuid": "uuid1", + }, + "type": "CHART", + }, + "CHART2": { + "id": "CHART2", + "meta": { + "chartId": 102, + "uuid": "uuid2", + }, + "type": "CHART", + }, + }, + "metadata": { + "filter_scopes": { + "101": {"filter_name": {"immune": [102, 103]}}, + "104": {"filter_name": {"immune": [102, 103]}}, + }, + "native_filter_configuration": [], + }, + } + chart_ids = {"uuid1": 1, "uuid2": 2} + dataset_info: Dict[str, Dict[str, Any]] = {} # not used + + fixed = update_id_refs(config, chart_ids, dataset_info) + assert fixed == { + "position": { + "CHART1": { + "id": "CHART1", + "meta": {"chartId": 1, "uuid": "uuid1"}, + "type": "CHART", + }, + "CHART2": { + "id": "CHART2", + "meta": {"chartId": 2, "uuid": "uuid2"}, + "type": "CHART", + }, + }, + "metadata": { + "filter_scopes": {"1": {"filter_name": {"immune": [2]}}}, + "native_filter_configuration": [], + }, + } + + +def test_update_native_filter_config_scope_excluded(): + from superset.dashboards.commands.importers.v1.utils import update_id_refs + + config = { + "position": { + "CHART1": { + "id": "CHART1", + "meta": {"chartId": 101, "uuid": "uuid1"}, + "type": "CHART", + }, + "CHART2": { + "id": "CHART2", + "meta": {"chartId": 102, "uuid": "uuid2"}, + "type": "CHART", + }, + }, + "metadata": { + "native_filter_configuration": [{"scope": {"excluded": [101, 102, 103]}}], + }, + } + chart_ids = {"uuid1": 1, "uuid2": 2} + dataset_info: Dict[str, Dict[str, Any]] = {} # not used + + fixed = update_id_refs(config, chart_ids, dataset_info) + assert fixed == { + "position": { + "CHART1": { + "id": "CHART1", + "meta": {"chartId": 1, "uuid": "uuid1"}, + "type": "CHART", + }, + "CHART2": { + "id": "CHART2", + "meta": {"chartId": 2, "uuid": "uuid2"}, + "type": "CHART", + }, + }, + "metadata": {"native_filter_configuration": [{"scope": {"excluded": [1, 2]}}]}, + } diff --git a/tests/unit_tests/databases/__init__.py b/tests/unit_tests/databases/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/databases/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/databases/api_test.py b/tests/unit_tests/databases/api_test.py new file mode 100644 index 0000000000000..68a9add12e9a4 --- /dev/null +++ b/tests/unit_tests/databases/api_test.py @@ -0,0 +1,345 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# pylint: disable=unused-argument, import-outside-toplevel, line-too-long + +import json +from io import BytesIO +from typing import Any +from uuid import UUID + +import pytest +from pytest_mock import MockFixture +from sqlalchemy.orm.session import Session + + +def test_post_with_uuid( + session: Session, + client: Any, + full_api_access: None, +) -> None: + """ + Test that we can set the database UUID when creating it. + """ + from superset.models.core import Database + + # create table for databases + Database.metadata.create_all(session.get_bind()) # pylint: disable=no-member + + response = client.post( + "/api/v1/database/", + json={ + "database_name": "my_db", + "sqlalchemy_uri": "sqlite://", + "uuid": "7c1b7880-a59d-47cd-8bf1-f1eb8d2863cb", + }, + ) + assert response.status_code == 201 + + database = session.query(Database).one() + assert database.uuid == UUID("7c1b7880-a59d-47cd-8bf1-f1eb8d2863cb") + + +def test_password_mask( + mocker: MockFixture, + app: Any, + session: Session, + client: Any, + full_api_access: None, +) -> None: + """ + Test that sensitive information is masked. + """ + from superset.databases.api import DatabaseRestApi + from superset.models.core import Database + + DatabaseRestApi.datamodel.session = session + + # create table for databases + Database.metadata.create_all(session.get_bind()) # pylint: disable=no-member + + database = Database( + database_name="my_database", + sqlalchemy_uri="gsheets://", + encrypted_extra=json.dumps( + { + "service_account_info": { + "type": "service_account", + "project_id": "black-sanctum-314419", + "private_key_id": "259b0d419a8f840056158763ff54d8b08f7b8173", + "private_key": "SECRET", + "client_email": "google-spreadsheets-demo-servi@black-sanctum-314419.iam.gserviceaccount.com", + "client_id": "114567578578109757129", + "auth_uri": "https://accounts.google.com/o/oauth2/auth", + "token_uri": "https://oauth2.googleapis.com/token", + "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", + "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/google-spreadsheets-demo-servi%40black-sanctum-314419.iam.gserviceaccount.com", + }, + } + ), + ) + session.add(database) + session.commit() + + # mock the lookup so that we don't need to include the driver + mocker.patch("sqlalchemy.engine.URL.get_driver_name", return_value="gsheets") + mocker.patch("superset.utils.log.DBEventLogger.log") + + response = client.get("/api/v1/database/1") + assert ( + response.json["result"]["parameters"]["service_account_info"]["private_key"] + == "XXXXXXXXXX" + ) + assert "encrypted_extra" not in response.json["result"] + + +@pytest.mark.skip(reason="Works locally but fails on CI") +def test_update_with_password_mask( + app: Any, + session: Session, + client: Any, + full_api_access: None, +) -> None: + """ + Test that an update with a masked password doesn't overwrite the existing password. 
+ """ + from superset.databases.api import DatabaseRestApi + from superset.models.core import Database + + DatabaseRestApi.datamodel.session = session + + # create table for databases + Database.metadata.create_all(session.get_bind()) # pylint: disable=no-member + + database = Database( + database_name="my_database", + sqlalchemy_uri="gsheets://", + encrypted_extra=json.dumps( + { + "service_account_info": { + "project_id": "black-sanctum-314419", + "private_key": "SECRET", + }, + } + ), + ) + session.add(database) + session.commit() + + client.put( + "/api/v1/database/1", + json={ + "encrypted_extra": json.dumps( + { + "service_account_info": { + "project_id": "yellow-unicorn-314419", + "private_key": "XXXXXXXXXX", + }, + } + ), + }, + ) + database = session.query(Database).one() + assert ( + database.encrypted_extra + == '{"service_account_info": {"project_id": "yellow-unicorn-314419", "private_key": "SECRET"}}' + ) + + +def test_non_zip_import(client: Any, full_api_access: None) -> None: + """ + Test that non-ZIP imports are not allowed. + """ + buf = BytesIO(b"definitely_not_a_zip_file") + form_data = { + "formData": (buf, "evil.pdf"), + } + response = client.post( + "/api/v1/database/import/", + data=form_data, + content_type="multipart/form-data", + ) + assert response.status_code == 422 + assert response.json == { + "errors": [ + { + "message": "Not a ZIP file", + "error_type": "GENERIC_COMMAND_ERROR", + "level": "warning", + "extra": { + "issue_codes": [ + { + "code": 1010, + "message": "Issue 1010 - Superset encountered an error while running a command.", + } + ] + }, + } + ] + } + + +def test_delete_ssh_tunnel( + mocker: MockFixture, + app: Any, + session: Session, + client: Any, + full_api_access: None, +) -> None: + """ + Test that we can delete SSH Tunnel + """ + with app.app_context(): + from superset.databases.api import DatabaseRestApi + from superset.databases.dao import DatabaseDAO + from superset.databases.ssh_tunnel.models import SSHTunnel + from superset.models.core import Database + + DatabaseRestApi.datamodel.session = session + + # create table for databases + Database.metadata.create_all(session.get_bind()) # pylint: disable=no-member + + # Create our Database + database = Database( + database_name="my_database", + sqlalchemy_uri="gsheets://", + encrypted_extra=json.dumps( + { + "service_account_info": { + "type": "service_account", + "project_id": "black-sanctum-314419", + "private_key_id": "259b0d419a8f840056158763ff54d8b08f7b8173", + "private_key": "SECRET", + "client_email": "google-spreadsheets-demo-servi@black-sanctum-314419.iam.gserviceaccount.com", + "client_id": "SSH_TUNNEL_CREDENTIALS_CLIENT", + "auth_uri": "https://accounts.google.com/o/oauth2/auth", + "token_uri": "https://oauth2.googleapis.com/token", + "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", + "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/google-spreadsheets-demo-servi%40black-sanctum-314419.iam.gserviceaccount.com", + }, + } + ), + ) + session.add(database) + session.commit() + + # mock the lookup so that we don't need to include the driver + mocker.patch("sqlalchemy.engine.URL.get_driver_name", return_value="gsheets") + mocker.patch("superset.utils.log.DBEventLogger.log") + mocker.patch( + "superset.databases.ssh_tunnel.commands.delete.is_feature_enabled", + return_value=True, + ) + + # Create our SSHTunnel + tunnel = SSHTunnel( + database_id=1, + database=database, + ) + + session.add(tunnel) + session.commit() + + # Get our recently 
created SSHTunnel + response_tunnel = DatabaseDAO.get_ssh_tunnel(1) + assert response_tunnel + assert isinstance(response_tunnel, SSHTunnel) + assert 1 == response_tunnel.database_id + + # Delete the recently created SSHTunnel + response_delete_tunnel = client.delete("/api/v1/database/1/ssh_tunnel/") + assert response_delete_tunnel.json["message"] == "OK" + + response_tunnel = DatabaseDAO.get_ssh_tunnel(1) + assert response_tunnel is None + + +def test_delete_ssh_tunnel_not_found( + mocker: MockFixture, + app: Any, + session: Session, + client: Any, + full_api_access: None, +) -> None: + """ + Test that we cannot delete a tunnel that does not exist + """ + with app.app_context(): + from superset.databases.api import DatabaseRestApi + from superset.databases.dao import DatabaseDAO + from superset.databases.ssh_tunnel.models import SSHTunnel + from superset.models.core import Database + + DatabaseRestApi.datamodel.session = session + + # create table for databases + Database.metadata.create_all(session.get_bind()) # pylint: disable=no-member + + # Create our Database + database = Database( + database_name="my_database", + sqlalchemy_uri="gsheets://", + encrypted_extra=json.dumps( + { + "service_account_info": { + "type": "service_account", + "project_id": "black-sanctum-314419", + "private_key_id": "259b0d419a8f840056158763ff54d8b08f7b8173", + "private_key": "SECRET", + "client_email": "google-spreadsheets-demo-servi@black-sanctum-314419.iam.gserviceaccount.com", + "client_id": "SSH_TUNNEL_CREDENTIALS_CLIENT", + "auth_uri": "https://accounts.google.com/o/oauth2/auth", + "token_uri": "https://oauth2.googleapis.com/token", + "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", + "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/google-spreadsheets-demo-servi%40black-sanctum-314419.iam.gserviceaccount.com", + }, + } + ), + ) + session.add(database) + session.commit() + + # mock the lookup so that we don't need to include the driver + mocker.patch("sqlalchemy.engine.URL.get_driver_name", return_value="gsheets") + mocker.patch("superset.utils.log.DBEventLogger.log") + mocker.patch( + "superset.databases.ssh_tunnel.commands.delete.is_feature_enabled", + return_value=True, + ) + + # Create our SSHTunnel + tunnel = SSHTunnel( + database_id=1, + database=database, + ) + + session.add(tunnel) + session.commit() + + # Delete the recently created SSHTunnel + response_delete_tunnel = client.delete("/api/v1/database/2/ssh_tunnel/") + assert response_delete_tunnel.json["message"] == "Not found" + + # Get our recently created SSHTunnel + response_tunnel = DatabaseDAO.get_ssh_tunnel(1) + assert response_tunnel + assert isinstance(response_tunnel, SSHTunnel) + assert 1 == response_tunnel.database_id + + response_tunnel = DatabaseDAO.get_ssh_tunnel(2) + assert response_tunnel is None diff --git a/tests/unit_tests/databases/commands/__init__.py b/tests/unit_tests/databases/commands/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/databases/commands/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/databases/commands/importers/__init__.py b/tests/unit_tests/databases/commands/importers/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/databases/commands/importers/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/databases/commands/importers/v1/__init__.py b/tests/unit_tests/databases/commands/importers/v1/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/databases/commands/importers/v1/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/databases/commands/importers/v1/import_test.py b/tests/unit_tests/databases/commands/importers/v1/import_test.py new file mode 100644 index 0000000000000..e665bcb505d55 --- /dev/null +++ b/tests/unit_tests/databases/commands/importers/v1/import_test.py @@ -0,0 +1,78 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# pylint: disable=unused-argument, import-outside-toplevel, invalid-name + +import copy + +from sqlalchemy.orm.session import Session + + +def test_import_database(session: Session) -> None: + """ + Test importing a database. + """ + from superset.databases.commands.importers.v1.utils import import_database + from superset.models.core import Database + from tests.integration_tests.fixtures.importexport import database_config + + engine = session.get_bind() + Database.metadata.create_all(engine) # pylint: disable=no-member + + config = copy.deepcopy(database_config) + database = import_database(session, config) + assert database.database_name == "imported_database" + assert database.sqlalchemy_uri == "sqlite:///test.db" + assert database.cache_timeout is None + assert database.expose_in_sqllab is True + assert database.allow_run_async is False + assert database.allow_ctas is True + assert database.allow_cvas is True + assert database.allow_dml is True + assert database.allow_file_upload is True + assert database.extra == "{}" + assert database.uuid == "b8a1ccd3-779d-4ab7-8ad8-9ab119d7fe89" + assert database.is_managed_externally is False + assert database.external_url is None + + # ``allow_dml`` was initially not exported; the import should work if the field is + # missing + config = copy.deepcopy(database_config) + del config["allow_dml"] + session.delete(database) + session.flush() + database = import_database(session, config) + assert database.allow_dml is False + + +def test_import_database_managed_externally(session: Session) -> None: + """ + Test importing a database that is managed externally. + """ + from superset.databases.commands.importers.v1.utils import import_database + from superset.models.core import Database + from tests.integration_tests.fixtures.importexport import database_config + + engine = session.get_bind() + Database.metadata.create_all(engine) # pylint: disable=no-member + + config = copy.deepcopy(database_config) + config["is_managed_externally"] = True + config["external_url"] = "https://example.org/my_database" + + database = import_database(session, config) + assert database.is_managed_externally is True + assert database.external_url == "https://example.org/my_database" diff --git a/tests/unit_tests/databases/commands/test_connection_test.py b/tests/unit_tests/databases/commands/test_connection_test.py new file mode 100644 index 0000000000000..8e86cfd1cfe9b --- /dev/null +++ b/tests/unit_tests/databases/commands/test_connection_test.py @@ -0,0 +1,32 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from parameterized import parameterized + +from superset.databases.commands.test_connection import get_log_connection_action +from superset.databases.ssh_tunnel.models import SSHTunnel + + +@parameterized.expand( + [ + ("foo", None, None, "foo"), + ("foo", SSHTunnel, None, "foo.ssh_tunnel"), + ("foo", SSHTunnel, Exception("oops"), "foo.Exception.ssh_tunnel"), + ], +) +def test_get_log_connection_action(action, tunnel, exc, expected_result): + assert expected_result == get_log_connection_action(action, tunnel, exc) diff --git a/tests/unit_tests/databases/dao/__init__.py b/tests/unit_tests/databases/dao/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/databases/dao/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/databases/dao/dao_tests.py b/tests/unit_tests/databases/dao/dao_tests.py new file mode 100644 index 0000000000000..47db402670dee --- /dev/null +++ b/tests/unit_tests/databases/dao/dao_tests.py @@ -0,0 +1,69 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
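+# The fixture below seeds the test session with a Database, a SqlaTable, and an
+# SSHTunnel so that DatabaseDAO.get_ssh_tunnel can be exercised for both the
+# found and the not-found case without a real backend.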
+
+from typing import Iterator
+
+import pytest
+from sqlalchemy.orm.session import Session
+
+
+@pytest.fixture
+def session_with_data(session: Session) -> Iterator[Session]:
+    from superset.connectors.sqla.models import SqlaTable
+    from superset.databases.ssh_tunnel.models import SSHTunnel
+    from superset.models.core import Database
+
+    engine = session.get_bind()
+    SqlaTable.metadata.create_all(engine)  # pylint: disable=no-member
+
+    db = Database(database_name="my_database", sqlalchemy_uri="sqlite://")
+    sqla_table = SqlaTable(
+        table_name="my_sqla_table",
+        columns=[],
+        metrics=[],
+        database=db,
+    )
+    ssh_tunnel = SSHTunnel(
+        database_id=db.id,
+        database=db,
+    )
+
+    session.add(db)
+    session.add(sqla_table)
+    session.add(ssh_tunnel)
+    session.flush()
+    yield session
+    session.rollback()
+
+
+def test_database_get_ssh_tunnel(session_with_data: Session) -> None:
+    from superset.databases.dao import DatabaseDAO
+    from superset.databases.ssh_tunnel.models import SSHTunnel
+
+    result = DatabaseDAO.get_ssh_tunnel(1)
+
+    assert result
+    assert isinstance(result, SSHTunnel)
+    assert 1 == result.database_id
+
+
+def test_database_get_ssh_tunnel_not_found(session_with_data: Session) -> None:
+    from superset.databases.dao import DatabaseDAO
+
+    result = DatabaseDAO.get_ssh_tunnel(2)
+
+    assert result is None
diff --git a/tests/unit_tests/databases/schema_tests.py b/tests/unit_tests/databases/schema_tests.py
new file mode 100644
index 0000000000000..c9fca22d1b28d
--- /dev/null
+++ b/tests/unit_tests/databases/schema_tests.py
@@ -0,0 +1,227 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# pylint: disable=import-outside-toplevel, invalid-name, unused-argument, redefined-outer-name
+
+from typing import TYPE_CHECKING
+
+import pytest
+from marshmallow import fields, Schema, ValidationError
+from pytest_mock import MockFixture
+
+if TYPE_CHECKING:
+    from superset.databases.schemas import DatabaseParametersSchemaMixin
+    from superset.db_engine_specs.base import BasicParametersMixin
+
+
+# pylint: disable=too-few-public-methods
+class InvalidEngine:
+    """
+    An invalid DB engine spec.
+    """
+
+
+@pytest.fixture
+def dummy_schema() -> "DatabaseParametersSchemaMixin":
+    """
+    Fixture providing a dummy schema.
+    """
+    from superset.databases.schemas import DatabaseParametersSchemaMixin
+
+    class DummySchema(Schema, DatabaseParametersSchemaMixin):
+        sqlalchemy_uri = fields.String()
+
+    return DummySchema()
+
+
+@pytest.fixture
+def dummy_engine(mocker: MockFixture) -> None:
+    """
+    Fixture providing a dummy DB engine spec.
+ """ + from superset.db_engine_specs.base import BasicParametersMixin + + class DummyEngine(BasicParametersMixin): + engine = "dummy" + default_driver = "dummy" + + mocker.patch("superset.databases.schemas.get_engine_spec", return_value=DummyEngine) + + +def test_database_parameters_schema_mixin( + dummy_engine: None, + dummy_schema: "Schema", +) -> None: + from superset.models.core import ConfigurationMethod + + payload = { + "engine": "dummy_engine", + "configuration_method": ConfigurationMethod.DYNAMIC_FORM, + "parameters": { + "username": "username", + "password": "password", + "host": "localhost", + "port": 12345, + "database": "dbname", + }, + } + result = dummy_schema.load(payload) + assert result == { + "configuration_method": ConfigurationMethod.DYNAMIC_FORM, + "sqlalchemy_uri": "dummy+dummy://username:password@localhost:12345/dbname", + } + + +def test_database_parameters_schema_mixin_no_engine( + dummy_schema: "Schema", +) -> None: + from superset.models.core import ConfigurationMethod + + payload = { + "configuration_method": ConfigurationMethod.DYNAMIC_FORM, + "parameters": { + "username": "username", + "password": "password", + "host": "localhost", + "port": 12345, + "database": "dbname", + }, + } + try: + dummy_schema.load(payload) + except ValidationError as err: + assert err.messages == { + "_schema": [ + ( + "An engine must be specified when passing individual parameters to " + "a database." + ), + ] + } + + +def test_database_parameters_schema_mixin_invalid_engine( + dummy_engine: None, + dummy_schema: "Schema", +) -> None: + from superset.models.core import ConfigurationMethod + + payload = { + "engine": "dummy_engine", + "configuration_method": ConfigurationMethod.DYNAMIC_FORM, + "parameters": { + "username": "username", + "password": "password", + "host": "localhost", + "port": 12345, + "database": "dbname", + }, + } + try: + dummy_schema.load(payload) + except ValidationError as err: + assert err.messages == { + "_schema": ['Engine "dummy_engine" is not a valid engine.'] + } + + +def test_database_parameters_schema_no_mixin( + dummy_engine: None, + dummy_schema: "Schema", +) -> None: + from superset.models.core import ConfigurationMethod + + payload = { + "engine": "invalid_engine", + "configuration_method": ConfigurationMethod.DYNAMIC_FORM, + "parameters": { + "username": "username", + "password": "password", + "host": "localhost", + "port": 12345, + "database": "dbname", + }, + } + try: + dummy_schema.load(payload) + except ValidationError as err: + assert err.messages == { + "_schema": [ + ( + 'Engine spec "InvalidEngine" does not support ' + "being configured via individual parameters." + ) + ] + } + + +def test_database_parameters_schema_mixin_invalid_type( + dummy_engine: None, + dummy_schema: "Schema", +) -> None: + from superset.models.core import ConfigurationMethod + + payload = { + "engine": "dummy_engine", + "configuration_method": ConfigurationMethod.DYNAMIC_FORM, + "parameters": { + "username": "username", + "password": "password", + "host": "localhost", + "port": "badport", + "database": "dbname", + }, + } + try: + dummy_schema.load(payload) + except ValidationError as err: + assert err.messages == {"port": ["Not a valid integer."]} + + +def test_rename_encrypted_extra() -> None: + """ + Test that ``encrypted_extra`` gets renamed to ``masked_encrypted_extra``. 
+ """ + from superset.databases.schemas import ConfigurationMethod, DatabasePostSchema + + schema = DatabasePostSchema() + + # current schema + payload = schema.load( + { + "database_name": "My database", + "masked_encrypted_extra": "{}", + } + ) + assert payload == { + "database_name": "My database", + "configuration_method": ConfigurationMethod.SQLALCHEMY_FORM, + "masked_encrypted_extra": "{}", + } + + # previous schema + payload = schema.load( + { + "database_name": "My database", + "encrypted_extra": "{}", + } + ) + assert payload == { + "database_name": "My database", + "configuration_method": ConfigurationMethod.SQLALCHEMY_FORM, + "masked_encrypted_extra": "{}", + } diff --git a/tests/unit_tests/databases/ssh_tunnel/__init__.py b/tests/unit_tests/databases/ssh_tunnel/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/databases/ssh_tunnel/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/databases/ssh_tunnel/commands/__init__.py b/tests/unit_tests/databases/ssh_tunnel/commands/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/databases/ssh_tunnel/commands/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/databases/ssh_tunnel/commands/create_test.py b/tests/unit_tests/databases/ssh_tunnel/commands/create_test.py new file mode 100644 index 0000000000000..2a5738ebd396a --- /dev/null +++ b/tests/unit_tests/databases/ssh_tunnel/commands/create_test.py @@ -0,0 +1,68 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from typing import Iterator + +import pytest +from sqlalchemy.orm.session import Session + +from superset.databases.ssh_tunnel.commands.exceptions import SSHTunnelInvalidError + + +def test_create_ssh_tunnel_command() -> None: + from superset.databases.ssh_tunnel.commands.create import CreateSSHTunnelCommand + from superset.databases.ssh_tunnel.models import SSHTunnel + from superset.models.core import Database + + db = Database(id=1, database_name="my_database", sqlalchemy_uri="sqlite://") + + properties = { + "database_id": db.id, + "server_address": "123.132.123.1", + "server_port": "3005", + "username": "foo", + "password": "bar", + } + + result = CreateSSHTunnelCommand(db.id, properties).run() + + assert result is not None + assert isinstance(result, SSHTunnel) + + +def test_create_ssh_tunnel_command_invalid_params() -> None: + from superset.databases.ssh_tunnel.commands.create import CreateSSHTunnelCommand + from superset.databases.ssh_tunnel.models import SSHTunnel + from superset.models.core import Database + + db = Database(id=1, database_name="my_database", sqlalchemy_uri="sqlite://") + + # If we are trying to create a tunnel with a private_key_password + # then a private_key is mandatory + properties = { + "database_id": db.id, + "server_address": "123.132.123.1", + "server_port": "3005", + "username": "foo", + "private_key_password": "bar", + } + + command = CreateSSHTunnelCommand(db.id, properties) + + with pytest.raises(SSHTunnelInvalidError) as excinfo: + command.run() + assert str(excinfo.value) == ("SSH Tunnel parameters are invalid.") diff --git a/tests/unit_tests/databases/ssh_tunnel/commands/delete_test.py b/tests/unit_tests/databases/ssh_tunnel/commands/delete_test.py new file mode 100644 index 0000000000000..b5adf765fa5ab --- /dev/null +++ b/tests/unit_tests/databases/ssh_tunnel/commands/delete_test.py @@ -0,0 +1,73 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
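+# The fixture below persists a Database with an attached SSHTunnel; the test then
+# runs DeleteSSHTunnelCommand (with the SSH-tunneling feature flag mocked on) and
+# verifies that the tunnel can no longer be looked up through DatabaseDAO.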
+ +from typing import Iterator + +import pytest +from pytest_mock import MockFixture +from sqlalchemy.orm.session import Session + + +@pytest.fixture +def session_with_data(session: Session) -> Iterator[Session]: + from superset.connectors.sqla.models import SqlaTable + from superset.databases.ssh_tunnel.models import SSHTunnel + from superset.models.core import Database + + engine = session.get_bind() + SqlaTable.metadata.create_all(engine) # pylint: disable=no-member + + db = Database(database_name="my_database", sqlalchemy_uri="sqlite://") + sqla_table = SqlaTable( + table_name="my_sqla_table", + columns=[], + metrics=[], + database=db, + ) + ssh_tunnel = SSHTunnel( + database_id=db.id, + database=db, + ) + + session.add(db) + session.add(sqla_table) + session.add(ssh_tunnel) + session.flush() + yield session + session.rollback() + + +def test_delete_ssh_tunnel_command( + mocker: MockFixture, session_with_data: Session +) -> None: + from superset.databases.dao import DatabaseDAO + from superset.databases.ssh_tunnel.commands.delete import DeleteSSHTunnelCommand + from superset.databases.ssh_tunnel.models import SSHTunnel + + result = DatabaseDAO.get_ssh_tunnel(1) + + assert result + assert isinstance(result, SSHTunnel) + assert 1 == result.database_id + mocker.patch( + "superset.databases.ssh_tunnel.commands.delete.is_feature_enabled", + return_value=True, + ) + DeleteSSHTunnelCommand(1).run() + result = DatabaseDAO.get_ssh_tunnel(1) + + assert result is None diff --git a/tests/unit_tests/databases/ssh_tunnel/commands/update_test.py b/tests/unit_tests/databases/ssh_tunnel/commands/update_test.py new file mode 100644 index 0000000000000..58f90054ccd1f --- /dev/null +++ b/tests/unit_tests/databases/ssh_tunnel/commands/update_test.py @@ -0,0 +1,93 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
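+# These tests cover UpdateSSHTunnelCommand: a plain server_address update, and the
+# validation error raised when a private_key_password is supplied without the
+# corresponding private_key.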
+
+from typing import Iterator
+
+import pytest
+from sqlalchemy.orm.session import Session
+
+from superset.databases.ssh_tunnel.commands.exceptions import SSHTunnelInvalidError
+
+
+@pytest.fixture
+def session_with_data(session: Session) -> Iterator[Session]:
+    from superset.connectors.sqla.models import SqlaTable
+    from superset.databases.ssh_tunnel.models import SSHTunnel
+    from superset.models.core import Database
+
+    engine = session.get_bind()
+    SqlaTable.metadata.create_all(engine)  # pylint: disable=no-member
+
+    db = Database(database_name="my_database", sqlalchemy_uri="sqlite://")
+    sqla_table = SqlaTable(
+        table_name="my_sqla_table",
+        columns=[],
+        metrics=[],
+        database=db,
+    )
+    ssh_tunnel = SSHTunnel(database_id=db.id, database=db, server_address="Test")
+
+    session.add(db)
+    session.add(sqla_table)
+    session.add(ssh_tunnel)
+    session.flush()
+    yield session
+    session.rollback()
+
+
+def test_update_ssh_tunnel_command(session_with_data: Session) -> None:
+    from superset.databases.dao import DatabaseDAO
+    from superset.databases.ssh_tunnel.commands.update import UpdateSSHTunnelCommand
+    from superset.databases.ssh_tunnel.models import SSHTunnel
+
+    result = DatabaseDAO.get_ssh_tunnel(1)
+
+    assert result
+    assert isinstance(result, SSHTunnel)
+    assert 1 == result.database_id
+    assert "Test" == result.server_address
+
+    update_payload = {"server_address": "Test2"}
+    UpdateSSHTunnelCommand(1, update_payload).run()
+
+    result = DatabaseDAO.get_ssh_tunnel(1)
+
+    assert result
+    assert isinstance(result, SSHTunnel)
+    assert "Test2" == result.server_address
+
+
+def test_update_ssh_tunnel_invalid_params(session_with_data: Session) -> None:
+    from superset.databases.dao import DatabaseDAO
+    from superset.databases.ssh_tunnel.commands.update import UpdateSSHTunnelCommand
+    from superset.databases.ssh_tunnel.models import SSHTunnel
+
+    result = DatabaseDAO.get_ssh_tunnel(1)
+
+    assert result
+    assert isinstance(result, SSHTunnel)
+    assert 1 == result.database_id
+    assert "Test" == result.server_address
+
+    # If we are trying to update a tunnel with a private_key_password
+    # then a private_key is mandatory
+    update_payload = {"private_key_password": "pass"}
+    command = UpdateSSHTunnelCommand(1, update_payload)
+
+    with pytest.raises(SSHTunnelInvalidError) as excinfo:
+        command.run()
+    assert str(excinfo.value) == ("SSH Tunnel parameters are invalid.")
diff --git a/tests/unit_tests/databases/ssh_tunnel/dao_tests.py b/tests/unit_tests/databases/ssh_tunnel/dao_tests.py
new file mode 100644
index 0000000000000..ae5b6e9bd3c39
--- /dev/null
+++ b/tests/unit_tests/databases/ssh_tunnel/dao_tests.py
@@ -0,0 +1,43 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
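+# Minimal DAO-level check: SSHTunnelDAO.create is called with a plain properties
+# dict (database id, server address, port, username, password) and is expected to
+# return an SSHTunnel instance.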
+ +from typing import Iterator + +import pytest +from sqlalchemy.orm.session import Session + + +def test_create_ssh_tunnel(): + from superset.databases.dao import DatabaseDAO + from superset.databases.ssh_tunnel.dao import SSHTunnelDAO + from superset.databases.ssh_tunnel.models import SSHTunnel + from superset.models.core import Database + + db = Database(id=1, database_name="my_database", sqlalchemy_uri="sqlite://") + + properties = { + "database_id": db.id, + "server_address": "123.132.123.1", + "server_port": "3005", + "username": "foo", + "password": "bar", + } + + result = SSHTunnelDAO.create(properties) + + assert result is not None + assert isinstance(result, SSHTunnel) diff --git a/tests/unit_tests/databases/utils_test.py b/tests/unit_tests/databases/utils_test.py new file mode 100644 index 0000000000000..e402ced2a529f --- /dev/null +++ b/tests/unit_tests/databases/utils_test.py @@ -0,0 +1,40 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from sqlalchemy.engine.url import make_url +from sqlalchemy.orm.session import Session + +from superset.databases.utils import make_url_safe + + +def test_make_url_safe_string(session: Session) -> None: + """ + Test converting a string to a safe uri + """ + uri_string = "postgresql+psycopg2://superset:***@127.0.0.1:5432/superset" + uri_safe = make_url_safe(uri_string) + assert str(uri_safe) == uri_string + assert uri_safe == make_url(uri_string) + + +def test_make_url_safe_url(session: Session) -> None: + """ + Test converting a url to a safe uri + """ + uri = make_url("postgresql+psycopg2://superset:***@127.0.0.1:5432/superset") + uri_safe = make_url_safe(uri) + assert uri_safe == uri diff --git a/tests/unit_tests/dataframe_test.py b/tests/unit_tests/dataframe_test.py new file mode 100644 index 0000000000000..f0d9bc31b064b --- /dev/null +++ b/tests/unit_tests/dataframe_test.py @@ -0,0 +1,205 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
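+# Rationale assumed from the test names and expected values below: integers larger
+# than JavaScript's Number.MAX_SAFE_INTEGER (2**53 - 1) should be serialized as
+# strings, NaT values as None, and datetimes outside the nanosecond-resolution
+# pandas.Timestamp range should be left as Python datetime objects.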
+# pylint: disable=unused-argument, import-outside-toplevel +from datetime import datetime + +import pytest +from pandas import Timestamp +from pandas._libs.tslibs import NaT + +from superset.dataframe import df_to_records +from superset.superset_typing import DbapiDescription + + +def test_df_to_records() -> None: + from superset.db_engine_specs import BaseEngineSpec + from superset.result_set import SupersetResultSet + + data = [("a1", "b1", "c1"), ("a2", "b2", "c2")] + cursor_descr: DbapiDescription = [ + (column, "string", None, None, None, None, False) for column in ("a", "b", "c") + ] + results = SupersetResultSet(data, cursor_descr, BaseEngineSpec) + df = results.to_pandas_df() + + assert df_to_records(df) == [ + {"a": "a1", "b": "b1", "c": "c1"}, + {"a": "a2", "b": "b2", "c": "c2"}, + ] + + +def test_df_to_records_NaT_type() -> None: + from superset.db_engine_specs import BaseEngineSpec + from superset.result_set import SupersetResultSet + + data = [(NaT,), (Timestamp("2023-01-06 20:50:31.749000+0000", tz="UTC"),)] + cursor_descr: DbapiDescription = [ + ("date", "timestamp with time zone", None, None, None, None, False) + ] + results = SupersetResultSet(data, cursor_descr, BaseEngineSpec) + df = results.to_pandas_df() + + assert df_to_records(df) == [ + {"date": None}, + {"date": "2023-01-06 20:50:31.749000+00:00"}, + ] + + +def test_df_to_records_mixed_emoji_type() -> None: + from superset.db_engine_specs import BaseEngineSpec + from superset.result_set import SupersetResultSet + + data = [ + ("What's up?", "This is a string text", 1), + ("What's up?", "This is a string with an 😍 added", 2), + ("What's up?", NaT, 3), + ("What's up?", "Last emoji 😁", 4), + ] + + cursor_descr: DbapiDescription = [ + ("question", "varchar", None, None, None, None, False), + ("response", "varchar", None, None, None, None, False), + ("count", "integer", None, None, None, None, False), + ] + + results = SupersetResultSet(data, cursor_descr, BaseEngineSpec) + df = results.to_pandas_df() + + assert df_to_records(df) == [ + {"question": "What's up?", "response": "This is a string text", "count": 1}, + { + "question": "What's up?", + "response": "This is a string with an 😍 added", + "count": 2, + }, + { + "question": "What's up?", + "response": None, + "count": 3, + }, + { + "question": "What's up?", + "response": "Last emoji 😁", + "count": 4, + }, + ] + + +def test_df_to_records_mixed_accent_type() -> None: + from superset.db_engine_specs import BaseEngineSpec + from superset.result_set import SupersetResultSet + + data = [ + ("What's up?", "This is a string text", 1), + ("What's up?", "This is a string with áccent", 2), + ("What's up?", NaT, 3), + ("What's up?", "móre áccent", 4), + ] + + cursor_descr: DbapiDescription = [ + ("question", "varchar", None, None, None, None, False), + ("response", "varchar", None, None, None, None, False), + ("count", "integer", None, None, None, None, False), + ] + + results = SupersetResultSet(data, cursor_descr, BaseEngineSpec) + df = results.to_pandas_df() + + assert df_to_records(df) == [ + {"question": "What's up?", "response": "This is a string text", "count": 1}, + { + "question": "What's up?", + "response": "This is a string with áccent", + "count": 2, + }, + { + "question": "What's up?", + "response": None, + "count": 3, + }, + { + "question": "What's up?", + "response": "móre áccent", + "count": 4, + }, + ] + + +def test_js_max_int() -> None: + from superset.db_engine_specs import BaseEngineSpec + from superset.result_set import SupersetResultSet + + data = [(1, 
1239162456494753670, "c1"), (2, 100, "c2")] + cursor_descr: DbapiDescription = [ + ("a", "int", None, None, None, None, False), + ("b", "int", None, None, None, None, False), + ("c", "string", None, None, None, None, False), + ] + results = SupersetResultSet(data, cursor_descr, BaseEngineSpec) + df = results.to_pandas_df() + + assert df_to_records(df) == [ + {"a": 1, "b": "1239162456494753670", "c": "c1"}, + {"a": 2, "b": 100, "c": "c2"}, + ] + + +@pytest.mark.parametrize( + "input_, expected", + [ + pytest.param( + [ + (datetime.strptime("1677-09-22 00:12:43", "%Y-%m-%d %H:%M:%S"), 1), + (datetime.strptime("2262-04-11 23:47:17", "%Y-%m-%d %H:%M:%S"), 2), + ], + [ + { + "a": datetime.strptime("1677-09-22 00:12:43", "%Y-%m-%d %H:%M:%S"), + "b": 1, + }, + { + "a": datetime.strptime("2262-04-11 23:47:17", "%Y-%m-%d %H:%M:%S"), + "b": 2, + }, + ], + id="timestamp conversion fail", + ), + pytest.param( + [ + (datetime.strptime("1677-09-22 00:12:44", "%Y-%m-%d %H:%M:%S"), 1), + (datetime.strptime("2262-04-11 23:47:16", "%Y-%m-%d %H:%M:%S"), 2), + ], + [ + {"a": Timestamp("1677-09-22 00:12:44"), "b": 1}, + {"a": Timestamp("2262-04-11 23:47:16"), "b": 2}, + ], + id="timestamp conversion success", + ), + ], +) +def test_max_pandas_timestamp(input_, expected) -> None: + from superset.db_engine_specs import BaseEngineSpec + from superset.result_set import SupersetResultSet + + cursor_descr: DbapiDescription = [ + ("a", "datetime", None, None, None, None, False), + ("b", "int", None, None, None, None, False), + ] + results = SupersetResultSet(input_, cursor_descr, BaseEngineSpec) + df = results.to_pandas_df() + + assert df_to_records(df) == expected diff --git a/tests/unit_tests/datasets/__init__.py b/tests/unit_tests/datasets/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/datasets/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/datasets/commands/__init__.py b/tests/unit_tests/datasets/commands/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/datasets/commands/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/datasets/commands/export_test.py b/tests/unit_tests/datasets/commands/export_test.py new file mode 100644 index 0000000000000..c3ad4f764c4f3 --- /dev/null +++ b/tests/unit_tests/datasets/commands/export_test.py @@ -0,0 +1,208 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# pylint: disable=import-outside-toplevel, unused-argument, unused-import + +import json + +from sqlalchemy.orm.session import Session + + +def test_export(session: Session) -> None: + """ + Test exporting a dataset. + """ + from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn + from superset.datasets.commands.export import ExportDatasetsCommand + from superset.models.core import Database + + engine = session.get_bind() + SqlaTable.metadata.create_all(engine) # pylint: disable=no-member + + database = Database(database_name="my_database", sqlalchemy_uri="sqlite://") + session.add(database) + session.flush() + + columns = [ + TableColumn(column_name="ds", is_dttm=1, type="TIMESTAMP"), + TableColumn(column_name="user_id", type="INTEGER"), + TableColumn(column_name="revenue", type="INTEGER"), + TableColumn(column_name="expenses", type="INTEGER"), + TableColumn( + column_name="profit", + type="INTEGER", + expression="revenue-expenses", + extra=json.dumps({"certified_by": "User"}), + ), + ] + metrics = [ + SqlMetric( + metric_name="cnt", + expression="COUNT(*)", + extra=json.dumps({"warning_markdown": None}), + ), + ] + + sqla_table = SqlaTable( + table_name="my_table", + columns=columns, + metrics=metrics, + main_dttm_col="ds", + database=database, + offset=-8, + description="This is the description", + is_featured=1, + cache_timeout=3600, + schema="my_schema", + sql=None, + params=json.dumps( + { + "remote_id": 64, + "database_name": "examples", + "import_time": 1606677834, + } + ), + perm=None, + filter_select_enabled=1, + fetch_values_predicate="foo IN (1, 2)", + is_sqllab_view=0, # no longer used? 
+ template_params=json.dumps({"answer": "42"}), + schema_perm=None, + extra=json.dumps({"warning_markdown": "*WARNING*"}), + ) + + export = list( + ExportDatasetsCommand._export(sqla_table) # pylint: disable=protected-access + ) + assert export == [ + ( + "datasets/my_database/my_table.yaml", + f"""table_name: my_table +main_dttm_col: ds +description: This is the description +default_endpoint: null +offset: -8 +cache_timeout: 3600 +schema: my_schema +sql: null +params: + remote_id: 64 + database_name: examples + import_time: 1606677834 +template_params: + answer: '42' +filter_select_enabled: 1 +fetch_values_predicate: foo IN (1, 2) +extra: + warning_markdown: '*WARNING*' +uuid: null +metrics: +- metric_name: cnt + verbose_name: null + metric_type: null + expression: COUNT(*) + description: null + d3format: null + extra: + warning_markdown: null + warning_text: null +columns: +- column_name: profit + verbose_name: null + is_dttm: null + is_active: null + type: INTEGER + advanced_data_type: null + groupby: null + filterable: null + expression: revenue-expenses + description: null + python_date_format: null + extra: + certified_by: User +- column_name: ds + verbose_name: null + is_dttm: 1 + is_active: null + type: TIMESTAMP + advanced_data_type: null + groupby: null + filterable: null + expression: null + description: null + python_date_format: null + extra: null +- column_name: user_id + verbose_name: null + is_dttm: null + is_active: null + type: INTEGER + advanced_data_type: null + groupby: null + filterable: null + expression: null + description: null + python_date_format: null + extra: null +- column_name: expenses + verbose_name: null + is_dttm: null + is_active: null + type: INTEGER + advanced_data_type: null + groupby: null + filterable: null + expression: null + description: null + python_date_format: null + extra: null +- column_name: revenue + verbose_name: null + is_dttm: null + is_active: null + type: INTEGER + advanced_data_type: null + groupby: null + filterable: null + expression: null + description: null + python_date_format: null + extra: null +version: 1.0.0 +database_uuid: {database.uuid} +""", + ), + ( + "databases/my_database.yaml", + f"""database_name: my_database +sqlalchemy_uri: sqlite:// +cache_timeout: null +expose_in_sqllab: true +allow_run_async: false +allow_ctas: false +allow_cvas: false +allow_dml: false +allow_file_upload: false +extra: + metadata_params: {{}} + engine_params: {{}} + metadata_cache_timeout: {{}} + schemas_allowed_for_file_upload: [] +uuid: {database.uuid} +version: 1.0.0 +""", + ), + ] diff --git a/tests/unit_tests/datasets/commands/importers/__init__.py b/tests/unit_tests/datasets/commands/importers/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/datasets/commands/importers/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/datasets/commands/importers/v1/__init__.py b/tests/unit_tests/datasets/commands/importers/v1/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/datasets/commands/importers/v1/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/datasets/commands/importers/v1/import_test.py b/tests/unit_tests/datasets/commands/importers/v1/import_test.py new file mode 100644 index 0000000000000..5b52ac7f1da99 --- /dev/null +++ b/tests/unit_tests/datasets/commands/importers/v1/import_test.py @@ -0,0 +1,486 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# pylint: disable=import-outside-toplevel, unused-argument, unused-import, invalid-name + +import copy +import json +import re +import uuid +from typing import Any, Dict +from unittest.mock import Mock, patch + +import pytest +from flask import current_app +from sqlalchemy.orm.session import Session + +from superset.datasets.commands.exceptions import DatasetForbiddenDataURI +from superset.datasets.commands.importers.v1.utils import validate_data_uri + + +def test_import_dataset(session: Session) -> None: + """ + Test importing a dataset. 
+ """ + from superset.connectors.sqla.models import SqlaTable + from superset.datasets.commands.importers.v1.utils import import_dataset + from superset.models.core import Database + + engine = session.get_bind() + SqlaTable.metadata.create_all(engine) # pylint: disable=no-member + + database = Database(database_name="my_database", sqlalchemy_uri="sqlite://") + session.add(database) + session.flush() + + dataset_uuid = uuid.uuid4() + config = { + "table_name": "my_table", + "main_dttm_col": "ds", + "description": "This is the description", + "default_endpoint": None, + "offset": -8, + "cache_timeout": 3600, + "schema": "my_schema", + "sql": None, + "params": { + "remote_id": 64, + "database_name": "examples", + "import_time": 1606677834, + }, + "template_params": { + "answer": "42", + }, + "filter_select_enabled": True, + "fetch_values_predicate": "foo IN (1, 2)", + "extra": {"warning_markdown": "*WARNING*"}, + "uuid": dataset_uuid, + "metrics": [ + { + "metric_name": "cnt", + "verbose_name": None, + "metric_type": None, + "expression": "COUNT(*)", + "description": None, + "d3format": None, + "extra": {"warning_markdown": None}, + "warning_text": None, + } + ], + "columns": [ + { + "column_name": "profit", + "verbose_name": None, + "is_dttm": None, + "is_active": None, + "type": "INTEGER", + "groupby": None, + "filterable": None, + "expression": "revenue-expenses", + "description": None, + "python_date_format": None, + "extra": { + "certified_by": "User", + }, + } + ], + "database_uuid": database.uuid, + "database_id": database.id, + } + + sqla_table = import_dataset(session, config) + assert sqla_table.table_name == "my_table" + assert sqla_table.main_dttm_col == "ds" + assert sqla_table.description == "This is the description" + assert sqla_table.default_endpoint is None + assert sqla_table.offset == -8 + assert sqla_table.cache_timeout == 3600 + assert sqla_table.schema == "my_schema" + assert sqla_table.sql is None + assert sqla_table.params == json.dumps( + {"remote_id": 64, "database_name": "examples", "import_time": 1606677834} + ) + assert sqla_table.template_params == json.dumps({"answer": "42"}) + assert sqla_table.filter_select_enabled is True + assert sqla_table.fetch_values_predicate == "foo IN (1, 2)" + assert sqla_table.extra == '{"warning_markdown": "*WARNING*"}' + assert sqla_table.uuid == dataset_uuid + assert len(sqla_table.metrics) == 1 + assert sqla_table.metrics[0].metric_name == "cnt" + assert sqla_table.metrics[0].verbose_name is None + assert sqla_table.metrics[0].metric_type is None + assert sqla_table.metrics[0].expression == "COUNT(*)" + assert sqla_table.metrics[0].description is None + assert sqla_table.metrics[0].d3format is None + assert sqla_table.metrics[0].extra == '{"warning_markdown": null}' + assert sqla_table.metrics[0].warning_text is None + assert len(sqla_table.columns) == 1 + assert sqla_table.columns[0].column_name == "profit" + assert sqla_table.columns[0].verbose_name is None + assert sqla_table.columns[0].is_dttm is False + assert sqla_table.columns[0].is_active is True + assert sqla_table.columns[0].type == "INTEGER" + assert sqla_table.columns[0].groupby is True + assert sqla_table.columns[0].filterable is True + assert sqla_table.columns[0].expression == "revenue-expenses" + assert sqla_table.columns[0].description is None + assert sqla_table.columns[0].python_date_format is None + assert sqla_table.columns[0].extra == '{"certified_by": "User"}' + assert sqla_table.database.uuid == database.uuid + assert sqla_table.database.id == 
database.id + + +def test_import_dataset_duplicate_column(session: Session) -> None: + """ + Test importing a dataset with a column that already exists. + """ + from superset.columns.models import Column as NewColumn + from superset.connectors.sqla.models import SqlaTable, TableColumn + from superset.datasets.commands.importers.v1.utils import import_dataset + from superset.models.core import Database + + engine = session.get_bind() + SqlaTable.metadata.create_all(engine) # pylint: disable=no-member + + dataset_uuid = uuid.uuid4() + + database = Database(database_name="my_database", sqlalchemy_uri="sqlite://") + + session.add(database) + session.flush() + + dataset = SqlaTable( + uuid=dataset_uuid, table_name="existing_dataset", database_id=database.id + ) + column = TableColumn(column_name="existing_column") + session.add(dataset) + session.add(column) + session.flush() + + config = { + "table_name": dataset.table_name, + "main_dttm_col": "ds", + "description": "This is the description", + "default_endpoint": None, + "offset": -8, + "cache_timeout": 3600, + "schema": "my_schema", + "sql": None, + "params": { + "remote_id": 64, + "database_name": "examples", + "import_time": 1606677834, + }, + "template_params": { + "answer": "42", + }, + "filter_select_enabled": True, + "fetch_values_predicate": "foo IN (1, 2)", + "extra": {"warning_markdown": "*WARNING*"}, + "uuid": dataset_uuid, + "metrics": [ + { + "metric_name": "cnt", + "verbose_name": None, + "metric_type": None, + "expression": "COUNT(*)", + "description": None, + "d3format": None, + "extra": {"warning_markdown": None}, + "warning_text": None, + } + ], + "columns": [ + { + "column_name": column.column_name, + "verbose_name": None, + "is_dttm": None, + "is_active": None, + "type": "INTEGER", + "groupby": None, + "filterable": None, + "expression": "revenue-expenses", + "description": None, + "python_date_format": None, + "extra": { + "certified_by": "User", + }, + } + ], + "database_uuid": database.uuid, + "database_id": database.id, + } + + sqla_table = import_dataset(session, config, overwrite=True) + assert sqla_table.table_name == dataset.table_name + assert sqla_table.main_dttm_col == "ds" + assert sqla_table.description == "This is the description" + assert sqla_table.default_endpoint is None + assert sqla_table.offset == -8 + assert sqla_table.cache_timeout == 3600 + assert sqla_table.schema == "my_schema" + assert sqla_table.sql is None + assert sqla_table.params == json.dumps( + {"remote_id": 64, "database_name": "examples", "import_time": 1606677834} + ) + assert sqla_table.template_params == json.dumps({"answer": "42"}) + assert sqla_table.filter_select_enabled is True + assert sqla_table.fetch_values_predicate == "foo IN (1, 2)" + assert sqla_table.extra == '{"warning_markdown": "*WARNING*"}' + assert sqla_table.uuid == dataset_uuid + assert len(sqla_table.metrics) == 1 + assert sqla_table.metrics[0].metric_name == "cnt" + assert sqla_table.metrics[0].verbose_name is None + assert sqla_table.metrics[0].metric_type is None + assert sqla_table.metrics[0].expression == "COUNT(*)" + assert sqla_table.metrics[0].description is None + assert sqla_table.metrics[0].d3format is None + assert sqla_table.metrics[0].extra == '{"warning_markdown": null}' + assert sqla_table.metrics[0].warning_text is None + assert len(sqla_table.columns) == 1 + assert sqla_table.columns[0].column_name == column.column_name + assert sqla_table.columns[0].verbose_name is None + assert sqla_table.columns[0].is_dttm is False + assert 
sqla_table.columns[0].is_active is True + assert sqla_table.columns[0].type == "INTEGER" + assert sqla_table.columns[0].groupby is True + assert sqla_table.columns[0].filterable is True + assert sqla_table.columns[0].expression == "revenue-expenses" + assert sqla_table.columns[0].description is None + assert sqla_table.columns[0].python_date_format is None + assert sqla_table.columns[0].extra == '{"certified_by": "User"}' + assert sqla_table.database.uuid == database.uuid + assert sqla_table.database.id == database.id + + +def test_import_column_extra_is_string(session: Session) -> None: + """ + Test importing a dataset when the column extra is a string. + """ + from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn + from superset.datasets.commands.importers.v1.utils import import_dataset + from superset.datasets.schemas import ImportV1DatasetSchema + from superset.models.core import Database + + engine = session.get_bind() + SqlaTable.metadata.create_all(engine) # pylint: disable=no-member + + database = Database(database_name="my_database", sqlalchemy_uri="sqlite://") + session.add(database) + session.flush() + + dataset_uuid = uuid.uuid4() + yaml_config: Dict[str, Any] = { + "version": "1.0.0", + "table_name": "my_table", + "main_dttm_col": "ds", + "description": "This is the description", + "default_endpoint": None, + "offset": -8, + "cache_timeout": 3600, + "schema": "my_schema", + "sql": None, + "params": { + "remote_id": 64, + "database_name": "examples", + "import_time": 1606677834, + }, + "template_params": { + "answer": "42", + }, + "filter_select_enabled": True, + "fetch_values_predicate": "foo IN (1, 2)", + "extra": '{"warning_markdown": "*WARNING*"}', + "uuid": dataset_uuid, + "metrics": [ + { + "metric_name": "cnt", + "verbose_name": None, + "metric_type": None, + "expression": "COUNT(*)", + "description": None, + "d3format": None, + "extra": '{"warning_markdown": null}', + "warning_text": None, + } + ], + "columns": [ + { + "column_name": "profit", + "verbose_name": None, + "is_dttm": False, + "is_active": True, + "type": "INTEGER", + "groupby": False, + "filterable": False, + "expression": "revenue-expenses", + "description": None, + "python_date_format": None, + "extra": '{"certified_by": "User"}', + } + ], + "database_uuid": database.uuid, + } + + # the Marshmallow schema should convert strings to objects + schema = ImportV1DatasetSchema() + dataset_config = schema.load(yaml_config) + dataset_config["database_id"] = database.id + sqla_table = import_dataset(session, dataset_config) + + assert sqla_table.metrics[0].extra == '{"warning_markdown": null}' + assert sqla_table.columns[0].extra == '{"certified_by": "User"}' + assert sqla_table.extra == '{"warning_markdown": "*WARNING*"}' + + +@patch("superset.datasets.commands.importers.v1.utils.request") +def test_import_column_allowed_data_url(request: Mock, session: Session) -> None: + """ + Test importing a dataset when using data key to fetch data from a URL. 
+ """ + import io + + from superset.connectors.sqla.models import SqlaTable + from superset.datasets.commands.importers.v1.utils import import_dataset + from superset.datasets.schemas import ImportV1DatasetSchema + from superset.models.core import Database + + request.urlopen.return_value = io.StringIO("col1\nvalue1\nvalue2\n") + + engine = session.get_bind() + SqlaTable.metadata.create_all(engine) # pylint: disable=no-member + + database = Database(database_name="my_database", sqlalchemy_uri="sqlite://") + session.add(database) + session.flush() + + dataset_uuid = uuid.uuid4() + yaml_config: Dict[str, Any] = { + "version": "1.0.0", + "table_name": "my_table", + "main_dttm_col": "ds", + "description": "This is the description", + "default_endpoint": None, + "offset": -8, + "cache_timeout": 3600, + "schema": None, + "sql": None, + "params": { + "remote_id": 64, + "database_name": "examples", + "import_time": 1606677834, + }, + "template_params": None, + "filter_select_enabled": True, + "fetch_values_predicate": None, + "extra": None, + "uuid": dataset_uuid, + "metrics": [], + "columns": [ + { + "column_name": "col1", + "verbose_name": None, + "is_dttm": False, + "is_active": True, + "type": "TEXT", + "groupby": False, + "filterable": False, + "expression": None, + "description": None, + "python_date_format": None, + "extra": None, + } + ], + "database_uuid": database.uuid, + "data": "https://some-external-url.com/data.csv", + } + + # the Marshmallow schema should convert strings to objects + schema = ImportV1DatasetSchema() + dataset_config = schema.load(yaml_config) + dataset_config["database_id"] = database.id + _ = import_dataset(session, dataset_config, force_data=True) + session.connection() + assert [("value1",), ("value2",)] == session.execute( + "SELECT * FROM my_table" + ).fetchall() + + +def test_import_dataset_managed_externally(session: Session) -> None: + """ + Test importing a dataset that is managed externally. 
+ """ + from superset.connectors.sqla.models import SqlaTable + from superset.datasets.commands.importers.v1.utils import import_dataset + from superset.models.core import Database + from tests.integration_tests.fixtures.importexport import dataset_config + + engine = session.get_bind() + SqlaTable.metadata.create_all(engine) # pylint: disable=no-member + + database = Database(database_name="my_database", sqlalchemy_uri="sqlite://") + session.add(database) + session.flush() + + config = copy.deepcopy(dataset_config) + config["is_managed_externally"] = True + config["external_url"] = "https://example.org/my_table" + config["database_id"] = database.id + + sqla_table = import_dataset(session, config) + assert sqla_table.is_managed_externally is True + assert sqla_table.external_url == "https://example.org/my_table" + + +@pytest.mark.parametrize( + "allowed_urls, data_uri, expected, exception_class", + [ + ([r".*"], "https://some-url/data.csv", True, None), + ( + [r"^https://.+\.domain1\.com\/?.*", r"^https://.+\.domain2\.com\/?.*"], + "https://host1.domain1.com/data.csv", + True, + None, + ), + ( + [r"^https://.+\.domain1\.com\/?.*", r"^https://.+\.domain2\.com\/?.*"], + "https://host2.domain1.com/data.csv", + True, + None, + ), + ( + [r"^https://.+\.domain1\.com\/?.*", r"^https://.+\.domain2\.com\/?.*"], + "https://host1.domain2.com/data.csv", + True, + None, + ), + ( + [r"^https://.+\.domain1\.com\/?.*", r"^https://.+\.domain2\.com\/?.*"], + "https://host1.domain3.com/data.csv", + False, + DatasetForbiddenDataURI, + ), + ([], "https://host1.domain3.com/data.csv", False, DatasetForbiddenDataURI), + (["*"], "https://host1.domain3.com/data.csv", False, re.error), + ], +) +def test_validate_data_uri(allowed_urls, data_uri, expected, exception_class): + current_app.config["DATASET_IMPORT_ALLOWED_DATA_URLS"] = allowed_urls + if expected: + validate_data_uri(data_uri) + else: + with pytest.raises(exception_class): + validate_data_uri(data_uri) diff --git a/tests/unit_tests/datasets/conftest.py b/tests/unit_tests/datasets/conftest.py new file mode 100644 index 0000000000000..8d217ae27a7e7 --- /dev/null +++ b/tests/unit_tests/datasets/conftest.py @@ -0,0 +1,124 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from typing import Any, Dict, TYPE_CHECKING + +import pytest + +if TYPE_CHECKING: + from superset.connectors.sqla.models import SqlMetric, TableColumn + + +@pytest.fixture +def columns_default() -> Dict[str, Any]: + """Default props for new columns""" + return { + "changed_by": 1, + "created_by": 1, + "datasets": [], + "tables": [], + "is_additive": False, + "is_aggregation": False, + "is_dimensional": False, + "is_filterable": True, + "is_increase_desired": True, + "is_partition": False, + "is_physical": True, + "is_spatial": False, + "is_temporal": False, + "description": None, + "extra_json": "{}", + "unit": None, + "warning_text": None, + "is_managed_externally": False, + "external_url": None, + } + + +@pytest.fixture +def sample_columns() -> Dict["TableColumn", Dict[str, Any]]: + from superset.connectors.sqla.models import TableColumn + + return { + TableColumn(column_name="ds", is_dttm=1, type="TIMESTAMP"): { + "name": "ds", + "expression": "ds", + "type": "TIMESTAMP", + "advanced_data_type": None, + "is_temporal": True, + "is_physical": True, + }, + TableColumn(column_name="num_boys", type="INTEGER", groupby=True): { + "name": "num_boys", + "expression": "num_boys", + "type": "INTEGER", + "advanced_data_type": None, + "is_dimensional": True, + "is_physical": True, + }, + TableColumn(column_name="region", type="VARCHAR", groupby=True): { + "name": "region", + "expression": "region", + "type": "VARCHAR", + "advanced_data_type": None, + "is_dimensional": True, + "is_physical": True, + }, + TableColumn( + column_name="profit", + type="INTEGER", + groupby=False, + expression="revenue-expenses", + ): { + "name": "profit", + "expression": "revenue-expenses", + "type": "INTEGER", + "advanced_data_type": None, + "is_physical": False, + }, + } + + +@pytest.fixture +def sample_metrics() -> Dict["SqlMetric", Dict[str, Any]]: + from superset.connectors.sqla.models import SqlMetric + + return { + SqlMetric(metric_name="cnt", expression="COUNT(*)", metric_type="COUNT"): { + "name": "cnt", + "expression": "COUNT(*)", + "extra_json": '{"metric_type": "COUNT"}', + "type": "UNKNOWN", + "advanced_data_type": None, + "is_additive": True, + "is_aggregation": True, + "is_filterable": False, + "is_physical": False, + }, + SqlMetric( + metric_name="avg revenue", expression="AVG(revenue)", metric_type="AVG" + ): { + "name": "avg revenue", + "expression": "AVG(revenue)", + "extra_json": '{"metric_type": "AVG"}', + "type": "UNKNOWN", + "advanced_data_type": None, + "is_additive": False, + "is_aggregation": True, + "is_filterable": False, + "is_physical": False, + }, + } diff --git a/tests/unit_tests/datasets/dao/__init__.py b/tests/unit_tests/datasets/dao/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/datasets/dao/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/datasets/dao/dao_tests.py b/tests/unit_tests/datasets/dao/dao_tests.py new file mode 100644 index 0000000000000..350425d08e897 --- /dev/null +++ b/tests/unit_tests/datasets/dao/dao_tests.py @@ -0,0 +1,103 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from typing import Iterator + +import pytest +from sqlalchemy.orm.session import Session + + +@pytest.fixture +def session_with_data(session: Session) -> Iterator[Session]: + from superset.connectors.sqla.models import SqlaTable + from superset.models.core import Database + + engine = session.get_bind() + SqlaTable.metadata.create_all(engine) # pylint: disable=no-member + + db = Database(database_name="my_database", sqlalchemy_uri="sqlite://") + sqla_table = SqlaTable( + table_name="my_sqla_table", + columns=[], + metrics=[], + database=db, + ) + + session.add(db) + session.add(sqla_table) + session.flush() + yield session + session.rollback() + + +def test_datasource_find_by_id_skip_base_filter(session_with_data: Session) -> None: + from superset.connectors.sqla.models import SqlaTable + from superset.datasets.dao import DatasetDAO + + result = DatasetDAO.find_by_id( + 1, + session=session_with_data, + skip_base_filter=True, + ) + + assert result + assert 1 == result.id + assert "my_sqla_table" == result.table_name + assert isinstance(result, SqlaTable) + + +def test_datasource_find_by_id_skip_base_filter_not_found( + session_with_data: Session, +) -> None: + from superset.datasets.dao import DatasetDAO + + result = DatasetDAO.find_by_id( + 125326326, + session=session_with_data, + skip_base_filter=True, + ) + assert result is None + + +def test_datasource_find_by_ids_skip_base_filter(session_with_data: Session) -> None: + from superset.connectors.sqla.models import SqlaTable + from superset.datasets.dao import DatasetDAO + + result = DatasetDAO.find_by_ids( + [1, 125326326], + session=session_with_data, + skip_base_filter=True, + ) + + assert result + assert [1] == list(map(lambda x: x.id, result)) + assert ["my_sqla_table"] == list(map(lambda x: x.table_name, result)) + assert isinstance(result[0], SqlaTable) + + +def test_datasource_find_by_ids_skip_base_filter_not_found( + session_with_data: Session, +) -> None: + from superset.datasets.dao import DatasetDAO + + result = DatasetDAO.find_by_ids( + [125326326, 125326326125326326], + session=session_with_data, + skip_base_filter=True, + ) + + assert len(result) == 0 diff --git a/tests/unit_tests/datasource/dao_tests.py b/tests/unit_tests/datasource/dao_tests.py new file mode 100644 index 0000000000000..16334066d7ba1 --- /dev/null +++ b/tests/unit_tests/datasource/dao_tests.py @@ -0,0 +1,212 @@ +# Licensed to the Apache 
Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from typing import Iterator + +import pytest +from sqlalchemy.orm.session import Session + +from superset.utils.core import DatasourceType + + +@pytest.fixture +def session_with_data(session: Session) -> Iterator[Session]: + from superset.columns.models import Column + from superset.connectors.sqla.models import SqlaTable, TableColumn + from superset.datasets.models import Dataset + from superset.models.core import Database + from superset.models.sql_lab import Query, SavedQuery + from superset.tables.models import Table + + engine = session.get_bind() + SqlaTable.metadata.create_all(engine) # pylint: disable=no-member + + db = Database(database_name="my_database", sqlalchemy_uri="sqlite://") + + columns = [ + TableColumn(column_name="a", type="INTEGER"), + ] + + sqla_table = SqlaTable( + table_name="my_sqla_table", + columns=columns, + metrics=[], + database=db, + ) + + query_obj = Query( + client_id="foo", + database=db, + tab_name="test_tab", + sql_editor_id="test_editor_id", + sql="select * from bar", + select_sql="select * from bar", + executed_sql="select * from bar", + limit=100, + select_as_cta=False, + rows=100, + error_message="none", + results_key="abc", + ) + + saved_query = SavedQuery(database=db, sql="select * from foo") + + table = Table( + name="my_table", + schema="my_schema", + catalog="my_catalog", + database=db, + columns=[], + ) + + dataset = Dataset( + database=table.database, + name="positions", + expression=""" +SELECT array_agg(array[longitude,latitude]) AS position +FROM my_catalog.my_schema.my_table +""", + tables=[table], + columns=[ + Column( + name="position", + expression="array_agg(array[longitude,latitude])", + ), + ], + ) + + session.add(dataset) + session.add(table) + session.add(saved_query) + session.add(query_obj) + session.add(db) + session.add(sqla_table) + session.flush() + yield session + + +def test_get_datasource_sqlatable(session_with_data: Session) -> None: + from superset.connectors.sqla.models import SqlaTable + from superset.datasource.dao import DatasourceDAO + + result = DatasourceDAO.get_datasource( + datasource_type=DatasourceType.TABLE, + datasource_id=1, + session=session_with_data, + ) + + assert 1 == result.id + assert "my_sqla_table" == result.table_name + assert isinstance(result, SqlaTable) + + +def test_get_datasource_query(session_with_data: Session) -> None: + from superset.datasource.dao import DatasourceDAO + from superset.models.sql_lab import Query + + result = DatasourceDAO.get_datasource( + datasource_type=DatasourceType.QUERY, datasource_id=1, session=session_with_data + ) + + assert result.id == 1 + assert isinstance(result, Query) + + +def test_get_datasource_saved_query(session_with_data: Session) -> None: + from superset.datasource.dao import DatasourceDAO + 
from superset.models.sql_lab import SavedQuery + + result = DatasourceDAO.get_datasource( + datasource_type=DatasourceType.SAVEDQUERY, + datasource_id=1, + session=session_with_data, + ) + + assert result.id == 1 + assert isinstance(result, SavedQuery) + + +def test_get_datasource_sl_table(session_with_data: Session) -> None: + from superset.datasource.dao import DatasourceDAO + from superset.tables.models import Table + + result = DatasourceDAO.get_datasource( + datasource_type=DatasourceType.SLTABLE, + datasource_id=1, + session=session_with_data, + ) + + assert result.id == 1 + assert isinstance(result, Table) + + +def test_get_datasource_sl_dataset(session_with_data: Session) -> None: + from superset.datasets.models import Dataset + from superset.datasource.dao import DatasourceDAO + + result = DatasourceDAO.get_datasource( + datasource_type=DatasourceType.DATASET, + datasource_id=1, + session=session_with_data, + ) + + assert result.id == 1 + assert isinstance(result, Dataset) + + +def test_get_datasource_w_str_param(session_with_data: Session) -> None: + from superset.connectors.sqla.models import SqlaTable + from superset.datasets.models import Dataset + from superset.datasource.dao import DatasourceDAO + from superset.tables.models import Table + + assert isinstance( + DatasourceDAO.get_datasource( + datasource_type="table", + datasource_id=1, + session=session_with_data, + ), + SqlaTable, + ) + + assert isinstance( + DatasourceDAO.get_datasource( + datasource_type="sl_table", + datasource_id=1, + session=session_with_data, + ), + Table, + ) + + +def test_get_all_datasources(session_with_data: Session) -> None: + from superset.connectors.sqla.models import SqlaTable + + result = SqlaTable.get_all_datasources(session=session_with_data) + assert len(result) == 1 + + +def test_not_found_datasource(session_with_data: Session) -> None: + from superset.dao.exceptions import DatasourceNotFound + from superset.datasource.dao import DatasourceDAO + + with pytest.raises(DatasourceNotFound): + DatasourceDAO.get_datasource( + datasource_type="table", + datasource_id=500000, + session=session_with_data, + ) diff --git a/tests/unit_tests/db_engine_specs/test_athena.py b/tests/unit_tests/db_engine_specs/test_athena.py new file mode 100644 index 0000000000000..51ec6656aa7f0 --- /dev/null +++ b/tests/unit_tests/db_engine_specs/test_athena.py @@ -0,0 +1,87 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
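
The DatasourceDAO tests above all rely on the same lookup contract: a datasource type plus an id resolves to exactly one model class, and a miss raises DatasourceNotFound. Below is a minimal sketch of that dispatch pattern, assuming a SQLAlchemy session; the names are illustrative only and not the real DAO code in superset/datasource/dao.py.

    from typing import Any, Dict, Type

    from sqlalchemy.orm.session import Session


    class NotFound(Exception):  # stand-in for DatasourceNotFound
        pass


    def get_datasource_sketch(
        session: Session,
        sources: Dict[str, Type[Any]],  # e.g. {"table": SqlaTable, "query": Query, ...}
        datasource_type: str,
        datasource_id: int,
    ) -> Any:
        model = sources.get(datasource_type)
        if model is None:
            raise NotFound(f"unknown datasource type: {datasource_type}")
        datasource = session.query(model).filter_by(id=datasource_id).one_or_none()
        if datasource is None:
            raise NotFound(f"{datasource_type} {datasource_id} does not exist")
        return datasource
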
+# pylint: disable=unused-argument, import-outside-toplevel, protected-access
+import re
+from datetime import datetime
+from typing import Optional
+
+import pytest
+
+from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
+from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm
+from tests.unit_tests.fixtures.common import dttm
+
+SYNTAX_ERROR_REGEX = re.compile(
+    ": mismatched input '(?P<syntax_error>.*?)'. Expecting: "
+)
+
+
+@pytest.mark.parametrize(
+    "target_type,expected_result",
+    [
+        ("Date", "DATE '2019-01-02'"),
+        ("TimeStamp", "TIMESTAMP '2019-01-02 03:04:05.678'"),
+        ("UnknownType", None),
+    ],
+)
+def test_convert_dttm(
+    target_type: str, expected_result: Optional[str], dttm: datetime
+) -> None:
+    from superset.db_engine_specs.athena import AthenaEngineSpec as spec
+
+    assert_convert_dttm(spec, target_type, expected_result, dttm)
+
+
+def test_extract_errors() -> None:
+    """
+    Test that custom error messages are extracted correctly.
+    """
+
+    from superset.db_engine_specs.athena import AthenaEngineSpec
+
+    msg = ": mismatched input 'fromm'. Expecting: "
+    result = AthenaEngineSpec.extract_errors(Exception(msg))
+    assert result == [
+        SupersetError(
+            message='Please check your query for syntax errors at or near "fromm". Then, try running your query again.',
+            error_type=SupersetErrorType.SYNTAX_ERROR,
+            level=ErrorLevel.ERROR,
+            extra={
+                "engine_name": "Amazon Athena",
+                "issue_codes": [
+                    {
+                        "code": 1030,
+                        "message": "Issue 1030 - The query has a syntax error.",
+                    }
+                ],
+            },
+        )
+    ]
+
+
+def test_get_text_clause_with_colon() -> None:
+    """
+    Make sure text clauses don't escape the colon character
+    """
+
+    from superset.db_engine_specs.athena import AthenaEngineSpec
+
+    query = (
+        "SELECT foo FROM tbl WHERE " "abc >= TIMESTAMP '2021-11-26T00\:00\:00.000000'"
+    )
+    text_clause = AthenaEngineSpec.get_text_clause(query)
+    assert text_clause.text == query
diff --git a/tests/unit_tests/db_engine_specs/test_base.py b/tests/unit_tests/db_engine_specs/test_base.py
new file mode 100644
index 0000000000000..868a6bbdc3fe6
--- /dev/null
+++ b/tests/unit_tests/db_engine_specs/test_base.py
@@ -0,0 +1,140 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
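
The Athena error-extraction test above boils down to a named regex group pulling the offending token out of the raw driver message and substituting it into a friendlier template. Here is a hedged sketch of that mechanism; the regex and template are taken from the test expectations, but the function name is invented and is not the engine spec's API.

    import re

    SYNTAX_ERROR = re.compile(": mismatched input '(?P<syntax_error>.*?)'. Expecting: ")
    TEMPLATE = (
        'Please check your query for syntax errors at or near "%(syntax_error)s". '
        "Then, try running your query again."
    )


    def friendly_message(raw: str) -> str:
        # Fall back to the raw message when the pattern does not match.
        match = SYNTAX_ERROR.search(raw)
        return TEMPLATE % match.groupdict() if match else raw


    assert friendly_message(": mismatched input 'fromm'. Expecting: ").startswith(
        'Please check your query for syntax errors at or near "fromm"'
    )
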
+# pylint: disable=unused-argument, import-outside-toplevel, protected-access + +from textwrap import dedent +from typing import Any, Dict, Optional, Type + +import pytest +from sqlalchemy import types + +from superset.utils.core import GenericDataType +from tests.unit_tests.db_engine_specs.utils import assert_column_spec + + +def test_get_text_clause_with_colon() -> None: + """ + Make sure text clauses are correctly escaped + """ + + from superset.db_engine_specs.base import BaseEngineSpec + + text_clause = BaseEngineSpec.get_text_clause( + "SELECT foo FROM tbl WHERE foo = '123:456')" + ) + assert text_clause.text == "SELECT foo FROM tbl WHERE foo = '123\\:456')" + + +def test_parse_sql_single_statement() -> None: + """ + `parse_sql` should properly strip leading and trailing spaces and semicolons + """ + + from superset.db_engine_specs.base import BaseEngineSpec + + queries = BaseEngineSpec.parse_sql(" SELECT foo FROM tbl ; ") + assert queries == ["SELECT foo FROM tbl"] + + +def test_parse_sql_multi_statement() -> None: + """ + For string with multiple SQL-statements `parse_sql` method should return list + where each element represents the single SQL-statement + """ + + from superset.db_engine_specs.base import BaseEngineSpec + + queries = BaseEngineSpec.parse_sql("SELECT foo FROM tbl1; SELECT bar FROM tbl2;") + assert queries == [ + "SELECT foo FROM tbl1", + "SELECT bar FROM tbl2", + ] + + +@pytest.mark.parametrize( + "original,expected", + [ + ( + dedent( + """ +with currency as +( +select 'INR' as cur +) +select * from currency +""" + ), + None, + ), + ( + "SELECT 1 as cnt", + None, + ), + ( + dedent( + """ +select 'INR' as cur +union +select 'AUD' as cur +union +select 'USD' as cur +""" + ), + None, + ), + ], +) +def test_cte_query_parsing(original: types.TypeEngine, expected: str) -> None: + from superset.db_engine_specs.base import BaseEngineSpec + + actual = BaseEngineSpec.get_cte_query(original) + assert actual == expected + + +@pytest.mark.parametrize( + "native_type,sqla_type,attrs,generic_type,is_dttm", + [ + ("SMALLINT", types.SmallInteger, None, GenericDataType.NUMERIC, False), + ("INTEGER", types.Integer, None, GenericDataType.NUMERIC, False), + ("BIGINT", types.BigInteger, None, GenericDataType.NUMERIC, False), + ("DECIMAL", types.Numeric, None, GenericDataType.NUMERIC, False), + ("NUMERIC", types.Numeric, None, GenericDataType.NUMERIC, False), + ("REAL", types.REAL, None, GenericDataType.NUMERIC, False), + ("DOUBLE PRECISION", types.Float, None, GenericDataType.NUMERIC, False), + ("MONEY", types.Numeric, None, GenericDataType.NUMERIC, False), + # String + ("CHAR", types.String, None, GenericDataType.STRING, False), + ("VARCHAR", types.String, None, GenericDataType.STRING, False), + ("TEXT", types.String, None, GenericDataType.STRING, False), + # Temporal + ("DATE", types.Date, None, GenericDataType.TEMPORAL, True), + ("TIMESTAMP", types.TIMESTAMP, None, GenericDataType.TEMPORAL, True), + ("TIME", types.Time, None, GenericDataType.TEMPORAL, True), + # Boolean + ("BOOLEAN", types.Boolean, None, GenericDataType.BOOLEAN, False), + ], +) +def test_get_column_spec( + native_type: str, + sqla_type: Type[types.TypeEngine], + attrs: Optional[Dict[str, Any]], + generic_type: GenericDataType, + is_dttm: bool, +) -> None: + from superset.db_engine_specs.databricks import DatabricksNativeEngineSpec as spec + + assert_column_spec(spec, native_type, sqla_type, attrs, generic_type, is_dttm) diff --git a/tests/unit_tests/db_engine_specs/test_bigquery.py 
b/tests/unit_tests/db_engine_specs/test_bigquery.py new file mode 100644 index 0000000000000..5b9c6a95636d3 --- /dev/null +++ b/tests/unit_tests/db_engine_specs/test_bigquery.py @@ -0,0 +1,314 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# pylint: disable=line-too-long, import-outside-toplevel, protected-access, invalid-name + +import json +from datetime import datetime +from typing import Optional + +import pytest +from pytest_mock import MockFixture +from sqlalchemy import select +from sqlalchemy.sql import sqltypes +from sqlalchemy_bigquery import BigQueryDialect + +from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm +from tests.unit_tests.fixtures.common import dttm + + +def test_get_fields() -> None: + """ + Test the custom ``_get_fields`` method. + + The method adds custom labels (aliases) to the columns to prevent + collision when referencing record fields. Eg, if we had these two + columns: + + name STRING + project STRUCT + + One could write this query: + + SELECT + `name`, + `project`.`name` + FROM + the_table + + But then both columns would get aliased as "name". + + The custom method will replace the fields so that the final query + looks like this: + + SELECT + `name` AS `name`, + `project`.`name` AS project__name + FROM + the_table + + """ + from superset.db_engine_specs.bigquery import BigQueryEngineSpec + + columns = [{"name": "limit"}, {"name": "name"}, {"name": "project.name"}] + fields = BigQueryEngineSpec._get_fields(columns) + + query = select(fields) + assert str(query.compile(dialect=BigQueryDialect())) == ( + "SELECT `limit` AS `limit`, `name` AS `name`, " + "`project`.`name` AS `project__name`" + ) + + +def test_select_star(mocker: MockFixture) -> None: + """ + Test the ``select_star`` method. + + The method removes pseudo-columns from structures inside arrays. While these + pseudo-columns show up as "columns" for metadata reasons, we can't select them + in the query, as opposed to fields from non-array structures. 
+ """ + from superset.db_engine_specs.bigquery import BigQueryEngineSpec + + cols = [ + { + "name": "trailer", + "type": sqltypes.ARRAY(sqltypes.JSON()), + "nullable": True, + "comment": None, + "default": None, + "precision": None, + "scale": None, + "max_length": None, + }, + { + "name": "trailer.key", + "type": sqltypes.String(), + "nullable": True, + "comment": None, + "default": None, + "precision": None, + "scale": None, + "max_length": None, + }, + { + "name": "trailer.value", + "type": sqltypes.String(), + "nullable": True, + "comment": None, + "default": None, + "precision": None, + "scale": None, + "max_length": None, + }, + { + "name": "trailer.email", + "type": sqltypes.String(), + "nullable": True, + "comment": None, + "default": None, + "precision": None, + "scale": None, + "max_length": None, + }, + ] + + # mock the database so we can compile the query + database = mocker.MagicMock() + database.compile_sqla_query = lambda query: str( + query.compile(dialect=BigQueryDialect()) + ) + + engine = mocker.MagicMock() + engine.dialect = BigQueryDialect() + + sql = BigQueryEngineSpec.select_star( + database=database, + table_name="my_table", + engine=engine, + schema=None, + limit=100, + show_cols=True, + indent=True, + latest_partition=False, + cols=cols, + ) + assert ( + sql + == """SELECT `trailer` AS `trailer` +FROM `my_table` +LIMIT :param_1""" + ) + + +def test_get_parameters_from_uri_serializable() -> None: + """ + Test that the result from ``get_parameters_from_uri`` is JSON serializable. + """ + from superset.db_engine_specs.bigquery import BigQueryEngineSpec + + parameters = BigQueryEngineSpec.get_parameters_from_uri( + "bigquery://dbt-tutorial-347100/", + {"access_token": "TOP_SECRET"}, + ) + assert parameters == {"access_token": "TOP_SECRET", "query": {}} + assert json.loads(json.dumps(parameters)) == parameters + + +def test_unmask_encrypted_extra() -> None: + """ + Test that the private key can be reused from the previous ``encrypted_extra``. + """ + from superset.db_engine_specs.bigquery import BigQueryEngineSpec + + old = json.dumps( + { + "credentials_info": { + "project_id": "black-sanctum-314419", + "private_key": "SECRET", + }, + } + ) + new = json.dumps( + { + "credentials_info": { + "project_id": "yellow-unicorn-314419", + "private_key": "XXXXXXXXXX", + }, + } + ) + + assert json.loads(str(BigQueryEngineSpec.unmask_encrypted_extra(old, new))) == { + "credentials_info": { + "project_id": "yellow-unicorn-314419", + "private_key": "SECRET", + }, + } + + +def test_unmask_encrypted_extra_when_empty() -> None: + """ + Test that a None value works for ``encrypted_extra``. + """ + from superset.db_engine_specs.bigquery import BigQueryEngineSpec + + old = None + new = json.dumps( + { + "credentials_info": { + "project_id": "yellow-unicorn-314419", + "private_key": "XXXXXXXXXX", + }, + } + ) + + assert json.loads(str(BigQueryEngineSpec.unmask_encrypted_extra(old, new))) == { + "credentials_info": { + "project_id": "yellow-unicorn-314419", + "private_key": "XXXXXXXXXX", + }, + } + + +def test_unmask_encrypted_extra_when_new_is_empty() -> None: + """ + Test that a None value works for ``encrypted_extra``. 
+ """ + from superset.db_engine_specs.bigquery import BigQueryEngineSpec + + old = json.dumps( + { + "credentials_info": { + "project_id": "black-sanctum-314419", + "private_key": "SECRET", + }, + } + ) + new = None + + assert BigQueryEngineSpec.unmask_encrypted_extra(old, new) is None + + +def test_mask_encrypted_extra_when_empty() -> None: + """ + Test that the encrypted extra will return a none value if the field is empty. + """ + from superset.db_engine_specs.bigquery import BigQueryEngineSpec + + assert BigQueryEngineSpec.mask_encrypted_extra(None) is None + + +def test_parse_error_message() -> None: + """ + Test that we parse a received message and just extract the useful information. + + Example errors: + bigquery error: 400 Syntax error: Table \"case_detail_all_suites\" must be qualified with a dataset (e.g. dataset.table). + + (job ID: ddf30b05-44e8-4fbf-aa29-40bfccaed886) + -----Query Job SQL Follows----- + | . | . | . |\n 1:select * from case_detail_all_suites\n 2:LIMIT 1001\n | . | . | . | + """ + from superset.db_engine_specs.bigquery import BigQueryEngineSpec + + message = 'bigquery error: 400 Syntax error: Table "case_detail_all_suites" must be qualified with a dataset (e.g. dataset.table).\n\n(job ID: ddf30b05-44e8-4fbf-aa29-40bfccaed886)\n\n -----Query Job SQL Follows----- \n\n | . | . | . |\n 1:select * from case_detail_all_suites\n 2:LIMIT 1001\n | . | . | . |' + expected_result = 'bigquery error: 400 Syntax error: Table "case_detail_all_suites" must be qualified with a dataset (e.g. dataset.table).' + assert ( + str(BigQueryEngineSpec.parse_error_exception(Exception(message))) + == expected_result + ) + + +def test_parse_error_raises_exception() -> None: + """ + Test that we handle any exception we might get from calling the parse_error_exception method. + + Example errors: + 400 Syntax error: Expected "(" or keyword UNNEST but got "@" at [4:80] + bigquery error: 400 Table \"case_detail_all_suites\" must be qualified with a dataset (e.g. dataset.table). + """ + from superset.db_engine_specs.bigquery import BigQueryEngineSpec + + message = 'bigquery error: 400 Syntax error: Table "case_detail_all_suites" must be qualified with a dataset (e.g. dataset.table).' + message_2 = "6" + expected_result = 'bigquery error: 400 Syntax error: Table "case_detail_all_suites" must be qualified with a dataset (e.g. dataset.table).' 
+ assert ( + str(BigQueryEngineSpec.parse_error_exception(Exception(message))) + == expected_result + ) + assert str(BigQueryEngineSpec.parse_error_exception(Exception(message_2))) == "6" + + +@pytest.mark.parametrize( + "target_type,expected_result", + [ + ("Date", "CAST('2019-01-02' AS DATE)"), + ("DateTime", "CAST('2019-01-02T03:04:05.678900' AS DATETIME)"), + ("TimeStamp", "CAST('2019-01-02T03:04:05.678900' AS TIMESTAMP)"), + ("Time", "CAST('03:04:05.678900' AS TIME)"), + ("UnknownType", None), + ], +) +def test_convert_dttm( + target_type: str, expected_result: Optional[str], dttm: datetime +) -> None: + """ + DB Eng Specs (bigquery): Test conversion to date time + """ + from superset.db_engine_specs.bigquery import BigQueryEngineSpec as spec + + assert_convert_dttm(spec, target_type, expected_result, dttm) diff --git a/tests/unit_tests/db_engine_specs/test_clickhouse.py b/tests/unit_tests/db_engine_specs/test_clickhouse.py new file mode 100644 index 0000000000000..0c437bc00998c --- /dev/null +++ b/tests/unit_tests/db_engine_specs/test_clickhouse.py @@ -0,0 +1,213 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
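
The convert_dttm tests in these engine-spec files all delegate to assert_convert_dttm from tests/unit_tests/db_engine_specs/utils.py. Its body is not shown in this diff, so the following is only a sketch of what the parametrized cases imply it does: call the spec's convert_dttm with the target type and the shared dttm fixture, then compare against the expected SQL literal (or None for unsupported types).

    from datetime import datetime
    from typing import Optional


    def assert_convert_dttm_sketch(
        spec, target_type: str, expected: Optional[str], dttm: datetime
    ) -> None:
        # convert_dttm returns a dialect-specific literal, or None when the
        # engine spec does not know how to convert the given target type.
        assert spec.convert_dttm(target_type, dttm) == expected
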
+ +from datetime import datetime +from typing import Any, Dict, Optional, Type +from unittest.mock import Mock + +import pytest +from sqlalchemy.types import ( + Boolean, + Date, + DateTime, + DECIMAL, + Float, + Integer, + String, + TypeEngine, +) + +from superset.utils.core import GenericDataType +from tests.unit_tests.db_engine_specs.utils import ( + assert_column_spec, + assert_convert_dttm, +) +from tests.unit_tests.fixtures.common import dttm + + +@pytest.mark.parametrize( + "target_type,expected_result", + [ + ("Date", "toDate('2019-01-02')"), + ("DateTime", "toDateTime('2019-01-02 03:04:05')"), + ("UnknownType", None), + ], +) +def test_convert_dttm( + target_type: str, expected_result: Optional[str], dttm: datetime +) -> None: + from superset.db_engine_specs.clickhouse import ClickHouseEngineSpec as spec + + assert_convert_dttm(spec, target_type, expected_result, dttm) + + +def test_execute_connection_error() -> None: + from urllib3.exceptions import NewConnectionError + + from superset.db_engine_specs.clickhouse import ClickHouseEngineSpec + from superset.db_engine_specs.exceptions import SupersetDBAPIDatabaseError + + cursor = Mock() + cursor.execute.side_effect = NewConnectionError( + "Dummypool", "Exception with sensitive data" + ) + with pytest.raises(SupersetDBAPIDatabaseError) as ex: + ClickHouseEngineSpec.execute(cursor, "SELECT col1 from table1") + + +@pytest.mark.parametrize( + "target_type,expected_result", + [ + ("Date", "toDate('2019-01-02')"), + ("DateTime", "toDateTime('2019-01-02 03:04:05')"), + ("UnknownType", None), + ], +) +def test_connect_convert_dttm( + target_type: str, expected_result: Optional[str], dttm: datetime +) -> None: + from superset.db_engine_specs.clickhouse import ClickHouseEngineSpec as spec + + assert_convert_dttm(spec, target_type, expected_result, dttm) + + +@pytest.mark.parametrize( + "native_type,sqla_type,attrs,generic_type,is_dttm", + [ + ("String", String, None, GenericDataType.STRING, False), + ("LowCardinality(String)", String, None, GenericDataType.STRING, False), + ("Nullable(String)", String, None, GenericDataType.STRING, False), + ( + "LowCardinality(Nullable(String))", + String, + None, + GenericDataType.STRING, + False, + ), + ("Array(UInt8)", String, None, GenericDataType.STRING, False), + ("Enum('hello', 'world')", String, None, GenericDataType.STRING, False), + ("Enum('UInt32', 'Bool')", String, None, GenericDataType.STRING, False), + ( + "LowCardinality(Enum('hello', 'world'))", + String, + None, + GenericDataType.STRING, + False, + ), + ( + "Nullable(Enum('hello', 'world'))", + String, + None, + GenericDataType.STRING, + False, + ), + ( + "LowCardinality(Nullable(Enum('hello', 'world')))", + String, + None, + GenericDataType.STRING, + False, + ), + ("FixedString(16)", String, None, GenericDataType.STRING, False), + ("Nullable(FixedString(16))", String, None, GenericDataType.STRING, False), + ( + "LowCardinality(Nullable(FixedString(16)))", + String, + None, + GenericDataType.STRING, + False, + ), + ("UUID", String, None, GenericDataType.STRING, False), + ("Int8", Integer, None, GenericDataType.NUMERIC, False), + ("Int16", Integer, None, GenericDataType.NUMERIC, False), + ("Int32", Integer, None, GenericDataType.NUMERIC, False), + ("Int64", Integer, None, GenericDataType.NUMERIC, False), + ("Int128", Integer, None, GenericDataType.NUMERIC, False), + ("Int256", Integer, None, GenericDataType.NUMERIC, False), + ("Nullable(Int256)", Integer, None, GenericDataType.NUMERIC, False), + ( + "LowCardinality(Nullable(Int256))", + 
Integer, + None, + GenericDataType.NUMERIC, + False, + ), + ("UInt8", Integer, None, GenericDataType.NUMERIC, False), + ("UInt16", Integer, None, GenericDataType.NUMERIC, False), + ("UInt32", Integer, None, GenericDataType.NUMERIC, False), + ("UInt64", Integer, None, GenericDataType.NUMERIC, False), + ("UInt128", Integer, None, GenericDataType.NUMERIC, False), + ("UInt256", Integer, None, GenericDataType.NUMERIC, False), + ("Nullable(UInt256)", Integer, None, GenericDataType.NUMERIC, False), + ( + "LowCardinality(Nullable(UInt256))", + Integer, + None, + GenericDataType.NUMERIC, + False, + ), + ("Float32", Float, None, GenericDataType.NUMERIC, False), + ("Float64", Float, None, GenericDataType.NUMERIC, False), + ("Decimal(1, 2)", DECIMAL, None, GenericDataType.NUMERIC, False), + ("Decimal32(2)", DECIMAL, None, GenericDataType.NUMERIC, False), + ("Decimal64(2)", DECIMAL, None, GenericDataType.NUMERIC, False), + ("Decimal128(2)", DECIMAL, None, GenericDataType.NUMERIC, False), + ("Decimal256(2)", DECIMAL, None, GenericDataType.NUMERIC, False), + ("Bool", Boolean, None, GenericDataType.BOOLEAN, False), + ("Nullable(Bool)", Boolean, None, GenericDataType.BOOLEAN, False), + ("Date", Date, None, GenericDataType.TEMPORAL, True), + ("Nullable(Date)", Date, None, GenericDataType.TEMPORAL, True), + ("LowCardinality(Nullable(Date))", Date, None, GenericDataType.TEMPORAL, True), + ("Date32", Date, None, GenericDataType.TEMPORAL, True), + ("Datetime", DateTime, None, GenericDataType.TEMPORAL, True), + ("Nullable(Datetime)", DateTime, None, GenericDataType.TEMPORAL, True), + ( + "LowCardinality(Nullable(Datetime))", + DateTime, + None, + GenericDataType.TEMPORAL, + True, + ), + ("Datetime('UTC')", DateTime, None, GenericDataType.TEMPORAL, True), + ("Datetime64(3)", DateTime, None, GenericDataType.TEMPORAL, True), + ("Datetime64(3, 'UTC')", DateTime, None, GenericDataType.TEMPORAL, True), + ], +) +def test_connect_get_column_spec( + native_type: str, + sqla_type: Type[TypeEngine], + attrs: Optional[Dict[str, Any]], + generic_type: GenericDataType, + is_dttm: bool, +) -> None: + from superset.db_engine_specs.clickhouse import ClickHouseConnectEngineSpec as spec + + assert_column_spec(spec, native_type, sqla_type, attrs, generic_type, is_dttm) + + +@pytest.mark.parametrize( + "column_name,expected_result", + [ + ("time", "time_07cc69"), + ("count", "count_e2942a"), + ], +) +def test_connect_make_label_compatible(column_name: str, expected_result: str) -> None: + from superset.db_engine_specs.clickhouse import ClickHouseConnectEngineSpec as spec + + label = spec.make_label_compatible(column_name) + assert label == expected_result diff --git a/tests/unit_tests/db_engine_specs/test_crate.py b/tests/unit_tests/db_engine_specs/test_crate.py new file mode 100644 index 0000000000000..2cb1cd78966dc --- /dev/null +++ b/tests/unit_tests/db_engine_specs/test_crate.py @@ -0,0 +1,71 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from datetime import datetime +from typing import Optional + +import pytest + +from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm +from tests.unit_tests.fixtures.common import dttm + + +def test_epoch_to_dttm() -> None: + """ + DB Eng Specs (crate): Test epoch to dttm + """ + from superset.db_engine_specs.crate import CrateEngineSpec + + assert CrateEngineSpec.epoch_to_dttm() == "{col} * 1000" + + +def test_epoch_ms_to_dttm() -> None: + """ + DB Eng Specs (crate): Test epoch ms to dttm + """ + from superset.db_engine_specs.crate import CrateEngineSpec + + assert CrateEngineSpec.epoch_ms_to_dttm() == "{col}" + + +def test_alter_new_orm_column() -> None: + """ + DB Eng Specs (crate): Test alter orm column + """ + from superset.connectors.sqla.models import SqlaTable, TableColumn + from superset.db_engine_specs.crate import CrateEngineSpec + from superset.models.core import Database + + database = Database(database_name="crate", sqlalchemy_uri="crate://db") + tbl = SqlaTable(table_name="tbl", database=database) + col = TableColumn(column_name="ts", type="TIMESTAMP", table=tbl) + CrateEngineSpec.alter_new_orm_column(col) + assert col.python_date_format == "epoch_ms" + + +@pytest.mark.parametrize( + "target_type,expected_result", + [ + ("TimeStamp", "1546398245678.9"), + ("UnknownType", None), + ], +) +def test_convert_dttm( + target_type: str, expected_result: Optional[str], dttm: datetime +) -> None: + from superset.db_engine_specs.crate import CrateEngineSpec as spec + + assert_convert_dttm(spec, target_type, expected_result, dttm) diff --git a/tests/unit_tests/db_engine_specs/test_databricks.py b/tests/unit_tests/db_engine_specs/test_databricks.py new file mode 100644 index 0000000000000..49d65b324b0bf --- /dev/null +++ b/tests/unit_tests/db_engine_specs/test_databricks.py @@ -0,0 +1,246 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# pylint: disable=unused-argument, import-outside-toplevel, protected-access
+
+import json
+from datetime import datetime
+from typing import Optional
+
+import pytest
+from pytest_mock import MockerFixture
+
+from superset.db_engine_specs.databricks import DatabricksNativeEngineSpec
+from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
+from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm
+from tests.unit_tests.fixtures.common import dttm
+
+
+def test_get_parameters_from_uri() -> None:
+    """
+    Test that the result from ``get_parameters_from_uri`` is JSON serializable.
+    """
+    from superset.db_engine_specs.databricks import (
+        DatabricksNativeEngineSpec,
+        DatabricksParametersType,
+    )
+
+    parameters = DatabricksNativeEngineSpec.get_parameters_from_uri(
+        "databricks+connector://token:abc12345@my_hostname:1234/test"
+    )
+    assert parameters == DatabricksParametersType(
+        {
+            "access_token": "abc12345",
+            "host": "my_hostname",
+            "port": 1234,
+            "database": "test",
+            "encryption": False,
+        }
+    )
+    assert json.loads(json.dumps(parameters)) == parameters
+
+
+def test_build_sqlalchemy_uri() -> None:
+    """
+    Test that the parameters can be correctly compiled into a SQLAlchemy URI.
+    """
+    from superset.db_engine_specs.databricks import (
+        DatabricksNativeEngineSpec,
+        DatabricksParametersType,
+    )
+
+    parameters = DatabricksParametersType(
+        {
+            "access_token": "abc12345",
+            "host": "my_hostname",
+            "port": 1234,
+            "database": "test",
+            "encryption": False,
+        }
+    )
+    encrypted_extra = None
+    sqlalchemy_uri = DatabricksNativeEngineSpec.build_sqlalchemy_uri(
+        parameters, encrypted_extra
+    )
+    assert sqlalchemy_uri == (
+        "databricks+connector://token:abc12345@my_hostname:1234/test"
+    )
+
+
+def test_parameters_json_schema() -> None:
+    """
+    Test that the parameters schema can be converted to JSON.
+    """
+    from superset.db_engine_specs.databricks import DatabricksNativeEngineSpec
+
+    json_schema = DatabricksNativeEngineSpec.parameters_json_schema()
+
+    assert json_schema == {
+        "type": "object",
+        "properties": {
+            "access_token": {"type": "string"},
+            "database": {"type": "string"},
+            "encryption": {
+                "description": "Use an encrypted connection to the database",
+                "type": "boolean",
+            },
+            "host": {"type": "string"},
+            "http_path": {"type": "string"},
+            "port": {
+                "description": "Database port",
+                "format": "int32",
+                "maximum": 65536,
+                "minimum": 0,
+                "type": "integer",
+            },
+        },
+        "required": ["access_token", "database", "host", "http_path", "port"],
+    }
+
+
+def test_get_extra_params(mocker: MockerFixture) -> None:
+    """
+    Test the ``get_extra_params`` method.
+ """ + from superset.db_engine_specs.databricks import DatabricksNativeEngineSpec + + database = mocker.MagicMock() + + database.extra = {} + assert DatabricksNativeEngineSpec.get_extra_params(database) == { + "engine_params": { + "connect_args": { + "http_headers": [("User-Agent", "Apache Superset")], + "_user_agent_entry": "Apache Superset", + } + } + } + + database.extra = json.dumps( + { + "engine_params": { + "connect_args": { + "http_headers": [("User-Agent", "Custom user agent")], + "_user_agent_entry": "Custom user agent", + "foo": "bar", + } + } + } + ) + assert DatabricksNativeEngineSpec.get_extra_params(database) == { + "engine_params": { + "connect_args": { + "http_headers": [["User-Agent", "Custom user agent"]], + "_user_agent_entry": "Custom user agent", + "foo": "bar", + } + } + } + + # it should also remove whitespace from http_path + database.extra = json.dumps( + { + "engine_params": { + "connect_args": { + "http_headers": [("User-Agent", "Custom user agent")], + "_user_agent_entry": "Custom user agent", + "http_path": "/some_path_here_with_whitespace ", + } + } + } + ) + assert DatabricksNativeEngineSpec.get_extra_params(database) == { + "engine_params": { + "connect_args": { + "http_headers": [["User-Agent", "Custom user agent"]], + "_user_agent_entry": "Custom user agent", + "http_path": "/some_path_here_with_whitespace", + } + } + } + + +def test_extract_errors() -> None: + """ + Test that custom error messages are extracted correctly. + """ + + msg = ": mismatched input 'fromm'. Expecting: " + result = DatabricksNativeEngineSpec.extract_errors(Exception(msg)) + + assert result == [ + SupersetError( + message=": mismatched input 'fromm'. Expecting: ", + error_type=SupersetErrorType.GENERIC_DB_ENGINE_ERROR, + level=ErrorLevel.ERROR, + extra={ + "engine_name": "Databricks", + "issue_codes": [ + { + "code": 1002, + "message": "Issue 1002 - The database returned an unexpected error.", + } + ], + }, + ) + ] + + +def test_extract_errors_with_context() -> None: + """ + Test that custom error messages are extracted correctly with context. + """ + + msg = ": mismatched input 'fromm'. Expecting: " + context = {"hostname": "foo"} + result = DatabricksNativeEngineSpec.extract_errors(Exception(msg), context) + + assert result == [ + SupersetError( + message=": mismatched input 'fromm'. Expecting: ", + error_type=SupersetErrorType.GENERIC_DB_ENGINE_ERROR, + level=ErrorLevel.ERROR, + extra={ + "engine_name": "Databricks", + "issue_codes": [ + { + "code": 1002, + "message": "Issue 1002 - The database returned an unexpected error.", + } + ], + }, + ) + ] + + +@pytest.mark.parametrize( + "target_type,expected_result", + [ + ("Date", "CAST('2019-01-02' AS DATE)"), + ( + "TimeStamp", + "CAST('2019-01-02 03:04:05.678900' AS TIMESTAMP)", + ), + ("UnknownType", None), + ], +) +def test_convert_dttm( + target_type: str, expected_result: Optional[str], dttm: datetime +) -> None: + from superset.db_engine_specs.databricks import DatabricksNativeEngineSpec as spec + + assert_convert_dttm(spec, target_type, expected_result, dttm) diff --git a/tests/unit_tests/db_engine_specs/test_dremio.py b/tests/unit_tests/db_engine_specs/test_dremio.py new file mode 100644 index 0000000000000..6b1e8203b5dfe --- /dev/null +++ b/tests/unit_tests/db_engine_specs/test_dremio.py @@ -0,0 +1,42 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from datetime import datetime +from typing import Optional + +import pytest + +from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm +from tests.unit_tests.fixtures.common import dttm + + +@pytest.mark.parametrize( + "target_type,expected_result", + [ + ("Date", "TO_DATE('2019-01-02', 'YYYY-MM-DD')"), + ( + "TimeStamp", + "TO_TIMESTAMP('2019-01-02 03:04:05.678', 'YYYY-MM-DD HH24:MI:SS.FFF')", + ), + ("UnknownType", None), + ], +) +def test_convert_dttm( + target_type: str, expected_result: Optional[str], dttm: datetime +) -> None: + from superset.db_engine_specs.dremio import DremioEngineSpec as spec + + assert_convert_dttm(spec, target_type, expected_result, dttm) diff --git a/tests/unit_tests/db_engine_specs/test_drill.py b/tests/unit_tests/db_engine_specs/test_drill.py new file mode 100644 index 0000000000000..e56df5d47cc4b --- /dev/null +++ b/tests/unit_tests/db_engine_specs/test_drill.py @@ -0,0 +1,108 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# pylint: disable=unused-argument, import-outside-toplevel, protected-access + +from datetime import datetime +from typing import Optional + +import pytest + +from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm +from tests.unit_tests.fixtures.common import dttm + + +def test_odbc_impersonation() -> None: + """ + Test ``get_url_for_impersonation`` method when driver == odbc. + + The method adds the parameter ``DelegationUID`` to the query string. + """ + from sqlalchemy.engine.url import URL + + from superset.db_engine_specs.drill import DrillEngineSpec + + url = URL("drill+odbc") + username = "DoAsUser" + url = DrillEngineSpec.get_url_for_impersonation(url, True, username) + assert url.query["DelegationUID"] == username + + +def test_jdbc_impersonation() -> None: + """ + Test ``get_url_for_impersonation`` method when driver == jdbc. + + The method adds the parameter ``impersonation_target`` to the query string. 
+ """ + from sqlalchemy.engine.url import URL + + from superset.db_engine_specs.drill import DrillEngineSpec + + url = URL("drill+jdbc") + username = "DoAsUser" + url = DrillEngineSpec.get_url_for_impersonation(url, True, username) + assert url.query["impersonation_target"] == username + + +def test_sadrill_impersonation() -> None: + """ + Test ``get_url_for_impersonation`` method when driver == sadrill. + + The method adds the parameter ``impersonation_target`` to the query string. + """ + from sqlalchemy.engine.url import URL + + from superset.db_engine_specs.drill import DrillEngineSpec + + url = URL("drill+sadrill") + username = "DoAsUser" + url = DrillEngineSpec.get_url_for_impersonation(url, True, username) + assert url.query["impersonation_target"] == username + + +def test_invalid_impersonation() -> None: + """ + Test ``get_url_for_impersonation`` method when driver == foobar. + + The method raises an exception because impersonation is not supported + for drill+foobar. + """ + from sqlalchemy.engine.url import URL + + from superset.db_engine_specs.drill import DrillEngineSpec + from superset.db_engine_specs.exceptions import SupersetDBAPIProgrammingError + + url = URL("drill+foobar") + username = "DoAsUser" + + with pytest.raises(SupersetDBAPIProgrammingError): + DrillEngineSpec.get_url_for_impersonation(url, True, username) + + +@pytest.mark.parametrize( + "target_type,expected_result", + [ + ("Date", "TO_DATE('2019-01-02', 'yyyy-MM-dd')"), + ("TimeStamp", "TO_TIMESTAMP('2019-01-02 03:04:05', 'yyyy-MM-dd HH:mm:ss')"), + ("UnknownType", None), + ], +) +def test_convert_dttm( + target_type: str, expected_result: Optional[str], dttm: datetime +) -> None: + from superset.db_engine_specs.drill import DrillEngineSpec as spec + + assert_convert_dttm(spec, target_type, expected_result, dttm) diff --git a/tests/unit_tests/db_engine_specs/test_druid.py b/tests/unit_tests/db_engine_specs/test_druid.py new file mode 100644 index 0000000000000..d090dffcde043 --- /dev/null +++ b/tests/unit_tests/db_engine_specs/test_druid.py @@ -0,0 +1,95 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from datetime import datetime +from typing import Optional +from unittest import mock + +import pytest +from sqlalchemy import column + +from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm +from tests.unit_tests.fixtures.common import dttm + + +@pytest.mark.parametrize( + "target_type,expected_result", + [ + ("Date", "CAST(TIME_PARSE('2019-01-02') AS DATE)"), + ("DateTime", "TIME_PARSE('2019-01-02T03:04:05')"), + ("TimeStamp", "TIME_PARSE('2019-01-02T03:04:05')"), + ("UnknownType", None), + ], +) +def test_convert_dttm( + target_type: str, expected_result: Optional[str], dttm: datetime +) -> None: + from superset.db_engine_specs.druid import DruidEngineSpec as spec + + assert_convert_dttm(spec, target_type, expected_result, dttm) + + +@pytest.mark.parametrize( + "time_grain,expected_result", + [ + ("PT1S", "TIME_FLOOR(CAST(col AS TIMESTAMP), 'PT1S')"), + ("PT5M", "TIME_FLOOR(CAST({col} AS TIMESTAMP), 'PT5M')"), + ( + "P1W/1970-01-03T00:00:00Z", + "TIME_SHIFT(TIME_FLOOR(TIME_SHIFT(CAST(col AS TIMESTAMP), 'P1D', 1), 'P1W'), 'P1D', 5)", + ), + ( + "1969-12-28T00:00:00Z/P1W", + "TIME_SHIFT(TIME_FLOOR(TIME_SHIFT(CAST(col AS TIMESTAMP), 'P1D', 1), 'P1W'), 'P1D', -1)", + ), + ], +) +def test_timegrain_expressions(time_grain: str, expected_result: str) -> None: + """ + DB Eng Specs (druid): Test time grain expressions + """ + from superset.db_engine_specs.druid import DruidEngineSpec + + assert str( + DruidEngineSpec.get_timestamp_expr( + col=column("col"), pdf=None, time_grain=time_grain + ) + ) + + +def test_extras_without_ssl() -> None: + from superset.db_engine_specs.druid import DruidEngineSpec + from tests.integration_tests.fixtures.database import default_db_extra + + db = mock.Mock() + db.extra = default_db_extra + db.server_cert = None + extras = DruidEngineSpec.get_extra_params(db) + assert "connect_args" not in extras["engine_params"] + + +def test_extras_with_ssl() -> None: + from superset.db_engine_specs.druid import DruidEngineSpec + from tests.integration_tests.fixtures.certificates import ssl_certificate + from tests.integration_tests.fixtures.database import default_db_extra + + db = mock.Mock() + db.extra = default_db_extra + db.server_cert = ssl_certificate + extras = DruidEngineSpec.get_extra_params(db) + connect_args = extras["engine_params"]["connect_args"] + assert connect_args["scheme"] == "https" + assert "ssl_verify_cert" in connect_args diff --git a/tests/unit_tests/db_engine_specs/test_duckdb.py b/tests/unit_tests/db_engine_specs/test_duckdb.py new file mode 100644 index 0000000000000..72d018f4fcdc8 --- /dev/null +++ b/tests/unit_tests/db_engine_specs/test_duckdb.py @@ -0,0 +1,40 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +from datetime import datetime +from typing import Optional + +import pytest + +from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm +from tests.unit_tests.fixtures.common import dttm + + +@pytest.mark.parametrize( + "target_type,expected_result", + [ + ("Text", "'2019-01-02 03:04:05.678900'"), + ("DateTime", "'2019-01-02 03:04:05.678900'"), + ("UnknownType", None), + ], +) +def test_convert_dttm( + target_type: str, expected_result: Optional[str], dttm: datetime +) -> None: + from superset.db_engine_specs.duckdb import DuckDBEngineSpec as spec + + assert_convert_dttm(spec, target_type, expected_result, dttm) diff --git a/tests/unit_tests/db_engine_specs/test_dynamodb.py b/tests/unit_tests/db_engine_specs/test_dynamodb.py new file mode 100644 index 0000000000000..26196f5b459e7 --- /dev/null +++ b/tests/unit_tests/db_engine_specs/test_dynamodb.py @@ -0,0 +1,40 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from datetime import datetime +from typing import Optional + +import pytest + +from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm +from tests.unit_tests.fixtures.common import dttm + + +@pytest.mark.parametrize( + "target_type,expected_result", + [ + ("text", "'2019-01-02 03:04:05'"), + ("dateTime", "'2019-01-02 03:04:05'"), + ("unknowntype", None), + ], +) +def test_convert_dttm( + target_type: str, expected_result: Optional[str], dttm: datetime +) -> None: + from superset.db_engine_specs.dynamodb import DynamoDBEngineSpec as spec + + assert_convert_dttm(spec, target_type, expected_result, dttm) diff --git a/tests/unit_tests/db_engine_specs/test_elasticsearch.py b/tests/unit_tests/db_engine_specs/test_elasticsearch.py new file mode 100644 index 0000000000000..de55c63426b70 --- /dev/null +++ b/tests/unit_tests/db_engine_specs/test_elasticsearch.py @@ -0,0 +1,106 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from datetime import datetime +from typing import Any, Dict, Optional +from unittest.mock import MagicMock + +import pytest +from sqlalchemy import column + +from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm +from tests.unit_tests.fixtures.common import dttm + + +@pytest.mark.parametrize( + "target_type,db_extra,expected_result", + [ + ("DateTime", None, "CAST('2019-01-02T03:04:05' AS DATETIME)"), + ( + "DateTime", + {"version": "7.7"}, + "CAST('2019-01-02T03:04:05' AS DATETIME)", + ), + ( + "DateTime", + {"version": "7.8"}, + "DATETIME_PARSE('2019-01-02 03:04:05', 'yyyy-MM-dd HH:mm:ss')", + ), + ( + "DateTime", + {"version": "unparseable semver version"}, + "CAST('2019-01-02T03:04:05' AS DATETIME)", + ), + ("Unknown", None, None), + ], +) +def test_elasticsearch_convert_dttm( + target_type: str, + db_extra: Optional[Dict[str, Any]], + expected_result: Optional[str], + dttm: datetime, +) -> None: + from superset.db_engine_specs.elasticsearch import ElasticSearchEngineSpec as spec + + assert_convert_dttm(spec, target_type, expected_result, dttm, db_extra) + + +@pytest.mark.parametrize( + "target_type,expected_result", + [ + ("DateTime", "'2019-01-02T03:04:05'"), + ("Unknown", None), + ], +) +def test_opendistro_convert_dttm( + target_type: str, + expected_result: Optional[str], + dttm: datetime, +) -> None: + from superset.db_engine_specs.elasticsearch import OpenDistroEngineSpec as spec + + assert_convert_dttm(spec, target_type, expected_result, dttm) + + +@pytest.mark.parametrize( + "original,expected", + [ + ("Col", "Col"), + ("Col.keyword", "Col_keyword"), + ], +) +def test_opendistro_sqla_column_label(original: str, expected: str) -> None: + """ + DB Eng Specs (opendistro): Test column label + """ + from superset.db_engine_specs.elasticsearch import OpenDistroEngineSpec + + assert OpenDistroEngineSpec.make_label_compatible(original) == expected + + +def test_opendistro_strip_comments() -> None: + """ + DB Eng Specs (opendistro): Test execute sql strip comments + """ + from superset.db_engine_specs.elasticsearch import OpenDistroEngineSpec + + mock_cursor = MagicMock() + mock_cursor.execute.return_value = [] + + OpenDistroEngineSpec.execute( + mock_cursor, "-- some comment \nSELECT 1\n --other comment" + ) + mock_cursor.execute.assert_called_once_with("SELECT 1\n") diff --git a/tests/unit_tests/db_engine_specs/test_firebird.py b/tests/unit_tests/db_engine_specs/test_firebird.py new file mode 100644 index 0000000000000..c1add91678abc --- /dev/null +++ b/tests/unit_tests/db_engine_specs/test_firebird.py @@ -0,0 +1,102 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +from datetime import datetime +from typing import Optional + +import pytest + +from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm +from tests.unit_tests.fixtures.common import dttm + + +@pytest.mark.parametrize( + "time_grain,expected", + [ + (None, "timestamp_column"), + ( + "PT1S", + ( + "CAST(CAST(timestamp_column AS DATE) " + "|| ' ' " + "|| EXTRACT(HOUR FROM timestamp_column) " + "|| ':' " + "|| EXTRACT(MINUTE FROM timestamp_column) " + "|| ':' " + "|| FLOOR(EXTRACT(SECOND FROM timestamp_column)) AS TIMESTAMP)" + ), + ), + ( + "PT1M", + ( + "CAST(CAST(timestamp_column AS DATE) " + "|| ' ' " + "|| EXTRACT(HOUR FROM timestamp_column) " + "|| ':' " + "|| EXTRACT(MINUTE FROM timestamp_column) " + "|| ':00' AS TIMESTAMP)" + ), + ), + ("P1D", "CAST(timestamp_column AS DATE)"), + ( + "P1M", + ( + "CAST(EXTRACT(YEAR FROM timestamp_column) " + "|| '-' " + "|| EXTRACT(MONTH FROM timestamp_column) " + "|| '-01' AS DATE)" + ), + ), + ("P1Y", "CAST(EXTRACT(YEAR FROM timestamp_column) || '-01-01' AS DATE)"), + ], +) +def test_time_grain_expressions(time_grain: Optional[str], expected: str) -> None: + from superset.db_engine_specs.firebird import FirebirdEngineSpec + + assert ( + FirebirdEngineSpec._time_grain_expressions[time_grain].format( + col="timestamp_column", + ) + == expected + ) + + +def test_epoch_to_dttm() -> None: + from superset.db_engine_specs.firebird import FirebirdEngineSpec + + assert ( + FirebirdEngineSpec.epoch_to_dttm().format(col="timestamp_column") + == "DATEADD(second, timestamp_column, CAST('00:00:00' AS TIMESTAMP))" + ) + + +@pytest.mark.parametrize( + "target_type,expected_result", + [ + ("Date", "CAST('2019-01-02' AS DATE)"), + ("DateTime", "CAST('2019-01-02 03:04:05.6789' AS TIMESTAMP)"), + ("TimeStamp", "CAST('2019-01-02 03:04:05.6789' AS TIMESTAMP)"), + ("Time", "CAST('03:04:05.678900' AS TIME)"), + ("UnknownType", None), + ], +) +def test_convert_dttm( + target_type: str, expected_result: Optional[str], dttm: datetime +) -> None: + from superset.db_engine_specs.firebird import FirebirdEngineSpec as spec + + assert_convert_dttm(spec, target_type, expected_result, dttm) diff --git a/tests/unit_tests/db_engine_specs/test_firebolt.py b/tests/unit_tests/db_engine_specs/test_firebolt.py new file mode 100644 index 0000000000000..eb84bb14b3ca8 --- /dev/null +++ b/tests/unit_tests/db_engine_specs/test_firebolt.py @@ -0,0 +1,57 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ + +from datetime import datetime +from typing import Optional + +import pytest + +from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm +from tests.unit_tests.fixtures.common import dttm + + +@pytest.mark.parametrize( + "target_type,expected_result", + [ + ("Date", "CAST('2019-01-02' AS DATE)"), + ( + "DateTime", + "CAST('2019-01-02T03:04:05' AS DATETIME)", + ), + ( + "TimeStamp", + "CAST('2019-01-02T03:04:05' AS TIMESTAMP)", + ), + ("UnknownType", None), + ], +) +def test_convert_dttm( + target_type: str, expected_result: Optional[str], dttm: datetime +) -> None: + from superset.db_engine_specs.firebolt import FireboltEngineSpec as spec + + assert_convert_dttm(spec, target_type, expected_result, dttm) + + +def test_epoch_to_dttm() -> None: + from superset.db_engine_specs.firebolt import FireboltEngineSpec + + assert ( + FireboltEngineSpec.epoch_to_dttm().format(col="timestamp_column") + == "from_unixtime(timestamp_column)" + ) diff --git a/tests/unit_tests/db_engine_specs/test_gsheets.py b/tests/unit_tests/db_engine_specs/test_gsheets.py new file mode 100644 index 0000000000000..042e486642bd8 --- /dev/null +++ b/tests/unit_tests/db_engine_specs/test_gsheets.py @@ -0,0 +1,309 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# pylint: disable=import-outside-toplevel, invalid-name, line-too-long + +import json + +from pytest_mock import MockFixture + +from superset.errors import ErrorLevel, SupersetError, SupersetErrorType + + +class ProgrammingError(Exception): + """ + Dummy ProgrammingError so we don't need to import the optional gsheets. 
+ """ + + +def test_validate_parameters_simple() -> None: + from superset.db_engine_specs.gsheets import ( + GSheetsEngineSpec, + GSheetsPropertiesType, + ) + + properties: GSheetsPropertiesType = { + "parameters": { + "service_account_info": "", + "catalog": {}, + }, + "catalog": {}, + } + errors = GSheetsEngineSpec.validate_parameters(properties) + assert errors == [ + SupersetError( + message="Sheet name is required", + error_type=SupersetErrorType.CONNECTION_MISSING_PARAMETERS_ERROR, + level=ErrorLevel.WARNING, + extra={"catalog": {"idx": 0, "name": True}}, + ), + ] + + +def test_validate_parameters_simple_with_in_root_catalog() -> None: + from superset.db_engine_specs.gsheets import ( + GSheetsEngineSpec, + GSheetsPropertiesType, + ) + + properties: GSheetsPropertiesType = { + "parameters": { + "service_account_info": "", + "catalog": {}, + }, + "catalog": {}, + } + errors = GSheetsEngineSpec.validate_parameters(properties) + assert errors == [ + SupersetError( + message="Sheet name is required", + error_type=SupersetErrorType.CONNECTION_MISSING_PARAMETERS_ERROR, + level=ErrorLevel.WARNING, + extra={"catalog": {"idx": 0, "name": True}}, + ), + ] + + +def test_validate_parameters_catalog( + mocker: MockFixture, +) -> None: + from superset.db_engine_specs.gsheets import ( + GSheetsEngineSpec, + GSheetsPropertiesType, + ) + + g = mocker.patch("superset.db_engine_specs.gsheets.g") + g.user.email = "admin@example.com" + + create_engine = mocker.patch("superset.db_engine_specs.gsheets.create_engine") + conn = create_engine.return_value.connect.return_value + results = conn.execute.return_value + results.fetchall.side_effect = [ + ProgrammingError("The caller does not have permission"), + [(1,)], + ProgrammingError("Unsupported table: https://www.google.com/"), + ] + + properties: GSheetsPropertiesType = { + "parameters": {"service_account_info": "", "catalog": None}, + "catalog": { + "private_sheet": "https://docs.google.com/spreadsheets/d/1/edit", + "public_sheet": "https://docs.google.com/spreadsheets/d/1/edit#gid=1", + "not_a_sheet": "https://www.google.com/", + }, + } + errors = GSheetsEngineSpec.validate_parameters(properties) # ignore: type + + assert errors == [ + SupersetError( + message=( + "The URL could not be identified. Please check for typos " + "and make sure that ‘Type of Google Sheets allowed’ " + "selection matches the input." + ), + error_type=SupersetErrorType.TABLE_DOES_NOT_EXIST_ERROR, + level=ErrorLevel.WARNING, + extra={ + "catalog": { + "idx": 0, + "url": True, + }, + "issue_codes": [ + { + "code": 1003, + "message": "Issue 1003 - There is a syntax error in the SQL query. Perhaps there was a misspelling or a typo.", + }, + { + "code": 1005, + "message": "Issue 1005 - The table was deleted or renamed in the database.", + }, + ], + }, + ), + SupersetError( + message=( + "The URL could not be identified. Please check for typos " + "and make sure that ‘Type of Google Sheets allowed’ " + "selection matches the input." + ), + error_type=SupersetErrorType.TABLE_DOES_NOT_EXIST_ERROR, + level=ErrorLevel.WARNING, + extra={ + "catalog": { + "idx": 2, + "url": True, + }, + "issue_codes": [ + { + "code": 1003, + "message": "Issue 1003 - There is a syntax error in the SQL query. 
Perhaps there was a misspelling or a typo.", + }, + { + "code": 1005, + "message": "Issue 1005 - The table was deleted or renamed in the database.", + }, + ], + }, + ), + ] + + create_engine.assert_called_with( + "gsheets://", + service_account_info={}, + subject="admin@example.com", + ) + + +def test_validate_parameters_catalog_and_credentials( + mocker: MockFixture, +) -> None: + from superset.db_engine_specs.gsheets import ( + GSheetsEngineSpec, + GSheetsPropertiesType, + ) + + g = mocker.patch("superset.db_engine_specs.gsheets.g") + g.user.email = "admin@example.com" + + create_engine = mocker.patch("superset.db_engine_specs.gsheets.create_engine") + conn = create_engine.return_value.connect.return_value + results = conn.execute.return_value + results.fetchall.side_effect = [ + [(2,)], + [(1,)], + ProgrammingError("Unsupported table: https://www.google.com/"), + ] + + properties: GSheetsPropertiesType = { + "parameters": { + "service_account_info": "", + "catalog": None, + }, + "catalog": { + "private_sheet": "https://docs.google.com/spreadsheets/d/1/edit", + "public_sheet": "https://docs.google.com/spreadsheets/d/1/edit#gid=1", + "not_a_sheet": "https://www.google.com/", + }, + } + errors = GSheetsEngineSpec.validate_parameters(properties) # ignore: type + assert errors == [ + SupersetError( + message=( + "The URL could not be identified. Please check for typos " + "and make sure that ‘Type of Google Sheets allowed’ " + "selection matches the input." + ), + error_type=SupersetErrorType.TABLE_DOES_NOT_EXIST_ERROR, + level=ErrorLevel.WARNING, + extra={ + "catalog": { + "idx": 2, + "url": True, + }, + "issue_codes": [ + { + "code": 1003, + "message": "Issue 1003 - There is a syntax error in the SQL query. Perhaps there was a misspelling or a typo.", + }, + { + "code": 1005, + "message": "Issue 1005 - The table was deleted or renamed in the database.", + }, + ], + }, + ) + ] + + create_engine.assert_called_with( + "gsheets://", + service_account_info={}, + subject="admin@example.com", + ) + + +def test_unmask_encrypted_extra() -> None: + """ + Test that the private key can be reused from the previous ``encrypted_extra``. + """ + from superset.db_engine_specs.gsheets import GSheetsEngineSpec + + old = json.dumps( + { + "service_account_info": { + "project_id": "black-sanctum-314419", + "private_key": "SECRET", + }, + } + ) + new = json.dumps( + { + "service_account_info": { + "project_id": "yellow-unicorn-314419", + "private_key": "XXXXXXXXXX", + }, + } + ) + + assert json.loads(str(GSheetsEngineSpec.unmask_encrypted_extra(old, new))) == { + "service_account_info": { + "project_id": "yellow-unicorn-314419", + "private_key": "SECRET", + }, + } + + +def test_unmask_encrypted_extra_when_old_is_none() -> None: + """ + Test that a None value works for ``encrypted_extra``. + """ + from superset.db_engine_specs.gsheets import GSheetsEngineSpec + + old = None + new = json.dumps( + { + "service_account_info": { + "project_id": "yellow-unicorn-314419", + "private_key": "XXXXXXXXXX", + }, + } + ) + + assert json.loads(str(GSheetsEngineSpec.unmask_encrypted_extra(old, new))) == { + "service_account_info": { + "project_id": "yellow-unicorn-314419", + "private_key": "XXXXXXXXXX", + }, + } + + +def test_unmask_encrypted_extra_when_new_is_none() -> None: + """ + Test that a None value works for ``encrypted_extra``. 
+ """ + from superset.db_engine_specs.gsheets import GSheetsEngineSpec + + old = json.dumps( + { + "service_account_info": { + "project_id": "yellow-unicorn-314419", + "private_key": "XXXXXXXXXX", + }, + } + ) + new = None + + assert GSheetsEngineSpec.unmask_encrypted_extra(old, new) is None diff --git a/tests/unit_tests/db_engine_specs/test_hana.py b/tests/unit_tests/db_engine_specs/test_hana.py new file mode 100644 index 0000000000000..1d1ac6390839c --- /dev/null +++ b/tests/unit_tests/db_engine_specs/test_hana.py @@ -0,0 +1,43 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from datetime import datetime +from typing import Optional + +import pytest + +from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm +from tests.unit_tests.fixtures.common import dttm + + +@pytest.mark.parametrize( + "target_type,expected_result", + [ + ("Date", "TO_DATE('2019-01-02', 'YYYY-MM-DD')"), + ( + "TimeStamp", + "TO_TIMESTAMP('2019-01-02T03:04:05.678900', 'YYYY-MM-DD\"T\"HH24:MI:SS.ff6')", + ), + ("UnknownType", None), + ], +) +def test_convert_dttm( + target_type: str, expected_result: Optional[str], dttm: datetime +) -> None: + from superset.db_engine_specs.hana import HanaEngineSpec as spec + + assert_convert_dttm(spec, target_type, expected_result, dttm) diff --git a/tests/unit_tests/db_engine_specs/test_hive.py b/tests/unit_tests/db_engine_specs/test_hive.py new file mode 100644 index 0000000000000..3a5cb91405bd4 --- /dev/null +++ b/tests/unit_tests/db_engine_specs/test_hive.py @@ -0,0 +1,44 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ + +from datetime import datetime +from typing import Optional + +import pytest + +from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm +from tests.unit_tests.fixtures.common import dttm + + +@pytest.mark.parametrize( + "target_type,expected_result", + [ + ("Date", "CAST('2019-01-02' AS DATE)"), + ( + "TimeStamp", + "CAST('2019-01-02 03:04:05.678900' AS TIMESTAMP)", + ), + ("UnknownType", None), + ], +) +def test_convert_dttm( + target_type: str, expected_result: Optional[str], dttm: datetime +) -> None: + from superset.db_engine_specs.hive import HiveEngineSpec as spec + + assert_convert_dttm(spec, target_type, expected_result, dttm) diff --git a/tests/unit_tests/db_engine_specs/test_impala.py b/tests/unit_tests/db_engine_specs/test_impala.py new file mode 100644 index 0000000000000..8a42440529ae0 --- /dev/null +++ b/tests/unit_tests/db_engine_specs/test_impala.py @@ -0,0 +1,40 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from datetime import datetime +from typing import Optional + +import pytest + +from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm +from tests.unit_tests.fixtures.common import dttm + + +@pytest.mark.parametrize( + "target_type,expected_result", + [ + ("Date", "CAST('2019-01-02' AS DATE)"), + ("TimeStamp", "CAST('2019-01-02T03:04:05.678900' AS TIMESTAMP)"), + ("UnknownType", None), + ], +) +def test_convert_dttm( + target_type: str, expected_result: Optional[str], dttm: datetime +) -> None: + from superset.db_engine_specs.impala import ImpalaEngineSpec as spec + + assert_convert_dttm(spec, target_type, expected_result, dttm) diff --git a/tests/unit_tests/db_engine_specs/test_init.py b/tests/unit_tests/db_engine_specs/test_init.py new file mode 100644 index 0000000000000..3189256c70f12 --- /dev/null +++ b/tests/unit_tests/db_engine_specs/test_init.py @@ -0,0 +1,80 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ + +import pytest +from pkg_resources import EntryPoint +from pytest_mock import MockFixture + +from superset.db_engine_specs import get_available_engine_specs + + +def test_get_available_engine_specs(mocker: MockFixture) -> None: + """ + get_available_engine_specs should return all engine specs + """ + from superset.db_engine_specs.databricks import ( + DatabricksHiveEngineSpec, + DatabricksNativeEngineSpec, + DatabricksODBCEngineSpec, + ) + + mocker.patch( + "superset.db_engine_specs.load_engine_specs", + return_value=iter( + [ + DatabricksHiveEngineSpec, + DatabricksNativeEngineSpec, + DatabricksODBCEngineSpec, + ] + ), + ) + + assert list(get_available_engine_specs().keys()) == [ + DatabricksHiveEngineSpec, + DatabricksNativeEngineSpec, + DatabricksODBCEngineSpec, + ] + + +@pytest.mark.parametrize( + "app", + [{"DBS_AVAILABLE_DENYLIST": {"databricks": {"pyhive", "pyodbc"}}}], + indirect=True, +) +def test_get_available_engine_specs_with_denylist(mocker: MockFixture) -> None: + """ + The denylist removes items from the db engine spec list + """ + from superset.db_engine_specs.databricks import ( + DatabricksHiveEngineSpec, + DatabricksNativeEngineSpec, + DatabricksODBCEngineSpec, + ) + + mocker.patch( + "superset.db_engine_specs.load_engine_specs", + return_value=iter( + [ + DatabricksHiveEngineSpec, + DatabricksNativeEngineSpec, + DatabricksODBCEngineSpec, + ] + ), + ) + available = get_available_engine_specs() + assert list(available.keys()) == [DatabricksNativeEngineSpec] diff --git a/tests/unit_tests/db_engine_specs/test_kusto.py b/tests/unit_tests/db_engine_specs/test_kusto.py new file mode 100644 index 0000000000000..538eafc6b176b --- /dev/null +++ b/tests/unit_tests/db_engine_specs/test_kusto.py @@ -0,0 +1,144 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# pylint: disable=unused-argument, import-outside-toplevel, protected-access
+from datetime import datetime
+from typing import Optional
+
+import pytest
+
+from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm
+from tests.unit_tests.fixtures.common import dttm
+
+
+@pytest.mark.parametrize(
+    "sql,expected",
+    [
+        ("SELECT foo FROM tbl", True),
+        ("SHOW TABLES", False),
+        ("EXPLAIN SELECT foo FROM tbl", False),
+        ("INSERT INTO tbl (foo) VALUES (1)", False),
+    ],
+)
+def test_sql_is_readonly_query(sql: str, expected: bool) -> None:
+    """
+    Make sure that the SQL dialect considers only SELECT statements to be read-only.
+    """
+
+    from superset.db_engine_specs.kusto import KustoSqlEngineSpec
+    from superset.sql_parse import ParsedQuery
+
+    parsed_query = ParsedQuery(sql)
+    is_readonly = KustoSqlEngineSpec.is_readonly_query(parsed_query)
+
+    assert expected == is_readonly
+
+
+@pytest.mark.parametrize(
+    "kql,expected",
+    [
+        ("tbl | limit 100", True),
+        ("let foo = 1; tbl | where bar == foo", True),
+        (".show tables", False),
+    ],
+)
+def test_kql_is_select_query(kql: str, expected: bool) -> None:
+    """
+    Make sure that the KQL dialect considers only statements that do not
+    start with "." (dot) to be SELECT statements.
+    """
+
+    from superset.db_engine_specs.kusto import KustoKqlEngineSpec
+    from superset.sql_parse import ParsedQuery
+
+    parsed_query = ParsedQuery(kql)
+    is_select = KustoKqlEngineSpec.is_select_query(parsed_query)
+
+    assert expected == is_select
+
+
+@pytest.mark.parametrize(
+    "kql,expected",
+    [
+        ("tbl | limit 100", True),
+        ("let foo = 1; tbl | where bar == foo", True),
+        (".show tables", True),
+        ("print 1", True),
+        ("set querytrace; Events | take 100", True),
+        (".drop table foo", False),
+        (".set-or-append table foo <| bar", False),
+    ],
+)
+def test_kql_is_readonly_query(kql: str, expected: bool) -> None:
+    """
+    Make sure that the KQL dialect considers only SELECT statements to be read-only.
+    """
+
+    from superset.db_engine_specs.kusto import KustoKqlEngineSpec
+    from superset.sql_parse import ParsedQuery
+
+    parsed_query = ParsedQuery(kql)
+    is_readonly = KustoKqlEngineSpec.is_readonly_query(parsed_query)
+
+    assert expected == is_readonly
+
+
+def test_kql_parse_sql() -> None:
+    """
+    ``parse_sql`` should always return a list with a single element, which is
+    the original query.
+    """
+
+    from superset.db_engine_specs.kusto import KustoKqlEngineSpec
+
+    queries = KustoKqlEngineSpec.parse_sql("let foo = 1; tbl | where bar == foo")
+
+    assert queries == ["let foo = 1; tbl | where bar == foo"]
+
+
+@pytest.mark.parametrize(
+    "target_type,expected_result",
+    [
+        ("DateTime", "datetime(2019-01-02T03:04:05.678900)"),
+        ("TimeStamp", "datetime(2019-01-02T03:04:05.678900)"),
+        ("Date", "datetime(2019-01-02)"),
+        ("UnknownType", None),
+    ],
+)
+def test_kql_convert_dttm(
+    target_type: str, expected_result: Optional[str], dttm: datetime
+) -> None:
+    from superset.db_engine_specs.kusto import KustoKqlEngineSpec as spec
+
+    assert_convert_dttm(spec, target_type, expected_result, dttm)
+
+
+@pytest.mark.parametrize(
+    "target_type,expected_result",
+    [
+        ("Date", "CONVERT(DATE, '2019-01-02', 23)"),
+        ("DateTime", "CONVERT(DATETIME, '2019-01-02T03:04:05.678', 126)"),
+        ("SmallDateTime", "CONVERT(SMALLDATETIME, '2019-01-02 03:04:05', 20)"),
+        ("TimeStamp", "CONVERT(TIMESTAMP, '2019-01-02 03:04:05', 20)"),
+        ("UnknownType", None),
+    ],
+)
+def test_sql_convert_dttm(
+    target_type: str, expected_result: Optional[str], dttm: datetime
+) -> None:
+    from
superset.db_engine_specs.kusto import KustoSqlEngineSpec as spec + + assert_convert_dttm(spec, target_type, expected_result, dttm) diff --git a/tests/unit_tests/db_engine_specs/test_kylin.py b/tests/unit_tests/db_engine_specs/test_kylin.py new file mode 100644 index 0000000000000..cbc8c9133de7f --- /dev/null +++ b/tests/unit_tests/db_engine_specs/test_kylin.py @@ -0,0 +1,40 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from datetime import datetime +from typing import Optional + +import pytest + +from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm +from tests.unit_tests.fixtures.common import dttm + + +@pytest.mark.parametrize( + "target_type,expected_result", + [ + ("Date", "CAST('2019-01-02' AS DATE)"), + ("TimeStamp", "CAST('2019-01-02 03:04:05' AS TIMESTAMP)"), + ("UnknownType", None), + ], +) +def test_convert_dttm( + target_type: str, expected_result: Optional[str], dttm: datetime +) -> None: + from superset.db_engine_specs.kylin import KylinEngineSpec as spec + + assert_convert_dttm(spec, target_type, expected_result, dttm) diff --git a/tests/unit_tests/db_engine_specs/test_mssql.py b/tests/unit_tests/db_engine_specs/test_mssql.py new file mode 100644 index 0000000000000..554ad97055f6b --- /dev/null +++ b/tests/unit_tests/db_engine_specs/test_mssql.py @@ -0,0 +1,432 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+import unittest.mock as mock +from datetime import datetime +from textwrap import dedent +from typing import Any, Dict, Optional, Type + +import pytest +from sqlalchemy import column, table +from sqlalchemy.dialects import mssql +from sqlalchemy.dialects.mssql import DATE, NTEXT, NVARCHAR, TEXT, VARCHAR +from sqlalchemy.sql import select +from sqlalchemy.types import String, TypeEngine, UnicodeText + +from superset.errors import ErrorLevel, SupersetError, SupersetErrorType +from superset.utils.core import GenericDataType +from tests.unit_tests.db_engine_specs.utils import ( + assert_column_spec, + assert_convert_dttm, +) +from tests.unit_tests.fixtures.common import dttm + + +@pytest.mark.parametrize( + "native_type,sqla_type,attrs,generic_type,is_dttm", + [ + ("CHAR", String, None, GenericDataType.STRING, False), + ("CHAR(10)", String, None, GenericDataType.STRING, False), + ("VARCHAR", String, None, GenericDataType.STRING, False), + ("VARCHAR(10)", String, None, GenericDataType.STRING, False), + ("TEXT", String, None, GenericDataType.STRING, False), + ("NCHAR(10)", UnicodeText, None, GenericDataType.STRING, False), + ("NVARCHAR(10)", UnicodeText, None, GenericDataType.STRING, False), + ("NTEXT", UnicodeText, None, GenericDataType.STRING, False), + ], +) +def test_get_column_spec( + native_type: str, + sqla_type: Type[TypeEngine], + attrs: Optional[Dict[str, Any]], + generic_type: GenericDataType, + is_dttm: bool, +) -> None: + from superset.db_engine_specs.mssql import MssqlEngineSpec as spec + + assert_column_spec(spec, native_type, sqla_type, attrs, generic_type, is_dttm) + + +def test_where_clause_n_prefix() -> None: + from superset.db_engine_specs.mssql import MssqlEngineSpec + + dialect = mssql.dialect() + + # non-unicode col + sqla_column_type = MssqlEngineSpec.get_column_types("VARCHAR(10)") + assert sqla_column_type is not None + type_, _ = sqla_column_type + str_col = column("col", type_=type_) + + # unicode col + sqla_column_type = MssqlEngineSpec.get_column_types("NTEXT") + assert sqla_column_type is not None + type_, _ = sqla_column_type + unicode_col = column("unicode_col", type_=type_) + + tbl = table("tbl") + sel = ( + select([str_col, unicode_col]) + .select_from(tbl) + .where(str_col == "abc") + .where(unicode_col == "abc") + ) + + query = str(sel.compile(dialect=dialect, compile_kwargs={"literal_binds": True})) + query_expected = ( + "SELECT col, unicode_col \n" + "FROM tbl \n" + "WHERE col = 'abc' AND unicode_col = N'abc'" + ) + assert query == query_expected + + +def test_time_exp_mixd_case_col_1y() -> None: + from superset.db_engine_specs.mssql import MssqlEngineSpec + + col = column("MixedCase") + expr = MssqlEngineSpec.get_timestamp_expr(col, None, "P1Y") + result = str(expr.compile(None, dialect=mssql.dialect())) + assert result == "DATEADD(YEAR, DATEDIFF(YEAR, 0, [MixedCase]), 0)" + + +@pytest.mark.parametrize( + "target_type,expected_result", + [ + ( + "date", + "CONVERT(DATE, '2019-01-02', 23)", + ), + ( + "datetime", + "CONVERT(DATETIME, '2019-01-02T03:04:05.678', 126)", + ), + ( + "smalldatetime", + "CONVERT(SMALLDATETIME, '2019-01-02 03:04:05', 20)", + ), + ("Other", None), + ], +) +def test_convert_dttm( + target_type: str, + expected_result: Optional[str], + dttm: datetime, +) -> None: + from superset.db_engine_specs.mssql import MssqlEngineSpec as spec + + assert_convert_dttm(spec, target_type, expected_result, dttm) + + +def test_extract_error_message() -> None: + from superset.db_engine_specs.mssql import MssqlEngineSpec + + test_mssql_exception = 
Exception( + "(8155, b\"No column name was specified for column 1 of 'inner_qry'." + "DB-Lib error message 20018, severity 16:\\nGeneral SQL Server error: " + 'Check messages from the SQL Server\\n")' + ) + error_message = MssqlEngineSpec.extract_error_message(test_mssql_exception) + expected_message = ( + "mssql error: All your SQL functions need to " + "have an alias on MSSQL. For example: SELECT COUNT(*) AS C1 FROM TABLE1" + ) + assert expected_message == error_message + + test_mssql_exception = Exception( + '(8200, b"A correlated expression is invalid because it is not in a ' + "GROUP BY clause.\\n\")'" + ) + error_message = MssqlEngineSpec.extract_error_message(test_mssql_exception) + expected_message = "mssql error: " + MssqlEngineSpec._extract_error_message( + test_mssql_exception + ) + assert expected_message == error_message + + +def test_fetch_data() -> None: + from superset.db_engine_specs.base import BaseEngineSpec + from superset.db_engine_specs.mssql import MssqlEngineSpec + + with mock.patch.object( + MssqlEngineSpec, + "pyodbc_rows_to_tuples", + return_value="converted", + ) as mock_pyodbc_rows_to_tuples: + data = [(1, "foo")] + with mock.patch.object(BaseEngineSpec, "fetch_data", return_value=data): + result = MssqlEngineSpec.fetch_data(None, 0) + mock_pyodbc_rows_to_tuples.assert_called_once_with(data) + assert result == "converted" + + +@pytest.mark.parametrize( + "original,expected", + [ + (DATE(), "DATE"), + (VARCHAR(length=255), "VARCHAR(255)"), + (VARCHAR(length=255, collation="utf8_general_ci"), "VARCHAR(255)"), + (NVARCHAR(length=128), "NVARCHAR(128)"), + (TEXT(), "TEXT"), + (NTEXT(collation="utf8_general_ci"), "NTEXT"), + ], +) +def test_column_datatype_to_string(original: TypeEngine, expected: str) -> None: + from superset.db_engine_specs.mssql import MssqlEngineSpec + + actual = MssqlEngineSpec.column_datatype_to_string(original, mssql.dialect()) + assert actual == expected + + +@pytest.mark.parametrize( + "original,expected", + [ + ( + dedent( + """ +with currency as ( +select 'INR' as cur +), +currency_2 as ( +select 'EUR' as cur +) +select * from currency union all select * from currency_2 +""" + ), + dedent( + """WITH currency as ( +select 'INR' as cur +), +currency_2 as ( +select 'EUR' as cur +), +__cte AS ( +select * from currency union all select * from currency_2 +)""" + ), + ), + ( + "SELECT 1 as cnt", + None, + ), + ( + dedent( + """ +select 'INR' as cur +union +select 'AUD' as cur +union +select 'USD' as cur +""" + ), + None, + ), + ], +) +def test_cte_query_parsing(original: TypeEngine, expected: str) -> None: + from superset.db_engine_specs.mssql import MssqlEngineSpec + + actual = MssqlEngineSpec.get_cte_query(original) + assert actual == expected + + +@pytest.mark.parametrize( + "original,expected,top", + [ + ("SEL TOP 1000 * FROM My_table", "SEL TOP 100 * FROM My_table", 100), + ("SEL TOP 1000 * FROM My_table;", "SEL TOP 100 * FROM My_table", 100), + ("SEL TOP 1000 * FROM My_table;", "SEL TOP 1000 * FROM My_table", 10000), + ("SEL TOP 1000 * FROM My_table;", "SEL TOP 1000 * FROM My_table", 1000), + ( + """with abc as (select * from test union select * from test1) +select TOP 100 * from currency""", + """WITH abc as (select * from test union select * from test1) +select TOP 100 * from currency""", + 1000, + ), + ("SELECT 1 as cnt", "SELECT TOP 10 1 as cnt", 10), + ( + "select TOP 1000 * from abc where id=1", + "select TOP 10 * from abc where id=1", + 10, + ), + ], +) +def test_top_query_parsing(original: TypeEngine, expected: str, top: int) -> 
None: + from superset.db_engine_specs.mssql import MssqlEngineSpec + + actual = MssqlEngineSpec.apply_top_to_sql(original, top) + assert actual == expected + + +def test_extract_errors() -> None: + """ + Test that custom error messages are extracted correctly. + """ + from superset.db_engine_specs.mssql import MssqlEngineSpec + + msg = dedent( + """ +DB-Lib error message 20009, severity 9: +Unable to connect: Adaptive Server is unavailable or does not exist (locahost) + """ + ) + result = MssqlEngineSpec.extract_errors(Exception(msg)) + assert result == [ + SupersetError( + error_type=SupersetErrorType.CONNECTION_INVALID_HOSTNAME_ERROR, + message='The hostname "locahost" cannot be resolved.', + level=ErrorLevel.ERROR, + extra={ + "engine_name": "Microsoft SQL Server", + "issue_codes": [ + { + "code": 1007, + "message": "Issue 1007 - The hostname provided can't be resolved.", + } + ], + }, + ) + ] + + msg = dedent( + """ +DB-Lib error message 20009, severity 9: +Unable to connect: Adaptive Server is unavailable or does not exist (localhost) +Net-Lib error during Connection refused (61) +DB-Lib error message 20009, severity 9: +Unable to connect: Adaptive Server is unavailable or does not exist (localhost) +Net-Lib error during Connection refused (61) + """ + ) + result = MssqlEngineSpec.extract_errors( + Exception(msg), context={"port": 12345, "hostname": "localhost"} + ) + assert result == [ + SupersetError( + error_type=SupersetErrorType.CONNECTION_PORT_CLOSED_ERROR, + message='Port 12345 on hostname "localhost" refused the connection.', + level=ErrorLevel.ERROR, + extra={ + "engine_name": "Microsoft SQL Server", + "issue_codes": [ + {"code": 1008, "message": "Issue 1008 - The port is closed."} + ], + }, + ) + ] + + msg = dedent( + """ +DB-Lib error message 20009, severity 9: +Unable to connect: Adaptive Server is unavailable or does not exist (example.com) +Net-Lib error during Operation timed out (60) +DB-Lib error message 20009, severity 9: +Unable to connect: Adaptive Server is unavailable or does not exist (example.com) +Net-Lib error during Operation timed out (60) + """ + ) + result = MssqlEngineSpec.extract_errors( + Exception(msg), context={"port": 12345, "hostname": "example.com"} + ) + assert result == [ + SupersetError( + error_type=SupersetErrorType.CONNECTION_HOST_DOWN_ERROR, + message=( + 'The host "example.com" might be down, ' + "and can't be reached on port 12345." + ), + level=ErrorLevel.ERROR, + extra={ + "engine_name": "Microsoft SQL Server", + "issue_codes": [ + { + "code": 1009, + "message": "Issue 1009 - The host might be down, and can't be reached on the provided port.", + } + ], + }, + ) + ] + + msg = dedent( + """ +DB-Lib error message 20009, severity 9: +Unable to connect: Adaptive Server is unavailable or does not exist (93.184.216.34) +Net-Lib error during Operation timed out (60) +DB-Lib error message 20009, severity 9: +Unable to connect: Adaptive Server is unavailable or does not exist (93.184.216.34) +Net-Lib error during Operation timed out (60) + """ + ) + result = MssqlEngineSpec.extract_errors( + Exception(msg), context={"port": 12345, "hostname": "93.184.216.34"} + ) + assert result == [ + SupersetError( + error_type=SupersetErrorType.CONNECTION_HOST_DOWN_ERROR, + message=( + 'The host "93.184.216.34" might be down, ' + "and can't be reached on port 12345." 
+ ), + level=ErrorLevel.ERROR, + extra={ + "engine_name": "Microsoft SQL Server", + "issue_codes": [ + { + "code": 1009, + "message": "Issue 1009 - The host might be down, and can't be reached on the provided port.", + } + ], + }, + ) + ] + + msg = dedent( + """ +DB-Lib error message 20018, severity 14: +General SQL Server error: Check messages from the SQL Server +DB-Lib error message 20002, severity 9: +Adaptive Server connection failed (mssqldb.cxiotftzsypc.us-west-2.rds.amazonaws.com) +DB-Lib error message 20002, severity 9: +Adaptive Server connection failed (mssqldb.cxiotftzsypc.us-west-2.rds.amazonaws.com) + """ + ) + result = MssqlEngineSpec.extract_errors( + Exception(msg), context={"username": "testuser", "database": "testdb"} + ) + assert result == [ + SupersetError( + message='Either the username "testuser", password, or database name "testdb" is incorrect.', + error_type=SupersetErrorType.CONNECTION_ACCESS_DENIED_ERROR, + level=ErrorLevel.ERROR, + extra={ + "engine_name": "Microsoft SQL Server", + "issue_codes": [ + { + "code": 1014, + "message": "Issue 1014 - Either the username or " + "the password is wrong.", + }, + { + "code": 1015, + "message": "Issue 1015 - Either the database is " + "spelled incorrectly or does not exist.", + }, + ], + }, + ) + ] diff --git a/tests/unit_tests/db_engine_specs/test_mysql.py b/tests/unit_tests/db_engine_specs/test_mysql.py new file mode 100644 index 0000000000000..a512e71a97f67 --- /dev/null +++ b/tests/unit_tests/db_engine_specs/test_mysql.py @@ -0,0 +1,150 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
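The MSSQL error-message tests above hinge on one rewrite rule; the following is a minimal sketch, not part of the patch, that replays it outside pytest using the exact exception text from test_extract_error_message.

# Illustrative sketch only -- not part of the patch above. It reuses the exact
# exception text from test_extract_error_message to show the rewrite in isolation.
from superset.db_engine_specs.mssql import MssqlEngineSpec

exc = Exception(
    "(8155, b\"No column name was specified for column 1 of 'inner_qry'."
    "DB-Lib error message 20018, severity 16:\\nGeneral SQL Server error: "
    'Check messages from the SQL Server\\n")'
)
print(MssqlEngineSpec.extract_error_message(exc))
# mssql error: All your SQL functions need to have an alias on MSSQL.
# For example: SELECT COUNT(*) AS C1 FROM TABLE1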
+ +from datetime import datetime +from typing import Any, Dict, Optional, Type +from unittest.mock import Mock, patch + +import pytest +from sqlalchemy import types +from sqlalchemy.dialects.mysql import ( + BIT, + DECIMAL, + DOUBLE, + FLOAT, + INTEGER, + LONGTEXT, + MEDIUMINT, + MEDIUMTEXT, + TINYINT, + TINYTEXT, +) +from sqlalchemy.engine.url import make_url + +from superset.utils.core import GenericDataType +from tests.unit_tests.db_engine_specs.utils import ( + assert_column_spec, + assert_convert_dttm, +) +from tests.unit_tests.fixtures.common import dttm + + +@pytest.mark.parametrize( + "native_type,sqla_type,attrs,generic_type,is_dttm", + [ + # Numeric + ("TINYINT", TINYINT, None, GenericDataType.NUMERIC, False), + ("SMALLINT", types.SmallInteger, None, GenericDataType.NUMERIC, False), + ("MEDIUMINT", MEDIUMINT, None, GenericDataType.NUMERIC, False), + ("INT", INTEGER, None, GenericDataType.NUMERIC, False), + ("BIGINT", types.BigInteger, None, GenericDataType.NUMERIC, False), + ("DECIMAL", DECIMAL, None, GenericDataType.NUMERIC, False), + ("FLOAT", FLOAT, None, GenericDataType.NUMERIC, False), + ("DOUBLE", DOUBLE, None, GenericDataType.NUMERIC, False), + ("BIT", BIT, None, GenericDataType.NUMERIC, False), + # String + ("CHAR", types.String, None, GenericDataType.STRING, False), + ("VARCHAR", types.String, None, GenericDataType.STRING, False), + ("TINYTEXT", TINYTEXT, None, GenericDataType.STRING, False), + ("MEDIUMTEXT", MEDIUMTEXT, None, GenericDataType.STRING, False), + ("LONGTEXT", LONGTEXT, None, GenericDataType.STRING, False), + # Temporal + ("DATE", types.Date, None, GenericDataType.TEMPORAL, True), + ("DATETIME", types.DateTime, None, GenericDataType.TEMPORAL, True), + ("TIMESTAMP", types.TIMESTAMP, None, GenericDataType.TEMPORAL, True), + ("TIME", types.Time, None, GenericDataType.TEMPORAL, True), + ], +) +def test_get_column_spec( + native_type: str, + sqla_type: Type[types.TypeEngine], + attrs: Optional[Dict[str, Any]], + generic_type: GenericDataType, + is_dttm: bool, +) -> None: + from superset.db_engine_specs.mysql import MySQLEngineSpec as spec + + assert_column_spec(spec, native_type, sqla_type, attrs, generic_type, is_dttm) + + +@pytest.mark.parametrize( + "target_type,expected_result", + [ + ("Date", "STR_TO_DATE('2019-01-02', '%Y-%m-%d')"), + ( + "DateTime", + "STR_TO_DATE('2019-01-02 03:04:05.678900', '%Y-%m-%d %H:%i:%s.%f')", + ), + ("UnknownType", None), + ], +) +def test_convert_dttm( + target_type: str, expected_result: Optional[str], dttm: datetime +) -> None: + from superset.db_engine_specs.mysql import MySQLEngineSpec as spec + + assert_convert_dttm(spec, target_type, expected_result, dttm) + + +@pytest.mark.parametrize( + "sqlalchemy_uri,error", + [ + ("mysql://user:password@host/db1?local_infile=1", True), + ("mysql://user:password@host/db1?local_infile=0", True), + ("mysql://user:password@host/db1", False), + ], +) +def test_validate_database_uri(sqlalchemy_uri: str, error: bool) -> None: + from superset.db_engine_specs.mysql import MySQLEngineSpec + + url = make_url(sqlalchemy_uri) + if error: + with pytest.raises(ValueError): + MySQLEngineSpec.validate_database_uri(url) + return + MySQLEngineSpec.validate_database_uri(url) + + +@patch("sqlalchemy.engine.Engine.connect") +def test_get_cancel_query_id(engine_mock: Mock) -> None: + from superset.db_engine_specs.mysql import MySQLEngineSpec + from superset.models.sql_lab import Query + + query = Query() + cursor_mock = engine_mock.return_value.__enter__.return_value + cursor_mock.fetchone.return_value = 
["123"] + assert MySQLEngineSpec.get_cancel_query_id(cursor_mock, query) == "123" + + +@patch("sqlalchemy.engine.Engine.connect") +def test_cancel_query(engine_mock: Mock) -> None: + from superset.db_engine_specs.mysql import MySQLEngineSpec + from superset.models.sql_lab import Query + + query = Query() + cursor_mock = engine_mock.return_value.__enter__.return_value + assert MySQLEngineSpec.cancel_query(cursor_mock, query, "123") is True + + +@patch("sqlalchemy.engine.Engine.connect") +def test_cancel_query_failed(engine_mock: Mock) -> None: + from superset.db_engine_specs.mysql import MySQLEngineSpec + from superset.models.sql_lab import Query + + query = Query() + cursor_mock = engine_mock.raiseError.side_effect = Exception() + assert MySQLEngineSpec.cancel_query(cursor_mock, query, "123") is False diff --git a/tests/unit_tests/db_engine_specs/test_oracle.py b/tests/unit_tests/db_engine_specs/test_oracle.py new file mode 100644 index 0000000000000..0dce956970611 --- /dev/null +++ b/tests/unit_tests/db_engine_specs/test_oracle.py @@ -0,0 +1,113 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
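The MySQL cancel-query tests above only need a cursor whose fetchone returns the connection id; here is a minimal sketch, not part of the patch, of that mocked-cursor call, with the value taken from the test itself.

# Illustrative sketch only -- not part of the patch. Mirrors the mocked-cursor
# pattern used by test_get_cancel_query_id above.
from unittest.mock import MagicMock

from superset.db_engine_specs.mysql import MySQLEngineSpec
from superset.models.sql_lab import Query

cursor = MagicMock()
cursor.fetchone.return_value = ["123"]  # what SELECT CONNECTION_ID() would yield
assert MySQLEngineSpec.get_cancel_query_id(cursor, Query()) == "123"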
+from datetime import datetime +from typing import Optional, Union +from unittest import mock + +import pytest +from sqlalchemy import column, types +from sqlalchemy.dialects import oracle +from sqlalchemy.dialects.oracle import DATE, NVARCHAR, VARCHAR +from sqlalchemy.sql import quoted_name + +from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm +from tests.unit_tests.fixtures.common import dttm + + +@pytest.mark.parametrize( + "column_name,expected_result", + [ + ("This_Is_32_Character_Column_Name", "3b26974078683be078219674eeb8f5"), + ("snake_label", "snake_label"), + ("camelLabel", "camelLabel"), + ], +) +def test_oracle_sqla_column_name_length_exceeded( + column_name: str, expected_result: Union[str, quoted_name] +) -> None: + from superset.db_engine_specs.oracle import OracleEngineSpec + + label = OracleEngineSpec.make_label_compatible(column_name) + assert isinstance(label, quoted_name) + assert label.quote is True + assert label == expected_result + + +def test_oracle_time_expression_reserved_keyword_1m_grain() -> None: + from superset.db_engine_specs.oracle import OracleEngineSpec + + col = column("decimal") + expr = OracleEngineSpec.get_timestamp_expr(col, None, "P1M") + result = str(expr.compile(dialect=oracle.dialect())) + assert result == "TRUNC(CAST(\"decimal\" as DATE), 'MONTH')" + + +@pytest.mark.parametrize( + "sqla_type,expected_result", + [ + (DATE(), "DATE"), + (VARCHAR(length=255), "VARCHAR(255 CHAR)"), + (VARCHAR(length=255, collation="utf8"), "VARCHAR(255 CHAR)"), + (NVARCHAR(length=128), "NVARCHAR2(128)"), + ], +) +def test_column_datatype_to_string( + sqla_type: types.TypeEngine, expected_result: str +) -> None: + from superset.db_engine_specs.oracle import OracleEngineSpec + + assert ( + OracleEngineSpec.column_datatype_to_string(sqla_type, oracle.dialect()) + == expected_result + ) + + +def test_fetch_data_no_description() -> None: + from superset.db_engine_specs.oracle import OracleEngineSpec + + cursor = mock.MagicMock() + cursor.description = [] + assert OracleEngineSpec.fetch_data(cursor) == [] + + +def test_fetch_data() -> None: + from superset.db_engine_specs.oracle import OracleEngineSpec + + cursor = mock.MagicMock() + result = ["a", "b"] + cursor.fetchall.return_value = result + assert OracleEngineSpec.fetch_data(cursor) == result + + +@pytest.mark.parametrize( + "target_type,expected_result", + [ + ("Date", "TO_DATE('2019-01-02', 'YYYY-MM-DD')"), + ("DateTime", """TO_DATE('2019-01-02T03:04:05', 'YYYY-MM-DD"T"HH24:MI:SS')"""), + ( + "TimeStamp", + """TO_TIMESTAMP('2019-01-02T03:04:05.678900', 'YYYY-MM-DD"T"HH24:MI:SS.ff6')""", + ), + ("Other", None), + ], +) +def test_convert_dttm( + target_type: str, expected_result: Optional[str], dttm: datetime +) -> None: + from superset.db_engine_specs.oracle import OracleEngineSpec as spec + + assert_convert_dttm(spec, target_type, expected_result, dttm) diff --git a/tests/unit_tests/db_engine_specs/test_postgres.py b/tests/unit_tests/db_engine_specs/test_postgres.py new file mode 100644 index 0000000000000..088ce2747834d --- /dev/null +++ b/tests/unit_tests/db_engine_specs/test_postgres.py @@ -0,0 +1,91 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from datetime import datetime +from typing import Any, Dict, Optional, Type + +import pytest +from sqlalchemy import types +from sqlalchemy.dialects.postgresql import DOUBLE_PRECISION, ENUM, JSON + +from superset.utils.core import GenericDataType +from tests.unit_tests.db_engine_specs.utils import ( + assert_column_spec, + assert_convert_dttm, +) +from tests.unit_tests.fixtures.common import dttm + + +@pytest.mark.parametrize( + "target_type,expected_result", + [ + ("Date", "TO_DATE('2019-01-02', 'YYYY-MM-DD')"), + ( + "DateTime", + "TO_TIMESTAMP('2019-01-02 03:04:05.678900', 'YYYY-MM-DD HH24:MI:SS.US')", + ), + ( + "TimeStamp", + "TO_TIMESTAMP('2019-01-02 03:04:05.678900', 'YYYY-MM-DD HH24:MI:SS.US')", + ), + ("UnknownType", None), + ], +) +def test_convert_dttm( + target_type: str, expected_result: Optional[str], dttm: datetime +) -> None: + from superset.db_engine_specs.postgres import PostgresEngineSpec as spec + + assert_convert_dttm(spec, target_type, expected_result, dttm) + + +@pytest.mark.parametrize( + "native_type,sqla_type,attrs,generic_type,is_dttm", + [ + ("SMALLINT", types.SmallInteger, None, GenericDataType.NUMERIC, False), + ("INTEGER", types.Integer, None, GenericDataType.NUMERIC, False), + ("BIGINT", types.BigInteger, None, GenericDataType.NUMERIC, False), + ("DECIMAL", types.Numeric, None, GenericDataType.NUMERIC, False), + ("NUMERIC", types.Numeric, None, GenericDataType.NUMERIC, False), + ("REAL", types.REAL, None, GenericDataType.NUMERIC, False), + ("DOUBLE PRECISION", DOUBLE_PRECISION, None, GenericDataType.NUMERIC, False), + ("MONEY", types.Numeric, None, GenericDataType.NUMERIC, False), + # String + ("CHAR", types.String, None, GenericDataType.STRING, False), + ("VARCHAR", types.String, None, GenericDataType.STRING, False), + ("TEXT", types.String, None, GenericDataType.STRING, False), + ("ARRAY", types.String, None, GenericDataType.STRING, False), + ("ENUM", ENUM, None, GenericDataType.STRING, False), + ("JSON", JSON, None, GenericDataType.STRING, False), + # Temporal + ("DATE", types.Date, None, GenericDataType.TEMPORAL, True), + ("TIMESTAMP", types.TIMESTAMP, None, GenericDataType.TEMPORAL, True), + ("TIME", types.Time, None, GenericDataType.TEMPORAL, True), + # Boolean + ("BOOLEAN", types.Boolean, None, GenericDataType.BOOLEAN, False), + ], +) +def test_get_column_spec( + native_type: str, + sqla_type: Type[types.TypeEngine], + attrs: Optional[Dict[str, Any]], + generic_type: GenericDataType, + is_dttm: bool, +) -> None: + from superset.db_engine_specs.postgres import PostgresEngineSpec as spec + + assert_column_spec(spec, native_type, sqla_type, attrs, generic_type, is_dttm) diff --git a/tests/unit_tests/db_engine_specs/test_presto.py b/tests/unit_tests/db_engine_specs/test_presto.py new file mode 100644 index 0000000000000..a30fab94c9157 --- /dev/null +++ b/tests/unit_tests/db_engine_specs/test_presto.py @@ -0,0 +1,84 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from datetime import datetime +from typing import Any, Dict, Optional, Type + +import pytest +import pytz +from sqlalchemy import types + +from superset.utils.core import GenericDataType +from tests.unit_tests.db_engine_specs.utils import ( + assert_column_spec, + assert_convert_dttm, +) + + +@pytest.mark.parametrize( + "target_type,dttm,expected_result", + [ + ("VARCHAR", datetime(2022, 1, 1), None), + ("DATE", datetime(2022, 1, 1), "DATE '2022-01-01'"), + ( + "TIMESTAMP", + datetime(2022, 1, 1, 1, 23, 45, 600000), + "TIMESTAMP '2022-01-01 01:23:45.600000'", + ), + ( + "TIMESTAMP WITH TIME ZONE", + datetime(2022, 1, 1, 1, 23, 45, 600000), + "TIMESTAMP '2022-01-01 01:23:45.600000'", + ), + ( + "TIMESTAMP WITH TIME ZONE", + datetime(2022, 1, 1, 1, 23, 45, 600000, tzinfo=pytz.UTC), + "TIMESTAMP '2022-01-01 01:23:45.600000+00:00'", + ), + ], +) +def test_convert_dttm( + target_type: str, + dttm: datetime, + expected_result: Optional[str], +) -> None: + from superset.db_engine_specs.presto import PrestoEngineSpec as spec + + assert_convert_dttm(spec, target_type, expected_result, dttm) + + +@pytest.mark.parametrize( + "native_type,sqla_type,attrs,generic_type,is_dttm", + [ + ("varchar(255)", types.VARCHAR, {"length": 255}, GenericDataType.STRING, False), + ("varchar", types.String, None, GenericDataType.STRING, False), + ("char(255)", types.CHAR, {"length": 255}, GenericDataType.STRING, False), + ("char", types.String, None, GenericDataType.STRING, False), + ("integer", types.Integer, None, GenericDataType.NUMERIC, False), + ("time", types.Time, None, GenericDataType.TEMPORAL, True), + ("timestamp", types.TIMESTAMP, None, GenericDataType.TEMPORAL, True), + ], +) +def test_get_column_spec( + native_type: str, + sqla_type: Type[types.TypeEngine], + attrs: Optional[Dict[str, Any]], + generic_type: GenericDataType, + is_dttm: bool, +) -> None: + from superset.db_engine_specs.presto import PrestoEngineSpec as spec + + assert_column_spec(spec, native_type, sqla_type, attrs, generic_type, is_dttm) diff --git a/tests/unit_tests/db_engine_specs/test_rockset.py b/tests/unit_tests/db_engine_specs/test_rockset.py new file mode 100644 index 0000000000000..c501dccf2e398 --- /dev/null +++ b/tests/unit_tests/db_engine_specs/test_rockset.py @@ -0,0 +1,41 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. + +from datetime import datetime +from typing import Optional + +import pytest + +from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm +from tests.unit_tests.fixtures.common import dttm + + +@pytest.mark.parametrize( + "target_type,expected_result", + [ + ("Date", "DATE '2019-01-02'"), + ("DateTime", "DATETIME '2019-01-02 03:04:05.678900'"), + ("Timestamp", "TIMESTAMP '2019-01-02T03:04:05.678900'"), + ("UnknownType", None), + ], +) +def test_convert_dttm( + target_type: str, expected_result: Optional[str], dttm: datetime +) -> None: + from superset.db_engine_specs.rockset import RocksetEngineSpec as spec + + assert_convert_dttm(spec, target_type, expected_result, dttm) diff --git a/tests/unit_tests/db_engine_specs/test_snowflake.py b/tests/unit_tests/db_engine_specs/test_snowflake.py new file mode 100644 index 0000000000000..9689428d25653 --- /dev/null +++ b/tests/unit_tests/db_engine_specs/test_snowflake.py @@ -0,0 +1,171 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
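The shared dttm fixture imported by these convert_dttm tests lives in tests/unit_tests/fixtures/common, which this diff does not include; judging from the expected literals (2019-01-02 03:04:05.678900) it is presumably equivalent to the sketch below.

# Presumed shape of the shared fixture -- not shown in this diff; inferred
# from the expected literals in the convert_dttm tests above.
from datetime import datetime

import pytest


@pytest.fixture
def dttm() -> datetime:
    return datetime(2019, 1, 2, 3, 4, 5, 678900)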
+ +# pylint: disable=import-outside-toplevel + +import json +from datetime import datetime +from typing import Optional +from unittest import mock + +import pytest +from pytest_mock import MockerFixture + +from superset.errors import ErrorLevel, SupersetError, SupersetErrorType +from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm +from tests.unit_tests.fixtures.common import dttm + + +@pytest.mark.parametrize( + "target_type,expected_result", + [ + ("Date", "TO_DATE('2019-01-02')"), + ("DateTime", "CAST('2019-01-02T03:04:05.678900' AS DATETIME)"), + ("TimeStamp", "TO_TIMESTAMP('2019-01-02T03:04:05.678900')"), + ("TIMESTAMP_NTZ", "TO_TIMESTAMP('2019-01-02T03:04:05.678900')"), + ("TIMESTAMP_LTZ", "TO_TIMESTAMP('2019-01-02T03:04:05.678900')"), + ("TIMESTAMP_TZ", "TO_TIMESTAMP('2019-01-02T03:04:05.678900')"), + ("TIMESTAMPLTZ", "TO_TIMESTAMP('2019-01-02T03:04:05.678900')"), + ("TIMESTAMPNTZ", "TO_TIMESTAMP('2019-01-02T03:04:05.678900')"), + ("TIMESTAMPTZ", "TO_TIMESTAMP('2019-01-02T03:04:05.678900')"), + ( + "TIMESTAMP WITH LOCAL TIME ZONE", + "TO_TIMESTAMP('2019-01-02T03:04:05.678900')", + ), + ("TIMESTAMP WITHOUT TIME ZONE", "TO_TIMESTAMP('2019-01-02T03:04:05.678900')"), + ("UnknownType", None), + ], +) +def test_convert_dttm( + target_type: str, expected_result: Optional[str], dttm: datetime +) -> None: + from superset.db_engine_specs.snowflake import SnowflakeEngineSpec as spec + + assert_convert_dttm(spec, target_type, expected_result, dttm) + + +def test_database_connection_test_mutator() -> None: + from superset.db_engine_specs.snowflake import SnowflakeEngineSpec + from superset.models.core import Database + + database = Database(sqlalchemy_uri="snowflake://abc") + SnowflakeEngineSpec.mutate_db_for_connection_test(database) + engine_params = json.loads(database.extra or "{}") + + assert { + "engine_params": {"connect_args": {"validate_default_parameters": True}} + } == engine_params + + +def test_extract_errors() -> None: + from superset.db_engine_specs.snowflake import SnowflakeEngineSpec + + msg = "Object dumbBrick does not exist or not authorized." + result = SnowflakeEngineSpec.extract_errors(Exception(msg)) + assert result == [ + SupersetError( + message="dumbBrick does not exist in this database.", + error_type=SupersetErrorType.OBJECT_DOES_NOT_EXIST_ERROR, + level=ErrorLevel.ERROR, + extra={ + "engine_name": "Snowflake", + "issue_codes": [ + { + "code": 1029, + "message": "Issue 1029 - The object does not exist in the given database.", + } + ], + }, + ) + ] + + msg = "syntax error line 1 at position 10 unexpected 'limited'." + result = SnowflakeEngineSpec.extract_errors(Exception(msg)) + assert result == [ + SupersetError( + message='Please check your query for syntax errors at or near "limited". 
Then, try running your query again.', + error_type=SupersetErrorType.SYNTAX_ERROR, + level=ErrorLevel.ERROR, + extra={ + "engine_name": "Snowflake", + "issue_codes": [ + { + "code": 1030, + "message": "Issue 1030 - The query has a syntax error.", + } + ], + }, + ) + ] + + +@mock.patch("sqlalchemy.engine.Engine.connect") +def test_get_cancel_query_id(engine_mock: mock.Mock) -> None: + from superset.db_engine_specs.snowflake import SnowflakeEngineSpec + from superset.models.sql_lab import Query + + query = Query() + cursor_mock = engine_mock.return_value.__enter__.return_value + cursor_mock.fetchone.return_value = [123] + assert SnowflakeEngineSpec.get_cancel_query_id(cursor_mock, query) == 123 + + +@mock.patch("sqlalchemy.engine.Engine.connect") +def test_cancel_query(engine_mock: mock.Mock) -> None: + from superset.db_engine_specs.snowflake import SnowflakeEngineSpec + from superset.models.sql_lab import Query + + query = Query() + cursor_mock = engine_mock.return_value.__enter__.return_value + assert SnowflakeEngineSpec.cancel_query(cursor_mock, query, "123") is True + + +@mock.patch("sqlalchemy.engine.Engine.connect") +def test_cancel_query_failed(engine_mock: mock.Mock) -> None: + from superset.db_engine_specs.snowflake import SnowflakeEngineSpec + from superset.models.sql_lab import Query + + query = Query() + cursor_mock = engine_mock.raiseError.side_effect = Exception() + assert SnowflakeEngineSpec.cancel_query(cursor_mock, query, "123") is False + + +def test_get_extra_params(mocker: MockerFixture) -> None: + """ + Test the ``get_extra_params`` method. + """ + from superset.db_engine_specs.snowflake import SnowflakeEngineSpec + + database = mocker.MagicMock() + + database.extra = {} + assert SnowflakeEngineSpec.get_extra_params(database) == { + "engine_params": {"connect_args": {"application": "Apache Superset"}} + } + + database.extra = json.dumps( + { + "engine_params": { + "connect_args": {"application": "Custom user agent", "foo": "bar"} + } + } + ) + assert SnowflakeEngineSpec.get_extra_params(database) == { + "engine_params": { + "connect_args": {"application": "Custom user agent", "foo": "bar"} + } + } diff --git a/tests/unit_tests/db_engine_specs/test_sqlite.py b/tests/unit_tests/db_engine_specs/test_sqlite.py new file mode 100644 index 0000000000000..11ce174c0f4ed --- /dev/null +++ b/tests/unit_tests/db_engine_specs/test_sqlite.py @@ -0,0 +1,99 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
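A minimal sketch, not part of the patch, of the behaviour the Snowflake get_extra_params test above pins down: user-supplied connect_args replace the default "Apache Superset" application tag.

# Illustrative sketch only -- not part of the patch; values mirror the test above.
import json
from unittest.mock import MagicMock

from superset.db_engine_specs.snowflake import SnowflakeEngineSpec

database = MagicMock()
database.extra = json.dumps(
    {"engine_params": {"connect_args": {"application": "Custom user agent", "foo": "bar"}}}
)
assert SnowflakeEngineSpec.get_extra_params(database) == {
    "engine_params": {"connect_args": {"application": "Custom user agent", "foo": "bar"}}
}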
+# pylint: disable=invalid-name, unused-argument, import-outside-toplevel, redefined-outer-name +from datetime import datetime +from typing import Optional + +import pytest +from sqlalchemy.engine import create_engine + +from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm +from tests.unit_tests.fixtures.common import dttm + + +@pytest.mark.parametrize( + "target_type,expected_result", + [ + ("Text", "'2019-01-02 03:04:05'"), + ("DateTime", "'2019-01-02 03:04:05'"), + ("TimeStamp", "'2019-01-02 03:04:05'"), + ("Other", None), + ], +) +def test_convert_dttm( + target_type: str, + expected_result: Optional[str], + dttm: datetime, +) -> None: + from superset.db_engine_specs.sqlite import SqliteEngineSpec as spec + + assert_convert_dttm(spec, target_type, expected_result, dttm) + + +@pytest.mark.parametrize( + "dttm,grain,expected", + [ + ("2022-05-04T05:06:07.89Z", "PT1S", "2022-05-04 05:06:07"), + ("2022-05-04T05:06:07.89Z", "PT1M", "2022-05-04 05:06:00"), + ("2022-05-04T05:06:07.89Z", "PT1H", "2022-05-04 05:00:00"), + ("2022-05-04T05:06:07.89Z", "P1D", "2022-05-04 00:00:00"), + ("2022-05-04T05:06:07.89Z", "P1W", "2022-05-01 00:00:00"), + ("2022-05-04T05:06:07.89Z", "P1M", "2022-05-01 00:00:00"), + ("2022-05-04T05:06:07.89Z", "P1Y", "2022-01-01 00:00:00"), + # ___________________________ + # | May 2022 | + # |---------------------------| + # | S | M | T | W | T | F | S | + # |---+---+---+---+---+---+---| + # | 1 | 2 | 3 | 4 | 5 | 6 | 7 | + # --------------------------- + # week ending Saturday + ("2022-05-04T05:06:07.89Z", "P1W/1970-01-03T00:00:00Z", "2022-05-07 00:00:00"), + # week ending Sunday + ("2022-05-04T05:06:07.89Z", "P1W/1970-01-04T00:00:00Z", "2022-05-08 00:00:00"), + # week starting Sunday + ("2022-05-04T05:06:07.89Z", "1969-12-28T00:00:00Z/P1W", "2022-05-01 00:00:00"), + # week starting Monday + ("2022-05-04T05:06:07.89Z", "1969-12-29T00:00:00Z/P1W", "2022-05-02 00:00:00"), + # tests for quarter + ("2022-01-04T05:06:07.89Z", "P3M", "2022-01-01 00:00:00"), + ("2022-02-04T05:06:07.89Z", "P3M", "2022-01-01 00:00:00"), + ("2022-03-04T05:06:07.89Z", "P3M", "2022-01-01 00:00:00"), + ("2022-04-04T05:06:07.89Z", "P3M", "2022-04-01 00:00:00"), + ("2022-05-04T05:06:07.89Z", "P3M", "2022-04-01 00:00:00"), + ("2022-06-04T05:06:07.89Z", "P3M", "2022-04-01 00:00:00"), + ("2022-07-04T05:06:07.89Z", "P3M", "2022-07-01 00:00:00"), + ("2022-08-04T05:06:07.89Z", "P3M", "2022-07-01 00:00:00"), + ("2022-09-04T05:06:07.89Z", "P3M", "2022-07-01 00:00:00"), + ("2022-10-04T05:06:07.89Z", "P3M", "2022-10-01 00:00:00"), + ("2022-11-04T05:06:07.89Z", "P3M", "2022-10-01 00:00:00"), + ("2022-12-04T05:06:07.89Z", "P3M", "2022-10-01 00:00:00"), + ], +) +def test_time_grain_expressions(dttm: str, grain: str, expected: str) -> None: + from superset.db_engine_specs.sqlite import SqliteEngineSpec + + engine = create_engine("sqlite://") + connection = engine.connect() + connection.execute("CREATE TABLE t (dttm DATETIME)") + connection.execute("INSERT INTO t VALUES (?)", dttm) + + # pylint: disable=protected-access + expression = SqliteEngineSpec._time_grain_expressions[grain].format(col="dttm") + sql = f"SELECT {expression} FROM t" + result = connection.execute(sql).scalar() + assert result == expected diff --git a/tests/unit_tests/db_engine_specs/test_teradata.py b/tests/unit_tests/db_engine_specs/test_teradata.py new file mode 100644 index 0000000000000..eab03e040d566 --- /dev/null +++ b/tests/unit_tests/db_engine_specs/test_teradata.py @@ -0,0 +1,43 @@ +# Licensed to the Apache Software 
Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# pylint: disable=unused-argument, import-outside-toplevel, protected-access +import pytest + + +@pytest.mark.parametrize( + "limit,original,expected", + [ + (100, "SEL TOP 1000 * FROM My_table", "SEL TOP 100 * FROM My_table"), + (100, "SEL TOP 1000 * FROM My_table;", "SEL TOP 100 * FROM My_table"), + (10000, "SEL TOP 1000 * FROM My_table;", "SEL TOP 1000 * FROM My_table"), + (1000, "SEL TOP 1000 * FROM My_table;", "SEL TOP 1000 * FROM My_table"), + (100, "SELECT TOP 1000 * FROM My_table", "SELECT TOP 100 * FROM My_table"), + (100, "SEL SAMPLE 1000 * FROM My_table", "SEL SAMPLE 100 * FROM My_table"), + (10000, "SEL SAMPLE 1000 * FROM My_table", "SEL SAMPLE 1000 * FROM My_table"), + ], +) +def test_apply_top_to_sql_limit( + limit: int, + original: str, + expected: str, +) -> None: + """ + Ensure limits are applied to the query correctly + """ + from superset.db_engine_specs.teradata import TeradataEngineSpec + + assert TeradataEngineSpec.apply_top_to_sql(original, limit) == expected diff --git a/tests/unit_tests/db_engine_specs/test_trino.py b/tests/unit_tests/db_engine_specs/test_trino.py new file mode 100644 index 0000000000000..0ea296a075e71 --- /dev/null +++ b/tests/unit_tests/db_engine_specs/test_trino.py @@ -0,0 +1,368 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
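A minimal sketch, not part of the patch, restating the Teradata rule exercised above: apply_top_to_sql only lowers an existing TOP (or SAMPLE) clause, never raises it, and drops a trailing semicolon.

# Illustrative sketch only -- not part of the patch; inputs and outputs are
# taken verbatim from the parametrized cases above.
from superset.db_engine_specs.teradata import TeradataEngineSpec

assert (
    TeradataEngineSpec.apply_top_to_sql("SEL TOP 1000 * FROM My_table;", 100)
    == "SEL TOP 100 * FROM My_table"
)
assert (
    TeradataEngineSpec.apply_top_to_sql("SEL TOP 1000 * FROM My_table;", 10000)
    == "SEL TOP 1000 * FROM My_table"
)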
+# pylint: disable=unused-argument, import-outside-toplevel, protected-access +import json +from datetime import datetime +from typing import Any, Dict, Optional, Type +from unittest.mock import Mock, patch + +import pandas as pd +import pytest +from pytest_mock import MockerFixture +from sqlalchemy import types + +import superset.config +from superset.constants import QUERY_CANCEL_KEY, QUERY_EARLY_CANCEL_KEY, USER_AGENT +from superset.utils.core import GenericDataType +from tests.unit_tests.db_engine_specs.utils import ( + assert_column_spec, + assert_convert_dttm, +) +from tests.unit_tests.fixtures.common import dttm + + +@pytest.mark.parametrize( + "extra,expected", + [ + ({}, {"engine_params": {"connect_args": {"source": USER_AGENT}}}), + ( + { + "first": 1, + "engine_params": { + "second": "two", + "connect_args": {"source": "foobar", "third": "three"}, + }, + }, + { + "first": 1, + "engine_params": { + "second": "two", + "connect_args": {"source": "foobar", "third": "three"}, + }, + }, + ), + ], +) +def test_get_extra_params(extra: Dict[str, Any], expected: Dict[str, Any]) -> None: + from superset.db_engine_specs.trino import TrinoEngineSpec + + database = Mock() + + database.extra = json.dumps(extra) + database.server_cert = None + assert TrinoEngineSpec.get_extra_params(database) == expected + + +@patch("superset.utils.core.create_ssl_cert_file") +def test_get_extra_params_with_server_cert(mock_create_ssl_cert_file: Mock) -> None: + from superset.db_engine_specs.trino import TrinoEngineSpec + + database = Mock() + + database.extra = json.dumps({}) + database.server_cert = "TEST_CERT" + mock_create_ssl_cert_file.return_value = "/path/to/tls.crt" + extra = TrinoEngineSpec.get_extra_params(database) + + connect_args = extra.get("engine_params", {}).get("connect_args", {}) + assert connect_args.get("http_scheme") == "https" + assert connect_args.get("verify") == "/path/to/tls.crt" + mock_create_ssl_cert_file.assert_called_once_with(database.server_cert) + + +@patch("trino.auth.BasicAuthentication") +def test_auth_basic(mock_auth: Mock) -> None: + from superset.db_engine_specs.trino import TrinoEngineSpec + + database = Mock() + + auth_params = {"username": "username", "password": "password"} + database.encrypted_extra = json.dumps( + {"auth_method": "basic", "auth_params": auth_params} + ) + + params: Dict[str, Any] = {} + TrinoEngineSpec.update_params_from_encrypted_extra(database, params) + connect_args = params.setdefault("connect_args", {}) + assert connect_args.get("http_scheme") == "https" + mock_auth.assert_called_once_with(**auth_params) + + +@patch("trino.auth.KerberosAuthentication") +def test_auth_kerberos(mock_auth: Mock) -> None: + from superset.db_engine_specs.trino import TrinoEngineSpec + + database = Mock() + + auth_params = { + "service_name": "superset", + "mutual_authentication": False, + "delegate": True, + } + database.encrypted_extra = json.dumps( + {"auth_method": "kerberos", "auth_params": auth_params} + ) + + params: Dict[str, Any] = {} + TrinoEngineSpec.update_params_from_encrypted_extra(database, params) + connect_args = params.setdefault("connect_args", {}) + assert connect_args.get("http_scheme") == "https" + mock_auth.assert_called_once_with(**auth_params) + + +@patch("trino.auth.CertificateAuthentication") +def test_auth_certificate(mock_auth: Mock) -> None: + from superset.db_engine_specs.trino import TrinoEngineSpec + + database = Mock() + auth_params = {"cert": "/path/to/cert.pem", "key": "/path/to/key.pem"} + database.encrypted_extra = json.dumps( + 
{"auth_method": "certificate", "auth_params": auth_params} + ) + + params: Dict[str, Any] = {} + TrinoEngineSpec.update_params_from_encrypted_extra(database, params) + connect_args = params.setdefault("connect_args", {}) + assert connect_args.get("http_scheme") == "https" + mock_auth.assert_called_once_with(**auth_params) + + +@patch("trino.auth.JWTAuthentication") +def test_auth_jwt(mock_auth: Mock) -> None: + from superset.db_engine_specs.trino import TrinoEngineSpec + + database = Mock() + + auth_params = {"token": "jwt-token-string"} + database.encrypted_extra = json.dumps( + {"auth_method": "jwt", "auth_params": auth_params} + ) + + params: Dict[str, Any] = {} + TrinoEngineSpec.update_params_from_encrypted_extra(database, params) + connect_args = params.setdefault("connect_args", {}) + assert connect_args.get("http_scheme") == "https" + mock_auth.assert_called_once_with(**auth_params) + + +def test_auth_custom_auth() -> None: + from superset.db_engine_specs.trino import TrinoEngineSpec + + database = Mock() + auth_class = Mock() + + auth_method = "custom_auth" + auth_params = {"params1": "params1", "params2": "params2"} + database.encrypted_extra = json.dumps( + {"auth_method": auth_method, "auth_params": auth_params} + ) + + with patch.dict( + "superset.config.ALLOWED_EXTRA_AUTHENTICATIONS", + {"trino": {"custom_auth": auth_class}}, + clear=True, + ): + params: Dict[str, Any] = {} + TrinoEngineSpec.update_params_from_encrypted_extra(database, params) + + connect_args = params.setdefault("connect_args", {}) + assert connect_args.get("http_scheme") == "https" + + auth_class.assert_called_once_with(**auth_params) + + +def test_auth_custom_auth_denied() -> None: + from superset.db_engine_specs.trino import TrinoEngineSpec + + database = Mock() + auth_method = "my.module:TrinoAuthClass" + auth_params = {"params1": "params1", "params2": "params2"} + database.encrypted_extra = json.dumps( + {"auth_method": auth_method, "auth_params": auth_params} + ) + + superset.config.ALLOWED_EXTRA_AUTHENTICATIONS = {} + + with pytest.raises(ValueError) as excinfo: + TrinoEngineSpec.update_params_from_encrypted_extra(database, {}) + + assert str(excinfo.value) == ( + f"For security reason, custom authentication '{auth_method}' " + f"must be listed in 'ALLOWED_EXTRA_AUTHENTICATIONS' config" + ) + + +@pytest.mark.parametrize( + "native_type,sqla_type,attrs,generic_type,is_dttm", + [ + ("BOOLEAN", types.Boolean, None, GenericDataType.BOOLEAN, False), + ("TINYINT", types.Integer, None, GenericDataType.NUMERIC, False), + ("SMALLINT", types.SmallInteger, None, GenericDataType.NUMERIC, False), + ("INTEGER", types.Integer, None, GenericDataType.NUMERIC, False), + ("BIGINT", types.BigInteger, None, GenericDataType.NUMERIC, False), + ("REAL", types.FLOAT, None, GenericDataType.NUMERIC, False), + ("DOUBLE", types.FLOAT, None, GenericDataType.NUMERIC, False), + ("DECIMAL", types.DECIMAL, None, GenericDataType.NUMERIC, False), + ("VARCHAR", types.String, None, GenericDataType.STRING, False), + ("VARCHAR(20)", types.VARCHAR, {"length": 20}, GenericDataType.STRING, False), + ("CHAR", types.String, None, GenericDataType.STRING, False), + ("CHAR(2)", types.CHAR, {"length": 2}, GenericDataType.STRING, False), + ("JSON", types.JSON, None, GenericDataType.STRING, False), + ("TIMESTAMP", types.TIMESTAMP, None, GenericDataType.TEMPORAL, True), + ("TIMESTAMP(3)", types.TIMESTAMP, None, GenericDataType.TEMPORAL, True), + ( + "TIMESTAMP WITH TIME ZONE", + types.TIMESTAMP, + None, + GenericDataType.TEMPORAL, + True, + ), + ( + 
"TIMESTAMP(3) WITH TIME ZONE", + types.TIMESTAMP, + None, + GenericDataType.TEMPORAL, + True, + ), + ("DATE", types.Date, None, GenericDataType.TEMPORAL, True), + ], +) +def test_get_column_spec( + native_type: str, + sqla_type: Type[types.TypeEngine], + attrs: Optional[Dict[str, Any]], + generic_type: GenericDataType, + is_dttm: bool, +) -> None: + from superset.db_engine_specs.trino import TrinoEngineSpec as spec + + assert_column_spec( + spec, + native_type, + sqla_type, + attrs, + generic_type, + is_dttm, + ) + + +@pytest.mark.parametrize( + "target_type,expected_result", + [ + ("TimeStamp", "TIMESTAMP '2019-01-02 03:04:05.678900'"), + ("TimeStamp(3)", "TIMESTAMP '2019-01-02 03:04:05.678900'"), + ("TimeStamp With Time Zone", "TIMESTAMP '2019-01-02 03:04:05.678900'"), + ("TimeStamp(3) With Time Zone", "TIMESTAMP '2019-01-02 03:04:05.678900'"), + ("Date", "DATE '2019-01-02'"), + ("Other", None), + ], +) +def test_convert_dttm( + target_type: str, + expected_result: Optional[str], + dttm: datetime, +) -> None: + from superset.db_engine_specs.trino import TrinoEngineSpec + + assert_convert_dttm(TrinoEngineSpec, target_type, expected_result, dttm) + + +def test_extra_table_metadata() -> None: + from superset.db_engine_specs.trino import TrinoEngineSpec + + db_mock = Mock() + db_mock.get_indexes = Mock( + return_value=[{"column_names": ["ds", "hour"], "name": "partition"}] + ) + db_mock.get_extra = Mock(return_value={}) + db_mock.has_view_by_name = Mock(return_value=None) + db_mock.get_df = Mock(return_value=pd.DataFrame({"ds": ["01-01-19"], "hour": [1]})) + result = TrinoEngineSpec.extra_table_metadata(db_mock, "test_table", "test_schema") + assert result["partitions"]["cols"] == ["ds", "hour"] + assert result["partitions"]["latest"] == {"ds": "01-01-19", "hour": 1} + + +@patch("sqlalchemy.engine.Engine.connect") +def test_cancel_query_success(engine_mock: Mock) -> None: + from superset.db_engine_specs.trino import TrinoEngineSpec + from superset.models.sql_lab import Query + + query = Query() + cursor_mock = engine_mock.return_value.__enter__.return_value + assert TrinoEngineSpec.cancel_query(cursor_mock, query, "123") is True + + +@patch("sqlalchemy.engine.Engine.connect") +def test_cancel_query_failed(engine_mock: Mock) -> None: + from superset.db_engine_specs.trino import TrinoEngineSpec + from superset.models.sql_lab import Query + + query = Query() + cursor_mock = engine_mock.raiseError.side_effect = Exception() + assert TrinoEngineSpec.cancel_query(cursor_mock, query, "123") is False + + +@pytest.mark.parametrize( + "initial_extra,final_extra", + [ + ({}, {QUERY_EARLY_CANCEL_KEY: True}), + ({QUERY_CANCEL_KEY: "my_key"}, {QUERY_CANCEL_KEY: "my_key"}), + ], +) +def test_prepare_cancel_query( + initial_extra: Dict[str, Any], + final_extra: Dict[str, Any], + mocker: MockerFixture, +) -> None: + from superset.db_engine_specs.trino import TrinoEngineSpec + from superset.models.sql_lab import Query + + session_mock = mocker.MagicMock() + query = Query(extra_json=json.dumps(initial_extra)) + TrinoEngineSpec.prepare_cancel_query(query=query, session=session_mock) + assert query.extra == final_extra + + +@pytest.mark.parametrize("cancel_early", [True, False]) +@patch("superset.db_engine_specs.trino.TrinoEngineSpec.cancel_query") +@patch("sqlalchemy.engine.Engine.connect") +def test_handle_cursor_early_cancel( + engine_mock: Mock, + cancel_query_mock: Mock, + cancel_early: bool, + mocker: MockerFixture, +) -> None: + from superset.db_engine_specs.trino import TrinoEngineSpec + from 
superset.models.sql_lab import Query + + query_id = "myQueryId" + + cursor_mock = engine_mock.return_value.__enter__.return_value + cursor_mock.stats = {"queryId": query_id} + session_mock = mocker.MagicMock() + + query = Query() + + if cancel_early: + TrinoEngineSpec.prepare_cancel_query(query=query, session=session_mock) + + TrinoEngineSpec.handle_cursor(cursor=cursor_mock, query=query, session=session_mock) + + if cancel_early: + assert cancel_query_mock.call_args[1]["cancel_query_id"] == query_id + else: + assert cancel_query_mock.call_args is None diff --git a/tests/unit_tests/db_engine_specs/utils.py b/tests/unit_tests/db_engine_specs/utils.py new file mode 100644 index 0000000000000..13ae7a34d2931 --- /dev/null +++ b/tests/unit_tests/db_engine_specs/utils.py @@ -0,0 +1,67 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from datetime import datetime +from typing import Any, Dict, Optional, Type, TYPE_CHECKING + +from sqlalchemy import types + +from superset.utils.core import GenericDataType + +if TYPE_CHECKING: + from superset.db_engine_specs.base import BaseEngineSpec + + +def assert_convert_dttm( + db_engine_spec: Type[BaseEngineSpec], + target_type: str, + expected_result: Optional[str], + dttm: datetime, + db_extra: Optional[Dict[str, Any]] = None, +) -> None: + for target in ( + target_type, + target_type.upper(), + target_type.lower(), + target_type.capitalize(), + ): + assert ( + result := db_engine_spec.convert_dttm( + target_type=target, + dttm=dttm, + db_extra=db_extra, + ) + ) == expected_result, result + + +def assert_column_spec( + db_engine_spec: Type[BaseEngineSpec], + native_type: str, + sqla_type: Type[types.TypeEngine], + attrs: Optional[Dict[str, Any]], + generic_type: GenericDataType, + is_dttm: bool, +) -> None: + assert (column_spec := db_engine_spec.get_column_spec(native_type)) is not None + assert isinstance(column_spec.sqla_type, sqla_type) + + for key, value in (attrs or {}).items(): + assert getattr(column_spec.sqla_type, key) == value + + assert column_spec.generic_type == generic_type + assert column_spec.is_dttm == is_dttm diff --git a/tests/unit_tests/explore/__init__.py b/tests/unit_tests/explore/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/explore/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/unit_tests/explore/api_test.py b/tests/unit_tests/explore/api_test.py
new file mode 100644
index 0000000000000..147790844dcdc
--- /dev/null
+++ b/tests/unit_tests/explore/api_test.py
@@ -0,0 +1,30 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from typing import Any
+
+import pytest
+
+
+def test_explore_datasource_not_found(client: Any, full_api_access: None) -> None:
+    # Validate the payload for a dataset that doesn't exist:
+    # the user should get the missing_datasource view.
+    response = client.get(
+        "/api/v1/explore/?datasource_id=50000&datasource_type=table",
+    )
+    assert response.json["result"]["dataset"]["name"] == "[Missing Dataset]"
+    assert response.status_code == 200
diff --git a/tests/unit_tests/explore/utils_test.py b/tests/unit_tests/explore/utils_test.py
new file mode 100644
index 0000000000000..b84000a7f0577
--- /dev/null
+++ b/tests/unit_tests/explore/utils_test.py
@@ -0,0 +1,289 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from flask_appbuilder.security.sqla.models import User +from pytest import raises +from pytest_mock import MockFixture + +from superset.charts.commands.exceptions import ( + ChartAccessDeniedError, + ChartNotFoundError, +) +from superset.commands.exceptions import ( + DatasourceNotFoundValidationError, + DatasourceTypeInvalidError, + OwnersNotFoundValidationError, + QueryNotFoundValidationError, +) +from superset.datasets.commands.exceptions import ( + DatasetAccessDeniedError, + DatasetNotFoundError, +) +from superset.exceptions import SupersetSecurityException +from superset.utils.core import DatasourceType, override_user + +dataset_find_by_id = "superset.datasets.dao.DatasetDAO.find_by_id" +query_find_by_id = "superset.queries.dao.QueryDAO.find_by_id" +chart_find_by_id = "superset.charts.dao.ChartDAO.find_by_id" +is_admin = "superset.security.SupersetSecurityManager.is_admin" +is_owner = "superset.security.SupersetSecurityManager.is_owner" +can_access_datasource = ( + "superset.security.SupersetSecurityManager.can_access_datasource" +) +can_access = "superset.security.SupersetSecurityManager.can_access" +raise_for_access = "superset.security.SupersetSecurityManager.raise_for_access" +query_datasources_by_name = ( + "superset.connectors.sqla.models.SqlaTable.query_datasources_by_name" +) + + +def test_unsaved_chart_no_dataset_id() -> None: + from superset.explore.utils import check_access as check_chart_access + + with raises(DatasourceNotFoundValidationError): + with override_user(User()): + check_chart_access( + datasource_id=0, + chart_id=0, + datasource_type=DatasourceType.TABLE, + ) + + +def test_unsaved_chart_unknown_dataset_id(mocker: MockFixture) -> None: + from superset.explore.utils import check_access as check_chart_access + + with raises(DatasetNotFoundError): + mocker.patch(dataset_find_by_id, return_value=None) + + with override_user(User()): + check_chart_access( + datasource_id=1, + chart_id=0, + datasource_type=DatasourceType.TABLE, + ) + + +def test_unsaved_chart_unknown_query_id(mocker: MockFixture) -> None: + from superset.explore.utils import check_access as check_chart_access + + with raises(QueryNotFoundValidationError): + mocker.patch(query_find_by_id, return_value=None) + + with override_user(User()): + check_chart_access( + datasource_id=1, + chart_id=0, + datasource_type=DatasourceType.QUERY, + ) + + +def test_unsaved_chart_unauthorized_dataset(mocker: MockFixture) -> None: + from superset.connectors.sqla.models import SqlaTable + from superset.explore.utils import check_access as check_chart_access + + with raises(DatasetAccessDeniedError): + mocker.patch(dataset_find_by_id, return_value=SqlaTable()) + mocker.patch(can_access_datasource, return_value=False) + + with override_user(User()): + check_chart_access( + datasource_id=1, + chart_id=0, + datasource_type=DatasourceType.TABLE, + ) + + +def test_unsaved_chart_authorized_dataset(mocker: MockFixture) -> None: + from superset.connectors.sqla.models import SqlaTable + from superset.explore.utils import check_access as check_chart_access + + mocker.patch(dataset_find_by_id, return_value=SqlaTable()) + mocker.patch(can_access_datasource, return_value=True) + + with override_user(User()): + check_chart_access( + datasource_id=1, + chart_id=0, + datasource_type=DatasourceType.TABLE, + ) + + +def test_saved_chart_unknown_chart_id(mocker: MockFixture) -> None: + from superset.connectors.sqla.models import SqlaTable + from superset.explore.utils import check_access as check_chart_access + + with 
raises(ChartNotFoundError): + mocker.patch(dataset_find_by_id, return_value=SqlaTable()) + mocker.patch(can_access_datasource, return_value=True) + mocker.patch(chart_find_by_id, return_value=None) + + with override_user(User()): + check_chart_access( + datasource_id=1, + chart_id=1, + datasource_type=DatasourceType.TABLE, + ) + + +def test_saved_chart_unauthorized_dataset(mocker: MockFixture) -> None: + from superset.connectors.sqla.models import SqlaTable + from superset.explore.utils import check_access as check_chart_access + + with raises(DatasetAccessDeniedError): + mocker.patch(dataset_find_by_id, return_value=SqlaTable()) + mocker.patch(can_access_datasource, return_value=False) + + with override_user(User()): + check_chart_access( + datasource_id=1, + chart_id=1, + datasource_type=DatasourceType.TABLE, + ) + + +def test_saved_chart_is_admin(mocker: MockFixture) -> None: + from superset.connectors.sqla.models import SqlaTable + from superset.explore.utils import check_access as check_chart_access + from superset.models.slice import Slice + + mocker.patch(dataset_find_by_id, return_value=SqlaTable()) + mocker.patch(can_access_datasource, return_value=True) + mocker.patch(is_admin, return_value=True) + mocker.patch(chart_find_by_id, return_value=Slice()) + + with override_user(User()): + check_chart_access( + datasource_id=1, + chart_id=1, + datasource_type=DatasourceType.TABLE, + ) + + +def test_saved_chart_is_owner(mocker: MockFixture) -> None: + from superset.connectors.sqla.models import SqlaTable + from superset.explore.utils import check_access as check_chart_access + from superset.models.slice import Slice + + mocker.patch(dataset_find_by_id, return_value=SqlaTable()) + mocker.patch(can_access_datasource, return_value=True) + mocker.patch(is_admin, return_value=False) + mocker.patch(is_owner, return_value=True) + mocker.patch(chart_find_by_id, return_value=Slice()) + + with override_user(User()): + check_chart_access( + datasource_id=1, + chart_id=1, + datasource_type=DatasourceType.TABLE, + ) + + +def test_saved_chart_has_access(mocker: MockFixture) -> None: + from superset.connectors.sqla.models import SqlaTable + from superset.explore.utils import check_access as check_chart_access + from superset.models.slice import Slice + + mocker.patch(dataset_find_by_id, return_value=SqlaTable()) + mocker.patch(can_access_datasource, return_value=True) + mocker.patch(is_admin, return_value=False) + mocker.patch(is_owner, return_value=False) + mocker.patch(can_access, return_value=True) + mocker.patch(chart_find_by_id, return_value=Slice()) + + with override_user(User()): + check_chart_access( + datasource_id=1, + chart_id=1, + datasource_type=DatasourceType.TABLE, + ) + + +def test_saved_chart_no_access(mocker: MockFixture) -> None: + from superset.connectors.sqla.models import SqlaTable + from superset.explore.utils import check_access as check_chart_access + from superset.models.slice import Slice + + with raises(ChartAccessDeniedError): + mocker.patch(dataset_find_by_id, return_value=SqlaTable()) + mocker.patch(can_access_datasource, return_value=True) + mocker.patch(is_admin, return_value=False) + mocker.patch(is_owner, return_value=False) + mocker.patch(can_access, return_value=False) + mocker.patch(chart_find_by_id, return_value=Slice()) + + with override_user(User()): + check_chart_access( + datasource_id=1, + chart_id=1, + datasource_type=DatasourceType.TABLE, + ) + + +def test_dataset_has_access(mocker: MockFixture) -> None: + from superset.connectors.sqla.models import 
SqlaTable + from superset.explore.utils import check_datasource_access + + mocker.patch(dataset_find_by_id, return_value=SqlaTable()) + mocker.patch(can_access_datasource, return_value=True) + mocker.patch(is_admin, return_value=False) + mocker.patch(is_owner, return_value=False) + mocker.patch(can_access, return_value=True) + assert ( + check_datasource_access( + datasource_id=1, + datasource_type=DatasourceType.TABLE, + ) + == True + ) + + +def test_query_has_access(mocker: MockFixture) -> None: + from superset.explore.utils import check_datasource_access + from superset.models.sql_lab import Query + + mocker.patch(query_find_by_id, return_value=Query()) + mocker.patch(raise_for_access, return_value=True) + mocker.patch(is_admin, return_value=False) + mocker.patch(is_owner, return_value=False) + mocker.patch(can_access, return_value=True) + assert ( + check_datasource_access( + datasource_id=1, + datasource_type=DatasourceType.QUERY, + ) + == True + ) + + +def test_query_no_access(mocker: MockFixture, client) -> None: + from superset.connectors.sqla.models import SqlaTable + from superset.explore.utils import check_datasource_access + from superset.models.core import Database + from superset.models.sql_lab import Query + + with raises(SupersetSecurityException): + mocker.patch( + query_find_by_id, + return_value=Query(database=Database(), sql="select * from foo"), + ) + mocker.patch(query_datasources_by_name, return_value=[SqlaTable()]) + mocker.patch(is_admin, return_value=False) + mocker.patch(is_owner, return_value=False) + mocker.patch(can_access, return_value=False) + check_datasource_access( + datasource_id=1, + datasource_type=DatasourceType.QUERY, + ) diff --git a/tests/unit_tests/extension_tests.py b/tests/unit_tests/extension_tests.py new file mode 100644 index 0000000000000..724b03f01a2ab --- /dev/null +++ b/tests/unit_tests/extension_tests.py @@ -0,0 +1,51 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from os.path import dirname +from unittest.mock import Mock + +from superset.extensions import UIManifestProcessor + +APP_DIR = f"{dirname(__file__)}/fixtures" + + +def test_get_manifest_with_prefix(): + app = Mock( + config={"STATIC_ASSETS_PREFIX": "https://cool.url/here"}, + template_context_processors={None: []}, + ) + manifest_processor = UIManifestProcessor(APP_DIR) + manifest_processor.init_app(app) + manifest = manifest_processor.get_manifest() + assert manifest["js_manifest"]("main") == ["/static/dist/main-js.js"] + assert manifest["css_manifest"]("main") == ["/static/dist/main-css.css"] + assert manifest["js_manifest"]("styles") == ["/static/dist/styles-js.js"] + assert manifest["css_manifest"]("styles") == [] + assert manifest["assets_prefix"] == "https://cool.url/here" + + +def test_get_manifest_no_prefix(): + app = Mock( + config={"STATIC_ASSETS_PREFIX": ""}, template_context_processors={None: []} + ) + manifest_processor = UIManifestProcessor(APP_DIR) + manifest_processor.init_app(app) + manifest = manifest_processor.get_manifest() + assert manifest["js_manifest"]("main") == ["/static/dist/main-js.js"] + assert manifest["css_manifest"]("main") == ["/static/dist/main-css.css"] + assert manifest["js_manifest"]("styles") == ["/static/dist/styles-js.js"] + assert manifest["css_manifest"]("styles") == [] + assert manifest["assets_prefix"] == "" diff --git a/tests/unit_tests/feature_flag_test.py b/tests/unit_tests/feature_flag_test.py new file mode 100644 index 0000000000000..43d142e7f67d3 --- /dev/null +++ b/tests/unit_tests/feature_flag_test.py @@ -0,0 +1,65 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from pytest_mock import MockFixture + +from superset import is_feature_enabled + + +def dummy_is_feature_enabled(feature_flag_name: str, default: bool = True) -> bool: + return True if feature_flag_name.startswith("True_") else default + + +def test_existing_feature_flags(mocker: MockFixture) -> None: + """ + Test that ``is_feature_enabled`` reads flags correctly. + """ + mocker.patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + {"FOO": True}, + clear=True, + ) + assert is_feature_enabled("FOO") is True + + +def test_nonexistent_feature_flags(mocker: MockFixture) -> None: + """ + Test that ``is_feature_enabled`` returns ``False`` when flag not set. + """ + mocker.patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", {}, clear=True + ) + assert is_feature_enabled("FOO") is False + + +def test_is_feature_enabled(mocker: MockFixture) -> None: + """ + Test ``_is_feature_enabled_func``. 
+ """ + mocker.patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + {"True_Flag1": False, "True_Flag2": True, "Flag3": False, "Flag4": True}, + clear=True, + ) + mocker.patch( + "superset.extensions.feature_flag_manager._is_feature_enabled_func", + dummy_is_feature_enabled, + ) + + assert is_feature_enabled("True_Flag1") is True + assert is_feature_enabled("True_Flag2") is True + assert is_feature_enabled("Flag3") is False + assert is_feature_enabled("Flag4") is True diff --git a/tests/unit_tests/fixtures/__init__.py b/tests/unit_tests/fixtures/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/fixtures/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/fixtures/assets_configs.py b/tests/unit_tests/fixtures/assets_configs.py new file mode 100644 index 0000000000000..6e78d9e562d10 --- /dev/null +++ b/tests/unit_tests/fixtures/assets_configs.py @@ -0,0 +1,260 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from typing import Any, Dict + +databases_config: Dict[str, Any] = { + "databases/examples.yaml": { + "database_name": "examples", + "sqlalchemy_uri": "sqlite:///test.db", + "cache_timeout": None, + "expose_in_sqllab": True, + "allow_run_async": False, + "allow_ctas": False, + "allow_cvas": False, + "extra": {}, + "uuid": "a2dc77af-e654-49bb-b321-40f6b559a1ee", + "version": "1.0.0", + "password": None, + "allow_csv_upload": False, + }, +} +datasets_config: Dict[str, Any] = { + "datasets/examples/video_game_sales.yaml": { + "table_name": "video_game_sales", + "main_dttm_col": None, + "description": None, + "default_endpoint": None, + "offset": 0, + "cache_timeout": None, + "schema": "main", + "sql": "", + "params": {}, + "template_params": None, + "filter_select_enabled": True, + "fetch_values_predicate": None, + "extra": None, + "uuid": "53d47c0c-c03d-47f0-b9ac-81225f808283", + "metrics": [ + { + "metric_name": "count", + "verbose_name": "COUNT(*)", + "metric_type": None, + "expression": "COUNT(*)", + "description": None, + "d3format": None, + "extra": None, + "warning_text": None, + } + ], + "columns": [ + { + "column_name": "genre", + "verbose_name": None, + "is_dttm": False, + "is_active": None, + "type": "STRING", + "advanced_data_type": None, + "groupby": True, + "filterable": True, + "expression": None, + "description": None, + "python_date_format": None, + "extra": None, + }, + ], + "version": "1.0.0", + "database_uuid": "a2dc77af-e654-49bb-b321-40f6b559a1ee", + }, +} +charts_config_1: Dict[str, Any] = { + "charts/Games_per_Genre_over_time_95.yaml": { + "slice_name": "Games per Genre over time", + "viz_type": "line", + "params": {}, + "cache_timeout": None, + "uuid": "0f8976aa-7bb4-40c7-860b-64445a51aaaf", + "version": "1.0.0", + "dataset_uuid": "53d47c0c-c03d-47f0-b9ac-81225f808283", + }, + "charts/Games_per_Genre_131.yaml": { + "slice_name": "Games per Genre", + "viz_type": "treemap", + "params": {}, + "cache_timeout": None, + "uuid": "0499bdec-0837-44f3-ae8a-8c670de81afd", + "version": "1.0.0", + "dataset_uuid": "53d47c0c-c03d-47f0-b9ac-81225f808283", + }, +} +dashboards_config_1: Dict[str, Any] = { + "dashboards/Video_Game_Sales_11.yaml": { + "dashboard_title": "Video Game Sales", + "description": None, + "css": "", + "slug": None, + "uuid": "c7bc10f4-6a2d-7569-caae-bbc91864ee11", + "position": { + "CHART-1L7NIcXvVN": { + "children": [], + "id": "CHART-1L7NIcXvVN", + "meta": { + "chartId": 95, + "height": 79, + "sliceName": "Games per Genre over time", + "uuid": "0f8976aa-7bb4-40c7-860b-64445a51aaaf", + "width": 6, + }, + "parents": [ + "ROOT_ID", + "GRID_ID", + "ROW-0F99WDC-sz", + ], + "type": "CHART", + }, + "CHART-7mKdnU7OUJ": { + "children": [], + "id": "CHART-7mKdnU7OUJ", + "meta": { + "chartId": 131, + "height": 80, + "sliceName": "Games per Genre", + "uuid": "0499bdec-0837-44f3-ae8a-8c670de81afd", + "width": 3, + }, + "parents": [ + "ROOT_ID", + "GRID_ID", + "ROW-0F99WDC-sz", + ], + "type": "CHART", + }, + "DASHBOARD_VERSION_KEY": "v2", + "GRID_ID": { + "children": ["ROW-0F99WDC-sz"], + "id": "GRID_ID", + "parents": ["ROOT_ID"], + "type": "GRID", + }, + "HEADER_ID": { + "id": "HEADER_ID", + "meta": {"text": "Video Game Sales"}, + "type": "HEADER", + }, + "ROOT_ID": { + "children": ["GRID_ID"], + "id": "ROOT_ID", + "type": "ROOT", + }, + "ROW-0F99WDC-sz": { + "children": ["CHART-1L7NIcXvVN", "CHART-7mKdnU7OUJ"], + "id": "ROW-0F99WDC-sz", + "meta": {"0": "ROOT_ID", "background": "BACKGROUND_TRANSPARENT"}, + "parents": ["ROOT_ID", "GRID_ID"], + "type": "ROW", + }, + }, + 
"metadata": { + "timed_refresh_immune_slices": [], + "expanded_slices": {}, + "refresh_frequency": 0, + "default_filters": "{}", + "color_scheme": "supersetColors", + "label_colors": {}, + "show_native_filters": True, + "color_scheme_domain": [], + "shared_label_colors": {}, + "cross_filters_enabled": False, + }, + "version": "1.0.0", + }, +} + +charts_config_2: Dict[str, Any] = { + "charts/Games_per_Genre_131.yaml": { + "slice_name": "Games per Genre", + "viz_type": "treemap", + "params": {}, + "cache_timeout": None, + "uuid": "0499bdec-0837-44f3-ae8a-8c670de81afd", + "version": "1.0.0", + "dataset_uuid": "53d47c0c-c03d-47f0-b9ac-81225f808283", + }, +} +dashboards_config_2: Dict[str, Any] = { + "dashboards/Video_Game_Sales_11.yaml": { + "dashboard_title": "Video Game Sales", + "description": None, + "css": "", + "slug": None, + "uuid": "c7bc10f4-6a2d-7569-caae-bbc91864ee11", + "position": { + "CHART-7mKdnU7OUJ": { + "children": [], + "id": "CHART-7mKdnU7OUJ", + "meta": { + "chartId": 131, + "height": 80, + "sliceName": "Games per Genre", + "uuid": "0499bdec-0837-44f3-ae8a-8c670de81afd", + "width": 3, + }, + "parents": [ + "ROOT_ID", + "GRID_ID", + "ROW-0F99WDC-sz", + ], + "type": "CHART", + }, + "DASHBOARD_VERSION_KEY": "v2", + "GRID_ID": { + "children": ["ROW-0F99WDC-sz"], + "id": "GRID_ID", + "parents": ["ROOT_ID"], + "type": "GRID", + }, + "HEADER_ID": { + "id": "HEADER_ID", + "meta": {"text": "Video Game Sales"}, + "type": "HEADER", + }, + "ROOT_ID": { + "children": ["GRID_ID"], + "id": "ROOT_ID", + "type": "ROOT", + }, + "ROW-0F99WDC-sz": { + "children": ["CHART-7mKdnU7OUJ"], + "id": "ROW-0F99WDC-sz", + "meta": {"0": "ROOT_ID", "background": "BACKGROUND_TRANSPARENT"}, + "parents": ["ROOT_ID", "GRID_ID"], + "type": "ROW", + }, + }, + "metadata": { + "timed_refresh_immune_slices": [], + "expanded_slices": {}, + "refresh_frequency": 0, + "default_filters": "{}", + "color_scheme": "supersetColors", + "label_colors": {}, + "show_native_filters": True, + "color_scheme_domain": [], + "shared_label_colors": {}, + }, + "version": "1.0.0", + }, +} diff --git a/tests/unit_tests/fixtures/common.py b/tests/unit_tests/fixtures/common.py new file mode 100644 index 0000000000000..6c2af8df658eb --- /dev/null +++ b/tests/unit_tests/fixtures/common.py @@ -0,0 +1,25 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +from datetime import datetime + +import pytest + + +@pytest.fixture +def dttm() -> datetime: + return datetime.strptime("2019-01-02 03:04:05.678900", "%Y-%m-%d %H:%M:%S.%f") diff --git a/tests/unit_tests/fixtures/dataframes.py b/tests/unit_tests/fixtures/dataframes.py new file mode 100644 index 0000000000000..31a275b735ac7 --- /dev/null +++ b/tests/unit_tests/fixtures/dataframes.py @@ -0,0 +1,197 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from datetime import date, datetime + +from pandas import DataFrame, to_datetime + +names_df = DataFrame( + [ + { + "dt": date(2020, 1, 2), + "name": "John", + "region": "EU", + "country": "United Kingdom", + "cars": 3, + "bikes": 1, + "seconds": 30, + }, + { + "dt": date(2020, 1, 2), + "name": "Peter", + "region": "EU", + "country": "Sweden", + "cars": 4, + "bikes": 2, + "seconds": 1, + }, + { + "dt": date(2020, 1, 3), + "name": "Mary", + "region": "EU", + "country": "Finland", + "cars": 5, + "bikes": 3, + "seconds": None, + }, + { + "dt": date(2020, 1, 3), + "name": "Peter", + "region": "Asia", + "country": "India", + "cars": 6, + "bikes": 4, + "seconds": 12, + }, + { + "dt": date(2020, 1, 4), + "name": "John", + "region": "EU", + "country": "Portugal", + "cars": 7, + "bikes": None, + "seconds": 75, + }, + { + "dt": date(2020, 1, 4), + "name": "Peter", + "region": "EU", + "country": "Italy", + "cars": None, + "bikes": 5, + "seconds": 600, + }, + { + "dt": date(2020, 1, 4), + "name": "Mary", + "region": None, + "country": None, + "cars": 9, + "bikes": 6, + "seconds": 2, + }, + { + "dt": date(2020, 1, 4), + "name": None, + "region": "Oceania", + "country": "Australia", + "cars": 10, + "bikes": 7, + "seconds": 99, + }, + { + "dt": date(2020, 1, 1), + "name": "John", + "region": "North America", + "country": "USA", + "cars": 1, + "bikes": 8, + "seconds": None, + }, + { + "dt": date(2020, 1, 1), + "name": "Mary", + "region": "Oceania", + "country": "Fiji", + "cars": 2, + "bikes": 9, + "seconds": 50, + }, + ] +) + +categories_df = DataFrame( + { + "constant": ["dummy" for _ in range(0, 101)], + "category": [f"cat{i%3}" for i in range(0, 101)], + "dept": [f"dept{i%5}" for i in range(0, 101)], + "name": [f"person{i}" for i in range(0, 101)], + "asc_idx": [i for i in range(0, 101)], + "desc_idx": [i for i in range(100, -1, -1)], + "idx_nulls": [i if i % 5 == 0 else None for i in range(0, 101)], + } +) + +timeseries_df = DataFrame( + index=to_datetime(["2019-01-01", "2019-01-02", "2019-01-05", "2019-01-07"]), + data={"label": ["x", "y", "z", "q"], "y": [1.0, 2.0, 3.0, 4.0]}, +) + +timeseries_df2 = DataFrame( + index=to_datetime(["2019-01-01", "2019-01-02", "2019-01-05", "2019-01-07"]), + data={ + "label": ["x", "y", "z", "q"], + "y": [2.0, 2.0, 2.0, 2.0], + "z": [2.0, 4.0, 10.0, 8.0], + }, +) + +lonlat_df = DataFrame( 
+ { + "city": ["New York City", "Sydney"], + "geohash": ["dr5regw3pg6f", "r3gx2u9qdevk"], + "latitude": [40.71277496, -33.85598011], + "longitude": [-74.00597306, 151.20666526], + "altitude": [5.5, 0.012], + "geodetic": [ + "40.71277496, -74.00597306, 5.5km", + "-33.85598011, 151.20666526, 12m", + ], + } +) + +prophet_df = DataFrame( + { + "__timestamp": [ + datetime(2018, 12, 31), + datetime(2019, 12, 31), + datetime(2020, 12, 31), + datetime(2021, 12, 31), + ], + "a": [1.1, 1, 1.9, 3.15], + "b": [4, 3, 4.1, 3.95], + } +) + +single_metric_df = DataFrame( + { + "dttm": to_datetime( + [ + "2019-01-01", + "2019-01-01", + "2019-01-02", + "2019-01-02", + ] + ), + "country": ["UK", "US", "UK", "US"], + "sum_metric": [5, 6, 7, 8], + } +) +multiple_metrics_df = DataFrame( + { + "dttm": to_datetime( + [ + "2019-01-01", + "2019-01-01", + "2019-01-02", + "2019-01-02", + ] + ), + "country": ["UK", "US", "UK", "US"], + "sum_metric": [5, 6, 7, 8], + "count_metric": [1, 2, 3, 4], + } +) diff --git a/tests/unit_tests/fixtures/datasets.py b/tests/unit_tests/fixtures/datasets.py new file mode 100644 index 0000000000000..5d5466a5e8135 --- /dev/null +++ b/tests/unit_tests/fixtures/datasets.py @@ -0,0 +1,206 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from typing import Any, Dict +from unittest.mock import Mock + + +def get_column_mock(params: Dict[str, Any]) -> Mock: + mock = Mock() + mock.id = params["id"] + mock.column_name = params["column_name"] + mock.verbose_name = params["verbose_name"] + mock.description = params["description"] + mock.expression = params["expression"] + mock.filterable = params["filterable"] + mock.groupby = params["groupby"] + mock.is_dttm = params["is_dttm"] + mock.type = params["type"] + return mock + + +def get_metric_mock(params: Dict[str, Any]) -> Mock: + mock = Mock() + mock.id = params["id"] + mock.metric_name = params["metric_name"] + mock.metric_name = params["verbose_name"] + mock.description = params["description"] + mock.expression = params["expression"] + mock.warning_text = params["warning_text"] + mock.d3format = params["d3format"] + return mock + + +def get_dataset_mock() -> Mock: + mock = Mock() + mock.id = None + mock.column_formats = {"ratio": ".2%"} + mock.database = {"id": 1} + mock.description = "Adding a DESCRip" + mock.default_endpoint = "" + mock.filter_select_enabled = True + mock.name = "birth_names" + mock.table_name = "birth_names" + mock.datasource_name = "birth_names" + mock.type = "table" + mock.schema = None + mock.offset = 66 + mock.cache_timeout = 55 + mock.sql = "" + mock.columns = [ + get_column_mock( + { + "id": 504, + "column_name": "ds", + "verbose_name": "", + "description": None, + "expression": "", + "filterable": True, + "groupby": True, + "is_dttm": True, + "type": "DATETIME", + } + ), + get_column_mock( + { + "id": 505, + "column_name": "gender", + "verbose_name": None, + "description": None, + "expression": "", + "filterable": True, + "groupby": True, + "is_dttm": False, + "type": "VARCHAR(16)", + } + ), + get_column_mock( + { + "id": 506, + "column_name": "name", + "verbose_name": None, + "description": None, + "expression": None, + "filterable": True, + "groupby": True, + "is_dttm": None, + "type": "VARCHAR(255)", + } + ), + get_column_mock( + { + "id": 508, + "column_name": "state", + "verbose_name": None, + "description": None, + "expression": None, + "filterable": True, + "groupby": True, + "is_dttm": None, + "type": "VARCHAR(10)", + } + ), + get_column_mock( + { + "id": 509, + "column_name": "num_boys", + "verbose_name": None, + "description": None, + "expression": None, + "filterable": True, + "groupby": True, + "is_dttm": None, + "type": "BIGINT(20)", + } + ), + get_column_mock( + { + "id": 510, + "column_name": "num_girls", + "verbose_name": None, + "description": None, + "expression": "", + "filterable": False, + "groupby": False, + "is_dttm": False, + "type": "BIGINT(20)", + } + ), + get_column_mock( + { + "id": 532, + "column_name": "num", + "verbose_name": None, + "description": None, + "expression": None, + "filterable": True, + "groupby": True, + "is_dttm": None, + "type": "BIGINT(20)", + } + ), + get_column_mock( + { + "id": 522, + "column_name": "num_california", + "verbose_name": None, + "description": None, + "expression": "CASE WHEN state = 'CA' THEN num ELSE 0 END", + "filterable": False, + "groupby": False, + "is_dttm": False, + "type": "NUMBER", + } + ), + ] + mock.metrics = ( + [ + get_metric_mock( + { + "id": 824, + "metric_name": "sum__num", + "verbose_name": "Babies", + "description": "", + "expression": "SUM(num)", + "warning_text": "", + "d3format": "", + } + ), + get_metric_mock( + { + "id": 836, + "metric_name": "count", + "verbose_name": "", + "description": None, + "expression": "count(1)", + "warning_text": None, + "d3format": 
None, + } + ), + get_metric_mock( + { + "id": 843, + "metric_name": "ratio", + "verbose_name": "Ratio Boys/Girls", + "description": "This represents the ratio of boys/girls", + "expression": "sum(num_boys) / sum(num_girls)", + "warning_text": "no warning", + "d3format": ".2%", + } + ), + ], + ) + return mock diff --git a/tests/unit_tests/fixtures/static/assets/manifest.json b/tests/unit_tests/fixtures/static/assets/manifest.json new file mode 100644 index 0000000000000..7482a04eac74e --- /dev/null +++ b/tests/unit_tests/fixtures/static/assets/manifest.json @@ -0,0 +1,20 @@ +{ + "entrypoints": { + "styles": { + "js": [ + "/static/dist/styles-js.js" + ] + }, + "main": { + "css": [ + "/static/dist/main-css.css" + ], + "js": [ + "/static/dist/main-js.js" + ] + } + }, + "main.css": "/static/dist/main.b51d3f6225194da423d6.entry.css", + "main.js": "/static/dist/main.b51d3f6225194da423d6.entry.js", + "styles.js": "/static/dist/styles.35840b4bbf794f902b7c.entry.js" +} diff --git a/tests/unit_tests/importexport/__init__.py b/tests/unit_tests/importexport/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/importexport/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/importexport/api_test.py b/tests/unit_tests/importexport/api_test.py new file mode 100644 index 0000000000000..a65a682018eda --- /dev/null +++ b/tests/unit_tests/importexport/api_test.py @@ -0,0 +1,245 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# pylint: disable=invalid-name, import-outside-toplevel, unused-argument + +import json +from io import BytesIO +from pathlib import Path +from typing import Any +from zipfile import is_zipfile, ZipFile + +from pytest_mock import MockFixture + +from superset import security_manager + + +def test_export_assets( + mocker: MockFixture, + client: Any, + full_api_access: None, +) -> None: + """ + Test exporting assets. 
+ """ + from superset.commands.importers.v1.utils import get_contents_from_bundle + + mocked_contents = [ + ( + "metadata.yaml", + "version: 1.0.0\ntype: assets\ntimestamp: '2022-01-01T00:00:00+00:00'\n", + ), + ("databases/example.yaml", ""), + ] + + ExportAssetsCommand = mocker.patch("superset.importexport.api.ExportAssetsCommand") + ExportAssetsCommand().run.return_value = mocked_contents[:] + + response = client.get("/api/v1/assets/export/") + assert response.status_code == 200 + + buf = BytesIO(response.data) + assert is_zipfile(buf) + + buf.seek(0) + with ZipFile(buf) as bundle: + contents = get_contents_from_bundle(bundle) + assert contents == dict(mocked_contents) + + +def test_import_assets( + mocker: MockFixture, + client: Any, + full_api_access: None, +) -> None: + """ + Test importing assets. + """ + mocked_contents = { + "metadata.yaml": ( + "version: 1.0.0\ntype: assets\ntimestamp: '2022-01-01T00:00:00+00:00'\n" + ), + "databases/example.yaml": "", + } + + ImportAssetsCommand = mocker.patch("superset.importexport.api.ImportAssetsCommand") + + root = Path("assets_export") + buf = BytesIO() + with ZipFile(buf, "w") as bundle: + for path, contents in mocked_contents.items(): + with bundle.open(str(root / path), "w") as fp: + fp.write(contents.encode()) + buf.seek(0) + + form_data = { + "bundle": (buf, "assets_export.zip"), + "passwords": json.dumps( + {"assets_export/databases/imported_database.yaml": "SECRET"} + ), + } + response = client.post( + "/api/v1/assets/import/", data=form_data, content_type="multipart/form-data" + ) + assert response.status_code == 200 + assert response.json == {"message": "OK"} + + passwords = {"assets_export/databases/imported_database.yaml": "SECRET"} + ImportAssetsCommand.assert_called_with(mocked_contents, passwords=passwords) + + +def test_import_assets_not_zip( + mocker: MockFixture, + client: Any, + full_api_access: None, +) -> None: + """ + Test error message when the upload is not a ZIP file. + """ + buf = BytesIO(b"definitely_not_a_zip_file") + form_data = { + "bundle": (buf, "broken.txt"), + } + response = client.post( + "/api/v1/assets/import/", data=form_data, content_type="multipart/form-data" + ) + assert response.status_code == 422 + assert response.json == { + "errors": [ + { + "message": "Not a ZIP file", + "error_type": "GENERIC_COMMAND_ERROR", + "level": "warning", + "extra": { + "issue_codes": [ + { + "code": 1010, + "message": ( + "Issue 1010 - Superset encountered an error while " + "running a command." + ), + } + ] + }, + } + ] + } + + +def test_import_assets_no_form_data( + mocker: MockFixture, + client: Any, + full_api_access: None, +) -> None: + """ + Test error message when the upload has no form data. + """ + mocker.patch.object(security_manager, "has_access", return_value=True) + + response = client.post("/api/v1/assets/import/", data="some_content") + assert response.status_code == 400 + assert response.json == { + "errors": [ + { + "message": "Request MIME type is not 'multipart/form-data'", + "error_type": "INVALID_PAYLOAD_FORMAT_ERROR", + "level": "error", + "extra": { + "issue_codes": [ + { + "code": 1019, + "message": ( + "Issue 1019 - The submitted payload has the incorrect " + "format." + ), + } + ] + }, + } + ] + } + + +def test_import_assets_incorrect_form_data( + mocker: MockFixture, + client: Any, + full_api_access: None, +) -> None: + """ + Test error message when the upload form data has the wrong key. 
+ """ + buf = BytesIO(b"definitely_not_a_zip_file") + form_data = { + "wrong": (buf, "broken.txt"), + } + response = client.post( + "/api/v1/assets/import/", data=form_data, content_type="multipart/form-data" + ) + assert response.status_code == 400 + assert response.json == {"message": "Arguments are not correct"} + + +def test_import_assets_no_contents( + mocker: MockFixture, + client: Any, + full_api_access: None, +) -> None: + """ + Test error message when the ZIP bundle has no contents. + """ + mocked_contents = { + "README.txt": "Something is wrong", + } + + root = Path("assets_export") + buf = BytesIO() + with ZipFile(buf, "w") as bundle: + for path, contents in mocked_contents.items(): + with bundle.open(str(root / path), "w") as fp: + fp.write(contents.encode()) + buf.seek(0) + + form_data = { + "bundle": (buf, "assets_export.zip"), + "passwords": json.dumps( + {"assets_export/databases/imported_database.yaml": "SECRET"} + ), + } + response = client.post( + "/api/v1/assets/import/", data=form_data, content_type="multipart/form-data" + ) + assert response.status_code == 400 + assert response.json == { + "errors": [ + { + "message": "No valid import files were found", + "error_type": "GENERIC_COMMAND_ERROR", + "level": "warning", + "extra": { + "issue_codes": [ + { + "code": 1010, + "message": ( + "Issue 1010 - Superset encountered an error while " + "running a command." + ), + } + ] + }, + } + ] + } diff --git a/tests/unit_tests/jinja_context_test.py b/tests/unit_tests/jinja_context_test.py new file mode 100644 index 0000000000000..13b3ae9e9c948 --- /dev/null +++ b/tests/unit_tests/jinja_context_test.py @@ -0,0 +1,126 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# pylint: disable=invalid-name, unused-argument + +import json + +import pytest +from pytest_mock import MockFixture + +from superset.datasets.commands.exceptions import DatasetNotFoundError +from superset.jinja_context import dataset_macro, where_in + + +def test_where_in() -> None: + """ + Test the ``where_in`` Jinja2 filter. + """ + assert where_in([1, "b", 3]) == "(1, 'b', 3)" + assert where_in([1, "b", 3], '"') == '(1, "b", 3)' + assert where_in(["O'Malley's"]) == "('O''Malley''s')" + + +def test_dataset_macro(mocker: MockFixture) -> None: + """ + Test the ``dataset_macro`` macro. 
+ """ + # pylint: disable=import-outside-toplevel + from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn + from superset.models.core import Database + + columns = [ + TableColumn(column_name="ds", is_dttm=1, type="TIMESTAMP"), + TableColumn(column_name="num_boys", type="INTEGER"), + TableColumn(column_name="revenue", type="INTEGER"), + TableColumn(column_name="expenses", type="INTEGER"), + TableColumn( + column_name="profit", type="INTEGER", expression="revenue-expenses" + ), + ] + metrics = [ + SqlMetric(metric_name="cnt", expression="COUNT(*)"), + ] + + dataset = SqlaTable( + table_name="old_dataset", + columns=columns, + metrics=metrics, + main_dttm_col="ds", + default_endpoint="https://www.youtube.com/watch?v=dQw4w9WgXcQ", # not used + database=Database(database_name="my_database", sqlalchemy_uri="sqlite://"), + offset=-8, + description="This is the description", + is_featured=1, + cache_timeout=3600, + schema="my_schema", + sql=None, + params=json.dumps( + { + "remote_id": 64, + "database_name": "examples", + "import_time": 1606677834, + } + ), + perm=None, + filter_select_enabled=1, + fetch_values_predicate="foo IN (1, 2)", + is_sqllab_view=0, # no longer used? + template_params=json.dumps({"answer": "42"}), + schema_perm=None, + extra=json.dumps({"warning_markdown": "*WARNING*"}), + ) + DatasetDAO = mocker.patch("superset.datasets.dao.DatasetDAO") + DatasetDAO.find_by_id.return_value = dataset + + assert ( + dataset_macro(1) + == """(SELECT ds AS ds, + num_boys AS num_boys, + revenue AS revenue, + expenses AS expenses, + revenue-expenses AS profit +FROM my_schema.old_dataset) AS dataset_1""" + ) + + assert ( + dataset_macro(1, include_metrics=True) + == """(SELECT ds AS ds, + num_boys AS num_boys, + revenue AS revenue, + expenses AS expenses, + revenue-expenses AS profit, + COUNT(*) AS cnt +FROM my_schema.old_dataset +GROUP BY ds, + num_boys, + revenue, + expenses, + revenue-expenses) AS dataset_1""" + ) + + assert ( + dataset_macro(1, include_metrics=True, columns=["ds"]) + == """(SELECT ds AS ds, + COUNT(*) AS cnt +FROM my_schema.old_dataset +GROUP BY ds) AS dataset_1""" + ) + + DatasetDAO.find_by_id.return_value = None + with pytest.raises(DatasetNotFoundError) as excinfo: + dataset_macro(1) + assert str(excinfo.value) == "Dataset 1 not found!" diff --git a/tests/unit_tests/key_value/__init__.py b/tests/unit_tests/key_value/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/key_value/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/tests/unit_tests/key_value/utils_test.py b/tests/unit_tests/key_value/utils_test.py new file mode 100644 index 0000000000000..5d78f6361c02c --- /dev/null +++ b/tests/unit_tests/key_value/utils_test.py @@ -0,0 +1,60 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from uuid import UUID + +import pytest + +from superset.key_value.exceptions import KeyValueParseKeyError +from superset.key_value.types import KeyValueResource + +RESOURCE = KeyValueResource.APP +UUID_KEY = UUID("3e7a2ab8-bcaf-49b0-a5df-dfb432f291cc") +ID_KEY = 123 + + +def test_get_filter_uuid() -> None: + from superset.key_value.utils import get_filter + + assert get_filter(resource=RESOURCE, key=UUID_KEY) == { + "resource": RESOURCE, + "uuid": UUID_KEY, + } + + +def test_get_filter_id() -> None: + from superset.key_value.utils import get_filter + + assert get_filter(resource=RESOURCE, key=ID_KEY) == { + "resource": RESOURCE, + "id": ID_KEY, + } + + +def test_encode_permalink_id_valid() -> None: + from superset.key_value.utils import encode_permalink_key + + salt = "abc" + assert encode_permalink_key(1, salt) == "AyBn4lm9qG8" + + +def test_decode_permalink_id_invalid() -> None: + from superset.key_value.utils import decode_permalink_id + + with pytest.raises(KeyValueParseKeyError): + decode_permalink_id("foo", "bar") diff --git a/tests/unit_tests/models/__init__.py b/tests/unit_tests/models/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/models/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/models/core_test.py b/tests/unit_tests/models/core_test.py new file mode 100644 index 0000000000000..f8534391d837e --- /dev/null +++ b/tests/unit_tests/models/core_test.py @@ -0,0 +1,145 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# pylint: disable=import-outside-toplevel + +from typing import List, Optional + +from pytest_mock import MockFixture +from sqlalchemy.engine.reflection import Inspector + + +def test_get_metrics(mocker: MockFixture) -> None: + """ + Tests for ``get_metrics``. + """ + from superset.db_engine_specs.base import MetricType + from superset.db_engine_specs.sqlite import SqliteEngineSpec + from superset.models.core import Database + + database = Database(database_name="my_database", sqlalchemy_uri="sqlite://") + assert database.get_metrics("table") == [ + { + "expression": "COUNT(*)", + "metric_name": "count", + "metric_type": "count", + "verbose_name": "COUNT(*)", + } + ] + + class CustomSqliteEngineSpec(SqliteEngineSpec): + @classmethod + def get_metrics( + cls, + database: Database, + inspector: Inspector, + table_name: str, + schema: Optional[str], + ) -> List[MetricType]: + return [ + { + "expression": "COUNT(DISTINCT user_id)", + "metric_name": "count_distinct_user_id", + "metric_type": "count_distinct", + "verbose_name": "COUNT(DISTINCT user_id)", + }, + ] + + database.get_db_engine_spec = mocker.MagicMock(return_value=CustomSqliteEngineSpec) + assert database.get_metrics("table") == [ + { + "expression": "COUNT(DISTINCT user_id)", + "metric_name": "count_distinct_user_id", + "metric_type": "count_distinct", + "verbose_name": "COUNT(DISTINCT user_id)", + }, + ] + + +def test_get_db_engine_spec(mocker: MockFixture) -> None: + """ + Tests for ``get_db_engine_spec``. + """ + from superset.db_engine_specs import BaseEngineSpec + from superset.models.core import Database + + # pylint: disable=abstract-method + class PostgresDBEngineSpec(BaseEngineSpec): + """ + A DB engine spec with drivers and a default driver. + """ + + engine = "postgresql" + engine_aliases = {"postgres"} + drivers = { + "psycopg2": "The default Postgres driver", + "asyncpg": "An async Postgres driver", + } + default_driver = "psycopg2" + + # pylint: disable=abstract-method + class OldDBEngineSpec(BaseEngineSpec): + """ + And old DB engine spec without drivers nor a default driver. 
+ """ + + engine = "mysql" + + load_engine_specs = mocker.patch("superset.db_engine_specs.load_engine_specs") + load_engine_specs.return_value = [ + PostgresDBEngineSpec, + OldDBEngineSpec, + ] + + assert ( + Database(database_name="db", sqlalchemy_uri="postgresql://").db_engine_spec + == PostgresDBEngineSpec + ) + assert ( + Database( + database_name="db", sqlalchemy_uri="postgresql+psycopg2://" + ).db_engine_spec + == PostgresDBEngineSpec + ) + assert ( + Database( + database_name="db", sqlalchemy_uri="postgresql+asyncpg://" + ).db_engine_spec + == PostgresDBEngineSpec + ) + assert ( + Database( + database_name="db", sqlalchemy_uri="postgresql+fancynewdriver://" + ).db_engine_spec + == PostgresDBEngineSpec + ) + assert ( + Database(database_name="db", sqlalchemy_uri="mysql://").db_engine_spec + == OldDBEngineSpec + ) + assert ( + Database( + database_name="db", sqlalchemy_uri="mysql+mysqlconnector://" + ).db_engine_spec + == OldDBEngineSpec + ) + assert ( + Database( + database_name="db", sqlalchemy_uri="mysql+fancynewdriver://" + ).db_engine_spec + == OldDBEngineSpec + ) diff --git a/tests/unit_tests/notifications/email_tests.py b/tests/unit_tests/notifications/email_tests.py new file mode 100644 index 0000000000000..4ce34b99cac4d --- /dev/null +++ b/tests/unit_tests/notifications/email_tests.py @@ -0,0 +1,54 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import pandas as pd + + +def test_render_description_with_html() -> None: + # `superset.models.helpers`, a dependency of following imports, + # requires app context + from superset.reports.models import ReportRecipients, ReportRecipientType + from superset.reports.notifications.base import NotificationContent + from superset.reports.notifications.email import EmailNotification + + content = NotificationContent( + name="test alert", + embedded_data=pd.DataFrame( + { + "A": [1, 2, 3], + "B": [4, 5, 6], + "C": ["111", "222", '333'], + } + ), + description='
<p>This is <a href="#">a test</a> alert</p><br />
', + header_data={ + "notification_format": "PNG", + "notification_type": "Alert", + "owners": [1], + "notification_source": None, + "chart_id": None, + "dashboard_id": None, + }, + ) + email_body = ( + EmailNotification( + recipient=ReportRecipients(type=ReportRecipientType.EMAIL), content=content + ) + ._get_content() + .body + ) + assert '
<p>This is <a href="#">a test</a> alert</p><br />
' in email_body + assert '<a href="http://www.example.com">333</a>' in email_body diff --git a/tests/unit_tests/pandas_postprocessing/__init__.py b/tests/unit_tests/pandas_postprocessing/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/pandas_postprocessing/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/pandas_postprocessing/test_aggregate.py b/tests/unit_tests/pandas_postprocessing/test_aggregate.py new file mode 100644 index 0000000000000..69d42e36f06be --- /dev/null +++ b/tests/unit_tests/pandas_postprocessing/test_aggregate.py @@ -0,0 +1,40 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from superset.utils.pandas_postprocessing import aggregate +from tests.unit_tests.fixtures.dataframes import categories_df +from tests.unit_tests.pandas_postprocessing.utils import series_to_list + + +def test_aggregate(): + aggregates = { + "asc sum": {"column": "asc_idx", "operator": "sum"}, + "asc q2": { + "column": "asc_idx", + "operator": "percentile", + "options": {"q": 75}, + }, + "desc q1": { + "column": "desc_idx", + "operator": "percentile", + "options": {"q": 25}, + }, + } + df = aggregate(df=categories_df, groupby=["constant"], aggregates=aggregates) + assert df.columns.tolist() == ["constant", "asc sum", "asc q2", "desc q1"] + assert series_to_list(df["asc sum"])[0] == 5050 + assert series_to_list(df["asc q2"])[0] == 75 + assert series_to_list(df["desc q1"])[0] == 25 diff --git a/tests/unit_tests/pandas_postprocessing/test_boxplot.py b/tests/unit_tests/pandas_postprocessing/test_boxplot.py new file mode 100644 index 0000000000000..27dff0adeb894 --- /dev/null +++ b/tests/unit_tests/pandas_postprocessing/test_boxplot.py @@ -0,0 +1,151 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import pytest + +from superset.exceptions import InvalidPostProcessingError +from superset.utils.core import PostProcessingBoxplotWhiskerType +from superset.utils.pandas_postprocessing import boxplot +from tests.unit_tests.fixtures.dataframes import names_df + + +def test_boxplot_tukey(): + df = boxplot( + df=names_df, + groupby=["region"], + whisker_type=PostProcessingBoxplotWhiskerType.TUKEY, + metrics=["cars"], + ) + columns = {column for column in df.columns} + assert columns == { + "cars__mean", + "cars__median", + "cars__q1", + "cars__q3", + "cars__max", + "cars__min", + "cars__count", + "cars__outliers", + "region", + } + assert len(df) == 4 + + +def test_boxplot_min_max(): + df = boxplot( + df=names_df, + groupby=["region"], + whisker_type=PostProcessingBoxplotWhiskerType.MINMAX, + metrics=["cars"], + ) + columns = {column for column in df.columns} + assert columns == { + "cars__mean", + "cars__median", + "cars__q1", + "cars__q3", + "cars__max", + "cars__min", + "cars__count", + "cars__outliers", + "region", + } + assert len(df) == 4 + + +def test_boxplot_percentile(): + df = boxplot( + df=names_df, + groupby=["region"], + whisker_type=PostProcessingBoxplotWhiskerType.PERCENTILE, + metrics=["cars"], + percentiles=[1, 99], + ) + columns = {column for column in df.columns} + assert columns == { + "cars__mean", + "cars__median", + "cars__q1", + "cars__q3", + "cars__max", + "cars__min", + "cars__count", + "cars__outliers", + "region", + } + assert len(df) == 4 + + +def test_boxplot_percentile_incorrect_params(): + with pytest.raises(InvalidPostProcessingError): + boxplot( + df=names_df, + groupby=["region"], + whisker_type=PostProcessingBoxplotWhiskerType.PERCENTILE, + metrics=["cars"], + ) + + with pytest.raises(InvalidPostProcessingError): + boxplot( + df=names_df, + groupby=["region"], + whisker_type=PostProcessingBoxplotWhiskerType.PERCENTILE, + metrics=["cars"], + percentiles=[10], + ) + + with pytest.raises(InvalidPostProcessingError): + boxplot( + df=names_df, + groupby=["region"], + whisker_type=PostProcessingBoxplotWhiskerType.PERCENTILE, + metrics=["cars"], + percentiles=[90, 10], + ) + + with pytest.raises(InvalidPostProcessingError): + boxplot( + df=names_df, + groupby=["region"], + whisker_type=PostProcessingBoxplotWhiskerType.PERCENTILE, + metrics=["cars"], + percentiles=[10, 90, 10], + ) + + +def test_boxplot_type_coercion(): + df = names_df + df["cars"] = df["cars"].astype(str) + df = boxplot( + df=df, + groupby=["region"], + whisker_type=PostProcessingBoxplotWhiskerType.TUKEY, + metrics=["cars"], + ) + + columns = {column for column in df.columns} + assert columns == { + "cars__mean", + "cars__median", + "cars__q1", + "cars__q3", + "cars__max", + "cars__min", + "cars__count", + "cars__outliers", + "region", + } + assert len(df) == 4 diff --git a/tests/unit_tests/pandas_postprocessing/test_compare.py b/tests/unit_tests/pandas_postprocessing/test_compare.py new file mode 100644 index 
0000000000000..9da8a31535470 --- /dev/null +++ b/tests/unit_tests/pandas_postprocessing/test_compare.py @@ -0,0 +1,231 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import pandas as pd + +from superset.constants import PandasPostprocessingCompare as PPC +from superset.utils import pandas_postprocessing as pp +from superset.utils.pandas_postprocessing.utils import FLAT_COLUMN_SEPARATOR +from tests.unit_tests.fixtures.dataframes import multiple_metrics_df, timeseries_df2 + + +def test_compare_should_not_side_effect(): + _timeseries_df2 = timeseries_df2.copy() + pp.compare( + df=_timeseries_df2, + source_columns=["y"], + compare_columns=["z"], + compare_type=PPC.DIFF, + ) + assert _timeseries_df2.equals(timeseries_df2) + + +def test_compare_diff(): + # `difference` comparison + post_df = pp.compare( + df=timeseries_df2, + source_columns=["y"], + compare_columns=["z"], + compare_type=PPC.DIFF, + ) + """ + label y z difference__y__z + 2019-01-01 x 2.0 2.0 0.0 + 2019-01-02 y 2.0 4.0 -2.0 + 2019-01-05 z 2.0 10.0 -8.0 + 2019-01-07 q 2.0 8.0 -6.0 + """ + assert post_df.equals( + pd.DataFrame( + index=timeseries_df2.index, + data={ + "label": ["x", "y", "z", "q"], + "y": [2.0, 2.0, 2.0, 2.0], + "z": [2.0, 4.0, 10.0, 8.0], + "difference__y__z": [0.0, -2.0, -8.0, -6.0], + }, + ) + ) + + # drop original columns + post_df = pp.compare( + df=timeseries_df2, + source_columns=["y"], + compare_columns=["z"], + compare_type=PPC.DIFF, + drop_original_columns=True, + ) + assert post_df.equals( + pd.DataFrame( + index=timeseries_df2.index, + data={ + "label": ["x", "y", "z", "q"], + "difference__y__z": [0.0, -2.0, -8.0, -6.0], + }, + ) + ) + + +def test_compare_percentage(): + # `percentage` comparison + post_df = pp.compare( + df=timeseries_df2, + source_columns=["y"], + compare_columns=["z"], + compare_type=PPC.PCT, + ) + """ + label y z percentage__y__z + 2019-01-01 x 2.0 2.0 0.0 + 2019-01-02 y 2.0 4.0 -0.50 + 2019-01-05 z 2.0 10.0 -0.80 + 2019-01-07 q 2.0 8.0 -0.75 + """ + assert post_df.equals( + pd.DataFrame( + index=timeseries_df2.index, + data={ + "label": ["x", "y", "z", "q"], + "y": [2.0, 2.0, 2.0, 2.0], + "z": [2.0, 4.0, 10.0, 8.0], + "percentage__y__z": [0.0, -0.50, -0.80, -0.75], + }, + ) + ) + + +def test_compare_ratio(): + # `ratio` comparison + post_df = pp.compare( + df=timeseries_df2, + source_columns=["y"], + compare_columns=["z"], + compare_type=PPC.RAT, + ) + """ + label y z ratio__y__z + 2019-01-01 x 2.0 2.0 1.00 + 2019-01-02 y 2.0 4.0 0.50 + 2019-01-05 z 2.0 10.0 0.20 + 2019-01-07 q 2.0 8.0 0.25 + """ + assert post_df.equals( + pd.DataFrame( + index=timeseries_df2.index, + data={ + "label": ["x", "y", "z", "q"], + "y": [2.0, 2.0, 2.0, 2.0], + "z": [2.0, 4.0, 10.0, 8.0], + "ratio__y__z": [1.00, 0.50, 0.20, 0.25], + }, + ) + ) + + +def 
test_compare_multi_index_column(): + index = pd.to_datetime(["2021-01-01", "2021-01-02", "2021-01-03"]) + index.name = "__timestamp" + iterables = [["m1", "m2"], ["a", "b"], ["x", "y"]] + columns = pd.MultiIndex.from_product(iterables, names=[None, "level1", "level2"]) + df = pd.DataFrame(index=index, columns=columns, data=1) + """ + m1 m2 + level1 a b a b + level2 x y x y x y x y + __timestamp + 2021-01-01 1 1 1 1 1 1 1 1 + 2021-01-02 1 1 1 1 1 1 1 1 + 2021-01-03 1 1 1 1 1 1 1 1 + """ + post_df = pp.compare( + df, + source_columns=["m1"], + compare_columns=["m2"], + compare_type=PPC.DIFF, + drop_original_columns=True, + ) + flat_df = pp.flatten(post_df) + """ + __timestamp difference__m1__m2, a, x difference__m1__m2, a, y difference__m1__m2, b, x difference__m1__m2, b, y + 0 2021-01-01 0 0 0 0 + 1 2021-01-02 0 0 0 0 + 2 2021-01-03 0 0 0 0 + """ + assert flat_df.equals( + pd.DataFrame( + data={ + "__timestamp": pd.to_datetime( + ["2021-01-01", "2021-01-02", "2021-01-03"] + ), + "difference__m1__m2, a, x": [0, 0, 0], + "difference__m1__m2, a, y": [0, 0, 0], + "difference__m1__m2, b, x": [0, 0, 0], + "difference__m1__m2, b, y": [0, 0, 0], + } + ) + ) + + +def test_compare_after_pivot(): + pivot_df = pp.pivot( + df=multiple_metrics_df, + index=["dttm"], + columns=["country"], + aggregates={ + "sum_metric": {"operator": "sum"}, + "count_metric": {"operator": "sum"}, + }, + ) + """ + count_metric sum_metric + country UK US UK US + dttm + 2019-01-01 1 2 5 6 + 2019-01-02 3 4 7 8 + """ + compared_df = pp.compare( + pivot_df, + source_columns=["count_metric"], + compare_columns=["sum_metric"], + compare_type=PPC.DIFF, + drop_original_columns=True, + ) + """ + difference__count_metric__sum_metric + country UK US + dttm + 2019-01-01 -4 -4 + 2019-01-02 -4 -4 + """ + flat_df = pp.flatten(compared_df) + """ + dttm difference__count_metric__sum_metric, UK difference__count_metric__sum_metric, US + 0 2019-01-01 -4 -4 + 1 2019-01-02 -4 -4 + """ + assert flat_df.equals( + pd.DataFrame( + data={ + "dttm": pd.to_datetime(["2019-01-01", "2019-01-02"]), + FLAT_COLUMN_SEPARATOR.join( + ["difference__count_metric__sum_metric", "UK"] + ): [-4, -4], + FLAT_COLUMN_SEPARATOR.join( + ["difference__count_metric__sum_metric", "US"] + ): [-4, -4], + } + ) + ) diff --git a/tests/unit_tests/pandas_postprocessing/test_contribution.py b/tests/unit_tests/pandas_postprocessing/test_contribution.py new file mode 100644 index 0000000000000..7eb34c4d13f7b --- /dev/null +++ b/tests/unit_tests/pandas_postprocessing/test_contribution.py @@ -0,0 +1,80 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
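The comparison cases above (difference, percentage, ratio) reduce to simple column arithmetic. A minimal pandas-only sketch with invented data; the double-underscore column names mirror the expected output above and are written out by hand here, not produced by Superset:

import pandas as pd

df = pd.DataFrame({"y": [2.0, 2.0, 2.0, 2.0], "z": [2.0, 4.0, 10.0, 8.0]})

# difference: source column minus compare column
df["difference__y__z"] = df["y"] - df["z"]               # [0.0, -2.0, -8.0, -6.0]
# percentage: change relative to the compare column
df["percentage__y__z"] = (df["y"] - df["z"]) / df["z"]   # [0.0, -0.5, -0.8, -0.75]
# ratio: source divided by compare column
df["ratio__y__z"] = df["y"] / df["z"]                    # [1.0, 0.5, 0.2, 0.25]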
+ +from datetime import datetime + +import pytest +from numpy import nan +from numpy.testing import assert_array_equal +from pandas import DataFrame + +from superset.exceptions import InvalidPostProcessingError +from superset.utils.core import DTTM_ALIAS, PostProcessingContributionOrientation +from superset.utils.pandas_postprocessing import contribution + + +def test_contribution(): + df = DataFrame( + { + DTTM_ALIAS: [ + datetime(2020, 7, 16, 14, 49), + datetime(2020, 7, 16, 14, 50), + datetime(2020, 7, 16, 14, 51), + ], + "a": [1, 3, nan], + "b": [1, 9, nan], + "c": [nan, nan, nan], + } + ) + with pytest.raises(InvalidPostProcessingError, match="not numeric"): + contribution(df, columns=[DTTM_ALIAS]) + + with pytest.raises(InvalidPostProcessingError, match="same length"): + contribution(df, columns=["a"], rename_columns=["aa", "bb"]) + + # cell contribution across row + processed_df = contribution( + df, + orientation=PostProcessingContributionOrientation.ROW, + ) + assert processed_df.columns.tolist() == [DTTM_ALIAS, "a", "b", "c"] + assert_array_equal(processed_df["a"].tolist(), [0.5, 0.25, nan]) + assert_array_equal(processed_df["b"].tolist(), [0.5, 0.75, nan]) + assert_array_equal(processed_df["c"].tolist(), [0, 0, nan]) + + # cell contribution across column without temporal column + df.pop(DTTM_ALIAS) + processed_df = contribution( + df, orientation=PostProcessingContributionOrientation.COLUMN + ) + assert processed_df.columns.tolist() == ["a", "b", "c"] + assert_array_equal(processed_df["a"].tolist(), [0.25, 0.75, 0]) + assert_array_equal(processed_df["b"].tolist(), [0.1, 0.9, 0]) + assert_array_equal(processed_df["c"].tolist(), [nan, nan, nan]) + + # contribution only on selected columns + processed_df = contribution( + df, + orientation=PostProcessingContributionOrientation.COLUMN, + columns=["a"], + rename_columns=["pct_a"], + ) + assert processed_df.columns.tolist() == ["a", "b", "c", "pct_a"] + assert_array_equal(processed_df["a"].tolist(), [1, 3, nan]) + assert_array_equal(processed_df["b"].tolist(), [1, 9, nan]) + assert_array_equal(processed_df["c"].tolist(), [nan, nan, nan]) + assert processed_df["pct_a"].tolist() == [0.25, 0.75, 0] diff --git a/tests/unit_tests/pandas_postprocessing/test_cum.py b/tests/unit_tests/pandas_postprocessing/test_cum.py new file mode 100644 index 0000000000000..130e0602520a1 --- /dev/null +++ b/tests/unit_tests/pandas_postprocessing/test_cum.py @@ -0,0 +1,164 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
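The contribution expectations above amount to expressing each cell as a share of its row or column total. A rough standalone sketch in plain pandas (values copied from the test data, with the all-NaN column and row left out):

import pandas as pd

df = pd.DataFrame({"a": [1, 3], "b": [1, 9]})

# each cell as a share of its row total
print(df.div(df.sum(axis=1), axis=0))  # a: [0.50, 0.25], b: [0.50, 0.75]
# each cell as a share of its column total
print(df.div(df.sum(axis=0), axis=1))  # a: [0.25, 0.75], b: [0.10, 0.90]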
+import pandas as pd +import pytest + +from superset.exceptions import InvalidPostProcessingError +from superset.utils import pandas_postprocessing as pp +from superset.utils.pandas_postprocessing.utils import FLAT_COLUMN_SEPARATOR +from tests.unit_tests.fixtures.dataframes import ( + multiple_metrics_df, + single_metric_df, + timeseries_df, +) +from tests.unit_tests.pandas_postprocessing.utils import series_to_list + + +def test_cum_should_not_side_effect(): + _timeseries_df = timeseries_df.copy() + pp.cum( + df=timeseries_df, + columns={"y": "y2"}, + operator="sum", + ) + assert _timeseries_df.equals(timeseries_df) + + +def test_cum(): + # create new column (cumsum) + post_df = pp.cum( + df=timeseries_df, + columns={"y": "y2"}, + operator="sum", + ) + assert post_df.columns.tolist() == ["label", "y", "y2"] + assert series_to_list(post_df["label"]) == ["x", "y", "z", "q"] + assert series_to_list(post_df["y"]) == [1.0, 2.0, 3.0, 4.0] + assert series_to_list(post_df["y2"]) == [1.0, 3.0, 6.0, 10.0] + + # overwrite column (cumprod) + post_df = pp.cum( + df=timeseries_df, + columns={"y": "y"}, + operator="prod", + ) + assert post_df.columns.tolist() == ["label", "y"] + assert series_to_list(post_df["y"]) == [1.0, 2.0, 6.0, 24.0] + + # overwrite column (cummin) + post_df = pp.cum( + df=timeseries_df, + columns={"y": "y"}, + operator="min", + ) + assert post_df.columns.tolist() == ["label", "y"] + assert series_to_list(post_df["y"]) == [1.0, 1.0, 1.0, 1.0] + + # invalid operator + with pytest.raises(InvalidPostProcessingError): + pp.cum( + df=timeseries_df, + columns={"y": "y"}, + operator="abc", + ) + + +def test_cum_after_pivot_with_single_metric(): + pivot_df = pp.pivot( + df=single_metric_df, + index=["dttm"], + columns=["country"], + aggregates={"sum_metric": {"operator": "sum"}}, + ) + """ + sum_metric + country UK US + dttm + 2019-01-01 5 6 + 2019-01-02 7 8 + """ + cum_df = pp.cum(df=pivot_df, operator="sum", columns={"sum_metric": "sum_metric"}) + """ + sum_metric + country UK US + dttm + 2019-01-01 5 6 + 2019-01-02 12 14 + """ + cum_and_flat_df = pp.flatten(cum_df) + """ + dttm sum_metric, UK sum_metric, US + 0 2019-01-01 5 6 + 1 2019-01-02 12 14 + """ + assert cum_and_flat_df.equals( + pd.DataFrame( + { + "dttm": pd.to_datetime(["2019-01-01", "2019-01-02"]), + FLAT_COLUMN_SEPARATOR.join(["sum_metric", "UK"]): [5, 12], + FLAT_COLUMN_SEPARATOR.join(["sum_metric", "US"]): [6, 14], + } + ) + ) + + +def test_cum_after_pivot_with_multiple_metrics(): + pivot_df = pp.pivot( + df=multiple_metrics_df, + index=["dttm"], + columns=["country"], + aggregates={ + "sum_metric": {"operator": "sum"}, + "count_metric": {"operator": "sum"}, + }, + ) + """ + count_metric sum_metric + country UK US UK US + dttm + 2019-01-01 1 2 5 6 + 2019-01-02 3 4 7 8 + """ + cum_df = pp.cum( + df=pivot_df, + operator="sum", + columns={"sum_metric": "sum_metric", "count_metric": "count_metric"}, + ) + """ + count_metric sum_metric + country UK US UK US + dttm + 2019-01-01 1 2 5 6 + 2019-01-02 4 6 12 14 + """ + flat_df = pp.flatten(cum_df) + """ + dttm count_metric, UK count_metric, US sum_metric, UK sum_metric, US + 0 2019-01-01 1 2 5 6 + 1 2019-01-02 4 6 12 14 + """ + assert flat_df.equals( + pd.DataFrame( + { + "dttm": pd.to_datetime(["2019-01-01", "2019-01-02"]), + FLAT_COLUMN_SEPARATOR.join(["count_metric", "UK"]): [1, 4], + FLAT_COLUMN_SEPARATOR.join(["count_metric", "US"]): [2, 6], + FLAT_COLUMN_SEPARATOR.join(["sum_metric", "UK"]): [5, 12], + FLAT_COLUMN_SEPARATOR.join(["sum_metric", "US"]): [6, 14], + } + ) + ) 
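The cumulative expectations above line up with the standard pandas accessors; a brief sketch on the same y values that timeseries_df carries:

import pandas as pd

y = pd.Series([1.0, 2.0, 3.0, 4.0])

print(y.cumsum().tolist())   # [1.0, 3.0, 6.0, 10.0]
print(y.cumprod().tolist())  # [1.0, 2.0, 6.0, 24.0]
print(y.cummin().tolist())   # [1.0, 1.0, 1.0, 1.0]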
diff --git a/tests/unit_tests/pandas_postprocessing/test_diff.py b/tests/unit_tests/pandas_postprocessing/test_diff.py new file mode 100644 index 0000000000000..c77195bbf6d71 --- /dev/null +++ b/tests/unit_tests/pandas_postprocessing/test_diff.py @@ -0,0 +1,51 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import pytest + +from superset.exceptions import InvalidPostProcessingError +from superset.utils.pandas_postprocessing import diff +from tests.unit_tests.fixtures.dataframes import timeseries_df, timeseries_df2 +from tests.unit_tests.pandas_postprocessing.utils import series_to_list + + +def test_diff(): + # overwrite column + post_df = diff(df=timeseries_df, columns={"y": "y"}) + assert post_df.columns.tolist() == ["label", "y"] + assert series_to_list(post_df["y"]) == [None, 1.0, 1.0, 1.0] + + # add column + post_df = diff(df=timeseries_df, columns={"y": "y1"}) + assert post_df.columns.tolist() == ["label", "y", "y1"] + assert series_to_list(post_df["y"]) == [1.0, 2.0, 3.0, 4.0] + assert series_to_list(post_df["y1"]) == [None, 1.0, 1.0, 1.0] + + # look ahead + post_df = diff(df=timeseries_df, columns={"y": "y1"}, periods=-1) + assert series_to_list(post_df["y1"]) == [-1.0, -1.0, -1.0, None] + + # invalid column reference + with pytest.raises(InvalidPostProcessingError): + diff( + df=timeseries_df, + columns={"abc": "abc"}, + ) + + # diff by columns + post_df = diff(df=timeseries_df2, columns={"y": "y", "z": "z"}, axis=1) + assert post_df.columns.tolist() == ["label", "y", "z"] + assert series_to_list(post_df["z"]) == [0.0, 2.0, 8.0, 6.0] diff --git a/tests/unit_tests/pandas_postprocessing/test_flatten.py b/tests/unit_tests/pandas_postprocessing/test_flatten.py new file mode 100644 index 0000000000000..fea84f7b9f5b0 --- /dev/null +++ b/tests/unit_tests/pandas_postprocessing/test_flatten.py @@ -0,0 +1,177 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
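The diff assertions above follow pandas' own diff semantics; a small illustrative sketch:

import pandas as pd

y = pd.Series([1.0, 2.0, 3.0, 4.0])

print(y.diff().tolist())            # [nan, 1.0, 1.0, 1.0]  (difference from previous row)
print(y.diff(periods=-1).tolist())  # [-1.0, -1.0, -1.0, nan]  (look-ahead difference)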
+import pandas as pd + +from superset.utils import pandas_postprocessing as pp +from superset.utils.pandas_postprocessing.utils import FLAT_COLUMN_SEPARATOR +from tests.unit_tests.fixtures.dataframes import timeseries_df + + +def test_flat_should_not_change(): + df = pd.DataFrame( + data={ + "foo": [1, 2, 3], + "bar": [4, 5, 6], + } + ) + + assert pp.flatten(df).equals(df) + + +def test_flat_should_not_reset_index(): + index = pd.to_datetime(["2021-01-01", "2021-01-02", "2021-01-03"]) + index.name = "__timestamp" + df = pd.DataFrame(index=index, data={"foo": [1, 2, 3], "bar": [4, 5, 6]}) + + assert pp.flatten(df, reset_index=False).equals(df) + + +def test_flat_should_flat_datetime_index(): + index = pd.to_datetime(["2021-01-01", "2021-01-02", "2021-01-03"]) + index.name = "__timestamp" + df = pd.DataFrame(index=index, data={"foo": [1, 2, 3], "bar": [4, 5, 6]}) + + assert pp.flatten(df).equals( + pd.DataFrame( + { + "__timestamp": index, + "foo": [1, 2, 3], + "bar": [4, 5, 6], + } + ) + ) + + +def test_flat_should_flat_multiple_index(): + index = pd.to_datetime(["2021-01-01", "2021-01-02", "2021-01-03"]) + index.name = "__timestamp" + iterables = [["foo", "bar"], [1, "two"]] + columns = pd.MultiIndex.from_product(iterables, names=["level1", "level2"]) + df = pd.DataFrame(index=index, columns=columns, data=1) + + assert pp.flatten(df).equals( + pd.DataFrame( + { + "__timestamp": index, + FLAT_COLUMN_SEPARATOR.join(["foo", "1"]): [1, 1, 1], + FLAT_COLUMN_SEPARATOR.join(["foo", "two"]): [1, 1, 1], + FLAT_COLUMN_SEPARATOR.join(["bar", "1"]): [1, 1, 1], + FLAT_COLUMN_SEPARATOR.join(["bar", "two"]): [1, 1, 1], + } + ) + ) + + +def test_flat_should_drop_index_level(): + index = pd.to_datetime(["2021-01-01", "2021-01-02", "2021-01-03"]) + index.name = "__timestamp" + columns = pd.MultiIndex.from_arrays( + [["a"] * 3, ["b"] * 3, ["c", "d", "e"], ["ff", "ii", "gg"]], + names=["level1", "level2", "level3", "level4"], + ) + df = pd.DataFrame(index=index, columns=columns, data=1) + + # drop level by index + assert pp.flatten(df.copy(), drop_levels=(0, 1,)).equals( + pd.DataFrame( + { + "__timestamp": index, + FLAT_COLUMN_SEPARATOR.join(["c", "ff"]): [1, 1, 1], + FLAT_COLUMN_SEPARATOR.join(["d", "ii"]): [1, 1, 1], + FLAT_COLUMN_SEPARATOR.join(["e", "gg"]): [1, 1, 1], + } + ) + ) + + # drop level by name + assert pp.flatten(df.copy(), drop_levels=("level1", "level2")).equals( + pd.DataFrame( + { + "__timestamp": index, + FLAT_COLUMN_SEPARATOR.join(["c", "ff"]): [1, 1, 1], + FLAT_COLUMN_SEPARATOR.join(["d", "ii"]): [1, 1, 1], + FLAT_COLUMN_SEPARATOR.join(["e", "gg"]): [1, 1, 1], + } + ) + ) + + # only leave 1 level + assert pp.flatten(df.copy(), drop_levels=(0, 1, 2)).equals( + pd.DataFrame( + { + "__timestamp": index, + FLAT_COLUMN_SEPARATOR.join(["ff"]): [1, 1, 1], + FLAT_COLUMN_SEPARATOR.join(["ii"]): [1, 1, 1], + FLAT_COLUMN_SEPARATOR.join(["gg"]): [1, 1, 1], + } + ) + ) + + +def test_flat_should_not_droplevel(): + assert pp.flatten(timeseries_df, drop_levels=(0,)).equals( + pd.DataFrame( + { + "index": pd.to_datetime( + ["2019-01-01", "2019-01-02", "2019-01-05", "2019-01-07"] + ), + "label": ["x", "y", "z", "q"], + "y": [1.0, 2.0, 3.0, 4.0], + } + ) + ) + + +def test_flat_integer_column_name(): + index = pd.to_datetime(["2021-01-01", "2021-01-02", "2021-01-03"]) + index.name = "__timestamp" + columns = pd.MultiIndex.from_arrays( + [["a"] * 3, [100, 200, 300]], + names=["level1", "level2"], + ) + df = pd.DataFrame(index=index, columns=columns, data=1) + assert pp.flatten(df, drop_levels=(0,)).equals( 
+ pd.DataFrame( + { + "__timestamp": pd.to_datetime( + ["2021-01-01", "2021-01-02", "2021-01-03"] + ), + "100": [1, 1, 1], + "200": [1, 1, 1], + "300": [1, 1, 1], + } + ) + ) + + +def test_escape_column_name(): + index = pd.to_datetime(["2021-01-01", "2021-01-02", "2021-01-03"]) + index.name = "__timestamp" + columns = pd.MultiIndex.from_arrays( + [ + ["level1,value1", "level1,value2", "level1,value3"], + ["level2, value1", "level2, value2", "level2, value3"], + ], + names=["level1", "level2"], + ) + df = pd.DataFrame(index=index, columns=columns, data=1) + assert list(pp.flatten(df).columns.values) == [ + "__timestamp", + "level1\\,value1" + FLAT_COLUMN_SEPARATOR + "level2\\, value1", + "level1\\,value2" + FLAT_COLUMN_SEPARATOR + "level2\\, value2", + "level1\\,value3" + FLAT_COLUMN_SEPARATOR + "level2\\, value3", + ] diff --git a/tests/unit_tests/pandas_postprocessing/test_geography.py b/tests/unit_tests/pandas_postprocessing/test_geography.py new file mode 100644 index 0000000000000..6162f3c8a0b94 --- /dev/null +++ b/tests/unit_tests/pandas_postprocessing/test_geography.py @@ -0,0 +1,90 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
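The flatten behaviour asserted above comes down to joining MultiIndex column levels into single strings. A minimal pandas-only sketch, using ", " as the separator to match the flattened names shown in the expected frames; the data is invented:

import pandas as pd

columns = pd.MultiIndex.from_product([["foo", "bar"], ["x", "y"]])
df = pd.DataFrame([[1, 2, 3, 4]], columns=columns)

# collapse each (level1, level2) tuple into a single "level1, level2" name
df.columns = [", ".join(str(level) for level in col) for col in df.columns]
print(df.columns.tolist())  # ['foo, x', 'foo, y', 'bar, x', 'bar, y']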
+from superset.utils.pandas_postprocessing import ( + geodetic_parse, + geohash_decode, + geohash_encode, +) +from tests.unit_tests.fixtures.dataframes import lonlat_df +from tests.unit_tests.pandas_postprocessing.utils import round_floats, series_to_list + + +def test_geohash_decode(): + # decode lon/lat from geohash + post_df = geohash_decode( + df=lonlat_df[["city", "geohash"]], + geohash="geohash", + latitude="latitude", + longitude="longitude", + ) + assert sorted(post_df.columns.tolist()) == sorted( + ["city", "geohash", "latitude", "longitude"] + ) + assert round_floats(series_to_list(post_df["longitude"]), 6) == round_floats( + series_to_list(lonlat_df["longitude"]), 6 + ) + assert round_floats(series_to_list(post_df["latitude"]), 6) == round_floats( + series_to_list(lonlat_df["latitude"]), 6 + ) + + +def test_geohash_encode(): + # encode lon/lat into geohash + post_df = geohash_encode( + df=lonlat_df[["city", "latitude", "longitude"]], + latitude="latitude", + longitude="longitude", + geohash="geohash", + ) + assert sorted(post_df.columns.tolist()) == sorted( + ["city", "geohash", "latitude", "longitude"] + ) + assert series_to_list(post_df["geohash"]) == series_to_list(lonlat_df["geohash"]) + + +def test_geodetic_parse(): + # parse geodetic string with altitude into lon/lat/altitude + post_df = geodetic_parse( + df=lonlat_df[["city", "geodetic"]], + geodetic="geodetic", + latitude="latitude", + longitude="longitude", + altitude="altitude", + ) + assert sorted(post_df.columns.tolist()) == sorted( + ["city", "geodetic", "latitude", "longitude", "altitude"] + ) + assert series_to_list(post_df["longitude"]) == series_to_list( + lonlat_df["longitude"] + ) + assert series_to_list(post_df["latitude"]) == series_to_list(lonlat_df["latitude"]) + assert series_to_list(post_df["altitude"]) == series_to_list(lonlat_df["altitude"]) + + # parse geodetic string into lon/lat + post_df = geodetic_parse( + df=lonlat_df[["city", "geodetic"]], + geodetic="geodetic", + latitude="latitude", + longitude="longitude", + ) + assert sorted(post_df.columns.tolist()) == sorted( + ["city", "geodetic", "latitude", "longitude"] + ) + assert series_to_list(post_df["longitude"]) == series_to_list( + lonlat_df["longitude"] + ) + assert series_to_list(post_df["latitude"]), series_to_list(lonlat_df["latitude"]) diff --git a/tests/unit_tests/pandas_postprocessing/test_pivot.py b/tests/unit_tests/pandas_postprocessing/test_pivot.py new file mode 100644 index 0000000000000..8efd203906077 --- /dev/null +++ b/tests/unit_tests/pandas_postprocessing/test_pivot.py @@ -0,0 +1,205 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
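The pivot scenarios tested below are close to what pandas' pivot_table does natively; a standalone sketch with invented rows shaped like single_metric_df:

import pandas as pd

df = pd.DataFrame(
    {
        "dttm": pd.to_datetime(["2019-01-01", "2019-01-01", "2019-01-02", "2019-01-02"]),
        "country": ["UK", "US", "UK", "US"],
        "sum_metric": [5, 6, 7, 8],
    }
)

pivoted = pd.pivot_table(
    df, index=["dttm"], columns=["country"], values=["sum_metric"], aggfunc="sum"
)
print(pivoted)  # one row per dttm, one ("sum_metric", country) column per country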
+ +import numpy as np +import pytest +from pandas import DataFrame, to_datetime + +from superset.exceptions import InvalidPostProcessingError +from superset.utils.pandas_postprocessing import flatten, pivot +from tests.unit_tests.fixtures.dataframes import categories_df +from tests.unit_tests.pandas_postprocessing.utils import AGGREGATES_SINGLE + + +def test_pivot_without_columns(): + """ + Make sure pivot without columns returns correct DataFrame + """ + df = pivot( + df=categories_df, + index=["name"], + aggregates=AGGREGATES_SINGLE, + ) + assert df.columns.tolist() == ["idx_nulls"] + assert len(df) == 101 + assert df["idx_nulls"].sum() == 1050 + + +def test_pivot_with_single_column(): + """ + Make sure pivot with single column returns correct DataFrame + """ + df = pivot( + df=categories_df, + index=["name"], + columns=["category"], + aggregates=AGGREGATES_SINGLE, + ) + assert df.columns.tolist() == [ + ("idx_nulls", "cat0"), + ("idx_nulls", "cat1"), + ("idx_nulls", "cat2"), + ] + assert len(df) == 101 + assert df["idx_nulls"]["cat0"].sum() == 315 + + df = pivot( + df=categories_df, + index=["dept"], + columns=["category"], + aggregates=AGGREGATES_SINGLE, + ) + assert df.columns.tolist() == [ + ("idx_nulls", "cat0"), + ("idx_nulls", "cat1"), + ("idx_nulls", "cat2"), + ] + assert len(df) == 5 + + +def test_pivot_with_multiple_columns(): + """ + Make sure pivot with multiple columns returns correct DataFrame + """ + df = pivot( + df=categories_df, + index=["name"], + columns=["category", "dept"], + aggregates=AGGREGATES_SINGLE, + ) + df = flatten(df) + assert len(df.columns) == 1 + 3 * 5 # index + possible permutations + + +def test_pivot_fill_values(): + """ + Make sure pivot with fill values returns correct DataFrame + """ + df = pivot( + df=categories_df, + index=["name"], + columns=["category"], + metric_fill_value=1, + aggregates={"idx_nulls": {"operator": "sum"}}, + ) + assert df["idx_nulls"]["cat0"].sum() == 382 + + +def test_pivot_fill_column_values(): + """ + Make sure pivot witn null column names returns correct DataFrame + """ + df_copy = categories_df.copy() + df_copy["category"] = None + df = pivot( + df=df_copy, + index=["name"], + columns=["category"], + aggregates={"idx_nulls": {"operator": "sum"}}, + ) + assert len(df) == 101 + assert df.columns.tolist() == [("idx_nulls", "")] + + +def test_pivot_exceptions(): + """ + Make sure pivot raises correct Exceptions + """ + # Missing index + with pytest.raises(TypeError): + pivot(df=categories_df, columns=["dept"], aggregates=AGGREGATES_SINGLE) + + # invalid index reference + with pytest.raises(InvalidPostProcessingError): + pivot( + df=categories_df, + index=["abc"], + columns=["dept"], + aggregates=AGGREGATES_SINGLE, + ) + + # invalid column reference + with pytest.raises(InvalidPostProcessingError): + pivot( + df=categories_df, + index=["dept"], + columns=["abc"], + aggregates=AGGREGATES_SINGLE, + ) + + # invalid aggregate options + with pytest.raises(InvalidPostProcessingError): + pivot( + df=categories_df, + index=["name"], + columns=["category"], + aggregates={"idx_nulls": {}}, + ) + + +def test_pivot_eliminate_cartesian_product_columns(): + # single metric + mock_df = DataFrame( + { + "dttm": to_datetime(["2019-01-01", "2019-01-01"]), + "a": [0, 1], + "b": [0, 1], + "metric": [9, np.NAN], + } + ) + + df = pivot( + df=mock_df, + index=["dttm"], + columns=["a", "b"], + aggregates={"metric": {"operator": "mean"}}, + drop_missing_columns=False, + ) + df = flatten(df) + assert list(df.columns) == ["dttm", "metric, 0, 0", 
"metric, 1, 1"] + assert np.isnan(df["metric, 1, 1"][0]) + + # multiple metrics + mock_df = DataFrame( + { + "dttm": to_datetime(["2019-01-01", "2019-01-01"]), + "a": [0, 1], + "b": [0, 1], + "metric": [9, np.NAN], + "metric2": [10, 11], + } + ) + + df = pivot( + df=mock_df, + index=["dttm"], + columns=["a", "b"], + aggregates={ + "metric": {"operator": "mean"}, + "metric2": {"operator": "mean"}, + }, + drop_missing_columns=False, + ) + df = flatten(df) + assert list(df.columns) == [ + "dttm", + "metric, 0, 0", + "metric, 1, 1", + "metric2, 0, 0", + "metric2, 1, 1", + ] + assert np.isnan(df["metric, 1, 1"][0]) diff --git a/tests/unit_tests/pandas_postprocessing/test_prophet.py b/tests/unit_tests/pandas_postprocessing/test_prophet.py new file mode 100644 index 0000000000000..6da3a7a591a3d --- /dev/null +++ b/tests/unit_tests/pandas_postprocessing/test_prophet.py @@ -0,0 +1,190 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from datetime import datetime +from importlib.util import find_spec + +import pandas as pd +import pytest + +from superset.exceptions import InvalidPostProcessingError +from superset.utils.core import DTTM_ALIAS +from superset.utils.pandas_postprocessing import prophet +from tests.unit_tests.fixtures.dataframes import prophet_df + + +def test_prophet_valid(): + pytest.importorskip("prophet") + + df = prophet(df=prophet_df, time_grain="P1M", periods=3, confidence_interval=0.9) + columns = {column for column in df.columns} + assert columns == { + DTTM_ALIAS, + "a__yhat", + "a__yhat_upper", + "a__yhat_lower", + "a", + "b__yhat", + "b__yhat_upper", + "b__yhat_lower", + "b", + } + assert df[DTTM_ALIAS].iloc[0].to_pydatetime() == datetime(2018, 12, 31) + assert df[DTTM_ALIAS].iloc[-1].to_pydatetime() == datetime(2022, 3, 31) + assert len(df) == 7 + + df = prophet(df=prophet_df, time_grain="P1M", periods=5, confidence_interval=0.9) + assert df[DTTM_ALIAS].iloc[0].to_pydatetime() == datetime(2018, 12, 31) + assert df[DTTM_ALIAS].iloc[-1].to_pydatetime() == datetime(2022, 5, 31) + assert len(df) == 9 + + df = prophet( + df=pd.DataFrame( + { + "__timestamp": [datetime(2022, 1, 2), datetime(2022, 1, 9)], + "x": [1, 1], + } + ), + time_grain="P1W", + periods=1, + confidence_interval=0.9, + ) + + assert df[DTTM_ALIAS].iloc[-1].to_pydatetime() == datetime(2022, 1, 16) + assert len(df) == 3 + + df = prophet( + df=pd.DataFrame( + { + "__timestamp": [datetime(2022, 1, 2), datetime(2022, 1, 9)], + "x": [1, 1], + } + ), + time_grain="1969-12-28T00:00:00Z/P1W", + periods=1, + confidence_interval=0.9, + ) + + assert df[DTTM_ALIAS].iloc[-1].to_pydatetime() == datetime(2022, 1, 16) + assert len(df) == 3 + + df = prophet( + df=pd.DataFrame( + { + "__timestamp": [datetime(2022, 1, 3), datetime(2022, 1, 10)], + "x": [1, 1], + } + ), + 
time_grain="1969-12-29T00:00:00Z/P1W", + periods=1, + confidence_interval=0.9, + ) + + assert df[DTTM_ALIAS].iloc[-1].to_pydatetime() == datetime(2022, 1, 17) + assert len(df) == 3 + + df = prophet( + df=pd.DataFrame( + { + "__timestamp": [datetime(2022, 1, 8), datetime(2022, 1, 15)], + "x": [1, 1], + } + ), + time_grain="P1W/1970-01-03T00:00:00Z", + periods=1, + confidence_interval=0.9, + ) + + assert df[DTTM_ALIAS].iloc[-1].to_pydatetime() == datetime(2022, 1, 22) + assert len(df) == 3 + + +def test_prophet_valid_zero_periods(): + pytest.importorskip("prophet") + + df = prophet(df=prophet_df, time_grain="P1M", periods=0, confidence_interval=0.9) + columns = {column for column in df.columns} + assert columns == { + DTTM_ALIAS, + "a__yhat", + "a__yhat_upper", + "a__yhat_lower", + "a", + "b__yhat", + "b__yhat_upper", + "b__yhat_lower", + "b", + } + assert df[DTTM_ALIAS].iloc[0].to_pydatetime() == datetime(2018, 12, 31) + assert df[DTTM_ALIAS].iloc[-1].to_pydatetime() == datetime(2021, 12, 31) + assert len(df) == 4 + + +def test_prophet_import(): + dynamic_module = find_spec("prophet") + if dynamic_module is None: + with pytest.raises(InvalidPostProcessingError): + prophet(df=prophet_df, time_grain="P1M", periods=3, confidence_interval=0.9) + + +def test_prophet_missing_temporal_column(): + df = prophet_df.drop(DTTM_ALIAS, axis=1) + + with pytest.raises(InvalidPostProcessingError): + prophet( + df=df, + time_grain="P1M", + periods=3, + confidence_interval=0.9, + ) + + +def test_prophet_incorrect_confidence_interval(): + with pytest.raises(InvalidPostProcessingError): + prophet( + df=prophet_df, + time_grain="P1M", + periods=3, + confidence_interval=0.0, + ) + + with pytest.raises(InvalidPostProcessingError): + prophet( + df=prophet_df, + time_grain="P1M", + periods=3, + confidence_interval=1.0, + ) + + +def test_prophet_incorrect_periods(): + with pytest.raises(InvalidPostProcessingError): + prophet( + df=prophet_df, + time_grain="P1M", + periods=-1, + confidence_interval=0.8, + ) + + +def test_prophet_incorrect_time_grain(): + with pytest.raises(InvalidPostProcessingError): + prophet( + df=prophet_df, + time_grain="yearly", + periods=10, + confidence_interval=0.8, + ) diff --git a/tests/unit_tests/pandas_postprocessing/test_rename.py b/tests/unit_tests/pandas_postprocessing/test_rename.py new file mode 100644 index 0000000000000..f49680a352618 --- /dev/null +++ b/tests/unit_tests/pandas_postprocessing/test_rename.py @@ -0,0 +1,175 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+import pandas as pd +import pytest + +from superset.exceptions import InvalidPostProcessingError +from superset.utils import pandas_postprocessing as pp +from tests.unit_tests.fixtures.dataframes import categories_df + + +def test_rename_should_not_side_effect(): + _categories_df = categories_df.copy() + pp.rename( + df=_categories_df, + columns={ + "constant": "constant_newname", + "category": "category_namename", + }, + ) + assert _categories_df.equals(categories_df) + + +def test_rename(): + new_categories_df = pp.rename( + df=categories_df, + columns={ + "constant": "constant_newname", + "category": "category_newname", + }, + ) + assert list(new_categories_df.columns.values) == [ + "constant_newname", + "category_newname", + "dept", + "name", + "asc_idx", + "desc_idx", + "idx_nulls", + ] + assert not new_categories_df.equals(categories_df) + + +def test_should_inplace_rename(): + _categories_df = categories_df.copy() + _categories_df_inplaced = pp.rename( + df=_categories_df, + columns={ + "constant": "constant_newname", + "category": "category_namename", + }, + inplace=True, + ) + assert _categories_df_inplaced.equals(_categories_df) + + +def test_should_rename_on_level(): + iterables = [["m1", "m2"], ["a", "b"], ["x", "y"]] + columns = pd.MultiIndex.from_product(iterables, names=[None, "level1", "level2"]) + df = pd.DataFrame(index=[0, 1, 2], columns=columns, data=1) + """ + m1 m2 + level1 a b a b + level2 x y x y x y x y + 0 1 1 1 1 1 1 1 1 + 1 1 1 1 1 1 1 1 1 + 2 1 1 1 1 1 1 1 1 + """ + post_df = pp.rename( + df=df, + columns={"m1": "new_m1"}, + level=0, + ) + assert post_df.columns.get_level_values(level=0).equals( + pd.Index( + [ + "new_m1", + "new_m1", + "new_m1", + "new_m1", + "m2", + "m2", + "m2", + "m2", + ] + ) + ) + + +def test_should_raise_exception_no_column(): + with pytest.raises(InvalidPostProcessingError): + pp.rename( + df=categories_df, + columns={ + "foobar": "foobar2", + }, + ) + + +def test_should_raise_exception_duplication(): + with pytest.raises(InvalidPostProcessingError): + pp.rename( + df=categories_df, + columns={ + "constant": "category", + }, + ) + + +def test_should_raise_exception_duplication_on_multiindx(): + iterables = [["m1", "m2"], ["a", "b"], ["x", "y"]] + columns = pd.MultiIndex.from_product(iterables, names=[None, "level1", "level2"]) + df = pd.DataFrame(index=[0, 1, 2], columns=columns, data=1) + """ + m1 m2 + level1 a b a b + level2 x y x y x y x y + 0 1 1 1 1 1 1 1 1 + 1 1 1 1 1 1 1 1 1 + 2 1 1 1 1 1 1 1 1 + """ + + with pytest.raises(InvalidPostProcessingError): + pp.rename( + df=df, + columns={ + "m1": "m2", + }, + level=0, + ) + pp.rename( + df=df, + columns={ + "a": "b", + }, + level=1, + ) + + +def test_should_raise_exception_invalid_level(): + with pytest.raises(InvalidPostProcessingError): + pp.rename( + df=categories_df, + columns={ + "constant": "new_constant", + }, + level=100, + ) + pp.rename( + df=categories_df, + columns={ + "constant": "new_constant", + }, + level="xxxxx", + ) + + +def test_should_return_df_empty_columns(): + assert pp.rename( + df=categories_df, + columns={}, + ).equals(categories_df) diff --git a/tests/unit_tests/pandas_postprocessing/test_resample.py b/tests/unit_tests/pandas_postprocessing/test_resample.py new file mode 100644 index 0000000000000..b1414c5fe8fdc --- /dev/null +++ b/tests/unit_tests/pandas_postprocessing/test_resample.py @@ -0,0 +1,208 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import numpy as np +import pandas as pd +import pytest +from pandas import to_datetime + +from superset.exceptions import InvalidPostProcessingError +from superset.utils import pandas_postprocessing as pp +from tests.unit_tests.fixtures.dataframes import categories_df, timeseries_df + + +def test_resample_should_not_side_effect(): + _timeseries_df = timeseries_df.copy() + pp.resample(df=_timeseries_df, rule="1D", method="ffill") + assert _timeseries_df.equals(timeseries_df) + + +def test_resample(): + post_df = pp.resample(df=timeseries_df, rule="1D", method="ffill") + """ + label y + 2019-01-01 x 1.0 + 2019-01-02 y 2.0 + 2019-01-03 y 2.0 + 2019-01-04 y 2.0 + 2019-01-05 z 3.0 + 2019-01-06 z 3.0 + 2019-01-07 q 4.0 + """ + assert post_df.equals( + pd.DataFrame( + index=pd.to_datetime( + [ + "2019-01-01", + "2019-01-02", + "2019-01-03", + "2019-01-04", + "2019-01-05", + "2019-01-06", + "2019-01-07", + ] + ), + data={ + "label": ["x", "y", "y", "y", "z", "z", "q"], + "y": [1.0, 2.0, 2.0, 2.0, 3.0, 3.0, 4.0], + }, + ) + ) + + +def test_resample_zero_fill(): + post_df = pp.resample(df=timeseries_df, rule="1D", method="asfreq", fill_value=0) + assert post_df.equals( + pd.DataFrame( + index=pd.to_datetime( + [ + "2019-01-01", + "2019-01-02", + "2019-01-03", + "2019-01-04", + "2019-01-05", + "2019-01-06", + "2019-01-07", + ] + ), + data={ + "label": ["x", "y", 0, 0, "z", 0, "q"], + "y": [1.0, 2.0, 0, 0, 3.0, 0, 4.0], + }, + ) + ) + + +def test_resample_after_pivot(): + df = pd.DataFrame( + data={ + "__timestamp": pd.to_datetime( + [ + "2022-01-13", + "2022-01-13", + "2022-01-13", + "2022-01-11", + "2022-01-11", + "2022-01-11", + ] + ), + "city": ["Chicago", "LA", "NY", "Chicago", "LA", "NY"], + "val": [6.0, 5.0, 4.0, 3.0, 2.0, 1.0], + } + ) + pivot_df = pp.pivot( + df=df, + index=["__timestamp"], + columns=["city"], + aggregates={ + "val": {"operator": "sum"}, + }, + ) + """ + val + city Chicago LA NY + __timestamp + 2022-01-11 3.0 2.0 1.0 + 2022-01-13 6.0 5.0 4.0 + """ + resample_df = pp.resample( + df=pivot_df, + rule="1D", + method="asfreq", + fill_value=0, + ) + """ + val + city Chicago LA NY + __timestamp + 2022-01-11 3.0 2.0 1.0 + 2022-01-12 0.0 0.0 0.0 + 2022-01-13 6.0 5.0 4.0 + """ + flat_df = pp.flatten(resample_df) + """ + __timestamp val, Chicago val, LA val, NY + 0 2022-01-11 3.0 2.0 1.0 + 1 2022-01-12 0.0 0.0 0.0 + 2 2022-01-13 6.0 5.0 4.0 + """ + assert flat_df.equals( + pd.DataFrame( + data={ + "__timestamp": pd.to_datetime( + ["2022-01-11", "2022-01-12", "2022-01-13"] + ), + "val, Chicago": [3.0, 0, 6.0], + "val, LA": [2.0, 0, 5.0], + "val, NY": [1.0, 0, 4.0], + } + ) + ) + + +def test_resample_should_raise_ex(): + with pytest.raises(InvalidPostProcessingError): + pp.resample( + df=categories_df, + rule="1D", + method="asfreq", + ) + + with pytest.raises(InvalidPostProcessingError): + pp.resample( + 
df=timeseries_df, + rule="1D", + method="foobar", + ) + + +def test_resample_linear(): + df = pd.DataFrame( + index=to_datetime(["2019-01-01", "2019-01-05", "2019-01-08"]), + data={"label": ["a", "e", "j"], "y": [1.0, 5.0, 8.0]}, + ) + post_df = pp.resample(df=df, rule="1D", method="linear") + """ + label y + 2019-01-01 a 1.0 + 2019-01-02 NaN 2.0 + 2019-01-03 NaN 3.0 + 2019-01-04 NaN 4.0 + 2019-01-05 e 5.0 + 2019-01-06 NaN 6.0 + 2019-01-07 NaN 7.0 + 2019-01-08 j 8.0 + """ + assert post_df.equals( + pd.DataFrame( + index=pd.to_datetime( + [ + "2019-01-01", + "2019-01-02", + "2019-01-03", + "2019-01-04", + "2019-01-05", + "2019-01-06", + "2019-01-07", + "2019-01-08", + ] + ), + data={ + "label": ["a", np.NaN, np.NaN, np.NaN, "e", np.NaN, np.NaN, "j"], + "y": [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0], + }, + ) + ) diff --git a/tests/unit_tests/pandas_postprocessing/test_rolling.py b/tests/unit_tests/pandas_postprocessing/test_rolling.py new file mode 100644 index 0000000000000..b72a8bee44827 --- /dev/null +++ b/tests/unit_tests/pandas_postprocessing/test_rolling.py @@ -0,0 +1,222 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
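The rolling expectations below match pandas' rolling window API; a brief sketch on the y values from timeseries_df:

import pandas as pd

y = pd.Series([1.0, 2.0, 3.0, 4.0])

print(y.rolling(window=2, min_periods=0).sum().tolist())            # [1.0, 3.0, 5.0, 7.0]
print(y.rolling(window=10, min_periods=0).mean().tolist())          # [1.0, 1.5, 2.0, 2.5]
print(y.rolling(window=10, min_periods=0).quantile(0.25).tolist())  # [1.0, 1.25, 1.5, 1.75]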
+import pandas as pd +import pytest + +from superset.exceptions import InvalidPostProcessingError +from superset.utils import pandas_postprocessing as pp +from superset.utils.pandas_postprocessing.utils import FLAT_COLUMN_SEPARATOR +from tests.unit_tests.fixtures.dataframes import ( + multiple_metrics_df, + single_metric_df, + timeseries_df, +) +from tests.unit_tests.pandas_postprocessing.utils import series_to_list + + +def test_rolling_should_not_side_effect(): + _timeseries_df = timeseries_df.copy() + pp.rolling( + df=timeseries_df, + columns={"y": "y"}, + rolling_type="sum", + window=2, + min_periods=0, + ) + assert _timeseries_df.equals(timeseries_df) + + +def test_rolling(): + # sum rolling type + post_df = pp.rolling( + df=timeseries_df, + columns={"y": "y"}, + rolling_type="sum", + window=2, + min_periods=0, + ) + + assert post_df.columns.tolist() == ["label", "y"] + assert series_to_list(post_df["y"]) == [1.0, 3.0, 5.0, 7.0] + + # mean rolling type with alias + post_df = pp.rolling( + df=timeseries_df, + rolling_type="mean", + columns={"y": "y_mean"}, + window=10, + min_periods=0, + ) + assert post_df.columns.tolist() == ["label", "y", "y_mean"] + assert series_to_list(post_df["y_mean"]) == [1.0, 1.5, 2.0, 2.5] + + # count rolling type + post_df = pp.rolling( + df=timeseries_df, + rolling_type="count", + columns={"y": "y"}, + window=10, + min_periods=0, + ) + assert post_df.columns.tolist() == ["label", "y"] + assert series_to_list(post_df["y"]) == [1.0, 2.0, 3.0, 4.0] + + # quantile rolling type + post_df = pp.rolling( + df=timeseries_df, + columns={"y": "q1"}, + rolling_type="quantile", + rolling_type_options={"quantile": 0.25}, + window=10, + min_periods=0, + ) + assert post_df.columns.tolist() == ["label", "y", "q1"] + assert series_to_list(post_df["q1"]) == [1.0, 1.25, 1.5, 1.75] + + # incorrect rolling type + with pytest.raises(InvalidPostProcessingError): + pp.rolling( + df=timeseries_df, + columns={"y": "y"}, + rolling_type="abc", + window=2, + ) + + # incorrect rolling type options + with pytest.raises(InvalidPostProcessingError): + pp.rolling( + df=timeseries_df, + columns={"y": "y"}, + rolling_type="quantile", + rolling_type_options={"abc": 123}, + window=2, + ) + + +def test_rolling_should_empty_df(): + pivot_df = pp.pivot( + df=single_metric_df, + index=["dttm"], + columns=["country"], + aggregates={"sum_metric": {"operator": "sum"}}, + ) + rolling_df = pp.rolling( + df=pivot_df, + rolling_type="sum", + window=2, + min_periods=2, + columns={"sum_metric": "sum_metric"}, + ) + assert rolling_df.empty is True + + +def test_rolling_after_pivot_with_single_metric(): + pivot_df = pp.pivot( + df=single_metric_df, + index=["dttm"], + columns=["country"], + aggregates={"sum_metric": {"operator": "sum"}}, + ) + """ + sum_metric + country UK US + dttm + 2019-01-01 5 6 + 2019-01-02 7 8 + """ + rolling_df = pp.rolling( + df=pivot_df, + columns={"sum_metric": "sum_metric"}, + rolling_type="sum", + window=2, + min_periods=0, + ) + """ + sum_metric + country UK US + dttm + 2019-01-01 5.0 6.0 + 2019-01-02 12.0 14.0 + """ + flat_df = pp.flatten(rolling_df) + """ + dttm sum_metric, UK sum_metric, US + 0 2019-01-01 5.0 6.0 + 1 2019-01-02 12.0 14.0 + """ + assert flat_df.equals( + pd.DataFrame( + data={ + "dttm": pd.to_datetime(["2019-01-01", "2019-01-02"]), + FLAT_COLUMN_SEPARATOR.join(["sum_metric", "UK"]): [5.0, 12.0], + FLAT_COLUMN_SEPARATOR.join(["sum_metric", "US"]): [6.0, 14.0], + } + ) + ) + + +def test_rolling_after_pivot_with_multiple_metrics(): + pivot_df = pp.pivot( + 
df=multiple_metrics_df, + index=["dttm"], + columns=["country"], + aggregates={ + "sum_metric": {"operator": "sum"}, + "count_metric": {"operator": "sum"}, + }, + ) + """ + count_metric sum_metric + country UK US UK US + dttm + 2019-01-01 1 2 5 6 + 2019-01-02 3 4 7 8 + """ + rolling_df = pp.rolling( + df=pivot_df, + columns={ + "count_metric": "count_metric", + "sum_metric": "sum_metric", + }, + rolling_type="sum", + window=2, + min_periods=0, + ) + """ + count_metric sum_metric + country UK US UK US + dttm + 2019-01-01 1.0 2.0 5.0 6.0 + 2019-01-02 4.0 6.0 12.0 14.0 + """ + flat_df = pp.flatten(rolling_df) + """ + dttm count_metric, UK count_metric, US sum_metric, UK sum_metric, US + 0 2019-01-01 1.0 2.0 5.0 6.0 + 1 2019-01-02 4.0 6.0 12.0 14.0 + """ + assert flat_df.equals( + pd.DataFrame( + data={ + "dttm": pd.to_datetime(["2019-01-01", "2019-01-02"]), + FLAT_COLUMN_SEPARATOR.join(["count_metric", "UK"]): [1.0, 4.0], + FLAT_COLUMN_SEPARATOR.join(["count_metric", "US"]): [2.0, 6.0], + FLAT_COLUMN_SEPARATOR.join(["sum_metric", "UK"]): [5.0, 12.0], + FLAT_COLUMN_SEPARATOR.join(["sum_metric", "US"]): [6.0, 14.0], + } + ) + ) diff --git a/tests/unit_tests/pandas_postprocessing/test_select.py b/tests/unit_tests/pandas_postprocessing/test_select.py new file mode 100644 index 0000000000000..2ba126fc4c739 --- /dev/null +++ b/tests/unit_tests/pandas_postprocessing/test_select.py @@ -0,0 +1,55 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
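Column selection, renaming, and exclusion as tested below can be mimicked with ordinary pandas indexing; a minimal sketch on a two-column frame:

import pandas as pd

df = pd.DataFrame({"label": ["x", "y"], "y": [1.0, 2.0]})

print(df[["y", "label"]].columns.tolist())              # ['y', 'label']  (reorder)
print(df.rename(columns={"y": "y1"}).columns.tolist())  # ['label', 'y1']  (rename)
print(df.drop(columns=["label"]).columns.tolist())      # ['y']            (exclude)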
+import pytest + +from superset.exceptions import InvalidPostProcessingError +from superset.utils.pandas_postprocessing.select import select +from tests.unit_tests.fixtures.dataframes import timeseries_df + + +def test_select(): + # reorder columns + post_df = select(df=timeseries_df, columns=["y", "label"]) + assert post_df.columns.tolist() == ["y", "label"] + + # one column + post_df = select(df=timeseries_df, columns=["label"]) + assert post_df.columns.tolist() == ["label"] + + # rename and select one column + post_df = select(df=timeseries_df, columns=["y"], rename={"y": "y1"}) + assert post_df.columns.tolist() == ["y1"] + + # rename one and leave one unchanged + post_df = select(df=timeseries_df, rename={"y": "y1"}) + assert post_df.columns.tolist() == ["label", "y1"] + + # drop one column + post_df = select(df=timeseries_df, exclude=["label"]) + assert post_df.columns.tolist() == ["y"] + + # rename and drop one column + post_df = select(df=timeseries_df, rename={"y": "y1"}, exclude=["label"]) + assert post_df.columns.tolist() == ["y1"] + + # invalid columns + with pytest.raises(InvalidPostProcessingError): + select(df=timeseries_df, columns=["abc"], rename={"abc": "qwerty"}) + + # select renamed column by new name + with pytest.raises(InvalidPostProcessingError): + select(df=timeseries_df, columns=["label_new"], rename={"label": "label_new"}) diff --git a/tests/unit_tests/pandas_postprocessing/test_sort.py b/tests/unit_tests/pandas_postprocessing/test_sort.py new file mode 100644 index 0000000000000..e19da38efc1ec --- /dev/null +++ b/tests/unit_tests/pandas_postprocessing/test_sort.py @@ -0,0 +1,53 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
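The sort cases below mirror pandas' sort_values and sort_index; a short sketch on invented data:

import pandas as pd

df = pd.DataFrame({"name": ["b", "a", "c"], "idx": [2, 1, 3]}).set_index("name")

print(df.sort_index().index.tolist())                              # ['a', 'b', 'c']
print(df.sort_index(ascending=False).index.tolist())               # ['c', 'b', 'a']
print(df.sort_values(by="idx")["idx"].tolist())                    # [1, 2, 3]
print(df.sort_values(by="idx", ascending=False)["idx"].tolist())   # [3, 2, 1]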
+import pytest +from dateutil.parser import parse + +from superset.exceptions import InvalidPostProcessingError +from superset.utils.pandas_postprocessing import sort +from tests.unit_tests.fixtures.dataframes import categories_df, timeseries_df +from tests.unit_tests.pandas_postprocessing.utils import series_to_list + + +def test_sort(): + df = sort(df=categories_df, by=["category", "asc_idx"], ascending=[True, False]) + assert series_to_list(df["asc_idx"])[1] == 96 + + df = sort(df=categories_df.set_index("name"), is_sort_index=True) + assert df.index[0] == "person0" + + df = sort(df=categories_df.set_index("name"), is_sort_index=True, ascending=False) + assert df.index[0] == "person99" + + df = sort(df=categories_df.set_index("name"), by="asc_idx") + assert df["asc_idx"][0] == 0 + + df = sort(df=categories_df.set_index("name"), by="asc_idx", ascending=False) + assert df["asc_idx"][0] == 100 + + df = sort(df=timeseries_df, is_sort_index=True) + assert df.index[0] == parse("2019-01-01") + + df = sort(df=timeseries_df, is_sort_index=True, ascending=False) + assert df.index[0] == parse("2019-01-07") + + df = sort(df=timeseries_df) + assert df.equals(timeseries_df) + + with pytest.raises(InvalidPostProcessingError): + sort(df=df, by="abc", ascending=False) + sort(df=df, by=["abc", "def"]) diff --git a/tests/unit_tests/pandas_postprocessing/test_utils.py b/tests/unit_tests/pandas_postprocessing/test_utils.py new file mode 100644 index 0000000000000..058cefcd6c72a --- /dev/null +++ b/tests/unit_tests/pandas_postprocessing/test_utils.py @@ -0,0 +1,30 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from superset.utils.pandas_postprocessing import escape_separator, unescape_separator + + +def test_escape_separator(): + assert escape_separator(r" hell \world ") == r" hell \world " + assert unescape_separator(r" hell \world ") == r" hell \world " + + escape_string = escape_separator("hello, world") + assert escape_string == r"hello\, world" + assert unescape_separator(escape_string) == "hello, world" + + escape_string = escape_separator("hello,world") + assert escape_string == r"hello\,world" + assert unescape_separator(escape_string) == "hello,world" diff --git a/tests/unit_tests/pandas_postprocessing/utils.py b/tests/unit_tests/pandas_postprocessing/utils.py new file mode 100644 index 0000000000000..07366b15774d1 --- /dev/null +++ b/tests/unit_tests/pandas_postprocessing/utils.py @@ -0,0 +1,55 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import math +from typing import Any, List, Optional + +from pandas import Series + +AGGREGATES_SINGLE = {"idx_nulls": {"operator": "sum"}} +AGGREGATES_MULTIPLE = { + "idx_nulls": {"operator": "sum"}, + "asc_idx": {"operator": "mean"}, +} + + +def series_to_list(series: Series) -> List[Any]: + """ + Converts a `Series` to a regular list, and replaces non-numeric values to + Nones. + + :param series: Series to convert + :return: list without nan or inf + """ + return [ + None + if not isinstance(val, str) and (math.isnan(val) or math.isinf(val)) + else val + for val in series.tolist() + ] + + +def round_floats( + floats: List[Optional[float]], precision: int +) -> List[Optional[float]]: + """ + Round list of floats to certain precision + + :param floats: floats to round + :param precision: intended decimal precision + :return: rounded floats + """ + return [round(val, precision) if val else None for val in floats] diff --git a/tests/unit_tests/result_set_test.py b/tests/unit_tests/result_set_test.py new file mode 100644 index 0000000000000..331810bb1ed62 --- /dev/null +++ b/tests/unit_tests/result_set_test.py @@ -0,0 +1,142 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# pylint: disable=import-outside-toplevel, unused-argument + + +import numpy as np +import pandas as pd +from numpy.core.multiarray import array + +from superset.result_set import stringify_values + + +def test_column_names_as_bytes() -> None: + """ + Test that we can handle column names as bytes. 
+ """ + from superset.db_engine_specs.redshift import RedshiftEngineSpec + from superset.result_set import SupersetResultSet + + data = ( + [ + "2016-01-26", + 392.002014, + 397.765991, + 390.575012, + 392.153015, + 392.153015, + 58147000, + ], + [ + "2016-01-27", + 392.444, + 396.842987, + 391.782013, + 394.971985, + 394.971985, + 47424400, + ], + ) + description = [ + (b"date", 1043, None, None, None, None, None), + (b"open", 701, None, None, None, None, None), + (b"high", 701, None, None, None, None, None), + (b"low", 701, None, None, None, None, None), + (b"close", 701, None, None, None, None, None), + (b"adj close", 701, None, None, None, None, None), + (b"volume", 20, None, None, None, None, None), + ] + result_set = SupersetResultSet(data, description, RedshiftEngineSpec) # type: ignore + + assert ( + result_set.to_pandas_df().to_markdown() + == """ +| | date | open | high | low | close | adj close | volume | +|---:|:-----------|--------:|--------:|--------:|--------:|------------:|---------:| +| 0 | 2016-01-26 | 392.002 | 397.766 | 390.575 | 392.153 | 392.153 | 58147000 | +| 1 | 2016-01-27 | 392.444 | 396.843 | 391.782 | 394.972 | 394.972 | 47424400 | + """.strip() + ) + + +def test_stringify_with_null_integers(): + """ + Test that we can safely handle type errors when an integer column has a null value + """ + + data = [ + ("foo", "bar", pd.NA, None), + ("foo", "bar", pd.NA, True), + ("foo", "bar", pd.NA, None), + ] + numpy_dtype = [ + ("id", "object"), + ("value", "object"), + ("num", "object"), + ("bool", "object"), + ] + + array2 = np.array(data, dtype=numpy_dtype) + column_names = ["id", "value", "num", "bool"] + + result_set = np.array([stringify_values(array2[column]) for column in column_names]) + + expected = np.array( + [ + array(["foo", "foo", "foo"], dtype=object), + array(["bar", "bar", "bar"], dtype=object), + array([None, None, None], dtype=object), + array([None, "True", None], dtype=object), + ] + ) + + assert np.array_equal(result_set, expected) + + +def test_stringify_with_null_timestamps(): + """ + Test that we can safely handle type errors when a timestamp column has a null value + """ + + data = [ + ("foo", "bar", pd.NaT, None), + ("foo", "bar", pd.NaT, True), + ("foo", "bar", pd.NaT, None), + ] + numpy_dtype = [ + ("id", "object"), + ("value", "object"), + ("num", "object"), + ("bool", "object"), + ] + + array2 = np.array(data, dtype=numpy_dtype) + column_names = ["id", "value", "num", "bool"] + + result_set = np.array([stringify_values(array2[column]) for column in column_names]) + + expected = np.array( + [ + array(["foo", "foo", "foo"], dtype=object), + array(["bar", "bar", "bar"], dtype=object), + array([None, None, None], dtype=object), + array([None, "True", None], dtype=object), + ] + ) + + assert np.array_equal(result_set, expected) diff --git a/tests/unit_tests/sql_lab_test.py b/tests/unit_tests/sql_lab_test.py new file mode 100644 index 0000000000000..29f45eab682a0 --- /dev/null +++ b/tests/unit_tests/sql_lab_test.py @@ -0,0 +1,218 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# pylint: disable=import-outside-toplevel, invalid-name, unused-argument, too-many-locals + +import sqlparse +from pytest_mock import MockerFixture +from sqlalchemy.orm.session import Session + +from superset.utils.core import override_user + + +def test_execute_sql_statement(mocker: MockerFixture, app: None) -> None: + """ + Simple test for `execute_sql_statement`. + """ + from superset.sql_lab import execute_sql_statement + + sql_statement = "SELECT 42 AS answer" + + query = mocker.MagicMock() + query.limit = 1 + query.select_as_cta_used = False + database = query.database + database.allow_dml = False + database.apply_limit_to_sql.return_value = "SELECT 42 AS answer LIMIT 2" + db_engine_spec = database.db_engine_spec + db_engine_spec.is_select_query.return_value = True + db_engine_spec.fetch_data.return_value = [(42,)] + + session = mocker.MagicMock() + cursor = mocker.MagicMock() + SupersetResultSet = mocker.patch("superset.sql_lab.SupersetResultSet") + + execute_sql_statement( + sql_statement, + query, + session=session, + cursor=cursor, + log_params={}, + apply_ctas=False, + ) + + database.apply_limit_to_sql.assert_called_with("SELECT 42 AS answer", 2, force=True) + db_engine_spec.execute.assert_called_with( + cursor, "SELECT 42 AS answer LIMIT 2", async_=True + ) + SupersetResultSet.assert_called_with([(42,)], cursor.description, db_engine_spec) + + +def test_execute_sql_statement_with_rls( + mocker: MockerFixture, +) -> None: + """ + Test for `execute_sql_statement` when an RLS rule is in place. + """ + from superset.sql_lab import execute_sql_statement + + sql_statement = "SELECT * FROM sales" + + query = mocker.MagicMock() + query.limit = 100 + query.select_as_cta_used = False + database = query.database + database.allow_dml = False + database.apply_limit_to_sql.return_value = ( + "SELECT * FROM sales WHERE organization_id=42 LIMIT 101" + ) + db_engine_spec = database.db_engine_spec + db_engine_spec.is_select_query.return_value = True + db_engine_spec.fetch_data.return_value = [(42,)] + + session = mocker.MagicMock() + cursor = mocker.MagicMock() + SupersetResultSet = mocker.patch("superset.sql_lab.SupersetResultSet") + mocker.patch( + "superset.sql_lab.insert_rls", + return_value=sqlparse.parse("SELECT * FROM sales WHERE organization_id=42")[0], + ) + mocker.patch("superset.sql_lab.is_feature_enabled", return_value=True) + + execute_sql_statement( + sql_statement, + query, + session=session, + cursor=cursor, + log_params={}, + apply_ctas=False, + ) + + database.apply_limit_to_sql.assert_called_with( + "SELECT * FROM sales WHERE organization_id=42", + 101, + force=True, + ) + db_engine_spec.execute.assert_called_with( + cursor, + "SELECT * FROM sales WHERE organization_id=42 LIMIT 101", + async_=True, + ) + SupersetResultSet.assert_called_with([(42,)], cursor.description, db_engine_spec) + + +def test_sql_lab_insert_rls( + mocker: MockerFixture, + session: Session, +) -> None: + """ + Integration test for `insert_rls`. 
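+
+    The same query is executed twice against a small SQLite table: once
+    without any RLS rule and once with a ``c > 5`` filter attached to the
+    table, asserting both the returned rows and the rewritten ``executed_sql``.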
+ """ + from flask_appbuilder.security.sqla.models import Role, User + + from superset.connectors.sqla.models import RowLevelSecurityFilter, SqlaTable + from superset.models.core import Database + from superset.models.sql_lab import Query + from superset.security.manager import SupersetSecurityManager + from superset.sql_lab import execute_sql_statement + from superset.utils.core import RowLevelSecurityFilterType + + engine = session.connection().engine + Query.metadata.create_all(engine) # pylint: disable=no-member + + connection = engine.raw_connection() + connection.execute("CREATE TABLE t (c INTEGER)") + for i in range(10): + connection.execute("INSERT INTO t VALUES (?)", (i,)) + + cursor = connection.cursor() + + query = Query( + sql="SELECT c FROM t", + client_id="abcde", + database=Database(database_name="test_db", sqlalchemy_uri="sqlite://"), + schema=None, + limit=5, + select_as_cta_used=False, + ) + session.add(query) + session.commit() + + admin = User( + first_name="Alice", + last_name="Doe", + email="adoe@example.org", + username="admin", + roles=[Role(name="Admin")], + ) + + # first without RLS + with override_user(admin): + superset_result_set = execute_sql_statement( + sql_statement=query.sql, + query=query, + session=session, + cursor=cursor, + log_params=None, + apply_ctas=False, + ) + assert ( + superset_result_set.to_pandas_df().to_markdown() + == """ +| | c | +|---:|----:| +| 0 | 0 | +| 1 | 1 | +| 2 | 2 | +| 3 | 3 | +| 4 | 4 |""".strip() + ) + assert query.executed_sql == "SELECT c FROM t\nLIMIT 6" + + # now with RLS + rls = RowLevelSecurityFilter( + name="sqllab_rls1", + filter_type=RowLevelSecurityFilterType.REGULAR, + tables=[SqlaTable(database_id=1, schema=None, table_name="t")], + roles=[admin.roles[0]], + group_key=None, + clause="c > 5", + ) + session.add(rls) + session.flush() + mocker.patch.object(SupersetSecurityManager, "find_user", return_value=admin) + mocker.patch("superset.sql_lab.is_feature_enabled", return_value=True) + + with override_user(admin): + superset_result_set = execute_sql_statement( + sql_statement=query.sql, + query=query, + session=session, + cursor=cursor, + log_params=None, + apply_ctas=False, + ) + assert ( + superset_result_set.to_pandas_df().to_markdown() + == """ +| | c | +|---:|----:| +| 0 | 6 | +| 1 | 7 | +| 2 | 8 | +| 3 | 9 |""".strip() + ) + assert query.executed_sql == "SELECT c FROM t WHERE (t.c > 5)\nLIMIT 6" diff --git a/tests/unit_tests/sql_parse_tests.py b/tests/unit_tests/sql_parse_tests.py new file mode 100644 index 0000000000000..ba3da69aaefaf --- /dev/null +++ b/tests/unit_tests/sql_parse_tests.py @@ -0,0 +1,1508 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
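+
+# Unit tests for ``superset.sql_parse``: table extraction, statement
+# classification, clause sanitization, and RLS injection.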
+# pylint: disable=invalid-name, redefined-outer-name, unused-argument, protected-access, too-many-lines + +import unittest +from typing import Optional, Set + +import pytest +import sqlparse +from pytest_mock import MockerFixture +from sqlalchemy import text +from sqlparse.sql import Identifier, Token, TokenList +from sqlparse.tokens import Name + +from superset.exceptions import QueryClauseValidationException +from superset.sql_parse import ( + add_table_name, + extract_table_references, + get_rls_for_table, + has_table_query, + insert_rls, + ParsedQuery, + sanitize_clause, + strip_comments_from_sql, + Table, +) + + +def extract_tables(query: str) -> Set[Table]: + """ + Helper function to extract tables referenced in a query. + """ + return ParsedQuery(query).tables + + +def test_table() -> None: + """ + Test the ``Table`` class and its string conversion. + + Special characters in the table, schema, or catalog name should be escaped correctly. + """ + assert str(Table("tbname")) == "tbname" + assert str(Table("tbname", "schemaname")) == "schemaname.tbname" + assert ( + str(Table("tbname", "schemaname", "catalogname")) + == "catalogname.schemaname.tbname" + ) + assert ( + str(Table("table.name", "schema/name", "catalog\nname")) + == "catalog%0Aname.schema%2Fname.table%2Ename" + ) + + +def test_extract_tables() -> None: + """ + Test that referenced tables are parsed correctly from the SQL. + """ + assert extract_tables("SELECT * FROM tbname") == {Table("tbname")} + assert extract_tables("SELECT * FROM tbname foo") == {Table("tbname")} + assert extract_tables("SELECT * FROM tbname AS foo") == {Table("tbname")} + + # underscore + assert extract_tables("SELECT * FROM tb_name") == {Table("tb_name")} + + # quotes + assert extract_tables('SELECT * FROM "tbname"') == {Table("tbname")} + + # unicode + assert extract_tables('SELECT * FROM "tb_name" WHERE city = "Lübeck"') == { + Table("tb_name") + } + + # columns + assert extract_tables("SELECT field1, field2 FROM tb_name") == {Table("tb_name")} + assert extract_tables("SELECT t1.f1, t2.f2 FROM t1, t2") == { + Table("t1"), + Table("t2"), + } + + # named table + assert extract_tables("SELECT a.date, a.field FROM left_table a LIMIT 10") == { + Table("left_table") + } + + # reverse select + assert extract_tables("FROM t1 SELECT field") == {Table("t1")} + + +def test_extract_tables_subselect() -> None: + """ + Test that tables inside subselects are parsed correctly. + """ + assert ( + extract_tables( + """ +SELECT sub.* +FROM ( + SELECT * + FROM s1.t1 + WHERE day_of_week = 'Friday' + ) sub, s2.t2 +WHERE sub.resolution = 'NONE' +""" + ) + == {Table("t1", "s1"), Table("t2", "s2")} + ) + + assert ( + extract_tables( + """ +SELECT sub.* +FROM ( + SELECT * + FROM s1.t1 + WHERE day_of_week = 'Friday' +) sub +WHERE sub.resolution = 'NONE' +""" + ) + == {Table("t1", "s1")} + ) + + assert ( + extract_tables( + """ +SELECT * FROM t1 +WHERE s11 > ANY ( + SELECT COUNT(*) /* no hint */ FROM t2 + WHERE NOT EXISTS ( + SELECT * FROM t3 + WHERE ROW(5*t2.s1,77)=( + SELECT 50,11*s1 FROM t4 + ) + ) +) +""" + ) + == {Table("t1"), Table("t2"), Table("t3"), Table("t4")} + ) + + +def test_extract_tables_select_in_expression() -> None: + """ + Test that parser works with ``SELECT``s used as expressions. 
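+
+    A scalar subquery in the column list should still be reported as a
+    referenced table.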
+ """ + assert extract_tables("SELECT f1, (SELECT count(1) FROM t2) FROM t1") == { + Table("t1"), + Table("t2"), + } + assert extract_tables("SELECT f1, (SELECT count(1) FROM t2) as f2 FROM t1") == { + Table("t1"), + Table("t2"), + } + + +def test_extract_tables_parenthesis() -> None: + """ + Test that parenthesis are parsed correctly. + """ + assert extract_tables("SELECT f1, (x + y) AS f2 FROM t1") == {Table("t1")} + + +def test_extract_tables_with_schema() -> None: + """ + Test that schemas are parsed correctly. + """ + assert extract_tables("SELECT * FROM schemaname.tbname") == { + Table("tbname", "schemaname") + } + assert extract_tables('SELECT * FROM "schemaname"."tbname"') == { + Table("tbname", "schemaname") + } + assert extract_tables('SELECT * FROM "schemaname"."tbname" foo') == { + Table("tbname", "schemaname") + } + assert extract_tables('SELECT * FROM "schemaname"."tbname" AS foo') == { + Table("tbname", "schemaname") + } + + +def test_extract_tables_union() -> None: + """ + Test that ``UNION`` queries work as expected. + """ + assert extract_tables("SELECT * FROM t1 UNION SELECT * FROM t2") == { + Table("t1"), + Table("t2"), + } + assert extract_tables("SELECT * FROM t1 UNION ALL SELECT * FROM t2") == { + Table("t1"), + Table("t2"), + } + assert extract_tables("SELECT * FROM t1 INTERSECT ALL SELECT * FROM t2") == { + Table("t1"), + Table("t2"), + } + + +def test_extract_tables_select_from_values() -> None: + """ + Test that selecting from values returns no tables. + """ + assert extract_tables("SELECT * FROM VALUES (13, 42)") == set() + + +def test_extract_tables_select_array() -> None: + """ + Test that queries selecting arrays work as expected. + """ + assert ( + extract_tables( + """ +SELECT ARRAY[1, 2, 3] AS my_array +FROM t1 LIMIT 10 +""" + ) + == {Table("t1")} + ) + + +def test_extract_tables_select_if() -> None: + """ + Test that queries with an ``IF`` work as expected. + """ + assert ( + extract_tables( + """ +SELECT IF(CARDINALITY(my_array) >= 3, my_array[3], NULL) +FROM t1 LIMIT 10 +""" + ) + == {Table("t1")} + ) + + +def test_extract_tables_with_catalog() -> None: + """ + Test that catalogs are parsed correctly. + """ + assert extract_tables("SELECT * FROM catalogname.schemaname.tbname") == { + Table("tbname", "schemaname", "catalogname") + } + + +def test_extract_tables_illdefined() -> None: + """ + Test that ill-defined tables return an empty set. + """ + assert extract_tables("SELECT * FROM schemaname.") == set() + assert extract_tables("SELECT * FROM catalogname.schemaname.") == set() + assert extract_tables("SELECT * FROM catalogname..") == set() + assert extract_tables("SELECT * FROM catalogname..tbname") == set() + + +def test_extract_tables_show_tables_from() -> None: + """ + Test ``SHOW TABLES FROM``. + """ + assert extract_tables("SHOW TABLES FROM s1 like '%order%'") == set() + + +def test_extract_tables_show_columns_from() -> None: + """ + Test ``SHOW COLUMNS FROM``. + """ + assert extract_tables("SHOW COLUMNS FROM t1") == {Table("t1")} + + +def test_extract_tables_where_subquery() -> None: + """ + Test that tables in a ``WHERE`` subquery are parsed correctly. 
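+
+    Both the outer table and the table referenced inside the subquery should
+    be reported.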
+ """ + assert ( + extract_tables( + """ +SELECT name +FROM t1 +WHERE regionkey = (SELECT max(regionkey) FROM t2) +""" + ) + == {Table("t1"), Table("t2")} + ) + + assert ( + extract_tables( + """ +SELECT name +FROM t1 +WHERE regionkey IN (SELECT regionkey FROM t2) +""" + ) + == {Table("t1"), Table("t2")} + ) + + assert ( + extract_tables( + """ +SELECT name +FROM t1 +WHERE regionkey EXISTS (SELECT regionkey FROM t2) +""" + ) + == {Table("t1"), Table("t2")} + ) + + +def test_extract_tables_describe() -> None: + """ + Test ``DESCRIBE``. + """ + assert extract_tables("DESCRIBE t1") == {Table("t1")} + + +def test_extract_tables_show_partitions() -> None: + """ + Test ``SHOW PARTITIONS``. + """ + assert ( + extract_tables( + """ +SHOW PARTITIONS FROM orders +WHERE ds >= '2013-01-01' ORDER BY ds DESC +""" + ) + == {Table("orders")} + ) + + +def test_extract_tables_join() -> None: + """ + Test joins. + """ + assert extract_tables("SELECT t1.*, t2.* FROM t1 JOIN t2 ON t1.a = t2.a;") == { + Table("t1"), + Table("t2"), + } + + assert ( + extract_tables( + """ +SELECT a.date, b.name +FROM left_table a +JOIN ( + SELECT + CAST((b.year) as VARCHAR) date, + name + FROM right_table +) b +ON a.date = b.date +""" + ) + == {Table("left_table"), Table("right_table")} + ) + + assert ( + extract_tables( + """ +SELECT a.date, b.name +FROM left_table a +LEFT INNER JOIN ( + SELECT + CAST((b.year) as VARCHAR) date, + name + FROM right_table +) b +ON a.date = b.date +""" + ) + == {Table("left_table"), Table("right_table")} + ) + + assert ( + extract_tables( + """ +SELECT a.date, b.name +FROM left_table a +RIGHT OUTER JOIN ( + SELECT + CAST((b.year) as VARCHAR) date, + name + FROM right_table +) b +ON a.date = b.date +""" + ) + == {Table("left_table"), Table("right_table")} + ) + + assert ( + extract_tables( + """ +SELECT a.date, b.name +FROM left_table a +FULL OUTER JOIN ( + SELECT + CAST((b.year) as VARCHAR) date, + name + FROM right_table +) b +ON a.date = b.date +""" + ) + == {Table("left_table"), Table("right_table")} + ) + + +def test_extract_tables_semi_join() -> None: + """ + Test ``LEFT SEMI JOIN``. + """ + assert ( + extract_tables( + """ +SELECT a.date, b.name +FROM left_table a +LEFT SEMI JOIN ( + SELECT + CAST((b.year) as VARCHAR) date, + name + FROM right_table +) b +ON a.data = b.date +""" + ) + == {Table("left_table"), Table("right_table")} + ) + + +def test_extract_tables_combinations() -> None: + """ + Test a complex case with nested queries. + """ + assert ( + extract_tables( + """ +SELECT * FROM t1 +WHERE s11 > ANY ( + SELECT * FROM t1 UNION ALL SELECT * FROM ( + SELECT t6.*, t3.* FROM t6 JOIN t3 ON t6.a = t3.a + ) tmp_join + WHERE NOT EXISTS ( + SELECT * FROM t3 + WHERE ROW(5*t3.s1,77)=( + SELECT 50,11*s1 FROM t4 + ) + ) +) +""" + ) + == {Table("t1"), Table("t3"), Table("t4"), Table("t6")} + ) + + assert ( + extract_tables( + """ +SELECT * FROM ( + SELECT * FROM ( + SELECT * FROM ( + SELECT * FROM EmployeeS + ) AS S1 + ) AS S2 +) AS S3 +""" + ) + == {Table("EmployeeS")} + ) + + +def test_extract_tables_with() -> None: + """ + Test ``WITH``. 
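+
+    Tables referenced only through other CTEs should not be reported as
+    physical tables.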
+ """ + assert ( + extract_tables( + """ +WITH + x AS (SELECT a FROM t1), + y AS (SELECT a AS b FROM t2), + z AS (SELECT b AS c FROM t3) +SELECT c FROM z +""" + ) + == {Table("t1"), Table("t2"), Table("t3")} + ) + + assert ( + extract_tables( + """ +WITH + x AS (SELECT a FROM t1), + y AS (SELECT a AS b FROM x), + z AS (SELECT b AS c FROM y) +SELECT c FROM z +""" + ) + == {Table("t1")} + ) + + +def test_extract_tables_reusing_aliases() -> None: + """ + Test that the parser follows aliases. + """ + assert ( + extract_tables( + """ +with q1 as ( select key from q2 where key = '5'), +q2 as ( select key from src where key = '5') +select * from (select key from q1) a +""" + ) + == {Table("src")} + ) + + +def test_extract_tables_multistatement() -> None: + """ + Test that the parser works with multiple statements. + """ + assert extract_tables("SELECT * FROM t1; SELECT * FROM t2") == { + Table("t1"), + Table("t2"), + } + assert extract_tables("SELECT * FROM t1; SELECT * FROM t2;") == { + Table("t1"), + Table("t2"), + } + + +def test_extract_tables_complex() -> None: + """ + Test a few complex queries. + """ + assert ( + extract_tables( + """ +SELECT sum(m_examples) AS "sum__m_example" +FROM ( + SELECT + COUNT(DISTINCT id_userid) AS m_examples, + some_more_info + FROM my_b_table b + JOIN my_t_table t ON b.ds=t.ds + JOIN my_l_table l ON b.uid=l.uid + WHERE + b.rid IN ( + SELECT other_col + FROM inner_table + ) + AND l.bla IN ('x', 'y') + GROUP BY 2 + ORDER BY 2 ASC +) AS "meh" +ORDER BY "sum__m_example" DESC +LIMIT 10; +""" + ) + == { + Table("my_l_table"), + Table("my_b_table"), + Table("my_t_table"), + Table("inner_table"), + } + ) + + assert ( + extract_tables( + """ +SELECT * +FROM table_a AS a, table_b AS b, table_c as c +WHERE a.id = b.id and b.id = c.id +""" + ) + == {Table("table_a"), Table("table_b"), Table("table_c")} + ) + + assert ( + extract_tables( + """ +SELECT somecol AS somecol +FROM ( + WITH bla AS ( + SELECT col_a + FROM a + WHERE + 1=1 + AND column_of_choice NOT IN ( + SELECT interesting_col + FROM b + ) + ), + rb AS ( + SELECT yet_another_column + FROM ( + SELECT a + FROM c + GROUP BY the_other_col + ) not_table + LEFT JOIN bla foo + ON foo.prop = not_table.bad_col0 + WHERE 1=1 + GROUP BY + not_table.bad_col1 , + not_table.bad_col2 , + ORDER BY not_table.bad_col_3 DESC , + not_table.bad_col4 , + not_table.bad_col5 + ) + SELECT random_col + FROM d + WHERE 1=1 + UNION ALL SELECT even_more_cols + FROM e + WHERE 1=1 + UNION ALL SELECT lets_go_deeper + FROM f + WHERE 1=1 + WHERE 2=2 + GROUP BY last_col + LIMIT 50000 +) +""" + ) + == {Table("a"), Table("b"), Table("c"), Table("d"), Table("e"), Table("f")} + ) + + +def test_extract_tables_mixed_from_clause() -> None: + """ + Test that the parser handles a ``FROM`` clause with table and subselect. + """ + assert ( + extract_tables( + """ +SELECT * +FROM table_a AS a, (select * from table_b) AS b, table_c as c +WHERE a.id = b.id and b.id = c.id +""" + ) + == {Table("table_a"), Table("table_b"), Table("table_c")} + ) + + +def test_extract_tables_nested_select() -> None: + """ + Test that the parser handles selects inside functions. 
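+
+    The queries below nest a ``SELECT`` inside ``extractvalue``/``concat``
+    calls, a pattern commonly seen in SQL injection probes.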
+ """ + assert ( + extract_tables( + """ +select (extractvalue(1,concat(0x7e,(select GROUP_CONCAT(TABLE_NAME) +from INFORMATION_SCHEMA.COLUMNS +WHERE TABLE_SCHEMA like "%bi%"),0x7e))); +""" + ) + == {Table("COLUMNS", "INFORMATION_SCHEMA")} + ) + + assert ( + extract_tables( + """ +select (extractvalue(1,concat(0x7e,(select GROUP_CONCAT(COLUMN_NAME) +from INFORMATION_SCHEMA.COLUMNS +WHERE TABLE_NAME="bi_achievement_daily"),0x7e))); +""" + ) + == {Table("COLUMNS", "INFORMATION_SCHEMA")} + ) + + +def test_extract_tables_complex_cte_with_prefix() -> None: + """ + Test that the parser handles CTEs with prefixes. + """ + assert ( + extract_tables( + """ +WITH CTE__test (SalesPersonID, SalesOrderID, SalesYear) +AS ( + SELECT SalesPersonID, SalesOrderID, YEAR(OrderDate) AS SalesYear + FROM SalesOrderHeader + WHERE SalesPersonID IS NOT NULL +) +SELECT SalesPersonID, COUNT(SalesOrderID) AS TotalSales, SalesYear +FROM CTE__test +GROUP BY SalesYear, SalesPersonID +ORDER BY SalesPersonID, SalesYear; +""" + ) + == {Table("SalesOrderHeader")} + ) + + +def test_extract_tables_identifier_list_with_keyword_as_alias() -> None: + """ + Test that aliases that are keywords are parsed correctly. + """ + assert ( + extract_tables( + """ +WITH + f AS (SELECT * FROM foo), + match AS (SELECT * FROM f) +SELECT * FROM match +""" + ) + == {Table("foo")} + ) + + +def test_update() -> None: + """ + Test that ``UPDATE`` is not detected as ``SELECT``. + """ + assert ParsedQuery("UPDATE t1 SET col1 = NULL").is_select() is False + + +def test_set() -> None: + """ + Test that ``SET`` is detected correctly. + """ + query = ParsedQuery( + """ +-- comment +SET hivevar:desc='Legislators'; +""" + ) + assert query.is_set() is True + assert query.is_select() is False + + assert ParsedQuery("set hivevar:desc='bla'").is_set() is True + assert ParsedQuery("SELECT 1").is_set() is False + + +def test_show() -> None: + """ + Test that ``SHOW`` is detected correctly. + """ + query = ParsedQuery( + """ +-- comment +SHOW LOCKS test EXTENDED; +-- comment +""" + ) + assert query.is_show() is True + assert query.is_select() is False + + assert ParsedQuery("SHOW TABLES").is_show() is True + assert ParsedQuery("shOw TABLES").is_show() is True + assert ParsedQuery("show TABLES").is_show() is True + assert ParsedQuery("SELECT 1").is_show() is False + + +def test_is_explain() -> None: + """ + Test that ``EXPLAIN`` is detected correctly. + """ + assert ParsedQuery("EXPLAIN SELECT 1").is_explain() is True + assert ParsedQuery("EXPLAIN SELECT 1").is_select() is False + + assert ( + ParsedQuery( + """ +-- comment +EXPLAIN select * from table +-- comment 2 +""" + ).is_explain() + is True + ) + + assert ( + ParsedQuery( + """ +-- comment +EXPLAIN select * from table +where col1 = 'something' +-- comment 2 + +-- comment 3 +EXPLAIN select * from table +where col1 = 'something' +-- comment 4 +""" + ).is_explain() + is True + ) + + assert ( + ParsedQuery( + """ +-- This is a comment + -- this is another comment but with a space in the front +EXPLAIN SELECT * FROM TABLE +""" + ).is_explain() + is True + ) + + assert ( + ParsedQuery( + """ +/* This is a comment + with stars instead */ +EXPLAIN SELECT * FROM TABLE +""" + ).is_explain() + is True + ) + + assert ( + ParsedQuery( + """ +-- comment +select * from table +where col1 = 'something' +-- comment 2 +""" + ).is_explain() + is False + ) + + +def test_is_valid_ctas() -> None: + """ + Test if a query is a valid CTAS. + + A valid CTAS has a ``SELECT`` as its last statement. 
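+
+    For example, ``SET @value = 42; SELECT @value`` is a valid CTAS source,
+    while ``EXPLAIN SELECT * FROM table`` is not.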
+ """ + assert ( + ParsedQuery("SELECT * FROM table", strip_comments=True).is_valid_ctas() is True + ) + + assert ( + ParsedQuery( + """ +-- comment +SELECT * FROM table +-- comment 2 +""", + strip_comments=True, + ).is_valid_ctas() + is True + ) + + assert ( + ParsedQuery( + """ +-- comment +SET @value = 42; +SELECT @value as foo; +-- comment 2 +""", + strip_comments=True, + ).is_valid_ctas() + is True + ) + + assert ( + ParsedQuery( + """ +-- comment +EXPLAIN SELECT * FROM table +-- comment 2 +""", + strip_comments=True, + ).is_valid_ctas() + is False + ) + + assert ( + ParsedQuery( + """ +SELECT * FROM table; +INSERT INTO TABLE (foo) VALUES (42); +""", + strip_comments=True, + ).is_valid_ctas() + is False + ) + + +def test_is_valid_cvas() -> None: + """ + Test if a query is a valid CVAS. + + A valid CVAS has a single ``SELECT`` statement. + """ + assert ( + ParsedQuery("SELECT * FROM table", strip_comments=True).is_valid_cvas() is True + ) + + assert ( + ParsedQuery( + """ +-- comment +SELECT * FROM table +-- comment 2 +""", + strip_comments=True, + ).is_valid_cvas() + is True + ) + + assert ( + ParsedQuery( + """ +-- comment +SET @value = 42; +SELECT @value as foo; +-- comment 2 +""", + strip_comments=True, + ).is_valid_cvas() + is False + ) + + assert ( + ParsedQuery( + """ +-- comment +EXPLAIN SELECT * FROM table +-- comment 2 +""", + strip_comments=True, + ).is_valid_cvas() + is False + ) + + assert ( + ParsedQuery( + """ +SELECT * FROM table; +INSERT INTO TABLE (foo) VALUES (42); +""", + strip_comments=True, + ).is_valid_cvas() + is False + ) + + +def test_is_select_cte_with_comments() -> None: + """ + Some CTES with comments are not correctly identified as SELECTS. + """ + sql = ParsedQuery( + """WITH blah AS + (SELECT * FROM core_dev.manager_team), + +blah2 AS + (SELECT * FROM core_dev.manager_workspace) + +SELECT * FROM blah +INNER JOIN blah2 ON blah2.team_id = blah.team_id""" + ) + assert sql.is_select() + + sql = ParsedQuery( + """WITH blah AS +/*blahblahbalh*/ + (SELECT * FROM core_dev.manager_team), +--blahblahbalh + +blah2 AS + (SELECT * FROM core_dev.manager_workspace) + +SELECT * FROM blah +INNER JOIN blah2 ON blah2.team_id = blah.team_id""" + ) + assert sql.is_select() + + +def test_cte_is_select() -> None: + """ + Some CTEs are not correctly identified as SELECTS. + """ + # `AS(` gets parsed as a function + sql = ParsedQuery( + """WITH foo AS( +SELECT + FLOOR(__time TO WEEK) AS "week", + name, + COUNT(DISTINCT user_id) AS "unique_users" +FROM "druid"."my_table" +GROUP BY 1,2 +) +SELECT + f.week, + f.name, + f.unique_users +FROM foo f""" + ) + assert sql.is_select() + + +def test_unknown_select() -> None: + """ + Test that `is_select` works when sqlparse fails to identify the type. + """ + sql = "WITH foo AS(SELECT 1) SELECT 1" + assert sqlparse.parse(sql)[0].get_type() == "SELECT" + assert ParsedQuery(sql).is_select() + + sql = "WITH foo AS(SELECT 1) INSERT INTO my_table (a) VALUES (1)" + assert sqlparse.parse(sql)[0].get_type() == "INSERT" + assert not ParsedQuery(sql).is_select() + + sql = "WITH foo AS(SELECT 1) DELETE FROM my_table" + assert sqlparse.parse(sql)[0].get_type() == "DELETE" + assert not ParsedQuery(sql).is_select() + + +def test_get_query_with_new_limit_comment() -> None: + """ + Test that limit is applied correctly. 
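+
+    A ``LIMIT`` clause should be appended even when the query ends with a
+    comment.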
+ """ + query = ParsedQuery("SELECT * FROM birth_names -- SOME COMMENT") + assert query.set_or_update_query_limit(1000) == ( + "SELECT * FROM birth_names -- SOME COMMENT\nLIMIT 1000" + ) + + +def test_get_query_with_new_limit_comment_with_limit() -> None: + """ + Test that limits in comments are ignored. + """ + query = ParsedQuery("SELECT * FROM birth_names -- SOME COMMENT WITH LIMIT 555") + assert query.set_or_update_query_limit(1000) == ( + "SELECT * FROM birth_names -- SOME COMMENT WITH LIMIT 555\nLIMIT 1000" + ) + + +def test_get_query_with_new_limit_lower() -> None: + """ + Test that lower limits are not replaced. + """ + query = ParsedQuery("SELECT * FROM birth_names LIMIT 555") + assert query.set_or_update_query_limit(1000) == ( + "SELECT * FROM birth_names LIMIT 555" + ) + + +def test_get_query_with_new_limit_upper() -> None: + """ + Test that higher limits are replaced. + """ + query = ParsedQuery("SELECT * FROM birth_names LIMIT 2000") + assert query.set_or_update_query_limit(1000) == ( + "SELECT * FROM birth_names LIMIT 1000" + ) + + +def test_basic_breakdown_statements() -> None: + """ + Test that multiple statements are parsed correctly. + """ + query = ParsedQuery( + """ +SELECT * FROM birth_names; +SELECT * FROM birth_names LIMIT 1; +""" + ) + assert query.get_statements() == [ + "SELECT * FROM birth_names", + "SELECT * FROM birth_names LIMIT 1", + ] + + +def test_messy_breakdown_statements() -> None: + """ + Test the messy multiple statements are parsed correctly. + """ + query = ParsedQuery( + """ +SELECT 1;\t\n\n\n \t +\t\nSELECT 2; +SELECT * FROM birth_names;;; +SELECT * FROM birth_names LIMIT 1 +""" + ) + assert query.get_statements() == [ + "SELECT 1", + "SELECT 2", + "SELECT * FROM birth_names", + "SELECT * FROM birth_names LIMIT 1", + ] + + +def test_sqlparse_formatting(): + """ + Test that ``from_unixtime`` is formatted correctly. + """ + assert sqlparse.format( + "SELECT extract(HOUR from from_unixtime(hour_ts) " + "AT TIME ZONE 'America/Los_Angeles') from table", + reindent=True, + ) == ( + "SELECT extract(HOUR\n from from_unixtime(hour_ts) " + "AT TIME ZONE 'America/Los_Angeles')\nfrom table" + ) + + +def test_strip_comments_from_sql() -> None: + """ + Test that comments are stripped out correctly. 
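+
+    Comment markers inside string literals must be left untouched.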
+    """
+    assert (
+        strip_comments_from_sql("SELECT col1, col2 FROM table1")
+        == "SELECT col1, col2 FROM table1"
+    )
+    assert (
+        strip_comments_from_sql("SELECT col1, col2 FROM table1\n-- comment")
+        == "SELECT col1, col2 FROM table1\n"
+    )
+    assert (
+        strip_comments_from_sql("SELECT '--abc' as abc, col2 FROM table1\n")
+        == "SELECT '--abc' as abc, col2 FROM table1"
+    )
+
+
+def test_sanitize_clause_valid():
+    # regular clauses
+    assert sanitize_clause("col = 1") == "col = 1"
+    assert sanitize_clause("1=\t\n1") == "1=\t\n1"
+    assert sanitize_clause("(col = 1)") == "(col = 1)"
+    assert sanitize_clause("(col1 = 1) AND (col2 = 2)") == "(col1 = 1) AND (col2 = 2)"
+    assert sanitize_clause("col = 'abc' -- comment") == "col = 'abc' -- comment\n"
+
+    # Valid literal values that could be flagged as invalid by a naive query parser
+    assert (
+        sanitize_clause("col = 'col1 = 1) AND (col2 = 2'")
+        == "col = 'col1 = 1) AND (col2 = 2'"
+    )
+    assert sanitize_clause("col = 'select 1; select 2'") == "col = 'select 1; select 2'"
+    assert sanitize_clause("col = 'abc -- comment'") == "col = 'abc -- comment'"
+
+
+def test_sanitize_clause_closing_unclosed():
+    with pytest.raises(QueryClauseValidationException):
+        sanitize_clause("col1 = 1) AND (col2 = 2)")
+
+
+def test_sanitize_clause_unclosed():
+    with pytest.raises(QueryClauseValidationException):
+        sanitize_clause("(col1 = 1) AND (col2 = 2")
+
+
+def test_sanitize_clause_closing_and_unclosed():
+    with pytest.raises(QueryClauseValidationException):
+        sanitize_clause("col1 = 1) AND (col2 = 2")
+
+
+def test_sanitize_clause_closing_and_unclosed_nested():
+    with pytest.raises(QueryClauseValidationException):
+        sanitize_clause("(col1 = 1)) AND ((col2 = 2)")
+
+
+def test_sanitize_clause_multiple():
+    with pytest.raises(QueryClauseValidationException):
+        sanitize_clause("TRUE; SELECT 1")
+
+
+def test_sqlparse_issue_652():
+    stmt = sqlparse.parse(r"foo = '\' AND bar = 'baz'")[0]
+    assert len(stmt.tokens) == 5
+    assert str(stmt.tokens[0]) == "foo = '\\'"
+
+
+@pytest.mark.parametrize(
+    "sql,expected",
+    [
+        ("SELECT * FROM table", True),
+        ("SELECT a FROM (SELECT 1 AS a) JOIN (SELECT * FROM table)", True),
+        ("(SELECT COUNT(DISTINCT name) AS foo FROM birth_names)", True),
+        ("COUNT(*)", False),
+        ("SELECT a FROM (SELECT 1 AS a)", False),
+        ("SELECT a FROM (SELECT 1 AS a) JOIN table", True),
+        ("SELECT * FROM (SELECT 1 AS foo, 2 AS bar) ORDER BY foo ASC, bar", False),
+        ("SELECT * FROM other_table", True),
+        ("extract(HOUR from from_unixtime(hour_ts)", False),
+        ("(SELECT * FROM table)", True),
+        ("(SELECT COUNT(DISTINCT name) from birth_names)", True),
+    ],
+)
+def test_has_table_query(sql: str, expected: bool) -> None:
+    """
+    Test if a given statement queries a table.
+
+    This is used to prevent ad-hoc metrics from querying unauthorized tables, bypassing
+    row-level security.
+    """
+    statement = sqlparse.parse(sql)[0]
+    assert has_table_query(statement) == expected
+
+
+@pytest.mark.parametrize(
+    "sql,table,rls,expected",
+    [
+        # Basic test: append RLS (some_table.id=42) to an existing WHERE clause.
+        (
+            "SELECT * FROM some_table WHERE 1=1",
+            "some_table",
+            "id=42",
+            "SELECT * FROM some_table WHERE ( 1=1) AND some_table.id=42",
+        ),
+        # Any existing predicates MUST be wrapped in parentheses because AND has higher
+        # precedence than OR. If the RLS is `1=0` and we didn't add parentheses, a user
+        # could bypass it by crafting a query with `WHERE TRUE OR FALSE`, since
+        # `WHERE TRUE OR FALSE AND 1=0` evaluates to `WHERE TRUE OR (FALSE AND 1=0)`.
+ ( + "SELECT * FROM some_table WHERE TRUE OR FALSE", + "some_table", + "1=0", + "SELECT * FROM some_table WHERE ( TRUE OR FALSE) AND 1=0", + ), + # Here "table" is a reserved word; since sqlparse is too aggressive when + # characterizing reserved words we need to support them even when not quoted. + ( + "SELECT * FROM table WHERE 1=1", + "table", + "id=42", + "SELECT * FROM table WHERE ( 1=1) AND table.id=42", + ), + # RLS is only applied to queries reading from the associated table. + ( + "SELECT * FROM table WHERE 1=1", + "other_table", + "id=42", + "SELECT * FROM table WHERE 1=1", + ), + ( + "SELECT * FROM other_table WHERE 1=1", + "table", + "id=42", + "SELECT * FROM other_table WHERE 1=1", + ), + # If there's no pre-existing WHERE clause we create one. + ( + "SELECT * FROM table", + "table", + "id=42", + "SELECT * FROM table WHERE table.id=42", + ), + ( + "SELECT * FROM some_table", + "some_table", + "id=42", + "SELECT * FROM some_table WHERE some_table.id=42", + ), + ( + "SELECT * FROM table ORDER BY id", + "table", + "id=42", + "SELECT * FROM table WHERE table.id=42 ORDER BY id", + ), + ( + "SELECT * FROM some_table;", + "some_table", + "id=42", + "SELECT * FROM some_table WHERE some_table.id=42 ;", + ), + ( + "SELECT * FROM some_table ;", + "some_table", + "id=42", + "SELECT * FROM some_table WHERE some_table.id=42 ;", + ), + ( + "SELECT * FROM some_table ", + "some_table", + "id=42", + "SELECT * FROM some_table WHERE some_table.id=42", + ), + # We add the RLS even if it's already present, to be conservative. It should have + # no impact on the query, and it's easier than testing if the RLS is already + # present (it could be present in an OR clause, eg). + ( + "SELECT * FROM table WHERE 1=1 AND table.id=42", + "table", + "id=42", + "SELECT * FROM table WHERE ( 1=1 AND table.id=42) AND table.id=42", + ), + ( + ( + "SELECT * FROM table JOIN other_table ON " + "table.id = other_table.id AND other_table.id=42" + ), + "other_table", + "id=42", + ( + "SELECT * FROM table JOIN other_table ON other_table.id=42 " + "AND ( table.id = other_table.id AND other_table.id=42 )" + ), + ), + ( + "SELECT * FROM table WHERE 1=1 AND id=42", + "table", + "id=42", + "SELECT * FROM table WHERE ( 1=1 AND id=42) AND table.id=42", + ), + # For joins we apply the RLS to the ON clause, since it's easier and prevents + # leaking information about number of rows on OUTER JOINs. + ( + "SELECT * FROM table JOIN other_table ON table.id = other_table.id", + "other_table", + "id=42", + ( + "SELECT * FROM table JOIN other_table ON other_table.id=42 " + "AND ( table.id = other_table.id )" + ), + ), + ( + ( + "SELECT * FROM table JOIN other_table ON table.id = other_table.id " + "WHERE 1=1" + ), + "other_table", + "id=42", + ( + "SELECT * FROM table JOIN other_table ON other_table.id=42 " + "AND ( table.id = other_table.id ) WHERE 1=1" + ), + ), + # Subqueries also work, as expected. + ( + "SELECT * FROM (SELECT * FROM other_table)", + "other_table", + "id=42", + "SELECT * FROM (SELECT * FROM other_table WHERE other_table.id=42 )", + ), + # As well as UNION. 
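+        # The RLS should only be attached to the SELECT that reads from the
+        # associated table.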
+ ( + "SELECT * FROM table UNION ALL SELECT * FROM other_table", + "table", + "id=42", + "SELECT * FROM table WHERE table.id=42 UNION ALL SELECT * FROM other_table", + ), + ( + "SELECT * FROM table UNION ALL SELECT * FROM other_table", + "other_table", + "id=42", + ( + "SELECT * FROM table UNION ALL " + "SELECT * FROM other_table WHERE other_table.id=42" + ), + ), + # When comparing fully qualified table names (eg, schema.table) to simple names + # (eg, table) we are also conservative, assuming the schema is the same, since + # we don't have information on the default schema. + ( + "SELECT * FROM schema.table_name", + "table_name", + "id=42", + "SELECT * FROM schema.table_name WHERE table_name.id=42", + ), + ( + "SELECT * FROM schema.table_name", + "schema.table_name", + "id=42", + "SELECT * FROM schema.table_name WHERE schema.table_name.id=42", + ), + ( + "SELECT * FROM table_name", + "schema.table_name", + "id=42", + "SELECT * FROM table_name WHERE schema.table_name.id=42", + ), + ], +) +def test_insert_rls( + mocker: MockerFixture, sql: str, table: str, rls: str, expected: str +) -> None: + """ + Insert into a statement a given RLS condition associated with a table. + """ + condition = sqlparse.parse(rls)[0] + add_table_name(condition, table) + + # pylint: disable=unused-argument + def get_rls_for_table( + candidate: Token, + database_id: int, + default_schema: str, + ) -> Optional[TokenList]: + """ + Return the RLS ``condition`` if ``candidate`` matches ``table``. + """ + # compare ignoring schema + for left, right in zip(str(candidate).split(".")[::-1], table.split(".")[::-1]): + if left != right: + return None + return condition + + mocker.patch("superset.sql_parse.get_rls_for_table", new=get_rls_for_table) + + statement = sqlparse.parse(sql)[0] + assert ( + str( + insert_rls(token_list=statement, database_id=1, default_schema="my_schema") + ).strip() + == expected.strip() + ) + + +@pytest.mark.parametrize( + "rls,table,expected", + [ + ("id=42", "users", "users.id=42"), + ("users.id=42", "users", "users.id=42"), + ("schema.users.id=42", "users", "schema.users.id=42"), + ("false", "users", "false"), + ], +) +def test_add_table_name(rls: str, table: str, expected: str) -> None: + condition = sqlparse.parse(rls)[0] + add_table_name(condition, table) + assert str(condition) == expected + + +def test_get_rls_for_table(mocker: MockerFixture) -> None: + """ + Tests for ``get_rls_for_table``. + """ + candidate = Identifier([Token(Name, "some_table")]) + db = mocker.patch("superset.db") + dataset = db.session.query().filter().one_or_none() + dataset.__str__.return_value = "some_table" + + dataset.get_sqla_row_level_filters.return_value = [text("organization_id = 1")] + assert ( + str(get_rls_for_table(candidate, 1, "public")) + == "some_table.organization_id = 1" + ) + + dataset.get_sqla_row_level_filters.return_value = [ + text("organization_id = 1"), + text("foo = 'bar'"), + ] + assert ( + str(get_rls_for_table(candidate, 1, "public")) + == "some_table.organization_id = 1 AND some_table.foo = 'bar'" + ) + + dataset.get_sqla_row_level_filters.return_value = [] + assert get_rls_for_table(candidate, 1, "public") is None + + +def test_extract_table_references(mocker: MockerFixture) -> None: + """ + Test the ``extract_table_references`` helper function. 
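+
+    When the primary parser cannot handle a statement the function should fall
+    back to ``sqlparse``, logging a warning unless ``show_warning`` is False.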
+ """ + assert extract_table_references("SELECT 1", "trino") == set() + assert extract_table_references("SELECT 1 FROM some_table", "trino") == { + Table(table="some_table", schema=None, catalog=None) + } + assert extract_table_references("SELECT {{ jinja }} FROM some_table", "trino") == { + Table(table="some_table", schema=None, catalog=None) + } + assert extract_table_references( + "SELECT 1 FROM some_catalog.some_schema.some_table", "trino" + ) == {Table(table="some_table", schema="some_schema", catalog="some_catalog")} + + # with identifier quotes + assert extract_table_references( + "SELECT 1 FROM `some_catalog`.`some_schema`.`some_table`", "mysql" + ) == {Table(table="some_table", schema="some_schema", catalog="some_catalog")} + assert extract_table_references( + 'SELECT 1 FROM "some_catalog".some_schema."some_table"', "trino" + ) == {Table(table="some_table", schema="some_schema", catalog="some_catalog")} + + assert extract_table_references( + "SELECT * FROM some_table JOIN other_table ON some_table.id = other_table.id", + "trino", + ) == { + Table(table="some_table", schema=None, catalog=None), + Table(table="other_table", schema=None, catalog=None), + } + + # test falling back to sqlparse + logger = mocker.patch("superset.sql_parse.logger") + sql = "SELECT * FROM table UNION ALL SELECT * FROM other_table" + assert extract_table_references( + sql, + "trino", + ) == {Table(table="other_table", schema=None, catalog=None)} + logger.warning.assert_called_once() + + logger = mocker.patch("superset.migrations.shared.utils.logger") + sql = "SELECT * FROM table UNION ALL SELECT * FROM other_table" + assert extract_table_references(sql, "trino", show_warning=False) == { + Table(table="other_table", schema=None, catalog=None) + } + logger.warning.assert_not_called() diff --git a/tests/unit_tests/tables/__init__.py b/tests/unit_tests/tables/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/tables/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/tables/test_models.py b/tests/unit_tests/tables/test_models.py new file mode 100644 index 0000000000000..7705dba6aa09d --- /dev/null +++ b/tests/unit_tests/tables/test_models.py @@ -0,0 +1,56 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# pylint: disable=import-outside-toplevel, unused-argument + +from sqlalchemy.orm.session import Session + + +def test_table_model(session: Session) -> None: + """ + Test basic attributes of a ``Table``. + """ + from superset.columns.models import Column + from superset.models.core import Database + from superset.tables.models import Table + + engine = session.get_bind() + Table.metadata.create_all(engine) # pylint: disable=no-member + + table = Table( + name="my_table", + schema="my_schema", + catalog="my_catalog", + database=Database(database_name="my_database", sqlalchemy_uri="test://"), + columns=[ + Column( + name="ds", + type="TIMESTAMP", + expression="ds", + ) + ], + ) + session.add(table) + session.flush() + + assert table.id == 1 + assert table.uuid is not None + assert table.database_id == 1 + assert table.catalog == "my_catalog" + assert table.schema == "my_schema" + assert table.name == "my_table" + assert [column.name for column in table.columns] == ["ds"] diff --git a/tests/unit_tests/tasks/__init__.py b/tests/unit_tests/tasks/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/tasks/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/tasks/test_cron_util.py b/tests/unit_tests/tasks/test_cron_util.py new file mode 100644 index 0000000000000..d0f9ae21705e2 --- /dev/null +++ b/tests/unit_tests/tasks/test_cron_util.py @@ -0,0 +1,212 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
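+
+# Each case below freezes time just before or after a scheduled minute and
+# checks which datetimes ``cron_schedule_window`` reports for the given
+# timezone.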
+from datetime import datetime +from typing import List + +import pytest +import pytz +from dateutil import parser +from freezegun import freeze_time +from freezegun.api import FakeDatetime # type: ignore + +from superset.tasks.cron_util import cron_schedule_window + + +@pytest.mark.parametrize( + "current_dttm, cron, expected", + [ + ("2020-01-01T08:59:01Z", "0 1 * * *", []), + ( + "2020-01-01T08:59:02Z", + "0 1 * * *", + [FakeDatetime(2020, 1, 1, 9, 0).strftime("%A, %d %B %Y, %H:%M:%S")], + ), + ( + "2020-01-01T08:59:59Z", + "0 1 * * *", + [FakeDatetime(2020, 1, 1, 9, 0).strftime("%A, %d %B %Y, %H:%M:%S")], + ), + ( + "2020-01-01T09:00:00Z", + "0 1 * * *", + [FakeDatetime(2020, 1, 1, 9, 0).strftime("%A, %d %B %Y, %H:%M:%S")], + ), + ("2020-01-01T09:00:01Z", "0 1 * * *", []), + ], +) +def test_cron_schedule_window_los_angeles( + current_dttm: str, cron: str, expected: List[FakeDatetime] +) -> None: + """ + Reports scheduler: Test cron schedule window for "America/Los_Angeles" + """ + + with freeze_time(current_dttm): + datetimes = cron_schedule_window(cron, "America/Los_Angeles") + assert ( + list(cron.strftime("%A, %d %B %Y, %H:%M:%S") for cron in datetimes) + == expected + ) + + +@pytest.mark.parametrize( + "current_dttm, cron, expected", + [ + ("2020-01-01T00:59:01Z", "0 1 * * *", []), + ( + "2020-01-01T00:59:02Z", + "0 1 * * *", + [FakeDatetime(2020, 1, 1, 1, 0).strftime("%A, %d %B %Y, %H:%M:%S")], + ), + ( + "2020-01-01T00:59:59Z", + "0 1 * * *", + [FakeDatetime(2020, 1, 1, 1, 0).strftime("%A, %d %B %Y, %H:%M:%S")], + ), + ( + "2020-01-01T01:00:00Z", + "0 1 * * *", + [FakeDatetime(2020, 1, 1, 1, 0).strftime("%A, %d %B %Y, %H:%M:%S")], + ), + ("2020-01-01T01:00:01Z", "0 1 * * *", []), + ], +) +def test_cron_schedule_window_invalid_timezone( + current_dttm: str, cron: str, expected: List[FakeDatetime] +) -> None: + """ + Reports scheduler: Test cron schedule window for "invalid timezone" + """ + + with freeze_time(current_dttm): + datetimes = cron_schedule_window(cron, "invalid timezone") + # it should default to UTC + assert ( + list(cron.strftime("%A, %d %B %Y, %H:%M:%S") for cron in datetimes) + == expected + ) + + +@pytest.mark.parametrize( + "current_dttm, cron, expected", + [ + ("2020-01-01T05:59:01Z", "0 1 * * *", []), + ( + "2020-01-01T05:59:02Z", + "0 1 * * *", + [FakeDatetime(2020, 1, 1, 6, 0).strftime("%A, %d %B %Y, %H:%M:%S")], + ), + ( + "2020-01-01T5:59:59Z", + "0 1 * * *", + [FakeDatetime(2020, 1, 1, 6, 0).strftime("%A, %d %B %Y, %H:%M:%S")], + ), + ( + "2020-01-01T6:00:00", + "0 1 * * *", + [FakeDatetime(2020, 1, 1, 6, 0).strftime("%A, %d %B %Y, %H:%M:%S")], + ), + ("2020-01-01T6:00:01Z", "0 1 * * *", []), + ], +) +def test_cron_schedule_window_new_york( + current_dttm: str, cron: str, expected: List[FakeDatetime] +) -> None: + """ + Reports scheduler: Test cron schedule window for "America/New_York" + """ + + with freeze_time(current_dttm, tz_offset=0): + datetimes = cron_schedule_window(cron, "America/New_York") + assert ( + list(cron.strftime("%A, %d %B %Y, %H:%M:%S") for cron in datetimes) + == expected + ) + + +@pytest.mark.parametrize( + "current_dttm, cron, expected", + [ + ("2020-01-01T06:59:01Z", "0 1 * * *", []), + ( + "2020-01-01T06:59:02Z", + "0 1 * * *", + [FakeDatetime(2020, 1, 1, 7, 0).strftime("%A, %d %B %Y, %H:%M:%S")], + ), + ( + "2020-01-01T06:59:59Z", + "0 1 * * *", + [FakeDatetime(2020, 1, 1, 7, 0).strftime("%A, %d %B %Y, %H:%M:%S")], + ), + ( + "2020-01-01T07:00:00", + "0 1 * * *", + [FakeDatetime(2020, 1, 1, 7, 0).strftime("%A, %d %B %Y, 
%H:%M:%S")], + ), + ("2020-01-01T07:00:01Z", "0 1 * * *", []), + ], +) +def test_cron_schedule_window_chicago( + current_dttm: str, cron: str, expected: List[FakeDatetime] +) -> None: + """ + Reports scheduler: Test cron schedule window for "America/Chicago" + """ + + with freeze_time(current_dttm, tz_offset=0): + datetimes = cron_schedule_window(cron, "America/Chicago") + assert ( + list(cron.strftime("%A, %d %B %Y, %H:%M:%S") for cron in datetimes) + == expected + ) + + +@pytest.mark.parametrize( + "current_dttm, cron, expected", + [ + ("2020-07-01T05:59:01Z", "0 1 * * *", []), + ( + "2020-07-01T05:59:02Z", + "0 1 * * *", + [FakeDatetime(2020, 7, 1, 6, 0).strftime("%A, %d %B %Y, %H:%M:%S")], + ), + ( + "2020-07-01T05:59:59Z", + "0 1 * * *", + [FakeDatetime(2020, 7, 1, 6, 0).strftime("%A, %d %B %Y, %H:%M:%S")], + ), + ( + "2020-07-01T06:00:00", + "0 1 * * *", + [FakeDatetime(2020, 7, 1, 6, 0).strftime("%A, %d %B %Y, %H:%M:%S")], + ), + ("2020-07-01T06:00:01Z", "0 1 * * *", []), + ], +) +def test_cron_schedule_window_chicago_daylight( + current_dttm: str, cron: str, expected: List[FakeDatetime] +) -> None: + """ + Reports scheduler: Test cron schedule window for "America/Chicago" + """ + + with freeze_time(current_dttm, tz_offset=0): + datetimes = cron_schedule_window(cron, "America/Chicago") + assert ( + list(cron.strftime("%A, %d %B %Y, %H:%M:%S") for cron in datetimes) + == expected + ) diff --git a/tests/unit_tests/tasks/test_utils.py b/tests/unit_tests/tasks/test_utils.py new file mode 100644 index 0000000000000..7854717201229 --- /dev/null +++ b/tests/unit_tests/tasks/test_utils.py @@ -0,0 +1,323 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
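+
+# Tests for ``get_executor``: given a model (dashboard, chart or report
+# schedule) and a prioritized list of executor types, the first matching
+# executor should be returned, or ``ExecutorNotFoundError`` raised.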
+ +from contextlib import nullcontext +from dataclasses import dataclass +from enum import Enum +from typing import Any, Dict, List, Optional, Tuple, Type, Union + +import pytest +from flask_appbuilder.security.sqla.models import User + +from superset.tasks.exceptions import ExecutorNotFoundError +from superset.tasks.types import ExecutorType + +SELENIUM_USER_ID = 1234 +SELENIUM_USERNAME = "admin" + + +def _get_users( + params: Optional[Union[int, List[int]]] +) -> Optional[Union[User, List[User]]]: + if params is None: + return None + if isinstance(params, int): + return User(id=params, username=str(params)) + return [User(id=user, username=str(user)) for user in params] + + +@dataclass +class ModelConfig: + owners: List[int] + creator: Optional[int] = None + modifier: Optional[int] = None + + +class ModelType(int, Enum): + DASHBOARD = 1 + CHART = 2 + REPORT_SCHEDULE = 3 + + +@pytest.mark.parametrize( + "model_type,executor_types,model_config,current_user,expected_result", + [ + ( + ModelType.REPORT_SCHEDULE, + [ExecutorType.SELENIUM], + ModelConfig( + owners=[1, 2], + creator=3, + modifier=4, + ), + None, + (ExecutorType.SELENIUM, SELENIUM_USER_ID), + ), + ( + ModelType.REPORT_SCHEDULE, + [ + ExecutorType.CREATOR, + ExecutorType.CREATOR_OWNER, + ExecutorType.OWNER, + ExecutorType.MODIFIER, + ExecutorType.MODIFIER_OWNER, + ExecutorType.SELENIUM, + ], + ModelConfig(owners=[]), + None, + (ExecutorType.SELENIUM, SELENIUM_USER_ID), + ), + ( + ModelType.REPORT_SCHEDULE, + [ + ExecutorType.CREATOR, + ExecutorType.CREATOR_OWNER, + ExecutorType.OWNER, + ExecutorType.MODIFIER, + ExecutorType.MODIFIER_OWNER, + ExecutorType.SELENIUM, + ], + ModelConfig(owners=[], modifier=1), + None, + (ExecutorType.MODIFIER, 1), + ), + ( + ModelType.REPORT_SCHEDULE, + [ + ExecutorType.CREATOR, + ExecutorType.CREATOR_OWNER, + ExecutorType.OWNER, + ExecutorType.MODIFIER, + ExecutorType.MODIFIER_OWNER, + ExecutorType.SELENIUM, + ], + ModelConfig(owners=[2], modifier=1), + None, + (ExecutorType.OWNER, 2), + ), + ( + ModelType.REPORT_SCHEDULE, + [ + ExecutorType.CREATOR, + ExecutorType.CREATOR_OWNER, + ExecutorType.OWNER, + ExecutorType.MODIFIER, + ExecutorType.MODIFIER_OWNER, + ExecutorType.SELENIUM, + ], + ModelConfig(owners=[2], creator=3, modifier=1), + None, + (ExecutorType.CREATOR, 3), + ), + ( + ModelType.REPORT_SCHEDULE, + [ + ExecutorType.OWNER, + ], + ModelConfig(owners=[1, 2, 3, 4, 5, 6, 7], creator=3, modifier=4), + None, + (ExecutorType.OWNER, 4), + ), + ( + ModelType.REPORT_SCHEDULE, + [ + ExecutorType.OWNER, + ], + ModelConfig(owners=[1, 2, 3, 4, 5, 6, 7], creator=3, modifier=8), + None, + (ExecutorType.OWNER, 3), + ), + ( + ModelType.REPORT_SCHEDULE, + [ + ExecutorType.MODIFIER_OWNER, + ], + ModelConfig(owners=[1, 2, 3, 4, 5, 6, 7], creator=8, modifier=9), + None, + ExecutorNotFoundError(), + ), + ( + ModelType.REPORT_SCHEDULE, + [ + ExecutorType.MODIFIER_OWNER, + ], + ModelConfig(owners=[1, 2, 3, 4, 5, 6, 7], creator=8, modifier=4), + None, + (ExecutorType.MODIFIER_OWNER, 4), + ), + ( + ModelType.REPORT_SCHEDULE, + [ + ExecutorType.CREATOR_OWNER, + ], + ModelConfig(owners=[1, 2, 3, 4, 5, 6, 7], creator=8, modifier=9), + None, + ExecutorNotFoundError(), + ), + ( + ModelType.REPORT_SCHEDULE, + [ + ExecutorType.CREATOR_OWNER, + ], + ModelConfig(owners=[1, 2, 3, 4, 5, 6, 7], creator=4, modifier=8), + None, + (ExecutorType.CREATOR_OWNER, 4), + ), + ( + ModelType.REPORT_SCHEDULE, + [ + ExecutorType.CURRENT_USER, + ], + ModelConfig(owners=[1, 2, 3, 4, 5, 6, 7], creator=4, modifier=8), + None, + 
ExecutorNotFoundError(), + ), + ( + ModelType.DASHBOARD, + [ + ExecutorType.CURRENT_USER, + ], + ModelConfig(owners=[1], creator=2, modifier=3), + 4, + (ExecutorType.CURRENT_USER, 4), + ), + ( + ModelType.DASHBOARD, + [ + ExecutorType.SELENIUM, + ], + ModelConfig(owners=[1], creator=2, modifier=3), + 4, + (ExecutorType.SELENIUM, SELENIUM_USER_ID), + ), + ( + ModelType.DASHBOARD, + [ + ExecutorType.CURRENT_USER, + ], + ModelConfig(owners=[1], creator=2, modifier=3), + None, + ExecutorNotFoundError(), + ), + ( + ModelType.DASHBOARD, + [ + ExecutorType.CREATOR_OWNER, + ExecutorType.MODIFIER_OWNER, + ExecutorType.CURRENT_USER, + ExecutorType.SELENIUM, + ], + ModelConfig(owners=[1], creator=2, modifier=3), + None, + (ExecutorType.SELENIUM, SELENIUM_USER_ID), + ), + ( + ModelType.CHART, + [ + ExecutorType.CURRENT_USER, + ], + ModelConfig(owners=[1], creator=2, modifier=3), + 4, + (ExecutorType.CURRENT_USER, 4), + ), + ( + ModelType.CHART, + [ + ExecutorType.SELENIUM, + ], + ModelConfig(owners=[1], creator=2, modifier=3), + 4, + (ExecutorType.SELENIUM, SELENIUM_USER_ID), + ), + ( + ModelType.CHART, + [ + ExecutorType.CURRENT_USER, + ], + ModelConfig(owners=[1], creator=2, modifier=3), + None, + ExecutorNotFoundError(), + ), + ( + ModelType.CHART, + [ + ExecutorType.CREATOR_OWNER, + ExecutorType.MODIFIER_OWNER, + ExecutorType.CURRENT_USER, + ExecutorType.SELENIUM, + ], + ModelConfig(owners=[1], creator=2, modifier=3), + None, + (ExecutorType.SELENIUM, SELENIUM_USER_ID), + ), + ], +) +def test_get_executor( + model_type: ModelType, + executor_types: List[ExecutorType], + model_config: ModelConfig, + current_user: Optional[int], + expected_result: Tuple[int, ExecutorNotFoundError], +) -> None: + from superset.models.dashboard import Dashboard + from superset.models.slice import Slice + from superset.reports.models import ReportSchedule + from superset.tasks.utils import get_executor + + model: Type[Union[Dashboard, ReportSchedule, Slice]] + model_kwargs: Dict[str, Any] = {} + if model_type == ModelType.REPORT_SCHEDULE: + model = ReportSchedule + model_kwargs = { + "type": "report", + "name": "test_report", + } + elif model_type == ModelType.DASHBOARD: + model = Dashboard + elif model_type == ModelType.CHART: + model = Slice + else: + raise Exception(f"Unsupported model type: {model_type}") + + obj = model( + id=1, + owners=_get_users(model_config.owners), + created_by=_get_users(model_config.creator), + changed_by=_get_users(model_config.modifier), + **model_kwargs, + ) + if isinstance(expected_result, Exception): + cm = pytest.raises(type(expected_result)) + expected_executor_type = None + expected_executor = None + else: + cm = nullcontext() + expected_executor_type = expected_result[0] + expected_executor = ( + SELENIUM_USERNAME + if expected_executor_type == ExecutorType.SELENIUM + else str(expected_result[1]) + ) + + with cm: + executor_type, executor = get_executor( + executor_types=executor_types, + model=obj, + current_user=str(current_user) if current_user else None, + ) + assert executor_type == expected_executor_type + assert executor == expected_executor diff --git a/tests/unit_tests/test_jinja_context.py b/tests/unit_tests/test_jinja_context.py new file mode 100644 index 0000000000000..8704b1d65c211 --- /dev/null +++ b/tests/unit_tests/test_jinja_context.py @@ -0,0 +1,267 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import json +from typing import Any + +import pytest +from sqlalchemy.dialects.postgresql import dialect + +from superset import app +from superset.exceptions import SupersetTemplateException +from superset.jinja_context import ExtraCache, safe_proxy + + +def test_filter_values_default() -> None: + cache = ExtraCache() + assert cache.filter_values("name", "foo") == ["foo"] + assert cache.removed_filters == [] + + +def test_filter_values_remove_not_present() -> None: + cache = ExtraCache() + assert cache.filter_values("name", remove_filter=True) == [] + assert cache.removed_filters == [] + + +def test_get_filters_remove_not_present() -> None: + cache = ExtraCache() + assert cache.get_filters("name", remove_filter=True) == [] + assert cache.removed_filters == [] + + +def test_filter_values_no_default() -> None: + cache = ExtraCache() + assert cache.filter_values("name") == [] + + +def test_filter_values_adhoc_filters() -> None: + with app.test_request_context( + data={ + "form_data": json.dumps( + { + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": "foo", + "expressionType": "SIMPLE", + "operator": "in", + "subject": "name", + } + ], + } + ) + } + ): + cache = ExtraCache() + assert cache.filter_values("name") == ["foo"] + assert cache.applied_filters == ["name"] + + with app.test_request_context( + data={ + "form_data": json.dumps( + { + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": ["foo", "bar"], + "expressionType": "SIMPLE", + "operator": "in", + "subject": "name", + } + ], + } + ) + } + ): + cache = ExtraCache() + assert cache.filter_values("name") == ["foo", "bar"] + assert cache.applied_filters == ["name"] + + +def test_get_filters_adhoc_filters() -> None: + with app.test_request_context( + data={ + "form_data": json.dumps( + { + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": "foo", + "expressionType": "SIMPLE", + "operator": "in", + "subject": "name", + } + ], + } + ) + } + ): + cache = ExtraCache() + assert cache.get_filters("name") == [ + {"op": "IN", "col": "name", "val": ["foo"]} + ] + + assert cache.removed_filters == [] + assert cache.applied_filters == ["name"] + + with app.test_request_context( + data={ + "form_data": json.dumps( + { + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": ["foo", "bar"], + "expressionType": "SIMPLE", + "operator": "in", + "subject": "name", + } + ], + } + ) + } + ): + cache = ExtraCache() + assert cache.get_filters("name") == [ + {"op": "IN", "col": "name", "val": ["foo", "bar"]} + ] + assert cache.removed_filters == [] + + with app.test_request_context( + data={ + "form_data": json.dumps( + { + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": ["foo", "bar"], + "expressionType": "SIMPLE", + "operator": "in", + "subject": "name", + } + ], + } + ) + } + ): + cache = ExtraCache() + assert cache.get_filters("name", remove_filter=True) == [ + {"op": "IN", "col": "name", "val": ["foo", "bar"]} + ] + assert 
cache.removed_filters == ["name"] + assert cache.applied_filters == ["name"] + + +def test_filter_values_extra_filters() -> None: + with app.test_request_context( + data={ + "form_data": json.dumps( + {"extra_filters": [{"col": "name", "op": "in", "val": "foo"}]} + ) + } + ): + cache = ExtraCache() + assert cache.filter_values("name") == ["foo"] + assert cache.applied_filters == ["name"] + + +def test_url_param_default() -> None: + with app.test_request_context(): + cache = ExtraCache() + assert cache.url_param("foo", "bar") == "bar" + + +def test_url_param_no_default() -> None: + with app.test_request_context(): + cache = ExtraCache() + assert cache.url_param("foo") is None + + +def test_url_param_query() -> None: + with app.test_request_context(query_string={"foo": "bar"}): + cache = ExtraCache() + assert cache.url_param("foo") == "bar" + + +def test_url_param_form_data() -> None: + with app.test_request_context( + query_string={"form_data": json.dumps({"url_params": {"foo": "bar"}})} + ): + cache = ExtraCache() + assert cache.url_param("foo") == "bar" + + +def test_url_param_escaped_form_data() -> None: + with app.test_request_context( + query_string={"form_data": json.dumps({"url_params": {"foo": "O'Brien"}})} + ): + cache = ExtraCache(dialect=dialect()) + assert cache.url_param("foo") == "O''Brien" + + +def test_url_param_escaped_default_form_data() -> None: + with app.test_request_context( + query_string={"form_data": json.dumps({"url_params": {"foo": "O'Brien"}})} + ): + cache = ExtraCache(dialect=dialect()) + assert cache.url_param("bar", "O'Malley") == "O''Malley" + + +def test_url_param_unescaped_form_data() -> None: + with app.test_request_context( + query_string={"form_data": json.dumps({"url_params": {"foo": "O'Brien"}})} + ): + cache = ExtraCache(dialect=dialect()) + assert cache.url_param("foo", escape_result=False) == "O'Brien" + + +def test_url_param_unescaped_default_form_data() -> None: + with app.test_request_context( + query_string={"form_data": json.dumps({"url_params": {"foo": "O'Brien"}})} + ): + cache = ExtraCache(dialect=dialect()) + assert cache.url_param("bar", "O'Malley", escape_result=False) == "O'Malley" + + +def test_safe_proxy_primitive() -> None: + def func(input_: Any) -> Any: + return input_ + + assert safe_proxy(func, "foo") == "foo" + + +def test_safe_proxy_dict() -> None: + def func(input_: Any) -> Any: + return input_ + + assert safe_proxy(func, {"foo": "bar"}) == {"foo": "bar"} + + +def test_safe_proxy_lambda() -> None: + def func(input_: Any) -> Any: + return input_ + + with pytest.raises(SupersetTemplateException): + safe_proxy(func, lambda: "bar") + + +def test_safe_proxy_nested_lambda() -> None: + def func(input_: Any) -> Any: + return input_ + + with pytest.raises(SupersetTemplateException): + safe_proxy(func, {"foo": lambda: "bar"}) diff --git a/tests/unit_tests/thumbnails/__init__.py b/tests/unit_tests/thumbnails/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/thumbnails/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/thumbnails/test_digest.py b/tests/unit_tests/thumbnails/test_digest.py new file mode 100644 index 0000000000000..04f244e629b59 --- /dev/null +++ b/tests/unit_tests/thumbnails/test_digest.py @@ -0,0 +1,258 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from contextlib import nullcontext +from typing import Any, Dict, List, Optional, TYPE_CHECKING, Union +from unittest.mock import patch + +import pytest +from flask_appbuilder.security.sqla.models import User + +from superset.tasks.exceptions import ExecutorNotFoundError +from superset.tasks.types import ExecutorType +from superset.utils.core import override_user + +if TYPE_CHECKING: + from superset.models.dashboard import Dashboard + from superset.models.slice import Slice + +_DEFAULT_DASHBOARD_KWARGS: Dict[str, Any] = { + "id": 1, + "dashboard_title": "My Title", + "slices": [{"id": 1, "slice_name": "My Chart"}], + "position_json": '{"a": "b"}', + "css": "background-color: lightblue;", + "json_metadata": '{"c": "d"}', +} + +_DEFAULT_CHART_KWARGS = { + "id": 2, + "params": {"a": "b"}, +} + + +def CUSTOM_DASHBOARD_FUNC( + dashboard: Dashboard, + executor_type: ExecutorType, + executor: str, +) -> str: + return f"{dashboard.id}.{executor_type.value}.{executor}" + + +def CUSTOM_CHART_FUNC( + chart: Slice, + executor_type: ExecutorType, + executor: str, +) -> str: + return f"{chart.id}.{executor_type.value}.{executor}" + + +@pytest.mark.parametrize( + "dashboard_overrides,execute_as,has_current_user,use_custom_digest,expected_result", + [ + ( + None, + [ExecutorType.SELENIUM], + False, + False, + "71452fee8ffbd8d340193d611bcd4559", + ), + ( + None, + [ExecutorType.CURRENT_USER], + True, + False, + "209dc060ac19271b8708731e3b8280f5", + ), + ( + { + "dashboard_title": "My Other Title", + }, + [ExecutorType.CURRENT_USER], + True, + False, + "209dc060ac19271b8708731e3b8280f5", + ), + ( + { + "id": 2, + }, + [ExecutorType.CURRENT_USER], + True, + False, + "06a4144466dbd5ffad0c3c2225e96296", + ), + ( + { + "slices": [{"id": 2, "slice_name": "My Other Chart"}], + }, + [ExecutorType.CURRENT_USER], + True, + False, + "a823ece9563895ccb14f3d9095e84f7a", + ), + ( + { + "position_json": {"b": "c"}, + }, + [ExecutorType.CURRENT_USER], + True, + False, + "33c5475f92a904925ab3ef493526e5b5", + ), + ( + { + "css": "background-color: darkblue;", + }, 
+ [ExecutorType.CURRENT_USER], + True, + False, + "cec57345e6402c0d4b3caee5cfaa0a03", + ), + ( + { + "json_metadata": {"d": "e"}, + }, + [ExecutorType.CURRENT_USER], + True, + False, + "5380dcbe94621a0759b09554404f3d02", + ), + ( + None, + [ExecutorType.CURRENT_USER], + True, + True, + "1.current_user.1", + ), + ( + None, + [ExecutorType.CURRENT_USER], + False, + False, + ExecutorNotFoundError(), + ), + ], +) +def test_dashboard_digest( + dashboard_overrides: Optional[Dict[str, Any]], + execute_as: List[ExecutorType], + has_current_user: bool, + use_custom_digest: bool, + expected_result: Union[str, Exception], +) -> None: + from superset import app + from superset.models.dashboard import Dashboard + from superset.models.slice import Slice + from superset.thumbnails.digest import get_dashboard_digest + + kwargs = { + **_DEFAULT_DASHBOARD_KWARGS, + **(dashboard_overrides or {}), + } + slices = [Slice(**slice_kwargs) for slice_kwargs in kwargs.pop("slices")] + dashboard = Dashboard(**kwargs, slices=slices) + user: Optional[User] = None + if has_current_user: + user = User(id=1, username="1") + func = CUSTOM_DASHBOARD_FUNC if use_custom_digest else None + + with patch.dict( + app.config, + { + "THUMBNAIL_EXECUTE_AS": execute_as, + "THUMBNAIL_DASHBOARD_DIGEST_FUNC": func, + }, + ), override_user(user): + cm = ( + pytest.raises(type(expected_result)) + if isinstance(expected_result, Exception) + else nullcontext() + ) + with cm: + assert get_dashboard_digest(dashboard=dashboard) == expected_result + + +@pytest.mark.parametrize( + "chart_overrides,execute_as,has_current_user,use_custom_digest,expected_result", + [ + ( + None, + [ExecutorType.SELENIUM], + False, + False, + "47d852b5c4df211c115905617bb722c1", + ), + ( + None, + [ExecutorType.CURRENT_USER], + True, + False, + "4f8109d3761e766e650af514bb358f10", + ), + ( + None, + [ExecutorType.CURRENT_USER], + True, + True, + "2.current_user.1", + ), + ( + None, + [ExecutorType.CURRENT_USER], + False, + False, + ExecutorNotFoundError(), + ), + ], +) +def test_chart_digest( + chart_overrides: Optional[Dict[str, Any]], + execute_as: List[ExecutorType], + has_current_user: bool, + use_custom_digest: bool, + expected_result: Union[str, Exception], +) -> None: + from superset import app + from superset.models.slice import Slice + from superset.thumbnails.digest import get_chart_digest + + kwargs = { + **_DEFAULT_CHART_KWARGS, + **(chart_overrides or {}), + } + chart = Slice(**kwargs) + user: Optional[User] = None + if has_current_user: + user = User(id=1, username="1") + func = CUSTOM_CHART_FUNC if use_custom_digest else None + + with patch.dict( + app.config, + { + "THUMBNAIL_EXECUTE_AS": execute_as, + "THUMBNAIL_CHART_DIGEST_FUNC": func, + }, + ), override_user(user): + cm = ( + pytest.raises(type(expected_result)) + if isinstance(expected_result, Exception) + else nullcontext() + ) + with cm: + assert get_chart_digest(chart=chart) == expected_result diff --git a/tests/unit_tests/utils/cache_test.py b/tests/unit_tests/utils/cache_test.py new file mode 100644 index 0000000000000..53650e1d20324 --- /dev/null +++ b/tests/unit_tests/utils/cache_test.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# pylint: disable=import-outside-toplevel, unused-argument + +from pytest_mock import MockerFixture + + +def test_memoized_func(mocker: MockerFixture) -> None: + """ + Test the ``memoized_func`` decorator. + """ + from superset.utils.cache import memoized_func + + cache = mocker.MagicMock() + + decorator = memoized_func("db:{self.id}:schema:{schema}:view_list", cache) + decorated = decorator(lambda self, schema, cache=False: 42) + + self = mocker.MagicMock() + self.id = 1 + + # skip cache + result = decorated(self, "public", cache=False) + assert result == 42 + cache.get.assert_not_called() + + # check cache, no cached value + cache.get.return_value = None + result = decorated(self, "public", cache=True) + assert result == 42 + cache.get.assert_called_with("db:1:schema:public:view_list") + + # check cache, cached value + cache.get.return_value = 43 + result = decorated(self, "public", cache=True) + assert result == 43 diff --git a/tests/unit_tests/utils/date_parser_tests.py b/tests/unit_tests/utils/date_parser_tests.py new file mode 100644 index 0000000000000..f3c8b6968077b --- /dev/null +++ b/tests/unit_tests/utils/date_parser_tests.py @@ -0,0 +1,358 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
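+# The tests below patch superset.utils.date_parser.parse_human_datetime with
+# mock_parse_human_datetime, which pins "now" to 2016-11-07 09:30:10 so that
+# relative expressions resolve deterministically, e.g.
+#   get_since_until("Last week", relative_end="now")
+#   -> (datetime(2016, 10, 31), datetime(2016, 11, 7, 9, 30, 10))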
+import re +from datetime import date, datetime, timedelta +from typing import Optional, Tuple +from unittest.mock import Mock, patch + +import pytest +from dateutil.relativedelta import relativedelta + +from superset.charts.commands.exceptions import ( + TimeRangeAmbiguousError, + TimeRangeParseFailError, +) +from superset.utils.date_parser import ( + DateRangeMigration, + datetime_eval, + get_past_or_future, + get_since_until, + parse_human_datetime, + parse_human_timedelta, + parse_past_timedelta, +) + + +def mock_parse_human_datetime(s: str) -> Optional[datetime]: + if s == "now": + return datetime(2016, 11, 7, 9, 30, 10) + elif s == "2018": + return datetime(2018, 1, 1) + elif s == "2018-9": + return datetime(2018, 9, 1) + elif s == "today": + return datetime(2016, 11, 7) + elif s == "yesterday": + return datetime(2016, 11, 6) + elif s == "tomorrow": + return datetime(2016, 11, 8) + elif s == "Last year": + return datetime(2015, 11, 7) + elif s == "Last week": + return datetime(2015, 10, 31) + elif s == "Last 5 months": + return datetime(2016, 6, 7) + elif s == "Next 5 months": + return datetime(2017, 4, 7) + elif s in ["5 days", "5 days ago"]: + return datetime(2016, 11, 2) + elif s == "2018-01-01T00:00:00": + return datetime(2018, 1, 1) + elif s == "2018-12-31T23:59:59": + return datetime(2018, 12, 31, 23, 59, 59) + else: + return None + + +@patch("superset.utils.date_parser.parse_human_datetime", mock_parse_human_datetime) +def test_get_since_until() -> None: + result: Tuple[Optional[datetime], Optional[datetime]] + expected: Tuple[Optional[datetime], Optional[datetime]] + + result = get_since_until() + expected = None, datetime(2016, 11, 7) + assert result == expected + + result = get_since_until(" : now") + expected = None, datetime(2016, 11, 7, 9, 30, 10) + assert result == expected + + result = get_since_until("yesterday : tomorrow") + expected = datetime(2016, 11, 6), datetime(2016, 11, 8) + assert result == expected + + result = get_since_until("2018-01-01T00:00:00 : 2018-12-31T23:59:59") + expected = datetime(2018, 1, 1), datetime(2018, 12, 31, 23, 59, 59) + assert result == expected + + result = get_since_until("Last year") + expected = datetime(2015, 11, 7), datetime(2016, 11, 7) + assert result == expected + + result = get_since_until("Last quarter") + expected = datetime(2016, 8, 7), datetime(2016, 11, 7) + assert result == expected + + result = get_since_until("Last 5 months") + expected = datetime(2016, 6, 7), datetime(2016, 11, 7) + assert result == expected + + result = get_since_until("Last 1 month") + expected = datetime(2016, 10, 7), datetime(2016, 11, 7) + assert result == expected + + result = get_since_until("Next 5 months") + expected = datetime(2016, 11, 7), datetime(2017, 4, 7) + assert result == expected + + result = get_since_until("Next 1 month") + expected = datetime(2016, 11, 7), datetime(2016, 12, 7) + assert result == expected + + result = get_since_until(since="5 days") + expected = datetime(2016, 11, 2), datetime(2016, 11, 7) + assert result == expected + + result = get_since_until(since="5 days ago", until="tomorrow") + expected = datetime(2016, 11, 2), datetime(2016, 11, 8) + assert result == expected + + result = get_since_until(time_range="yesterday : tomorrow", time_shift="1 day") + expected = datetime(2016, 11, 5), datetime(2016, 11, 7) + assert result == expected + + result = get_since_until(time_range="5 days : now") + expected = datetime(2016, 11, 2), datetime(2016, 11, 7, 9, 30, 10) + assert result == expected + + result = 
get_since_until("Last week", relative_end="now") + expected = datetime(2016, 10, 31), datetime(2016, 11, 7, 9, 30, 10) + assert result == expected + + result = get_since_until("Last week", relative_start="now") + expected = datetime(2016, 10, 31, 9, 30, 10), datetime(2016, 11, 7) + assert result == expected + + result = get_since_until("Last week", relative_start="now", relative_end="now") + expected = datetime(2016, 10, 31, 9, 30, 10), datetime(2016, 11, 7, 9, 30, 10) + assert result == expected + + result = get_since_until("previous calendar week") + expected = datetime(2016, 10, 31, 0, 0, 0), datetime(2016, 11, 7, 0, 0, 0) + assert result == expected + + result = get_since_until("previous calendar month") + expected = datetime(2016, 10, 1, 0, 0, 0), datetime(2016, 11, 1, 0, 0, 0) + assert result == expected + + result = get_since_until("previous calendar year") + expected = datetime(2015, 1, 1, 0, 0, 0), datetime(2016, 1, 1, 0, 0, 0) + assert result == expected + + with pytest.raises(ValueError): + get_since_until(time_range="tomorrow : yesterday") + + +@patch("superset.utils.date_parser.parse_human_datetime", mock_parse_human_datetime) +def test_datetime_eval() -> None: + result = datetime_eval("datetime('now')") + expected = datetime(2016, 11, 7, 9, 30, 10) + assert result == expected + + result = datetime_eval("datetime('today')") + expected = datetime(2016, 11, 7) + assert result == expected + + result = datetime_eval("datetime('2018')") + expected = datetime(2018, 1, 1) + assert result == expected + + result = datetime_eval("datetime('2018-9')") + expected = datetime(2018, 9, 1) + assert result == expected + + # Parse compact arguments spelling + result = datetime_eval("dateadd(datetime('today'),1,year,)") + expected = datetime(2017, 11, 7) + assert result == expected + + result = datetime_eval("dateadd(datetime('today'), -2, year)") + expected = datetime(2014, 11, 7) + assert result == expected + + result = datetime_eval("dateadd(datetime('today'), 2, quarter)") + expected = datetime(2017, 5, 7) + assert result == expected + + result = datetime_eval("dateadd(datetime('today'), 3, month)") + expected = datetime(2017, 2, 7) + assert result == expected + + result = datetime_eval("dateadd(datetime('today'), -3, week)") + expected = datetime(2016, 10, 17) + assert result == expected + + result = datetime_eval("dateadd(datetime('today'), 3, day)") + expected = datetime(2016, 11, 10) + assert result == expected + + result = datetime_eval("dateadd(datetime('now'), 3, hour)") + expected = datetime(2016, 11, 7, 12, 30, 10) + assert result == expected + + result = datetime_eval("dateadd(datetime('now'), 40, minute)") + expected = datetime(2016, 11, 7, 10, 10, 10) + assert result == expected + + result = datetime_eval("dateadd(datetime('now'), -11, second)") + expected = datetime(2016, 11, 7, 9, 29, 59) + assert result == expected + + result = datetime_eval("datetrunc(datetime('now'), year)") + expected = datetime(2016, 1, 1, 0, 0, 0) + assert result == expected + + result = datetime_eval("datetrunc(datetime('now'), quarter)") + expected = datetime(2016, 10, 1, 0, 0, 0) + assert result == expected + + result = datetime_eval("datetrunc(datetime('now'), month)") + expected = datetime(2016, 11, 1, 0, 0, 0) + assert result == expected + + result = datetime_eval("datetrunc(datetime('now'), day)") + expected = datetime(2016, 11, 7, 0, 0, 0) + assert result == expected + + result = datetime_eval("datetrunc(datetime('now'), week)") + expected = datetime(2016, 11, 7, 0, 0, 0) + assert result == 
expected + + result = datetime_eval("datetrunc(datetime('now'), hour)") + expected = datetime(2016, 11, 7, 9, 0, 0) + assert result == expected + + result = datetime_eval("datetrunc(datetime('now'), minute)") + expected = datetime(2016, 11, 7, 9, 30, 0) + assert result == expected + + result = datetime_eval("datetrunc(datetime('now'), second)") + expected = datetime(2016, 11, 7, 9, 30, 10) + assert result == expected + + result = datetime_eval("lastday(datetime('now'), year)") + expected = datetime(2016, 12, 31, 0, 0, 0) + assert result == expected + + result = datetime_eval("lastday(datetime('today'), month)") + expected = datetime(2016, 11, 30, 0, 0, 0) + assert result == expected + + result = datetime_eval("holiday('Christmas')") + expected = datetime(2016, 12, 25, 0, 0, 0) + assert result == expected + + result = datetime_eval("holiday('Labor day', datetime('2018-01-01T00:00:00'))") + expected = datetime(2018, 9, 3, 0, 0, 0) + assert result == expected + + result = datetime_eval( + "holiday('Boxing day', datetime('2018-01-01T00:00:00'), 'UK')" + ) + expected = datetime(2018, 12, 26, 0, 0, 0) + assert result == expected + + result = datetime_eval( + "lastday(dateadd(datetime('2018-01-01T00:00:00'), 1, month), month)" + ) + expected = datetime(2018, 2, 28, 0, 0, 0) + assert result == expected + + +@patch("superset.utils.date_parser.datetime") +def test_parse_human_timedelta(mock_datetime: Mock) -> None: + mock_datetime.now.return_value = datetime(2019, 4, 1) + mock_datetime.side_effect = lambda *args, **kw: datetime(*args, **kw) + assert parse_human_timedelta("now") == timedelta(0) + assert parse_human_timedelta("1 year") == timedelta(366) + assert parse_human_timedelta("-1 year") == timedelta(-365) + assert parse_human_timedelta(None) == timedelta(0) + assert parse_human_timedelta("1 month", datetime(2019, 4, 1)) == timedelta(30) + assert parse_human_timedelta("1 month", datetime(2019, 5, 1)) == timedelta(31) + assert parse_human_timedelta("1 month", datetime(2019, 2, 1)) == timedelta(28) + assert parse_human_timedelta("-1 month", datetime(2019, 2, 1)) == timedelta(-31) + + +@patch("superset.utils.date_parser.datetime") +def test_parse_past_timedelta(mock_datetime: Mock) -> None: + mock_datetime.now.return_value = datetime(2019, 4, 1) + mock_datetime.side_effect = lambda *args, **kw: datetime(*args, **kw) + assert parse_past_timedelta("1 year") == timedelta(365) + assert parse_past_timedelta("-1 year") == timedelta(365) + assert parse_past_timedelta("52 weeks") == timedelta(364) + assert parse_past_timedelta("1 month") == timedelta(31) + + +def test_get_past_or_future() -> None: + # 2020 is a leap year + dttm = datetime(2020, 2, 29) + assert get_past_or_future("1 year", dttm) == datetime(2021, 2, 28) + assert get_past_or_future("-1 year", dttm) == datetime(2019, 2, 28) + assert get_past_or_future("1 month", dttm) == datetime(2020, 3, 29) + assert get_past_or_future("3 month", dttm) == datetime(2020, 5, 29) + + +def test_parse_human_datetime() -> None: + with pytest.raises(TimeRangeAmbiguousError): + parse_human_datetime("2 days") + + with pytest.raises(TimeRangeAmbiguousError): + parse_human_datetime("2 day") + + with pytest.raises(TimeRangeParseFailError): + parse_human_datetime("xxxxxxx") + + assert parse_human_datetime("2015-04-03") == datetime(2015, 4, 3, 0, 0) + assert parse_human_datetime("2/3/1969") == datetime(1969, 2, 3, 0, 0) + + assert parse_human_datetime("now") <= datetime.now() + assert parse_human_datetime("yesterday") < datetime.now() + assert date.today() - timedelta(1) 
== parse_human_datetime("yesterday").date() + + assert ( + parse_human_datetime("one year ago").date() + == (datetime.now() - relativedelta(years=1)).date() + ) + assert ( + parse_human_datetime("2 years after").date() + == (datetime.now() + relativedelta(years=2)).date() + ) + + +def test_date_range_migration() -> None: + params = '{"time_range": " 8 days : 2020-03-10T00:00:00"}' + assert re.search(DateRangeMigration.x_dateunit_in_since, params) + + params = '{"time_range": "2020-03-10T00:00:00 : 8 days "}' + assert re.search(DateRangeMigration.x_dateunit_in_until, params) + + params = '{"time_range": " 2 weeks : 8 days "}' + assert re.search(DateRangeMigration.x_dateunit_in_since, params) + assert re.search(DateRangeMigration.x_dateunit_in_until, params) + + params = '{"time_range": "2 weeks ago : 8 days later"}' + assert not re.search(DateRangeMigration.x_dateunit_in_since, params) + assert not re.search(DateRangeMigration.x_dateunit_in_until, params) + + field = " 8 days " + assert re.search(DateRangeMigration.x_dateunit, field) + + field = "last week" + assert not re.search(DateRangeMigration.x_dateunit, field) + + field = "10 years ago" + assert not re.search(DateRangeMigration.x_dateunit, field) diff --git a/tests/unit_tests/utils/db.py b/tests/unit_tests/utils/db.py new file mode 100644 index 0000000000000..554c95bd43187 --- /dev/null +++ b/tests/unit_tests/utils/db.py @@ -0,0 +1,30 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from typing import Any + +from superset import security_manager + + +def get_test_user(id_: int, username: str) -> Any: + """Create a sample test user""" + return security_manager.user_model( + id=id_, + username=username, + first_name=username, + last_name=username, + email=f"{username}@example.com", + ) diff --git a/tests/unit_tests/utils/log_tests.py b/tests/unit_tests/utils/log_tests.py new file mode 100644 index 0000000000000..5b031b5778875 --- /dev/null +++ b/tests/unit_tests/utils/log_tests.py @@ -0,0 +1,37 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
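+# get_logger_from_status() returns a (logger_method, level_name) pair keyed off the
+# HTTP status code; the tests below cover 500 -> "exception", 422 -> "warning" and
+# 300 -> "info".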
+ + +from superset.utils.log import get_logger_from_status + + +def test_log_from_status_exception() -> None: + (func, log_level) = get_logger_from_status(500) + assert func.__name__ == "exception" + assert log_level == "exception" + + +def test_log_from_status_warning() -> None: + (func, log_level) = get_logger_from_status(422) + assert func.__name__ == "warning" + assert log_level == "warning" + + +def test_log_from_status_info() -> None: + (func, log_level) = get_logger_from_status(300) + assert func.__name__ == "info" + assert log_level == "info" diff --git a/tests/unit_tests/utils/test_core.py b/tests/unit_tests/utils/test_core.py new file mode 100644 index 0000000000000..6845bb2fc1545 --- /dev/null +++ b/tests/unit_tests/utils/test_core.py @@ -0,0 +1,86 @@ +# -*- coding: utf-8 -*- +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from typing import Any, Dict + +import pytest + +from superset.utils.core import QueryObjectFilterClause, remove_extra_adhoc_filters + +ADHOC_FILTER: QueryObjectFilterClause = { + "col": "foo", + "op": "==", + "val": "bar", +} + +EXTRA_FILTER: QueryObjectFilterClause = { + "col": "foo", + "op": "==", + "val": "bar", + "isExtra": True, +} + + +@pytest.mark.parametrize( + "original,expected", + [ + ({"foo": "bar"}, {"foo": "bar"}), + ( + {"foo": "bar", "adhoc_filters": [ADHOC_FILTER]}, + {"foo": "bar", "adhoc_filters": [ADHOC_FILTER]}, + ), + ( + {"foo": "bar", "adhoc_filters": [EXTRA_FILTER]}, + {"foo": "bar", "adhoc_filters": []}, + ), + ( + { + "foo": "bar", + "adhoc_filters": [ADHOC_FILTER, EXTRA_FILTER], + }, + {"foo": "bar", "adhoc_filters": [ADHOC_FILTER]}, + ), + ( + { + "foo": "bar", + "adhoc_filters_b": [ADHOC_FILTER, EXTRA_FILTER], + }, + {"foo": "bar", "adhoc_filters_b": [ADHOC_FILTER]}, + ), + ( + { + "foo": "bar", + "custom_adhoc_filters": [ + ADHOC_FILTER, + EXTRA_FILTER, + ], + }, + { + "foo": "bar", + "custom_adhoc_filters": [ + ADHOC_FILTER, + EXTRA_FILTER, + ], + }, + ), + ], +) +def test_remove_extra_adhoc_filters( + original: Dict[str, Any], expected: Dict[str, Any] +) -> None: + remove_extra_adhoc_filters(original) + assert expected == original diff --git a/tests/unit_tests/utils/test_decorators.py b/tests/unit_tests/utils/test_decorators.py new file mode 100644 index 0000000000000..3aafc7a91db2b --- /dev/null +++ b/tests/unit_tests/utils/test_decorators.py @@ -0,0 +1,87 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + + +from contextlib import nullcontext +from enum import Enum +from inspect import isclass +from typing import Any, Optional +from unittest.mock import call, Mock, patch + +import pytest + +from superset import app +from superset.utils import decorators + + +class ResponseValues(str, Enum): + FAIL = "fail" + WARN = "warn" + OK = "ok" + + +def test_debounce() -> None: + mock = Mock() + + @decorators.debounce() + def myfunc(arg1: int, arg2: int, kwarg1: str = "abc", kwarg2: int = 2) -> int: + mock(arg1, kwarg1) + return arg1 + arg2 + kwarg2 + + # should be called only once when arguments don't change + myfunc(1, 1) + myfunc(1, 1) + result = myfunc(1, 1) + mock.assert_called_once_with(1, "abc") + assert result == 4 + + # kwarg order shouldn't matter + myfunc(1, 0, kwarg2=2, kwarg1="haha") + result = myfunc(1, 0, kwarg1="haha", kwarg2=2) + mock.assert_has_calls([call(1, "abc"), call(1, "haha")]) + assert result == 3 + + +@pytest.mark.parametrize( + "response_value, expected_exception, expected_result", + [ + (ResponseValues.OK, None, "custom.prefix.ok"), + (ResponseValues.FAIL, ValueError, "custom.prefix.error"), + (ResponseValues.WARN, FileNotFoundError, "custom.prefix.warn"), + ], +) +def test_statsd_gauge( + response_value: str, expected_exception: Optional[Exception], expected_result: str +) -> None: + @decorators.statsd_gauge("custom.prefix") + def my_func(response: ResponseValues, *args: Any, **kwargs: Any) -> str: + if response == ResponseValues.FAIL: + raise ValueError("Error") + if response == ResponseValues.WARN: + raise FileNotFoundError("Not found") + return "OK" + + with patch.object(app.config["STATS_LOGGER"], "gauge") as mock: + cm = ( + pytest.raises(expected_exception) + if isclass(expected_exception) and issubclass(expected_exception, Exception) + else nullcontext() + ) + + with cm: + my_func(response_value, 1, 2) + mock.assert_called_once_with(expected_result, 1) diff --git a/tests/unit_tests/utils/test_file.py b/tests/unit_tests/utils/test_file.py new file mode 100644 index 0000000000000..de20402e5c21c --- /dev/null +++ b/tests/unit_tests/utils/test_file.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
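+# get_filename(model_name, model_id, skip_id) sanitizes the model name (spaces become
+# underscores, characters unsafe in filenames are stripped or replaced) and appends
+# the model id unless skip_id is True; when nothing of the name survives sanitization
+# (emoji- or CJK-only names), the id alone is used as the filename.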
+import pytest + +from superset.utils.file import get_filename + + +@pytest.mark.parametrize( + "model_name,model_id,skip_id,expected_filename", + [ + ("Energy Sankey", 132, False, "Energy_Sankey_132"), + ("Energy Sankey", 132, True, "Energy_Sankey"), + ("folder1/Energy Sankey", 132, True, "folder1_Energy_Sankey"), + ("D:\\Charts\\Energy Sankey", 132, True, "DChartsEnergy_Sankey"), + ("🥴🥴🥴", 4751, False, "4751"), + ("🥴🥴🥴", 4751, True, "4751"), + ("Energy Sankey 🥴🥴🥴", 4751, False, "Energy_Sankey_4751"), + ("Energy Sankey 🥴🥴🥴", 4751, True, "Energy_Sankey"), + ("你好", 475, False, "475"), + ("你好", 475, True, "475"), + ("Energy Sankey 你好", 475, False, "Energy_Sankey_475"), + ("Energy Sankey 你好", 475, True, "Energy_Sankey"), + ], +) +def test_get_filename( + model_name: str, model_id: int, skip_id: bool, expected_filename: str +) -> None: + original_filename = get_filename(model_name, model_id, skip_id) + assert expected_filename == original_filename diff --git a/tests/unit_tests/utils/urls_tests.py b/tests/unit_tests/utils/urls_tests.py new file mode 100644 index 0000000000000..208d6caea4375 --- /dev/null +++ b/tests/unit_tests/utils/urls_tests.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
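+# modify_url_query() rewrites a single query parameter (the "standalone" flag in the
+# cases below) while leaving the rest of the URL intact. Judging from the parametrized
+# cases, is_safe_url() only accepts absolute URLs whose scheme and host match the
+# current request, so even https://localhost is rejected under the plain-http test
+# request context.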
+import pytest + +from superset.utils.urls import modify_url_query + +EXPLORE_CHART_LINK = "http://localhost:9000/explore/?form_data=%7B%22slice_id%22%3A+76%7D&standalone=true&force=false" + +EXPLORE_DASHBOARD_LINK = "http://localhost:9000/superset/dashboard/3/?standalone=3" + + +def test_convert_chart_link() -> None: + test_url = modify_url_query(EXPLORE_CHART_LINK, standalone="0") + assert ( + test_url + == "http://localhost:9000/explore/?form_data=%7B%22slice_id%22%3A%2076%7D&standalone=0&force=false" + ) + + +def test_convert_dashboard_link() -> None: + test_url = modify_url_query(EXPLORE_DASHBOARD_LINK, standalone="0") + assert test_url == "http://localhost:9000/superset/dashboard/3/?standalone=0" + + +def test_convert_dashboard_link_with_integer() -> None: + test_url = modify_url_query(EXPLORE_DASHBOARD_LINK, standalone=0) + assert test_url == "http://localhost:9000/superset/dashboard/3/?standalone=0" + + +@pytest.mark.parametrize( + "url,is_safe", + [ + ("http://localhost/", True), + ("http://localhost/superset/1", True), + ("https://localhost/", False), + ("https://localhost/superset/1", False), + ("localhost/superset/1", False), + ("ftp://localhost/superset/1", False), + ("http://external.com", False), + ("https://external.com", False), + ("external.com", False), + ("///localhost", False), + ("xpto://localhost:[3/1/", False), + ], +) +def test_is_safe_url(url: str, is_safe: bool) -> None: + from superset import app + from superset.utils.urls import is_safe_url + + with app.test_request_context("/"): + assert is_safe_url(url) == is_safe diff --git a/tests/unit_tests/views/__init__.py b/tests/unit_tests/views/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/views/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License.