Skip to content

Commit

Permalink
Still trying to fix tests
Browse files Browse the repository at this point in the history
  • Loading branch information
mistercrunch committed Sep 19, 2018
1 parent af176ee commit ca046d3
Show file tree
Hide file tree
Showing 6 changed files with 43 additions and 66 deletions.
2 changes: 0 additions & 2 deletions superset/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -307,8 +307,6 @@ class CeleryConfig(object):
CELERY_CONFIG = CeleryConfig
"""
CELERY_CONFIG = None
SQL_CELERY_DB_FILE_PATH = os.path.join(DATA_DIR, 'celerydb.sqlite')
SQL_CELERY_RESULTS_DB_FILE_PATH = os.path.join(DATA_DIR, 'celery_results.sqlite')

# static http headers to be served by your Superset server.
# This header prevents iFrames from other domains and
Expand Down
18 changes: 4 additions & 14 deletions superset/sql_lab.py
Original file line number Diff line number Diff line change
Expand Up @@ -138,6 +138,7 @@ def handle_error(msg):
# Limit enforced only for retrieving the data, not for the CTA queries.
superset_query = SupersetQuery(rendered_query)
executed_sql = superset_query.stripped()
SQL_MAX_ROWS = app.config.get('SQL_MAX_ROW')
if not superset_query.is_readonly() and not database.allow_dml:
return handle_error(
'Only `SELECT` statements are allowed against this database')
Expand All @@ -152,20 +153,9 @@ def handle_error(msg):
query.user_id, start_dttm.strftime('%Y_%m_%d_%H_%M_%S'))
executed_sql = superset_query.as_create_table(query.tmp_table_name)
query.select_as_cta_used = True

SQL_MAX_ROW = app.config.get('SQL_MAX_ROW')
DISPLAY_MAX_ROW = app.config.get('DISPLAY_MAX_ROW')
# In async mode, we allow a higher limit that gets sent to the results
# backend to power the larger CSV extracts
# the lower DISPLAY_MAX_ROW limit still gets applied on the results/
# endpoint. SQL Lab never gets more than DISPLAY_MAX_ROW to avoid
# crashing the user's browser
applicable_limit = SQL_MAX_ROW if store_results else DISPLAY_MAX_ROW

if (superset_query.is_select() and applicable_limit and
(not query.limit or query.limit > applicable_limit)):
print('query,applicable', query.limit, applicable_limit)
query.limit = applicable_limit
if (superset_query.is_select() and SQL_MAX_ROWS and
(not query.limit or query.limit > SQL_MAX_ROWS)):
query.limit = SQL_MAX_ROWS
executed_sql = database.apply_limit_to_sql(executed_sql, query.limit)

# Hook to allow environment-specific mutation (usually comments) to the SQL
Expand Down
2 changes: 1 addition & 1 deletion superset/views/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -2418,7 +2418,7 @@ def sql_json(self):
tmp_table_name,
)

client_id = request.form.get('client_id') or utils.shortid()
client_id = request.form.get('client_id') or utils.shortid()[:10]

query = Query(
database_id=int(database_id),
Expand Down
2 changes: 1 addition & 1 deletion tests/base_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -172,7 +172,7 @@ def revoke_public_access_to_table(self, table):
perm.view_menu and table.perm in perm.view_menu.name):
security_manager.del_permission_role(public_role, perm)

def run_sql(self, sql, client_id, user_name=None, raise_on_error=False):
def run_sql(self, sql, client_id=None, user_name=None, raise_on_error=False):
if user_name:
self.logout()
self.login(username=(user_name if user_name else 'admin'))
Expand Down
83 changes: 37 additions & 46 deletions tests/celery_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,15 +6,14 @@
from __future__ import unicode_literals

import json
import os
import subprocess
import time
import unittest

import pandas as pd
from past.builtins import basestring

from superset import app, db, security_manager
from superset import app, db
from superset.models.helpers import QueryStatus
from superset.models.sql_lab import Query
from superset.sql_parse import SupersetQuery
Expand All @@ -23,13 +22,12 @@


BASE_DIR = app.config.get('BASE_DIR')
CELERY_SLEEP_TIME = 5


class CeleryConfig(object):
BROKER_URL = 'sqla+sqlite:///' + app.config.get('SQL_CELERY_DB_FILE_PATH')
BROKER_URL = app.config.get('CELERY_RESULT_BACKEND')
CELERY_IMPORTS = ('superset.sql_lab', )
CELERY_RESULT_BACKEND = (
'db+sqlite:///' + app.config.get('SQL_CELERY_RESULTS_DB_FILE_PATH'))
CELERY_ANNOTATIONS = {'sql_lab.add': {'rate_limit': '10/s'}}
CONCURRENCY = 1

Expand Down Expand Up @@ -91,28 +89,11 @@ def get_query_by_id(self, id):
def setUpClass(cls):
db.session.query(Query).delete()
db.session.commit()
try:
os.remove(app.config.get('SQL_CELERY_DB_FILE_PATH'))
except OSError as e:
app.logger.warn(str(e))
try:
os.remove(app.config.get('SQL_CELERY_RESULTS_DB_FILE_PATH'))
except OSError as e:
app.logger.warn(str(e))

security_manager.sync_role_definitions()

worker_command = BASE_DIR + '/bin/superset worker'

worker_command = BASE_DIR + '/bin/superset worker -w 2'
subprocess.Popen(
worker_command, shell=True, stdout=subprocess.PIPE)

admin = security_manager.find_user('admin')
if not admin:
security_manager.add_user(
'admin', 'admin', ' user', '[email protected]',
security_manager.find_role('Admin'),
password='general')

@classmethod
def tearDownClass(cls):
subprocess.call(
Expand All @@ -124,7 +105,7 @@ def tearDownClass(cls):
shell=True,
)

def run_sql(self, db_id, sql, client_id, cta='false', tmp_table='tmp',
def run_sql(self, db_id, sql, client_id=None, cta='false', tmp_table='tmp',
async_='false'):
self.login()
resp = self.client.post(
Expand All @@ -142,7 +123,8 @@ def run_sql(self, db_id, sql, client_id, cta='false', tmp_table='tmp',
return json.loads(resp.data.decode('utf-8'))

def test_run_sync_query_dont_exist(self):
db_id = get_main_database(db.session).id
main_db = get_main_database(db.session)
db_id = main_db.id
sql_dont_exist = 'SELECT name FROM table_dont_exist'
result1 = self.run_sql(db_id, sql_dont_exist, '1', cta='true')
self.assertTrue('error' in result1)
Expand All @@ -151,11 +133,13 @@ def test_run_sync_query_cta(self):
main_db = get_main_database(db.session)
db_id = main_db.id
eng = main_db.get_sqla_engine()
tmp_table_name = 'tmp_async_22'
self.drop_table_if_exists(tmp_table_name, main_db)
perm_name = 'can_sql_json'
sql_where = (
"SELECT name FROM ab_permission WHERE name='{}'".format(perm_name))
result2 = self.run_sql(
db_id, sql_where, '2', tmp_table='tmp_table_2', cta='true')
db_id, sql_where, '2', tmp_table=tmp_table_name, cta='true')
self.assertEqual(QueryStatus.SUCCESS, result2['query']['state'])
self.assertEqual([], result2['data'])
self.assertEqual([], result2['columns'])
Expand All @@ -167,34 +151,42 @@ def test_run_sync_query_cta(self):
self.assertEqual([{'name': perm_name}], data2)

def test_run_sync_query_cta_no_data(self):
db_id = get_main_database(db.session).id
sql_empty_result = 'SELECT * FROM ab_user WHERE id=666 LIMIT 666'
result3 = self.run_sql(
db_id, sql_empty_result, '3', cta='false')
main_db = get_main_database(db.session)
db_id = main_db.id
sql_empty_result = 'SELECT * FROM ab_user WHERE id=666'
result3 = self.run_sql(db_id, sql_empty_result, '3')
self.assertEqual(QueryStatus.SUCCESS, result3['query']['state'])
self.assertEqual([], result3['data'])
self.assertEqual([], result3['columns'])

query = self.get_query_by_id(result3['query']['serverId'])
self.assertEqual(QueryStatus.SUCCESS, query.status)
self.assertEqual(666, query.limit)
query3 = self.get_query_by_id(result3['query']['serverId'])
self.assertEqual(QueryStatus.SUCCESS, query3.status)

def drop_table_if_exists(self, table_name, database=None):
    """Drop ``table_name`` if it exists; works on any DB.

    :param table_name: name of the table to drop (trusted test input --
        it is interpolated directly into the SQL, so never pass
        user-controlled values here)
    :param database: Database model whose engine should run the DROP;
        required in practice, the default only mirrors the original
        optional signature
    :raises ValueError: if ``database`` is None (the original code
        crashed with AttributeError on ``database.id`` before its own
        None-guard could run)
    """
    if database is None:
        # Guard first: the original read database.id before this check,
        # making the default value unusable.
        raise ValueError('drop_table_if_exists() requires a database')
    sql = 'DROP TABLE {}'.format(table_name)
    # DML must be allowed on the target database for DROP to succeed.
    database.allow_dml = True
    db.session.flush()
    return self.run_sql(database.id, sql)

def test_run_async_query(self):
main_db = get_main_database(db.session)
eng = main_db.get_sqla_engine()
db_id = main_db.id

self.drop_table_if_exists('tmp_async_1', main_db)

sql_where = "SELECT name FROM ab_role WHERE name='Admin'"
result = self.run_sql(
main_db.id, sql_where, '4', async_='true', tmp_table='tmp_async_1',
db_id, sql_where, '4', async_='true', tmp_table='tmp_async_1',
cta='true')
assert result['query']['state'] in (
QueryStatus.PENDING, QueryStatus.RUNNING, QueryStatus.SUCCESS)

time.sleep(1)
time.sleep(CELERY_SLEEP_TIME)

query = self.get_query_by_id(result['query']['serverId'])
df = pd.read_sql_query(query.select_sql, con=eng)
self.assertEqual(QueryStatus.SUCCESS, query.status)
self.assertEqual([{'name': 'Admin'}], df.to_dict(orient='records'))
self.assertEqual(QueryStatus.SUCCESS, query.status)
self.assertTrue('FROM tmp_async_1' in query.select_sql)
self.assertEqual(
Expand All @@ -208,20 +200,19 @@ def test_run_async_query(self):

def test_run_async_query_with_lower_limit(self):
main_db = get_main_database(db.session)
eng = main_db.get_sqla_engine()
db_id = main_db.id
self.drop_table_if_exists('tmp_async_2', main_db)

sql_where = "SELECT name FROM ab_role WHERE name='Alpha' LIMIT 1"
result = self.run_sql(
main_db.id, sql_where, '5', async_='true', tmp_table='tmp_async_2',
db_id, sql_where, '5', async_='true', tmp_table='tmp_async_2',
cta='true')
assert result['query']['state'] in (
QueryStatus.PENDING, QueryStatus.RUNNING, QueryStatus.SUCCESS)

time.sleep(1)
time.sleep(CELERY_SLEEP_TIME)

query = self.get_query_by_id(result['query']['serverId'])
df = pd.read_sql_query(query.select_sql, con=eng)
self.assertEqual(QueryStatus.SUCCESS, query.status)
self.assertEqual([{'name': 'Alpha'}], df.to_dict(orient='records'))
self.assertEqual(QueryStatus.SUCCESS, query.status)
self.assertTrue('FROM tmp_async_2' in query.select_sql)
self.assertEqual(
Expand Down
2 changes: 0 additions & 2 deletions tests/superset_test_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@
if 'SUPERSET__SQLALCHEMY_DATABASE_URI' in os.environ:
SQLALCHEMY_DATABASE_URI = os.environ.get('SUPERSET__SQLALCHEMY_DATABASE_URI')

SQL_CELERY_RESULTS_DB_FILE_PATH = os.path.join(DATA_DIR, 'celery_results.sqlite')
SQL_SELECT_AS_CTA = True
SQL_MAX_ROW = 666

Expand All @@ -28,7 +27,6 @@
class CeleryConfig(object):
BROKER_URL = 'redis://localhost'
CELERY_IMPORTS = ('superset.sql_lab', )
CELERY_RESULT_BACKEND = 'db+sqlite:///' + SQL_CELERY_RESULTS_DB_FILE_PATH
CELERY_ANNOTATIONS = {'sql_lab.add': {'rate_limit': '10/s'}}
CONCURRENCY = 1

Expand Down

0 comments on commit ca046d3

Please sign in to comment.