From a7b14bfb9a74dc84a9e8af50333c2712c4af9929 Mon Sep 17 00:00:00 2001 From: Yoshiken Date: Sun, 11 Aug 2019 22:09:04 +0900 Subject: [PATCH] Fix according to pycodestyle format (#4011) * Fix W292 no newline at end of file * Fix extra whitespace * Fix E305 expected 2 blank lines after class or function definition * Fix W391 blank line at end of file * Fix E231 missing whitespace after * Fix E303 too many blank lines * Fix E302 expected 2 blank lines * Fix E128 continuation line under-indented for visual indent --- redash/__init__.py | 2 +- redash/authentication/account.py | 2 -- redash/authentication/org_resolving.py | 1 + redash/authentication/remote_user_auth.py | 1 + redash/cli/data_sources.py | 2 +- redash/destinations/chatwork.py | 1 + redash/destinations/slack.py | 3 ++- redash/handlers/authentication.py | 2 +- redash/handlers/chrome_logger.py | 4 ++-- redash/handlers/events.py | 2 +- redash/handlers/organization.py | 2 +- redash/metrics/request.py | 1 + redash/query_runner/__init__.py | 2 +- redash/query_runner/axibase_tsd.py | 1 + redash/query_runner/clickhouse.py | 1 + redash/query_runner/drill.py | 22 +++++++++++----------- redash/query_runner/google_spreadsheets.py | 2 +- redash/query_runner/graphite.py | 1 + redash/query_runner/hive_ds.py | 7 +++---- redash/query_runner/impala_ds.py | 1 + redash/query_runner/jql.py | 8 +++++--- redash/query_runner/mapd.py | 1 + redash/query_runner/mongodb.py | 10 +++++----- redash/query_runner/mssql.py | 1 + redash/query_runner/mssql_odbc.py | 1 + redash/query_runner/oracle.py | 3 ++- redash/query_runner/pg.py | 1 + redash/query_runner/phoenix.py | 2 ++ redash/query_runner/python.py | 1 - redash/query_runner/qubole.py | 1 + redash/query_runner/salesforce.py | 1 + redash/query_runner/sqlite.py | 1 + redash/query_runner/treasuredata.py | 3 ++- redash/query_runner/vertica.py | 3 ++- redash/serializers/__init__.py | 2 +- redash/serializers/query_result.py | 2 +- redash/worker.py | 4 ++-- 37 files changed, 62 insertions(+), 
43 deletions(-) diff --git a/redash/__init__.py b/redash/__init__.py index b7dad05fd4..0cd1a5db03 100644 --- a/redash/__init__.py +++ b/redash/__init__.py @@ -46,4 +46,4 @@ def setup_logging(): limiter = Limiter(key_func=get_ipaddr, storage_uri=settings.LIMITER_STORAGE) import_query_runners(settings.QUERY_RUNNERS) -import_destinations(settings.DESTINATIONS) \ No newline at end of file +import_destinations(settings.DESTINATIONS) diff --git a/redash/authentication/account.py b/redash/authentication/account.py index 3300796221..54a8ae6551 100644 --- a/redash/authentication/account.py +++ b/redash/authentication/account.py @@ -71,5 +71,3 @@ def send_password_reset_email(user): send_mail.delay([user.email], subject, html_content, text_content) return reset_link - - diff --git a/redash/authentication/org_resolving.py b/redash/authentication/org_resolving.py index 755b788a9c..0eacaad5f6 100644 --- a/redash/authentication/org_resolving.py +++ b/redash/authentication/org_resolving.py @@ -19,5 +19,6 @@ def _get_current_org(): logging.debug("Current organization: %s (slug: %s)", g.org, slug) return g.org + # TODO: move to authentication current_org = LocalProxy(_get_current_org) diff --git a/redash/authentication/remote_user_auth.py b/redash/authentication/remote_user_auth.py index 77002e9324..8402aeb23d 100644 --- a/redash/authentication/remote_user_auth.py +++ b/redash/authentication/remote_user_auth.py @@ -9,6 +9,7 @@ blueprint = Blueprint('remote_user_auth', __name__) + @blueprint.route(org_scoped_rule("/remote_user/login")) def login(org_slug=None): unsafe_next_path = request.args.get('next') diff --git a/redash/cli/data_sources.py b/redash/cli/data_sources.py index 76e54a88c7..fa7c7a8032 100644 --- a/redash/cli/data_sources.py +++ b/redash/cli/data_sources.py @@ -37,7 +37,7 @@ def list(organization=None): def validate_data_source_type(type): if type not in query_runners.keys(): - print ("Error: the type \"{}\" is not supported (supported types: {})." 
+        print("Error: the type \"{}\" is not supported (supported types: {})." .format(type, ", ".join(query_runners.keys()))) exit(1) diff --git a/redash/destinations/chatwork.py b/redash/destinations/chatwork.py index c4751f25d8..aea6855a3e 100644 --- a/redash/destinations/chatwork.py +++ b/redash/destinations/chatwork.py @@ -62,4 +62,5 @@ def notify(self, alert, query, user, new_state, app, host, options): except Exception: logging.exception('ChatWork send ERROR.') + register(ChatWork) diff --git a/redash/destinations/slack.py b/redash/destinations/slack.py index 9cbcfda2c5..18c998cf89 100644 --- a/redash/destinations/slack.py +++ b/redash/destinations/slack.py @@ -67,7 +67,7 @@ def notify(self, alert, query, user, new_state, app, host, options): else: text = alert.name + " went back to normal" color = "#27ae60" - + payload = {'attachments': [{'text': text, 'color': color, 'fields': fields}]} if options.get('username'): payload['username'] = options.get('username') @@ -83,4 +83,5 @@ def notify(self, alert, query, user, new_state, app, host, options): except Exception: logging.exception("Slack send ERROR.") + register(Slack) diff --git a/redash/handlers/authentication.py b/redash/handlers/authentication.py index 6c002106c2..ec69a80c5c 100644 --- a/redash/handlers/authentication.py +++ b/redash/handlers/authentication.py @@ -100,7 +100,7 @@ def verify(token, org_slug=None): models.db.session.add(user) models.db.session.commit() - template_context = { "org_slug": org_slug } if settings.MULTI_ORG else {} + template_context = {"org_slug": org_slug} if settings.MULTI_ORG else {} next_url = url_for('redash.index', **template_context) return render_template("verify.html", next_url=next_url) diff --git a/redash/handlers/chrome_logger.py b/redash/handlers/chrome_logger.py index dfd26a02db..ab3b099aae 100644 --- a/redash/handlers/chrome_logger.py +++ b/redash/handlers/chrome_logger.py @@ -30,7 +30,7 @@ def chrome_log(response): request.method, request.path,
response.status_code, request_duration, queries_count, queries_duration) chromelogger.group_collapsed(group_name) - + endpoint = (request.endpoint or 'unknown').replace('.', '_') chromelogger.info('Endpoint: {}'.format(endpoint)) chromelogger.info('Content Type: {}'.format(response.content_type)) @@ -49,6 +49,6 @@ def chrome_log(response): def init_app(app): if not app.debug: - return + return app.after_request(chrome_log) diff --git a/redash/handlers/events.py b/redash/handlers/events.py index ecef687c9e..ec172ac97c 100644 --- a/redash/handlers/events.py +++ b/redash/handlers/events.py @@ -22,7 +22,7 @@ def event_details(event): if event.object_type == 'data_source' and event.action == 'execute_query': details['query'] = event.additional_properties['query'] details['data_source'] = event.object_id - elif event.object_type == 'page' and event.action =='view': + elif event.object_type == 'page' and event.action == 'view': details['page'] = event.object_id else: details['object_id'] = event.object_id diff --git a/redash/handlers/organization.py b/redash/handlers/organization.py index 464943a065..5c9858e750 100644 --- a/redash/handlers/organization.py +++ b/redash/handlers/organization.py @@ -14,7 +14,7 @@ def organization_status(org_slug=None): 'alerts': models.Alert.all(group_ids=current_user.group_ids).count(), 'data_sources': models.DataSource.all(current_org, group_ids=current_user.group_ids).count(), 'queries': models.Query.all_queries(current_user.group_ids, current_user.id, include_drafts=True).count(), - 'dashboards': models.Dashboard.query.filter(models.Dashboard.org==current_org, models.Dashboard.is_archived==False).count(), + 'dashboards': models.Dashboard.query.filter(models.Dashboard.org == current_org, models.Dashboard.is_archived == False).count(), } return json_response(dict(object_counters=counters)) diff --git a/redash/metrics/request.py b/redash/metrics/request.py index b90f81aaff..02ad1ba493 100644 --- a/redash/metrics/request.py +++ 
b/redash/metrics/request.py @@ -37,6 +37,7 @@ def calculate_metrics(response): return response + MockResponse = namedtuple('MockResponse', ['status_code', 'content_type', 'content_length']) diff --git a/redash/query_runner/__init__.py b/redash/query_runner/__init__.py index 0079d7f915..c1d473cd1c 100644 --- a/redash/query_runner/__init__.py +++ b/redash/query_runner/__init__.py @@ -308,4 +308,4 @@ def guess_type_from_string(string_value): except (ValueError, OverflowError): pass - return TYPE_STRING \ No newline at end of file + return TYPE_STRING diff --git a/redash/query_runner/axibase_tsd.py b/redash/query_runner/axibase_tsd.py index 78f533fdbf..d76de9ee29 100644 --- a/redash/query_runner/axibase_tsd.py +++ b/redash/query_runner/axibase_tsd.py @@ -195,4 +195,5 @@ def get_schema(self, get_stats=False): values = schema.values() return values + register(AxibaseTSD) diff --git a/redash/query_runner/clickhouse.py b/redash/query_runner/clickhouse.py index 8a07b3634a..ebab7c83d3 100644 --- a/redash/query_runner/clickhouse.py +++ b/redash/query_runner/clickhouse.py @@ -150,4 +150,5 @@ def run_query(self, query, user): error = unicode(e) return data, error + register(ClickHouse) diff --git a/redash/query_runner/drill.py b/redash/query_runner/drill.py index fff5d8be24..780e74072b 100644 --- a/redash/query_runner/drill.py +++ b/redash/query_runner/drill.py @@ -104,17 +104,17 @@ def run_query(self, query, user): def get_schema(self, get_stats=False): query = """ - SELECT DISTINCT - TABLE_SCHEMA, - TABLE_NAME, - COLUMN_NAME - FROM - INFORMATION_SCHEMA.`COLUMNS` - WHERE - TABLE_SCHEMA not in ('INFORMATION_SCHEMA', 'information_schema', 'sys') - and TABLE_SCHEMA not like '%.information_schema' - and TABLE_SCHEMA not like '%.INFORMATION_SCHEMA' - + SELECT DISTINCT + TABLE_SCHEMA, + TABLE_NAME, + COLUMN_NAME + FROM + INFORMATION_SCHEMA.`COLUMNS` + WHERE + TABLE_SCHEMA not in ('INFORMATION_SCHEMA', 'information_schema', 'sys') + and TABLE_SCHEMA not like '%.information_schema' + 
and TABLE_SCHEMA not like '%.INFORMATION_SCHEMA' + """ allowed_schemas = self.configuration.get('allowed_schemas') if allowed_schemas: diff --git a/redash/query_runner/google_spreadsheets.py b/redash/query_runner/google_spreadsheets.py index 908230ee97..5b144f4459 100644 --- a/redash/query_runner/google_spreadsheets.py +++ b/redash/query_runner/google_spreadsheets.py @@ -146,7 +146,7 @@ def __init__(self, configuration): @classmethod def annotate_query(cls): return False - + @classmethod def name(cls): return "Google Sheets" diff --git a/redash/query_runner/graphite.py b/redash/query_runner/graphite.py index 4e594c8b05..1fb5ec1503 100644 --- a/redash/query_runner/graphite.py +++ b/redash/query_runner/graphite.py @@ -88,4 +88,5 @@ def run_query(self, query, user): return data, error + register(Graphite) diff --git a/redash/query_runner/hive_ds.py b/redash/query_runner/hive_ds.py index b3c78bf431..2107d0d0b9 100644 --- a/redash/query_runner/hive_ds.py +++ b/redash/query_runner/hive_ds.py @@ -98,14 +98,13 @@ def _get_connection(self): database=self.configuration.get('database', 'default'), username=self.configuration.get('username', None), ) - - return connection + return connection def run_query(self, query, user): connection = None try: - connection = self._get_connection() + connection = self._get_connection() cursor = connection.cursor() cursor.execute(query) @@ -214,7 +213,7 @@ def _get_connection(self): # create connection connection = hive.connect(thrift_transport=transport) - + return connection diff --git a/redash/query_runner/impala_ds.py b/redash/query_runner/impala_ds.py index 5b8b590777..90b63fb3d1 100644 --- a/redash/query_runner/impala_ds.py +++ b/redash/query_runner/impala_ds.py @@ -135,4 +135,5 @@ def run_query(self, query, user): return json_data, error + register(Impala) diff --git a/redash/query_runner/jql.py b/redash/query_runner/jql.py index de10508ad5..d24ee0b9f8 100644 --- a/redash/query_runner/jql.py +++ b/redash/query_runner/jql.py @@ -27,6 
+27,7 @@ def to_json(self): def merge(self, set): self.rows = self.rows + set.rows + def parse_issue(issue, field_mapping): result = OrderedDict() result['key'] = issue['key'] @@ -117,20 +118,20 @@ def __init__(cls, query_field_mapping): 'output_field_name': v }) - def get_output_field_name(cls,field_name): + def get_output_field_name(cls, field_name): for item in cls.mapping: if item['field_name'] == field_name and not item['member_name']: return item['output_field_name'] return field_name - def get_dict_members(cls,field_name): + def get_dict_members(cls, field_name): member_names = [] for item in cls.mapping: if item['field_name'] == field_name and item['member_name']: member_names.append(item['member_name']) return member_names - def get_dict_output_field_name(cls,field_name, member_name): + def get_dict_output_field_name(cls, field_name, member_name): for item in cls.mapping: if item['field_name'] == field_name and item['member_name'] == member_name: return item['output_field_name'] @@ -199,4 +200,5 @@ def run_query(self, query, user): except KeyboardInterrupt: return None, "Query cancelled by user." 
+ register(JiraJQL) diff --git a/redash/query_runner/mapd.py b/redash/query_runner/mapd.py index 84ed2e480a..d116efa456 100644 --- a/redash/query_runner/mapd.py +++ b/redash/query_runner/mapd.py @@ -112,4 +112,5 @@ def test_connection(self): finally: connection.close + register(Mapd) diff --git a/redash/query_runner/mongodb.py b/redash/query_runner/mongodb.py index 44511647d8..c6fbdc9760 100644 --- a/redash/query_runner/mongodb.py +++ b/redash/query_runner/mongodb.py @@ -220,7 +220,6 @@ def get_schema(self, get_stats=False): return schema.values() - def run_query(self, query, user): db = self._get_db() @@ -301,12 +300,12 @@ def run_query(self, query, user): if "count" in query_data: columns.append({ - "name" : "count", - "friendly_name" : "count", - "type" : TYPE_INTEGER + "name": "count", + "friendly_name": "count", + "type": TYPE_INTEGER }) - rows.append({ "count" : cursor }) + rows.append({"count": cursor}) else: rows, columns = parse_results(cursor) @@ -332,4 +331,5 @@ def run_query(self, query, user): return json_data, error + register(MongoDB) diff --git a/redash/query_runner/mssql.py b/redash/query_runner/mssql.py index 007aa825b6..c4b4fea1e0 100644 --- a/redash/query_runner/mssql.py +++ b/redash/query_runner/mssql.py @@ -168,4 +168,5 @@ def run_query(self, query, user): return json_data, error + register(SqlServer) diff --git a/redash/query_runner/mssql_odbc.py b/redash/query_runner/mssql_odbc.py index 9be2278f22..a729e037c7 100644 --- a/redash/query_runner/mssql_odbc.py +++ b/redash/query_runner/mssql_odbc.py @@ -157,4 +157,5 @@ def run_query(self, query, user): return json_data, error + register(SQLServerODBC) diff --git a/redash/query_runner/oracle.py b/redash/query_runner/oracle.py index eff9250042..10795dcbee 100644 --- a/redash/query_runner/oracle.py +++ b/redash/query_runner/oracle.py @@ -22,13 +22,13 @@ cx_Oracle.TIMESTAMP: TYPE_DATETIME, } - ENABLED = True except ImportError: ENABLED = False logger = logging.getLogger(__name__) + class 
Oracle(BaseSQLQueryRunner): noop_query = "SELECT 1 FROM dual" @@ -165,4 +165,5 @@ def run_query(self, query, user): return json_data, error + register(Oracle) diff --git a/redash/query_runner/pg.py b/redash/query_runner/pg.py index 5524375de6..df5dacfba1 100644 --- a/redash/query_runner/pg.py +++ b/redash/query_runner/pg.py @@ -305,6 +305,7 @@ class CockroachDB(PostgreSQL): def type(cls): return "cockroach" + register(PostgreSQL) register(Redshift) register(CockroachDB) diff --git a/redash/query_runner/phoenix.py b/redash/query_runner/phoenix.py index 06f2bf66ae..1c36aaf571 100644 --- a/redash/query_runner/phoenix.py +++ b/redash/query_runner/phoenix.py @@ -39,6 +39,7 @@ 'DECIMAL': TYPE_FLOAT } + class Phoenix(BaseQueryRunner): noop_query = 'select 1' @@ -118,4 +119,5 @@ def run_query(self, query, user): return json_data, error + register(Phoenix) diff --git a/redash/query_runner/python.py b/redash/query_runner/python.py index f6cc2fbcd9..36209cd0ea 100644 --- a/redash/query_runner/python.py +++ b/redash/query_runner/python.py @@ -259,7 +259,6 @@ def run_query(self, query, user): restricted_globals["TYPE_DATE"] = TYPE_DATE restricted_globals["TYPE_FLOAT"] = TYPE_FLOAT - # TODO: Figure out the best way to have a timeout on a script # One option is to use ETA with Celery + timeouts on workers # And replacement of worker process every X requests handled. 
diff --git a/redash/query_runner/qubole.py b/redash/query_runner/qubole.py index 7efc847d77..d62260cd5f 100644 --- a/redash/query_runner/qubole.py +++ b/redash/query_runner/qubole.py @@ -129,4 +129,5 @@ def _get_header(self): return {"Content-type": "application/json", "Accept": "application/json", "X-AUTH-TOKEN": self.configuration['token']} + register(Qubole) diff --git a/redash/query_runner/salesforce.py b/redash/query_runner/salesforce.py index 527f1e26ec..b1187bef58 100644 --- a/redash/query_runner/salesforce.py +++ b/redash/query_runner/salesforce.py @@ -187,4 +187,5 @@ def get_schema(self, get_stats=False): schema[table_name] = {'name': table_name, 'columns': [f['name'] for f in fields]} return schema.values() + register(Salesforce) diff --git a/redash/query_runner/sqlite.py b/redash/query_runner/sqlite.py index c1933d81e6..c06cc024fe 100644 --- a/redash/query_runner/sqlite.py +++ b/redash/query_runner/sqlite.py @@ -91,4 +91,5 @@ def run_query(self, query, user): connection.close() return json_data, error + register(Sqlite) diff --git a/redash/query_runner/treasuredata.py b/redash/query_runner/treasuredata.py index 5e3673ed78..320f4e3457 100644 --- a/redash/query_runner/treasuredata.py +++ b/redash/query_runner/treasuredata.py @@ -60,7 +60,7 @@ def configuration_schema(cls): 'default': False } }, - 'required': ['apikey','db'] + 'required': ['apikey', 'db'] } @classmethod @@ -116,4 +116,5 @@ def run_query(self, query, user): error = "%s: %s" % (e.message, cursor.show_job().get('debug', {}).get('stderr', 'No stderr message in the response')) return json_data, error + register(TreasureData) diff --git a/redash/query_runner/vertica.py b/redash/query_runner/vertica.py index 92ab864c1a..08f4d616e1 100644 --- a/redash/query_runner/vertica.py +++ b/redash/query_runner/vertica.py @@ -117,7 +117,7 @@ def run_query(self, query, user): 'database': self.configuration.get('database', ''), 'read_timeout': self.configuration.get('read_timeout', 600) } - + if 
self.configuration.get('connection_timeout'): conn_info['connection_timeout'] = self.configuration.get('connection_timeout') @@ -152,4 +152,5 @@ def run_query(self, query, user): return json_data, error + register(Vertica) diff --git a/redash/serializers/__init__.py b/redash/serializers/__init__.py index 9223aaaac4..0dc8b6a9f9 100644 --- a/redash/serializers/__init__.py +++ b/redash/serializers/__init__.py @@ -199,7 +199,7 @@ def serialize_dashboard(obj, with_widgets=False, user=None, with_favorite_state= widgets.append(serialize_widget(w)) else: widget = project(serialize_widget(w), - ('id', 'width', 'dashboard_id', 'options', 'created_at', 'updated_at')) + ('id', 'width', 'dashboard_id', 'options', 'created_at', 'updated_at')) widget['restricted'] = True widgets.append(widget) else: diff --git a/redash/serializers/query_result.py b/redash/serializers/query_result.py index 32eef9789f..6432795c66 100644 --- a/redash/serializers/query_result.py +++ b/redash/serializers/query_result.py @@ -53,7 +53,7 @@ def _get_column_lists(columns): for col_type in special_types.keys(): if col['type'] == col_type: special_columns[col['name']] = special_types[col_type] - + return fieldnames, special_columns diff --git a/redash/worker.py b/redash/worker.py index 292610d2ef..e960c34fd7 100644 --- a/redash/worker.py +++ b/redash/worker.py @@ -68,8 +68,8 @@ worker_log_format=settings.CELERYD_WORKER_LOG_FORMAT, worker_task_log_format=settings.CELERYD_WORKER_TASK_LOG_FORMAT, worker_prefetch_multiplier=settings.CELERY_WORKER_PREFETCH_MULTIPLIER, - accept_content=settings.CELERY_ACCEPT_CONTENT, - task_serializer=settings.CELERY_TASK_SERIALIZER, + accept_content=settings.CELERY_ACCEPT_CONTENT, + task_serializer=settings.CELERY_TASK_SERIALIZER, result_serializer=settings.CELERY_RESULT_SERIALIZER) # Create a new Task base class, that pushes a new Flask app context to allow DB connections if needed.