Keep column order in .csv
Bogdan Kyryliuk committed Mar 10, 2017
1 parent 740624b commit 0c63653
Showing 4 changed files with 6 additions and 6 deletions.
3 changes: 1 addition & 2 deletions superset/dataframe.py
@@ -31,7 +31,7 @@ def data(self):
         return self.__df.to_dict(orient='records')

     @property
-    def columns_dict(self):
+    def columns(self):
         """Provides metadata about columns for data visualization.

         :return: dict, with the fields name, type, is_date, is_dim and agg.
@@ -69,7 +69,6 @@ def columns_dict(self):
             if not column['agg']:
                 column.pop('agg', None)
             columns.append(column)
-
         return columns

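For context, the renamed property builds and returns a list of per-column metadata dicts (one per result column), which is why the empty-result default in superset/sql_lab.py just below changes from {} to []. A minimal sketch of that shape, with made-up values for the fields named in the docstring:

# Illustrative only: field names come from the docstring above
# (name, type, is_date, is_dim and agg); the concrete values are invented.
columns = [
    {'name': 'ds', 'type': 'datetime64[ns]', 'is_date': True, 'is_dim': False},
    {'name': 'gender', 'type': 'object', 'is_date': False, 'is_dim': True},
    {'name': 'num', 'type': 'int64', 'is_date': False, 'is_dim': False, 'agg': 'sum'},
]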
2 changes: 1 addition & 1 deletion superset/sql_lab.py
@@ -155,7 +155,7 @@ def handle_error(msg):
         'query_id': query.id,
         'status': query.status,
         'data': cdf.data if cdf.data else [],
-        'columns': cdf.columns_dict if cdf.columns_dict else {},
+        'columns': cdf.columns if cdf.columns else [],
         'query': query.to_dict(),
     }
     payload = json.dumps(payload, default=utils.json_iso_dttm_ser)
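Because the payload is serialized with json.dumps, the 'columns' entry travels as a JSON array, so its order survives the round-trip that the /csv endpoint below relies on. A minimal sketch with a made-up payload:

import json

# Made-up payload shaped like the one above; only 'columns' matters here.
payload = {
    'data': [{'name': 'SFO', 'cnt': 10}],
    'columns': [{'name': 'cnt'}, {'name': 'name'}],
}
obj = json.loads(json.dumps(payload))
# JSON arrays keep their order, so the original column order is recoverable.
assert [c['name'] for c in obj['columns']] == ['cnt', 'name']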
3 changes: 2 additions & 1 deletion superset/views/core.py
@@ -2057,7 +2057,8 @@ def csv(self, client_id):
         if blob:
             json_payload = zlib.decompress(blob)
             obj = json.loads(json_payload)
-            df = pd.DataFrame.from_records(obj['data'])
+            columns = [c['name'] for c in obj['columns']]
+            df = pd.DataFrame.from_records(obj['data'], columns=columns)
             csv = df.to_csv(index=False, encoding='utf-8')
         else:
             sql = query.select_sql or query.executed_sql
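This is the heart of the change: when from_records is called on a list of record dicts without an explicit column list, pandas derives the column order from the dict keys, which need not match the SELECT order after the JSON round-trip; passing the ordered names from obj['columns'] pins the CSV layout down. A runnable sketch with made-up data:

import pandas as pd

# Made-up records mirroring the decompressed payload above.
obj = {
    'data': [
        {'airport': 'SFO', 'cnt': 10},
        {'airport': 'JFK', 'cnt': 7},
    ],
    'columns': [{'name': 'cnt'}, {'name': 'airport'}],
}

columns = [c['name'] for c in obj['columns']]
df = pd.DataFrame.from_records(obj['data'], columns=columns)
print(df.to_csv(index=False))
# cnt,airport
# 10,SFO
# 7,JFK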
4 changes: 2 additions & 2 deletions tests/celery_tests.py
@@ -188,7 +188,7 @@ def test_run_sync_query_cta(self):
             db_id, sql_where, "2", tmp_table='tmp_table_2', cta='true')
         self.assertEqual(QueryStatus.SUCCESS, result2['query']['state'])
         self.assertEqual([], result2['data'])
-        self.assertEqual({}, result2['columns'])
+        self.assertEqual([], result2['columns'])
         query2 = self.get_query_by_id(result2['query']['serverId'])

         # Check the data in the tmp table.
@@ -204,7 +204,7 @@ def test_run_sync_query_cta_no_data(self):
             db_id, sql_empty_result, "3", tmp_table='tmp_table_3', cta='true')
         self.assertEqual(QueryStatus.SUCCESS, result3['query']['state'])
         self.assertEqual([], result3['data'])
-        self.assertEqual({}, result3['columns'])
+        self.assertEqual([], result3['columns'])

         query3 = self.get_query_by_id(result3['query']['serverId'])
         self.assertEqual(QueryStatus.SUCCESS, query3.status)
