From dad92883afe13262529a757e003fe81ba1aa0b07 Mon Sep 17 00:00:00 2001 From: necnec Date: Mon, 28 Nov 2016 12:37:31 +0300 Subject: [PATCH] Change parameter to kwargs --- pandas/io/gbq.py | 26 ++++++++++++-------------- 1 file changed, 12 insertions(+), 14 deletions(-) diff --git a/pandas/io/gbq.py b/pandas/io/gbq.py index 94d291d408cd0..fd9c7a6244481 100644 --- a/pandas/io/gbq.py +++ b/pandas/io/gbq.py @@ -375,7 +375,7 @@ def process_insert_errors(self, insert_errors): raise StreamingInsertError - def run_query(self, query, udf_resource_uri=None): + def run_query(self, query, **kwargs): try: from googleapiclient.errors import HttpError except: @@ -395,13 +395,9 @@ def run_query(self, query, udf_resource_uri=None): } } } - - if udf_resource_uri is not None: - if not isinstance(udf_resource_uri, list): - udf_resource_uri = [udf_resource_uri] - - job_data['configuration']['query']['userDefinedFunctionResources'] = \ - [{'resourceUri': uri} for uri in udf_resource_uri] + query_config = kwargs.get('query_config') + if query_config is not None: + job_data['configuration']['query'].update(query_config) self._start_timer() @@ -629,8 +625,9 @@ def _parse_entry(field_value, field_type): return field_value -def read_gbq(query, project_id=None, index_col=None, col_order=None, - reauth=False, verbose=True, private_key=None, dialect='legacy', udf_resource_uri=None): +def read_gbq(query, project_id=None, index_col=None, col_order=None, + reauth=False, verbose=True, private_key=None, dialect='legacy', + **kwargs): """Load data from Google BigQuery. THIS IS AN EXPERIMENTAL LIBRARY @@ -690,9 +687,10 @@ def read_gbq(query, project_id=None, index_col=None, col_order=None, .. versionadded:: 0.19.0 - udf_resource_uri : list(str) or str (optional) - A code resource to load from a Google Cloud Storage URI. - Describes user-defined function resources used in the query. + **kwargs: Arbitrary keyword arguments + query_config (dict): query configuration parameters for job processing. 
+ For more information see `BigQuery SQL Reference + <https://cloud.google.com/bigquery/sql-reference/>`__ .. versionadded:: 0.19.0 @@ -712,7 +710,7 @@ def read_gbq(query, project_id=None, index_col=None, col_order=None, connector = GbqConnector(project_id, reauth=reauth, verbose=verbose, private_key=private_key, dialect=dialect) - schema, pages = connector.run_query(query, udf_resource_uri) + schema, pages = connector.run_query(query, **kwargs) dataframe_list = [] while len(pages) > 0: page = pages.pop()