Try fixing test_purge_db() #614

Merged
merged 2 commits on Nov 20, 2024
11 changes: 9 additions & 2 deletions kcidb/db/bigquery/v04_00.py
@@ -79,7 +79,8 @@ def __init__(self, params):
         except GoogleNotFound as exc:
             raise NotFound(params) from exc
 
-    def query_create(self, query_string, query_parameters=None):
+    def query_create(self, query_string, query_parameters=None,
+                     use_query_cache=True):
         """
         Creates a Query job configured for a given query string and
         optional parameters. BigQuery can run the job to query the database.
@@ -89,6 +90,8 @@ def query_create(self, query_string, query_parameters=None):
             query_parameters: A list containing the optional query parameters
                               (google.cloud.bigquery.ArrayQueryParameter).
                               The default is an empty list.
+            use_query_cache: True if BigQuery query cache should be used,
+                             False otherwise.
 
         Returns:
             The Query job (google.cloud.bigquery.job.QueryJob)
@@ -97,7 +100,10 @@ def query_create(self, query_string, query_parameters=None):
             query_parameters = []
         LOGGER.debug("Query string: %s", query_string)
         LOGGER.debug("Query params: %s", query_parameters)
+        if not use_query_cache:
+            LOGGER.debug("Query cache: DISABLED")
         job_config = bigquery.job.QueryJobConfig(
+            use_query_cache=use_query_cache,
             query_parameters=query_parameters,
             default_dataset=self.dataset_ref)
         return self.client.query(query_string, job_config=job_config)
@@ -806,7 +812,8 @@ def dump_iter(self, objects_per_report, with_metadata, after, until):
                 bigquery.ScalarQueryParameter(None, ts_field.field_type, v)
                 for v in (table_after, table_until) if v
             ]
-            query_job = self.conn.query_create(query_string, query_parameters)
+            query_job = self.conn.query_create(query_string, query_parameters,
+                                               use_query_cache=False)
             obj_list = None
             for row in query_job:
                 if obj_list is None:
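For context, here is a minimal standalone sketch of what the new use_query_cache flag controls, outside of kcidb's Connection wrapper. The project, dataset, table, and parameter names below are hypothetical and only illustrate the google.cloud.bigquery calls the changed code relies on. Passing use_query_cache=False in the job configuration makes BigQuery re-execute the query instead of serving cached results, which is why dump_iter() above requests it: a dump taken right after a purge should reflect the deleted rows.

import datetime
from google.cloud import bigquery

# Hypothetical project and dataset, for illustration only.
client = bigquery.Client(project="example-project")
dataset_ref = bigquery.DatasetReference("example-project", "example_dataset")

# A named parameter is used here for readability; the kcidb code above
# uses positional (unnamed) parameters instead.
query_string = "SELECT id FROM checkouts WHERE _timestamp >= @after"
query_parameters = [
    bigquery.ScalarQueryParameter(
        "after", "TIMESTAMP",
        datetime.datetime(2024, 11, 20, tzinfo=datetime.timezone.utc)
    ),
]

# use_query_cache=False disables BigQuery's result cache for this job,
# so the results always reflect the table's current contents.
job_config = bigquery.job.QueryJobConfig(
    use_query_cache=False,
    query_parameters=query_parameters,
    default_dataset=dataset_ref,
)
query_job = client.query(query_string, job_config=job_config)
for row in query_job:
    print(row)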
5 changes: 3 additions & 2 deletions test_main.py
@@ -333,7 +333,8 @@ def filter_test_data(data):
         for obj_list_name in min_io_version.graph:
             if obj_list_name:
                 assert len(dump.get(obj_list_name, [])) == 2, \
-                    f"Invalid number of {obj_list_name}"
+                    f"Invalid number of {obj_list_name} in " \
+                    f"{database} database"
 
         # Trigger the purge at the boundary
         publisher.publish(
@@ -355,7 +356,7 @@ def filter_test_data(data):
             break
         assert dump == client.get_schema()[1].upgrade(
             data_after if purging else data
-        )
+        ), f"Unexpected data in {database} database"
 
 
 def test_archive(empty_deployment):
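A small aside on the test_main.py change: the new assertion messages rely on f-string interpolation to report which database failed the check, so both string literals need the f prefix for {database} to be substituted. A rough, self-contained illustration of the pattern, with made-up values standing in for the test's real fixtures:

# Hypothetical stand-ins for the test's database name and dump contents.
database = "bigquery:example_dataset"
dump = {"checkouts": [{}, {}], "builds": [{}, {}], "tests": [{}, {}]}

for obj_list_name in ("checkouts", "builds", "tests"):
    # The f prefix makes {obj_list_name} and {database} interpolate;
    # without it the braces would appear literally in the failure message.
    assert len(dump.get(obj_list_name, [])) == 2, \
        f"Invalid number of {obj_list_name} in {database} database"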