
Commit

Updated clear_datastore.py to reflect new API.
dhermes committed Jan 7, 2015
1 parent 73c5202 commit a9baae5
Showing 1 changed file with 37 additions and 32 deletions.
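In a nutshell, the old script fetched an explicit dataset object and threaded it through every helper, while the new gcloud API configures module-level defaults once and lets queries and the connection pick up the dataset implicitly. A rough before/after sketch of the pattern, using only calls that appear in the diff below ('Character' is simply one of the script's test kinds):

    # Old style: an explicit dataset object is passed around by hand.
    from gcloud.datastore.query import Query
    from regression import regression_utils

    dataset = regression_utils.get_dataset()
    query = Query(dataset_id=dataset.id(), kind='Character',
                  projection=['__key__'])

    # New style: defaults are derived once from the environment; Query
    # and the connection no longer need a dataset argument.
    from gcloud import datastore

    datastore._DATASET_ENV_VAR_NAME = 'GCLOUD_TESTS_DATASET_ID'
    datastore.set_default_dataset_id()
    datastore.set_default_connection()
    query = Query(kind='Character', projection=['__key__'])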
69 changes: 37 additions & 32 deletions regression/clear_datastore.py
@@ -15,13 +15,18 @@
 """Script to populate datastore with regression test data."""
 
 
-# This assumes the command is being run via tox hence the
-# repository root is the current directory.
 from gcloud import datastore
+from gcloud.datastore import _implicit_environ
 from gcloud.datastore.query import Query
-from regression import regression_utils
+from gcloud.datastore.transaction import Transaction
 from six.moves import input
 
+
+datastore._DATASET_ENV_VAR_NAME = 'GCLOUD_TESTS_DATASET_ID'
+datastore.set_default_dataset_id()
+datastore.set_default_connection()
+
+
 FETCH_MAX = 20
 ALL_KINDS = [
     'Character',
@@ -33,10 +38,9 @@
 TRANSACTION_MAX_GROUPS = 5
 
 
-def fetch_keys(dataset, kind, fetch_max=FETCH_MAX, query=None, cursor=None):
+def fetch_keys(kind, fetch_max=FETCH_MAX, query=None, cursor=None):
     if query is None:
-        query = Query(
-            dataset_id=dataset.id(), kind=kind, projection=['__key__'])
+        query = Query(kind=kind, projection=['__key__'])
 
     iterator = query.fetch(limit=fetch_max, start_cursor=cursor)
 
@@ -46,37 +50,39 @@ def fetch_keys(dataset, kind, fetch_max=FETCH_MAX, query=None, cursor=None):
 
 
 def get_ancestors(entities):
     # NOTE: A key will always have at least one path element.
-    key_roots = [entity.key().path[0] for entity in entities]
-    # Turn into hashable type so we can use set to get unique roots.
-    # Also sorted the items() to ensure uniqueness.
-    key_roots = [tuple(sorted(root.items())) for root in key_roots]
-    # Cast back to dictionary.
-    return [dict(root) for root in set(key_roots)]
+    key_roots = [entity.key.flat_path[:2] for entity in entities]
+    # Return the unique roots.
+    return list(set(key_roots))
 
 
-def delete_entities(dataset, entities):
-    dataset_id = dataset.id()
-    connection = dataset.connection()
+def delete_entities(entities):
     if not entities:
         return
 
-    key_pbs = [entity.key().to_protobuf() for entity in entities]
-    connection.delete_entities(dataset_id, key_pbs)
+    dataset_ids = set(entity.key.dataset_id for entity in entities)
+    if len(dataset_ids) != 1:
+        raise ValueError('Expected a unique dataset ID.')
+
+    dataset_id = dataset_ids.pop()
+    key_pbs = [entity.key.to_protobuf() for entity in entities]
+    _implicit_environ.CONNECTION.delete_entities(dataset_id, key_pbs)
 
 
-def remove_kind(dataset, kind):
-    delete_outside_transaction = False
-    with dataset.transaction():
-        results = []
-
-        query, curr_results, cursor = fetch_keys(dataset, kind)
+def remove_kind(kind):
+    results = []
+
+    query, curr_results, cursor = fetch_keys(kind)
+    results.extend(curr_results)
+    while curr_results:
+        query, curr_results, cursor = fetch_keys(kind, query=query,
+                                                 cursor=cursor)
         results.extend(curr_results)
-        while curr_results:
-            query, curr_results, cursor = fetch_keys(
-                dataset, kind, query=query, cursor=cursor)
-            results.extend(curr_results)
 
-        if not results:
-            return
+    if not results:
+        return
 
+    delete_outside_transaction = False
+    with Transaction():
         # Now that we have all results, we seek to delete.
         print('Deleting keys:')
         print(results)
@@ -85,10 +91,10 @@ def remove_kind(dataset, kind):
         if len(ancestors) > TRANSACTION_MAX_GROUPS:
            delete_outside_transaction = True
         else:
-            delete_entities(dataset, results)
+            delete_entities(results)
 
     if delete_outside_transaction:
-        delete_entities(dataset, results)
+        delete_entities(results)
@@ -99,9 +105,8 @@ def remove_all_entities():
         print('Doing nothing.')
         return
 
-    dataset = regression_utils.get_dataset()
     for kind in ALL_KINDS:
-        remove_kind(dataset, kind)
+        remove_kind(kind)
 
 
 if __name__ == '__main__':
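A side note on the simplified get_ancestors(): remove_kind() still counts distinct key roots because a single Datastore transaction may only touch a handful of entity groups, which is evidently why the script caps itself at TRANSACTION_MAX_GROUPS = 5 and keeps the delete_outside_transaction fallback. A minimal sketch, with hypothetical keys, of why the new version can lean on set(): assuming key.flat_path is a flat tuple of alternating kinds and IDs/names, its first two items name the entity group's root, and tuples are hashable, so set() deduplicates them directly; the old code had to convert dict path elements into sorted item-tuples by hand first.

    # Hypothetical flat_path values for three keys in two entity groups.
    flat_paths = [
        ('Character', 'Eddard', 'Post', 1),
        ('Character', 'Eddard', 'Post', 2),
        ('Character', 'Catelyn'),
    ]
    # The first two items of each flat_path identify the root.
    key_roots = [path[:2] for path in flat_paths]
    assert sorted(set(key_roots)) == [
        ('Character', 'Catelyn'), ('Character', 'Eddard')]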
