refactor: Pull out CLI argument names
l0b0 committed Jun 13, 2022
1 parent 8334e18 commit 9987a37
Showing 2 changed files with 59 additions and 29 deletions.
geostore/cli.py: 22 additions & 7 deletions
@@ -29,6 +29,16 @@
 )
 from .types import JsonList, JsonObject
 
+DATASET_ID_ARGUMENT = "--dataset-id"
+DESCRIPTION_ARGUMENT = "--description"
+ENVIRONMENT_NAME_ARGUMENT = "--environment-name"
+EXECUTION_ARN_ARGUMENT = "--execution-arn"
+ID_ARGUMENT = "--id"
+METADATA_URL_ARGUMENT = "--metadata-url"
+S3_ROLE_ARN_ARGUMENT = "--s3-role-arn"
+TITLE_ARGUMENT = "--title"
+VERSION_FLAG = "--version"
+
 DATASET_ID_HELP = "Dataset ID, as printed when running `geostore dataset create`."
 
 HTTP_METHOD_CREATE = "POST"
@@ -68,11 +78,12 @@ def print_version(value: bool) -> None:
 def main(
     environment_name: Optional[str] = Option(
         None,
+        ENVIRONMENT_NAME_ARGUMENT,
         help="Set environment name, such as 'test'."
         f" Overrides the value of ${ENV_NAME_VARIABLE_NAME}."
         f" [default: {PRODUCTION_ENVIRONMENT_NAME}]",
     ),
-    __version: Optional[bool] = Option(None, "--version", callback=print_version, is_eager=True),
+    __version: Optional[bool] = Option(None, VERSION_FLAG, callback=print_version, is_eager=True),
 ) -> None:
     if environment_name:
         environ[ENV_NAME_VARIABLE_NAME] = environment_name
@@ -82,8 +93,8 @@ def main(
 
 @dataset_app.command(name="create", help="Create a new dataset.")
 def dataset_create(
-    title: str = Option(..., help=f"Allowed characters: '{TITLE_CHARACTERS}'."),
-    description: str = Option(...),
+    title: str = Option(..., TITLE_ARGUMENT, help=f"Allowed characters: '{TITLE_CHARACTERS}'."),
+    description: str = Option(..., DESCRIPTION_ARGUMENT),
 ) -> None:
     request_object = {
         HTTP_METHOD_KEY: HTTP_METHOD_CREATE,
@@ -102,7 +113,7 @@ def get_output(response_body: JsonObject) -> str:
 
 
 @dataset_app.command(name="list", help="List datasets.")
-def dataset_list(*, id_: Optional[str] = Option(None, "--id", help=DATASET_ID_HELP)) -> None:
+def dataset_list(*, id_: Optional[str] = Option(None, ID_ARGUMENT, help=DATASET_ID_HELP)) -> None:
     body = {}
     get_output: GetOutputFunctionType
 
@@ -134,7 +145,7 @@ def get_single_output(response_body: JsonObject) -> str:
 
 
 @dataset_app.command(name="delete", help="Delete a dataset.")
-def dataset_delete(id_: str = Option(..., "--id", help=DATASET_ID_HELP)) -> None:
+def dataset_delete(id_: str = Option(..., ID_ARGUMENT, help=DATASET_ID_HELP)) -> None:
     handle_api_request(
         Resource.DATASETS_ENDPOINT_FUNCTION_NAME.resource_name,
         {HTTP_METHOD_KEY: "DELETE", BODY_KEY: {DATASET_ID_SHORT_KEY: id_}},
@@ -144,14 +155,16 @@ def dataset_delete(id_: str = Option(..., "--id", help=DATASET_ID_HELP)) -> None
 
 @dataset_version_app.command(name="create", help="Create a dataset version.")
 def dataset_version_create(
-    dataset_id: str = Option(..., help=DATASET_ID_HELP),
+    dataset_id: str = Option(..., DATASET_ID_ARGUMENT, help=DATASET_ID_HELP),
     metadata_url: str = Option(
         ...,
+        METADATA_URL_ARGUMENT,
        help="S3 URL to the top level metadata file,"
         " for example 's3://my-bucket/my-dataset/collection.json'.",
     ),
     s3_role_arn: str = Option(
         ...,
+        S3_ROLE_ARN_ARGUMENT,
        help="ARN of the role which the Geostore should assume to read your dataset,"
         " for example 'arn:aws:iam::1234567890:role/s3-reader'.",
     ),
@@ -176,7 +189,9 @@ def get_output(response_body: JsonObject) -> str:
 @dataset_version_app.command(name="status", help="Get status of dataset version creation.")
 def dataset_version_status(
     execution_arn: str = Option(
-        ..., help="Execution ARN, as printed when running `geostore version create`."
+        ...,
+        EXECUTION_ARN_ARGUMENT,
+        help="Execution ARN, as printed when running `geostore version create`.",
     )
 ) -> None:
     def get_output(response_body: JsonObject) -> str:
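The cli.py change follows one pattern throughout: each flag's spelling lives in a single module-level constant, and Option() receives that constant as an explicit parameter declaration instead of a repeated string literal. A minimal, self-contained sketch of the same idea (a hypothetical two-command CLI; only the constant names are taken from the commit, the commands and help text are assumptions):

    # sketch_cli.py - illustrative only; the constant names mirror the commit, everything else is assumed
    import typer

    DATASET_ID_ARGUMENT = "--dataset-id"  # single source of truth for each flag's spelling
    TITLE_ARGUMENT = "--title"

    app = typer.Typer()


    @app.command(help="Create a new dataset.")
    def create(title: str = typer.Option(..., TITLE_ARGUMENT, help="Dataset title.")) -> None:
        typer.echo(f"Creating dataset {title!r}")


    @app.command(help="Delete a dataset.")
    def delete(dataset_id: str = typer.Option(..., DATASET_ID_ARGUMENT, help="Dataset ID.")) -> None:
        typer.echo(f"Deleting dataset {dataset_id!r}")


    if __name__ == "__main__":
        app()

With this layout, renaming a flag is a one-line change to the constant, and every Option() declaration plus every importer of the constant picks it up automatically.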
tests/test_cli.py: 37 additions & 22 deletions
@@ -14,7 +14,18 @@
 from typer.testing import CliRunner
 
 from geostore.aws_keys import AWS_DEFAULT_REGION_KEY, BODY_KEY, STATUS_CODE_KEY
-from geostore.cli import app
+from geostore.cli import (
+    DATASET_ID_ARGUMENT,
+    DESCRIPTION_ARGUMENT,
+    ENVIRONMENT_NAME_ARGUMENT,
+    EXECUTION_ARN_ARGUMENT,
+    ID_ARGUMENT,
+    METADATA_URL_ARGUMENT,
+    S3_ROLE_ARN_ARGUMENT,
+    TITLE_ARGUMENT,
+    VERSION_FLAG,
+    app,
+)
 from geostore.dataset_properties import DATASET_KEY_SEPARATOR
 from geostore.environment import ENV_NAME_VARIABLE_NAME
 from geostore.resources import Resource
@@ -75,8 +86,8 @@ def should_create_dataset() -> None:
         [
             "dataset",
             "create",
-            f"--title={dataset_title}",
-            f"--description={any_dataset_description()}",
+            f"{TITLE_ARGUMENT}={dataset_title}",
+            f"{DESCRIPTION_ARGUMENT}={any_dataset_description()}",
         ],
     )
 
@@ -93,8 +104,8 @@ def should_report_duplicate_dataset_title(subtests: SubTests) -> None:
         [
             "dataset",
             "create",
-            f"--title={dataset_title}",
-            f"--description={any_dataset_description()}",
+            f"{TITLE_ARGUMENT}={dataset_title}",
+            f"{DESCRIPTION_ARGUMENT}={any_dataset_description()}",
         ],
     )
     assert first_result.exit_code == 0, first_result
@@ -104,8 +115,8 @@ def should_report_duplicate_dataset_title(subtests: SubTests) -> None:
         [
             "dataset",
             "create",
-            f"--title={dataset_title}",
-            f"--description={any_dataset_description()}",
+            f"{TITLE_ARGUMENT}={dataset_title}",
+            f"{DESCRIPTION_ARGUMENT}={any_dataset_description()}",
         ],
     )
 
@@ -143,8 +154,8 @@ def should_report_dataset_creation_success(
         [
             "dataset",
             "create",
-            f"--title={any_dataset_title()}",
-            f"--description={any_dataset_description()}",
+            f"{TITLE_ARGUMENT}={any_dataset_title()}",
+            f"{DESCRIPTION_ARGUMENT}={any_dataset_description()}",
         ],
     )
 
@@ -262,7 +273,9 @@ def should_filter_datasets_listing(subtests: SubTests) -> None:
     # Given two datasets
     with Dataset() as first_dataset, Dataset():
         # When
-        result = CLI_RUNNER.invoke(app, ["dataset", "list", f"--id={first_dataset.dataset_id}"])
+        result = CLI_RUNNER.invoke(
+            app, ["dataset", "list", f"{ID_ARGUMENT}={first_dataset.dataset_id}"]
+        )
 
         # Then
         with subtests.test(msg="should print dataset to standard output"):
@@ -283,7 +296,9 @@ def should_delete_dataset(subtests: SubTests) -> None:
     # Given
     with Dataset() as dataset:
         # When
-        result = CLI_RUNNER.invoke(app, ["dataset", "delete", f"--id={dataset.dataset_id}"])
+        result = CLI_RUNNER.invoke(
+            app, ["dataset", "delete", f"{ID_ARGUMENT}={dataset.dataset_id}"]
+        )
 
         # Then
         with subtests.test(msg="should print nothing to standard output"):
@@ -305,9 +320,9 @@ def should_create_dataset_version(subtests: SubTests) -> None:
             [
                 "version",
                 "create",
-                f"--dataset-id={dataset.dataset_id}",
-                f"--metadata-url={any_s3_url()}",
-                f"--s3-role-arn={any_role_arn()}",
+                f"{DATASET_ID_ARGUMENT}={dataset.dataset_id}",
+                f"{METADATA_URL_ARGUMENT}={any_s3_url()}",
+                f"{S3_ROLE_ARN_ARGUMENT}={any_role_arn()}",
             ],
         )
 
@@ -356,7 +371,7 @@ def should_print_version_import_status_verbatim(
     )
 
     result = CLI_RUNNER.invoke(
-        app, ["version", "status", f"--execution-arn={any_arn_formatted_string()}"]
+        app, ["version", "status", f"{EXECUTION_ARN_ARGUMENT}={any_arn_formatted_string()}"]
     )
 
     with subtests.test(msg="should print JSON response body to standard output"):
@@ -374,6 +389,7 @@ def should_get_version_import_status(subtests: SubTests) -> None:
     asset_filename = any_safe_filename()
     asset_content = any_file_contents()
     asset_multihash = sha256_hex_digest_to_multihash(sha256(asset_content).hexdigest())
+    s3_role_arn = f"arn:aws:iam::{ACCOUNT_NUMBER}:role/{Resource.API_USERS_ROLE_NAME.resource_name}"
     with Dataset() as dataset, S3Object(
         file_object=BytesIO(),
         bucket_name=Resource.STAGING_BUCKET_NAME.resource_name,
@@ -398,16 +414,15 @@ def should_get_version_import_status(subtests: SubTests) -> None:
             [
                 "version",
                 "create",
-                f"--dataset-id={dataset.dataset_id}",
-                f"--metadata-url={collection_object.url}",
-                "--s3-role-arn="
-                f"arn:aws:iam::{ACCOUNT_NUMBER}:role/{Resource.API_USERS_ROLE_NAME.resource_name}",
+                f"{DATASET_ID_ARGUMENT}={dataset.dataset_id}",
+                f"{METADATA_URL_ARGUMENT}={collection_object.url}",
+                f"{S3_ROLE_ARN_ARGUMENT}={s3_role_arn}",
             ],
         )
         execution_arn = version_create_result.stdout.split("\t", maxsplit=1)[1].rstrip()
 
         status_result = CLI_RUNNER.invoke(
-            app, ["version", "status", f"--execution-arn={execution_arn}"]
+            app, ["version", "status", f"{EXECUTION_ARN_ARGUMENT}={execution_arn}"]
         )
 
         with subtests.test(msg="should print JSON response body to standard output"):
@@ -434,7 +449,7 @@ def should_call_given_environment_function(
     with patch.dict(environ, {ENV_NAME_VARIABLE_NAME: environment_name}):
         # When
         result = CLI_RUNNER.invoke(
-            app, [f"--environment-name={environment_name}", "dataset", "list"]
+            app, [f"{ENVIRONMENT_NAME_ARGUMENT}={environment_name}", "dataset", "list"]
         )
 
         # Then
@@ -477,7 +492,7 @@ def should_default_to_production_environment(
 
 def should_print_version_information(subtests: SubTests) -> None:
     # When
-    result = CLI_RUNNER.invoke(app, ["--version"])
+    result = CLI_RUNNER.invoke(app, [VERSION_FLAG])
 
     # Then
     with subtests.test(msg="should print version number to standard output"):
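The test changes are the payoff: the tests now build their command lines from the same constants they import from geostore.cli, so a future flag rename cannot leave the tests invoking stale strings. A hedged sketch of that usage with Typer's CliRunner, continuing the hypothetical sketch_cli module from above rather than the Geostore test suite itself:

    # test_sketch_cli.py - illustrative only; sketch_cli and its commands are assumptions
    from typer.testing import CliRunner

    from sketch_cli import DATASET_ID_ARGUMENT, TITLE_ARGUMENT, app

    CLI_RUNNER = CliRunner()


    def test_create_builds_argv_from_shared_constant() -> None:
        # "--title=..." is assembled from the constant, never hard-coded in the test
        result = CLI_RUNNER.invoke(app, ["create", f"{TITLE_ARGUMENT}=My dataset"])
        assert result.exit_code == 0, result.output


    def test_delete_builds_argv_from_shared_constant() -> None:
        result = CLI_RUNNER.invoke(app, ["delete", f"{DATASET_ID_ARGUMENT}=1234"])
        assert result.exit_code == 0, result.output

If a flag's spelling ever changes, the CLI and these tests change together through the one constant, and a typo in it fails loudly in every test that uses it.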
