Add Exception Handling to Generate Route (#911)
* validate credentials for connectors

* bigquery handling, failure tests

* changelog

* add missing ctl path to imports
SteveDMurphy authored Jul 25, 2022
1 parent cbe0899 commit de1e4f1
Showing 5 changed files with 122 additions and 3 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -56,6 +56,7 @@ The types of changes are:
* Update `fideslang` to `1.1.0`, simplifying the default taxonomy and adding `tags` for resources [#865](https://github.com/ethyca/fides/pull/865)
* Remove the `obscure` requirement from the `generate` endpoint [#819](https://github.com/ethyca/fides/pull/819)
* Merge existing configurations with `fideslib` library [#913](https://github.com/ethyca/fides/pull/913)
* Replicated the error response handling from the `/validate` endpoint to the `/generate` endpoint [#911](https://github.com/ethyca/fides/pull/911)
* Moved frontend static files to `src/ui-build/static` [#934](https://github.com/ethyca/fides/pull/934)

### Developer Experience
39 changes: 38 additions & 1 deletion src/fidesctl/api/ctl/routes/generate.py
@@ -22,11 +22,13 @@
AWSConfig,
BigQueryConfig,
ConnectorAuthFailureException,
ConnectorFailureException,
DatabaseConfig,
OktaConfig,
)
from fidesctl.ctl.core.dataset import generate_bigquery_datasets, generate_db_datasets
from fidesctl.ctl.core.system import generate_aws_systems, generate_okta_systems
from fidesctl.ctl.core.utils import validate_db_engine


class ValidTargets(str, Enum):
@@ -168,6 +170,17 @@ def generate_aws(
"""
Returns a list of Systems found in AWS.
"""
from fidesctl.ctl.connectors.aws import validate_credentials

log.info("Validating AWS credentials")
try:
validate_credentials(aws_config)
except ConnectorAuthFailureException as error:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=str(error),
)

log.info("Generating systems from AWS")
aws_systems = generate_aws_systems(organization=organization, aws_config=aws_config)

@@ -181,6 +194,16 @@ async def generate_okta(
"""
Returns a list of Systems found in Okta.
"""
from fidesctl.ctl.connectors.okta import validate_credentials

log.info("Validating Okta credentials")
try:
validate_credentials(okta_config)
except ConnectorAuthFailureException as error:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=str(error),
)
log.info("Generating systems from Okta")
okta_systems = await generate_okta_systems(
organization=organization, okta_config=okta_config
@@ -192,6 +215,14 @@ def generate_db(db_config: DatabaseConfig) -> List[Dict[str, str]]:
"""
Returns a list of datasets found in a database.
"""
log.info("Validating database credentials")
try:
validate_db_engine(db_config.connection_string)
except ConnectorAuthFailureException as error:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=str(error),
)
log.info("Generating datasets from database")
db_datasets = generate_db_datasets(connection_string=db_config.connection_string)

@@ -204,5 +235,11 @@ def generate_bigquery(bigquery_config: BigQueryConfig) -> List[Dict[str, str]]:
Returns a list of datasets found in a BigQuery dataset
"""
log.info("Generating datasets from BigQuery")
bigquery_datasets = generate_bigquery_datasets(bigquery_config)
try:
bigquery_datasets = generate_bigquery_datasets(bigquery_config)
except ConnectorFailureException as error:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=str(error),
)
return [i.dict(exclude_none=True) for i in bigquery_datasets]
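
With these changes, credential and connection failures in the `generate` route surface to API clients as an HTTP 401 whose `detail` field carries the connector's error message, rather than an unhandled 500. Below is a minimal client-side sketch of that behaviour; the server URL, API prefix, and auth header are placeholder assumptions, not values from this commit:

```python
import requests

# Placeholder deployment details; substitute your own server URL, prefix, and credentials.
SERVER_URL = "http://localhost:8080"
API_PREFIX = "/api/v1"  # assumed prefix; the test suite imports the real value
HEADERS = {"Authorization": "Bearer <access-token>"}

payload = {
    "organization_key": "default_organization",
    "generate": {
        # Deliberately invalid connection string to trigger the new 401 path.
        "config": {
            "connection_string": "postgresql+psycopg2://bad:bad@localhost:5432/missing_db"
        },
        "target": "db",
        "type": "datasets",
    },
}

response = requests.post(
    f"{SERVER_URL}{API_PREFIX}/generate/", headers=HEADERS, json=payload
)
if response.status_code == 401:
    # The connector's error message is passed through as the `detail` field.
    print("Credential validation failed:", response.json()["detail"])
```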
15 changes: 15 additions & 0 deletions src/fidesctl/ctl/core/utils.py
@@ -14,6 +14,9 @@
from fideslang.validation import FidesValidationError
from git.repo import Repo
from sqlalchemy.engine import Engine
from sqlalchemy.exc import SQLAlchemyError

from fidesctl.ctl.connectors.models import ConnectorAuthFailureException

logger = logging.getLogger("server_api")

@@ -36,6 +39,18 @@ def check_response(response: requests.Response) -> requests.Response:
return response


def validate_db_engine(connection_string: str) -> None:
"""
Use SQLAlchemy to create a DB engine and validate the connection by executing a trivial query.
"""
try:
engine = sqlalchemy.create_engine(connection_string)
with engine.begin() as connection:
connection.execute("SELECT 1")
except SQLAlchemyError as error:
raise ConnectorAuthFailureException(error)


def get_db_engine(connection_string: str) -> Engine:
"""
Use SQLAlchemy to create a DB engine.
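The new `validate_db_engine` helper wraps any `SQLAlchemyError` raised while connecting in a `ConnectorAuthFailureException`, which the route layer above translates into the 401 response. A minimal usage sketch, assuming an intentionally unreachable database:

```python
from fidesctl.ctl.connectors.models import ConnectorAuthFailureException
from fidesctl.ctl.core.utils import validate_db_engine

# Placeholder connection string pointing at a database that does not exist.
bad_url = "postgresql+psycopg2://postgres:postgres@localhost:5432/missing_db"

try:
    validate_db_engine(bad_url)
except ConnectorAuthFailureException as error:
    # The wrapped SQLAlchemy error explains the failure (bad credentials, missing database, etc.).
    print(f"Database validation failed: {error}")
```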
66 changes: 66 additions & 0 deletions tests/ctl/api/test_generate.py
@@ -31,6 +31,37 @@
},
}

EXTERNAL_FAILURE_CONFIG_BODY = {
"aws": {
"region_name": getenv("AWS_DEFAULT_REGION", ""),
"aws_access_key_id": "ILLEGAL_ACCESS_KEY_ID",
"aws_secret_access_key": "ILLEGAL_SECRET_ACCESS_KEY_ID",
},
"bigquery": {
"dataset": "fidesopstest",
"keyfile_creds": loads(
b64decode(getenv("BIGQUERY_CONFIG", "e30=").encode("utf-8")).decode("utf-8")
),
},
"db": {
"connection_string": "postgresql+psycopg2://postgres:postgres@postgres-test:5432/INVALID_DB"
},
"okta": {
"orgUrl": "https://dev-78908748.okta.com",
"token": "INVALID_TOKEN",
},
}
EXTERNAL_FAILURE_CONFIG_BODY["bigquery"]["keyfile_creds"][
"project_id"
] = "INVALID_PROJECT_ID"

EXPECTED_FAILURE_MESSAGES = {
"aws": "The security token included in the request is invalid.",
"okta": "Invalid token provided",
"db": '(psycopg2.OperationalError) FATAL: database "INVALID_DB" does not exist\n\n(Background on this error at: https://sqlalche.me/e/14/e3q8)',
"bigquery": "Invalid project ID 'INVALID_PROJECT_ID'. Project IDs must contain 6-63 lowercase letters, digits, or dashes. Some project IDs also include domain name separated by a colon. IDs must start with a letter and may not end with a dash.",
}


@pytest.mark.external
@pytest.mark.parametrize(
@@ -66,3 +97,38 @@ def test_generate(
generate_response = GenerateResponse.parse_raw(response.text)
assert len(generate_response.generate_results) > 0
assert response.status_code == 200


@pytest.mark.external
@pytest.mark.parametrize(
"generate_type, generate_target",
[
("systems", "aws"),
("systems", "okta"),
("datasets", "db"),
("datasets", "bigquery"),
],
)
def test_generate_failure(
test_config: FidesctlConfig,
generate_type: str,
generate_target: str,
test_client: TestClient,
) -> None:

data = {
"organization_key": "default_organization",
"generate": {
"config": EXTERNAL_FAILURE_CONFIG_BODY[generate_target],
"target": generate_target,
"type": generate_type,
},
}

response = test_client.post(
test_config.cli.server_url + API_PREFIX + "/generate/",
headers=test_config.user.request_headers,
data=dumps(data),
)

assert loads(response.text)["detail"] == EXPECTED_FAILURE_MESSAGES[generate_target]
4 changes: 2 additions & 2 deletions tests/ctl/cli/test_cli.py
@@ -764,7 +764,7 @@ def test_generate_dataset_bigquery_credentials_id(
) -> None:

tmp_output_file = tmpdir.join("dataset.yml")
config_data = os.getenv("BIGQUERY_CONFIG", "")
config_data = os.getenv("BIGQUERY_CONFIG", "e30=")
config_data_decoded = loads(b64decode(config_data.encode("utf-8")).decode("utf-8"))
os.environ["FIDESCTL__CREDENTIALS__BIGQUERY_1__PROJECT_ID"] = config_data_decoded[
"project_id"
@@ -813,7 +813,7 @@ def test_generate_dataset_bigquery_keyfile_path(

tmp_output_file = tmpdir.join("dataset.yml")
tmp_keyfile = tmpdir.join("bigquery.json")
config_data = os.getenv("BIGQUERY_CONFIG", "")
config_data = os.getenv("BIGQUERY_CONFIG", "e30=")
config_data_decoded = loads(b64decode(config_data.encode("utf-8")).decode("utf-8"))
with open(tmp_keyfile, "w", encoding="utf-8") as keyfile:
dump(config_data_decoded, keyfile)
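Both test modules now fall back to `"e30="` when the `BIGQUERY_CONFIG` environment variable is unset, so the base64 decode and JSON parse no longer fail in environments without BigQuery credentials. A quick check of what that default decodes to:

```python
from base64 import b64decode
from json import loads

# "e30=" is the base64 encoding of "{}", i.e. an empty JSON object.
default_config = loads(b64decode("e30=".encode("utf-8")).decode("utf-8"))
print(default_config)  # -> {}
```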
