Refactor logging configuration (#3758)
ThomasLaPiana authored Jul 13, 2023
1 parent 12dff1a commit 10cebca
Showing 13 changed files with 122 additions and 121 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -24,6 +24,7 @@ The types of changes are:
### Changed

- Bumped supported Python versions to `3.10.12`, `3.9.17`, and `3.8.17` [#3733](https://github.com/ethyca/fides/pull/3733)
- Logging Updates [#3758](https://github.com/ethyca/fides/pull/3758)
- Add polyfill service to fides-js route [#3759](https://github.com/ethyca/fides/pull/3759)

## [2.16.0](https://github.com/ethyca/fides/compare/2.15.1...2.16.0)
1 change: 1 addition & 0 deletions docker-compose.yml
@@ -26,6 +26,7 @@ services:
FIDES__CLI__SERVER_PORT: "8080"
FIDES__DATABASE__SERVER: "fides-db"
FIDES__DEV_MODE: "True"
FIDES__LOGGING__COLORIZE: "True"
FIDES__USER__ANALYTICS_OPT_OUT: "True"
FIDES__SECURITY__ALLOW_CUSTOM_CONNECTOR_FUNCTIONS: "True"
FIDES__SECURITY__BASTION_SERVER_HOST: ${FIDES__SECURITY__BASTION_SERVER_HOST-}
6 changes: 1 addition & 5 deletions src/fides/api/alembic/migrations/env.py
@@ -14,11 +14,7 @@
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(alembic_config.config_file_name)
setup_fidesapi_logger(
fides_config.logging.level,
serialize=fides_config.logging.serialization,
desination=fides_config.logging.destination,
)
setup_fidesapi_logger(CONFIG)

# add your model's MetaData object here
# for 'autogenerate' support
7 changes: 4 additions & 3 deletions src/fides/api/api/v1/endpoints/dataset_endpoints.py
@@ -37,9 +37,6 @@
convert_dataset_to_graph,
to_graph_field,
)
from fides.api.models.sql_models import ( # type: ignore[attr-defined]
Dataset as CtlDataset,
)
from fides.api.oauth.utils import verify_oauth_client
from fides.api.schemas.api import BulkUpdateFailed
from fides.api.schemas.dataset import (
@@ -69,6 +66,10 @@
YAML_DATASETS,
)

from fides.api.models.sql_models import ( # type: ignore[attr-defined] # isort: skip
Dataset as CtlDataset,
)

X_YAML = "application/x-yaml"

router = APIRouter(tags=["Dataset Configs"], prefix=V1_URL_PREFIX)
6 changes: 1 addition & 5 deletions src/fides/api/app_setup.py
@@ -79,11 +79,7 @@ def create_fides_app(
security_env: str = CONFIG.security.env,
) -> FastAPI:
"""Return a properly configured application."""
setup_logging(
CONFIG.logging.level,
serialize=CONFIG.logging.serialization,
desination=CONFIG.logging.destination,
)
setup_logging(CONFIG)
logger.bind(api_config=CONFIG.logging.json()).debug(
"Logger configuration options in use"
)
13 changes: 11 additions & 2 deletions src/fides/api/main.py
@@ -133,7 +133,13 @@ async def prepare_and_log_request(
async def log_request(request: Request, call_next: Callable) -> Response:
"""Log basic information about every request handled by the server."""
start = datetime.now()
response = await call_next(request)

# If the request fails, we still want to log it
try:
response = await call_next(request)
except: # pylint: disable=bare-except
response = Response(status_code=500)

handler_time = datetime.now() - start
logger.bind(
method=request.method,
@@ -252,7 +258,10 @@ async def setup_server() -> None:
)
)

logger.info(FIDES_ASCII_ART)
# It's just a random bunch of strings when serialized
if not CONFIG.logging.serialization:
logger.info(FIDES_ASCII_ART)

logger.info(f"Fides startup complete! v{VERSION}")


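For context, `log_request` is FastAPI HTTP middleware: the new `try`/`except` ensures that a handler failure still produces a log entry with a 500 status instead of skipping the logging step entirely. Below is a minimal, self-contained sketch of that catch-and-log pattern; the bound fields other than `method` (path, status code, handler time) are illustrative assumptions, since the hunk above is truncated.

```python
# Sketch of the catch-and-log middleware pattern; field names beyond `method`
# are assumptions for illustration, not a verbatim copy of main.py.
from datetime import datetime
from typing import Callable

from fastapi import FastAPI, Request, Response
from loguru import logger

app = FastAPI()


@app.middleware("http")
async def log_request(request: Request, call_next: Callable) -> Response:
    """Log basic information about every request, even when the handler raises."""
    start = datetime.now()
    try:
        response = await call_next(request)
    except Exception:  # broad catch so a failing handler is still logged
        response = Response(status_code=500)
    handler_time = datetime.now() - start
    logger.bind(
        method=request.method,
        path=request.url.path,
        status_code=response.status_code,
        handler_time=f"{handler_time.total_seconds() * 1000:.2f}ms",
    ).info("Request received")
    return response
```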
5 changes: 3 additions & 2 deletions src/fides/api/models/datasetconfig.py
@@ -19,10 +19,11 @@
)
from fides.api.graph.data_type import parse_data_type_string
from fides.api.models.connectionconfig import ConnectionConfig, ConnectionType
from fides.api.models.sql_models import ( # type: ignore[attr-defined]
from fides.api.util.saas_util import merge_datasets

from fides.api.models.sql_models import ( # type: ignore[attr-defined] # isort: skip
Dataset as CtlDataset,
)
from fides.api.util.saas_util import merge_datasets


class DatasetConfig(Base):
3 changes: 2 additions & 1 deletion src/fides/api/util/data_category.py
@@ -6,7 +6,8 @@
from sqlalchemy.orm import Session

from fides.api import common_exceptions
from fides.api.models.sql_models import ( # type: ignore[attr-defined]

from fides.api.models.sql_models import ( # type: ignore[attr-defined] # isort: skip
DataCategory as DataCategoryDbModel,
)

134 changes: 54 additions & 80 deletions src/fides/api/util/logger.py
@@ -7,12 +7,11 @@

import logging
import sys
from types import FrameType
from typing import Dict, List, Optional, Union
from typing import Dict, List

from loguru import logger

from fides.config import CONFIG
from fides.config import CONFIG, FidesConfig

MASKED = "MASKED"

Expand Down Expand Up @@ -42,77 +41,54 @@ def _log_warning(exc: BaseException, dev_mode: bool = False) -> None:
logger.error(exc)


class FidesAPIHandler(logging.Handler):
def create_handler_dicts(
level: str, sink: str, serialize: bool, colorize: bool, include_called_from: bool
) -> List[Dict]:
"""
The logging.Handler used by the api logger.
Creates dictionaries used for configuring loguru handlers.
Two dictionaries are returned, one for standard logs and another to handle
logs that include "extra" information.
"""
time_format = "<green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green>"
level_format = "<level>{level: <8}</level>"
called_from_format = (
("<cyan>{name}</cyan>:<cyan>{function}</cyan>:<cyan>{line}</cyan>")
if include_called_from
else ""
)
message_format = "<level>{message}</level>"

log_format = (
time_format
+ " | "
+ level_format
+ " | "
+ called_from_format
+ " - "
+ message_format
)

def __init__(
self,
level: Union[int, str],
include_extra: bool = False,
serialize: str = "",
sink: str = "",
) -> None:
super().__init__(level=level)
self.loguru_vals: Dict = {
"level": level,
"serialize": serialize == "json",
"sink": sys.stdout if sink == "" else sink,
}

format_module = ""
if level == logging.DEBUG or level == logging.getLevelName(logging.DEBUG):
format_module = " (<c>{module}:{function}:{line}</c>)"

format_extra = ""
self.loguru_vals["filter"] = "lambda logRecord: not bool(logRecord['extra'])"
if include_extra:
format_extra = " | {extra}"
self.loguru_vals["filter"] = "lambda logRecord: bool(logRecord['extra'])"

self.loguru_vals["format"] = (
"<d>{time:YYYY-MM-DD HH:mm:ss.SSS}</d> [<lvl>{level}</lvl>]%s: {message}%s"
% (format_module, format_extra)
)

def emit(self, record: logging.LogRecord) -> None:
"""
Log the specified record.
"""
# Get corresponding Loguru level if it exists
level: Union[int, str]
try:
level = logger.level(record.levelname).name
except ValueError:
level = record.levelno

# Determine the caller that originated the log entry
frame: Optional[FrameType] = logging.currentframe()
depth = 2
while frame is not None and frame.f_code.co_filename == logging.__file__:
frame = frame.f_back
depth += 1

logger.opt(depth=depth, exception=record.exc_info).log(
level, record.getMessage()
)

def loguru_config(self) -> Dict:
"""
Returns only the fields required to pass a FidesAPIHandler
as a handler kwarg in Loguru's logger.configure().
"""
return {
"level": self.loguru_vals["level"],
"filter": eval(self.loguru_vals["filter"]),
"format": self.loguru_vals["format"],
"serialize": self.loguru_vals["serialize"],
"sink": self.loguru_vals["sink"],
}


def setup(level: str, serialize: str = "", desination: str = "") -> None:
standard_dict = {
"colorize": colorize,
"format": log_format,
"level": level,
"serialize": serialize,
"sink": sys.stdout if sink == "" else sink,
"filter": lambda logRecord: not bool(logRecord["extra"]),
"diagnose": False,
"backtrace": True,
"catch": True,
}
extra_dict = {
**standard_dict,
"format": log_format + " | {extra}",
"filter": lambda logRecord: bool(logRecord["extra"]),
}
return [standard_dict, extra_dict]


def setup(config: FidesConfig) -> None:
"""
Removes all handlers from all loggers, and sets those
loggers to propagate log entries to the root logger.
@@ -126,16 +102,14 @@ def setup(level: str, serialize: str = "", desination: str = "") -> None:
logging.getLogger(name).handlers = []
logging.getLogger(name).propagate = True

logger.configure(
handlers=[
FidesAPIHandler(
level, sink=desination, serialize=serialize
).loguru_config(),
FidesAPIHandler(
level, include_extra=True, sink=desination, serialize=serialize
).loguru_config(),
]
handlers = create_handler_dicts(
level=config.logging.level,
include_called_from=config.dev_mode,
sink=config.logging.destination,
serialize=config.logging.serialization == "json",
colorize=config.logging.colorize,
)
logger.configure(handlers=handlers)


def obfuscate_message(message: str) -> str:
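The two handler dictionaries returned by `create_handler_dicts` differ only in their format string and `filter`: records carrying bound "extra" data go to the second handler, everything else to the first. A rough usage sketch of the refactored `setup()`, assuming a fully loaded `FidesConfig` is available as `CONFIG`:

```python
# Sketch: initialize Loguru via the refactored setup() and exercise both handlers.
from loguru import logger

from fides.config import CONFIG  # assumed to be a fully loaded FidesConfig
from fides.api.util.logger import setup

setup(CONFIG)

# Matches the "standard" handler (no extra data bound).
logger.info("Plain log entry")

# Matches the "extra" handler; bound fields are appended via the "{extra}" format.
logger.bind(subsystem="demo", request_id="abc123").info("Structured log entry")
```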
4 changes: 4 additions & 0 deletions src/fides/config/logging_settings.py
@@ -20,6 +20,10 @@ class LoggingSettings(FidesSettings):
default="",
description="The output location for log files. Accepts any valid file path. If left unset, log entries are printed to stdout and log files are not produced.",
)
colorize: bool = Field(
default=False,
description="Force colored logs. Any value set via environment variables is considered 'True'.",
)
level: str = Field(
default="INFO",
description="The minimum log entry level to produce. Also accepts TRACE, DEBUG, WARNING, ERROR, or CRITICAL (case insensitive).",
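As the docker-compose change above illustrates, these settings map to environment variables using the `FIDES__LOGGING__` prefix. A hedged sketch of overriding them before the config is loaded (the variable names follow the `FIDES__<SECTION>__<FIELD>` convention seen in docker-compose.yml; only `COLORIZE` is new in this commit):

```python
# Sketch: override logging settings via environment variables before Fides
# loads its configuration. In practice these would be set in the shell or
# orchestrator rather than from Python.
import os

os.environ["FIDES__LOGGING__COLORIZE"] = "True"   # new field added in this commit
os.environ["FIDES__LOGGING__LEVEL"] = "DEBUG"
os.environ["FIDES__LOGGING__SERIALIZATION"] = "json"  # switch output to JSON lines
```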
17 changes: 9 additions & 8 deletions tests/ctl/api/test_admin.py
@@ -26,12 +26,13 @@ def test_db_reset_dev_mode_disabled(
test_config: FidesConfig,
test_config_dev_mode_disabled: FidesConfig, # temporarily switches off config.dev_mode
test_client: TestClient,
loguru_caplog,
) -> None:
with pytest.raises(
errors.FunctionalityNotConfigured,
match="unable to reset fides database outside of dev_mode.",
):
test_client.post(
test_config.cli.server_url + API_PREFIX + "/admin/db/reset/",
headers=test_config.user.auth_header,
)
error_message = "unable to reset fides database outside of dev_mode."
test_client.post(
test_config.cli.server_url + API_PREFIX + "/admin/db/reset/",
headers=test_config.user.auth_header,
)

assert "ERROR" in loguru_caplog.text
assert error_message in loguru_caplog.text
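This test (like the messaging endpoint test below) now asserts on captured log output via a `loguru_caplog` fixture instead of expecting the exception to propagate. The fixture itself is not part of this diff; a common implementation, adapted from Loguru's pytest guidance, looks roughly like the sketch below (the repository's actual fixture may differ):

```python
# Sketch of a loguru_caplog-style fixture: re-emit Loguru records through the
# standard logging machinery so pytest's caplog can capture them.
import logging

import pytest
from loguru import logger


@pytest.fixture
def loguru_caplog(caplog):
    class PropagateHandler(logging.Handler):
        def emit(self, record: logging.LogRecord) -> None:
            logging.getLogger(record.name).handle(record)

    handler_id = logger.add(PropagateHandler(), format="{message}")
    yield caplog
    logger.remove(handler_id)
```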
16 changes: 10 additions & 6 deletions tests/ops/api/v1/endpoints/test_messaging_endpoints.py
@@ -1459,19 +1459,23 @@ def test_get_active_default_app_setting_invalid(
url,
api_client: TestClient,
generate_auth_header,
loguru_caplog,
):
"""
This is contrived and should not be able to occur, but here we test what happens
if somehow the `notifications.notification_service_type` config property is set
to an invalid value.
"""

error_message = "Unknown notification_service_type"
auth_header = generate_auth_header([MESSAGING_READ])
with pytest.raises(ValueError) as e:
api_client.get(
url,
headers=auth_header,
)
assert "Unknown notification_service_type" in str(e)
api_client.get(
url,
headers=auth_header,
)

assert "ERROR" in loguru_caplog.text
assert error_message in loguru_caplog.text

@pytest.mark.usefixtures("notification_service_type_mailgun")
def test_get_active_default_config(
Expand Down