18 flask 1x upgrade
http://flask.pocoo.org/docs/1.0/changelog/#version-1-0
https://trello.com/c/bBDir2Gp/18-flask-1x-upgrade

* Prevent overwriting of the imported `logger` module in dmutils/logging
* New fixture to stream logs to a StringIO object (replaces the `app_logtofile` fixture)
* New fixture to mock the `logger` attribute of `app`
* Changes to look for the new 'flask.app' logger name (see the sketch after this list)
  * http://flask.pocoo.org/docs/1.0/changelog/#version-1-0
* Break out some parametrized tests to reduce complexity
* Use new fixtures where possible
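
The following minimal sketch is not part of this commit and assumes Flask 1.0.x is installed; it only illustrates the logger-name change referenced above. Under Flask 1.0 the application logger is always named 'flask.app', while Flask 0.12 named it after the app's import name (hence the old 'conftest' assertion replaced in the tests below).

# sketch only: demonstrates the Flask 1.0 logger name this commit codes against
from flask import Flask

app = Flask(__name__)
print(app.logger.name)  # 'flask.app' on Flask 1.0.x; the import name (e.g. 'conftest') on 0.12.x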
benvand committed Sep 7, 2018
1 parent a74d737 commit e0bdb67
Showing 5 changed files with 166 additions and 142 deletions.
7 changes: 3 additions & 4 deletions dmutils/logging.py
@@ -101,10 +101,9 @@ def after_request(response):
if app.config['DM_APP_NAME'] != 'search-api':
loggers.append(logging.getLogger('urllib3.util.retry'))

for logger in loggers:
logger.addHandler(handler)
logger.setLevel(loglevel)

for logger_ in loggers:
logger_.addHandler(handler)
logger_.setLevel(loglevel)
app.logger.info('Logging configured')


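As a hedged illustration of the rename above (not taken from dmutils; the logger names here are stand-ins): a bare `for logger in loggers:` loop rebinds any module-level name `logger`, such as the imported `logger` module mentioned in the first bullet, so code running after the loop would silently see whichever Logger the loop finished on. The trailing underscore avoids that rebinding.

# illustration only; "dmutils", "a" and "b" are made-up names, not dmutils internals
import logging

logger = logging.getLogger("dmutils")                              # stand-in for the module-level name being protected
for logger_ in (logging.getLogger("a"), logging.getLogger("b")):
    logger_.setLevel(logging.INFO)                                 # configure each logger without rebinding `logger`

assert logger.name == "dmutils"                                    # still the original object after the loop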
2 changes: 1 addition & 1 deletion setup.py
Expand Up @@ -25,7 +25,7 @@
install_requires=[
'Flask-Script==2.0.6',
'Flask-WTF==0.14.2',
'Flask==0.12.4',
'Flask==1.0.2',
'Flask-Login>=0.2.11',
'boto3==1.7.83',
'botocore<1.11.0',
35 changes: 24 additions & 11 deletions tests/conftest.py
@@ -1,11 +1,14 @@
import tempfile

import mock
import pytest

from flask import Flask
import mock
from io import StringIO
from logging import Logger, StreamHandler

from boto.ec2.cloudwatch import CloudWatchConnection

from dmutils.logging import init_app
from dmutils.logging import configure_handler, JSONFormatter, get_json_log_format


@pytest.fixture
@@ -17,15 +20,25 @@ def app(request):
return app


# it's deceptively difficult to capture & inspect *actual* log output in pytest (no, capfd doesn't seem to work)
@pytest.fixture
def app_logtofile(request):
with tempfile.NamedTemporaryFile() as f:
app = Flask(__name__)
app.root_path = request.fspath.dirname
app.config['DM_LOG_PATH'] = f.name
init_app(app)
yield app
def app_with_stream_logger(request):
stream = StringIO()
handler = StreamHandler(stream)
formatter = JSONFormatter(get_json_log_format())
app_for_config = app(request)

dm_json_stream_handler = configure_handler(handler, app_for_config, formatter)

with mock.patch('dmutils.logging.get_handler', return_value=dm_json_stream_handler):
yield app(request), stream


@pytest.fixture
def app_with_mocked_logger(request):
# Create a mock from the actual logger object class and attach that mock as the return value of 'create_logger'
with mock.patch('flask.app.create_logger', return_value=mock.Mock(spec=Logger('flask.app'), handlers=[])):
# Use the above app fixture method to return the app
yield app(request)


@pytest.yield_fixture
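A hypothetical usage sketch of the `app_with_stream_logger` fixture above (not part of the commit; the test name and message are made up, and it assumes `init_app` attaches the patched handler at the default INFO level). The fixture yields the app together with the StringIO its JSON log handler writes to, so a test can make assertions against real log output rather than a temporary file.

# hypothetical test; only the fixture name comes from this commit
def test_example_reads_captured_logs(app_with_stream_logger):
    app, stream = app_with_stream_logger            # the app plus the StringIO backing its log handler
    with app.test_request_context('/'):
        app.logger.info("Example message")          # each record is written to the stream as a JSON line
    assert "Example message" in stream.getvalue()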
184 changes: 100 additions & 84 deletions tests/test_logging.py
@@ -94,7 +94,7 @@ def test_init_app_only_adds_handlers_to_defined_loggers(app):
and isinstance(v.handlers[0], logging.StreamHandler)
}

assert loggers == {'conftest', 'dmutils', 'dmapiclient', 'flask_wtf.csrf', 'urllib3.util.retry'}
assert loggers == {'flask.app', 'dmutils', 'dmapiclient', 'flask_wtf.csrf', 'urllib3.util.retry'}


def test_urllib3_retry_logger_not_configured_for_search_api(app):
@@ -119,87 +119,105 @@ class _Request(app.request_class):
app.request_class = _Request


@pytest.mark.parametrize("is_sampled", (None, False, True,))
def test_app_request_logs_responses_with_info_level(app, is_sampled):
@pytest.mark.parametrize("is_sampled", (None, False))
def test_app_request_logs_responses_with_info_level_not_sampled(app_with_mocked_logger, is_sampled):
if is_sampled is not None:
_set_request_class_is_sampled(app, is_sampled)

# since app.logger is a read-only property we need to patch the Flask class
with mock.patch('flask.Flask.logger') as logger:
app.test_client().get('/')

assert logger.log.call_args_list == ([
mock.call(
logging.DEBUG,
'Received request {method} {url}',
extra={
"url": "http://localhost/",
"method": "GET",
"endpoint": None,
"process_": RestrictedAny(lambda value: isinstance(value, int)),
"thread_": RestrictedAny(lambda value: isinstance(value, (str, bytes,))),
},
),
] if is_sampled else []) + [
mock.call(
logging.INFO,
'{method} {url} {status}',
extra={
"url": "http://localhost/",
"status": 404,
"method": "GET",
"endpoint": None,
"duration_real": RestrictedAny(lambda value: isinstance(value, float) and 0 < value),
"duration_process": RestrictedAny(lambda value: isinstance(value, float) and 0 < value),
"process_": RestrictedAny(lambda value: isinstance(value, int)),
"thread_": RestrictedAny(lambda value: isinstance(value, (str, bytes,))),
},
)
]


@pytest.mark.parametrize("is_sampled", (None, False, True,))
def test_app_request_logs_5xx_responses_with_error_level(app, is_sampled):
_set_request_class_is_sampled(app_with_mocked_logger, is_sampled)

app_with_mocked_logger.test_client().get('/')

assert app_with_mocked_logger.logger.log.call_args_list == [
mock.call(
logging.INFO,
'{method} {url} {status}',
extra={
"url": "http://localhost/",
"status": 404,
"method": "GET",
"endpoint": None,
"duration_real": RestrictedAny(lambda value: isinstance(value, float) and 0 < value),
"duration_process": RestrictedAny(lambda value: isinstance(value, float) and 0 < value),
"process_": RestrictedAny(lambda value: isinstance(value, int)),
"thread_": RestrictedAny(lambda value: isinstance(value, (str, bytes,))),
},
)
]


def test_app_request_logs_responses_with_info_level_sampled(app_with_mocked_logger):
_set_request_class_is_sampled(app_with_mocked_logger, True)

app_with_mocked_logger.test_client().get('/')

expected_call = mock.call(
logging.DEBUG,
'Received request {method} {url}',
extra={
"url": "http://localhost/",
"method": "GET",
"endpoint": None,
"process_": RestrictedAny(lambda value: isinstance(value, int)),
"thread_": RestrictedAny(lambda value: isinstance(value, (str, bytes,))),
},
)

assert expected_call in app_with_mocked_logger.logger.log.call_args_list


@pytest.mark.parametrize("is_sampled", (None, False,))
def test_app_request_logs_5xx_responses_with_error_level_not_sampled(app_with_mocked_logger, is_sampled):
if is_sampled is not None:
_set_request_class_is_sampled(app, is_sampled)
_set_request_class_is_sampled(app_with_mocked_logger, is_sampled)

@app.route('/')
@app_with_mocked_logger.route('/')
def error_route():
time.sleep(0.05)
return 'error', 500

# since app.logger is a read-only property we need to patch the Flask class
with mock.patch('flask.Flask.logger') as logger:
app.test_client().get('/')

assert logger.log.call_args_list == ([
mock.call(
logging.DEBUG,
'Received request {method} {url}',
extra={
"url": "http://localhost/",
"method": "GET",
"endpoint": "error_route",
"process_": RestrictedAny(lambda value: isinstance(value, int)),
"thread_": RestrictedAny(lambda value: isinstance(value, (str, bytes,))),
},
),
] if is_sampled else []) + [
mock.call(
logging.ERROR,
'{method} {url} {status}',
extra={
"url": "http://localhost/",
"status": 500,
"method": "GET",
"endpoint": "error_route",
"duration_real": RestrictedAny(lambda value: isinstance(value, float) and 0.05 <= value),
"duration_process": RestrictedAny(lambda value: isinstance(value, float) and 0 < value),
"process_": RestrictedAny(lambda value: isinstance(value, int)),
"thread_": RestrictedAny(lambda value: isinstance(value, (str, bytes,))),
},
),
]
app_with_mocked_logger.test_client().get('/')

assert app_with_mocked_logger.logger.log.call_args_list == [
mock.call(
logging.ERROR,
'{method} {url} {status}',
extra={
"url": "http://localhost/",
"status": 500,
"method": "GET",
"endpoint": "error_route",
"duration_real": RestrictedAny(lambda value: isinstance(value, float) and 0.05 <= value),
"duration_process": RestrictedAny(lambda value: isinstance(value, float) and 0 < value),
"process_": RestrictedAny(lambda value: isinstance(value, int)),
"thread_": RestrictedAny(lambda value: isinstance(value, (str, bytes,))),
},
)
]


def test_app_request_logs_5xx_responses_with_error_level_sampled(app_with_mocked_logger):

_set_request_class_is_sampled(app_with_mocked_logger, True)

@app_with_mocked_logger.route('/')
def error_route():
time.sleep(0.05)
return 'error', 500

app_with_mocked_logger.test_client().get('/')

expected_call = mock.call(
logging.DEBUG,
'Received request {method} {url}',
extra={
"url": "http://localhost/",
"method": "GET",
"endpoint": "error_route",
"process_": RestrictedAny(lambda value: isinstance(value, int)),
"thread_": RestrictedAny(lambda value: isinstance(value, (str, bytes,))),
},
)

assert expected_call in app_with_mocked_logger.logger.log.call_args_list


class TestJSONFormatter(object):
@@ -287,20 +305,18 @@ def test_failed_log_message_formatting_logs_an_error(self):


@pytest.mark.parametrize("is_sampled", (False, True,))
def test_log_context_handling_in_initialized_app_high_level(app_logtofile, is_sampled):
def test_log_context_handling_in_initialized_app_high_level(app_with_stream_logger, is_sampled):
app, stream = app_with_stream_logger

def _logging_call_site(app):
app.logger.info(
"Charming day {ankles}, {underleaves}, {parent_span_id}",
extra={"underleaves": "ample"},
)

with open(app_logtofile.config["DM_LOG_PATH"], "r") as log_file:
# consume log initialization line
log_file.read()

_set_request_class_is_sampled(app_logtofile, is_sampled)
_set_request_class_is_sampled(app_with_stream_logger, is_sampled)

with app_logtofile.test_request_context('/'):
with app.test_request_context('/'):
test_extra_log_context = {
"ankles": "thinsocked",
"span_id": "beesWaxed",
Expand All @@ -311,12 +327,12 @@ def _logging_call_site(app):

# we perform the log call in a specifically designated & named function to exercise & be able to reliably
# assert the behaviour of the introspective aspects of our logging
_logging_call_site(app_logtofile)
_logging_call_site(app)

# ensure buffers are flushed
logging.shutdown()

all_lines = tuple(json.loads(line) for line in log_file.read().splitlines())
all_lines = tuple(json.loads(line) for line in stream.getvalue().split('\n') if line)

assert all_lines == (
AnySupersetOf({
