Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Use inclusive naming #7156

Merged
merged 3 commits into from
Jul 20, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 8 additions & 8 deletions envoy/datadog_checks/envoy/data/conf.yaml.example
Original file line number Diff line number Diff line change
Expand Up @@ -37,36 +37,36 @@ instances:
#
- stats_url: http://localhost:80/stats

## @param metric_whitelist - list of strings - optional
## Whitelist metrics using regular expressions.
## @param included_metrics - list of strings - optional
## Includes metrics using regular expressions.
## The filtering occurs before tag extraction, so you have the option
## to have certain tags decide whether or not to keep or ignore metrics.
## For an exhaustive list of all metrics and tags, see:
## https://github.com/DataDog/integrations-core/blob/master/envoy/datadog_checks/envoy/metrics.py
##
## If you surround patterns by quotes, be sure to escape backslashes with an extra backslash.
##
## The example whitelist below will include:
## The example list below will include:
## - cluster.in.0000.lb_subsets_active
## - cluster.out.alerting-event-evaluator-test.datadog.svc.cluster.local
#
# metric_whitelist:
# included_metrics:
# - cluster\.(in|out)\..*

## @param metric_blacklist - list of strings - optional
## Blacklist metrics using regular expressions.
## @param excluded_metrics - list of strings - optional
## Excludes metrics using regular expressions.
## The filtering occurs before tag extraction, so you have the option
## to have certain tags decide whether or not to keep or ignore metrics.
## For an exhaustive list of all metrics and tags, see:
## https://github.com/DataDog/integrations-core/blob/master/envoy/datadog_checks/envoy/metrics.py
##
## If you surround patterns by quotes, be sure to escape backslashes with an extra backslash.
##
## The example blacklist below will exclude:
## The example list below will exclude:
## - http.admin.downstream_cx_active
## - http.http.rds.0000.control_plane.rate_limit_enforced
#
# metric_blacklist:
# excluded_metrics:
# - ^http\..*

## @param cache_metrics - boolean - optional - default: true
Expand Down
61 changes: 32 additions & 29 deletions envoy/datadog_checks/envoy/envoy.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,19 +23,28 @@ def __init__(self, name, init_config, instances):
super(Envoy, self).__init__(name, init_config, instances)
self.unknown_metrics = defaultdict(int)
self.unknown_tags = defaultdict(int)
self.whitelist = None
self.blacklist = None

included_metrics = set(
re.sub(r'^envoy\\?\.', '', s, 1)
for s in self.instance.get('included_metrics', self.instance.get('metric_whitelist', []))
)
self.config_included_metrics = [re.compile(pattern) for pattern in included_metrics]

excluded_metrics = set(
re.sub(r'^envoy\\?\.', '', s, 1)
for s in self.instance.get('excluded_metrics', self.instance.get('metric_blacklist', []))
)
self.config_excluded_metrics = [re.compile(pattern) for pattern in excluded_metrics]

# The memory implications here are unclear to me. We may want a bloom filter
# or a data structure that expires elements based on inactivity.
self.whitelisted_metrics = set()
self.blacklisted_metrics = set()
self.included_metrics_cache = set()
self.excluded_metrics_cache = set()

self.caching_metrics = None

def check(self, instance):
custom_tags = instance.get('tags', [])

try:
stats_url = instance['stats_url']
except KeyError:
Expand All @@ -44,14 +53,6 @@ def check(self, instance):
self.log.error(msg)
return

if self.whitelist is None:
whitelist = set(re.sub(r'^envoy\\?\.', '', s, 1) for s in instance.get('metric_whitelist', []))
self.whitelist = [re.compile(pattern) for pattern in whitelist]

if self.blacklist is None:
blacklist = set(re.sub(r'^envoy\\?\.', '', s, 1) for s in instance.get('metric_blacklist', []))
self.blacklist = [re.compile(pattern) for pattern in blacklist]

if self.caching_metrics is None:
self.caching_metrics = instance.get('cache_metrics', True)

Expand Down Expand Up @@ -86,7 +87,7 @@ def check(self, instance):
except ValueError:
continue

if not self.whitelisted_metric(envoy_metric):
if not self.included_metrics(envoy_metric):
continue

try:
Expand Down Expand Up @@ -117,29 +118,31 @@ def check(self, instance):

self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.OK, tags=custom_tags)

def whitelisted_metric(self, metric):
def included_metrics(self, metric):
if self.caching_metrics:
if metric in self.whitelisted_metrics:
if metric in self.included_metrics_cache:
return True
elif metric in self.blacklisted_metrics:
elif metric in self.excluded_metrics_cache:
return False

if self.whitelist:
whitelisted = any(pattern.search(metric) for pattern in self.whitelist)
if self.blacklist:
whitelisted = whitelisted and not any(pattern.search(metric) for pattern in self.blacklist)
if self.config_included_metrics:
included_metrics = any(pattern.search(metric) for pattern in self.config_included_metrics)
if self.config_excluded_metrics:
included_metrics = included_metrics and not any(
pattern.search(metric) for pattern in self.config_excluded_metrics
)

if self.caching_metrics and whitelisted:
self.whitelisted_metrics.add(metric)
if self.caching_metrics and included_metrics:
self.included_metrics_cache.add(metric)

return whitelisted
elif self.blacklist:
blacklisted = any(pattern.search(metric) for pattern in self.blacklist)
return included_metrics
elif self.config_excluded_metrics:
excluded_metrics = any(pattern.search(metric) for pattern in self.config_excluded_metrics)

if self.caching_metrics and blacklisted:
self.blacklisted_metrics.add(metric)
if self.caching_metrics and excluded_metrics:
self.excluded_metrics_cache.add(metric)

return not blacklisted
return not excluded_metrics
else:
return True

Expand Down
16 changes: 11 additions & 5 deletions envoy/tests/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,12 +16,18 @@
PORT = '8001'
INSTANCES = {
'main': {'stats_url': 'http://{}:{}/stats'.format(HOST, PORT)},
'whitelist': {'stats_url': 'http://{}:{}/stats'.format(HOST, PORT), 'metric_whitelist': [r'envoy\.cluster\..*']},
'blacklist': {'stats_url': 'http://{}:{}/stats'.format(HOST, PORT), 'metric_blacklist': [r'envoy\.cluster\..*']},
'whitelist_blacklist': {
'included_metrics': {
'stats_url': 'http://{}:{}/stats'.format(HOST, PORT),
'metric_whitelist': [r'envoy\.cluster\.'],
'metric_blacklist': [r'envoy\.cluster\.out\.'],
'metric_whitelist': [r'envoy\.cluster\..*'],
},
'excluded_metrics': {
'stats_url': 'http://{}:{}/stats'.format(HOST, PORT),
'metric_blacklist': [r'envoy\.cluster\..*'],
},
'included_excluded_metrics': {
'stats_url': 'http://{}:{}/stats'.format(HOST, PORT),
'included_metrics': [r'envoy\.cluster\.'],
'excluded_metrics': [r'envoy\.cluster\.out\.'],
},
}
ENVOY_VERSION = os.getenv('ENVOY_VERSION')
Expand Down
36 changes: 29 additions & 7 deletions envoy/tests/test_envoy.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
CHECK_NAME = 'envoy'


@pytest.mark.integration
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Should we put integration and unit tests in separate files?

Copy link
Contributor Author

@hithwen hithwen Jul 20, 2020

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Probably, but that is out of scope for this PR; in any case, they can be run separately with `-m integration` or `-m unit`.

@pytest.mark.usefixtures('dd_environment')
def test_success(aggregator):
instance = INSTANCES['main']
Expand All @@ -28,6 +29,7 @@ def test_success(aggregator):
assert metrics_collected >= 250


@pytest.mark.unit
def test_success_fixture(aggregator):
instance = INSTANCES['main']
c = Envoy(CHECK_NAME, {}, [instance])
Expand All @@ -44,8 +46,21 @@ def test_success_fixture(aggregator):
assert 4412 <= metrics_collected == num_metrics


def test_success_fixture_whitelist(aggregator):
instance = INSTANCES['whitelist']
@pytest.mark.unit
def test_retrocompatible_config():
instance = deepcopy(INSTANCES['main'])
instance['metric_whitelist'] = deepcopy(INSTANCES['included_excluded_metrics']['included_metrics'])
instance['metric_blacklist'] = deepcopy(INSTANCES['included_excluded_metrics']['excluded_metrics'])

c1 = Envoy(CHECK_NAME, {}, [instance])
c2 = Envoy(CHECK_NAME, {}, [INSTANCES['included_excluded_metrics']])
assert c1.config_included_metrics == c2.config_included_metrics
assert c1.config_excluded_metrics == c2.config_excluded_metrics


@pytest.mark.unit
def test_success_fixture_included_metrics(aggregator):
instance = INSTANCES['included_metrics']
c = Envoy(CHECK_NAME, {}, [instance])

with mock.patch('requests.get', return_value=response('multiple_services')):
Expand All @@ -55,8 +70,9 @@ def test_success_fixture_whitelist(aggregator):
assert metric.startswith('envoy.cluster.')


def test_success_fixture_blacklist(aggregator):
instance = INSTANCES['blacklist']
@pytest.mark.unit
def test_success_fixture_excluded_metrics(aggregator):
instance = INSTANCES['excluded_metrics']
c = Envoy(CHECK_NAME, {}, [instance])

with mock.patch('requests.get', return_value=response('multiple_services')):
Expand All @@ -66,8 +82,9 @@ def test_success_fixture_blacklist(aggregator):
assert not metric.startswith('envoy.cluster.')


def test_success_fixture_whitelist_blacklist(aggregator):
instance = INSTANCES['whitelist_blacklist']
@pytest.mark.unit
def test_success_fixture_inclued_and_excluded_metrics(aggregator):
instance = INSTANCES['included_excluded_metrics']
c = Envoy(CHECK_NAME, {}, [instance])

with mock.patch('requests.get', return_value=response('multiple_services')):
Expand All @@ -77,6 +94,7 @@ def test_success_fixture_whitelist_blacklist(aggregator):
assert metric.startswith("envoy.cluster.") and not metric.startswith("envoy.cluster.out.")


@pytest.mark.unit
def test_service_check(aggregator):
instance = INSTANCES['main']
c = Envoy(CHECK_NAME, {}, [instance])
Expand All @@ -87,6 +105,7 @@ def test_service_check(aggregator):
assert aggregator.service_checks(Envoy.SERVICE_CHECK_NAME)[0].status == Envoy.OK


@pytest.mark.unit
def test_unknown():
instance = INSTANCES['main']
c = Envoy(CHECK_NAME, {}, [instance])
Expand All @@ -97,6 +116,7 @@ def test_unknown():
assert sum(c.unknown_metrics.values()) == 5


@pytest.mark.unit
@pytest.mark.parametrize(
'test_case, extra_config, expected_http_kwargs',
[
Expand All @@ -123,7 +143,8 @@ def test_config(test_case, extra_config, expected_http_kwargs):
r.get.assert_called_with('http://{}:8001/stats'.format(HOST), **http_wargs)


def test_metadata_unit(datadog_agent):
@pytest.mark.unit
def test_metadata(datadog_agent):
instance = INSTANCES['main']
check = Envoy(CHECK_NAME, {}, [instance])
check.check_id = 'test:123'
Expand Down Expand Up @@ -196,6 +217,7 @@ def test_metadata_unit(datadog_agent):
check.log.debug.assert_called_with('Version not matched.')


@pytest.mark.integration
@pytest.mark.usefixtures('dd_environment')
def test_metadata_integration(aggregator, datadog_agent):
instance = INSTANCES['main']
Expand Down