Remote config options for 7.7 #778

Merged: 5 commits, merged on Mar 30, 2020
10 changes: 5 additions & 5 deletions docs/configuration.asciidoc
@@ -10,7 +10,7 @@ You can either configure the agent by setting environment variables:
ELASTIC_APM_SERVICE_NAME=foo python manage.py runserver
----

or with inline configuration:

[source,python]
----
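# Illustrative sketch only -- the exact snippet at this point in the docs is
# collapsed in this diff view. Inline configuration passes settings straight
# to the client constructor:
from elasticapm import Client

client = Client(service_name="foo", server_url="http://localhost:8200")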
@@ -481,7 +481,7 @@ this setting has to be provided in *<<config-format-duration, duration format>>*

|============
| Environment | Django/Flask | Default
| `ELASTIC_APM_API_REQUEST_SIZE` | `API_REQUEST_SIZE` | `"724kb"`
| `ELASTIC_APM_API_REQUEST_SIZE` | `API_REQUEST_SIZE` | `"768kb"`
|============

Maximum queue length of the request buffer before sending the request to the APM Server.
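
For illustration, the buffer size could be configured for a Django app as follows, using the <<config-format-size, size format>> described below. The `ELASTIC_APM` settings dict and the `API_REQUEST_SIZE` key follow the table above; the service name is a placeholder:

[source,python]
----
# settings.py (illustrative): cap the request buffer at the new default of 768kb
ELASTIC_APM = {
    "SERVICE_NAME": "my-service",
    "API_REQUEST_SIZE": "768kb",
}
----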
@@ -692,7 +692,7 @@ NOTE: this setting only disables the *sending* of the given metrics, not collection.
|============

Enable/disable the tracking and collection of breakdown metrics.
By setting this to `False`, tracking this metric is completely disabled, which can reduce the overhead of the agent.

NOTE: This feature requires APM Server and Kibana >= 7.3.
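
As an illustrative sketch (standalone client rather than a framework integration), breakdown metrics can be disabled via inline configuration:

[source,python]
----
from elasticapm import Client

# Passing breakdown_metrics=False disables tracking of this metric entirely
# (illustrative example; other config options can be passed the same way).
client = Client(service_name="my-service", breakdown_metrics=False)
----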

@@ -824,12 +824,12 @@ The unit is provided as a suffix directly after the number, without any separation by whitespace.

*Example*: `5ms`

*Supported units*

* `ms` (milliseconds)
* `s` (seconds)
* `m` (minutes)
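
A simplified, illustrative parser for this format (not the agent's actual duration validator, which may differ in detail) could look like this:

[source,python]
----
import re

_DURATION_RE = re.compile(r"^(\d+)(ms|s|m)$")
_FACTORS_MS = {"ms": 1, "s": 1000, "m": 60 * 1000}

def parse_duration_ms(value):
    # Convert a duration string such as "5ms", "30s" or "2m" to milliseconds.
    match = _DURATION_RE.match(value)
    if not match:
        raise ValueError("%s is not a valid duration" % value)
    number, unit = match.groups()
    return int(number) * _FACTORS_MS[unit]

assert parse_duration_ms("5ms") == 5
assert parse_duration_ms("2m") == 120000
----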

[float]
[[config-format-size]]
==== Size format
2 changes: 0 additions & 2 deletions elasticapm/base.py
@@ -137,8 +137,6 @@ def __init__(self, config=None, **inline):
"verify_server_cert": self.config.verify_server_cert,
"server_cert": self.config.server_cert,
"timeout": self.config.server_timeout,
"max_flush_time": self.config.api_request_time / 1000.0,
"max_buffer_size": self.config.api_request_size,
"processors": self.load_processors(),
}
self._api_endpoint_url = compat.urlparse.urljoin(
2 changes: 1 addition & 1 deletion elasticapm/conf/__init__.py
@@ -301,7 +301,7 @@ class Config(_ConfigBase):
breakdown_metrics = _BoolConfigValue("BREAKDOWN_METRICS", default=True)
disable_metrics = _ListConfigValue("DISABLE_METRICS", type=starmatch_to_regex, default=[])
central_config = _BoolConfigValue("CENTRAL_CONFIG", default=True)
api_request_size = _ConfigValue("API_REQUEST_SIZE", type=int, validators=[size_validator], default=750 * 1024)
api_request_size = _ConfigValue("API_REQUEST_SIZE", type=int, validators=[size_validator], default=768 * 1024)
api_request_time = _ConfigValue("API_REQUEST_TIME", type=int, validators=[duration_validator], default=10 * 1000)
transaction_sample_rate = _ConfigValue("TRANSACTION_SAMPLE_RATE", type=float, default=1.0)
transaction_max_spans = _ConfigValue("TRANSACTION_MAX_SPANS", type=int, default=500)
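With this change, `api_request_size` joins the options that can be adjusted at runtime through central (remote) configuration. A minimal sketch in the style of the tests added below, assuming an already-initialized `client`:

# Hypothetical runtime update: config.update() takes a version marker plus any
# dynamically changeable option; the new value is picked up without a restart.
client.config.update(version="2", api_request_size="1mb", transaction_sample_rate=0.5)
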
10 changes: 6 additions & 4 deletions elasticapm/traces.py
@@ -505,11 +505,13 @@ def __init__(self, frames_collector_func, frames_processing_func, queue_func, co
self.frames_collector_func = frames_collector_func
self._agent = agent
self._ignore_patterns = [re.compile(p) for p in config.transactions_ignore_patterns or []]
if config.span_frames_min_duration in (-1, None):
# both None and -1 mean "no minimum"
self.span_frames_min_duration = None

@property
def span_frames_min_duration(self):
Contributor review comment: 👍

if self.config.span_frames_min_duration in (-1, None):
return None
else:
self.span_frames_min_duration = config.span_frames_min_duration / 1000.0
return self.config.span_frames_min_duration / 1000.0

def begin_transaction(self, transaction_type, trace_parent=None, start=None):
"""
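The net effect of this hunk: `span_frames_min_duration` is no longer computed once in `__init__` but recomputed on every access, so a remote config change takes effect immediately. A simplified illustration of the pattern (hypothetical class name, mirroring the property above):

class TracerSketch(object):
    def __init__(self, config):
        self.config = config

    @property
    def span_frames_min_duration(self):
        # both None and -1 mean "no minimum"
        if self.config.span_frames_min_duration in (-1, None):
            return None
        # the config value is in milliseconds; spans are measured in seconds
        return self.config.span_frames_min_duration / 1000.0
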
14 changes: 8 additions & 6 deletions elasticapm/transport/base.py
@@ -67,8 +67,6 @@ def __init__(
metadata=None,
compress_level=5,
json_serializer=json_encoder.dumps,
max_flush_time=None,
max_buffer_size=None,
queue_chill_count=500,
queue_chill_time=1.0,
processors=None,
@@ -80,17 +78,13 @@
:param metadata: Metadata object to prepend to every queue
:param compress_level: GZip compress level. If zero, no GZip compression will be used
:param json_serializer: serializer to use for JSON encoding
:param max_flush_time: Maximum time between flushes in seconds
:param max_buffer_size: Maximum size of buffer before flush
:param kwargs:
"""
self.client = client
self.state = TransportState()
self._metadata = metadata if metadata is not None else {}
self._compress_level = min(9, max(0, compress_level if compress_level is not None else 0))
self._json_serializer = json_serializer
self._max_flush_time = max_flush_time
self._max_buffer_size = max_buffer_size
self._queued_data = None
self._event_queue = self._init_event_queue(chill_until=queue_chill_count, max_chill_time=queue_chill_time)
self._is_chilled_queue = isinstance(self._event_queue, ChilledQueue)
@@ -101,6 +95,14 @@
self._closed = False
self._processors = processors if processors is not None else []

@property
def _max_flush_time(self):
return self.client.config.api_request_time / 1000.0 if self.client else None

@property
def _max_buffer_size(self):
return self.client.config.api_request_size if self.client else None

def queue(self, event_type, data, flush=False):
try:
self._flushed.clear()
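Since `_max_flush_time` and `_max_buffer_size` are now properties backed by the client config, a remote update is reflected on the next access. An illustrative check, assuming `client` is an initialized `Client` and unit handling matches the defaults in `elasticapm/conf/__init__.py` above:

transport = Transport(client=client, metadata={})
client.config.update(version="2", api_request_time="1s", api_request_size="100kb")
# api_request_time is stored in milliseconds, so the property yields seconds
assert transport._max_flush_time == 1.0
assert transport._max_buffer_size == 100 * 1024
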
68 changes: 68 additions & 0 deletions tests/client/client_tests.py
@@ -478,6 +478,38 @@ def test_transaction_sampling(elasticapm_client, not_so_random):
assert transaction["sampled"] or not "context" in transaction


def test_transaction_sample_rate_dynamic(elasticapm_client, not_so_random):
elasticapm_client.config.update(version="1", transaction_sample_rate=0.4)
for i in range(10):
elasticapm_client.begin_transaction("test_type")
with elasticapm.capture_span("xyz"):
pass
elasticapm_client.end_transaction("test")

transactions = elasticapm_client.events[TRANSACTION]
spans_per_transaction = defaultdict(list)
for span in elasticapm_client.events[SPAN]:
spans_per_transaction[span["transaction_id"]].append(span)

# seed is fixed by not_so_random fixture
assert len([t for t in transactions if t["sampled"]]) == 3
for transaction in transactions:
assert transaction["sampled"] or not transaction["id"] in spans_per_transaction
assert transaction["sampled"] or not "context" in transaction

elasticapm_client.config.update(version="1", transaction_sample_rate=1.0)
for i in range(5):
elasticapm_client.begin_transaction("test_type")
with elasticapm.capture_span("xyz"):
pass
elasticapm_client.end_transaction("test")

transactions = elasticapm_client.events[TRANSACTION]

# seed is fixed by not_so_random fixture
assert len([t for t in transactions if t["sampled"]]) == 8


@pytest.mark.parametrize("elasticapm_client", [{"transaction_max_spans": 5}], indirect=True)
def test_transaction_max_spans(elasticapm_client):
elasticapm_client.begin_transaction("test_type")
@@ -561,6 +593,42 @@ def test_transaction_span_frames_min_duration_no_limit(elasticapm_client):
assert spans[1]["stacktrace"] is not None


def test_transaction_span_frames_min_duration_dynamic(elasticapm_client):
elasticapm_client.config.update(version="1", span_frames_min_duration=20)
elasticapm_client.begin_transaction("test_type")
with elasticapm.capture_span("noframes", duration=0.001):
pass
with elasticapm.capture_span("frames", duration=0.04):
pass
elasticapm_client.end_transaction("test")

spans = elasticapm_client.events[SPAN]

assert len(spans) == 2
assert spans[0]["name"] == "noframes"
assert "stacktrace" not in spans[0]

assert spans[1]["name"] == "frames"
assert spans[1]["stacktrace"] is not None

elasticapm_client.config.update(version="1", span_frames_min_duration=-1)
elasticapm_client.begin_transaction("test_type")
with elasticapm.capture_span("frames"):
pass
with elasticapm.capture_span("frames", duration=0.04):
pass
elasticapm_client.end_transaction("test")

spans = elasticapm_client.events[SPAN]

assert len(spans) == 4
assert spans[2]["name"] == "frames"
assert spans[2]["stacktrace"] is not None

assert spans[3]["name"] == "frames"
assert spans[3]["stacktrace"] is not None


@pytest.mark.parametrize("elasticapm_client", [{"transaction_max_spans": 3}], indirect=True)
def test_transaction_max_span_nested(elasticapm_client):
elasticapm_client.begin_transaction("test_type")
32 changes: 32 additions & 0 deletions tests/contrib/django/django_tests.py
@@ -1347,6 +1347,38 @@ def test_capture_body_config_is_dynamic_for_transactions(client, django_elastica
assert transaction["context"]["request"]["body"] == "[REDACTED]"


def test_capture_headers_config_is_dynamic_for_errors(client, django_elasticapm_client):
django_elasticapm_client.config.update(version="1", capture_headers=True)
with pytest.raises(MyException):
client.post(reverse("elasticapm-raise-exc"))
error = django_elasticapm_client.events[ERROR][0]
assert error["context"]["request"]["headers"]

django_elasticapm_client.config.update(version="1", capture_headers=False)
with pytest.raises(MyException):
client.post(reverse("elasticapm-raise-exc"))
error = django_elasticapm_client.events[ERROR][1]
assert "headers" not in error["context"]["request"]


def test_capture_headers_config_is_dynamic_for_transactions(client, django_elasticapm_client):
django_elasticapm_client.config.update(version="1", capture_headers=True)
with override_settings(
**middleware_setting(django.VERSION, ["elasticapm.contrib.django.middleware.TracingMiddleware"])
):
client.post(reverse("elasticapm-no-error"))
transaction = django_elasticapm_client.events[TRANSACTION][0]
assert transaction["context"]["request"]["headers"]

django_elasticapm_client.config.update(version="1", capture_headers=False)
with override_settings(
**middleware_setting(django.VERSION, ["elasticapm.contrib.django.middleware.TracingMiddleware"])
):
client.post(reverse("elasticapm-no-error"))
transaction = django_elasticapm_client.events[TRANSACTION][1]
assert "headers" not in transaction["context"]["request"]


@pytest.mark.parametrize(
"django_elasticapm_client",
[{"capture_body": "errors"}, {"capture_body": "transactions"}, {"capture_body": "all"}, {"capture_body": "off"}],
36 changes: 32 additions & 4 deletions tests/contrib/flask/flask_tests.py
@@ -314,14 +314,42 @@ def test_capture_body_config_is_dynamic_for_transactions(flask_apm_client):
flask_apm_client.client.config.update(version="1", capture_body="all")
resp = flask_apm_client.app.test_client().post("/users/", data={"foo": "bar"})
resp.close()
error = flask_apm_client.client.events[TRANSACTION][0]
assert error["context"]["request"]["body"] == {"foo": "bar"}
transaction = flask_apm_client.client.events[TRANSACTION][0]
assert transaction["context"]["request"]["body"] == {"foo": "bar"}

flask_apm_client.client.config.update(version="2", capture_body="off")
resp = flask_apm_client.app.test_client().post("/users/", data={"foo": "bar"})
resp.close()
error = flask_apm_client.client.events[TRANSACTION][1]
assert error["context"]["request"]["body"] == "[REDACTED]"
transaction = flask_apm_client.client.events[TRANSACTION][1]
assert transaction["context"]["request"]["body"] == "[REDACTED]"


def test_capture_headers_config_is_dynamic_for_errors(flask_apm_client):
flask_apm_client.client.config.update(version="1", capture_headers=True)
resp = flask_apm_client.app.test_client().post("/an-error/", data={"foo": "bar"})
resp.close()
error = flask_apm_client.client.events[ERROR][0]
assert error["context"]["request"]["headers"]

flask_apm_client.client.config.update(version="2", capture_headers=False)
resp = flask_apm_client.app.test_client().post("/an-error/", data={"foo": "bar"})
resp.close()
error = flask_apm_client.client.events[ERROR][1]
assert "headers" not in error["context"]["request"]


def test_capture_headers_config_is_dynamic_for_transactions(flask_apm_client):
flask_apm_client.client.config.update(version="1", capture_headers=True)
resp = flask_apm_client.app.test_client().post("/users/", data={"foo": "bar"})
resp.close()
transaction = flask_apm_client.client.events[TRANSACTION][0]
assert transaction["context"]["request"]["headers"]

flask_apm_client.client.config.update(version="2", capture_headers=False)
resp = flask_apm_client.app.test_client().post("/users/", data={"foo": "bar"})
resp.close()
transaction = flask_apm_client.client.events[TRANSACTION][1]
assert "headers" not in transaction["context"]["request"]


@pytest.mark.parametrize("elasticapm_client", [{"capture_body": "transactions"}], indirect=True)
72 changes: 62 additions & 10 deletions tests/transports/test_base.py
@@ -84,8 +84,9 @@ def test_transport_state_set_success():


@mock.patch("elasticapm.transport.base.Transport.send")
def test_empty_queue_flush_is_not_sent(mock_send):
transport = Transport(client=None, metadata={"x": "y"}, max_flush_time=5)
@pytest.mark.parametrize("elasticapm_client", [{"api_request_time": "5s"}], indirect=True)
def test_empty_queue_flush_is_not_sent(mock_send, elasticapm_client):
transport = Transport(client=elasticapm_client, metadata={"x": "y"})
try:
transport.start_thread()
transport.flush()
@@ -95,8 +96,9 @@ def test_empty_queue_flush_is_not_sent(mock_send):


@mock.patch("elasticapm.transport.base.Transport.send")
def test_metadata_prepended(mock_send):
transport = Transport(client=None, metadata={"x": "y"}, max_flush_time=5, compress_level=0)
@pytest.mark.parametrize("elasticapm_client", [{"api_request_time": "5s"}], indirect=True)
def test_metadata_prepended(mock_send, elasticapm_client):
transport = Transport(client=elasticapm_client, metadata={"x": "y"}, compress_level=0)
transport.start_thread()
transport.queue("error", {}, flush=True)
transport.close()
@@ -111,9 +113,10 @@ def test_metadata_prepended(mock_send):


@mock.patch("elasticapm.transport.base.Transport.send")
def test_flush_time(mock_send, caplog):
@pytest.mark.parametrize("elasticapm_client", [{"api_request_time": "100ms"}], indirect=True)
def test_flush_time(mock_send, caplog, elasticapm_client):
with caplog.at_level("DEBUG", "elasticapm.transport"):
transport = Transport(client=None, metadata={}, max_flush_time=0.1)
transport = Transport(client=elasticapm_client, metadata={})
transport.start_thread()
# let first run finish
time.sleep(0.2)
@@ -123,9 +126,57 @@ def test_flush_time(mock_send, caplog):
assert mock_send.call_count == 0


@mock.patch("elasticapm.transport.base.Transport.send")
def test_api_request_time_dynamic(mock_send, caplog, elasticapm_client):
elasticapm_client.config.update(version="1", api_request_time="1s")
with caplog.at_level("DEBUG", "elasticapm.transport"):
transport = Transport(client=elasticapm_client, metadata={})
transport.start_thread()
# let first run finish
time.sleep(0.2)
transport.close()
assert not caplog.records
assert mock_send.call_count == 0
elasticapm_client.config.update(version="1", api_request_time="100ms")
with caplog.at_level("DEBUG", "elasticapm.transport"):
transport = Transport(client=elasticapm_client, metadata={})
transport.start_thread()
# let first run finish
time.sleep(0.2)
transport.close()
record = caplog.records[0]
assert "due to time since last flush" in record.message
assert mock_send.call_count == 0


@mock.patch("elasticapm.transport.base.Transport._flush")
def test_api_request_size_dynamic(mock_flush, caplog, elasticapm_client):
elasticapm_client.config.update(version="1", api_request_size="100b")
transport = Transport(client=elasticapm_client, metadata={}, queue_chill_count=1)
transport.start_thread()
try:
with caplog.at_level("DEBUG", "elasticapm.transport"):
# we need to add lots of uncompressible data to fill up the gzip-internal buffer
for i in range(12):
transport.queue("error", "".join(random.choice(string.ascii_letters) for i in range(2000)))
transport._flushed.wait(timeout=0.1)
assert mock_flush.call_count == 1
elasticapm_client.config.update(version="1", api_request_size="1mb")
with caplog.at_level("DEBUG", "elasticapm.transport"):
# we need to add lots of uncompressible data to fill up the gzip-internal buffer
for i in range(12):
transport.queue("error", "".join(random.choice(string.ascii_letters) for i in range(2000)))
transport._flushed.wait(timeout=0.1)
# Should be unchanged because our buffer limit is much higher.
assert mock_flush.call_count == 1
finally:
transport.close()


@mock.patch("elasticapm.transport.base.Transport._flush")
def test_flush_time_size(mock_flush, caplog):
transport = Transport(client=None, metadata={}, max_buffer_size=100, queue_chill_count=1)
@pytest.mark.parametrize("elasticapm_client", [{"api_request_size": "100b"}], indirect=True)
def test_flush_time_size(mock_flush, caplog, elasticapm_client):
transport = Transport(client=elasticapm_client, metadata={}, queue_chill_count=1)
transport.start_thread()
try:
with caplog.at_level("DEBUG", "elasticapm.transport"):
@@ -139,8 +190,9 @@


@mock.patch("elasticapm.transport.base.Transport.send")
def test_forced_flush(mock_send, caplog):
transport = Transport(client=None, metadata={}, max_buffer_size=1000, compress_level=0)
@pytest.mark.parametrize("elasticapm_client", [{"api_request_size": "1000b"}], indirect=True)
def test_forced_flush(mock_send, caplog, elasticapm_client):
transport = Transport(client=elasticapm_client, metadata={}, compress_level=0)
transport.start_thread()
try:
with caplog.at_level("DEBUG", "elasticapm.transport"):