Skip to content

Commit

Permalink
Use the new RequestsWrapper for connecting to services (#4094)
Browse files Browse the repository at this point in the history
* Use the new RequestsWrapper for connecting to services

* Apply suggestions from code review

Co-Authored-By: Pierre Guceski <[email protected]>

* indent
  • Loading branch information
ofek authored Jul 11, 2019
1 parent 7a0b2a7 commit 7c7b6a2
Show file tree
Hide file tree
Showing 6 changed files with 239 additions and 93 deletions.
261 changes: 211 additions & 50 deletions mapreduce/datadog_checks/mapreduce/data/conf.yaml.example
Original file line number Diff line number Diff line change
@@ -1,3 +1,76 @@
init_config:

## @param general_counters - list of objects - optional
##
## `general_counters` are job agnostic metrics that create a metric for each specified counter
## Create an object with the following layout:
##
## - counter_group_name: '<COUNTER_GROUP_NAME>'
## counters:
## - counter_name: 'MAP_INPUT_RECORDS'
## - counter_name: 'MAP_OUTPUT_RECORDS'
## - counter_name: 'REDUCE_INPUT_RECORDS'
## - counter_name: 'REDUCE_OUTPUT_RECORDS'
##
## For more information on counters visit the MapReduce documentation page:
## https://hadoop.apache.org/docs/current/hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapredAppMasterRest.html#Job_Counters_API #noqa
#
# general_counters:
# - counter_group_name: 'org.apache.hadoop.mapreduce.FileSystemCounter'
# counters:
# - counter_name: 'HDFS_BYTES_READ'

## @param job_specific_counters - list of objects - optional
## `job_specific_counters` are metrics that are specific to a particular job.
## Create an object with the following layout:
##
## - job_name: <JOB_NAME>
## metrics:
## - counter_group_name: <COUNTER_GROUP_NAME>
## counters:
## - counter_name: <COUNTER_NAME>
##
## For more information on counters visit the MapReduce documentation page:
## https://hadoop.apache.org/docs/current/hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapredAppMasterRest.html#Job_Counters_API #noqa
#
# job_specific_counters:
# - job_name: '<JOB_NAME>'
# metrics:
# - counter_group_name: 'org.apache.hadoop.mapreduce.FileSystemCounter'
# counters:
# - counter_name: 'FILE_BYTES_WRITTEN'
# - counter_name: 'HDFS_BYTES_WRITTEN'
# - counter_group_name: 'org.apache.hadoop.mapreduce.FileSystemCounter'
# counters:
# - counter_name: 'HDFS_BYTES_READ'

## @param proxy - object - optional
## Set HTTP or HTTPS proxies for all instances. Use the `no_proxy` list
## to specify hosts that must bypass proxies.
##
## The SOCKS protocol is also supported like so:
##
## socks5://user:pass@host:port
##
## Using the scheme `socks5` causes the DNS resolution to happen on the
## client, rather than on the proxy server. This is in line with `curl`,
## which uses the scheme to decide whether to do the DNS resolution on
## the client or proxy. If you want to resolve the domains on the proxy
## server, use `socks5h` as the scheme.
#
# proxy:
# http: http://<PROXY_SERVER_FOR_HTTP>:<PORT>
# https: https://<PROXY_SERVER_FOR_HTTPS>:<PORT>
# no_proxy:
# - <HOSTNAME_1>
# - <HOSTNAME_2>

## @param skip_proxy - boolean - optional - default: false
## If set to `true`, this makes the check bypass any proxy
## settings enabled and attempt to reach services directly.
#
# skip_proxy: false

instances:

## @param resourcemanager_uri - string - required
Expand Down Expand Up @@ -33,63 +106,151 @@ instances:
# - <KEY_1>:<VALUE_1>
# - <KEY_2>:<VALUE_2>

## @param proxy - object - optional
## This overrides the `proxy` setting in `init_config`.
##
## Set HTTP or HTTPS proxies. Use the `no_proxy` list
## to specify hosts that must bypass proxies.
##
## The SOCKS protocol is also supported like so:
##
## socks5://user:pass@host:port
##
## Using the scheme `socks5` causes the DNS resolution to happen on the
## client, rather than on the proxy server. This is in line with `curl`,
## which uses the scheme to decide whether to do the DNS resolution on
## the client or proxy. If you want to resolve the domains on the proxy
## server, use `socks5h` as the scheme.
#
# proxy:
# http: http://<PROXY_SERVER_FOR_HTTP>:<PORT>
# https: https://<PROXY_SERVER_FOR_HTTPS>:<PORT>
# no_proxy:
# - <HOSTNAME_1>
# - <HOSTNAME_2>

## @param skip_proxy - boolean - optional - default: false
## This overrides the `skip_proxy` setting in `init_config`.
##
## If set to `true`, this makes the check bypass any proxy
## settings enabled and attempt to reach services directly.
#
# skip_proxy: false

## @param username - string - optional
## If your service uses basic HTTP authentication, set here the username required.
## The username to use if services are behind basic auth.
#
# username: <USERNAME>

## @param ntlm_domain - string - optional
## If your service uses NTLM authentication, you can
## specify a domain that is used in the check. For NTLM auth,
## append the username to the domain, not as the `username` parameter.
## Example: <NTLM_DOMAIN>/<USERNAME>
#
# user: <USERNAME>
# ntlm_domain: <DOMAIN>

## @param password - string - optional
## If your service uses basic HTTP authentication, set here the password required.
## The password to use if services are behind basic or NTLM auth.
#
# password: <PASSWORD>

## @param ssl_verify - boolean - optional - default: true
## Instruct the check to validate SSL certificates when connecting to "resourcemanager_uri"
## @param kerberos_auth - string - optional - default: disabled
## If your service uses Kerberos authentication, you can specify the Kerberos
## strategy to use between:
## * required
## * optional
## * disabled
##
## See https://github.com/requests/requests-kerberos#mutual-authentication
#
# ssl_verify: true
# kerberos_auth: disabled

init_config:
## @param kerberos_delegate - boolean - optional - default: false
## Set to `true` to enable Kerberos delegation of credentials to a server that requests delegation.
## See https://github.com/requests/requests-kerberos#delegation
#
# kerberos_delegate: false

## @param kerberos_force_initiate - boolean - optional - default: false
## Set to `true` to preemptively initiate the Kerberos GSS exchange and present a Kerberos ticket on the initial
## request (and all subsequent requests).
## See https://github.com/requests/requests-kerberos#preemptive-authentication
#
# kerberos_force_initiate: false

## @param kerberos_hostname - string - optional
## Override the hostname used for the Kerberos GSS exchange if its DNS name doesn't match its Kerberos
## hostname (e.g., behind a content switch or load balancer).
## See https://github.com/requests/requests-kerberos#hostname-override
#
# kerberos_hostname: null

## @param kerberos_principal - string - optional
## Set an explicit principal, to force Kerberos to look for a matching credential cache for the named user.
## See https://github.com/requests/requests-kerberos#explicit-principal
#
# kerberos_principal: null

## @param kerberos_keytab - string - optional
## Set the path to your Kerberos key tab file.
#
# kerberos_keytab: <KEYTAB_FILE_PATH>

## @param tls_verify - boolean - optional - default: true
## Instructs the check to validate the TLS certificate of services.
#
# tls_verify: true

## @param tls_ignore_warning - boolean - optional - default: false
## If `tls_verify` is disabled, security warnings are logged by the check.
## Disable those by setting `tls_ignore_warning` to true.
#
# tls_ignore_warning: false

## @param general_counters - list of objects - optional
##
## general_counters are job agnostic metrics that create a metric for each specified counter
## Create an object with the following layout:
##
## - counter_group_name: '<COUNTER_GROUP_NAME>'
## counters:
## - counter_name: 'MAP_INPUT_RECORDS'
## - counter_name: 'MAP_OUTPUT_RECORDS'
## - counter_name: 'REDUCE_INPUT_RECORDS'
## - counter_name: 'REDUCE_OUTPUT_RECORDS'
##
## For more information on counters visit the MapReduce documentation page:
## https://hadoop.apache.org/docs/current/hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapredAppMasterRest.html#Job_Counters_API #noqa
#
# general_counters:
# - counter_group_name: 'org.apache.hadoop.mapreduce.FileSystemCounter'
# counters:
# - counter_name: 'HDFS_BYTES_READ'

## @param job_specific_counters - list of objects - optional
## job_specific_counters are metrics that are specific to a particular job.
## Create an object with the following layout:
##
## - job_name: <JOB_NAME>
## metrics:
## - counter_group_name: <COUNTER_GROUP_NAME>
## counters:
## - counter_name: <COUNTER_NAME>
##
## For more information on counters visit the MapReduce documentation page:
## https://hadoop.apache.org/docs/current/hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapredAppMasterRest.html#Job_Counters_API #noqa
#
# job_specific_counters:
# - job_name: '<JOB_NAME>'
# metrics:
# - counter_group_name: 'org.apache.hadoop.mapreduce.FileSystemCounter'
# counters:
# - counter_name: 'FILE_BYTES_WRITTEN'
# - counter_name: 'HDFS_BYTES_WRITTEN'
# - counter_group_name: 'org.apache.hadoop.mapreduce.FileSystemCounter'
# counters:
# - counter_name: 'HDFS_BYTES_READ'
## @param tls_cert - string - optional
## The path to a single file in PEM format containing a certificate as well as any
## number of CA certificates needed to establish the certificate's authenticity for
## use when connecting to services. It may also contain an unencrypted private key to use.
#
# tls_cert: <CERT_PATH>

## @param tls_private_key - string - optional
## The unencrypted private key to use for `tls_cert` when connecting to services. This is
## required if `tls_cert` is set and it does not already contain a private key.
#
# tls_private_key: <PRIVATE_KEY_PATH>

## @param tls_ca_cert - string - optional
## The path to a file of concatenated CA certificates in PEM format or a directory
## containing several CA certificates in PEM format. If a directory, the directory
## must have been processed using the c_rehash utility supplied with OpenSSL. See:
## https://www.openssl.org/docs/manmaster/man3/SSL_CTX_load_verify_locations.html
#
# tls_ca_cert: <CA_CERT_PATH>

## @param headers - list of key:value elements - optional
## The headers parameter allows you to send specific headers with every request.
## You can use it for explicitly specifying the host header or adding headers for
## authorization purposes.
##
## This overrides any default headers.
#
# headers:
# Host: <ALTERNATIVE_HOSTNAME>
# X-Auth-Token: <AUTH_TOKEN>

## @param timeout - integer - optional - default: 10
## The timeout for connecting to services.
#
# timeout: 10

## @param log_requests - boolean - optional - default: false
## Whether or not to debug log the HTTP(S) requests made, including the method and URL.
#
# log_requests: false

## @param persist_connections - boolean - optional - default: false
## Whether or not to persist cookies and use connection pooling for increased performance.
#
# persist_connections: false
Loading

0 comments on commit 7c7b6a2

Please sign in to comment.