From ac556d673417fdbc198b2b293c7ac906bd467ad4 Mon Sep 17 00:00:00 2001 From: Andrew Dryga Date: Thu, 11 Apr 2024 17:17:07 -0600 Subject: [PATCH] Rewrite logger backends to formatters --- .github/workflows/ci.yml | 72 +++ .travis.yml | 37 -- README.md | 344 ++++++------- config/test.exs | 5 +- coveralls.json | 15 - lib/logger_json.ex | 79 +-- lib/logger_json/ecto.ex | 104 ---- lib/logger_json/formatter.ex | 29 -- lib/logger_json/formatter/code.ex | 8 + lib/logger_json/formatter/datetime.ex | 37 ++ lib/logger_json/formatter/encoder.ex | 71 +++ lib/logger_json/formatter/map_builder.ex | 13 + lib/logger_json/formatter/message.ex | 41 ++ lib/logger_json/formatter/metadata.ex | 60 +++ lib/logger_json/formatter/plug.ex | 36 ++ lib/logger_json/formatter_utils.ex | 77 --- lib/logger_json/formatters/basic.ex | 96 ++++ lib/logger_json/formatters/basic_logger.ex | 33 -- lib/logger_json/formatters/datadog.ex | 213 ++++++++ lib/logger_json/formatters/datadog_logger.ex | 147 ------ lib/logger_json/formatters/google_cloud.ex | 316 ++++++++++++ .../formatters/google_cloud_logger.ex | 85 ---- .../formatters/google_error_reporter.ex | 64 --- lib/logger_json/jason_safe_formatter.ex | 79 --- lib/logger_json/plug.ex | 50 -- .../metadata_formatters/datadog_logger.ex | 77 --- .../plug/metadata_formatters/elk.ex | 72 --- .../google_cloud_logger.ex | 85 ---- lib/logger_json/plug_utils.ex | 35 -- mix.exs | 30 +- mix.lock | 33 +- test/formatter/encoder_test.exs | 100 ++++ test/formatters/basic_test.exs | 210 ++++++++ test/formatters/datadog_test.exs | 261 ++++++++++ test/formatters/google_cloud_test.exs | 401 ++++++++++++++++ test/support/logger_case.ex | 34 +- test/test_helper.exs | 2 +- test/unit/logger_json/ecto_test.exs | 115 ----- .../formatters/basic_logger_test.exs | 94 ---- .../formatters/datadog_logger_test.exs | 453 ------------------ .../formatters/google_cloud_logger_test.exs | 360 -------------- .../formatters/google_error_reporter_test.exs | 95 ---- 
.../logger_json/json_safe_formatter_test.exs | 99 ---- test/unit/logger_json/plug_test.exs | 108 ----- 44 files changed, 2157 insertions(+), 2618 deletions(-) create mode 100644 .github/workflows/ci.yml delete mode 100644 .travis.yml delete mode 100644 coveralls.json delete mode 100644 lib/logger_json/ecto.ex delete mode 100644 lib/logger_json/formatter.ex create mode 100644 lib/logger_json/formatter/code.ex create mode 100644 lib/logger_json/formatter/datetime.ex create mode 100644 lib/logger_json/formatter/encoder.ex create mode 100644 lib/logger_json/formatter/map_builder.ex create mode 100644 lib/logger_json/formatter/message.ex create mode 100644 lib/logger_json/formatter/metadata.ex create mode 100644 lib/logger_json/formatter/plug.ex delete mode 100644 lib/logger_json/formatter_utils.ex create mode 100644 lib/logger_json/formatters/basic.ex delete mode 100644 lib/logger_json/formatters/basic_logger.ex create mode 100644 lib/logger_json/formatters/datadog.ex delete mode 100644 lib/logger_json/formatters/datadog_logger.ex create mode 100644 lib/logger_json/formatters/google_cloud.ex delete mode 100644 lib/logger_json/formatters/google_cloud_logger.ex delete mode 100644 lib/logger_json/formatters/google_error_reporter.ex delete mode 100644 lib/logger_json/jason_safe_formatter.ex delete mode 100644 lib/logger_json/plug.ex delete mode 100644 lib/logger_json/plug/metadata_formatters/datadog_logger.ex delete mode 100644 lib/logger_json/plug/metadata_formatters/elk.ex delete mode 100644 lib/logger_json/plug/metadata_formatters/google_cloud_logger.ex delete mode 100644 lib/logger_json/plug_utils.ex create mode 100644 test/formatter/encoder_test.exs create mode 100644 test/formatters/basic_test.exs create mode 100644 test/formatters/datadog_test.exs create mode 100644 test/formatters/google_cloud_test.exs delete mode 100644 test/unit/logger_json/ecto_test.exs delete mode 100644 test/unit/logger_json/formatters/basic_logger_test.exs delete mode 100644 
test/unit/logger_json/formatters/datadog_logger_test.exs delete mode 100644 test/unit/logger_json/formatters/google_cloud_logger_test.exs delete mode 100644 test/unit/logger_json/formatters/google_error_reporter_test.exs delete mode 100644 test/unit/logger_json/json_safe_formatter_test.exs delete mode 100644 test/unit/logger_json/plug_test.exs diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..7f0a3a5 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,72 @@ +name: Elixir + +on: + push: + branches: [ "master" ] + pull_request: + branches: [ "master" ] + +permissions: + contents: read + +jobs: + build: + name: Build and Test + runs-on: ubuntu-latest + permissions: + # required by test reporter + pull-requests: write + checks: write + issues: write + statuses: write + strategy: + matrix: + include: + - otp-version: 24.3 + elixir-version: 1.16.0 + - otp-version: 25.0 + elixir-version: 1.16.0 + check-formatted: true + report-coverage: true + steps: + - uses: actions/checkout@v3 + - name: Set up Elixir + uses: erlef/setup-beam@988e02bfe678367a02564f65ca2e37726dc0268f + with: + elixir-version: ${{ matrix.elixir-version }} + otp-version: ${{ matrix.otp-version }} + - name: Restore dependencies cache + uses: actions/cache@v3 + with: + path: | + deps + _build + key: deps-${{ runner.os }}-mix-${{ hashFiles('**/mix.lock') }}-elixir-${{ matrix.elixir-version }}-otp-${{ matrix.otp-version }} + - name: Install and compile dependencies + env: + MIX_ENV: test + run: mix do deps.get, deps.compile + - name: Make sure code is formatted + env: + MIX_ENV: test + if: ${{ matrix.check-formatted == true }} + run: mix format --check-formatted + - name: Run tests + env: + MIX_ENV: test + run: mix test --exclude pending + - name: Test Report + env: + MIX_ENV: test + uses: dorny/test-reporter@v1 + if: success() || failure() + with: + name: Mix Tests on Elixir ${{ matrix.elixir-version }} / OTP ${{ matrix.otp-version }} + path: 
_build/test/lib/logger_json/test-junit-report.xml + reporter: java-junit + - name: Report code coverage + env: + MIX_ENV: test + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + if: ${{ matrix.report-coverage == true }} + run: mix coveralls.github diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 958af2c..0000000 --- a/.travis.yml +++ /dev/null @@ -1,37 +0,0 @@ -language: elixir -cache: - directories: - - deps -elixir: - - 1.12.1 -otp_release: - - 24.0 -matrix: - include: - - otp_release: 21.1 - elixir: 1.9.4 - - otp_release: 22.1 - elixir: 1.9.4 - - otp_release: 22.2 - elixir: 1.10.4 - - otp_release: 23.3.1 - elixir: 1.11.4 - - otp_release: 24.0 - elixir: 1.11.4 - - otp_release: 24.0 - elixir: 1.12.1 -env: - global: - - MIX_ENV=test -script: - # Install dependencies - - "mix deps.get" - # Run all tests except pending ones - - "mix test --exclude pending --trace" - # Submit code coverage report to Coveralls - - "mix coveralls.travis" - # Ensure code is formatted - - > - if [[ $(elixir -v) == *"Elixir 1.12.1"* ]]; then - mix format --check-formatted - fi diff --git a/README.md b/README.md index 79a7fa2..dd03bcf 100644 --- a/README.md +++ b/README.md @@ -1,250 +1,184 @@ # LoggerJSON [![Build Status](https://travis-ci.org/Nebo15/logger_json.svg?branch=master)](https://travis-ci.org/Nebo15/logger_json) -[![Coverage Status](https://coveralls.io/repos/github/Nebo15/logger_json/badge.svg?branch=master)](https://coveralls.io/github/Nebo15/logger_json?branch=master) [![Module Version](https://img.shields.io/hexpm/v/logger_json.svg)](https://hex.pm/packages/logger_json) [![Hex Docs](https://img.shields.io/badge/hex-docs-lightgreen.svg)](https://hexdocs.pm/logger_json/) [![Hex Download Total](https://img.shields.io/hexpm/dt/logger_json.svg)](https://hex.pm/packages/logger_json) [![License](https://img.shields.io/hexpm/l/logger_json.svg)](https://github.com/Nebo15/logger_json/blob/master/LICENSE) [![Last
Updated](https://img.shields.io/github/last-commit/Nebo15/logger_json.svg)](https://github.com/Nebo15/logger_json/commits/master) -JSON console back-end for Elixir Logger. +A collection of formatters and utilities for JSON-based logging for various cloud tools and platforms. -It can be used as drop-in replacement for default `:console` Logger back-end in cases where you use Google Cloud Logger, DataDog or other JSON-based log collectors. After adding this back-end you may also be interested in [redirecting otp and sasl reports to Logger](https://hexdocs.pm/logger/Logger.html#error-logger-configuration) (see "Error Logger configuration" section). +## Supported formatters -Minimum supported Erlang/OTP version is 20. +- `LoggerJSON.Formatters.Basic` - a basic JSON formatter that logs messages in a structured format, + can be used with any JSON-based logging system, like ElasticSearch, Logstash, etc. -## Log Format +- `LoggerJSON.Formatters.GoogleCloud` - a formatter that logs messages in a structured format that can be + consumed by Google Cloud Logger and Google Cloud Error Reporter. -LoggerJSON provides three JSON formatters out of the box and allows developers to implement a custom one. +- `LoggerJSON.Formatters.Datadog` - a formatter that logs messages in a structured format that can be consumed + by Datadog. -### BasicLogger - -The `LoggerJSON.Formatters.BasicLogger` formatter provides a generic JSON formatted message with no vendor specific entries in the payload. 
A sample log entry from `LoggerJSON.Formatters.BasicLogger` looks like the following: - -```json -{ - "time": "2020-04-02T11:59:06.710Z", - "severity": "debug", - "message": "hello", - "metadata": { - "user_id": 13 - } -} -``` - -### GoogleCloudLogger - -Generates JSON that is compatible with the [Google Cloud Logger LogEntry](https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry) format: - -```json -{ - "message": "hello", - "logging.googleapis.com/sourceLocation": { - "file": "/os/logger_json/test/unit/logger_json_test.exs", - "function": "Elixir.LoggerJSONGoogleTest.test metadata can be configured/1", - "line": 71 - }, - "severity": "DEBUG", - "time": "2018-10-19T01:10:49.582Z", - "user_id": 13 -} -``` +## Installation -Notice that GKE doesn't allow to set certain fields of the LogEntry, so support is limited. The results in Google Cloud Logger would looks something like this: - -```json -{ - "httpRequest": { - "latency": "0.350s", - "remoteIp": "::ffff:10.142.0.2", - "requestMethod": "GET", - "requestPath": "/", - "requestUrl": "http://10.16.0.70/", - "status": 200, - "userAgent": "kube-probe/1.10+" - }, - "insertId": "1g64u74fgmqqft", - "jsonPayload": { - "message": "", - "phoenix": { - "action": "index", - "controller": "Elixir.MyApp.Web.PageController" - }, - "request_id": "2lfbl1r3m81c40e5v40004c2", - "vm": { - "hostname": "myapp-web-66979fc-vbk4q", - "pid": 1 - } - }, - "logName": "projects/hammer-staging/logs/stdout", - "metadata": { - "systemLabels": {}, - "userLabels": {} - }, - "operation": { - "id": "2lfbl1r3m81c40e5v40004c2" - }, - "receiveTimestamp": "2018-10-18T14:33:35.515253723Z", - "resource": {}, - "severity": "INFO", - "sourceLocation": { - "file": "iex", - "function": "Elixir.LoggerJSON.Plug.call/2", - "line": "36" - }, - "timestamp": "2018-10-18T14:33:33.263Z" -} -``` +Add `logger_json` to your list of dependencies in `mix.exs`: -### DatadogLogger - -Adheres to the [default standard attribute 
list](https://docs.datadoghq.com/logs/processing/attributes_naming_convention/#default-standard-attribute-list). - -```json -{ - "domain": ["elixir"], - "duration": 3863403, - "http": { - "url": "http://localhost/create-account", - "status_code": 200, - "method": "GET", - "referer": "http://localhost:4000/login", - "request_id": "http_FlDCOItxeudZJ20AAADD", - "useragent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36", - "url_details": { - "host": "localhost", - "port": 4000, - "path": "/create-account", - "queryString": "", - "scheme": "http" - } - }, - "logger": { - "thread_name": "#PID<0.1042.0>", - "method_name": "Elixir.LoggerJSON.Plug.call/2" - }, - "message": "", - "network": { - "client": { - "ip": "127.0.0.1" - } - }, - "phoenix": { - "controller": "Elixir.RecognizerWeb.Accounts.UserRegistrationController", - "action": "new" - }, - "request_id": "http_FlDCOItxeudZJ20AAADD", - "syslog": { - "hostname": [10, 10, 100, 100, 100, 100, 100], - "severity": "info", - "timestamp": "2020-12-14T19:16:55.088Z" - } -} -``` + def deps do + [ + # ... + {:logger_json, "~> 6.0"} + # ... + ] + end -### Custom formatters +and install it running `mix deps.get`. -You can change this structure by implementing `LoggerJSON.Formatter` behaviour and passing module -name to `:formatter` config option. Example module can be found in `LoggerJSON.Formatters.GoogleCloudLogger`. +Then, enable the formatter in your `config.exs`: -```ex -config :logger_json, :backend, - formatter: MyFormatterImplementation -``` + config :logger, :default_handler, + formatter: {LoggerJSON.Formatters.Basic, []} -## Installation +or during runtime (eg. in your `application.ex`): -It's [available on Hex](https://hex.pm/packages/logger_json), the package can be installed as: + :logger.update_handler_config(:default, :formatter, {Basic, []}) -1. 
Add `:logger_json` to your list of dependencies in `mix.exs`: +Additionally, you may also be interested in [redirecting otp reports to Logger](https://hexdocs.pm/logger/Logger.html#module-configuration) (see "Configuration" section). -```ex -def deps do - [{:logger_json, "~> 5.1"}] -end -``` +## Configuration -2. Set configuration in your `config/config.exs`: +Configuration can be set using 2nd element of the tuple of the `:formatter` option in `Logger` configuration. +For example in `config.exs`: -```ex -config :logger_json, :backend, - metadata: :all, - json_encoder: Jason, - formatter: LoggerJSON.Formatters.GoogleCloudLogger + config :logger, :default_handler, + formatter: {LoggerJSON.Formatters.GoogleCloud, metadata: :all, project_id: "logger-101"} -``` +or during runtime: -Some integrations (for eg. Plug) use `metadata` to log request and response parameters. You can reduce log size by replacing `:all` (which means log all metadata) with a list of the ones that you actually need. + :logger.update_handler_config(:default, :formatter, {Basic, metadata: {:all_except, [:conn]}}) -Beware that LoggerJSON always ignores [some metadata keys](https://github.com/Nebo15/logger_json/blob/349c8174886135a02bb16317f76beac89d1aa20d/lib/logger_json.ex#L46), but formatters like `GoogleCloudLogger` and `DatadogLogger` still persist those metadata values into a structured output. This behavior is similar to the default Elixir logger backend. +## Docs -3. Replace default Logger `:console` back-end with `LoggerJSON`: +The docs can be found at [https://hexdocs.pm/logger_json](https://hexdocs.pm/logger_json). -```ex -config :logger, - backends: [LoggerJSON] -``` +## Examples -4. Optionally. 
Log requests and responses by replacing a `Plug.Logger` in your endpoint with a: +### Basic -```ex -plug LoggerJSON.Plug +```elixir +%{ + "message" => "Hello", + "metadata" => %{"domain" => ["elixir"]}, + "severity" => "notice", + "time" => "2024-04-11T21:31:01.403Z" +} ``` -`LoggerJSON.Plug` is configured by default to use `LoggerJSON.Plug.MetadataFormatters.GoogleCloudLogger`. -You can replace it with the `:metadata_formatter` config option. - -5. Optionally. Use Ecto telemetry for additional metadata: +### Google Cloud Logger -Attach telemetry handler for Ecto events in `start/2` function in `application.ex` +Follows the [Google Cloud Logger LogEntry](https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry) format, +for more details see [special fields in structured payloads](https://cloud.google.com/logging/docs/agent/configuration#special_fields_in_structured_payloads). -```ex -:ok = - :telemetry.attach( - "logger-json-ecto", - [:my_app, :repo, :query], - &LoggerJSON.Ecto.telemetry_logging_handler/4, - :debug - ) +```elixir +%{ + "logging.googleapis.com/operation" => %{"pid" => "#PID<0.228.0>"}, + "logging.googleapis.com/sourceLocation" => %{ + "file" => "/Users/andrew/Projects/os/logger_json/test/formatters/google_cloud_test.exs", + "function" => "Elixir.LoggerJSON.Formatters.GoogleCloudTest.test logs an LogEntry of a given level/1", + "line" => 44 + }, + "message" => %{"domain" => ["elixir"], "message" => "Hello"}, + "severity" => "NOTICE", + "time" => "2024-04-11T21:32:46.957Z" +} ``` -Prevent duplicate logging of events, by setting `log` configuration option to `false` - -```ex -config :my_app, MyApp.Repo, - adapter: Ecto.Adapters.Postgres, - log: false +Exception that can be sent to Google Cloud Error Reporter: + +```elixir +%{ + "httpRequest" => %{ + "protocol" => "HTTP/1.1", + "referer" => "http://www.example.com/", + "remoteIp" => "", + "requestMethod" => "PATCH", + "requestUrl" => "http://www.example.com/", + "status" => 503, + "userAgent" 
=> "Mozilla/5.0" + }, + "logging.googleapis.com/operation" => %{"pid" => "#PID<0.250.0>"}, + "logging.googleapis.com/sourceLocation" => %{ + "file" => "/Users/andrew/Projects/os/logger_json/test/formatters/google_cloud_test.exs", + "function" => "Elixir.LoggerJSON.Formatters.GoogleCloudTest.test logs exception http context/1", + "line" => 301 + }, + "message" => %{ + "@type" => "type.googleapis.com/google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent", + "context" => %{ + "httpRequest" => %{ + "protocol" => "HTTP/1.1", + "referer" => "http://www.example.com/", + "remoteIp" => "", + "requestMethod" => "PATCH", + "requestUrl" => "http://www.example.com/", + "status" => 503, + "userAgent" => "Mozilla/5.0" + }, + "reportLocation" => %{ + "filePath" => "/Users/andrew/Projects/os/logger_json/test/formatters/google_cloud_test.exs", + "functionName" => "Elixir.LoggerJSON.Formatters.GoogleCloudTest.test logs exception http context/1", + "lineNumber" => 301 + } + }, + "domain" => ["elixir"], + "message" => "Hello", + "serviceContext" => %{"service" => "nonode@nohost"}, + "stack_trace" => "** (EXIT from #PID<0.250.0>) :foo" + }, + "severity" => "DEBUG", + "time" => "2024-04-11T21:34:53.503Z" +} ``` -## Dynamic configuration - -For dynamically configuring the endpoint, such as loading data -from environment variables or configuration files, LoggerJSON provides -an `:on_init` option that allows developers to set a module, function -and list of arguments that is invoked when the endpoint starts. - -```ex -config :logger_json, :backend, - on_init: {YourApp.Logger, :load_from_system_env, []} +## Datadog + +Adheres to the [default standard attribute list](https://docs.datadoghq.com/logs/processing/attributes_naming_convention/#default-standard-attribute-list) +as much as possible. 
+ +```elixir +%{ + "domain" => ["elixir"], + "http" => %{ + "method" => "GET", + "referer" => "http://www.example2.com/", + "request_id" => nil, + "status_code" => 200, + "url" => "http://www.example.com/", + "url_details" => %{ + "host" => "www.example.com", + "path" => "/", + "port" => 80, + "queryString" => "", + "scheme" => "http" + }, + "useragent" => "Mozilla/5.0" + }, + "logger" => %{ + "file_name" => "/Users/andrew/Projects/os/logger_json/test/formatters/datadog_test.exs", + "line" => 239, + "method_name" => "Elixir.LoggerJSON.Formatters.DatadogTest.test logs http context/1", + "thread_name" => "#PID<0.225.0>" + }, + "message" => "Hello", + "network" => %{"client" => %{"ip" => "127.0.0.1"}}, + "syslog" => %{ + "hostname" => "MacBook-Pro", + "severity" => "debug", + "timestamp" => "2024-04-11T23:10:47.967Z" + } +} ``` -## Encoders support - -You can replace default Jason encoder with other module that supports `encode_to_iodata!/1` function and -encoding fragments. - -## Documentation - -The docs can be found at [https://hexdocs.pm/logger_json](https://hexdocs.pm/logger_json) - -## Thanks - -Many source code has been taken from original Elixir Logger `:console` back-end source code, so I want to thank all it's authors and contributors. - -Part of `LoggerJSON.Plug` module have origins from `plug_logger_json` by @bleacherreport, -originally licensed under Apache License 2.0. Part of `LoggerJSON.PlugTest` are from Elixir's Plug licensed under Apache 2. 
- ## Copyright and License Copyright (c) 2016 Nebo #15 diff --git a/config/test.exs b/config/test.exs index ec1770a..f8fa549 100644 --- a/config/test.exs +++ b/config/test.exs @@ -1,4 +1,5 @@ import Config -config :logger, backends: [LoggerJSON] -config :logger_json, :backend, json_encoder: Jason +config :logger, + handle_otp_reports: true, + handle_sasl_reports: false diff --git a/coveralls.json b/coveralls.json deleted file mode 100644 index c5128b8..0000000 --- a/coveralls.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "skip_files": [ - "test/*" - ], - "custom_stop_words": [ - "field", - "has_one", - "has_many", - "embeds_one", - "embeds_many", - "schema", - "send" - ], - "treat_no_relevant_lines_as_covered": true -} diff --git a/lib/logger_json.ex b/lib/logger_json.ex index 4d483c9..9787f99 100644 --- a/lib/logger_json.ex +++ b/lib/logger_json.ex @@ -1,51 +1,68 @@ defmodule LoggerJSON do @moduledoc """ - JSON console back-end for Elixir Logger. + A collection of formatters and utilities for JSON-based logging for various cloud tools and platforms. - It can be used as drop-in replacement for default `:console` Logger back-end in cases where you use - use Google Cloud Logger or other JSON-based log collectors. + ## Supported formatters - ## Log Format + * `LoggerJSON.Formatters.Basic` - a basic JSON formatter that logs messages in a structured format, + can be used with any JSON-based logging system, like ElasticSearch, Logstash, etc. - LoggerJSON provides three JSON formatters out of the box. + * `LoggerJSON.Formatters.GoogleCloud` - a formatter that logs messages in a structured format that can be + consumed by Google Cloud Logger and Google Cloud Error Reporter. - You can change this structure by implementing `LoggerJSON.Formatter` behaviour and passing module - name to `:formatter` config option. Example implementations can be found in `LoggerJSON.Formatters.GoogleCloudLogger`, - `LoggerJSON.Formatters.DatadogLogger`, and `LoggerJSON.Formatters.BasicLogger`. 
+ * `LoggerJSON.Formatters.Datadog` - a formatter that logs messages in a structured format that can be consumed + by Datadog. - config :logger_json, :backend, - formatter: MyFormatterImplementation + ## Installation - If your formatter supports different options, you can specify them with `:formatter_opts`. + Add `logger_json` to your list of dependencies in `mix.exs`: - config :logger_json, :backend, - formatter: LoggerJSON.Formatters.DatadogLogger, - formatter_opts: %{hostname: "example.com"} + def deps do + [ + # ... + {:logger_json, "~> 6.0"} + # ... + ] + end + + and install it running `mix deps.get`. + + Then, enable the formatter in your `config.exs`: + + config :logger, :default_handler, + formatter: {LoggerJSON.Formatters.Basic, []} + + or during runtime (eg. in your `application.ex`): + + :logger.update_handler_config(:default, :formatter, {Basic, []}) + + ## Configuration + + Configuration can be set using 2nd element of the tuple of the `:formatter` option in `Logger` configuration. + For example in `config.exs`: + + config :logger, :default_handler, + formatter: {LoggerJSON.Formatters.GoogleCloud, metadata: :all, project_id: "logger-101"} - ## Encoders support + or during runtime: - You can replace default Jason encoder with other module that supports `encode!/1` function. This can be even used - as custom formatter callback. + :logger.update_handler_config(:default, :formatter, {Basic, metadata: {:all_except, [:conn]}}) - Popular Jason alternatives: + ### Shared Options - * [poison](https://hex.pm/packages/poison). - * [exjsx](https://github.com/talentdeficit/exjsx). - * [elixir-json](https://github.com/cblage/elixir-json) - native Elixir encoder implementation. - * [jiffy](https://github.com/davisp/jiffy). + Some formatters require additional configuration options. Here are the options that are common for each formatter: - ## Dynamic configuration + * `:metadata` - a list of metadata keys to include in the log entry. 
By default, no metadata is included. + If `:all` is given, all metadata is included. If `{:all_except, keys}` is given, all metadata except + the specified keys is included. - For dynamically configuring the endpoint, such as loading data - from environment variables or configuration files, LoggerJSON provides - an `:on_init` option that allows developers to set a module, function - and list of arguments that is invoked when the endpoint starts. If you - would like to disable the `:on_init` callback function dynamically, you - can pass in `:disabled` and no callback function will be called. + ## Metadata - config :logger_json, :backend, - on_init: {YourApp.Logger, :load_from_system_env, []} + You can set some well-known metadata keys to be included in the log entry. The following keys are supported + for all formatters: + * `:conn` - the `Plug.Conn` struct. This is useful when logging HTTP requests and responses, + each formatter may use it differently. """ @behaviour :gen_event diff --git a/lib/logger_json/ecto.ex b/lib/logger_json/ecto.ex deleted file mode 100644 index 2d5a397..0000000 --- a/lib/logger_json/ecto.ex +++ /dev/null @@ -1,104 +0,0 @@ -if Code.ensure_loaded?(Ecto) do - defmodule LoggerJSON.Ecto do - @moduledoc """ - Implements the behaviour of `Ecto.LogEntry` and sends query as a string - to Logger with additional metadata: - - * query.execution_time_μs - the time spent executing the query in microseconds; - * query.decode_time_μs - the time spent decoding the result in microseconds (it may be 0); - * query.queue_time_μs - the time spent to check the connection out in microseconds (it may be 0); - * query.duration_μs - time the query taken (sum of `query_time`, `decode_time` and `queue_time`); - * connection_pid - the connection process that executed the query; - * ansi_color - the color that should be used when logging the entry.
- - For more information see [LogEntry](https://github.com/elixir-ecto/ecto/blob/master/lib/ecto/log_entry.ex) - source code. - """ - require Logger - - @doc """ - Logs query string with metadata from `Ecto.LogEntry` in with debug level. - """ - @spec log(entry :: map()) :: map() - def log(entry) do - {query, metadata} = query_and_metadata(entry) - - # The logger call will be removed at compile time if - # `compile_time_purge_level` is set to higher than debug. - Logger.debug(query, metadata) - - entry - end - - @doc """ - Overwritten to use JSON. - - Logs the given entry in the given level. - """ - @spec log(entry :: map(), level :: Logger.level()) :: map() - def log(entry, level) do - {query, metadata} = query_and_metadata(entry) - - # The logger call will not be removed at compile time, - # because we use level as a variable - Logger.log(level, query, metadata) - - entry - end - - defp query_and_metadata(%{ - query: query, - query_time: query_time, - decode_time: decode_time, - queue_time: queue_time - }) do - query_time = format_time(query_time, :native) - decode_time = format_time(decode_time, :native) - queue_time = format_time(queue_time, :native) - - metadata = [ - query: %{ - execution_time_μs: query_time, - decode_time_μs: decode_time, - queue_time_μs: queue_time, - latency_μs: query_time + decode_time + queue_time - } - ] - - {query, metadata} - end - - @spec telemetry_logging_handler( - event_name :: [atom()], - query_time :: %{ - query_time: non_neg_integer(), - queue_time: non_neg_integer(), - decode_time: non_neg_integer(), - total_time: non_neg_integer() - }, - log_entry :: map(), - level :: Logger.level() - ) :: :ok - def telemetry_logging_handler(_event_name, time, %{query: query, repo: repo}, level) do - query_time = Map.get(time, :query_time) |> format_time(:nanosecond) - queue_time = Map.get(time, :queue_time) |> format_time(:nanosecond) - decode_time = Map.get(time, :decode_time) |> format_time(:nanosecond) - latency = Map.get(time, 
:total_time) |> format_time(:nanosecond) - - metadata = [ - query: %{ - repo: inspect(repo), - execution_time_μs: query_time, - decode_time_μs: decode_time, - queue_time_μs: queue_time, - latency_μs: latency - } - ] - - Logger.log(level, query, metadata) - end - - defp format_time(nil, _unit), do: 0 - defp format_time(time, unit), do: System.convert_time_unit(time, unit, :microsecond) - end -end diff --git a/lib/logger_json/formatter.ex b/lib/logger_json/formatter.ex deleted file mode 100644 index c31765c..0000000 --- a/lib/logger_json/formatter.ex +++ /dev/null @@ -1,29 +0,0 @@ -defmodule LoggerJSON.Formatter do - @moduledoc """ - Behaviour that should be implemented by log formatters. - - Example implementations can be found in `LoggerJSON.Formatters.GoogleCloudLogger` and - `LoggerJSON.Formatters.BasicLogger`. - """ - - @doc """ - Initialization callback. Ran on startup with the given `formatter_opts` list. - - Returned list will be used as formatter_state in `format_event/6`. - """ - @callback init(Keyword.t()) :: term() - - @doc """ - Format event callback. - - Returned map will be encoded to JSON. - """ - @callback format_event( - level :: Logger.level(), - msg :: Logger.message(), - ts :: Logger.Formatter.time(), - md :: [atom] | :all, - state :: map, - formatter_state :: map - ) :: map | iodata() | %Jason.Fragment{} -end diff --git a/lib/logger_json/formatter/code.ex b/lib/logger_json/formatter/code.ex new file mode 100644 index 0000000..686ad28 --- /dev/null +++ b/lib/logger_json/formatter/code.ex @@ -0,0 +1,8 @@ +defmodule LoggerJSON.Formatter.Code do + @doc """ + Provide a string output of the MFA log entry. 
+ """ + def format_function(nil, function), do: function + def format_function(module, function), do: "#{module}.#{function}" + def format_function(module, function, arity), do: "#{module}.#{function}/#{arity}" +end diff --git a/lib/logger_json/formatter/datetime.ex b/lib/logger_json/formatter/datetime.ex new file mode 100644 index 0000000..90c92d8 --- /dev/null +++ b/lib/logger_json/formatter/datetime.ex @@ -0,0 +1,37 @@ +defmodule LoggerJSON.Formatter.DateTime do + @doc """ + Returns either a `time` taken from metadata or current time in RFC3339 UTC "Zulu" format. + """ + def utc_time(%{time: time}) when is_integer(time) and time >= 0 do + system_time_to_rfc3339(time) + end + + def utc_time(_meta) do + :os.system_time(:microsecond) + |> system_time_to_rfc3339() + end + + defp system_time_to_rfc3339(system_time) do + micro = rem(system_time, 1_000_000) + + {date, {hours, minutes, seconds}} = :calendar.system_time_to_universal_time(system_time, :microsecond) + + [format_date(date), ?T, format_time({hours, minutes, seconds, div(micro, 1000)}), ?Z] + |> IO.iodata_to_binary() + end + + defp format_time({hh, mi, ss, ms}) do + [pad2(hh), ?:, pad2(mi), ?:, pad2(ss), ?., pad3(ms)] + end + + defp format_date({yy, mm, dd}) do + [Integer.to_string(yy), ?-, pad2(mm), ?-, pad2(dd)] + end + + defp pad3(int) when int < 10, do: [?0, ?0, Integer.to_string(int)] + defp pad3(int) when int < 100, do: [?0, Integer.to_string(int)] + defp pad3(int), do: Integer.to_string(int) + + defp pad2(int) when int < 10, do: [?0, Integer.to_string(int)] + defp pad2(int), do: Integer.to_string(int) +end diff --git a/lib/logger_json/formatter/encoder.ex b/lib/logger_json/formatter/encoder.ex new file mode 100644 index 0000000..7b75c7a --- /dev/null +++ b/lib/logger_json/formatter/encoder.ex @@ -0,0 +1,71 @@ +defmodule LoggerJSON.Formatter.Encoder do + @moduledoc """ + Utilities for converting metadata into data structures that can be safely passed to `Jason.encode!/1`. 
+ """ + + @doc """ + Produces metadata that is "safe" for calling Jason.encode!/1 on without errors. + This means that unexpected Logger metadata won't cause logging crashes. + + Current formatting is... + * Maps: as is + * Printable binaries: as is + * Numbers: as is + * Structs that don't implement Jason.Encoder: converted to maps + * Tuples: converted to lists + * Keyword lists: converted to Maps + * everything else: inspected + """ + @spec encode(any()) :: any() + def encode(nil), do: nil + def encode(true), do: true + def encode(false), do: false + def encode(atom) when is_atom(atom), do: atom + def encode(tuple) when is_tuple(tuple), do: tuple |> Tuple.to_list() |> encode() + def encode(number) when is_number(number), do: number + def encode(binary) when is_binary(binary), do: encode_binary(binary) + def encode(%Jason.Fragment{} = fragment), do: fragment + + def encode(%_struct{} = struct) do + if protocol_implemented?(struct) do + struct + else + struct + |> Map.from_struct() + |> encode() + end + end + + def encode(%{} = map) do + for {key, value} <- map, into: %{}, do: {encode_map_key(key), encode(value)} + end + + def encode([{key, _} | _] = keyword) when is_atom(key) do + Enum.into(keyword, %{}, fn + {key, value} -> {encode_map_key(key), encode(value)} + end) + rescue + _ -> for(el <- keyword, do: encode(el)) + end + + def encode(list) when is_list(list), do: for(el <- list, do: encode(el)) + def encode({key, data}) when is_binary(key) or is_atom(key), do: %{encode_map_key(key) => encode(data)} + def encode(data), do: inspect(data, pretty: true, width: 80) + + defp encode_map_key(key) when is_binary(key), do: encode_binary(key) + defp encode_map_key(key) when is_atom(key) or is_number(key), do: key + defp encode_map_key(key), do: inspect(key) + + defp encode_binary(data) when is_binary(data) do + if String.valid?(data) && String.printable?(data) do + data + else + inspect(data) + end + end + + def protocol_implemented?(data) do + impl = 
Jason.Encoder.impl_for(data) + impl && impl != Jason.Encoder.Any + end +end diff --git a/lib/logger_json/formatter/map_builder.ex b/lib/logger_json/formatter/map_builder.ex new file mode 100644 index 0000000..d853ff7 --- /dev/null +++ b/lib/logger_json/formatter/map_builder.ex @@ -0,0 +1,13 @@ +defmodule LoggerJSON.Formatter.MapBuilder do + @doc """ + Optionally put a value to a map. + """ + def maybe_put(map, _key, nil), do: map + def maybe_put(map, key, value), do: Map.put(map, key, value) + + @doc """ + Optionally merge two maps. + """ + def maybe_merge(map, nil), do: map + def maybe_merge(left_map, right_map), do: Map.merge(right_map, left_map) +end diff --git a/lib/logger_json/formatter/message.ex b/lib/logger_json/formatter/message.ex new file mode 100644 index 0000000..58f8889 --- /dev/null +++ b/lib/logger_json/formatter/message.ex @@ -0,0 +1,41 @@ +defmodule LoggerJSON.Formatter.Message do + # crash + def format_message({:string, message}, %{crash_reason: crash_reason}, %{crash: crash_fmt}) do + crash_fmt.(message, crash_reason) + end + + # binary + def format_message({:string, message}, _meta, %{binary: binary_fmt}) do + binary_fmt.(message) + end + + # OTP report or structured logging data + def format_message( + {:report, data}, + %{report_cb: callback} = meta, + %{binary: binary_fmt, structured: structured_fmt} = formatters + ) do + cond do + is_function(callback, 1) and callback != (&:logger.format_otp_report/1) -> + format_message(callback.(data), meta, formatters) + + is_function(callback, 2) -> + callback.(data, %{depth: :unlimited, chars_limit: :unlimited, single_line: false}) + |> binary_fmt.() + + true -> + structured_fmt.(data) + end + end + + def format_message({:report, data}, _meta, %{structured: structured_fmt}) do + structured_fmt.(data) + end + + def format_message({format, args}, _meta, %{binary: binary_fmt}) do + format + |> Logger.Utils.scan_inspect(args, :infinity) + |> :io_lib.build_text() + |> binary_fmt.() + end +end diff --git 
a/lib/logger_json/formatter/metadata.ex b/lib/logger_json/formatter/metadata.ex new file mode 100644 index 0000000..096e963 --- /dev/null +++ b/lib/logger_json/formatter/metadata.ex @@ -0,0 +1,60 @@ +defmodule LoggerJSON.Formatter.Metadata do + @ignored_metadata_keys ~w[ansi_color initial_call crash_reason pid gl mfa report_cb time]a + + @doc """ + Takes current metadata option value and updates it to exclude the given keys. + """ + def update_metadata_selector(:all, processed_keys), + do: {:all_except, processed_keys} + + def update_metadata_selector({:all_except, except_keys}, processed_keys), + do: {:all_except, except_keys ++ processed_keys} + + def update_metadata_selector(keys, _processed_keys), + do: keys + + @doc """ + Takes metadata and returns a map with the given keys. + + The `keys` can be either a list of keys or one of the following terms: + + * `:all` - all metadata keys except the ones already processed by the formatter; + * `{:all_except, keys}` - all metadata keys except the ones given in the list and + the ones already processed by the formatter. + """ + def take_metadata(meta, {:all_except, keys}) do + meta + |> Map.drop(keys ++ @ignored_metadata_keys) + |> Enum.into(%{}) + end + + def take_metadata(meta, :all) do + meta + |> Map.drop(@ignored_metadata_keys) + |> Enum.into(%{}) + end + + def take_metadata(_meta, []) do + %{} + end + + def take_metadata(meta, keys) when is_list(keys) do + Map.take(meta, keys) + end + + @doc """ + Takes a map and a list of mappings and transforms the keys of the map according to the them. 
+ + Transformers can override each other results but the last one in this list wins + """ + def transform_metadata_keys(output, md, mappings) do + Enum.reduce(mappings, output, fn {key, {new_key, transformer}}, acc -> + if Keyword.has_key?(md, key) do + new_value = transformer.(Keyword.get(md, key)) + Map.put(acc, new_key, new_value) + else + acc + end + end) + end +end diff --git a/lib/logger_json/formatter/plug.ex b/lib/logger_json/formatter/plug.ex new file mode 100644 index 0000000..757cfec --- /dev/null +++ b/lib/logger_json/formatter/plug.ex @@ -0,0 +1,36 @@ +if Code.ensure_loaded?(Plug) do + defmodule LoggerJSON.Formatter.Plug do + alias Plug.Conn + + @doc """ + Returns the first IP address from the `x-forwarded-for` header + if it exists, otherwise returns the remote IP address. + + Please keep in mind that returning first IP address from the + `x-forwarded-for` header is not very reliable, as it can be + easily spoofed. Additionally, we do not exclude the IP addresses + from list of well-known proxies, so it's possible that the + returned IP address is not the actual client IP address. + """ + def remote_ip(conn) do + if header_value = get_header(conn, "x-forwarded-for") do + header_value + |> String.split(",") + |> hd() + |> String.trim() + else + to_string(:inet_parse.ntoa(conn.remote_ip)) + end + end + + @doc """ + Returns the first value of the given header from the request. + """ + def get_header(conn, header) do + case Conn.get_req_header(conn, header) do + [] -> nil + [val | _] -> val + end + end + end +end diff --git a/lib/logger_json/formatter_utils.ex b/lib/logger_json/formatter_utils.ex deleted file mode 100644 index 7c5f169..0000000 --- a/lib/logger_json/formatter_utils.ex +++ /dev/null @@ -1,77 +0,0 @@ -defmodule LoggerJSON.FormatterUtils do - @moduledoc """ - This module contains functions that can be used across different - `LoggerJSON.Formatter` implementations to provide common functionality. 
- """ - - import Jason.Helpers, only: [json_map: 1] - - @doc """ - Format an exception for use within a log entry. - """ - def format_process_crash(md) do - if crash_reason = Keyword.get(md, :crash_reason) do - initial_call = Keyword.get(md, :initial_call) - - json_map( - initial_call: format_initial_call(initial_call), - reason: format_crash_reason(crash_reason) - ) - end - end - - @doc """ - RFC3339 UTC "Zulu" format. - """ - def format_timestamp({date, time}) do - [format_date(date), ?T, format_time(time), ?Z] - |> IO.iodata_to_binary() - end - - @doc """ - Provide a string output of the MFA log entry. - """ - def format_function(nil, function), do: function - def format_function(module, function), do: "#{module}.#{function}" - def format_function(module, function, arity), do: "#{module}.#{function}/#{arity}" - - @doc """ - Optionally put a value to a map. - """ - def maybe_put(map, _key, nil), do: map - def maybe_put(map, key, value), do: Map.put(map, key, value) - - defp format_initial_call(nil), do: nil - defp format_initial_call({module, function, arity}), do: format_function(module, function, arity) - - defp format_crash_reason({:throw, reason}) do - Exception.format(:throw, reason) - end - - defp format_crash_reason({:exit, reason}) do - Exception.format(:exit, reason) - end - - defp format_crash_reason({%{} = exception, stacktrace}) do - Exception.format(:error, exception, stacktrace) - end - - defp format_crash_reason(other) do - inspect(other) - end - - defp format_time({hh, mi, ss, ms}) do - [pad2(hh), ?:, pad2(mi), ?:, pad2(ss), ?., pad3(ms)] - end - - defp format_date({yy, mm, dd}) do - [Integer.to_string(yy), ?-, pad2(mm), ?-, pad2(dd)] - end - - defp pad3(int) when int < 10, do: [?0, ?0, Integer.to_string(int)] - defp pad3(int) when int < 100, do: [?0, Integer.to_string(int)] - defp pad3(int), do: Integer.to_string(int) - - defp pad2(int) when int < 10, do: [?0, Integer.to_string(int)] - defp pad2(int), do: Integer.to_string(int) -end diff --git 
a/lib/logger_json/formatters/basic.ex b/lib/logger_json/formatters/basic.ex new file mode 100644 index 0000000..fb70a8a --- /dev/null +++ b/lib/logger_json/formatters/basic.ex @@ -0,0 +1,96 @@ +defmodule LoggerJSON.Formatters.Basic do + @moduledoc """ + Custom Erlang's [`:logger` formatter](https://www.erlang.org/doc/apps/kernel/logger_chapter.html#formatters) which + writes logs in a JSON format. + + For list of options see "Shared options" in `LoggerJSON`. + + ## Examples + + %{ + "message" => "Hello", + "metadata" => %{"domain" => ["elixir"]}, + "severity" => "notice", + "time" => "2024-04-11T21:31:01.403Z" + } + """ + import Jason.Helpers, only: [json_map: 1] + import LoggerJSON.Formatter.{MapBuilder, DateTime, Message, Metadata, Plug, Encoder} + + @processed_metadata_keys ~w[file line mfa + otel_span_id span_id + otel_trace_id trace_id + conn]a + + @spec format(any(), any()) :: none() + def format(%{level: level, meta: meta, msg: msg}, opts) do + metadata_keys_or_selector = Keyword.get(opts, :metadata, []) + metadata_selector = update_metadata_selector(metadata_keys_or_selector, @processed_metadata_keys) + + message = + format_message(msg, meta, %{ + binary: &format_binary_message/1, + structured: &format_structured_message/1, + crash: &format_crash_reason(&1, &2, meta) + }) + + line = + %{ + time: utc_time(meta), + severity: Atom.to_string(level), + message: encode(message), + metadata: encode(take_metadata(meta, metadata_selector)) + } + |> maybe_put(:request, format_http_request(meta)) + |> maybe_put(:span, format_span(meta)) + |> maybe_put(:trace, format_trace(meta)) + |> Jason.encode_to_iodata!(pretty: true) + + [line, "\n"] + end + + def format_binary_message(binary) do + IO.chardata_to_string(binary) + end + + def format_structured_message(map) when is_map(map) do + map + end + + def format_structured_message(keyword) do + Enum.into(keyword, %{}) + end + + def format_crash_reason(binary, _reason, _meta) do + IO.chardata_to_string(binary) + end + + if 
Code.ensure_loaded?(Plug.Conn) do + defp format_http_request(%{conn: %Plug.Conn{} = conn}) do + json_map( + connection: + json_map( + protocol: Plug.Conn.get_http_protocol(conn), + method: conn.method, + path: conn.request_path, + status: conn.status + ), + client: + json_map( + user_agent: get_header(conn, "user-agent"), + ip: remote_ip(conn) + ) + ) + end + end + + defp format_http_request(_meta), do: nil + + defp format_span(%{otel_span_id: otel_span_id}), do: IO.chardata_to_string(otel_span_id) + defp format_span(%{span_id: span_id}), do: span_id + defp format_span(_meta), do: nil + + defp format_trace(%{otel_trace_id: otel_trace_id}), do: IO.chardata_to_string(otel_trace_id) + defp format_trace(%{trace_id: trace_id}), do: trace_id + defp format_trace(_meta), do: nil +end diff --git a/lib/logger_json/formatters/basic_logger.ex b/lib/logger_json/formatters/basic_logger.ex deleted file mode 100644 index dc19f25..0000000 --- a/lib/logger_json/formatters/basic_logger.ex +++ /dev/null @@ -1,33 +0,0 @@ -defmodule LoggerJSON.Formatters.BasicLogger do - @moduledoc """ - Basic JSON log formatter with no vendor specific formatting. 
- """ - - import Jason.Helpers, only: [json_map: 1] - - alias LoggerJSON.{FormatterUtils, JasonSafeFormatter} - - @behaviour LoggerJSON.Formatter - - @processed_metadata_keys ~w[pid file line function module application]a - - @impl true - def init(_formatter_opts), do: [] - - @impl true - def format_event(level, msg, ts, md, md_keys, _formatter_state) do - json_map( - time: FormatterUtils.format_timestamp(ts), - severity: Atom.to_string(level), - message: IO.chardata_to_string(msg), - metadata: format_metadata(md, md_keys) - ) - end - - defp format_metadata(md, md_keys) do - md - |> LoggerJSON.take_metadata(md_keys, @processed_metadata_keys) - |> FormatterUtils.maybe_put(:error, FormatterUtils.format_process_crash(md)) - |> JasonSafeFormatter.format() - end -end diff --git a/lib/logger_json/formatters/datadog.ex b/lib/logger_json/formatters/datadog.ex new file mode 100644 index 0000000..e579ef3 --- /dev/null +++ b/lib/logger_json/formatters/datadog.ex @@ -0,0 +1,213 @@ +defmodule LoggerJSON.Formatters.Datadog do + @moduledoc """ + Custom Erlang's [`:logger` formatter](https://www.erlang.org/doc/apps/kernel/logger_chapter.html#formatters) which + writes logs in a structured format that can be consumed by Datadog. + + This formatter adheres to the + [default standard attribute list](https://docs.datadoghq.com/logs/processing/attributes_naming_convention/#default-standard-attribute-list) + as much as possible. + + ## Formatter Configuration + + The formatter can be configured with the following options: + + * `:hostname` (optional) - changes how the `syslog.hostname` is set in logs. By default, it uses `:system` which uses + `:inet.gethostname/0` to resolve the value. If you are running in an environment where the hostname is not correct, + you can hard code it by setting `hostname` to a string. In places where the hostname is inaccurate but also dynamic + (like Kubernetes), you can set `hostname` to `:unset` to exclude it entirely. 
You'll then be relying on + [`dd-agent`](https://docs.datadoghq.com/agent/) to determine the hostname. + + For list of shared options see "Shared options" in `LoggerJSON`. + + ## Metadata + + For list of other well-known metadata keys see "Metadata" in `LoggerJSON`. + + ## Examples + + %{ + "domain" => ["elixir"], + "logger" => %{ + "file_name" => "/Users/andrew/Projects/os/logger_json/test/formatters/datadog_test.exs", + "line" => 44, + "method_name" => "Elixir.LoggerJSON.Formatters.DatadogTest.test logs an LogEntry of a given level/1", + "thread_name" => "#PID<0.234.0>" + }, + "message" => "Hello", + "syslog" => %{ + "hostname" => "MacBook-Pro", + "severity" => "notice", + "timestamp" => "2024-04-11T23:03:39.726Z" + } + } + """ + import Jason.Helpers, only: [json_map: 1] + import LoggerJSON.Formatter.{MapBuilder, DateTime, Message, Metadata, Code, Plug, Encoder} + + @processed_metadata_keys ~w[pid file line mfa + otel_span_id span_id + otel_trace_id trace_id + conn]a + + @spec format(any(), any()) :: none() + def format(%{level: level, meta: meta, msg: msg}, opts) do + hostname = Keyword.get(opts, :hostname, :system) + + metadata_keys_or_selector = Keyword.get(opts, :metadata, []) + metadata_selector = update_metadata_selector(metadata_keys_or_selector, @processed_metadata_keys) + + message = + format_message(msg, meta, %{ + binary: &format_binary_message/1, + structured: &format_structured_message/1, + crash: &format_crash_reason(&1, &2, meta) + }) + + metadata = + take_metadata(meta, metadata_selector) + |> maybe_put(:"dd.span_id", format_span(meta)) + |> maybe_put(:"dd.trace_id", format_trace(meta)) + + line = + %{syslog: syslog(level, meta, hostname)} + |> maybe_put(:logger, format_logger(meta)) + |> maybe_merge(format_http_request(meta)) + |> maybe_merge(encode(metadata)) + |> maybe_merge(encode(message)) + |> Jason.encode_to_iodata!() + + [line, "\n"] + end + + def format_binary_message(binary) do + %{message: IO.chardata_to_string(binary)} + end + + def 
format_structured_message(map) when is_map(map) do + %{message: map} + end + + def format_structured_message(keyword) do + %{message: Enum.into(keyword, %{})} + end + + def format_crash_reason(binary, {%{} = _exception, stacktrace}, _meta) do + %{ + error: %{ + message: IO.chardata_to_string(binary), + stack: Exception.format_stacktrace(stacktrace) + } + } + end + + # https://docs.datadoghq.com/standard-attributes/?search=logger+error&product=log+management + def format_crash_reason(binary, _other, _meta) do + %{ + error: %{ + message: binary + } + } + end + + defp syslog(level, meta, :system) do + {:ok, hostname} = :inet.gethostname() + + %{ + hostname: to_string(hostname), + severity: Atom.to_string(level), + timestamp: utc_time(meta) + } + end + + defp syslog(level, meta, :unset) do + %{ + severity: Atom.to_string(level), + timestamp: utc_time(meta) + } + end + + defp syslog(level, meta, hostname) do + %{ + hostname: hostname, + severity: Atom.to_string(level), + timestamp: utc_time(meta) + } + end + + defp format_logger(%{file: file, line: line, mfa: {m, f, a}} = meta) do + %{ + thread_name: encode(meta[:pid]), + method_name: format_function(m, f, a), + file_name: IO.chardata_to_string(file), + line: line + } + end + + defp format_logger(_meta), + do: nil + + # To connect logs and traces, span_id and trace_id keys are respectively dd.span_id and dd.trace_id + # https://docs.datadoghq.com/tracing/faq/why-cant-i-see-my-correlated-logs-in-the-trace-id-panel/?tab=jsonlogs + defp format_span(%{otel_span_id: otel_span_id}), do: convert_otel_field(otel_span_id) + defp format_span(%{span_id: span_id}), do: span_id + defp format_span(_meta), do: nil + + defp format_trace(%{otel_trace_id: otel_trace_id}), do: convert_otel_field(otel_trace_id) + defp format_trace(%{trace_id: trace_id}), do: trace_id + defp format_trace(_meta), do: nil + + # This converts native OpenTelemetry fields to the native Datadog format. 
+ # This function is taken from the Datadog examples for converting. Mostly the Golang version + # https://docs.datadoghq.com/tracing/other_telemetry/connect_logs_and_traces/opentelemetry/?tab=go + # Tests were stolen from https://github.com/open-telemetry/opentelemetry-specification/issues/525 + # and https://go.dev/play/p/pUBHcLdXJNy + defp convert_otel_field(<<value::binary-size(16)>>) do + {value, _} = Integer.parse(value, 16) + Integer.to_string(value, 10) + rescue + _ -> "" + end + + defp convert_otel_field(value) when byte_size(value) < 16, do: "" + + defp convert_otel_field(value) do + value = to_string(value) + len = byte_size(value) - 16 + <<_front::binary-size(len), value::binary>> = value + convert_otel_field(value) + rescue + _ -> "" + end + + if Code.ensure_loaded?(Plug.Conn) do + defp format_http_request(%{conn: %Plug.Conn{} = conn} = meta) do + request_url = Plug.Conn.request_url(conn) + user_agent = get_header(conn, "user-agent") + remote_ip = remote_ip(conn) + referer = get_header(conn, "referer") + + %{ + http: + json_map( + url: request_url, + status_code: conn.status, + method: conn.method, + referer: referer, + request_id: meta[:request_id], + useragent: user_agent, + url_details: + json_map( + host: conn.host, + port: conn.port, + path: conn.request_path, + queryString: conn.query_string, + scheme: conn.scheme + ) + ), + network: json_map(client: json_map(ip: remote_ip)) + } + end + end + + defp format_http_request(_meta), do: nil +end diff --git a/lib/logger_json/formatters/datadog_logger.ex b/lib/logger_json/formatters/datadog_logger.ex deleted file mode 100644 index fec2f90..0000000 --- a/lib/logger_json/formatters/datadog_logger.ex +++ /dev/null @@ -1,147 +0,0 @@ -defmodule LoggerJSON.Formatters.DatadogLogger do - @moduledoc """ - [DataDog](https://www.datadoghq.com) formatter.
This will adhere to the - [default standard attribute list](https://docs.datadoghq.com/logs/processing/attributes_naming_convention/#default-standard-attribute-list) - as much as possible. - - ## Options - - This formatter has a couple of options to fine tune the logging output for - your deployed environment. - - ### `hostname` - - By setting the `hostname` value, you can change how the `syslog.hostname` is - set in logs. In most cases, you can leave this unset and it will use default - to `:system`, which uses `:inet.gethostname/0` to resolve the value. - - If you are running in an environment where the hostname is not correct, you - can hard code it by setting `hostname` to a string. In places where the - hostname is inaccurate but also dynamic (like Kubernetes), you can set - `hostname` to `:unset` to exclude it entirely. You'll then be relying on - [`dd-agent`](https://docs.datadoghq.com/agent/) to determine the hostname. - - """ - import Jason.Helpers, only: [json_map: 1] - - alias LoggerJSON.{FormatterUtils, JasonSafeFormatter} - - @behaviour LoggerJSON.Formatter - - @default_opts %{hostname: :system} - @processed_metadata_keys ~w[pid file line function module application span_id trace_id otel_span_id otel_trace_id]a - - @impl true - def init(formatter_opts) do - # Notice: we also accept formatter_opts for DataDog logger as a map for backwards compatibility - opts = Map.merge(@default_opts, Map.new(formatter_opts)) - - unless is_binary(opts.hostname) or opts.hostname in [:system, :unset] do - raise ArgumentError, - "invalid :hostname option for :formatter_opts logger_json backend. 
" <> - "Expected :system, :unset, or string, " <> "got: #{inspect(opts.hostname)}" - end - - opts - end - - @impl true - def format_event(level, msg, ts, md, md_keys, formatter_state) do - Map.merge( - %{ - logger: - json_map( - thread_name: inspect(Keyword.get(md, :pid)), - method_name: method_name(md), - file_name: Keyword.get(md, :file), - line: Keyword.get(md, :line) - ), - message: IO.chardata_to_string(msg), - syslog: syslog(level, ts, formatter_state.hostname) - }, - format_metadata(md, md_keys) - ) - end - - defp format_metadata(md, md_keys) do - LoggerJSON.take_metadata(md, md_keys, @processed_metadata_keys) - |> convert_tracing_keys(md) - |> JasonSafeFormatter.format() - |> FormatterUtils.maybe_put(:error, FormatterUtils.format_process_crash(md)) - end - - # To connect logs and traces, span_id and trace_id keys are respectively dd.span_id and dd.trace_id - # https://docs.datadoghq.com/tracing/faq/why-cant-i-see-my-correlated-logs-in-the-trace-id-panel/?tab=jsonlogs - defp convert_tracing_keys(output, md) do - # Notice: transformers can override each others but the last one in this list wins - [ - otel_span_id: {"dd.span_id", &convert_otel_field/1}, - otel_trace_id: {"dd.trace_id", &convert_otel_field/1}, - span_id: {"dd.span_id", & &1}, - trace_id: {"dd.trace_id", & &1} - ] - |> Enum.reduce(output, fn {key, {new_key, transformer}}, acc -> - if Keyword.has_key?(md, key) do - new_value = transformer.(Keyword.get(md, key)) - Map.put(acc, new_key, new_value) - else - acc - end - end) - end - - # This converts native OpenTelemetry fields to the native Datadog format. - # This function is taken from the Datadog examples for converting. 
Mostly the Golang version - # https://docs.datadoghq.com/tracing/other_telemetry/connect_logs_and_traces/opentelemetry/?tab=go - # Tests were stolen from https://github.com/open-telemetry/opentelemetry-specification/issues/525 - # and https://go.dev/play/p/pUBHcLdXJNy - defp convert_otel_field(<<value::binary-size(16)>>) do - {value, _} = Integer.parse(value, 16) - Integer.to_string(value, 10) - rescue - _ -> "" - end - - defp convert_otel_field(value) when byte_size(value) < 16, do: "" - - defp convert_otel_field(value) do - value = to_string(value) - len = byte_size(value) - 16 - <<_front::binary-size(len), value::binary>> = value - convert_otel_field(value) - rescue - _ -> "" - end - - defp method_name(metadata) do - function = Keyword.get(metadata, :function) - module = Keyword.get(metadata, :module) - - FormatterUtils.format_function(module, function) - end - - defp syslog(level, ts, :system) do - {:ok, hostname} = :inet.gethostname() - - json_map( - hostname: to_string(hostname), - severity: Atom.to_string(level), - timestamp: FormatterUtils.format_timestamp(ts) - ) - end - - defp syslog(level, ts, :unset) do - json_map( - severity: Atom.to_string(level), - timestamp: FormatterUtils.format_timestamp(ts) - ) - end - - defp syslog(level, ts, hostname) do - json_map( - hostname: hostname, - severity: Atom.to_string(level), - timestamp: FormatterUtils.format_timestamp(ts) - ) - end -end diff --git a/lib/logger_json/formatters/google_cloud.ex b/lib/logger_json/formatters/google_cloud.ex new file mode 100644 index 0000000..291f2d1 --- /dev/null +++ b/lib/logger_json/formatters/google_cloud.ex @@ -0,0 +1,316 @@ +defmodule LoggerJSON.Formatters.GoogleCloud do + @moduledoc """ + Custom Erlang's [`:logger` formatter](https://www.erlang.org/doc/apps/kernel/logger_chapter.html#formatters) which + writes logs in a structured format that can be consumed by Google Cloud Logger.
+ + Even though the log messages on Google Cloud use [LogEntry](https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry) + format, not all the fields are available in the structured payload. The fields that are available can be found in the + [special fields in structured payloads](https://cloud.google.com/logging/docs/agent/configuration#special_fields_in_structured_payloads). + + ## Formatter Configuration + + The formatter can be configured with the following options: + + * `:project_id` (optional) - the Google Cloud project ID. This is required for correctly logging OpenTelemetry trace and + span IDs so that they can be linked to the correct trace in Google Cloud Trace. + + * `:service_context` (optional) - a map with the following keys: + * `:service` - the name of the service that is logging the message. Default: `node()`. + * `:version` - the version of the service that is logging the message. + + For list of shared options see "Shared options" in `LoggerJSON`. + + ## Metadata + + You can extend the log entry with some additional metadata: + + * `user_id`, `identity_id`, `actor_id`, `account_id` (order shows precedence) - the ID of the user that is performing the action. + It will be included along with the error report for Google Cloud Error Reporting; + + For list of other well-known metadata keys see "Metadata" in `LoggerJSON`.
+ + ## Examples + + Regular message: + + %{ + "logging.googleapis.com/operation" => %{"pid" => "#PID<0.228.0>"}, + "logging.googleapis.com/sourceLocation" => %{ + "file" => "/Users/andrew/Projects/os/logger_json/test/formatters/google_cloud_test.exs", + "function" => "Elixir.LoggerJSON.Formatters.GoogleCloudTest.test logs an LogEntry of a given level/1", + "line" => 44 + }, + "message" => %{"domain" => ["elixir"], "message" => "Hello"}, + "severity" => "NOTICE", + "time" => "2024-04-11T21:32:46.957Z" + } + + Exception message that will be recognized by Google Cloud Error Reporting: + + %{ + "httpRequest" => %{ + "protocol" => "HTTP/1.1", + "referer" => "http://www.example.com/", + "remoteIp" => "", + "requestMethod" => "PATCH", + "requestUrl" => "http://www.example.com/", + "status" => 503, + "userAgent" => "Mozilla/5.0" + }, + "logging.googleapis.com/operation" => %{"pid" => "#PID<0.250.0>"}, + "logging.googleapis.com/sourceLocation" => %{ + "file" => "/Users/andrew/Projects/os/logger_json/test/formatters/google_cloud_test.exs", + "function" => "Elixir.LoggerJSON.Formatters.GoogleCloudTest.test logs exception http context/1", + "line" => 301 + }, + "message" => %{ + "@type" => "type.googleapis.com/google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent", + "context" => %{ + "httpRequest" => %{ + "protocol" => "HTTP/1.1", + "referer" => "http://www.example.com/", + "remoteIp" => "", + "requestMethod" => "PATCH", + "requestUrl" => "http://www.example.com/", + "status" => 503, + "userAgent" => "Mozilla/5.0" + }, + "reportLocation" => %{ + "filePath" => "/Users/andrew/Projects/os/logger_json/test/formatters/google_cloud_test.exs", + "functionName" => "Elixir.LoggerJSON.Formatters.GoogleCloudTest.test logs exception http context/1", + "lineNumber" => 301 + } + }, + "domain" => ["elixir"], + "message" => "Hello", + "serviceContext" => %{"service" => "nonode@nohost"}, + "stack_trace" => "** (EXIT from #PID<0.250.0>) :foo" + }, + "severity" => "DEBUG", + "time" => 
"2024-04-11T21:34:53.503Z" + } + """ + import Jason.Helpers, only: [json_map: 1] + import LoggerJSON.Formatter.{MapBuilder, DateTime, Message, Metadata, Code, Plug, Encoder} + + @processed_metadata_keys ~w[pid file line mfa + otel_span_id span_id + otel_trace_id trace_id + conn]a + + @spec format(any(), any()) :: none() + def format(%{level: level, meta: meta, msg: msg}, opts) do + service_context = Keyword.get_lazy(opts, :service_context, fn -> %{service: to_string(node())} end) + project_id = Keyword.get(opts, :project_id) + metadata_keys_or_selector = Keyword.get(opts, :metadata, []) + metadata_selector = update_metadata_selector(metadata_keys_or_selector, @processed_metadata_keys) + + message = + format_message(msg, meta, %{ + binary: &format_binary_message/1, + structured: &format_structured_message/1, + crash: &format_crash_reason(&1, &2, service_context, meta) + }) + + metadata = + take_metadata(meta, metadata_selector) + + line = + %{ + time: utc_time(meta), + severity: log_level(level), + message: encode(Map.merge(message, metadata)) + } + |> maybe_put(:"logging.googleapis.com/sourceLocation", format_source_location(meta)) + |> maybe_put(:"logging.googleapis.com/operation", format_operation(meta)) + |> maybe_put(:"logging.googleapis.com/spanId", format_span(meta, project_id)) + |> maybe_put(:"logging.googleapis.com/trace", format_trace(meta, project_id)) + |> maybe_put(:httpRequest, format_http_request(meta)) + |> Jason.encode_to_iodata!() + + [line, "\n"] + end + + defp log_level(:emergency), do: "EMERGENCY" + defp log_level(:alert), do: "ALERT" + defp log_level(:critical), do: "CRITICAL" + defp log_level(:error), do: "ERROR" + defp log_level(:warning), do: "WARNING" + defp log_level(:notice), do: "NOTICE" + defp log_level(:info), do: "INFO" + defp log_level(:debug), do: "DEBUG" + defp log_level(_), do: "DEFAULT" + + def format_binary_message(binary) do + %{message: IO.chardata_to_string(binary)} + end + + def format_structured_message(map) when 
is_map(map) do + map + end + + def format_structured_message(keyword) do + Enum.into(keyword, %{}) + end + + # https://cloud.google.com/error-reporting/docs/formatting-error-messages + def format_crash_reason(binary, {{:EXIT, pid}, reason}, service_context, meta) do + stacktrace = Exception.format_banner({:EXIT, pid}, reason, []) + format_reported_error_event(binary, stacktrace, service_context, meta) + end + + def format_crash_reason(binary, {:exit, reason}, service_context, meta) do + stacktrace = Exception.format_banner(:exit, reason, []) + format_reported_error_event(binary, stacktrace, service_context, meta) + end + + def format_crash_reason(binary, {:throw, reason}, service_context, meta) do + stacktrace = Exception.format_banner(:throw, reason, []) + format_reported_error_event(binary, stacktrace, service_context, meta) + end + + def format_crash_reason(_binary, {%{} = exception, stacktrace}, service_context, meta) do + message = Exception.message(exception) + + ruby_stacktrace = + [ + Exception.format_banner(:error, exception, stacktrace), + format_stacktrace(stacktrace) + ] + |> Enum.join("\n") + + format_reported_error_event(message, ruby_stacktrace, service_context, meta) + end + + def format_crash_reason(binary, {error, reason}, service_context, meta) do + stacktrace = "** (#{error}) #{inspect(reason)}" + format_reported_error_event(binary, stacktrace, service_context, meta) + end + + defp format_reported_error_event(message, stacktrace, service_context, meta) do + %{ + "@type": "type.googleapis.com/google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent", + stack_trace: stacktrace, + message: IO.chardata_to_string(message), + context: format_reported_error_event_context(meta), + serviceContext: service_context + } + end + + # https://cloud.google.com/error-reporting/docs/formatting-error-messages#reported-error-example + defp format_reported_error_event_context(meta) do + %{} + |> maybe_put(:reportLocation, format_crash_report_location(meta)) + 
|> maybe_put(:httpRequest, format_http_request(meta)) + |> maybe_put(:user, format_affected_user(meta)) + end + + defp format_crash_report_location(%{file: file, line: line, mfa: {m, f, a}}) do + %{ + filePath: IO.chardata_to_string(file), + lineNumber: line, + functionName: format_function(m, f, a) + } + end + + defp format_crash_report_location(_meta), do: nil + + if Code.ensure_loaded?(Plug.Conn) do + defp format_http_request(%{conn: %Plug.Conn{} = conn}) do + request_method = conn.method |> to_string() |> String.upcase() + request_url = Plug.Conn.request_url(conn) + status = conn.status + user_agent = get_header(conn, "user-agent") + remote_ip = remote_ip(conn) + referer = get_header(conn, "referer") + + json_map( + protocol: Plug.Conn.get_http_protocol(conn), + requestMethod: request_method, + requestUrl: request_url, + status: status, + userAgent: user_agent, + remoteIp: remote_ip, + referer: referer + ) + end + end + + defp format_http_request(_meta), do: nil + + defp format_affected_user(%{user_id: user_id}), do: "user:" <> user_id + defp format_affected_user(%{identity_id: identity_id}), do: "identity:" <> identity_id + defp format_affected_user(%{actor_id: actor_id}), do: "actor:" <> actor_id + defp format_affected_user(%{account_id: account_id}), do: "account:" <> account_id + defp format_affected_user(_meta), do: nil + + defp format_stacktrace(stacktrace) do + lines = + Exception.format_stacktrace(stacktrace) + |> String.trim_trailing() + |> String.split("\n") + |> Enum.map(&format_line/1) + |> Enum.group_by(fn {kind, _line} -> kind end) + + lines = format_lines(:trace, lines[:trace]) ++ format_lines(:context, lines[:context]) ++ [""] + + Enum.join(lines, "\n") + end + + defp format_line(line) do + case Regex.run(~r/(.+)\:(\d+)\: (.*)/, line) do + [_, file, line, function] -> {:trace, "#{file}:#{line}:in `#{function}'"} + _ -> {:context, line} + end + end + + defp format_lines(_kind, nil) do + [] + end + + defp format_lines(:trace, lines) do + 
Enum.map(lines, fn {:trace, line} -> line end) + end + + defp format_lines(:context, lines) do + ["Context:" | Enum.map(lines, fn {:context, line} -> line end)] + end + + # https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#LogEntryOperation + defp format_operation(%{request_id: request_id, pid: pid}), do: json_map(id: request_id, producer: inspect(pid)) + defp format_operation(%{pid: pid}), do: json_map(pid: inspect(pid)) + defp format_operation(_meta), do: nil + + # https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#LogEntrySourceLocation + defp format_source_location(%{file: file, line: line, mfa: {m, f, a}}) do + json_map( + file: IO.chardata_to_string(file), + line: line, + function: format_function(m, f, a) + ) + end + + defp format_source_location(_meta), + do: nil + + defp format_span(%{otel_span_id: otel_span_id}, _project_id_or_nil), + do: IO.chardata_to_string(otel_span_id) + + defp format_span(%{span_id: span_id}, _project_id_or_nil), + do: span_id + + defp format_span(_meta, _project_id_or_nil), + do: nil + + defp format_trace(%{otel_trace_id: otel_trace_id}, nil), + do: IO.chardata_to_string(otel_trace_id) + + defp format_trace(%{otel_trace_id: otel_trace_id}, project_id), + do: "projects/#{project_id}/traces/#{IO.chardata_to_string(otel_trace_id)}" + + defp format_trace(%{trace_id: trace_id}, _project_id_or_nil), + do: trace_id + + defp format_trace(_meta, _project_id_or_nil), + do: nil +end diff --git a/lib/logger_json/formatters/google_cloud_logger.ex b/lib/logger_json/formatters/google_cloud_logger.ex deleted file mode 100644 index 3124069..0000000 --- a/lib/logger_json/formatters/google_cloud_logger.ex +++ /dev/null @@ -1,85 +0,0 @@ -defmodule LoggerJSON.Formatters.GoogleCloudLogger do - @moduledoc """ - Google Cloud Logger formatter. 
- """ - import Jason.Helpers, only: [json_map: 1] - - alias LoggerJSON.{FormatterUtils, JasonSafeFormatter} - - @behaviour LoggerJSON.Formatter - - @processed_metadata_keys ~w[pid file line function module application]a - - # Severity levels can be found in Google Cloud Logger docs: - # https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#LogSeverity - @severity_levels [ - {:debug, "DEBUG"}, - {:info, "INFO"}, - {:warn, "WARNING"}, - {:error, "ERROR"} - ] - - @impl true - def init(_formatter_opts), do: [] - - @doc """ - Builds structured payload which is mapped to Google Cloud Logger - [`LogEntry`](https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry) format. - - See: https://cloud.google.com/logging/docs/agent/configuration#special_fields_in_structured_payloads - """ - for {level, gcp_level} <- @severity_levels do - @impl true - def format_event(unquote(level), msg, ts, md, md_keys, _formatter_state) do - Map.merge( - %{ - time: FormatterUtils.format_timestamp(ts), - severity: unquote(gcp_level), - message: IO.chardata_to_string(msg) - }, - format_metadata(md, md_keys) - ) - end - end - - @impl true - def format_event(_level, msg, ts, md, md_keys, _formatter_state) do - Map.merge( - %{ - time: FormatterUtils.format_timestamp(ts), - severity: "DEFAULT", - message: IO.chardata_to_string(msg) - }, - format_metadata(md, md_keys) - ) - end - - defp format_metadata(md, md_keys) do - LoggerJSON.take_metadata(md, md_keys, @processed_metadata_keys) - |> JasonSafeFormatter.format() - |> FormatterUtils.maybe_put(:error, FormatterUtils.format_process_crash(md)) - |> FormatterUtils.maybe_put(:"logging.googleapis.com/sourceLocation", format_source_location(md)) - |> FormatterUtils.maybe_put(:"logging.googleapis.com/operation", format_operation(md)) - end - - defp format_operation(md) do - if request_id = Keyword.get(md, :request_id) do - json_map(id: request_id) - end - end - - # Description can be found in Google Cloud Logger docs; - # 
https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#LogEntrySourceLocation - defp format_source_location(metadata) do - file = Keyword.get(metadata, :file) - line = Keyword.get(metadata, :line, 0) - function = Keyword.get(metadata, :function) - module = Keyword.get(metadata, :module) - - json_map( - file: file, - line: line, - function: FormatterUtils.format_function(module, function) - ) - end -end diff --git a/lib/logger_json/formatters/google_error_reporter.ex b/lib/logger_json/formatters/google_error_reporter.ex deleted file mode 100644 index 459fd06..0000000 --- a/lib/logger_json/formatters/google_error_reporter.ex +++ /dev/null @@ -1,64 +0,0 @@ -defmodule LoggerJSON.Formatters.GoogleErrorReporter do - @moduledoc """ - Google Error Reporter formatter. - """ - require Logger - @googleErrorType "type.googleapis.com/google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent" - - def report(kind, reason, stacktrace, metadata \\ []) do - [format_banner(kind, reason, stacktrace) | format_stacktrace(stacktrace)] - |> Enum.join("\n") - |> Logger.error(Keyword.merge(build_metadata(), metadata)) - end - - defp format_banner(kind, reason, stacktrace) do - Exception.format_banner(kind, reason, stacktrace) - end - - defp format_stacktrace(stacktrace) do - lines = - Exception.format_stacktrace(stacktrace) - |> String.trim_trailing() - |> String.split("\n") - |> Enum.map(&format_line/1) - |> Enum.group_by(fn {kind, _line} -> kind end) - - format_lines(:trace, lines[:trace]) ++ format_lines(:context, lines[:context]) ++ [""] - end - - defp format_line(line) do - case Regex.run(~r/(.+)\:(\d+)\: (.*)/, line) do - [_, file, line, function] -> {:trace, "#{file}:#{line}:in `#{function}'"} - _ -> {:context, line} - end - end - - defp format_lines(_kind, nil) do - [] - end - - defp format_lines(:trace, lines) do - Enum.map(lines, fn {:trace, line} -> line end) - end - - defp format_lines(:context, lines) do - ["Context:" | Enum.map(lines, fn {:context, line} -> 
line end)] - end - - defp build_metadata() do - ["@type": @googleErrorType] - |> with_service_context() - end - - defp with_service_context(metadata) do - if service_context = config()[:service_context] do - Keyword.merge(metadata, serviceContext: service_context) - else - metadata - end - end - - defp config do - Application.get_env(:logger_json, :google_error_reporter, []) - end -end diff --git a/lib/logger_json/jason_safe_formatter.ex b/lib/logger_json/jason_safe_formatter.ex deleted file mode 100644 index 8aa5f32..0000000 --- a/lib/logger_json/jason_safe_formatter.ex +++ /dev/null @@ -1,79 +0,0 @@ -defmodule LoggerJSON.JasonSafeFormatter do - @moduledoc """ - Utilities for converting metadata into data structures that can be safely passed to `Jason.encode!/1`. - """ - - @doc """ - Produces metadata that is "safe" for calling Jason.encode!/1 on without errors. - This means that unexpected Logger metadata won't cause logging crashes. - - Current formatting is... - * Maps: as is - * Printable binaries: as is - * Numbers: as is - * Structs that don't implement Jason.Encoder: converted to maps - * Tuples: converted to lists - * Keyword lists: converted to Maps - * everything else: inspected - """ - @spec format(any()) :: any() - def format(%Jason.Fragment{} = data) do - data - end - - def format(nil), do: nil - def format(true), do: true - def format(false), do: false - def format(data) when is_atom(data), do: data - - def format(%_struct{} = data) do - if jason_implemented?(data) do - data - else - data - |> Map.from_struct() - |> format() - end - end - - def format(%{} = data) do - for {key, value} <- data, into: %{}, do: {format_map_key(key), format(value)} - end - - def format([{key, _} | _] = data) when is_atom(key) do - Enum.into(data, %{}, fn - {key, value} -> {format_map_key(key), format(value)} - end) - rescue - _ -> for(d <- data, do: format(d)) - end - - def format({key, data}) when is_binary(key) or is_atom(key), do: %{format_map_key(key) => 
format(data)} - - def format(data) when is_tuple(data), do: data |> Tuple.to_list() |> format() - - def format(data) when is_number(data), do: data - - def format(data) when is_binary(data), do: format_binary(data) - - def format(data) when is_list(data), do: for(d <- data, do: format(d)) - - def format(data), do: inspect(data, pretty: true, width: 80) - - defp format_map_key(key) when is_binary(key), do: format_binary(key) - defp format_map_key(key) when is_atom(key) or is_number(key), do: key - defp format_map_key(key), do: inspect(key) - - defp format_binary(data) when is_binary(data) do - if String.valid?(data) && String.printable?(data) do - data - else - inspect(data) - end - end - - def jason_implemented?(data) do - impl = Jason.Encoder.impl_for(data) - impl && impl != Jason.Encoder.Any - end -end diff --git a/lib/logger_json/plug.ex b/lib/logger_json/plug.ex deleted file mode 100644 index 020512b..0000000 --- a/lib/logger_json/plug.ex +++ /dev/null @@ -1,50 +0,0 @@ -if Code.ensure_loaded?(Plug) do - defmodule LoggerJSON.Plug do - @moduledoc """ - A Plug to log request information in JSON format. - """ - alias Plug.Conn - alias LoggerJSON.Plug.MetadataFormatters - require Logger - - @behaviour Plug - - @doc """ - Initializes the Plug. - - ### Available options - - * `:log` - log level which is used to log requests; - * `:version_header` - request header which is used to determine API version requested by - client, default: `x-api-version`; - * `:metadata_formatter` - module with `build_metadata/3` function that formats the metadata - before it's sent to logger, default - `LoggerJSON.Plug.MetadataFormatters.GoogleCloudLogger`. - - ### Available metadata formatters - - * `LoggerJSON.Plug.MetadataFormatters.DatadogLogger` see module for logged structure; - * `LoggerJSON.Plug.MetadataFormatters.GoogleCloudLogger` leverages GCP LogEntry format; - * `LoggerJSON.Plug.MetadataFormatters.ELK` see module for logged structure. 
- - """ - @impl true - def init(opts) do - level = Keyword.get(opts, :log, :info) - client_version_header = Keyword.get(opts, :version_header, "x-api-version") - metadata_formatter = Keyword.get(opts, :metadata_formatter, MetadataFormatters.GoogleCloudLogger) - {level, metadata_formatter, client_version_header} - end - - @impl true - def call(conn, {level, metadata_formatter, client_version_header}) do - start = System.monotonic_time() - - Conn.register_before_send(conn, fn conn -> - latency = System.monotonic_time() - start - metadata = metadata_formatter.build_metadata(conn, latency, client_version_header) - Logger.log(level, "", metadata) - conn - end) - end - end -end diff --git a/lib/logger_json/plug/metadata_formatters/datadog_logger.ex b/lib/logger_json/plug/metadata_formatters/datadog_logger.ex deleted file mode 100644 index 7b3d7cc..0000000 --- a/lib/logger_json/plug/metadata_formatters/datadog_logger.ex +++ /dev/null @@ -1,77 +0,0 @@ -if Code.ensure_loaded?(Plug) do - defmodule LoggerJSON.Plug.MetadataFormatters.DatadogLogger do - @moduledoc """ - This formatter builds a metadata which is natively supported by Datadog: - - * `http` - see [DataDog](https://docs.datadoghq.com/logs/processing/attributes_naming_convention/#http-requests); - * `phoenix.controller` - Phoenix controller that processed the request; - * `phoenix.action` - Phoenix action that processed the request; - """ - import Jason.Helpers, only: [json_map: 1] - - @doc false - def build_metadata(conn, latency, client_version_header) do - client_metadata(conn, client_version_header) ++ - phoenix_metadata(conn) ++ - [ - duration: native_to_nanoseconds(latency), - http: - json_map( - url: request_url(conn), - status_code: conn.status, - method: conn.method, - referer: LoggerJSON.PlugUtils.get_header(conn, "referer"), - request_id: Keyword.get(Logger.metadata(), :request_id), - useragent: LoggerJSON.PlugUtils.get_header(conn, "user-agent"), - url_details: - json_map( - host: conn.host, - port: 
conn.port, - path: conn.request_path, - queryString: conn.query_string, - scheme: conn.scheme - ) - ), - network: json_map(client: json_map(ip: LoggerJSON.PlugUtils.remote_ip(conn))) - ] - end - - defp native_to_nanoseconds(nil) do - nil - end - - defp native_to_nanoseconds(native) do - System.convert_time_unit(native, :native, :nanosecond) - end - - defp request_url(%{request_path: "/"} = conn), do: "#{conn.scheme}://#{conn.host}/" - defp request_url(conn), do: "#{conn.scheme}://#{Path.join(conn.host, conn.request_path)}" - - defp client_metadata(conn, client_version_header) do - if api_version = LoggerJSON.PlugUtils.get_header(conn, client_version_header) do - [client: json_map(api_version: api_version)] - else - [] - end - end - - defp phoenix_metadata(%{private: %{phoenix_controller: controller, phoenix_action: action}} = conn) do - [phoenix: json_map(controller: controller, action: action, route: phoenix_route(conn))] - end - - defp phoenix_metadata(_conn) do - [] - end - - if Code.ensure_loaded?(Phoenix.Router) do - defp phoenix_route(%{private: %{phoenix_router: router}, method: method, request_path: path, host: host}) do - case Phoenix.Router.route_info(router, method, path, host) do - %{route: route} -> route - _ -> nil - end - end - end - - defp phoenix_route(_conn), do: nil - end -end diff --git a/lib/logger_json/plug/metadata_formatters/elk.ex b/lib/logger_json/plug/metadata_formatters/elk.ex deleted file mode 100644 index 521f1e9..0000000 --- a/lib/logger_json/plug/metadata_formatters/elk.ex +++ /dev/null @@ -1,72 +0,0 @@ -if Code.ensure_loaded?(Plug) do - defmodule LoggerJSON.Plug.MetadataFormatters.ELK do - @moduledoc """ - Formats connection into Logger metadata: - - * `connection.type` - type of connection (Sent or Chunked); - * `connection.method` - HTTP request method; - * `connection.request_path` - HTTP request path; - * `connection.status` - HTTP status code sent to a client; - * `client.user_agent` - value of `User-Agent` header; - * 
`client.ip' - value of `X-Forwarded-For` header if present, otherwise - remote IP of a connected client; - * `client.api_version' - version of API that was requested by a client; - * `node.hostname` - system hostname; - * `node.vm_pid` - Erlang VM process identifier; - * `phoenix.controller` - Phoenix controller that processed the request; - * `phoenix.action` - Phoenix action that processed the request; - * `latency_μs` - time in microseconds taken to process the request. - """ - import Jason.Helpers, only: [json_map: 1] - - @doc false - def build_metadata(conn, latency, client_version_header) do - latency_μs = System.convert_time_unit(latency, :native, :microsecond) - user_agent = LoggerJSON.PlugUtils.get_header(conn, "user-agent") - ip = LoggerJSON.PlugUtils.remote_ip(conn) - api_version = LoggerJSON.PlugUtils.get_header(conn, client_version_header) - {hostname, vm_pid} = node_metadata() - - phoenix_metadata(conn) ++ - [ - connection: - json_map( - type: connection_type(conn), - method: conn.method, - request_path: conn.request_path, - status: conn.status - ), - client: - json_map( - user_agent: user_agent, - ip: ip, - api_version: api_version - ), - node: json_map(hostname: to_string(hostname), vm_pid: vm_pid), - latency_μs: latency_μs - ] - end - - defp connection_type(%{state: :set_chunked}), do: "chunked" - defp connection_type(_), do: "sent" - - defp phoenix_metadata(%{private: %{phoenix_controller: controller, phoenix_action: action}}) do - [phoenix: %{controller: controller, action: action}] - end - - defp phoenix_metadata(_conn) do - [] - end - - defp node_metadata do - {:ok, hostname} = :inet.gethostname() - - vm_pid = - case Integer.parse(System.pid()) do - {pid, _units} -> pid - _ -> nil - end - - {hostname, vm_pid} - end - end -end diff --git a/lib/logger_json/plug/metadata_formatters/google_cloud_logger.ex b/lib/logger_json/plug/metadata_formatters/google_cloud_logger.ex deleted file mode 100644 index cf842c6..0000000 --- 
a/lib/logger_json/plug/metadata_formatters/google_cloud_logger.ex +++ /dev/null @@ -1,85 +0,0 @@ -if Code.ensure_loaded?(Plug) do - defmodule LoggerJSON.Plug.MetadataFormatters.GoogleCloudLogger do - @moduledoc """ - This formatter builds a metadata which is natively supported by Google Cloud Logger: - - * `httpRequest` - see [LogEntry#HttpRequest](https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#HttpRequest); - * `client.api_version` - version of API that was requested by a client; - * `phoenix.controller` - Phoenix controller that processed the request; - * `phoenix.action` - Phoenix action that processed the request; - * `node.hostname` - node hostname; - * `node.vm_pid` - Erlang VM process identifier. - """ - import Jason.Helpers, only: [json_map: 1] - - @nanoseconds_in_second System.convert_time_unit(1, :second, :nanosecond) - - @doc false - def build_metadata(conn, latency, client_version_header) do - latency_seconds = native_to_seconds(latency) - request_method = conn.method - request_url = request_url(conn) - status = conn.status - user_agent = LoggerJSON.PlugUtils.get_header(conn, "user-agent") - remote_ip = LoggerJSON.PlugUtils.remote_ip(conn) - referer = LoggerJSON.PlugUtils.get_header(conn, "referer") - {hostname, vm_pid} = node_metadata() - - client_metadata(conn, client_version_header) ++ - phoenix_metadata(conn) ++ - [ - httpRequest: - json_map( - requestMethod: request_method, - requestUrl: request_url, - status: status, - userAgent: user_agent, - remoteIp: remote_ip, - referer: referer, - latency: latency_seconds - ), - node: json_map(hostname: to_string(hostname), vm_pid: vm_pid) - ] - end - - defp native_to_seconds(nil) do - nil - end - - defp native_to_seconds(native) do - seconds = System.convert_time_unit(native, :native, :nanosecond) / @nanoseconds_in_second - :erlang.float_to_binary(seconds, [{:decimals, 8}, :compact]) <> "s" - end - - defp request_url(%{request_path: "/"} = conn), do: "#{conn.scheme}://#{conn.host}/" - 
defp request_url(conn), do: "#{conn.scheme}://#{Path.join(conn.host, conn.request_path)}" - - defp client_metadata(conn, client_version_header) do - if api_version = LoggerJSON.PlugUtils.get_header(conn, client_version_header) do - [client: json_map(api_version: api_version)] - else - [] - end - end - - defp phoenix_metadata(%{private: %{phoenix_controller: controller, phoenix_action: action}}) do - [phoenix: json_map(controller: controller, action: action)] - end - - defp phoenix_metadata(_conn) do - [] - end - - defp node_metadata do - {:ok, hostname} = :inet.gethostname() - - vm_pid = - case Integer.parse(System.pid()) do - {pid, _units} -> pid - _ -> nil - end - - {hostname, vm_pid} - end - end -end diff --git a/lib/logger_json/plug_utils.ex b/lib/logger_json/plug_utils.ex deleted file mode 100644 index 2117bcd..0000000 --- a/lib/logger_json/plug_utils.ex +++ /dev/null @@ -1,35 +0,0 @@ -if Code.ensure_loaded?(Plug) do - defmodule LoggerJSON.PlugUtils do - @moduledoc """ - This module contains functions that can be used across different - `LoggerJSON.Plug.MetadataFormatters` implementations to provide - common functionality. - """ - - alias Plug.Conn - - @doc """ - Grabs the client IP address from `Plug.Conn`. First we try - to grab the client IP from the `x-forwarded-for` header. - Then we fall back to the `conn.remote_ip` field. 
- """ - def remote_ip(conn) do - if header_value = get_header(conn, "x-forwarded-for") do - header_value - |> String.split(",") - |> hd() - |> String.trim() - else - to_string(:inet_parse.ntoa(conn.remote_ip)) - end - end - - @doc false - def get_header(conn, header) do - case Conn.get_req_header(conn, header) do - [] -> nil - [val | _] -> val - end - end - end -end diff --git a/mix.exs b/mix.exs index aa2749c..7cf6fd3 100644 --- a/mix.exs +++ b/mix.exs @@ -2,13 +2,13 @@ defmodule LoggerJSON.Mixfile do use Mix.Project @source_url "https://github.com/Nebo15/logger_json" - @version "5.1.4" + @version "6.0.0" def project do [ app: :logger_json, version: @version, - elixir: "~> 1.9", + elixir: "~> 1.16", elixirc_paths: elixirc_paths(Mix.env()), compilers: [] ++ Mix.compilers(), build_embedded: Mix.env() == :prod, @@ -35,29 +35,27 @@ defmodule LoggerJSON.Mixfile do defp deps do [ - {:jason, "~> 1.0"}, - {:ecto, "~> 2.1 or ~> 3.0", optional: true}, - {:plug, "~> 1.0", optional: true}, - {:phoenix, ">= 1.5.0", optional: true}, - {:telemetry, "~> 0.4.0 or ~> 1.0", optional: true}, - {:ex_doc, ">= 0.15.0", only: [:dev, :test], runtime: false}, + {:jason, "~> 1.4"}, + {:plug, "~> 1.15", optional: true}, + {:telemetry, "~> 1.0", optional: true}, + {:stream_data, "~> 0.5", only: [:dev, :test]}, + {:castore, "~> 1.0", only: [:dev, :test]}, {:excoveralls, ">= 0.15.0", only: [:dev, :test]}, - {:dialyxir, "~> 1.1.0", only: [:dev], runtime: false}, - {:stream_data, "~> 0.5", only: [:dev, :test]} + {:junit_formatter, "~> 3.3", only: [:test]}, + {:ex_doc, ">= 0.15.0", only: [:dev, :test], runtime: false}, + {:dialyxir, "~> 1.4.0", only: [:dev], runtime: false} ] end defp package do [ - description: - "Console Logger back-end, Plug and Ecto adapter " <> - "that writes logs in JSON format.", - contributors: ["Nebo #15"], - maintainers: ["Nebo #15"], + description: "A collection of Logger formatters that that write logs in JSON format", + contributors: ["Andrew Dryga"], + 
maintainers: ["Andrew Dryga"], licenses: ["MIT"], files: ~w(lib LICENSE.md mix.exs README.md), links: %{ - Changelog: "https://hexdocs.pm/logger_json/changelog.html", + Changelog: "https://github.com/Nebo15/logger_json/releases", GitHub: @source_url } ] diff --git a/mix.lock b/mix.lock index 4ad09c1..36ebd9f 100644 --- a/mix.lock +++ b/mix.lock @@ -1,38 +1,19 @@ %{ - "castore": {:hex, :castore, "1.0.5", "9eeebb394cc9a0f3ae56b813459f990abb0a3dedee1be6b27fdb50301930502f", [:mix], [], "hexpm", "8d7c597c3e4a64c395980882d4bca3cebb8d74197c590dc272cfd3b6a6310578"}, - "certifi": {:hex, :certifi, "2.9.0", "6f2a475689dd47f19fb74334859d460a2dc4e3252a3324bd2111b8f0429e7e21", [:rebar3], [], "hexpm", "266da46bdb06d6c6d35fde799bcb28d36d985d424ad7c08b5bb48f5b5cdd4641"}, - "decimal": {:hex, :decimal, "2.1.1", "5611dca5d4b2c3dd497dec8f68751f1f1a54755e8ed2a966c2633cf885973ad6", [:mix], [], "hexpm", "53cfe5f497ed0e7771ae1a475575603d77425099ba5faef9394932b35020ffcc"}, - "dialyxir": {:hex, :dialyxir, "1.1.0", "c5aab0d6e71e5522e77beff7ba9e08f8e02bad90dfbeffae60eaf0cb47e29488", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "07ea8e49c45f15264ebe6d5b93799d4dd56a44036cf42d0ad9c960bc266c0b9a"}, - "earmark": {:hex, :earmark, "1.4.2", "3aa0bd23bc4c61cf2f1e5d752d1bb470560a6f8539974f767a38923bb20e1d7f", [:mix], [], "hexpm", "5e8806285d8a3a8999bd38e4a73c58d28534c856bc38c44818e5ba85bbda16fb"}, + "castore": {:hex, :castore, "1.0.6", "ffc42f110ebfdafab0ea159cd43d31365fa0af0ce4a02ecebf1707ae619ee727", [:mix], [], "hexpm", "374c6e7ca752296be3d6780a6d5b922854ffcc74123da90f2f328996b962d33a"}, + "dialyxir": {:hex, :dialyxir, "1.4.3", "edd0124f358f0b9e95bfe53a9fcf806d615d8f838e2202a9f430d59566b6b53b", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "bf2cfb75cd5c5006bec30141b131663299c661a864ec7fbbc72dfa557487a986"}, "earmark_parser": {:hex, :earmark_parser, "1.4.39", 
"424642f8335b05bb9eb611aa1564c148a8ee35c9c8a8bba6e129d51a3e3c6769", [:mix], [], "hexpm", "06553a88d1f1846da9ef066b87b57c6f605552cfbe40d20bd8d59cc6bde41944"}, - "ecto": {:hex, :ecto, "3.11.1", "4b4972b717e7ca83d30121b12998f5fcdc62ba0ed4f20fd390f16f3270d85c3e", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "ebd3d3772cd0dfcd8d772659e41ed527c28b2a8bde4b00fe03e0463da0f1983b"}, "erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"}, - "ex_doc": {:hex, :ex_doc, "0.31.1", "8a2355ac42b1cc7b2379da9e40243f2670143721dd50748bf6c3b1184dae2089", [:mix], [{:earmark_parser, "~> 1.4.39", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.1", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "3178c3a407c557d8343479e1ff117a96fd31bafe52a039079593fb0524ef61b0"}, - "excoveralls": {:hex, :excoveralls, "0.18.0", "b92497e69465dc51bc37a6422226ee690ab437e4c06877e836f1c18daeb35da9", [:mix], [{:castore, "~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "1109bb911f3cb583401760be49c02cbbd16aed66ea9509fc5479335d284da60b"}, - "exjsx": {:hex, :exjsx, "4.0.0", "60548841e0212df401e38e63c0078ec57b33e7ea49b032c796ccad8cde794b5c", [:mix], [{:jsx, "~> 2.8.0", [hex: :jsx, repo: "hexpm", optional: false]}], "hexpm"}, - "hackney": {:hex, :hackney, "1.18.1", "f48bf88f521f2a229fc7bae88cf4f85adc9cd9bcf23b5dc8eb6a1788c662c4f6", [:rebar3], [{:certifi, "~>2.9.0", [hex: :certifi, repo: "hexpm", optional: 
false]}, {:idna, "~>6.1.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "~>1.0.0", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~>1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:parse_trans, "3.3.1", [hex: :parse_trans, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~>1.1.0", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}, {:unicode_util_compat, "~>0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "a4ecdaff44297e9b5894ae499e9a070ea1888c84afdd1fd9b7b2bc384950128e"}, - "idna": {:hex, :idna, "6.1.1", "8a63070e9f7d0c62eb9d9fcb360a7de382448200fbbd1b106cc96d3d8099df8d", [:rebar3], [{:unicode_util_compat, "~>0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "92376eb7894412ed19ac475e4a86f7b413c1b9fbb5bd16dccd57934157944cea"}, + "ex_doc": {:hex, :ex_doc, "0.32.0", "896afb57b1e00030f6ec8b2e19d3ca99a197afb23858d49d94aea673dc222f12", [:mix], [{:earmark_parser, "~> 1.4.39", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.1", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "ed2c3e42c558f49bda3ff37e05713432006e1719a6c4a3320c7e4735787374e7"}, + "excoveralls": {:hex, :excoveralls, "0.18.1", "a6f547570c6b24ec13f122a5634833a063aec49218f6fff27de9df693a15588c", [:mix], [{:castore, "~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "d65f79db146bb20399f23046015974de0079668b9abb2f5aac074d078da60b8d"}, "jason": {:hex, :jason, "1.4.1", "af1504e35f629ddcdd6addb3513c3853991f694921b1b9368b0bd32beb9f1b63", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "fbb01ecdfd565b56261302f7e1fcc27c4fb8f32d56eab74db621fc154604a7a1"}, - "jsx": {:hex, :jsx, 
"2.8.3", "a05252d381885240744d955fbe3cf810504eb2567164824e19303ea59eef62cf", [:mix, :rebar3], [], "hexpm"}, + "junit_formatter": {:hex, :junit_formatter, "3.4.0", "d0e8db6c34dab6d3c4154c3b46b21540db1109ae709d6cf99ba7e7a2ce4b1ac2", [:mix], [], "hexpm", "bb36e2ae83f1ced6ab931c4ce51dd3dbef1ef61bb4932412e173b0cfa259dacd"}, "makeup": {:hex, :makeup, "1.1.1", "fa0bc768698053b2b3869fa8a62616501ff9d11a562f3ce39580d60860c3a55e", [:mix], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "5dc62fbdd0de44de194898b6710692490be74baa02d9d108bc29f007783b0b48"}, - "makeup_elixir": {:hex, :makeup_elixir, "0.16.1", "cc9e3ca312f1cfeccc572b37a09980287e243648108384b97ff2b76e505c3555", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "e127a341ad1b209bd80f7bd1620a15693a9908ed780c3b763bccf7d200c767c6"}, - "makeup_erlang": {:hex, :makeup_erlang, "0.1.3", "d684f4bac8690e70b06eb52dad65d26de2eefa44cd19d64a8095e1417df7c8fd", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "b78dc853d2e670ff6390b605d807263bf606da3c82be37f9d7f68635bd886fc9"}, - "metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm", "69b09adddc4f74a40716ae54d140f93beb0fb8978d8636eaded0c31b6f099f16"}, + "makeup_elixir": {:hex, :makeup_elixir, "0.16.2", "627e84b8e8bf22e60a2579dad15067c755531fea049ae26ef1020cad58fe9578", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "41193978704763f6bbe6cc2758b84909e62984c7752b3784bd3c218bb341706b"}, + "makeup_erlang": {:hex, :makeup_erlang, "0.1.5", "e0ff5a7c708dda34311f7522a8758e23bfcd7d8d8068dc312b5eb41c6fd76eba", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: 
"hexpm", optional: false]}], "hexpm", "94d2e986428585a21516d7d7149781480013c56e30c6a233534bedf38867a59a"}, "mime": {:hex, :mime, "2.0.5", "dc34c8efd439abe6ae0343edbb8556f4d63f178594894720607772a041b04b02", [:mix], [], "hexpm", "da0d64a365c45bc9935cc5c8a7fc5e49a0e0f9932a761c55d6c52b142780a05c"}, - "mimerl": {:hex, :mimerl, "1.2.0", "67e2d3f571088d5cfd3e550c383094b47159f3eee8ffa08e64106cdf5e981be3", [:rebar3], [], "hexpm", "f278585650aa581986264638ebf698f8bb19df297f66ad91b18910dfc6e19323"}, "nimble_parsec": {:hex, :nimble_parsec, "1.4.0", "51f9b613ea62cfa97b25ccc2c1b4216e81df970acd8e16e8d1bdc58fef21370d", [:mix], [], "hexpm", "9c565862810fb383e9838c1dd2d7d2c437b3d13b267414ba6af33e50d2d1cf28"}, - "parse_trans": {:hex, :parse_trans, "3.3.1", "16328ab840cc09919bd10dab29e431da3af9e9e7e7e6f0089dd5a2d2820011d8", [:rebar3], [], "hexpm", "07cd9577885f56362d414e8c4c4e6bdf10d43a8767abb92d24cbe8b24c54888b"}, - "phoenix": {:hex, :phoenix, "1.7.10", "02189140a61b2ce85bb633a9b6fd02dff705a5f1596869547aeb2b2b95edd729", [:mix], [{:castore, ">= 0.0.0", [hex: :castore, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.1", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.6", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:plug_crypto, "~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:websock_adapter, "~> 0.5.3", [hex: :websock_adapter, repo: "hexpm", optional: false]}], "hexpm", "cf784932e010fd736d656d7fead6a584a4498efefe5b8227e9f383bf15bb79d0"}, - "phoenix_pubsub": {:hex, :phoenix_pubsub, "2.1.3", 
"3168d78ba41835aecad272d5e8cd51aa87a7ac9eb836eabc42f6e57538e3731d", [:mix], [], "hexpm", "bba06bc1dcfd8cb086759f0edc94a8ba2bc8896d5331a1e2c2902bf8e36ee502"}, - "phoenix_template": {:hex, :phoenix_template, "1.0.4", "e2092c132f3b5e5b2d49c96695342eb36d0ed514c5b252a77048d5969330d639", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}], "hexpm", "2c0c81f0e5c6753faf5cca2f229c9709919aba34fab866d3bc05060c9c444206"}, - "phoenix_view": {:hex, :phoenix_view, "1.1.2", "1b82764a065fb41051637872c7bd07ed2fdb6f5c3bd89684d4dca6e10115c95a", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}], "hexpm", "7ae90ad27b09091266f6adbb61e1d2516a7c3d7062c6789d46a7554ec40f3a56"}, "plug": {:hex, :plug, "1.15.3", "712976f504418f6dff0a3e554c40d705a9bcf89a7ccef92fc6a5ef8f16a30a97", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "cc4365a3c010a56af402e0809208873d113e9c38c401cabd88027ef4f5c01fd2"}, "plug_crypto": {:hex, :plug_crypto, "2.0.0", "77515cc10af06645abbfb5e6ad7a3e9714f805ae118fa1a70205f80d2d70fe73", [:mix], [], "hexpm", "53695bae57cc4e54566d993eb01074e4d894b65a3766f1c43e2c61a1b0f45ea9"}, - "poolboy": {:hex, :poolboy, "1.5.1", "6b46163901cfd0a1b43d692657ed9d7e599853b3b21b95ae5ae0a777cf9b6ca8", [:rebar], [], "hexpm"}, - "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.6", "cf344f5692c82d2cd7554f5ec8fd961548d4fd09e7d22f5b62482e5aeaebd4b0", [:make, :mix, :rebar3], [], "hexpm", "bdb0d2471f453c88ff3908e7686f86f9be327d065cc1ec16fa4540197ea04680"}, "stream_data": {:hex, :stream_data, "0.6.0", "e87a9a79d7ec23d10ff83eb025141ef4915eeb09d4491f79e52f2562b73e5f47", [:mix], [], "hexpm", "b92b5031b650ca480ced047578f1d57ea6dd563f5b57464ad274718c9c29501c"}, "telemetry": 
{:hex, :telemetry, "1.2.1", "68fdfe8d8f05a8428483a97d7aab2f268aaff24b49e0f599faa091f1d4e7f61c", [:rebar3], [], "hexpm", "dad9ce9d8effc621708f99eac538ef1cbe05d6a874dd741de2e689c47feafed5"}, - "unicode_util_compat": {:hex, :unicode_util_compat, "0.7.0", "bc84380c9ab48177092f43ac89e4dfa2c6d62b40b8bd132b1059ecc7232f9a78", [:rebar3], [], "hexpm", "25eee6d67df61960cf6a794239566599b09e17e668d3700247bc498638152521"}, - "websock": {:hex, :websock, "0.5.3", "2f69a6ebe810328555b6fe5c831a851f485e303a7c8ce6c5f675abeb20ebdadc", [:mix], [], "hexpm", "6105453d7fac22c712ad66fab1d45abdf049868f253cf719b625151460b8b453"}, - "websock_adapter": {:hex, :websock_adapter, "0.5.5", "9dfeee8269b27e958a65b3e235b7e447769f66b5b5925385f5a569269164a210", [:mix], [{:bandit, ">= 0.6.0", [hex: :bandit, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.6", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:websock, "~> 0.5", [hex: :websock, repo: "hexpm", optional: false]}], "hexpm", "4b977ba4a01918acbf77045ff88de7f6972c2a009213c515a445c48f224ffce9"}, } diff --git a/test/formatter/encoder_test.exs b/test/formatter/encoder_test.exs new file mode 100644 index 0000000..04fa91e --- /dev/null +++ b/test/formatter/encoder_test.exs @@ -0,0 +1,100 @@ +defmodule LoggerJSON.Formatter.EncoderTest do + use Logger.Case, async: true + use ExUnitProperties + import LoggerJSON.Formatter.Encoder + + defmodule IDStruct, do: defstruct(id: nil) + + describe "encode/1" do + test "allows nils" do + assert encode(nil) == nil + end + + test "allows booleans" do + assert encode(true) == true + assert encode(false) == false + end + + test "allows printable strings" do + assert encode("hello") == "hello" + end + + test "inspects non-printable binaries" do + assert encode("hello" <> <<0>>) == "<<104, 101, 108, 108, 111, 0>>" + end + + test "allows atoms" do + assert encode(:hello) == :hello + end + + test "allows numbers" do + assert encode(123) == 123 + 
end + + test "strips Structs" do + assert %{id: "hello"} == encode(%IDStruct{id: "hello"}) + end + + test "does not strip structs for which Jason.Encoder is derived" do + assert %NameStruct{name: "B"} == encode(%NameStruct{name: "B"}) + end + + test "converts tuples to lists" do + assert encode({1, 2, 3}) == [1, 2, 3] + end + + test "converts nested tuples to nested lists" do + assert encode({{2000, 1, 1}, {13, 30, 15}}) == [[2000, 1, 1], [13, 30, 15]] + end + + test "converts Keyword lists to maps" do + assert encode(a: 1, b: 2) == %{a: 1, b: 2} + end + + test "converts non-string map keys" do + assert encode(%{1 => 2}) == %{1 => 2} + assert encode(%{:a => 1}) == %{:a => 1} + assert encode(%{{"a", "b"} => 1}) == %{"{\"a\", \"b\"}" => 1} + assert encode(%{%{a: 1, b: 2} => 3}) == %{"%{a: 1, b: 2}" => 3} + assert encode(%{[{:a, :b}] => 3}) == %{"[a: :b]" => 3} + end + + test "inspects functions" do + assert encode(&encode/1) == "&LoggerJSON.Formatter.Encoder.encode/1" + end + + test "inspects pids" do + assert encode(self()) == inspect(self()) + end + + test "doesn't choke on things that look like keyword lists but aren't" do + assert encode([{:a, 1}, {:b, 2, :c}]) == [[:a, 1], [:b, 2, :c]] + end + + test "formats nested structures" do + input = %{ + foo: [ + foo_a: %{"x" => 1, "y" => %IDStruct{id: 1}}, + foo_b: [foo_b_1: 1, foo_b_2: {"2a", "2b"}] + ], + self: self() + } + + assert encode(input) == %{ + foo: %{ + foo_a: %{"x" => 1, "y" => %{id: 1}}, + foo_b: %{foo_b_1: 1, foo_b_2: ["2a", "2b"]} + }, + self: inspect(self()) + } + end + + property "converts any term so that it can be encoded with Jason" do + check all value <- term() do + value + |> encode() + |> Jason.encode!() + end + end + end +end diff --git a/test/formatters/basic_test.exs b/test/formatters/basic_test.exs new file mode 100644 index 0000000..d48cf07 --- /dev/null +++ b/test/formatters/basic_test.exs @@ -0,0 +1,210 @@ +defmodule LoggerJSON.Formatters.BasicTest do + use Logger.Case + use 
ExUnitProperties + alias LoggerJSON.Formatters.Basic + require Logger + + setup do + formatter = {Basic, metadata: :all} + :logger.update_handler_config(:default, :formatter, formatter) + end + + property "allows to log any binary messages" do + check all message <- StreamData.binary() do + assert capture_log(fn -> + Logger.debug(message) + end) + |> decode_or_print_error() + |> Map.has_key?("message") + end + end + + property "allows to log any structured messages" do + check all message <- StreamData.map_of(StreamData.atom(:alphanumeric), StreamData.term()) do + assert capture_log(fn -> + Logger.debug(message) + end) + |> decode_or_print_error() + |> Map.has_key?("message") + end + + check all message <- StreamData.keyword_of(StreamData.term()) do + assert capture_log(fn -> + Logger.debug(message) + end) + |> decode_or_print_error() + |> Map.has_key?("message") + end + end + + test "logs an LogEntry of a given level" do + for level <- Logger.levels() do + log = + capture_log(level, fn -> + Logger.log(level, "Hello") + end) + |> decode_or_print_error() + + level_string = to_string(level) + + assert %{ + "message" => "Hello", + "metadata" => %{"domain" => ["elixir"]}, + "severity" => ^level_string, + "time" => _ + } = log + end + end + + test "logs an LogEntry with a map payload" do + log = + capture_log(fn -> + Logger.debug(%{foo: :bar, fiz: [1, 2, 3, "buz"]}) + end) + |> decode_or_print_error() + + assert log["message"] == %{ + "fiz" => [1, 2, 3, "buz"], + "foo" => "bar" + } + end + + test "logs an LogEntry with a keyword payload" do + log = + capture_log(fn -> + Logger.debug(a: {0, false}) + end) + |> decode_or_print_error() + + assert log["message"] == %{ + "a" => [0, false] + } + end + + test "logs OpenTelemetry span and trace ids" do + Logger.metadata( + otel_span_id: ~c"bff20904aa5883a6", + otel_trace_flags: ~c"01", + otel_trace_id: ~c"294740ce41cc9f202dedb563db123532" + ) + + log = + capture_log(fn -> + Logger.debug("Hello") + end) + |> 
decode_or_print_error() + + assert log["span"] == "bff20904aa5883a6" + assert log["trace"] == "294740ce41cc9f202dedb563db123532" + end + + test "logs span and trace ids" do + Logger.metadata( + span_id: "bff20904aa5883a6", + trace_id: "294740ce41cc9f202dedb563db123532" + ) + + log = + capture_log(fn -> + Logger.debug("Hello") + end) + |> decode_or_print_error() + + assert log["span"] == "bff20904aa5883a6" + assert log["trace"] == "294740ce41cc9f202dedb563db123532" + end + + test "logs metadata" do + Logger.metadata( + date: Date.utc_today(), + time: Time.new(10, 10, 11), + pid: self(), + ref: make_ref(), + atom: :atom, + list: [1, 2, 3], + map: %{foo: :bar}, + struct: URI.parse("https://example.com"), + binary: "binary", + node: node() + ) + + log = + capture_log(fn -> + Logger.debug("Hello", float: 3.14) + end) + |> decode_or_print_error() + + assert %{ + "metadata" => %{ + "atom" => "atom", + "binary" => "binary", + "date" => "2024-04-11", + "domain" => ["elixir"], + "list" => [1, 2, 3], + "map" => %{"foo" => "bar"}, + "node" => "nonode@nohost", + "ref" => _ref, + "float" => 3.14, + "struct" => %{ + "authority" => "example.com", + "fragment" => nil, + "host" => "example.com", + "path" => nil, + "port" => 443, + "query" => nil, + "scheme" => "https", + "userinfo" => nil + } + } + } = log + end + + test "logs exceptions" do + log = + capture_log(fn -> + pid = + spawn(fn -> + raise RuntimeError + end) + + ref = Process.monitor(pid) + assert_receive {:DOWN, ^ref, _, _, _} + Process.sleep(100) + end) + |> decode_or_print_error() + + assert log["message"] =~ "Process #PID<" + assert log["message"] =~ "> raised an exception" + assert log["message"] =~ "RuntimeError" + end + + test "logs http context" do + conn = + Plug.Test.conn("GET", "/", "") + |> Plug.Conn.put_req_header("user-agent", "Mozilla/5.0") + |> Plug.Conn.put_req_header("referer", "http://www.example2.com/") + |> Plug.Conn.put_req_header("x-forwarded-for", "127.0.0.1,200.111.222.111") + |> 
Plug.Conn.send_resp(200, "Hi!") + + Logger.metadata(conn: conn) + + log = + capture_log(fn -> + Logger.debug("Hello") + end) + |> decode_or_print_error() + + assert log["request"] == %{ + "client" => %{ + "ip" => "127.0.0.1", + "user_agent" => "Mozilla/5.0" + }, + "connection" => %{ + "method" => "GET", + "path" => "/", + "protocol" => "HTTP/1.1", + "status" => 200 + } + } + end +end diff --git a/test/formatters/datadog_test.exs b/test/formatters/datadog_test.exs new file mode 100644 index 0000000..9200a7b --- /dev/null +++ b/test/formatters/datadog_test.exs @@ -0,0 +1,261 @@ +defmodule LoggerJSON.Formatters.DatadogTest do + use Logger.Case + use ExUnitProperties + alias LoggerJSON.Formatters.Datadog + require Logger + + setup do + formatter = {Datadog, metadata: :all} + :logger.update_handler_config(:default, :formatter, formatter) + end + + property "allows to log any binary messages" do + check all message <- StreamData.binary() do + assert capture_log(fn -> + Logger.debug(message) + end) + |> decode_or_print_error() + |> Map.has_key?("message") + end + end + + property "allows to log any structured messages" do + check all message <- StreamData.map_of(StreamData.atom(:alphanumeric), StreamData.term()) do + assert capture_log(fn -> + Logger.debug(message) + end) + |> decode_or_print_error() + |> Map.has_key?("message") + end + + check all message <- StreamData.keyword_of(StreamData.term()) do + assert capture_log(fn -> + Logger.debug(message) + end) + |> decode_or_print_error() + |> Map.has_key?("message") + end + end + + test "logs an LogEntry of a given level" do + for level <- Logger.levels() do + log = + capture_log(level, fn -> + Logger.log(level, "Hello") + end) + |> decode_or_print_error() + + level_string = to_string(level) + + assert %{ + "message" => "Hello", + "domain" => ["elixir"], + "syslog" => %{"hostname" => _hostname, "severity" => ^level_string, "timestamp" => _time} + } = log + end + end + + test "logs an LogEntry with a map payload" do + log 
= + capture_log(fn -> + Logger.debug(%{foo: :bar, fiz: [1, 2, 3, "buz"]}) + end) + |> decode_or_print_error() + + assert log["message"] == %{ + "fiz" => [1, 2, 3, "buz"], + "foo" => "bar" + } + end + + test "logs an LogEntry with a keyword payload" do + log = + capture_log(fn -> + Logger.debug(a: {0, false}) + end) + |> decode_or_print_error() + + assert log["message"] == %{ + "a" => [0, false] + } + end + + test "logs hostname" do + # uses the hostname of the machine by default + log = + capture_log(fn -> + Logger.debug("Hello") + end) + |> decode_or_print_error() + + assert log["syslog"]["hostname"] == :inet.gethostname() |> elem(1) |> IO.chardata_to_string() + + # static value + formatter = {Datadog, hostname: "foo.bar1"} + :logger.update_handler_config(:default, :formatter, formatter) + + log = + capture_log(fn -> + Logger.debug("Hello") + end) + |> decode_or_print_error() + + assert log["syslog"]["hostname"] == "foo.bar1" + + # unset value + formatter = {Datadog, hostname: :unset} + :logger.update_handler_config(:default, :formatter, formatter) + + log = + capture_log(fn -> + Logger.debug("Hello") + end) + |> decode_or_print_error() + + refute Map.has_key?(log["syslog"], "hostname") + end + + test "logs OpenTelemetry span and trace ids" do + Logger.metadata( + otel_span_id: ~c"bff20904aa5883a6", + otel_trace_flags: ~c"01", + otel_trace_id: ~c"294740ce41cc9f202dedb563db123532" + ) + + log = + capture_log(fn -> + Logger.debug("Hello") + end) + |> decode_or_print_error() + + assert log["dd.span_id"] == "13831127321250661286" + assert log["dd.trace_id"] == "3309500741668975922" + end + + test "logs span and trace ids" do + Logger.metadata( + span_id: "bff20904aa5883a6", + trace_id: "294740ce41cc9f202dedb563db123532" + ) + + log = + capture_log(fn -> + Logger.debug("Hello") + end) + |> decode_or_print_error() + + assert log["dd.span_id"] == "bff20904aa5883a6" + assert log["dd.trace_id"] == "294740ce41cc9f202dedb563db123532" + end + + test "logs metadata" do + 
Logger.metadata( + date: Date.utc_today(), + time: Time.new(10, 10, 11), + pid: self(), + ref: make_ref(), + atom: :atom, + list: [1, 2, 3], + map: %{foo: :bar}, + struct: URI.parse("https://example.com"), + binary: "binary", + node: node() + ) + + log = + capture_log(fn -> + Logger.debug("Hello", float: 3.14) + end) + |> decode_or_print_error() + + assert %{ + "atom" => "atom", + "binary" => "binary", + "date" => "2024-04-11", + "domain" => ["elixir"], + "list" => [1, 2, 3], + "map" => %{"foo" => "bar"}, + "message" => "Hello", + "node" => "nonode@nohost", + "ref" => _ref, + "float" => 3.14, + "struct" => %{ + "authority" => "example.com", + "fragment" => nil, + "host" => "example.com", + "path" => nil, + "port" => 443, + "query" => nil, + "scheme" => "https", + "userinfo" => nil + } + } = log + end + + test "logs exceptions" do + log = + capture_log(fn -> + pid = + spawn(fn -> + raise RuntimeError + end) + + ref = Process.monitor(pid) + assert_receive {:DOWN, ^ref, _, _, _} + Process.sleep(100) + end) + |> decode_or_print_error() + + assert %{ + "error" => %{ + "message" => message, + "stack" => stacktrace + }, + "syslog" => %{ + "hostname" => _, + "severity" => "error", + "timestamp" => _ + } + } = log + + assert message =~ "Process #PID<" + assert message =~ "> raised an exception" + assert message =~ "** (RuntimeError) runtime error" + assert stacktrace =~ "test/" + end + + test "logs http context" do + conn = + Plug.Test.conn("GET", "/", "") + |> Plug.Conn.put_req_header("user-agent", "Mozilla/5.0") + |> Plug.Conn.put_req_header("referer", "http://www.example2.com/") + |> Plug.Conn.put_req_header("x-forwarded-for", "127.0.0.1") + |> Plug.Conn.send_resp(200, "Hi!") + + Logger.metadata(conn: conn) + + log = + capture_log(fn -> + Logger.debug("Hello") + end) + |> decode_or_print_error() + + assert log["network"] == %{"client" => %{"ip" => "127.0.0.1"}} + + assert log["http"] == %{ + "referer" => "http://www.example2.com/", + "method" => "GET", + "request_id" => 
nil, + "status_code" => 200, + "url" => "http://www.example.com/", + "url_details" => %{ + "host" => "www.example.com", + "path" => "/", + "port" => 80, + "queryString" => "", + "scheme" => "http" + }, + "useragent" => "Mozilla/5.0" + } + end +end diff --git a/test/formatters/google_cloud_test.exs b/test/formatters/google_cloud_test.exs new file mode 100644 index 0000000..a7381df --- /dev/null +++ b/test/formatters/google_cloud_test.exs @@ -0,0 +1,401 @@ +defmodule LoggerJSON.Formatters.GoogleCloudTest do + use Logger.Case + use ExUnitProperties + alias LoggerJSON.Formatters.GoogleCloud + require Logger + + setup do + formatter = {GoogleCloud, metadata: :all, project_id: "myproj-101"} + :logger.update_handler_config(:default, :formatter, formatter) + end + + property "allows to log any binary messages" do + check all message <- StreamData.binary() do + assert capture_log(fn -> + Logger.debug(message) + end) + |> decode_or_print_error() + |> Map.has_key?("message") + end + end + + property "allows to log any structured messages" do + check all message <- StreamData.map_of(StreamData.atom(:alphanumeric), StreamData.term()) do + assert capture_log(fn -> + Logger.debug(message) + end) + |> decode_or_print_error() + |> Map.has_key?("message") + end + + check all message <- StreamData.keyword_of(StreamData.term()) do + assert capture_log(fn -> + Logger.debug(message) + end) + |> decode_or_print_error() + |> Map.has_key?("message") + end + end + + test "logs an LogEntry of a given level" do + for level <- Logger.levels() do + log_entry = + capture_log(level, fn -> + Logger.log(level, "Hello") + end) + |> decode_or_print_error() + + pid = inspect(self()) + level_string = String.upcase(to_string(level)) + + assert %{ + "logging.googleapis.com/operation" => %{"pid" => ^pid}, + "logging.googleapis.com/sourceLocation" => %{ + "file" => _, + "function" => _, + "line" => _ + }, + "message" => %{"domain" => ["elixir"], "message" => "Hello"}, + "severity" => ^level_string, + 
"time" => _ + } = log_entry + end + end + + test "logs an LogEntry with a map payload" do + log_entry = + capture_log(fn -> + Logger.debug(%{foo: :bar, fiz: [1, 2, 3, "buz"]}) + end) + |> decode_or_print_error() + + assert log_entry["message"] == %{ + "fiz" => [1, 2, 3, "buz"], + "foo" => "bar", + "domain" => ["elixir"] + } + end + + test "logs an LogEntry with a keyword payload" do + log_entry = + capture_log(fn -> + Logger.debug(a: {0, false}) + end) + |> decode_or_print_error() + + assert log_entry["message"] == %{ + "a" => [0, false], + "domain" => ["elixir"] + } + end + + test "logs OpenTelemetry span and trace ids" do + Logger.metadata( + otel_span_id: ~c"bff20904aa5883a6", + otel_trace_flags: ~c"01", + otel_trace_id: ~c"294740ce41cc9f202dedb563db123532" + ) + + log_entry = + capture_log(fn -> + Logger.debug("Hello") + end) + |> decode_or_print_error() + + assert log_entry["logging.googleapis.com/spanId"] == "bff20904aa5883a6" + assert log_entry["logging.googleapis.com/trace"] == "projects/myproj-101/traces/294740ce41cc9f202dedb563db123532" + end + + test "logs span and trace ids" do + Logger.metadata( + span_id: "bff20904aa5883a6", + trace_id: "294740ce41cc9f202dedb563db123532" + ) + + log_entry = + capture_log(fn -> + Logger.debug("Hello") + end) + |> decode_or_print_error() + + assert log_entry["logging.googleapis.com/spanId"] == "bff20904aa5883a6" + assert log_entry["logging.googleapis.com/trace"] == "294740ce41cc9f202dedb563db123532" + end + + test "logs request id" do + Logger.metadata(request_id: "1234567890") + + log_entry = + capture_log(fn -> + Logger.debug("Hello") + end) + |> decode_or_print_error() + + assert log_entry["logging.googleapis.com/operation"]["id"] == "1234567890" + + assert log_entry["message"]["domain"] == ["elixir"] + assert log_entry["message"]["request_id"] == "1234567890" + end + + test "logs metadata" do + Logger.metadata( + date: Date.utc_today(), + time: Time.new(10, 10, 11), + pid: self(), + ref: make_ref(), + atom: :atom, + 
list: [1, 2, 3], + map: %{foo: :bar}, + struct: URI.parse("https://example.com"), + binary: "binary", + node: node() + ) + + log_entry = + capture_log(fn -> + Logger.debug("Hello", float: 3.14) + end) + |> decode_or_print_error() + + assert %{ + "message" => %{ + "atom" => "atom", + "binary" => "binary", + "date" => "2024-04-11", + "domain" => ["elixir"], + "list" => [1, 2, 3], + "map" => %{"foo" => "bar"}, + "message" => "Hello", + "node" => "nonode@nohost", + "ref" => _ref, + "float" => 3.14, + "struct" => %{ + "authority" => "example.com", + "fragment" => nil, + "host" => "example.com", + "path" => nil, + "port" => 443, + "query" => nil, + "scheme" => "https", + "userinfo" => nil + } + } + } = log_entry + end + + test "logs exceptions" do + log_entry = + capture_log(fn -> + pid = + spawn(fn -> + raise RuntimeError + end) + + ref = Process.monitor(pid) + assert_receive {:DOWN, ^ref, _, _, _} + Process.sleep(100) + end) + |> decode_or_print_error() + + assert %{ + "message" => %{ + "@type" => "type.googleapis.com/google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent", + "message" => "runtime error", + "stack_trace" => stacktrace, + "serviceContext" => %{"service" => "nonode@nohost"} + } + } = log_entry + + assert stacktrace =~ "** (RuntimeError) runtime error" + assert stacktrace =~ "test/" + assert stacktrace =~ ":in `" + end + + test "logged exception stacktrace is in Ruby format for Elixir errors" do + error = %RuntimeError{message: "oops"} + + stacktrace = [ + {Foo, :bar, 0, [file: ~c"foo/bar.ex", line: 123]}, + {Foo.Bar, :baz, 1, [file: ~c"foo/bar/baz.ex", line: 456]} + ] + + Logger.metadata(crash_reason: {error, stacktrace}) + + log_entry = + capture_log(fn -> + Logger.debug("foo") + end) + |> decode_or_print_error() + + assert log_entry["message"]["stack_trace"] == + """ + ** (RuntimeError) oops + foo/bar.ex:123:in `Foo.bar/0' + foo/bar/baz.ex:456:in `Foo.Bar.baz/1' + """ + end + + test "logs exception user context" do + 
Logger.metadata(crash_reason: {{:EXIT, self()}, :foo}) + + # The keys are applied in the order of their precedence + [:user_id, :identity_id, :actor_id, :account_id] + |> Enum.reverse() + |> Enum.reduce([], fn key, metadata -> + metadata = Keyword.put(metadata, key, "foo_#{key}") + Logger.metadata(metadata) + + log_entry = + capture_log(fn -> + Logger.debug("Hello") + end) + |> decode_or_print_error() + + [entity, _id] = key |> Atom.to_string() |> String.split("_") + + assert log_entry["message"]["context"]["user"] == "#{entity}:foo_#{key}" + + metadata + end) + end + + test "logs http context" do + conn = + Plug.Test.conn("GET", "/", "") + |> Plug.Conn.put_req_header("user-agent", "Mozilla/5.0") + |> Plug.Conn.put_req_header("referer", "http://www.example2.com/") + |> Plug.Conn.put_req_header("x-forwarded-for", "") + |> Plug.Conn.send_resp(200, "Hi!") + + Logger.metadata(conn: conn) + + log_entry = + capture_log(fn -> + Logger.debug("Hello") + end) + |> decode_or_print_error() + + assert log_entry["httpRequest"] == %{ + "protocol" => "HTTP/1.1", + "referer" => "http://www.example2.com/", + "remoteIp" => "", + "requestMethod" => "GET", + "requestUrl" => "http://www.example.com/", + "status" => 200, + "userAgent" => "Mozilla/5.0" + } + end + + test "logs exception http context" do + conn = + Plug.Test.conn("patch", "/", "") + |> Plug.Conn.put_req_header("user-agent", "Mozilla/5.0") + |> Plug.Conn.put_req_header("referer", "http://www.example.com/") + |> Plug.Conn.put_req_header("x-forwarded-for", "") + |> Plug.Conn.send_resp(503, "oops") + + Logger.metadata(crash_reason: {{:EXIT, self()}, :foo}, conn: conn) + + log_entry = + capture_log(fn -> + Logger.debug("Hello") + end) + |> decode_or_print_error() + + assert log_entry["message"]["context"]["httpRequest"] == %{ + "protocol" => "HTTP/1.1", + "referer" => "http://www.example.com/", + "remoteIp" => "", + "requestMethod" => "PATCH", + "requestUrl" => "http://www.example.com/", + "status" => 503, + "userAgent" => 
"Mozilla/5.0" + } + end + + test "logs throws" do + Logger.metadata(crash_reason: {:throw, {:error, :whatever}}) + + log_entry = + capture_log(fn -> + Logger.debug("oops!") + end) + |> decode_or_print_error() + + assert %{ + "message" => %{ + "@type" => "type.googleapis.com/google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent", + "message" => "oops!", + "stack_trace" => "** (throw) {:error, :whatever}", + "serviceContext" => %{"service" => "nonode@nohost"}, + "context" => %{ + "reportLocation" => %{ + "filePath" => _, + "functionName" => _, + "lineNumber" => _ + } + } + } + } = log_entry + end + + test "logs exits" do + Logger.metadata(crash_reason: {:exit, :sad_failure}) + + log_entry = + capture_log(fn -> + Logger.debug("oops!") + end) + |> decode_or_print_error() + + assert %{ + "message" => %{ + "@type" => "type.googleapis.com/google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent", + "message" => "oops!", + "stack_trace" => "** (exit) :sad_failure", + "serviceContext" => %{"service" => "nonode@nohost"} + } + } = log_entry + end + + test "logs process exits" do + Logger.metadata(crash_reason: {{:EXIT, self()}, :sad_failure}) + + log_entry = + capture_log(fn -> + Logger.debug("oops!") + end) + |> decode_or_print_error() + + assert %{ + "message" => %{ + "@type" => "type.googleapis.com/google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent", + "message" => "oops!", + "stack_trace" => stacktrace, + "serviceContext" => %{"service" => "nonode@nohost"} + } + } = log_entry + + assert stacktrace =~ "** (EXIT from #PID<" + assert stacktrace =~ ">) :sad_failure" + end + + test "logs reasons in tuple" do + Logger.metadata(crash_reason: {:socket_closed_unexpectedly, []}) + + log_entry = + capture_log(fn -> + Logger.debug("oops!") + end) + |> decode_or_print_error() + + assert %{ + "message" => %{ + "@type" => "type.googleapis.com/google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent", + "message" => "oops!", + "stack_trace" => "** 
(socket_closed_unexpectedly) []", + "serviceContext" => %{"service" => "nonode@nohost"} + } + } = log_entry + end +end diff --git a/test/support/logger_case.ex b/test/support/logger_case.ex index 1fe8edc..dbd94e7 100644 --- a/test/support/logger_case.ex +++ b/test/support/logger_case.ex @@ -9,30 +9,6 @@ defmodule Logger.Case do end end - def msg(msg) do - ~r/\d\d\:\d\d\:\d\d\.\d\d\d #{Regex.escape(msg)}/ - end - - def wait_for_handler(manager, handler) do - unless handler in :gen_event.which_handlers(manager) do - Process.sleep(10) - wait_for_handler(manager, handler) - end - end - - def wait_for_logger do - try do - :gen_event.which_handlers(Logger) - else - _ -> - :ok - catch - :exit, _ -> - Process.sleep(10) - wait_for_logger() - end - end - def capture_log(level \\ :debug, fun) do Logger.configure(level: level) @@ -43,4 +19,14 @@ defmodule Logger.Case do after Logger.configure(level: :debug) end + + def decode_or_print_error(data) do + try do + Jason.decode!(data) + rescue + _reason -> + IO.puts(data) + flunk("Failed to decode JSON") + end + end end diff --git a/test/test_helper.exs b/test/test_helper.exs index 9411938..3140500 100644 --- a/test/test_helper.exs +++ b/test/test_helper.exs @@ -1,2 +1,2 @@ -Application.put_env(:plug, :validate_header_keys_during_test, true) +ExUnit.configure(formatters: [JUnitFormatter, ExUnit.CLIFormatter]) ExUnit.start() diff --git a/test/unit/logger_json/ecto_test.exs b/test/unit/logger_json/ecto_test.exs deleted file mode 100644 index f76874d..0000000 --- a/test/unit/logger_json/ecto_test.exs +++ /dev/null @@ -1,115 +0,0 @@ -defmodule LoggerJSON.EctoTest do - use Logger.Case, async: false - import ExUnit.CaptureIO - require Logger - - setup do - :ok = - Logger.configure_backend( - LoggerJSON, - device: :user, - level: nil, - metadata: [], - json_encoder: Jason, - on_init: :disabled, - formatter: LoggerJSON.Formatters.GoogleCloudLogger, - formatter_state: %{} - ) - - diff = :erlang.convert_time_unit(1, :microsecond, :native) - 
- entry = %{ - query: fn -> "done" end, - result: {:ok, []}, - params: [1, 2, 3, %Ecto.Query.Tagged{value: 1}], - query_time: 2100 * diff, - decode_time: 500 * diff, - queue_time: 100 * diff, - source: "test" - } - - %{log_entry: entry} - end - - test "logs ecto queries", %{log_entry: entry} do - Logger.configure_backend(LoggerJSON, device: :standard_error, metadata: :all) - - log = - capture_io(:standard_error, fn -> - LoggerJSON.Ecto.log(entry) - Logger.flush() - end) - - assert %{ - "message" => "done", - "query" => %{ - "decode_time_μs" => 500, - "latency_μs" => 2700, - "execution_time_μs" => 2100, - "queue_time_μs" => 100 - } - } = Jason.decode!(log) - end - - test "logs ecto queries with debug level", %{log_entry: entry} do - Logger.configure_backend(LoggerJSON, device: :standard_error, metadata: :all) - - log = - capture_io(:standard_error, fn -> - LoggerJSON.Ecto.log(entry, :debug) - Logger.flush() - end) - - assert %{ - "message" => "done", - "query" => %{ - "decode_time_μs" => 500, - "latency_μs" => 2700, - "execution_time_μs" => 2100, - "queue_time_μs" => 100 - } - } = Jason.decode!(log) - end - - test "logs ecto queries received via telemetry event" do - Logger.configure_backend(LoggerJSON, device: :standard_error, metadata: :all) - - log = - capture_io(:standard_error, fn -> - LoggerJSON.Ecto.telemetry_logging_handler( - [:repo, :query], - %{query_time: 2_930_000, queue_time: 106_000, total_time: 3_036_000}, - %{ - params: [], - query: "begin", - repo: Repo, - result: - {:ok, - %{ - columns: nil, - command: :savepoint, - connection_id: 26925, - messages: [], - num_rows: nil, - rows: nil - }}, - source: nil, - type: :ecto_sql_query - }, - :info - ) - - Logger.flush() - end) - - assert %{ - "message" => "begin", - "query" => %{ - "latency_μs" => 3036, - "execution_time_μs" => 2930, - "queue_time_μs" => 106, - "repo" => "Repo" - } - } = Jason.decode!(log) - end -end diff --git a/test/unit/logger_json/formatters/basic_logger_test.exs 
b/test/unit/logger_json/formatters/basic_logger_test.exs deleted file mode 100644 index c240fc7..0000000 --- a/test/unit/logger_json/formatters/basic_logger_test.exs +++ /dev/null @@ -1,94 +0,0 @@ -defmodule LoggerJSON.Formatters.BasicLoggerTest do - use Logger.Case, async: false - require Logger - alias LoggerJSON.Formatters.BasicLogger - - defmodule IDStruct, do: defstruct(id: nil) - - setup do - :ok = - Logger.configure_backend( - LoggerJSON, - device: :user, - level: nil, - metadata: [], - json_encoder: Jason, - on_init: :disabled, - formatter: BasicLogger, - formatter_state: %{} - ) - end - - describe "metadata" do - test "can be configured" do - Logger.configure_backend(LoggerJSON, metadata: [:user_id]) - - assert capture_log(fn -> - Logger.debug("hello") - end) =~ "hello" - - Logger.metadata(user_id: 11) - Logger.metadata(dynamic_metadata: 5) - - log = - fn -> Logger.debug("hello") end - |> capture_log() - |> Jason.decode!() - - assert %{"user_id" => 11} == log["metadata"] - end - - test "can be configured to :all" do - Logger.configure_backend(LoggerJSON, metadata: :all) - - Logger.metadata(user_id: 11) - Logger.metadata(dynamic_metadata: 5) - - log = - fn -> Logger.debug("hello") end - |> capture_log() - |> Jason.decode!() - - assert %{"user_id" => 11, "dynamic_metadata" => 5} = log["metadata"] - end - - test "can be empty" do - Logger.configure_backend(LoggerJSON, metadata: []) - - Logger.metadata(user_id: 11) - Logger.metadata(dynamic_metadata: 5) - - log = - fn -> Logger.debug("hello") end - |> capture_log() - |> Jason.decode!() - - assert %{"message" => "hello"} = log - assert %{} == log["metadata"] - end - - test "converts Struct metadata to maps" do - Logger.configure_backend(LoggerJSON, metadata: :all) - - Logger.metadata(id_struct: %IDStruct{id: "test"}) - - log = - fn -> Logger.debug("hello") end - |> capture_log() - |> Jason.decode!() - - assert %{"metadata" => %{"id_struct" => %{"id" => "test"}}} = log - end - end - - test "logs chardata 
messages" do - Logger.configure_backend(LoggerJSON, metadata: :all) - - log = - fn -> Logger.debug([?α, ?β, ?ω]) end - |> capture_log() - |> Jason.decode!() - - assert %{"message" => "αβω"} = log - end -end diff --git a/test/unit/logger_json/formatters/datadog_logger_test.exs b/test/unit/logger_json/formatters/datadog_logger_test.exs deleted file mode 100644 index a2ef055..0000000 --- a/test/unit/logger_json/formatters/datadog_logger_test.exs +++ /dev/null @@ -1,453 +0,0 @@ -defmodule LoggerJSONDatadogTest do - use Logger.Case, async: false - import ExUnit.CaptureIO - require Logger - alias LoggerJSON.Formatters.DatadogLogger - - defmodule IDStruct, do: defstruct(id: nil) - - setup do - :ok = - Logger.configure_backend( - LoggerJSON, - device: :user, - level: nil, - metadata: [], - json_encoder: Jason, - on_init: :disabled, - formatter: DatadogLogger, - formatter_state: %{} - ) - - :ok = Logger.reset_metadata([]) - end - - describe "configure_log_level!/1" do - test "tolerates nil values" do - assert :ok == LoggerJSON.configure_log_level!(nil) - end - - test "raises on invalid LOG_LEVEL" do - assert_raise ArgumentError, fn -> - LoggerJSON.configure_log_level!("super_critical") - end - - assert_raise ArgumentError, fn -> - LoggerJSON.configure_log_level!(1_337) - end - end - - test "configures log level with valid string value" do - :ok = LoggerJSON.configure_log_level!("debug") - end - - test "configures log level with valid atom value" do - :ok = LoggerJSON.configure_log_level!(:debug) - end - end - - test "logs empty binary messages" do - Logger.configure_backend(LoggerJSON, metadata: :all) - - log = - fn -> Logger.debug("") end - |> capture_log() - |> Jason.decode!() - - assert %{"message" => ""} = log - end - - test "logs binary messages" do - Logger.configure_backend(LoggerJSON, metadata: :all) - - log = - fn -> Logger.debug("hello") end - |> capture_log() - |> Jason.decode!() - - assert %{"message" => "hello"} = log - end - - test "logs empty iodata messages" 
do - Logger.configure_backend(LoggerJSON, metadata: :all) - - log = - fn -> Logger.debug([]) end - |> capture_log() - |> Jason.decode!() - - assert %{"message" => ""} = log - end - - test "logs iodata messages" do - Logger.configure_backend(LoggerJSON, metadata: :all) - - log = - fn -> Logger.debug([?h, ?e, ?l, ?l, ?o]) end - |> capture_log() - |> Jason.decode!() - - assert %{"message" => "hello"} = log - end - - test "logs chardata messages" do - Logger.configure_backend(LoggerJSON, metadata: :all) - - log = - fn -> Logger.debug([?π, ?α, ?β]) end - |> capture_log() - |> Jason.decode!() - - assert %{"message" => "παβ"} = log - end - - test "log message does not break escaping" do - Logger.configure_backend(LoggerJSON, metadata: :all) - - log = - fn -> Logger.debug([?", ?h]) end - |> capture_log() - |> Jason.decode!() - - assert %{"message" => "\"h"} = log - - log = - fn -> Logger.debug("\"h") end - |> capture_log() - |> Jason.decode!() - - assert %{"message" => "\"h"} = log - end - - test "does not start when there is no user" do - :ok = Logger.remove_backend(LoggerJSON) - user = Process.whereis(:user) - - try do - Process.unregister(:user) - assert {:error, :ignore} == :gen_event.add_handler(Logger, LoggerJSON, LoggerJSON) - after - Process.register(user, :user) - end - after - {:ok, _} = Logger.add_backend(LoggerJSON) - end - - test "may use another device" do - Logger.configure_backend(LoggerJSON, device: :standard_error) - - assert capture_io(:standard_error, fn -> - Logger.debug("hello") - Logger.flush() - end) =~ "hello" - end - - describe "metadata" do - test "can be configured" do - Logger.configure_backend(LoggerJSON, metadata: [:user_id]) - - assert capture_log(fn -> - Logger.debug("hello") - end) =~ "hello" - - Logger.metadata(user_id: 13) - - log = - fn -> Logger.debug("hello") end - |> capture_log() - |> Jason.decode!() - - assert %{"user_id" => 13} = log - end - - test "can be configured to :all" do - Logger.configure_backend(LoggerJSON, metadata: 
:all) - - Logger.metadata(user_id: 11) - Logger.metadata(dynamic_metadata: 5) - - log = - fn -> Logger.debug("hello") end - |> capture_log() - |> Jason.decode!() - - assert %{"user_id" => 11} = log - assert %{"dynamic_metadata" => 5} = log - end - - test "can be empty" do - Logger.configure_backend(LoggerJSON, metadata: []) - - log = - fn -> Logger.debug("hello") end - |> capture_log() - |> Jason.decode!() - - assert %{"message" => "hello"} = log - end - - test "ignore otp's metadata unixtime" do - Logger.configure_backend(LoggerJSON, metadata: :all) - - log = - fn -> Logger.debug("hello") end - |> capture_log() - |> Jason.decode!() - - assert not is_integer(log["time"]) - end - - test "converts Struct metadata to maps" do - Logger.configure_backend(LoggerJSON, metadata: :all) - - Logger.metadata(id_struct: %IDStruct{id: "test"}) - - log = - fn -> Logger.debug("hello") end - |> capture_log() - |> Jason.decode!() - - assert %{"id_struct" => %{"id" => "test"}} = log - end - - test "convert trace_id/span_id to expected datadog keys" do - Logger.configure_backend(LoggerJSON, metadata: :all) - Logger.metadata(trace_id: "1", span_id: "2") - - log = - fn -> Logger.debug("hello") end - |> capture_log() - |> Jason.decode!() - - assert %{"dd.trace_id" => "1", "dd.span_id" => "2"} = log - assert Map.has_key?(log, "trace_id") == false - assert Map.has_key?(log, "span_id") == false - end - - test "trace_id/span_id has greater priority than otel_trace_id/otel_span_id" do - Logger.configure_backend(LoggerJSON, metadata: :all) - - otel_id = - <<98, 56, 49, 48, 100, 98, 97, 50, 57, 56, 48, 51, 101, 101, 54, 49, 101, 55, 99, 55, 49, 102, 102, 48, 99, 50, - 99, 57, 53, 97, 57, 100>> - - Logger.metadata(otel_trace_id: otel_id, trace_id: "1", span_id: "2", otel_span_id: otel_id) - - log = - fn -> Logger.debug("hello") end - |> capture_log() - |> Jason.decode!() - - assert %{"dd.trace_id" => "1", "dd.span_id" => "2"} = log - assert Map.has_key?(log, "trace_id") == false - assert 
Map.has_key?(log, "span_id") == false - end - - test "convert otel_trace_id/otel_span_id binary to expected datadog keys" do - Logger.configure_backend(LoggerJSON, metadata: :all) - - Logger.metadata( - otel_trace_id: - <<98, 56, 49, 48, 100, 98, 97, 50, 57, 56, 48, 51, 101, 101, 54, 49, 101, 55, 99, 55, 49, 102, 102, 48, 99, - 50, 99, 57, 53, 97, 57, 100>> - ) - - log = - fn -> Logger.debug("hello") end - |> capture_log() - |> Jason.decode!() - - assert %{"dd.trace_id" => "16701352862047361693"} = log - assert Map.has_key?(log, "trace_id") == false - assert Map.has_key?(log, "span_id") == false - end - - test "convert otel_trace_id/otel_span_id charlist to expected datadog keys" do - Logger.configure_backend(LoggerJSON, metadata: :all) - Logger.metadata(otel_trace_id: 'b810dba29803ee61e7c71ff0c2c95a9d') - - log = - fn -> Logger.debug("hello") end - |> capture_log() - |> Jason.decode!() - - assert %{"dd.trace_id" => "16701352862047361693"} = log - assert Map.has_key?(log, "trace_id") == false - assert Map.has_key?(log, "span_id") == false - end - - test "convert otel_trace_id/otel_span_id string to expected datadog keys" do - Logger.configure_backend(LoggerJSON, metadata: :all) - Logger.metadata(otel_trace_id: "e7c71ff0c2c95a9d") - - log = - fn -> Logger.debug("hello") end - |> capture_log() - |> Jason.decode!() - - assert %{"dd.trace_id" => "16701352862047361693"} = log - assert Map.has_key?(log, "trace_id") == false - assert Map.has_key?(log, "span_id") == false - end - - test "does not error on incorrect otel_trace_id/otel_span_id metadata" do - Logger.configure_backend(LoggerJSON, metadata: :all) - Logger.metadata(otel_trace_id: {:noop}) - - log = - fn -> Logger.debug("hello") end - |> capture_log() - |> Jason.decode!() - - assert %{"dd.trace_id" => ""} = log - assert Map.has_key?(log, "trace_id") == false - assert Map.has_key?(log, "span_id") == false - end - end - - describe "on_init/1 callback" do - test "raises when invalid" do - assert_raise ArgumentError, 
- "invalid :on_init option for :logger_json application. " <> - "Expected a tuple with module, function and args, got: :atom", - fn -> - LoggerJSON.init({LoggerJSON, [on_init: :atom]}) - end - end - - test "is triggered" do - Logger.configure_backend(LoggerJSON, metadata: [], on_init: {LoggerJSONDatadogTest, :on_init_cb, []}) - - Logger.metadata(user_id: 11) - - log = - fn -> Logger.debug("hello") end - |> capture_log() - |> Jason.decode!() - - assert %{"user_id" => 11} = log - end - end - - test "contains source location" do - %{module: mod, function: {name, arity}, file: file, line: before_log_line_num} = __ENV__ - - log = - fn -> Logger.debug("hello") end - |> capture_log() - |> Jason.decode!() - - %{line: after_log_line_num} = __ENV__ - - function = "Elixir.#{inspect(mod)}.#{name}/#{arity}" - - assert %{ - "logger" => %{ - "method_name" => ^function, - "line" => log_line_num, - "file_name" => ^file, - "thread_name" => _ - } - } = log - - assert log_line_num > before_log_line_num and log_line_num < after_log_line_num - end - - test "may configure level" do - Logger.configure_backend(LoggerJSON, level: :info) - - assert capture_log(fn -> - Logger.debug("hello") - end) == "" - end - - test "logs hostname when set to :system" do - Logger.configure_backend(LoggerJSON, formatter_opts: %{hostname: :system}) - {:ok, hostname} = :inet.gethostname() - - log = - fn -> Logger.debug("hello") end - |> capture_log() - |> Jason.decode!() - - assert log["syslog"]["hostname"] == to_string(hostname) - end - - test "does not log hostname when set to :unset" do - Logger.configure_backend(LoggerJSON, formatter_opts: %{hostname: :unset}) - - log = - fn -> Logger.debug("hello") end - |> capture_log() - |> Jason.decode!() - - assert log["syslog"]["hostname"] == nil - end - - test "logs hostname when set to string" do - Logger.configure_backend(LoggerJSON, formatter_opts: %{hostname: "testing"}) - - log = - fn -> Logger.debug("hello") end - |> capture_log() - |> Jason.decode!() - - 
assert log["syslog"]["hostname"] == "testing" - end - - test "logs severity" do - log = - fn -> Logger.debug("hello") end - |> capture_log() - |> Jason.decode!() - - assert %{"syslog" => %{"severity" => "debug"}} = log - - log = - fn -> Logger.warn("hello") end - |> capture_log() - |> Jason.decode!() - - assert %{"syslog" => %{"severity" => "warn"}} = log - end - - test "logs crash reason when present" do - Logger.configure_backend(LoggerJSON, metadata: [:crash_reason]) - Logger.metadata(crash_reason: {%RuntimeError{message: "oops"}, []}) - - log = - capture_log(fn -> Logger.debug("hello") end) - |> Jason.decode!() - - assert is_nil(log["error"]["initial_call"]) - assert log["error"]["reason"] == "** (RuntimeError) oops" - end - - test "logs erlang style crash reasons" do - Logger.configure_backend(LoggerJSON, metadata: [:crash_reason]) - Logger.metadata(crash_reason: {:socket_closed_unexpectedly, []}) - - log = - capture_log(fn -> Logger.debug("hello") end) - |> Jason.decode!() - - assert is_nil(log["error"]["initial_call"]) - assert log["error"]["reason"] == "{:socket_closed_unexpectedly, []}" - end - - test "logs initial call when present" do - Logger.configure_backend(LoggerJSON, metadata: [:initial_call]) - Logger.metadata(crash_reason: {%RuntimeError{message: "oops"}, []}, initial_call: {Foo, :bar, 3}) - - log = - capture_log(fn -> Logger.debug("hello") end) - |> Jason.decode!() - - assert log["error"]["initial_call"] == "Elixir.Foo.bar/3" - end - - # Sets metadata to :all for test purposes - def on_init_cb(conf) do - {:ok, Keyword.put(conf, :metadata, :all)} - end -end diff --git a/test/unit/logger_json/formatters/google_cloud_logger_test.exs b/test/unit/logger_json/formatters/google_cloud_logger_test.exs deleted file mode 100644 index bed9af1..0000000 --- a/test/unit/logger_json/formatters/google_cloud_logger_test.exs +++ /dev/null @@ -1,360 +0,0 @@ -defmodule LoggerJSON.GoogleCloudLoggerTest do - use Logger.Case, async: false - import ExUnit.CaptureIO - 
require Logger - alias LoggerJSON.Formatters.GoogleCloudLogger - - defmodule IDStruct, do: defstruct(id: nil) - - setup do - :ok = - Logger.configure_backend( - LoggerJSON, - device: :user, - level: nil, - metadata: [], - json_encoder: Jason, - on_init: :disabled, - formatter: GoogleCloudLogger, - formatter_state: %{} - ) - - :ok = Logger.reset_metadata([]) - end - - describe "configure_log_level!/1" do - test "tolerates nil values" do - assert :ok == LoggerJSON.configure_log_level!(nil) - end - - test "raises on invalid LOG_LEVEL" do - assert_raise ArgumentError, fn -> - LoggerJSON.configure_log_level!("super_critical") - end - - assert_raise ArgumentError, fn -> - LoggerJSON.configure_log_level!(1_337) - end - end - - test "configures log level with valid string value" do - :ok = LoggerJSON.configure_log_level!("debug") - end - - test "configures log level with valid atom value" do - :ok = LoggerJSON.configure_log_level!(:debug) - end - end - - test "logs empty binary messages" do - Logger.configure_backend(LoggerJSON, metadata: :all) - - log = - fn -> Logger.debug("") end - |> capture_log() - |> Jason.decode!() - - assert %{"message" => ""} = log - end - - test "logs binary messages" do - Logger.configure_backend(LoggerJSON, metadata: :all) - - log = - fn -> Logger.debug("hello") end - |> capture_log() - |> Jason.decode!() - - assert %{"message" => "hello"} = log - end - - test "logs empty iodata messages" do - Logger.configure_backend(LoggerJSON, metadata: :all) - - log = - fn -> Logger.debug([]) end - |> capture_log() - |> Jason.decode!() - - assert %{"message" => ""} = log - end - - test "logs iodata messages" do - Logger.configure_backend(LoggerJSON, metadata: :all) - - log = - fn -> Logger.debug([?h, ?e, ?l, ?l, ?o]) end - |> capture_log() - |> Jason.decode!() - - assert %{"message" => "hello"} = log - end - - test "logs chardata messages" do - Logger.configure_backend(LoggerJSON, metadata: :all) - - log = - fn -> Logger.debug([?π, ?α, ?β]) end - |> 
capture_log() - |> Jason.decode!() - - assert %{"message" => "παβ"} = log - end - - test "log message does not break escaping" do - Logger.configure_backend(LoggerJSON, metadata: :all) - - log = - fn -> Logger.debug([?", ?h]) end - |> capture_log() - |> Jason.decode!() - - assert %{"message" => "\"h"} = log - - log = - fn -> Logger.debug("\"h") end - |> capture_log() - |> Jason.decode!() - - assert %{"message" => "\"h"} = log - end - - test "does not start when there is no user" do - :ok = Logger.remove_backend(LoggerJSON) - user = Process.whereis(:user) - - try do - Process.unregister(:user) - assert {:error, :ignore} == :gen_event.add_handler(Logger, LoggerJSON, LoggerJSON) - after - Process.register(user, :user) - end - after - {:ok, _} = Logger.add_backend(LoggerJSON) - end - - test "may use another device" do - Logger.configure_backend(LoggerJSON, device: :standard_error) - - assert capture_io(:standard_error, fn -> - Logger.debug("hello") - Logger.flush() - end) =~ "hello" - end - - describe "metadata" do - test "can be configured" do - Logger.configure_backend(LoggerJSON, metadata: [:user_id]) - - assert capture_log(fn -> - Logger.debug("hello") - end) =~ "hello" - - Logger.metadata(user_id: 13) - - log = - fn -> Logger.debug("hello") end - |> capture_log() - |> Jason.decode!() - - assert %{"user_id" => 13} = log - end - - test "can be configured to :all" do - Logger.configure_backend(LoggerJSON, metadata: :all) - - Logger.metadata(user_id: 11) - Logger.metadata(dynamic_metadata: 5) - - log = - fn -> Logger.debug("hello") end - |> capture_log() - |> Jason.decode!() - - assert %{"user_id" => 11} = log - assert %{"dynamic_metadata" => 5} = log - end - - test "can be configured to :all_except" do - Logger.configure_backend(LoggerJSON, metadata: {:all_except, [:user_id]}) - - Logger.metadata(user_id: 11) - Logger.metadata(dynamic_metadata: 5) - - log = - fn -> Logger.debug("hello") end - |> capture_log() - |> Jason.decode!() - - refute Map.has_key?(log, 
"user_id") - assert Map.has_key?(log, "dynamic_metadata") - end - - test "can be empty" do - Logger.configure_backend(LoggerJSON, metadata: []) - - log = - fn -> Logger.debug("hello") end - |> capture_log() - |> Jason.decode!() - - assert %{"message" => "hello"} = log - end - - test "ignore otp's metadata unixtime" do - Logger.configure_backend(LoggerJSON, metadata: :all) - - log = - fn -> Logger.debug("hello") end - |> capture_log() - |> Jason.decode!() - - assert not is_integer(log["time"]) - end - - test "converts Struct metadata to maps" do - Logger.configure_backend(LoggerJSON, metadata: :all) - - Logger.metadata(id_struct: %IDStruct{id: "test"}) - - log = - fn -> Logger.debug("hello") end - |> capture_log() - |> Jason.decode!() - - assert %{"id_struct" => %{"id" => "test"}} = log - end - end - - describe "on_init/1 callback" do - test "raises when invalid" do - assert_raise ArgumentError, - "invalid :on_init option for :logger_json application. " <> - "Expected a tuple with module, function and args, got: :atom", - fn -> - LoggerJSON.init({LoggerJSON, [on_init: :atom]}) - end - end - - test "is triggered" do - Logger.configure_backend(LoggerJSON, metadata: [], on_init: {LoggerJSON.GoogleCloudLoggerTest, :on_init_cb, []}) - - Logger.metadata(user_id: 11) - - log = - fn -> Logger.debug("hello") end - |> capture_log() - |> Jason.decode!() - - assert %{"user_id" => 11} = log - end - end - - test "contains source location" do - %{module: mod, function: {name, arity}, file: file, line: line} = __ENV__ - - log = - fn -> Logger.debug("hello") end - |> capture_log() - |> Jason.decode!() - - line = line + 3 - function = "Elixir.#{inspect(mod)}.#{name}/#{arity}" - - assert %{ - "logging.googleapis.com/sourceLocation" => %{ - "file" => ^file, - "line" => ^line, - "function" => ^function - } - } = log - end - - test "may configure level" do - Logger.configure_backend(LoggerJSON, level: :info) - - assert capture_log(fn -> - Logger.debug("hello") - end) == "" - end - - test 
"logs severity" do - log = - fn -> Logger.debug("hello") end - |> capture_log() - |> Jason.decode!() - - assert %{"severity" => "DEBUG"} = log - - log = - fn -> Logger.warn("hello") end - |> capture_log() - |> Jason.decode!() - - assert %{"severity" => "WARNING"} = log - end - - test "logs crash reason for exceptions when present" do - Logger.configure_backend(LoggerJSON, metadata: [:crash_reason]) - Logger.metadata(crash_reason: {%RuntimeError{message: "oops"}, []}) - - log = - capture_log(fn -> Logger.debug("hello") end) - |> Jason.decode!() - - assert is_nil(log["error"]["initial_call"]) - assert log["error"]["reason"] == "** (RuntimeError) oops" - end - - test "logs crash reason for throws when present" do - Logger.configure_backend(LoggerJSON, metadata: [:crash_reason]) - Logger.metadata(crash_reason: {:throw, {:error, :whatever}}) - - log = - capture_log(fn -> Logger.debug("hello") end) - |> Jason.decode!() - - assert is_nil(log["error"]["initial_call"]) - assert log["error"]["reason"] == "** (throw) {:error, :whatever}" - end - - test "logs crash reason for exits when present" do - Logger.configure_backend(LoggerJSON, metadata: [:crash_reason]) - Logger.metadata(crash_reason: {:exit, :abnormal}) - - log = - capture_log(fn -> Logger.debug("hello") end) - |> Jason.decode!() - - assert is_nil(log["error"]["initial_call"]) - assert log["error"]["reason"] == "** (exit) :abnormal" - end - - test "logs erlang style crash reasons" do - Logger.configure_backend(LoggerJSON, metadata: [:crash_reason]) - Logger.metadata(crash_reason: {:socket_closed_unexpectedly, []}) - - log = - capture_log(fn -> Logger.debug("hello") end) - |> Jason.decode!() - - assert is_nil(log["error"]["initial_call"]) - assert log["error"]["reason"] == "{:socket_closed_unexpectedly, []}" - end - - test "logs initial call when present" do - Logger.configure_backend(LoggerJSON, metadata: [:initial_call]) - Logger.metadata(crash_reason: {%RuntimeError{message: "oops"}, []}, initial_call: {Foo, :bar, 
3}) - - log = - capture_log(fn -> Logger.debug("hello") end) - |> Jason.decode!() - - assert log["error"]["initial_call"] == "Elixir.Foo.bar/3" - end - - # Sets metadata to :all for test purposes - def on_init_cb(conf) do - {:ok, Keyword.put(conf, :metadata, :all)} - end -end diff --git a/test/unit/logger_json/formatters/google_error_reporter_test.exs b/test/unit/logger_json/formatters/google_error_reporter_test.exs deleted file mode 100644 index 3c502bc..0000000 --- a/test/unit/logger_json/formatters/google_error_reporter_test.exs +++ /dev/null @@ -1,95 +0,0 @@ -defmodule LoggerJSON.Formatters.GoogleErrorReporterTest do - use Logger.Case, async: false - alias LoggerJSON.Formatters.GoogleCloudLogger - alias LoggerJSON.Formatters.GoogleErrorReporter - - setup do - :ok = - Logger.configure_backend( - LoggerJSON, - device: :user, - level: nil, - metadata: :all, - json_encoder: Jason, - on_init: :disabled, - formatter: GoogleCloudLogger, - formatter_state: %{} - ) - - :ok = Logger.reset_metadata([]) - end - - test "metadata" do - log = - capture_log(fn -> GoogleErrorReporter.report(:error, %RuntimeError{message: "oops"}, []) end) - |> Jason.decode!() - - assert log["severity"] == "ERROR" - assert log["@type"] == "type.googleapis.com/google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent" - end - - test "google_error_reporter metadata" do - :ok = - Application.put_env(:logger_json, :google_error_reporter, service_context: [service: "myapp", version: "abc123"]) - - log = - capture_log(fn -> GoogleErrorReporter.report(:error, %RuntimeError{message: "oops"}, []) end) - |> Jason.decode!() - - assert log["serviceContext"]["service"] == "myapp" - assert log["serviceContext"]["version"] == "abc123" - after - Application.delete_env(:logger_json, :google_error_reporter) - end - - test "optional metadata" do - log = - capture_log(fn -> GoogleErrorReporter.report(:error, %RuntimeError{message: "oops"}, [], foo: "bar") end) - |> Jason.decode!() - - assert log["foo"] == 
"bar" - end - - test "logs elixir error" do - error = %RuntimeError{message: "oops"} - - stacktrace = [ - {Foo, :bar, 0, [file: 'foo/bar.ex', line: 123]}, - {Foo.Bar, :baz, 1, [file: 'foo/bar/baz.ex', line: 456]} - ] - - log = - capture_log(fn -> GoogleErrorReporter.report(:error, error, stacktrace) end) - |> Jason.decode!() - - assert log["message"] == - """ - ** (RuntimeError) oops - foo/bar.ex:123:in `Foo.bar/0' - foo/bar/baz.ex:456:in `Foo.Bar.baz/1' - """ - end - - test "logs erlang error" do - error = :undef - - stacktrace = [ - {Foo, :bar, [123, 456], []}, - {Foo, :bar, 2, [file: 'foo/bar.ex', line: 123]}, - {Foo.Bar, :baz, 1, [file: 'foo/bar/baz.ex', line: 456]} - ] - - log = - capture_log(fn -> GoogleErrorReporter.report(:error, error, stacktrace) end) - |> Jason.decode!() - - assert log["message"] == - """ - ** (UndefinedFunctionError) function Foo.bar/2 is undefined (module Foo is not available) - foo/bar.ex:123:in `Foo.bar/2' - foo/bar/baz.ex:456:in `Foo.Bar.baz/1' - Context: - Foo.bar(123, 456) - """ - end -end diff --git a/test/unit/logger_json/json_safe_formatter_test.exs b/test/unit/logger_json/json_safe_formatter_test.exs deleted file mode 100644 index 760aa84..0000000 --- a/test/unit/logger_json/json_safe_formatter_test.exs +++ /dev/null @@ -1,99 +0,0 @@ -defmodule LoggerJSON.JasonSafeFormatterTest do - use Logger.Case, async: true - use ExUnitProperties - - alias LoggerJSON.JasonSafeFormatter, as: Formatter - - defmodule IDStruct, do: defstruct(id: nil) - - describe "format/1" do - test "allows nils" do - assert nil == Formatter.format(nil) - end - - test "allows booleans" do - assert true == Formatter.format(true) - assert false == Formatter.format(false) - end - - test "allows printable strings" do - assert "hello" == Formatter.format("hello") - end - - test "inspects non-printable binaries" do - assert "<<104, 101, 108, 108, 111, 0>>" == Formatter.format("hello" <> <<0>>) - end - - test "allows atoms" do - assert :hello == 
Formatter.format(:hello) - end - - test "allows numbers" do - assert 123 == Formatter.format(123) - end - - test "strips Structs" do - assert %{id: "hello"} == Formatter.format(%IDStruct{id: "hello"}) - end - - test "does not strip structs for which Jason.Encoder is derived" do - assert %NameStruct{name: "B"} == Formatter.format(%NameStruct{name: "B"}) - end - - test "converts tuples to lists" do - assert [1, 2, 3] == Formatter.format({1, 2, 3}) - end - - test "converts nested tuples to nested lists" do - assert [[2000, 1, 1], [13, 30, 15]] == Formatter.format({{2000, 1, 1}, {13, 30, 15}}) - end - - test "converts Keyword lists to maps" do - assert %{a: 1, b: 2} == Formatter.format(a: 1, b: 2) - end - - test "converts non-string map keys" do - assert Formatter.format(%{1 => 2}) == %{1 => 2} - assert Formatter.format(%{:a => 1}) == %{:a => 1} - assert Formatter.format(%{{"a", "b"} => 1}) == %{"{\"a\", \"b\"}" => 1} - assert Formatter.format(%{%{a: 1, b: 2} => 3}) == %{"%{a: 1, b: 2}" => 3} - assert Formatter.format(%{[{:a, :b}] => 3}) == %{"[a: :b]" => 3} - end - - test "inspects functions" do - assert "&LoggerJSON.JasonSafeFormatter.format/1" == Formatter.format(&Formatter.format/1) - end - - test "inspects pids" do - assert inspect(self()) == Formatter.format(self()) - end - - test "doesn't choke on things that look like keyword lists but aren't" do - assert [%{a: 1}, [:b, 2, :c]] == Formatter.format([{:a, 1}, {:b, 2, :c}]) - end - - test "formats nested structures" do - input = %{ - foo: [ - foo_a: %{"x" => 1, "y" => %IDStruct{id: 1}}, - foo_b: [foo_b_1: 1, foo_b_2: {"2a", "2b"}] - ], - self: self() - } - - assert %{ - foo: %{ - foo_a: %{"x" => 1, "y" => %{id: 1}}, - foo_b: %{foo_b_1: 1, foo_b_2: %{"2a" => "2b"}} - }, - self: inspect(self()) - } == Formatter.format(input) - end - - property "converts any term so that it can be encoded with Jason" do - check all value <- term() do - assert {:ok, _} = value |> Formatter.format() |> Jason.encode() - end - end - end 
-end diff --git a/test/unit/logger_json/plug_test.exs b/test/unit/logger_json/plug_test.exs deleted file mode 100644 index a5384c0..0000000 --- a/test/unit/logger_json/plug_test.exs +++ /dev/null @@ -1,108 +0,0 @@ -defmodule LoggerJSON.PlugTest do - use Logger.Case, async: false - use Plug.Test - import ExUnit.CaptureIO - require Logger - - defmodule MyPlug do - use Plug.Builder - - plug(LoggerJSON.Plug) - plug(:passthrough) - - defp passthrough(conn, _) do - Plug.Conn.send_resp(conn, 200, "Passthrough") - end - end - - setup do - :ok = - Logger.configure_backend( - LoggerJSON, - device: :standard_error, - level: nil, - metadata: :all, - json_encoder: Jason, - on_init: :disabled, - formatter: LoggerJSON.Formatters.GoogleCloudLogger, - formatter_state: %{} - ) - end - - test "logs request information" do - Logger.metadata(request_id: "request_id") - - log = - capture_io(:standard_error, fn -> - call(conn(:get, "/")) - Logger.flush() - end) - - assert %{ - "message" => "", - "httpRequest" => %{ - "latency" => latency, - "referer" => nil, - "remoteIp" => "127.0.0.1", - "requestMethod" => "GET", - "requestUrl" => "http://www.example.com/", - "status" => 200, - "userAgent" => nil - }, - "logging.googleapis.com/operation" => %{"id" => "request_id"}, - "severity" => "INFO" - } = Jason.decode!(log) - - assert {latency_number, "s"} = Float.parse(latency) - assert latency_number > 0 - - conn = %{conn(:get, "/hello/world") | private: %{phoenix_controller: MyController, phoenix_action: :foo}} - - log = - capture_io(:standard_error, fn -> - call(conn) - Logger.flush() - end) - - assert %{ - "httpRequest" => %{ - "requestUrl" => "http://www.example.com/hello/world" - }, - "phoenix" => %{ - "action" => "foo", - "controller" => "Elixir.MyController" - } - } = Jason.decode!(log) - end - - test "takes values from request headers" do - request_id = Ecto.UUID.generate() - - conn = - :get - |> conn("/") - |> Plug.Conn.put_resp_header("x-request-id", request_id) - |> 
Plug.Conn.put_req_header("user-agent", "chrome") - |> Plug.Conn.put_req_header("referer", "http://google.com") - |> Plug.Conn.put_req_header("x-forwarded-for", "127.0.0.10") - |> Plug.Conn.put_req_header("x-api-version", "2017-01-01") - - log = - capture_io(:standard_error, fn -> - call(conn) - Logger.flush() - end) - - assert %{ - "httpRequest" => %{ - "referer" => "http://google.com", - "remoteIp" => "127.0.0.10", - "userAgent" => "chrome" - } - } = Jason.decode!(log) - end - - defp call(conn) do - MyPlug.call(conn, []) - end -end