diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..45f954f --- /dev/null +++ b/.gitignore @@ -0,0 +1 @@ +fluentd/ diff --git a/README.md b/README.md index 707e23a..66882bd 100644 --- a/README.md +++ b/README.md @@ -1,25 +1,35 @@ -## Scenario 1 logloss-benchmarking using loader and verify-loader programs as standalone setup +# logloss-benchmarking -This repo contains simulation script for +This repo is used to experiment with and simulate log loss scenarios using Fluentd + +The repo contains simulation scripts for 1. replicating log-loss at different rate of logs generation (msg line per sec or no of bytes per sec) 2. measuring log-loss at a given setting of log-size-max limits - The loader and verifiy-loader scripts are taken from https://github.com/ViaQ/logging-load-driver + > Note: The loader and verify-loader scripts are taken from https://github.com/ViaQ/logging-load-driver They are further changed to have more debug/print statements for better understanding on log-loss. 
+ +## Scenario 1: logloss-benchmarking using loader and verify-loader programs as standalone setup + - Simulation script can use a custom conmon binary which is changed to log in extra meta data on log-rotation event and timestamps - ${LOCALBIN}/podman run --log-level debug --conmon $conmonlatestlib --env MSGPERSEC --env PAYLOAD_GEN --env PAYLOAD_SIZE --env DISTRIBUTION --env STDDEV --env OUTPUT --env REPORT --env REPORT_INTERVAL --env TOTAL_SIZE --log-opt max-size=$MAXSIZE $imageid" - + > Note: Simulation script can use custom conmon binary which is changed to log in extra metadata on log-rotation event and timestamps + > + > ```${LOCALBIN}/podman run --log-level debug --conmon $conmonlatestlib --env MSGPERSEC --env PAYLOAD_GEN --env PAYLOAD_SIZE --env DISTRIBUTION --env STDDEV --env OUTPUT --env REPORT --env REPORT_INTERVAL --env TOTAL_SIZE --log-opt max-size=$MAXSIZE $imageid"``` + + Below Results are obtained when setting: + `MSGPERSEC = 100` + `log-lines per sec = 10000` + `pay-load size = 1024 bytes` + `payload_gen method as fixed (not random)` + `etc.` - Below Results are obtained on setting of MSGPERSEC=100 to 10000 log-lines per sec, pay-load size 1024 bytes, payload_gen method as fixed (not random) etc. - You can replicate these results by running the below scripts : + You can replicate these results by running the below scripts: - simulation-with-diff-config-variables.sh + `simulation-with-diff-config-variables.sh ` e.g. simulation-with-diff-config-variables.sh 1000 1024000 10 - [The Above generates 1000 loglines per sec of payload 1024 bytes, maxsize of logfile being 1024000 bytes, REPORT INTERVAL being set to 10 sec by loader program] - + > [The Above generates 1000 loglines per sec of payload 1024 bytes, maxsize of logfile being 1024000 bytes, REPORT INTERVAL being set to 10 sec by loader program] |msq-lines-per-sec log generation rate | Rate of writing to disk bytes per sec | Log-max-size limit set to.. | Loss rate(mb per sec) as we see.. 
|msg-len set to | |--|--|--|--|--| @@ -48,18 +58,23 @@ This repo contains simulation script for ## Scenario 2 logloss-benchmarking using loader and fluentd being the log collector -Steps to simulate this scenario is the below. +Steps to simulate this scenario: - 1. Install locally fluentd by following installing-fluentd-readme.txt doc - - - 2. Run the container for logging load driver program by using run-container-for-logging-load-driver-program.sh - - $run-container-for-logging-load-driver-program.sh - - 3. run fluentd on local host as the below + 1. Install locally fluentd by following [installing-fluentd-readme.md](installing-fluentd-readme.md) + 2. Deploy logging-load-driver docker image + `cd logging-load-driver` + `make all` + `cd ..` + 3. Start the logging load driver container using run-container-for-logging-load-driver-program.sh + `./run-container-for-logging-load-driver-program.sh ` + + > e.g `./run-container-for-logging-load-driver-program.sh 1 1000 1` + 4. Execute fluentd (in a different shell) + `sudo chmod a=rwx /var/lib/docker/containers` + `sudo fluentd -c fluent-test-tail.conf` + + > Note: To tail root level docker directories execute fluentd with sudo permissions + > Note: To log debug messages execute fluentd with `-v` flag (`-vv` executes trace level ) - $sudo fluentd -c fluent-test-tail.conf - - The log lines generated by loader program in the step 1 gets collected by fluentd which is given fluent-test-tail.conf. The fluent-test-tail.conf must have the right path, position, and other configuration parameters specified. -please check the content of fluent-test-tail.conf for configuring input and output plugins for fluentd + Log lines generated by the loader program in step 1 get collected by fluentd which is given fluent-test-tail.conf. The fluent-test-tail.conf must have the right path, position, and other configuration parameters specified. 
+ please check the content of fluent-test-tail.conf for configuring input and output plugins for fluentd diff --git a/fluent-test-tail.conf b/fluent-test-tail.conf index a149399..136626c 100644 --- a/fluent-test-tail.conf +++ b/fluent-test-tail.conf @@ -1,32 +1,32 @@ # Have a source directive for each log file source file. -# Fluentd input tail plugin, will start reading from the tail of the log -@type tail -# Specify the log file path. This supports wild card character -path /var/lib/docker/containers/*/*json*.log -# This is recommended ?~@~S Fluentd will record the position it last read into this file. Change this folder according to your server -pos_file PATH /home/pmoogi/docker-containerid.log.pos -# tag is used to correlate the directives. For example, source with corresponding filter and match directives. -tag mytagloadlogs -format /(?.*)/ -#reads the fields from the log file in the specified format + # Fluentd input tail plugin, will start reading from the tail of the log + @type tail + # Specify the log file path. This supports wild card character + path /var/lib/docker/containers/*/*json*.log + # This is recommended ?~@~S Fluentd will record the position it last read into this file. Change this folder according to your server + pos_file /var/lib/docker/containers/*/docker-containerid.log.pos + # tag is used to correlate the directives. For example, source with corresponding filter and match directives. 
+ tag dockerlog + format /(?.*)/ + #reads the fields from the log file in the specified format - - @type prometheus - +# +# @type prometheus +# - - @type prometheus_output_monitor - +# +# @type prometheus_output_monitor +# - - @type prometheus_monitor - +# +# @type prometheus_monitor +# - - @type prometheus_tail_monitor - +# +# @type prometheus_tail_monitor +# #for printing fluentd collected logs to stdout of host machine when you run fluentd diff --git a/installing-fluentd-readme.md b/installing-fluentd-readme.md new file mode 100644 index 0000000..e04a740 --- /dev/null +++ b/installing-fluentd-readme.md @@ -0,0 +1,37 @@ + +# Installing custom fluentd + +The below captures the steps to clone fluentd and other fluentd plugins, do custom build of a local gem file, and get that installed in your hostmachine + +> note: as pre-req you need ruby version 2.7 installed (e.g. instructions for [centos](https://tecadmin.net/install-ruby-latest-stable-centos/)) +## Steps + 1. git clone + `git clone https://github.com/pmoogi-redhat/fluentd.git` + > note: assuming you have forked the upstream fluentd into your own remote repo + 2. Go to your repo + `cd fluentd` + 3. Execute gem build + `gem build -V fluentd.gemspec` + 4. Execute bundler + `bundler install` + > note: as needed execute `gem install bundler:2.2.7` to update version of bundler + + 5. 
Execute gem install + `gem install -V -l fluentd-1.12.0.gem` + + ## How to customize fluentd or plugins + For example change tail code: + `vi in_tail.rb` + + The changed plugin files can be reflected in the installed directory by copying update file into the below default installed directories + `cp /path/to/fluentd/lib/fluent/plugin/in_tail.rb /usr/local/share/gems/gems/fluentd-1.12.0/lib/fluent/plugin/.` + OR do a fresh gem build as described from step 3 to create a new installation + > Note: Step 5 actually copies plugin .rb files into `/usr/local/share/gems/gems/fluentd-1.12.0/lib/fluent/plugin/.` directory + + Similarly, other plugins can be changed and installed by hacking the above step + + ## How to run fluentd on your local machine + To execute fluentd on your local machine with the plugins and code changes, execute: + + `fluentd -c fluent.conf` + diff --git a/installing-fluentd-readme.txt b/installing-fluentd-readme.txt deleted file mode 100644 index 965332e..0000000 --- a/installing-fluentd-readme.txt +++ /dev/null @@ -1,23 +0,0 @@ -The below docs capture the steps to clone fluentd and other fluentd plugins, do customer build of a local gem file, and get that installed in your hostmachine -##Steps - 1. git clone as git clone https://github.com/pmoogi-redhat/fluentd.git [assuming that you have forked the upstream fluentd into your own remote repo] - 2. Go to your repo - $ cd path/to/fluentd/ - 3. Do - $gem build -V fluentd.gemspec - 4. Do gem build - $gem build -V -l - 5. Do gem install - $gem install -V -l fluentd-1.12.0.gem - - ## You may add changes in the plugins - $vi in_tail.rb - The changed plugin files can be reflected in the installed directory by simplying copying the to the below default installed directories - $cp /path/to/fluentd/lib/fluent/plugin/in_tail.rb /usr/local/share/gems/gems/fluentd-1.12.0/lib/fluent/plugin/. - OR do a fresh gem build as step 4 and that followed by step 5 for new installation. 
Step 5 actually copies plugin .rb files to /usr/local/share/gems/gems/fluentd-1.12.0/lib/fluent/plugin/. directory - - Similarly other plugins can be changed and installed by hacking the above step - - ## You may run fluentd process in your hostmachine by the following step - 6. Run fluentd in the local host machine with the above plugin code changes reflected by running the below - $sudo fluentd -c fluent-test-tail.conf diff --git a/logging-load-driver/Docker/Dockerfile b/logging-load-driver/Docker/Dockerfile index 4007389..75e8461 100644 --- a/logging-load-driver/Docker/Dockerfile +++ b/logging-load-driver/Docker/Dockerfile @@ -1,10 +1,4 @@ FROM registry.fedoraproject.org/fedora - -ENV MSGPERSEC=1000 REPORT_INTERVAL=10 PAYLOAD_SIZE=1024 \ - DISTRIBUTION=gaussian PAYLOAD_GEN=fixed \ - STDDEV=32 OUTPUT=stdout REPORT=inline \ - TOTAL_SIZE=100 - RUN dnf -y install python RUN rm -rf /loader COPY ./loader / @@ -16,4 +10,9 @@ RUN chmod +x /loader # docker logs $contid # docker stop $contid # docker rm $contid +ENV MSGPERSEC=1000 REPORT_INTERVAL=10 PAYLOAD_SIZE=1024 \ + DISTRIBUTION=gaussian PAYLOAD_GEN=fixed \ + STDDEV=32 OUTPUT=stdout REPORT=inline \ + TOTAL_SIZE=100 + CMD /loader --msgpersec=${MSGPERSEC} --report-interval=${REPORT_INTERVAL} --total-size=${TOTAL_SIZE} --distribution=${DISTRIBUTION} --payload-gen=${PAYLOAD_GEN} --stddev=${STDDEV} --output=${OUTPUT} --report=${REPORT} ${PAYLOAD_SIZE} diff --git a/logging-load-driver/Makefile b/logging-load-driver/Makefile index 62fa667..208cc88 100644 --- a/logging-load-driver/Makefile +++ b/logging-load-driver/Makefile @@ -5,5 +5,5 @@ Docker/loader: loader cp loader Docker/loader build: Docker/loader - podman build -t podman-logging-load-driver-image Docker + # podman build -t podman-logging-load-driver-image Docker docker build -t docker-logging-load-driver-image Docker diff --git a/run-container-for-logging-load-driver-program.sh b/run-container-for-logging-load-driver-program.sh old mode 100644 new mode 100755 index 
9b3f6b3..3999a4f --- a/run-container-for-logging-load-driver-program.sh +++ b/run-container-for-logging-load-driver-program.sh @@ -1,5 +1,4 @@ #! /bin/bash - #ensure you got Docker version 20.10.2, installed if [ $1 = "--help" ] @@ -26,7 +25,19 @@ export TOTAL_SIZE=100 export NOW=$(date +"%m%d%Y%H%M") -echo Messagelines_per_sec=$MSGPERSEC Max_size_log_file_limit=$MAXSIZE Report Interval=$REPORT_INTERVAL Payload_size bytes=$PAYLOAD_SIZE Payload Gen Method=${PAYLOAD_GEN} Payload std dev=${STDDEV} Logs from container writing to data pipe type=$OUTPUT Reporting method=$REPORT Report Interval=$REPORT_INTERVAL TOTAL_SIZE considered for counting log-loss =$TOTAL_SIZE +echo "Configuration: +-=-=-=-=-=-=- +Messagelines_per_sec=$MSGPERSEC +Max_size_log_file_limit=$MAXSIZE +Report Interval=$REPORT_INTERVAL +Payload_size bytes=$PAYLOAD_SIZE +Payload Gen Method=${PAYLOAD_GEN} +Payload std dev=${STDDEV} +Logs from container writing to data pipe type=$OUTPUT +Reporting method=$REPORT +Report Interval=$REPORT_INTERVAL +TOTAL_SIZE considered for counting log-loss =$TOTAL_SIZE +" function pause(){ @@ -45,9 +56,12 @@ export dockerimageid=`docker images | grep latest | grep docker-logging-load-dri DockerCMD="docker run -v /etc/group:/etc/group:ro -v /etc/passwd:/etc/passwd:ro -u $( id -u $USER ):$( id -g $USER ) -v /var/lib/docker/containers:/var/lib/docker/containers:ro --log-opt max-size=$MAXSIZE --log-opt tag="docker.{{.ID}}" --env MSGPERSEC --env PAYLOAD_GEN --env PAYLOAD_SIZE --env DISTRIBUTION --env STDDEV --env OUTPUT --env REPORT --env REPORT_INTERVAL --env TOTAL_SIZE $dockerimageid" -pause 'Press [Enter] key to run container...with ' +echo -e "About to execute following (in docker): +-==--==-=-=-\n +${DockerCMD}\n +Press [Enter] key to execute" +pause #### -echo GOING To RUN Docker run with given image id --- ${DockerCMD} $DockerCMD pause 'post docker cmd execution' diff --git a/simulation-with-diff-config-variables.sh b/simulation-with-diff-config-variables.sh old mode 
100644 new mode 100755