From 63f29277ec95a361796032da4320ec9be736facc Mon Sep 17 00:00:00 2001 From: Tomasz Szostak Date: Tue, 8 Dec 2020 15:25:31 +0100 Subject: [PATCH] Kafka update to 2.6.0 (#1890) * Upgrade kafka version to 2.6.0 * Update jmx exporter tar ball to 0.14.0 * Item: #1137 Requirements.txt with correct download link (archive.apache.org) * Kafka server.properties tak take more settings from yaml * Item: #1137 Changelog and components file updated * Item: #1137, Upgrade.md file updated. * Item: #1137, Changes requested in code review. Single place for version and filename. Co-authored-by: Steve Kieu --- CHANGELOG-0.9.md | 1 + .../playbooks/roles/kafka/defaults/main.yml | 8 ++-- .../playbooks/roles/kafka/tasks/main.yml | 2 +- .../roles/kafka/tasks/setup-kafka.yml | 42 ++++--------------- .../kafka/templates/server.properties.j2 | 10 ++--- .../centos-7/requirements.txt | 4 +- .../redhat-7/requirements.txt | 4 +- .../ubuntu-18.04/requirements.txt | 4 +- .../playbooks/roles/upgrade/tasks/kafka.yml | 2 +- .../upgrade/tasks/kafka/install-upgrade.yml | 10 ++--- .../tasks/kafka/set-updated-version.yml | 2 +- .../common/defaults/configuration/kafka.yml | 21 +++------- docs/home/COMPONENTS.md | 2 +- docs/home/howto/UPGRADE.md | 4 +- 14 files changed, 38 insertions(+), 78 deletions(-) diff --git a/CHANGELOG-0.9.md b/CHANGELOG-0.9.md index c46c87eefc..f6d37feac1 100644 --- a/CHANGELOG-0.9.md +++ b/CHANGELOG-0.9.md @@ -22,6 +22,7 @@ - [#1770](https://github.com/epiphany-platform/epiphany/issues/1770) - Upgrade Filebeat to the latest version (7.9.2) - [#1848](https://github.com/epiphany-platform/epiphany/issues/1848) - Update Ansible to v2.8.17 +- [#1137](https://github.com/epiphany-platform/epiphany/issues/1137) - Upgrade Kafka to 2.6.0 ### Breaking changes diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/kafka/defaults/main.yml b/core/src/epicli/data/common/ansible/playbooks/roles/kafka/defaults/main.yml index 1fac4f1ceb..cdf9a3627a 100644 --- a/core/src/epicli/data/common/ansible/playbooks/roles/kafka/defaults/main.yml +++ b/core/src/epicli/data/common/ansible/playbooks/roles/kafka/defaults/main.yml @@ -1,5 +1,3 @@ -specification: - kafka_var: - version: 2.3.1 - scala: - version: 2.12 \ No newline at end of file +kafka_version: 2.6.0 +scala_version: 2.12 +kafka_bin_filename: "kafka_2.12-2.6.0.tgz" diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/kafka/tasks/main.yml b/core/src/epicli/data/common/ansible/playbooks/roles/kafka/tasks/main.yml index c34005f1e4..32068ef883 100644 --- a/core/src/epicli/data/common/ansible/playbooks/roles/kafka/tasks/main.yml +++ b/core/src/epicli/data/common/ansible/playbooks/roles/kafka/tasks/main.yml @@ -9,7 +9,7 @@ - include_tasks: setup-kafka.yml # - include_tasks: verify-kafka.yml # todo change testing kafka to bash or remove it? 
(since we test it using serverspec) - + - include_tasks: metrics.yml when: exporter.stat.exists diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/kafka/tasks/setup-kafka.yml b/core/src/epicli/data/common/ansible/playbooks/roles/kafka/tasks/setup-kafka.yml index 59c5a420ab..59703a1617 100644 --- a/core/src/epicli/data/common/ansible/playbooks/roles/kafka/tasks/setup-kafka.yml +++ b/core/src/epicli/data/common/ansible/playbooks/roles/kafka/tasks/setup-kafka.yml @@ -27,7 +27,7 @@ - name: Set Kafka file name to install set_fact: - kafka_file_name: "{{ specification.kafka_var.file_name }}" + kafka_file_name: "{{ kafka_bin_filename }}" - name: Download Kafka binaries include_role: @@ -36,34 +36,6 @@ vars: file_name: "{{ kafka_file_name }}" -# - name: Check for Kafka package -# stat: -# path: "/tmp/kafka_{{ specification.kafka_var.scala.version }}-{{ specification.kafka_var.version }}.tgz" -# register: kafka_check - -# - name: Fetch Kafka binary package -# get_url: -# url: "https://archive.apache.org/dist/kafka/{{ specification.kafka_var.version }}/kafka_{{ specification.kafka_var.scala.version }}-{{ specification.kafka_var.version }}.tgz" -# dest: "/tmp/kafka_{{ specification.kafka_var.scala.version }}-{{ specification.kafka_var.version }}.tgz" -# validate_certs: "{{ validate_certs | bool }}" -# when: not kafka_check.stat.exists - -# - name: Get sha512 sum of archive -# stat: -# path: "/tmp/kafka_{{ specification.kafka_var.scala.version }}-{{ specification.kafka_var.version }}.tgz" -# checksum_algorithm: sha512 -# get_checksum: yes -# register: kafka_download_stat - -# - name: Display sha of archive -# debug: -# msg: "Kafka SHA512: {{ kafka_download_stat.stat.checksum }}" - -# - name: Verify sha512 of archive before installation -# fail: -# msg: "File checksum is not correct." -# when: kafka_download_stat.stat.checksum != specification.kafka_var.sha - - name: Add Kafka's bin dir to the PATH copy: content: "export PATH=$PATH:/opt/kafka/bin" @@ -72,20 +44,20 @@ - name: Check for Kafka package stat: - path: /opt/kafka_{{ specification.kafka_var.scala.version }}-{{ specification.kafka_var.version }}/bin/kafka-server-start.sh + path: /opt/kafka_{{ scala_version }}-{{ kafka_version }}/bin/kafka-server-start.sh register: kafka_package - name: Uncompress the Kafka tar unarchive: remote_src: yes - creates: /opt/kafka_{{ specification.kafka_var.scala.version }}-{{ specification.kafka_var.version }} + creates: /opt/kafka_{{ scala_version }}-{{ kafka_version }} src: "{{ download_directory }}/{{ kafka_file_name }}" dest: /opt when: not kafka_package.stat.exists - name: Change ownership on kafka directory. 
file: - path: /opt/kafka_{{ specification.kafka_var.scala.version }}-{{ specification.kafka_var.version }} + path: /opt/kafka_{{ scala_version }}-{{ kafka_version }} state: directory owner: kafka group: kafka @@ -94,7 +66,7 @@ file: dest: /opt/kafka state: link - src: /opt/kafka_{{ specification.kafka_var.scala.version }}-{{ specification.kafka_var.version }} + src: /opt/kafka_{{ scala_version }}-{{ kafka_version }} - name: Create systemd config template: @@ -180,8 +152,8 @@ - name: configure system settings, file descriptors and number of threads for kafka pam_limits: domain: "{{ specification.kafka_var.user }}" - limit_type: "{{item.limit_type}}" - limit_item: "{{item.limit_item}}" + limit_type: "{{ item.limit_type }}" + limit_item: "{{ item.limit_item }}" value: "{{item.value}}" with_items: - { limit_type: '-', limit_item: 'nofile', value: 128000 } diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/kafka/templates/server.properties.j2 b/core/src/epicli/data/common/ansible/playbooks/roles/kafka/templates/server.properties.j2 index beed64bf9a..6f2643beac 100644 --- a/core/src/epicli/data/common/ansible/playbooks/roles/kafka/templates/server.properties.j2 +++ b/core/src/epicli/data/common/ansible/playbooks/roles/kafka/templates/server.properties.j2 @@ -65,19 +65,19 @@ listeners=PLAINTEXT://{{ ansible_default_ipv4.address }}:{{ specification.kafka_ #listener.security.protocol.map=PLAINTEXT:PLAINTEXT,SSL:SSL,SASL_PLAINTEXT:SASL_PLAINTEXT,SASL_SSL:SASL_SSL # The number of threads handling network requests -num.network.threads=3 +num.network.threads={{ specification.kafka_var.socket_settings.network_threads }} # The number of threads doing disk I/O -num.io.threads=8 +num.io.threads={{ specification.kafka_var.socket_settings.io_threads }} # The send buffer (SO_SNDBUF) used by the socket server -socket.send.buffer.bytes=102400 +socket.send.buffer.bytes={{ specification.kafka_var.socket_settings.send_buffer_bytes }} # The receive buffer (SO_RCVBUF) used by the socket server -socket.receive.buffer.bytes=102400 +socket.receive.buffer.bytes={{ specification.kafka_var.socket_settings.receive_buffer_bytes }} # The maximum size of a request that the socket server will accept (protection against OOM) -socket.request.max.bytes=104857600 +socket.request.max.bytes={{ specification.kafka_var.socket_settings.request_max_bytes }} ############################# Security ######################################### diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/repository/files/download-requirements/centos-7/requirements.txt b/core/src/epicli/data/common/ansible/playbooks/roles/repository/files/download-requirements/centos-7/requirements.txt index 85ccdb232f..37545b6489 100644 --- a/core/src/epicli/data/common/ansible/playbooks/roles/repository/files/download-requirements/centos-7/requirements.txt +++ b/core/src/epicli/data/common/ansible/playbooks/roles/repository/files/download-requirements/centos-7/requirements.txt @@ -147,8 +147,8 @@ kubernetes-cni-0.8.6-0 [files] https://github.com/prometheus/haproxy_exporter/releases/download/v0.10.0/haproxy_exporter-0.10.0.linux-amd64.tar.gz -https://repo1.maven.org/maven2/io/prometheus/jmx/jmx_prometheus_javaagent/0.12.0/jmx_prometheus_javaagent-0.12.0.jar -https://archive.apache.org/dist/kafka/2.3.1/kafka_2.12-2.3.1.tgz +https://repo1.maven.org/maven2/io/prometheus/jmx/jmx_prometheus_javaagent/0.14.0/jmx_prometheus_javaagent-0.14.0.jar +https://archive.apache.org/dist/kafka/2.6.0/kafka_2.12-2.6.0.tgz 
https://github.com/danielqsj/kafka_exporter/releases/download/v1.2.0/kafka_exporter-1.2.0.linux-amd64.tar.gz https://github.com/prometheus/node_exporter/releases/download/v1.0.1/node_exporter-1.0.1.linux-amd64.tar.gz https://github.com/prometheus/prometheus/releases/download/v2.10.0/prometheus-2.10.0.linux-amd64.tar.gz diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/repository/files/download-requirements/redhat-7/requirements.txt b/core/src/epicli/data/common/ansible/playbooks/roles/repository/files/download-requirements/redhat-7/requirements.txt index 0c1e43ccbc..360ab9e2b6 100644 --- a/core/src/epicli/data/common/ansible/playbooks/roles/repository/files/download-requirements/redhat-7/requirements.txt +++ b/core/src/epicli/data/common/ansible/playbooks/roles/repository/files/download-requirements/redhat-7/requirements.txt @@ -144,8 +144,8 @@ kubernetes-cni-0.8.6-0 [files] https://github.com/prometheus/haproxy_exporter/releases/download/v0.10.0/haproxy_exporter-0.10.0.linux-amd64.tar.gz -https://repo1.maven.org/maven2/io/prometheus/jmx/jmx_prometheus_javaagent/0.12.0/jmx_prometheus_javaagent-0.12.0.jar -https://archive.apache.org/dist/kafka/2.3.1/kafka_2.12-2.3.1.tgz +https://repo1.maven.org/maven2/io/prometheus/jmx/jmx_prometheus_javaagent/0.14.0/jmx_prometheus_javaagent-0.14.0.jar +https://archive.apache.org/dist/kafka/2.6.0/kafka_2.12-2.6.0.tgz https://github.com/danielqsj/kafka_exporter/releases/download/v1.2.0/kafka_exporter-1.2.0.linux-amd64.tar.gz https://github.com/prometheus/node_exporter/releases/download/v1.0.1/node_exporter-1.0.1.linux-amd64.tar.gz https://github.com/prometheus/prometheus/releases/download/v2.10.0/prometheus-2.10.0.linux-amd64.tar.gz diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/repository/files/download-requirements/ubuntu-18.04/requirements.txt b/core/src/epicli/data/common/ansible/playbooks/roles/repository/files/download-requirements/ubuntu-18.04/requirements.txt index 45a5372937..ef801d5092 100644 --- a/core/src/epicli/data/common/ansible/playbooks/roles/repository/files/download-requirements/ubuntu-18.04/requirements.txt +++ b/core/src/epicli/data/common/ansible/playbooks/roles/repository/files/download-requirements/ubuntu-18.04/requirements.txt @@ -177,8 +177,8 @@ kubernetes-cni 0.7.5-00 kubernetes-cni 0.8.6-00 [files] -https://repo1.maven.org/maven2/io/prometheus/jmx/jmx_prometheus_javaagent/0.12.0/jmx_prometheus_javaagent-0.12.0.jar -https://archive.apache.org/dist/kafka/2.3.1/kafka_2.12-2.3.1.tgz +https://repo1.maven.org/maven2/io/prometheus/jmx/jmx_prometheus_javaagent/0.14.0/jmx_prometheus_javaagent-0.14.0.jar +https://archive.apache.org/dist/kafka/2.6.0/kafka_2.12-2.6.0.tgz https://archive.apache.org/dist/zookeeper/zookeeper-3.4.12/zookeeper-3.4.12.tar.gz https://github.com/danielqsj/kafka_exporter/releases/download/v1.2.0/kafka_exporter-1.2.0.linux-amd64.tar.gz https://github.com/prometheus/alertmanager/releases/download/v0.17.0/alertmanager-0.17.0.linux-amd64.tar.gz diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/upgrade/tasks/kafka.yml b/core/src/epicli/data/common/ansible/playbooks/roles/upgrade/tasks/kafka.yml index 41aee95a36..b0bdb4b742 100644 --- a/core/src/epicli/data/common/ansible/playbooks/roles/upgrade/tasks/kafka.yml +++ b/core/src/epicli/data/common/ansible/playbooks/roles/upgrade/tasks/kafka.yml @@ -44,7 +44,7 @@ - name: Include set Kafka version tasks include_tasks: kafka/set-updated-version.yml when: - - 
lock_file_status.stat.exists or before_upgrade_kafka_version is version( specification.kafka_var.version, '<' ) + - lock_file_status.stat.exists or before_upgrade_kafka_version is version( kafka_version, '<' ) - name: Remove Kafka upgrade flag file file: diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/upgrade/tasks/kafka/install-upgrade.yml b/core/src/epicli/data/common/ansible/playbooks/roles/upgrade/tasks/kafka/install-upgrade.yml index 6a41da108d..498edd908d 100644 --- a/core/src/epicli/data/common/ansible/playbooks/roles/upgrade/tasks/kafka/install-upgrade.yml +++ b/core/src/epicli/data/common/ansible/playbooks/roles/upgrade/tasks/kafka/install-upgrade.yml @@ -1,7 +1,7 @@ --- - name: Set Kafka installation file name as fact set_fact: - kafka_installation_file_name: "kafka_{{ specification.kafka_var.scala.version }}-{{ specification.kafka_var.version }}.tgz" + kafka_installation_file_name: "kafka_{{ scala_version }}-{{ kafka_version }}.tgz" - name: Download Kafka binaries include_role: @@ -18,7 +18,7 @@ - name: Change ownership on kafka directory file: - path: /opt/kafka_{{ specification.kafka_var.scala.version }}-{{ specification.kafka_var.version }} + path: /opt/kafka_{{ scala_version }}-{{ kafka_version }} state: directory owner: kafka group: kafka @@ -27,19 +27,19 @@ copy: remote_src: yes src: /opt/kafka/config/ - dest: /opt/kafka_{{ specification.kafka_var.scala.version }}-{{ specification.kafka_var.version }}/config + dest: /opt/kafka_{{ scala_version }}-{{ kafka_version }}/config mode: preserve - name: Link /opt/kafka to recently installed version file: dest: /opt/kafka state: link - src: /opt/kafka_{{ specification.kafka_var.scala.version }}-{{ specification.kafka_var.version }} + src: /opt/kafka_{{ scala_version }}-{{ kafka_version }} force: yes - name: Remove previous version binaries file: - path: /opt/kafka_{{ specification.kafka_var.scala.version }}-{{ before_upgrade_kafka_version }} + path: /opt/kafka_{{ scala_version }}-{{ before_upgrade_kafka_version }} state: absent - name: Get log.dirs property diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/upgrade/tasks/kafka/set-updated-version.yml b/core/src/epicli/data/common/ansible/playbooks/roles/upgrade/tasks/kafka/set-updated-version.yml index 6d42789504..b301e776dd 100644 --- a/core/src/epicli/data/common/ansible/playbooks/roles/upgrade/tasks/kafka/set-updated-version.yml +++ b/core/src/epicli/data/common/ansible/playbooks/roles/upgrade/tasks/kafka/set-updated-version.yml @@ -7,7 +7,7 @@ lineinfile: path: /opt/kafka/config/server.properties regexp: "^inter.broker.protocol.version" - line: "inter.broker.protocol.version={{ specification.kafka_var.version }}" + line: "inter.broker.protocol.version={{ kafka_version }}" - name: Start kafka service systemd: diff --git a/core/src/epicli/data/common/defaults/configuration/kafka.yml b/core/src/epicli/data/common/defaults/configuration/kafka.yml index 3893e0dfb9..bc920ff144 100644 --- a/core/src/epicli/data/common/defaults/configuration/kafka.yml +++ b/core/src/epicli/data/common/defaults/configuration/kafka.yml @@ -2,16 +2,10 @@ kind: configuration/kafka title: "Kafka" name: default specification: - kafka_var: - version: 2.3.1 - scala: - version: 2.12 - file_name: "kafka_2.12-2.3.1.tgz" enabled: True admin: kafka admin_pwd: epiphany - security: ssl: enabled: False @@ -25,12 +19,10 @@ specification: keystore: PasswordToChange truststore: PasswordToChange key: PasswordToChange - endpoint_identification_algorithm: HTTPS client_auth: required 
encrypt_at_rest: False inter_broker_protocol: PLAINTEXT - authorization: enabled: False authorizer_class_name: kafka.security.auth.SimpleAclAuthorizer @@ -41,13 +33,11 @@ specification: users: - name: test_user topic: test_topic - authentication: enabled: False authentication_method: certificates sasl_mechanism_inter_broker_protocol: sasl_enabled_mechanisms: PLAIN - sha: "b28e81705e30528f1abb6766e22dfe9dae50b1e1e93330c880928ff7a08e6b38ee71cbfc96ec14369b2dfd24293938702cab422173c8e01955a9d1746ae43f98" port: 9092 min_insync_replicas: 1 # Minimum number of replicas (ack write) @@ -66,20 +56,21 @@ specification: jmx_opts: max_incremental_fetch_session_cache_slots: 1000 controlled_shutdown_enable: true - group: kafka user: kafka - conf_dir: /opt/kafka/config data_dir: /var/lib/kafka log_dir: /var/log/kafka - + socket_settings: + network_threads: 3 # The number of threads handling network requests + io_threads: 8 # The number of threads doing disk I/O + send_buffer_bytes: 102400 # The send buffer (SO_SNDBUF) used by the socket server + receive_buffer_bytes: 102400 # The receive buffer (SO_RCVBUF) used by the socket server + request_max_bytes: 104857600 # The maximum size of a request that the socket server will accept (protection against OOM) zookeeper_set_acl: false zookeeper_hosts: "{{ groups['zookeeper']|join(':2181,') }}:2181" - jmx_exporter_user: jmx-exporter jmx_exporter_group: jmx-exporter - prometheus_jmx_path: /opt/jmx-exporter/jmx_prometheus_javaagent.jar prometheus_jmx_exporter_web_listen_port: 7071 prometheus_jmx_config: /opt/kafka/config/jmx-kafka.config.yml diff --git a/docs/home/COMPONENTS.md b/docs/home/COMPONENTS.md index 459979d1b3..18702d52f5 100644 --- a/docs/home/COMPONENTS.md +++ b/docs/home/COMPONENTS.md @@ -11,7 +11,7 @@ Note that versions are default versions and can be changed in certain cases thro | Calico | 3.15.0 | https://github.com/projectcalico/calico | [Apache License 2.0](https://www.apache.org/licenses/LICENSE-2.0) | | Flannel | 0.12.0 | https://github.com/coreos/flannel/ | [Apache License](https://www.apache.org/licenses/LICENSE-1.0) | | Canal | 3.15.0 | https://github.com/projectcalico/calico | [Apache License 2.0](https://www.apache.org/licenses/LICENSE-2.0) | -| Kafka | 2.3.1 | https://github.com/apache/kafka | [Apache License 2.0](https://www.apache.org/licenses/LICENSE-2.0) | +| Kafka | 2.6.0 | https://github.com/apache/kafka | [Apache License 2.0](https://www.apache.org/licenses/LICENSE-2.0) | | Zookeeper | 3.4.12 | https://github.com/apache/zookeeper | [Apache License 2.0](https://www.apache.org/licenses/LICENSE-2.0) | | RabbitMQ | 3.8.9 | https://github.com/rabbitmq/rabbitmq-server | [Mozilla Public License](https://www.mozilla.org/en-US/MPL/) | | Docker-ce | 18.09 | https://github.com/docker/docker-ce/ | [Apache License](https://www.apache.org/licenses/LICENSE-1.0) | diff --git a/docs/home/howto/UPGRADE.md b/docs/home/howto/UPGRADE.md index dd5566e2ab..721b2e2a7b 100644 --- a/docs/home/howto/UPGRADE.md +++ b/docs/home/howto/UPGRADE.md @@ -113,9 +113,7 @@ The `epicli upgrade` command had an additional flag `--wait-for-pods`. When this ### Kafka upgrade -No downtime upgrades are possible to achieve when upgrading Kafka, but before you start thinking about upgrading you have to think about your topics configuration. Kafka topics are distributed accross partitions with replication. Default value for replication is 3, it means each partition will be replicated to 3 brokers. 
You should remember to enable redundancy and keep **at least two replicas all the time**, it is important when upgrading Kafka cluser. When one of your Kafka nodes will be down during upgrade ZooKeeper will direct your producers and consumers to working instances - having replicated partitions on working nodes will ensure no downtime and no data loss work. - -Upgrading Kafka could be different for every Kafka release, please refer to [Apache Kafka documentation](https://kafka.apache.org/documentation/#upgrade). Important point to remember during Kafka upgrade is the rule: **only one broker at the time** - to prevent downtime you should uprage you Kafka brokers one by one. +Kafka is automatically upgraded to the latest version supported by Epiphany. You can check the latest supported version [here](../COMPONENTS.md#epiphany-cluster-components). Kafka brokers are upgraded one by one, but the upgrade procedure does not guarantee zero downtime, since that depends on the number of available brokers and on the topic and partition configuration. ### ZooKeeper upgrade