diff --git a/api/bazel/repository_locations.bzl b/api/bazel/repository_locations.bzl
index 77cd67d214fa..9372d16a9c4e 100644
--- a/api/bazel/repository_locations.bzl
+++ b/api/bazel/repository_locations.bzl
@@ -190,12 +190,12 @@ REPOSITORY_LOCATIONS_SPEC = dict(
         project_name = "envoy_toolshed",
         project_desc = "Tooling, libraries, runners and checkers for Envoy proxy's CI",
         project_url = "https://github.com/envoyproxy/toolshed",
-        version = "0.1.3",
-        sha256 = "ee6d0b08ae3d9659f5fc34d752578af195147b153f8ca68eb4f8530aceb764d9",
+        version = "0.1.4",
+        sha256 = "7ddfd251a89518b97c4eb8064a7d37454bbd998bf29e4cd3ad8f44227b5ca7b3",
         strip_prefix = "toolshed-bazel-v{version}/bazel",
         urls = ["https://github.com/envoyproxy/toolshed/archive/bazel-v{version}.tar.gz"],
         use_category = ["build"],
-        release_date = "2024-04-16",
+        release_date = "2024-07-22",
         cpe = "N/A",
         license = "Apache-2.0",
         license_url = "https://github.com/envoyproxy/envoy/blob/bazel-v{version}/LICENSE",
diff --git a/bazel/python_dependencies.bzl b/bazel/python_dependencies.bzl
index b015e851c047..9f2b336b1a36 100644
--- a/bazel/python_dependencies.bzl
+++ b/bazel/python_dependencies.bzl
@@ -1,6 +1,6 @@
 load("@com_google_protobuf//bazel:system_python.bzl", "system_python")
 load("@envoy_toolshed//:packages.bzl", "load_packages")
-load("@python3_11//:defs.bzl", "interpreter")
+load("@python3_12//:defs.bzl", "interpreter")
 load("@rules_python//python:pip.bzl", "pip_parse")
 
 def envoy_python_dependencies():
diff --git a/bazel/repositories.bzl b/bazel/repositories.bzl
index 63f74b807b60..d6bada685102 100644
--- a/bazel/repositories.bzl
+++ b/bazel/repositories.bzl
@@ -1434,13 +1434,6 @@ filegroup(
         build_file_content = BUILD_ALL_CONTENT,
     )
 
-    # This archive provides Kafka client in Python, so we can use it to interact with Kafka server
-    # during integration tests.
-    external_http_archive(
-        name = "kafka_python_client",
-        build_file_content = BUILD_ALL_CONTENT,
-    )
-
 def _com_github_fdio_vpp_vcl():
     external_http_archive(
         name = "com_github_fdio_vpp_vcl",
diff --git a/bazel/repositories_extra.bzl b/bazel/repositories_extra.bzl
index 7fd1dc4095ab..bfa5d750a62d 100644
--- a/bazel/repositories_extra.bzl
+++ b/bazel/repositories_extra.bzl
@@ -9,7 +9,7 @@ def _python_minor_version(python_version):
     return "_".join(python_version.split(".")[:-1])
 
 # Python version for `rules_python`
-PYTHON_VERSION = "3.11.9"
+PYTHON_VERSION = "3.12.3"
 PYTHON_MINOR_VERSION = _python_minor_version(PYTHON_VERSION)
 
 # Envoy deps that rely on a first stage of dependency loading in envoy_dependencies().
diff --git a/bazel/repository_locations.bzl b/bazel/repository_locations.bzl
index b4d3d606a1bb..f2f556eba863 100644
--- a/bazel/repository_locations.bzl
+++ b/bazel/repository_locations.bzl
@@ -1353,19 +1353,6 @@ REPOSITORY_LOCATIONS_SPEC = dict(
         release_date = "2023-07-21",
         use_category = ["test_only"],
     ),
-    kafka_python_client = dict(
-        project_name = "Kafka (Python client)",
-        project_desc = "Open-source distributed event streaming platform",
-        project_url = "https://kafka.apache.org",
-        version = "2.0.2",
-        sha256 = "5dcf87c559e7aee4f18d621a02e247db3e3552ee4589ca611d51eef87b37efed",
-        strip_prefix = "kafka-python-{version}",
-        urls = ["https://github.com/dpkp/kafka-python/archive/{version}.tar.gz"],
-        release_date = "2020-09-30",
-        use_category = ["test_only"],
-        license = "Apache-2.0",
-        license_url = "https://github.com/dpkp/kafka-python/blob/{version}/LICENSE",
-    ),
     proxy_wasm_cpp_sdk = dict(
         project_name = "WebAssembly for Proxies (C++ SDK)",
         project_desc = "WebAssembly for Proxies (C++ SDK)",
diff --git a/contrib/kafka/filters/network/test/broker/integration_test/BUILD b/contrib/kafka/filters/network/test/broker/integration_test/BUILD
index 30444088ecbc..c0cd122eb231 100644
--- a/contrib/kafka/filters/network/test/broker/integration_test/BUILD
+++ b/contrib/kafka/filters/network/test/broker/integration_test/BUILD
@@ -12,10 +12,7 @@ envoy_contrib_package()
 # This test sets up multiple services, and this can take variable amount of time (30-60 seconds).
 envoy_py_test(
     name = "kafka_broker_integration_test",
-    srcs = [
-        "kafka_broker_integration_test.py",
-        "@kafka_python_client//:all",
-    ],
+    srcs = ["kafka_broker_integration_test.py"],
     data = [
         "//bazel:remote_jdk11",
         "//contrib/exe:envoy-static",
@@ -24,6 +21,7 @@ envoy_py_test(
     flaky = True,
     deps = [
         requirement("Jinja2"),
+        requirement("kafka-python-ng"),
         requirement("MarkupSafe"),
     ],
 )
diff --git a/contrib/kafka/filters/network/test/mesh/integration_test/BUILD b/contrib/kafka/filters/network/test/mesh/integration_test/BUILD
index 3db16d298756..8d2101143176 100644
--- a/contrib/kafka/filters/network/test/mesh/integration_test/BUILD
+++ b/contrib/kafka/filters/network/test/mesh/integration_test/BUILD
@@ -12,10 +12,7 @@ envoy_contrib_package()
 # This test sets up multiple services, and this can take variable amount of time (30-60 seconds).
 envoy_py_test(
     name = "kafka_mesh_integration_test",
-    srcs = [
-        "kafka_mesh_integration_test.py",
-        "@kafka_python_client//:all",
-    ],
+    srcs = ["kafka_mesh_integration_test.py"],
     data = [
         "//bazel:remote_jdk11",
         "//contrib/exe:envoy-static",
@@ -24,6 +21,7 @@ envoy_py_test(
     flaky = True,
     deps = [
         requirement("Jinja2"),
+        requirement("kafka-python-ng"),
         requirement("MarkupSafe"),
     ],
 )
diff --git a/test/integration/python/hotrestart_handoff_test.py b/test/integration/python/hotrestart_handoff_test.py
index 913ed959940a..c9915b538fbd 100644
--- a/test/integration/python/hotrestart_handoff_test.py
+++ b/test/integration/python/hotrestart_handoff_test.py
@@ -451,7 +451,7 @@ async def test_connection_handoffs(self) -> None:
             "slow request should be incomplete when the test waits for it, otherwise the test is not necessarily validating during-drain behavior",
         )
         for response in slow_responses:
-            self.assertEquals(await response.join(), 0)
+            self.assertEqual(await response.join(), 0)
         log.info("waiting for parent instance to terminate")
         await envoy_process_1.wait()
         log.info("sending second request to fast upstream")
diff --git a/tools/api/validate_structure.py b/tools/api/validate_structure.py
index 0ccddf38ffc3..2b0af7de751a 100755
--- a/tools/api/validate_structure.py
+++ b/tools/api/validate_structure.py
@@ -40,7 +40,7 @@ class ValidationError(Exception):
 
 # Extract major version and full API version string from a proto path.
 def proto_api_version(proto_path):
-    match = re.match('v(\d+).*', proto_path.parent.name)
+    match = re.match(r'v(\d+).*', proto_path.parent.name)
     if match:
         return str(proto_path.parent.name)[1:], int(match.group(1))
     return None, 0
@@ -57,7 +57,7 @@ def validate_proto_path(proto_path):
 
     # Validate that v3+ versions are regular.
     if major_version >= 3:
-        if not re.match('\d+(alpha)?$', version_str):
+        if not re.match(r'\d+(alpha)?$', version_str):
             raise ValidationError('Invalid v3+ version: %s' % version_str)
 
         # Validate v2-only paths.
diff --git a/tools/api_proto_breaking_change_detector/detector.py b/tools/api_proto_breaking_change_detector/detector.py
index 8a4273cb76e2..3bde20521205 100644
--- a/tools/api_proto_breaking_change_detector/detector.py
+++ b/tools/api_proto_breaking_change_detector/detector.py
@@ -81,7 +81,7 @@ def __init__(
 
         if Path.cwd() not in Path(path_to_changed_dir).parents:
             raise ValueError(
-                f"path_to_changed_dir {path_to_changed_dir} must be a subdirectory of the cwd ({ Path.cwd() })"
+                f"path_to_changed_dir {path_to_changed_dir} must be a subdirectory of the cwd ({Path.cwd()})"
             )
 
         if not Path(git_path).exists():
diff --git a/tools/api_proto_plugin/annotations.py b/tools/api_proto_plugin/annotations.py
index 3ae1244150c3..bdff4125b014 100644
--- a/tools/api_proto_plugin/annotations.py
+++ b/tools/api_proto_plugin/annotations.py
@@ -4,7 +4,7 @@
 from functools import partial
 
 # Key-value annotation regex.
-ANNOTATION_REGEX = re.compile('\[#([\w-]+?):\s*(.*?)\](\s?)', re.DOTALL)
+ANNOTATION_REGEX = re.compile(r'\[#([\w-]+?):\s*(.*?)\](\s?)', re.DOTALL)
 
 # Page/section titles with special prefixes in the proto comments
 DOC_TITLE_ANNOTATION = 'protodoc-title'
diff --git a/tools/base/requirements.in b/tools/base/requirements.in
index 34077f286ead..bd86fd067c84 100644
--- a/tools/base/requirements.in
+++ b/tools/base/requirements.in
@@ -27,6 +27,7 @@ gitpython
 gsutil
 icalendar
 jinja2
+kafka-python-ng
 multidict>=6.0.2
 orjson
 pep8-naming
diff --git a/tools/base/requirements.txt b/tools/base/requirements.txt
index 65d051784d0d..024081a2dc9b 100644
--- a/tools/base/requirements.txt
+++ b/tools/base/requirements.txt
@@ -760,6 +760,10 @@ jinja2==3.1.4 \
     #   envoy-base-utils
     #   envoy-dependency-check
     #   sphinx
+kafka-python-ng==2.2.2 \
+    --hash=sha256:3fab1a03133fade1b6fd5367ff726d980e59031c4aaca9bf02c516840a4f8406 \
+    --hash=sha256:87ad3a766e2c0bec71d9b99bdd9e9c5cda62d96cfda61a8ca16510484d6ad7d4
+    # via -r requirements.in
 markupsafe==2.1.5 \
     --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \
     --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \
diff --git a/tools/build_profile.py b/tools/build_profile.py
index 5c47d7a5895f..c6212a9a20bf 100755
--- a/tools/build_profile.py
+++ b/tools/build_profile.py
@@ -13,7 +13,7 @@ def print_profile(f):
     prev_cmd = None
     prev_timestamp = None
     for line in f:
-        sr = re.match('\++ (\d+\.\d+) (.*)', line)
+        sr = re.match(r'\++ (\d+\.\d+) (.*)', line)
         if sr:
             timestamp, cmd = sr.groups()
             if prev_cmd:
diff --git a/tools/dependency/validate.py b/tools/dependency/validate.py
index b5c9e26aab88..f3025d40d2b8 100755
--- a/tools/dependency/validate.py
+++ b/tools/dependency/validate.py
@@ -15,8 +15,8 @@
 
 import envoy_repo
 
-BAZEL_QUERY_EXTERNAL_DEP_RE = re.compile('@(\w+)//')
-EXTENSION_LABEL_RE = re.compile('(//source/extensions/.*):')
+BAZEL_QUERY_EXTERNAL_DEP_RE = re.compile(r'@(\w+)//')
+EXTENSION_LABEL_RE = re.compile(r'(//source/extensions/.*):')
 
 # We can safely ignore these as they are from Bazel or internal repository structure.
 IGNORE_DEPS = set([
diff --git a/tools/deprecate_version/deprecate_version.py b/tools/deprecate_version/deprecate_version.py
index 5ccb22ee7571..72b3e3fa5850 100644
--- a/tools/deprecate_version/deprecate_version.py
+++ b/tools/deprecate_version/deprecate_version.py
@@ -171,7 +171,7 @@ def get_runtime_and_pr():
                 if runtime_guard == 'envoy_reloadable_features_test_feature_true':
                     found_test_feature_true = True
                     continue
-                pr_num = re.search('\(#(\d+)\)', commit.message)
+                pr_num = re.search(r'\(#(\d+)\)', commit.message)
                 # Some commits may not come from a PR (if they are part of a security point release).
                 pr = (int(pr_num.group(1))) if pr_num else None
                 pr_date = date.fromtimestamp(commit.committed_date)
diff --git a/tools/docs/generate_external_deps_rst.py b/tools/docs/generate_external_deps_rst.py
index e0ee92693163..3560728555dc 100755
--- a/tools/docs/generate_external_deps_rst.py
+++ b/tools/docs/generate_external_deps_rst.py
@@ -50,7 +50,7 @@ def csv_table(headers, widths, rows):
     csv_rows = '\n  '.join(', '.join(row) for row in rows)
     return f'''.. csv-table::
   :header: {', '.join(headers)}
-  :widths: {', '.join(str(w) for w in widths) }
+  :widths: {', '.join(str(w) for w in widths)}
 
   {csv_rows}
 
diff --git a/tools/gsutil/crcmod/_crcfunext.cpython-311-x86_64-linux-gnu.so b/tools/gsutil/crcmod/_crcfunext.cpython-311-x86_64-linux-gnu.so
deleted file mode 100755
index 017fb28eff64..000000000000
Binary files a/tools/gsutil/crcmod/_crcfunext.cpython-311-x86_64-linux-gnu.so and /dev/null differ
diff --git a/tools/gsutil/crcmod/_crcfunext.cpython-312-x86_64-linux-gnu.so b/tools/gsutil/crcmod/_crcfunext.cpython-312-x86_64-linux-gnu.so
new file mode 100755
index 000000000000..37da1f52bc44
Binary files /dev/null and b/tools/gsutil/crcmod/_crcfunext.cpython-312-x86_64-linux-gnu.so differ
diff --git a/tools/gsutil/vendor_util.sh b/tools/gsutil/vendor_util.sh
index ceded21a483d..f7e08dd1e9f8 100755
--- a/tools/gsutil/vendor_util.sh
+++ b/tools/gsutil/vendor_util.sh
@@ -16,7 +16,7 @@ fi
 ARCH=x86_64
 HOST_UID="$(id -u)"
 HOST_GID="$(id -g)"
-PYTHON_VERSION=3.11
+PYTHON_VERSION=3.12
 
 docker run --rm \
        -v "$PWD/tools/gsutil/crcmod:/output" \
diff --git a/tools/proto_format/format_api.py b/tools/proto_format/format_api.py
index d096f0677376..ca0b189c46cf 100644
--- a/tools/proto_format/format_api.py
+++ b/tools/proto_format/format_api.py
@@ -76,9 +76,9 @@
 )
 """)
 
-IMPORT_REGEX = re.compile('import "(.*)";')
-SERVICE_REGEX = re.compile('service \w+ {')
-PACKAGE_REGEX = re.compile('\npackage ([a-z0-9_\.]*);')
+IMPORT_REGEX = re.compile(r'import "(.*)";')
+SERVICE_REGEX = re.compile(r'service \w+ {')
+PACKAGE_REGEX = re.compile(r'\npackage ([a-z0-9_\.]*);')
 PREVIOUS_MESSAGE_TYPE_REGEX = re.compile(r'previous_message_type\s+=\s+"([^"]*)";')
 
 
diff --git a/tools/protoprint/protoprint.py b/tools/protoprint/protoprint.py
index ca081ef1bb43..67648576ecdd 100644
--- a/tools/protoprint/protoprint.py
+++ b/tools/protoprint/protoprint.py
@@ -57,11 +57,11 @@ def extract_clang_proto_style(clang_format_text):
     format_dict = {}
     for line in clang_format_text.split('\n'):
         if lang is None or lang != 'Proto':
-            match = re.match('Language:\s+(\w+)', line)
+            match = re.match(r'Language:\s+(\w+)', line)
             if match:
                 lang = match.group(1)
             continue
-        match = re.match('(\w+):\s+(\w+)', line)
+        match = re.match(r'(\w+):\s+(\w+)', line)
         if match:
             key, value = match.groups()
             format_dict[key] = value
@@ -196,7 +196,7 @@ def format_header_from_file(
         if t.startswith('envoy.') and typedb.types[t].proto_path != file_proto.name)
 
     def camel_case(s):
-        return ''.join(t.capitalize() for t in re.split('[\._]', s))
+        return ''.join(t.capitalize() for t in re.split(r'[\._]', s))
 
     package_line = 'package %s;\n' % file_proto.package
     file_block = '\n'.join(['syntax = "proto3";\n', package_line])
diff --git a/tools/spelling/check_spelling_pedantic.py b/tools/spelling/check_spelling_pedantic.py
index 909024b983ae..d94f60a1aed6 100755
--- a/tools/spelling/check_spelling_pedantic.py
+++ b/tools/spelling/check_spelling_pedantic.py
@@ -57,7 +57,7 @@ def cmp(x, y):
 TODO = re.compile(r'(TODO|NOTE)\s*\(@?[A-Za-z0-9-]+\):?')
 
 # Ignore parameter names in doxygen comments.
-METHOD_DOC = re.compile('@(param\s+\w+|return(\s+const)?\s+\w+)')
+METHOD_DOC = re.compile(r'@(param\s+\w+|return(\s+const)?\s+\w+)')
 
 # Camel Case splitter
 CAMEL_CASE = re.compile(r'[A-Z]?[a-z]+|[A-Z]+(?=[A-Z]|$)')
@@ -164,8 +164,8 @@ def start(self):
         self.prefixes = prefixes
         self.suffixes = suffixes
 
-        self.prefix_re = re.compile("(?:\s|^)((%s)-)" % ("|".join(prefixes)), re.IGNORECASE)
-        self.suffix_re = re.compile("(-(%s))(?:\s|$)" % ("|".join(suffixes)), re.IGNORECASE)
+        self.prefix_re = re.compile(r"(?:\s|^)((%s)-)" % ("|".join(prefixes)), re.IGNORECASE)
+        self.suffix_re = re.compile(r"(-(%s))(?:\s|$)" % ("|".join(suffixes)), re.IGNORECASE)
 
         # Generate aspell personal dictionary.
         pws = os.path.join(CURR_DIR, '.aspell.en.pws')
diff --git a/tools/stack_decode.py b/tools/stack_decode.py
index 105cf04acbd3..7465d6f0ff7d 100755
--- a/tools/stack_decode.py
+++ b/tools/stack_decode.py
@@ -28,18 +28,18 @@
 def decode_stacktrace_log(object_file, input_source, address_offset=0):
     # Match something like:
     #     [backtrace] [bazel-out/local-dbg/bin/source/server/_virtual_includes/backtrace_lib/server/backtrace.h:84]
-    backtrace_marker = "\[backtrace\] [^\s]+"
+    backtrace_marker = r"\[backtrace\] [^\s]+"
     # Match something like:
     #     ${backtrace_marker} Address mapping: 010c0000-02a77000
-    offset_re = re.compile("%s Address mapping: ([0-9A-Fa-f]+)-([0-9A-Fa-f]+)" % backtrace_marker)
+    offset_re = re.compile(r"%s Address mapping: ([0-9A-Fa-f]+)-([0-9A-Fa-f]+)" % backtrace_marker)
     # Match something like:
     #     ${backtrace_marker} #10: SYMBOL [0xADDR]
     # or:
     #     ${backtrace_marker} #10: [0xADDR]
-    stackaddr_re = re.compile("%s #\d+:(?: .*)? \[(0x[0-9a-fA-F]+)\]$" % backtrace_marker)
+    stackaddr_re = re.compile(r"%s #\d+:(?: .*)? \[(0x[0-9a-fA-F]+)\]$" % backtrace_marker)
     # Match something like:
     #     #10 0xLOCATION (BINARY+0xADDR)
-    asan_re = re.compile(" *#\d+ *0x[0-9a-fA-F]+ *\([^+]*\+(0x[0-9a-fA-F]+)\)")
+    asan_re = re.compile(r" *#\d+ *0x[0-9a-fA-F]+ *\([^+]*\+(0x[0-9a-fA-F]+)\)")
 
     try:
         while True:
diff --git a/tools/type_whisperer/typedb_gen.py b/tools/type_whisperer/typedb_gen.py
index 5d418c335591..d118cd40ab46 100644
--- a/tools/type_whisperer/typedb_gen.py
+++ b/tools/type_whisperer/typedb_gen.py
@@ -19,11 +19,11 @@
     (r'(envoy[\w\.]*\.)(v1alpha\d?|v1)', r'\1v3'),
     (r'(envoy[\w\.]*\.)(v2alpha\d?|v2)', r'\1v3'),
     # These are special cases, e.g. upgrading versionless packages.
-    ('envoy\.type\.matcher', 'envoy.type.matcher.v3'),
-    ('envoy\.type', 'envoy.type.v3'),
-    ('envoy\.config\.cluster\.redis', 'envoy.extensions.clusters.redis.v3'),
+    (r'envoy\.type\.matcher', 'envoy.type.matcher.v3'),
+    (r'envoy\.type', 'envoy.type.v3'),
+    (r'envoy\.config\.cluster\.redis', 'envoy.extensions.clusters.redis.v3'),
     (
-        'envoy\.config\.retry\.previous_priorities',
+        r'envoy\.config\.retry\.previous_priorities',
         'envoy.extensions.retry.priority.previous_priorities.v3'),
 ]