vulnerabilitySearcher.py
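"""
vulnerabilitySearcher.py

Interactive CVE and exploit lookup tool. Given a technology name and version it:
  * finds matching CPE 2.3 strings on the NVD CPE search page,
  * pulls CVE details (description, CWEs, CVSS v2 severity) from the NVD CVE API 2.0,
  * looks up short vulnerability titles on Snyk,
  * checks for public PoCs on GitHub (PoC-in-GitHub), Exploit-DB (via searchsploit),
    Packet Storm Security, and the full-disclosure archive on marc.info.

Usage: python vulnerabilitySearcher.py
The script prompts for a technology name and version (for instance "apache" and
"2.4.49"; example input only).
"""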
import re
import json
import subprocess

import requests
from bs4 import BeautifulSoup
from termcolor import colored

art = """
 __  __ ___ ____      _    _  __
|  \/  |_ _|  _ \    / \  | |/ /
| |\/| || || |_) |  / _ \ | ' /
| |  | || ||  _ <  / ___ \| . \\
|_|  |_|___|_| \_\/_/   \_\_|\_\ """
print(colored(art, "magenta"))
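
# find_cpes: scrape the NVD CPE search results page for CPE 2.3 strings matching
# the "<component> <version>" keyword. This relies on the HTML of
# https://nvd.nist.gov/products/cpe/search/results and may break if that page changes.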
def find_cpes(component, version):
    base_url = "https://nvd.nist.gov/products/cpe/search/results"
    params = {
        "namingFormat": "2.3",
        "keyword": f"{component} {version}"
    }
    response = requests.get(base_url, params=params)
    content = response.text
    cpe_matches = re.findall(r'cpe:(.*?)<', content)
    return cpe_matches
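
# synk_db: scrape the Snyk vulnerability database search page for a short,
# human-readable title for the given CVE. The regex targets Snyk's current
# "vuln table title" markup, so it returns None if the layout changes or
# nothing matches.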
def synk_db(cve_id):
    res = requests.get(f"https://security.snyk.io/vuln/?search={cve_id}")
    a_tag_pattern = r'data-snyk-test="vuln table title".*>([^"]+)<!----><!---->'
    a_tag_matches = re.findall(a_tag_pattern, res.text)
    if a_tag_matches:
        snyk_short_name = a_tag_matches[0].strip()
        return snyk_short_name
    return None
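
# search_exploit: check the local Exploit-DB mirror via the searchsploit CLI.
# Returns 1 if searchsploit reports hits for the CVE ID, 0 if not, and an
# error string if the command fails or is not installed.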
def search_exploit(cve_id):
    try:
        # Execute searchsploit command
        output = subprocess.check_output(['searchsploit', cve_id], stderr=subprocess.STDOUT, universal_newlines=True)
        # searchsploit usually prints "No Results" when nothing matches
        # (assumption about the CLI's output format)
        if output.strip() and "No Results" not in output:
            return 1
        return 0
    except FileNotFoundError:
        return "searchsploit is not installed or not on PATH"
    except subprocess.CalledProcessError:
        return "Error occurred while searching for exploit"
def fetch_cve_details(cpe_string):
    base_url = "https://services.nvd.nist.gov/rest/json/cves/2.0"
    url = f"{base_url}?cpeName=cpe:{cpe_string}"
    response = requests.get(url)
    if response.status_code != 200:
        print(colored(f"Error: Unable to retrieve CVE data for CPE: {cpe_string}. Status code: {response.status_code}", "yellow"))
        return []
    try:
        data = response.json()
    except json.JSONDecodeError:
        print(colored(f"Error decoding JSON for CPE: {cpe_string}. Skipping.", "red"))
        return []
    all_cve_details = []
    for cve_item in data.get("vulnerabilities", []):
        cve = cve_item["cve"]
        cve_id = cve["id"]
        description_text = cve["descriptions"][0]["value"]
        metrics = cve.get("metrics", {})
        severity = metrics["cvssMetricV2"][0]["baseSeverity"] if "cvssMetricV2" in metrics else "Not Available"
        snyk_short_name = synk_db(cve_id)
        weaknesses = []
        for problem_type in cve.get("weaknesses", []):
            for description in problem_type["description"]:
                weaknesses.append(description["value"])
        all_cve_details.append({
            "CVE ID": cve_id,
            "Short Name": snyk_short_name,
            "Description": description_text,
            "Weaknesses": ", ".join(weaknesses) if weaknesses else "NO CWE",
            "severity": severity
        })
    return all_cve_details
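
# fetch_github_urls: query the PoC-in-GitHub API (poc-in-github.motikan2010.net)
# and return the GitHub URLs of public proof-of-concept repositories for the CVE.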
def fetch_github_urls(cve_id):
    api_url = f"https://poc-in-github.motikan2010.net/api/v1/?cve_id={cve_id}"
    response = requests.get(api_url)
    if response.status_code == 200:
        data = response.json()
        if "pocs" in data and data["pocs"]:
            github_urls = [poc["html_url"] for poc in data["pocs"]]
            return github_urls
    return []
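
# search_and_extract_download_links: scrape Packet Storm Security search results
# for ".txt" advisory/exploit download links mentioning the product name.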
def search_and_extract_download_links(product_name):
    search_url = f"https://packetstormsecurity.com/search/?q={product_name}"
    response = requests.get(search_url)
    download_links = []
    if response.status_code == 200:
        soup = BeautifulSoup(response.text, 'html.parser')
        results = soup.find_all('a', href=True)
        for result in results:
            href = result['href']
            if '/files/download/' in href and href.endswith('.txt'):
                download_links.append(f"https://packetstormsecurity.com{href}")
    if not download_links:
        print(colored("No download links found on Packet Storm Security.", "green", attrs=["underline"]))
        return None
    return download_links
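
# search_marc_info: search the full-disclosure mailing-list archive on marc.info
# and return the names and links of matching posts, or None if nothing is found.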
def search_marc_info(search_term):
    # Make a GET request to the full-disclosure archive search
    url = f"https://marc.info/?l=full-disclosure&s={search_term}"
    response = requests.get(url)
    # Check if the request was successful
    if response.status_code != 200:
        print(colored("Failed to retrieve the web page.", "red"))
        print(f"Status code: {response.status_code}")
        return None
    # Parse the HTML content of the page
    soup = BeautifulSoup(response.text, 'html.parser')
    # marc.info reports "No hits found for" when the search returns nothing
    if "No hits found for" in soup.get_text():
        print(colored("No matching exploits found.", "red", attrs=["underline"]))
        return None
    pre_tag = soup.find('pre')
    if pre_tag is None:
        print(colored("No matching exploits found.", "green"))
        return None
    # Find all <a> tags within the <pre> listing, excluding the "full-disc" list links
    post_links = pre_tag.find_all('a', string=lambda text: text is not None and "full-disc" not in text)
    if not post_links:
        print(colored("No matching exploits found.", "green"))
        return None
    # Collect all post names and links
    results = []
    for link in post_links:
        name = link.get_text(strip=True)
        link_url = "https://marc.info" + link['href']
        results.append({"Name": name, "Link": link_url})
    return results
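
# Entry point: prompt for a technology name and version, enumerate CPEs, then
# report CVE details and possible public exploits from NVD, GitHub, Exploit-DB,
# Packet Storm Security, and marc.info.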
if __name__ == "__main__":
    print(colored("CVE and Exploit Searcher - @kimko\n\n", "magenta", attrs=["bold"]))
    component = input(colored("> Enter technology name : ", "magenta"))
    version = input(colored("> Enter version : ", "magenta"))
    cpe_strings = find_cpes(component, version)
    if cpe_strings:
        print(colored("\nCOMMON PLATFORM ENUMERATION Searcher", "green", attrs=["bold"]))
        for cpe_string in cpe_strings:
            print(colored(f"{cpe_string}", "white"))
        for cpe_string in cpe_strings:
            results = fetch_cve_details(cpe_string)
            if results:
                for result in results:
                    cve_id = result["CVE ID"]
                    print("____________________________________________________________________________________________")
                    if result["Short Name"]:
                        print(colored(f"\nCVE DETAILS > {cve_id} [{result['Short Name']}]", "magenta", attrs=["bold"]))
                    else:
                        print(colored(f"\nCVE DETAILS > {cve_id}", "magenta", attrs=["bold"]))
                    if result["Weaknesses"]:
                        print(colored(f"Weakness Enumeration {result['Weaknesses']}", "white"))
                    if result['severity'] in ("CRITICAL", "HIGH"):
                        print("SEVERITY >> " + colored(f"{result['severity']}", "red", attrs=["bold"]))
                    elif result['severity'] == "MEDIUM":
                        print("SEVERITY >> " + colored(f"{result['severity']}", "yellow", attrs=["bold"]))
                    elif result['severity'] == "LOW":
                        print("SEVERITY >> " + colored(f"{result['severity']}", "green", attrs=["bold"]))
                    print(colored(f"{result['Description']}", "yellow", attrs=["bold"]))
                    github_urls = fetch_github_urls(cve_id)
                    if github_urls:
                        print(colored("[Github] Public Exploit/POC >", "red"))
                        for url in github_urls:
                            print(colored(f"    {url}", "blue"))
                    else:
                        print(colored("NO Public Exploit/POC is found Over Github", "green"))
                    # Run searchsploit once per CVE and reuse the result
                    exploit_db_status = search_exploit(cve_id)
                    if exploit_db_status == 1:
                        print(colored("[Exploit-DB] Public Exploit >", "red"))
                        print(colored(f"    https://www.exploit-db.com/search?cve={cve_id}", "blue"))
                    elif exploit_db_status == 0:
                        print(colored("No Public Exploit Found over Exploit-DB\n", "green"))
                    else:
                        print(exploit_db_status)
    else:
        print(colored("No CPEs found for the provided component and version.", "red"))
    # Search for download links on Packet Storm Security even if no CPEs were found
    download_links = search_and_extract_download_links(component)
    if download_links:
        print(colored("\nPossible Exploits [Packet Storm Security]", "magenta", attrs=["underline"]))
        for link in download_links:
            print(link)
    else:
        print(colored("No download links found on Packet Storm Security.", "red", attrs=["underline"]))
    # Search marc.info (full-disclosure archive)
    search_term_marc = f"{component} {version}"
    print(f"\nUsing keyword {search_term_marc} for lookup...")
    marc_results = search_marc_info(search_term_marc)
    if marc_results:
        print(colored("\nPossible Exploits", "magenta", attrs=["underline"]))
        for result in marc_results:
            print(colored(f"\nName: {result['Name']}", "white"))
            print(colored(f"Link: {result['Link']}", "blue"))