Scanning $url"
+
+ # Derive the output filename from the hostname in the URL
+ file="$(echo "$url" | grep -oP "://(www\.\K[a-z0-9]+|\K[a-z0-9]+)").html"
+
+ # Check if site is live
+ s=$(date +"%s")
+ curl --connect-timeout 7 -A "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36" -s "$url" -k -L -o "$file"
+ e=$(date +"%s")
+ t=$((e-s))
+ # curl was capped at a 7-second connect timeout, so hitting it suggests the host never answered
+ [[ $t -ge 7 ]] && { echo "Site might not be live"; return 1; }
+
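+ # ch, cf, ver, plugins, themes and users are helper functions defined elsewhere in enum.sh
+ # (outside this hunk).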
+ # Check if the site is running WordPress
+ ch || { echo "Site is not running WordPress"; return 1; }
+
+ # Check filetype
+ cf
+
+ # Enumerate Wordpress version
+ ver
+
+ # Enumerate Plugins
+ plugins
+
+ # Enumerate themes
+ themes
+
+ # Enumerate users
+ users
+}
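+# Note: scrap.py (added below) regenerates the releases*, releases_*, and vulns_* arrays
+# embedded in this script, which ver, plugins and themes presumably consume.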
+
+# Syntax
+[[ $# -lt 1 ]] && { echo "Syntax: ./enum.sh [url]"; exit 1; }
+
+# Check if URL is valid
+if [[ $# == 1 && $1 =~ ^(http|https)://.* ]]; then
+ url=$(echo "$1" | grep -oP "[a-z]+://[a-z0-9._-]+\.[a-z.]+|[a-z]+://[\d.]+:[\d]+|[a-z]+://[\d.]+")
+ do_it
+elif [[ $# -gt 1 ]]; then
+ for url in "$@"; do
+ do_it
+ echo
+ done
+else
+ echo "Invalid url"
+ exit
+fi
diff --git a/scrap.py b/scrap.py
new file mode 100644
index 0000000..b22621c
--- /dev/null
+++ b/scrap.py
@@ -0,0 +1,180 @@
+#!/usr/bin/python3
+
+# Fix the rel_themes, rel_plugins population issue
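+# Overview: scrape the WordPress release history from wordpress.org, then plugin/theme slugs,
+# versions and vulnerabilities from wpscan.com and wordpress.org, and finally rewrite the
+# corresponding bash arrays inside enum.sh.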
+
+import string
+import asyncio
+import httpx
+import re
+import html
+import requests
+from datetime import datetime
+
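+# The write_* helpers below regex-replace the matching bash array literal inside enum.sh
+# (e.g. vulns_plugins=(...)) with freshly scraped values; write_wp also updates the lv= version.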
+def write_vulns(read_in, write_out, spec):
+ with open(read_in, "r") as file:
+ data = file.read()
+ match = re.findall(rf'(vulns_{spec}=\(.*\))', data)[0]
+ data = data.replace(match, f"vulns_{spec}=({write_out})")
+
+ with open(read_in, "w") as file:
+ file.write(data)
+
+def write_releases(read_in, write_out, spec):
+ with open(read_in, "r") as file:
+ data = file.read()
+ match = re.findall(rf'(releases_{spec}=\(.*\))', data)[0]
+ data = data.replace(match, f"releases_{spec}=({write_out})")
+
+ with open(read_in, "w") as file:
+ file.write(data)
+
+def write_wp(read_in, write_out, lv):
+ with open(read_in, "r") as file:
+ data = file.read()
+ match = re.findall(r'(releases=\(.*\))', data)[0]
+ data = data.replace(match, f"releases=({write_out})")
+ match = re.findall(r'lv=[\d.]+', data)[0]
+ data = data.replace(match, f"lv={lv[0]}")
+
+ with open(read_in, "w") as file:
+ file.write(data)
+
+def scrap_wordpress():
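+ # Scrape the release table from wordpress.org into wp[] as "version dd-mm-yyyy" entries, and
+ # record what the page lists as the latest version in lv[]. The hard-coded del/slice indices
+ # below appear tied to the current layout of the releases page.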
+ print("[+] Scrapping WordPress")
+ url = "https://wordpress.org/download/releases/"
+ r = requests.get(url)
+ versions = re.findall(r'">([\d.]+)', r.text)
+ del versions[7:55]
+ versions = versions[1:714]
+ lv.append(versions[0])
+ # Release dates appear as "Month DD, YYYY"; match them directly and reformat to dd-mm-yyyy
+ dates = [datetime.strptime(a, "%B %d, %Y").strftime("%d-%m-%Y") for a in re.findall(r'([A-Z][a-z]+ \d+, \d+)', r.text)][1:714]
+ for i in range(0, len(versions)):
+ wp.append(f"\"{versions[i]} {dates[i]}\"")
+
+async def first_scrap(client, c, spec):
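+ # Fetch page `c` of the wpscan.com plugins/themes index and add every slug found to the
+ # matching global set.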
+ params = {"page":f"{c}"}
+ r = await client.get(f"https://wpscan.com/{spec}/", params=params)
+ if "No results found" in r.text: return
+ s = html.unescape(r.text)
+ try:
+ m = re.findall(rf'\t\t([a-zA-Z0-9-]+)\t', s)
+ l = len(m)
+ plugins.update(m) if (l > 0 and spec == "plugins") else themes.update(m) if (l > 0 and spec == "themes") else ""
+ except:
+ pass
+
+async def second_scrap(client, c, char, spec):
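+ # Like first_scrap, but pages through the per-letter listing selected by the `get` parameter.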
+ params = {"page":f"{c}", "get":f"{char}"}
+ r = await client.get(f"https://wpscan.com/{spec}/", params=params)
+ if "No results found" in r.text: return
+ s = html.unescape(r.text)
+ try:
+ m = re.findall(rf'\t\t([a-zA-Z0-9-]+)\t', s)
+ l = len(m)
+ plugins.update(m) if (l > 0 and spec == "plugins") else themes.update(m) if (l > 0 and spec == "themes") else ""
+ except:
+ pass
+
+async def final_scrap(client, item, spec):
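+ # Scrape a single plugin/theme page on wpscan.com: vulnerability titles, the version each
+ # appears to have been fixed in, and the CVSS severity, stored as '"slug version title SEVERITY"'
+ # entries in out_plugins/out_themes.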
+ r = await client.get(f"https://wpscan.com/{spec}/{item}/")
+ s = html.unescape(r.text)
+ # FIXME: placeholder pattern for the vulnerability titles; verify against the live wpscan.com markup.
+ vuln = [a[:-1] if a.endswith(" ") else a for a in re.findall(r'\n\t\t\t\t(.*)\t\t\t', s)]
+ version = re.findall(r'\n\t\t\t\t(.*)\t\t\t|checkmark-green-alt.svg"\n\t\t\t\t\talt\n\t\t\t\t/>\n\t\t\t\t[a-zA-Z ]+(.*)\t\t\t', s)
+ version = [a[0].replace(" ", ".") if a[0] else a[1] for a in version]
+ severity = re.findall(r'"vulnerabilities__table--cvss-text">\n\t\t\t\t\t<.*>\n\t\t\t\t(.*)', s)
+ severity = [a.split("(")[1].split(")")[0].upper() if a != "n/a" else "" for a in severity]
+
+ for i in range(len(severity)):
+ out_plugins.append(f"\"{item} {version[i]} {vuln[i]} {severity[i]}\"") if spec == "plugin" else out_themes.append(f"\"{item} {version[i]} {vuln[i]} {severity[i]}\"") if spec == "theme" else ""
+
+async def scrap_version(client, item, spec):
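+ # Look up the current release version of a plugin/theme on wordpress.org and record it as a
+ # '"slug version"' entry for the releases_* arrays.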
+ r = await client.get(f"https://wordpress.org/{spec}/{item}/")
+ try:
+ m = re.findall(r'Version (.*)', r.text)[0] if spec == "plugins" else re.findall(r'Version\n\t\t\t\t\t\t(.*)', r.text)[0] if spec == "themes" else ""
+ rel_plugins.append(f"\"{item} {m}\"") if spec == "plugins" else rel_themes.append(f"\"{item} {m}\"") if spec == "themes" else ""
+ except:
+ pass
+
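+# Shared accumulators filled by the scraping coroutines above and written back into enum.sh below.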
+plugins=set([])
+themes=set([])
+out_plugins=[]
+out_themes=[]
+rel_plugins=[]
+rel_themes=[]
+wp=[]
+lv=[]
+
+async def main():
+ scrap_wordpress()
+ async with httpx.AsyncClient(timeout=None, verify=False) as client:
+ tasks=[first_scrap(client, c, "plugins") for c in range(1, 6)]
+ await asyncio.gather(*tasks)
+
+ tasks=[scrap_version(client, plugin, "plugins") for plugin in plugins]
+ tasks+=[final_scrap(client, plugin, "plugin") for plugin in plugins]
+ await asyncio.gather(*tasks)
+
+ plugins.clear()
+
+ charset=string.ascii_lowercase
+ for i in range(len(charset)):
+ char = charset[i]
+ print(f"\r[+] Scrapping Plugins starting with {char}", end="", flush=True)
+ for l in range(1, 200, 100):
+ tasks=[second_scrap(client, c, char, "plugins") for c in range(l, l+100)]
+ await asyncio.gather(*tasks)
+
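+ # Resolve versions and vulnerabilities in chunks of 50 slugs, presumably to cap the number of
+ # concurrent requests per asyncio.gather batch.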
+ tmp=[list(plugins)[i:i+50] for i in range(0, len(plugins), 50)]
+ for k in range(len(tmp)):
+ tasks=[scrap_version(client, plugin, "plugins") for plugin in tmp[k]]
+ tasks+=[final_scrap(client, plugin, "plugin") for plugin in tmp[k]]
+ await asyncio.gather(*tasks)
+
+ plugins.clear()
+
+ print()
+
+ tasks=[first_scrap(client, c, "themes") for c in range(1, 6)]
+ await asyncio.gather(*tasks)
+
+ tasks=[scrap_version(client, theme, "themes") for theme in themes]
+ tasks+=[final_scrap(client, theme, "theme") for theme in themes]
+ await asyncio.gather(*tasks)
+
+ themes.clear()
+
+ charset=string.ascii_lowercase
+ for i in range(len(charset)):
+ char = charset[i]
+ print(f"\r[+] Scrapping Themes starting with {char}", end="", flush=True)
+ for l in range(1, 200, 100):
+ tasks=[second_scrap(client, c, char, "themes") for c in range(l, l+100)]
+ await asyncio.gather(*tasks)
+
+ tmp=[list(themes)[i:i+50] for i in range(0, len(themes), 50)]
+ for k in range(len(tmp)):
+ tasks=[scrap_version(client, theme, "themes") for theme in tmp[k]]
+ tasks+=[final_scrap(client, theme, "theme") for theme in tmp[k]]
+ await asyncio.gather(*tasks)
+
+ themes.clear()
+
+ print()
+
+asyncio.run(main())
+
+# Update releases=()
+write_wp("enum.sh", " ".join(wp), lv)
+
+# Update releases_plugins=()
+write_releases("enum.sh", " ".join(rel_plugins), "plugins")
+
+# Update vulns_plugins=()
+write_vulns("enum.sh", " ".join(out_plugins), "plugins")
+
+# Update releases_themes=()
+write_releases("enum.sh", " ".join(rel_themes), "themes")
+
+# Update vulns_themes=()
+write_vulns("enum.sh", " ".join(out_themes), "themes")