# Coded by Z-winK 2022-05-04
# <https://twitter.com/_zwink>
# File crawls IP prefixes defined below and outputs domains.csv file with subdomains and their status codes
import requests, urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

OUTPUT_FILE = 'domains.csv'
# /24 prefixes to sweep; the last octet 1..255 is appended below.
PREFIXES = ['1.2.3.', '3.2.1.', '1.1.1.']


def _extract_hosts(error_text, ip):
    """Pull candidate hostnames out of a requests/urllib3 error message.

    Connecting to a bare IP over HTTPS (with certificate verification ON)
    typically fails with an SSLError whose text leaks the hostnames listed
    on the server's certificate. This parses that text the same way the
    original script did: split on ", ", then on single quotes, and keep
    dotted tokens that are not the IP itself and not known error noise.

    Returns a list of hostname-looking strings (may be empty).
    """
    hosts = []
    for part in error_text.split(", "):
        for token in part.split("'"):
            lowered = token.lower()
            if ("." in token
                    and ip not in token
                    and "max retries" not in lowered
                    and "getaddrinfo" not in lowered):
                hosts.append(token)
    return hosts


def _probe_host(ip, host, out):
    """GET https://host (cert checks off) and append 'ip,host,status' CSV.

    Any request failure is recorded as status 'Error' rather than aborting
    the sweep — best-effort by design.
    """
    try:
        resp = requests.get(url="https://" + host, verify=False, timeout=1)
        status = str(resp.status_code)
    except requests.RequestException:
        status = "Error"
    # "\n" (real newline), not "\\n": the original wrote a literal
    # backslash-n, fusing every CSV row onto one line.
    out.write(ip + "," + host + "," + status + "\n")
    out.flush()  # keep the file usable even if the sweep is interrupted
    print(ip, host, status)


def main():
    """Sweep each prefix's .1–.255, harvest cert hostnames, log statuses."""
    # 'w' truncates any previous run's results; the handle stays open for
    # the whole sweep instead of being reopened per row.
    with open(OUTPUT_FILE, 'w') as out:
        for prefix in PREFIXES:
            for last_octet in range(1, 256):
                ip = prefix + str(last_octet)
                print(ip)
                try:
                    # Deliberately NO verify=False here: the certificate
                    # mismatch error text is what leaks the hostnames.
                    requests.get(url="https://" + ip, timeout=1)
                except Exception as exc:
                    for host in _extract_hosts(str(exc), ip):
                        _probe_host(ip, host, out)


if __name__ == "__main__":
    main()
# Related tools / references (pasted notes, kept as comments):
# Release v1.12 · j3ssie/metabigor
# https://github.com/s0md3v/Photon
# Photon can extract the following data while crawling:
# example.com/gallery.php?id=2
# https://github.com/hassan0x/ReconHunter