import requests
from bs4 import BeautifulSoup
import sys
import os
class Rapiddns:
    """Scrape rapiddns.io for the subdomains of one domain.

    Usage: construct with a domain, call filter() to fetch and parse,
    then output() to write the rows to result/<domain>-Rapiddns.txt.
    Note: the first row appended to self.data is the table header row;
    callers that want only records should drop it.
    """

    def __init__(self, domain):
        # Target domain to look up.
        self.domain = domain
        # Parsed table rows, one dict per row; filled by filter().
        self.data = []

    def run(self):
        """Fetch the raw subdomain-listing HTML for self.domain.

        Returns the response body as text, or None when the request
        fails or the server answers with an error status, so callers
        can treat "no data" gracefully instead of crashing.
        """
        url = f'http://rapiddns.io/subdomain/{self.domain}'
        params = {'full': '1'}
        try:
            # Pass params by keyword and bound the wait so one dead
            # host cannot hang the whole scan.
            resp = requests.get(url, params=params, timeout=30)
            resp.raise_for_status()
        except requests.RequestException:
            return None
        return resp.text

    def filter(self):
        """Parse the HTML from run() into self.data as row dicts."""
        text = self.run()
        if not text:
            return
        soup = BeautifulSoup(text, 'html.parser')
        tables = soup.find_all('table', class_="table table-striped table-bordered")
        if not tables:
            # Page layout changed or the domain had no results;
            # leave self.data untouched instead of raising IndexError.
            return
        for tr in tables[0].find_all('tr'):
            # Header rows use <th>, data rows use <td>; collecting both
            # makes the header the first entry in self.data.
            row = [cell.get_text() for cell in tr.find_all('th')]
            row.extend(cell.get_text() for cell in tr.find_all('td'))
            if len(row) < 5:
                # Skip malformed or empty rows rather than IndexError.
                continue
            d = {'#': row[0], 'Domain': row[1], 'Address': row[2], 'Type': row[3], 'Date': row[4]}
            self.data.append(d)

    def output(self):
        """Write collected rows (one dict repr per line) under result/."""
        dir = 'result'
        if not os.path.exists(dir):
            os.mkdir(dir)
        if self.data:
            # File name: <domain>-Rapiddns.txt
            logfile = os.path.join(dir, self.domain.strip() + '-' + str(self.__class__.__name__) + '.txt')
            with open(logfile, 'w+', encoding='utf-8') as f:
                for d in self.data:
                    f.write(str(d))
                    f.write('\n')
if __name__ == '__main__':
    # Read one target domain per line from targets.txt and scrape each.
    file = 'targets.txt'
    with open(file, 'r', encoding='utf-8') as f:
        for domain in f:
            domain = domain.strip()
            if not domain:
                # Skip blank lines so we never query an empty hostname.
                continue
            rapiddns = Rapiddns(domain)
            rapiddns.filter()
            if rapiddns.data:
                # The first parsed row is the table header; drop it so the
                # output file contains only real records. Guarding on a
                # non-empty list avoids the IndexError the unconditional
                # `del data[0]` raised when a scrape returned nothing.
                del rapiddns.data[0]
            rapiddns.output()

# Sample of the records written to result/<domain>-Rapiddns.txt:
'''
{'#': '1', 'Domain': 'wx.hao24.com', 'Address': 'pguis0rete0xeiqaupgmjan5ftxhzdid.aliyundunwaf.com.\n', 'Type': 'CNAME', 'Date': '2021-11-06'}
{'#': '2', 'Domain': 'www.hao24.com', 'Address': '101.37.43.33\n', 'Type': 'A', 'Date': '2021-11-06'}
{'#': '3', 'Domain': 'shop.hao24.com', 'Address': 'vbtyhzb7pyj4u3ssx2tkjbr5cvd4qyns.aliyundunwaf.com.\n', 'Type': 'CNAME', 'Date': '2021-11-06'}
{'#': '4', 'Domain': 'server.hao24.com', 'Address': '120.55.148.91\n', 'Type': 'A', 'Date': '2021-11-06'}
{'#': '5', 'Domain': 'open.hao24.com', 'Address': 'l9rrmyuqziy9pqbsm0z7oil7a2j9o5gr.aliyundunwaf.com.\n', 'Type': 'CNAME', 'Date': '2021-11-06'}
{'#': '6', 'Domain': 'openfire.hao24.com', 'Address': '116.62.85.172\n', 'Type': 'A', 'Date': '2021-11-06'}
'''