AngelSword/information/robots_find.py

#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
name: robots.txt file discovery
referer: unknown
author: Lucifer
description: robots.txt is the standard robots-exclusion file for crawlers; it reveals which directories a site blocks from search engines
'''
import sys
import requests
import warnings
from termcolor import cprint
class robots_find_BaseVerify:
    def __init__(self, url):
        self.url = url

    def run(self):
        payload = "/robots.txt"
        vulnurl = self.url + payload
        try:
            req = requests.get(vulnurl, timeout=10, verify=False)
            # A Disallow directive means the file exists and lists hidden paths
            if "Disallow" in req.text:
                cprint("[+]robots.txt crawler file found...(sensitive information)"+"\tpayload: "+vulnurl, "green")
            else:
                cprint("[-]robots_find vulnerability not present", "white", "on_grey")
        except Exception:
            cprint("[-] "+__file__+"====>vulnerability probably not present", "cyan")

if __name__ == "__main__":
    warnings.filterwarnings("ignore")
    testVuln = robots_find_BaseVerify(sys.argv[1])
    testVuln.run()
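
The docstring notes that robots.txt reveals which directories a site hides from crawlers, but the POC above only reports whether the file exists. A minimal standalone sketch of the follow-up step, extracting the Disallow entries themselves, might look like this (disallowed_paths is a hypothetical helper, not part of AngelSword; it assumes only the requests library already used above):

#!/usr/bin/env python
# Sketch: list the paths a site's robots.txt hides from crawlers.
import sys
import warnings
import requests

def disallowed_paths(base_url, timeout=10):
    resp = requests.get(base_url.rstrip("/") + "/robots.txt",
                        timeout=timeout, verify=False)
    paths = []
    for line in resp.text.splitlines():
        # Directives are "Field: value"; anything after "#" is a comment
        line = line.split("#", 1)[0].strip()
        if line.lower().startswith("disallow:"):
            value = line.split(":", 1)[1].strip()
            if value:
                paths.append(value)
    return paths

if __name__ == "__main__":
    warnings.filterwarnings("ignore")  # silence the verify=False warning, as above
    for path in disallowed_paths(sys.argv[1]):
        print(path)

Run the same way as the POC, e.g. python robots_paths.py http://example.com, to print one hidden path per line.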