import mechanize
import httplib
import argparse
import logging
from urlparse import urlparse
br = mechanize.Browser()  # initialize the browser
br.addheaders = [('User-agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; it; rv:1.8.1.11) Gecko/20071127 Firefox/2.0.0.11')]  # spoof a regular browser User-Agent
br.set_handle_robots(False)  # ignore robots.txt
br.set_handle_refresh(False)  # do not follow meta-refresh redirects
payloads = ['<svg "ons>', '" onfocus="alert(1);', 'javascript:alert(1)']  # probe strings; a hit is reported when one is reflected verbatim
blacklist = ['.png', '.jpg', '.jpeg', '.mp3', '.mp4', '.avi', '.gif', '.svg', '.pdf']  # file types not worth testing
xssLinks = []  # all cross-site scripting findings


class color:
    # ANSI escape sequences for coloured terminal output
    BLUE = '\033[94m'
    RED = '\033[91m'
    GREEN = '\033[92m'
    YELLOW = '\033[93m'
    BOLD = '\033[1m'
    END = '\033[0m'

    @staticmethod
    def log(lvl, col, msg):
        logger.log(lvl, col + msg + color.END)


print color.BOLD + color.RED + """
XssPy - Finding XSS made easier

Author: Faizan Ahmad (Fsecurify)
Email: fsecurify@gmail.com

Usage: python XssPy.py -u website.com (do not prepend www. or http://)
Comprehensive scan: python XssPy.py -u website.com -e
Verbose logging: python XssPy.py -u website.com -v
Cookies: python XssPy.py -u website.com -c name=val name=val

Description: XssPy is a Python tool for finding Cross Site Scripting
vulnerabilities in websites. This tool is the first of its kind.
Instead of just checking one page, as most tools do, it first traverses
the website and finds all of its links and subdomains. It then scans
every input on every page found during that traversal, using small yet
effective payloads to search for XSS vulnerabilities. XSS in many
high-profile websites and educational institutes has been found using
this very tool.
""" + color.END
logger = logging.getLogger(__name__)
lh = logging.StreamHandler()  # handler for the logger
logger.addHandler(lh)
formatter = logging.Formatter('[%(asctime)s] %(message)s', datefmt='%H:%M:%S')
lh.setFormatter(formatter)
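# Resulting log lines look like: [14:32:07] <message>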
parser = argparse.ArgumentParser()
parser.add_argument('-u', action='store', dest='url', help='The URL to analyze')
parser.add_argument('-e', action='store_true', dest='compOn', help='Enable comprehensive scan')
parser.add_argument('-v', action='store_true', dest='verbose', help='Enable verbose logging')
parser.add_argument('-c', action='store', dest='cookies', help='Space-separated list of name=value cookies', nargs='+', default=[])
results = parser.parse_args()
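
# Example invocations (the cookie names and values here are hypothetical):
#   python XssPy.py -u website.com -e -v
#   python XssPy.py -u website.com -c PHPSESSID=deadbeef token=42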
logger.setLevel(logging.DEBUG if results.verbose else logging.INFO)


def testPayload(payload, p, link):
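    """Type `payload` into form control `p`, submit the selected form, and
    record an XSS finding if the payload is reflected verbatim in the
    response to the submission."""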
    br.form[str(p.name)] = payload
    br.submit()
    if payload in br.response().read():  # payload reflected in the response => XSS
        color.log(logging.DEBUG, color.BOLD + color.GREEN, 'XSS found!')
        report = 'Link: %s, Payload: %s, Element: %s' % (str(link), payload, str(p.name))
        color.log(logging.INFO, color.BOLD + color.GREEN, report)
        xssLinks.append(report)
    br.back()


def initializeAndFind():
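    """Normalise the target URL (preferring HTTPS, falling back to HTTP),
    open it with any supplied cookies, and return the deduplicated list of
    same-site links to test; with -e, traversal goes one level deeper."""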
    if not results.url:  # no target URL was passed
        color.log(logging.INFO, color.RED, 'URL not provided correctly')
        return []

    firstDomains = []  # links discovered on the target
    allURLS = []
    allURLS.append(results.url)  # just one URL at the moment
    largeNumberOfUrls = []  # used for the comprehensive search

    color.log(logging.INFO, color.GREEN, 'Doing a short traversal.')  # a short traversal always runs; -e adds a deeper pass
    for url in allURLS:
        smallurl = str(url)
        # Test HTTPS/HTTP compatibility: prefer HTTPS, default to HTTP if any error is encountered
        try:
            test = httplib.HTTPSConnection(smallurl)
            test.request("GET", "/")
            response = test.getresponse()
            if response.status == 200 or response.status == 302:
                url = "https://www." + str(url)
            elif response.status == 301:
                loc = urlparse(response.getheader('Location'))  # parse the redirect target
                url = loc.scheme + '://' + loc.netloc
            else:
                url = "http://www." + str(url)
        except Exception:
            url = "http://www." + str(url)
        try:
            br.open(url)
            for cookie in results.cookies:
                color.log(logging.INFO, color.BLUE, 'Adding cookie: %s' % cookie)
                br.set_cookie(cookie)
            br.open(url)  # reload so the cookies take effect
            color.log(logging.INFO, color.GREEN, 'Finding all the links of the website ' + str(url))
            for link in br.links():  # collect same-site links
                if smallurl in str(link.absolute_url):
                    firstDomains.append(str(link.absolute_url))
            firstDomains = list(set(firstDomains))  # deduplicate
        except Exception:
            pass

    color.log(logging.INFO, color.GREEN, 'Number of links to test: ' + str(len(firstDomains)))
    if results.compOn:
        color.log(logging.INFO, color.GREEN, 'Doing a comprehensive traversal. This could take a while.')
        for link in firstDomains:
            try:
                br.open(link)
                for newlink in br.links():  # go one level deeper into each link
                    if smallurl in str(newlink.absolute_url):
                        largeNumberOfUrls.append(newlink.absolute_url)
            except Exception:
                pass
        firstDomains = list(set(firstDomains + largeNumberOfUrls))
        color.log(logging.INFO, color.GREEN, 'Total number of links to test has become: ' + str(len(firstDomains)))  # all links have been found
    return firstDomains


def findxss(firstDomains):
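    """Open every collected link, skip blacklisted file types, fill each
    text control of the first form with every payload, and report the
    links where a payload is reflected."""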
    color.log(logging.INFO, color.GREEN, 'Started finding XSS')
    if firstDomains:  # if there is at least one link
        for link in firstDomains:
            blacklisted = False
            y = str(link)
            color.log(logging.DEBUG, color.YELLOW, str(link))
            for ext in blacklist:
                if ext in y:
                    color.log(logging.DEBUG, color.RED, '\tNot a good URL to test')
                    blacklisted = True
                    break
            if not blacklisted:
                try:
                    br.open(str(link))  # open the link
                    forms = list(br.forms())
                    if forms:  # if a form exists, test it
                        params = forms[0]  # the first form on the page
                        br.select_form(nr=0)  # select it for submission
                        for p in params.controls:
                            par = str(p)
                            if 'TextControl' in par:  # only fill controls that accept text
                                color.log(logging.DEBUG, color.YELLOW, '\tParam: ' + str(p.name))
                                for item in payloads:
                                    testPayload(item, p, link)
                except Exception:
                    pass
        color.log(logging.DEBUG, color.GREEN + color.BOLD, 'The following links are vulnerable: ')
        for link in xssLinks:  # print all XSS findings
            color.log(logging.DEBUG, color.GREEN, '\t' + link)
    else:
        color.log(logging.INFO, color.RED + color.BOLD, '\tNo link found, exiting')


# call the functions
firstDomains = initializeAndFind()
findxss(firstDomains)