import copy
import re
from urllib.parse import urlparse, quote, unquote
from core.arjun import arjun
from core.browserEngine import browserEngine
from core.checker import checker
from core.colors import good, bad, end, info, green, red, que
import core.config
from core.config import xsschecker, minEfficiency
from core.dom import dom
from core.filterChecker import filterChecker
from core.generator import generator
from core.htmlParser import htmlParser
from core.requester import requester
from core.utils import getUrl, getParams
from core.wafDetector import wafDetector
from core.log import setup_logger
logger = setup_logger(__name__)


def scan(target, paramData, encoding, headers, delay, timeout, skipDOM, find, skip):
    GET, POST = (False, True) if paramData else (True, False)
    # If the user hasn't supplied the root url with http(s), we will handle it
    if not target.startswith('http'):
        try:
            response = requester('https://' + target, {},
                                 headers, GET, delay, timeout)
            target = 'https://' + target
        except:
            target = 'http://' + target
    logger.debug('Scan target: {}'.format(target))
    response = requester(target, {}, headers, GET, delay, timeout).text
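    # Passive DOM XSS check: scan the initial response for potentially dangerous sources/sinks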
    if not skipDOM:
        logger.run('Checking for DOM vulnerabilities')
        highlighted = dom(response)
        if highlighted:
            logger.good('Potentially vulnerable objects found')
            logger.red_line(level='good')
            for line in highlighted:
                logger.no_format(line, level='good')
            logger.red_line(level='good')
    host = urlparse(target).netloc  # Extracts host out of the url
    logger.debug('Host to scan: {}'.format(host))
    url = getUrl(target, GET)
    logger.debug('Url to scan: {}'.format(url))
    params = getParams(target, paramData, GET)
    logger.debug_json('Scan parameters:', params)
    if find:
        params = arjun(url, GET, headers, delay, timeout)
    if not params:
        logger.error('No parameters to test.')
        quit()
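    # Detect a web application firewall before active testing begins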
    WAF = wafDetector(
        url, {list(params.keys())[0]: xsschecker}, headers, GET, delay, timeout)
    if WAF:
        logger.error('WAF detected: %s%s%s' % (green, WAF, end))
    else:
        logger.good('WAF Status: %sOffline%s' % (green, end))
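    # Test each parameter in turn: inject the xsschecker probe and map where it reflects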
    for paramName in params.keys():
        paramsCopy = copy.deepcopy(params)
        logger.info('Testing parameter: %s' % paramName)
        if encoding:
            paramsCopy[paramName] = encoding(xsschecker)
        else:
            paramsCopy[paramName] = xsschecker
        response = requester(url, paramsCopy, headers, GET, delay, timeout)
        parsedResponse = htmlParser(response, encoding)
        occurences = parsedResponse[0]
        logger.debug('Scan occurences: {}'.format(occurences))
        positions = parsedResponse[1]
        logger.debug('Scan positions: {}'.format(positions))
        if not occurences:
            logger.error('No reflection found')
            continue
        else:
            logger.info('Reflections found: %i' % len(occurences))
        logger.run('Analysing reflections')
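        # Estimate how much of the probe survives filtering at each reflection point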
        efficiencies = filterChecker(
            url, paramsCopy, headers, GET, delay, occurences, timeout, encoding)
        logger.debug('Scan efficiencies: {}'.format(efficiencies))
        logger.run('Generating payloads')
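        # Craft context-aware payload vectors, grouped by confidence level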
        vectors = generator(occurences, response.text)
        total = 0
        for v in vectors.values():
            total += len(v)
        if total == 0:
            logger.error('No vectors were crafted.')
            continue
        logger.info('Payloads generated: %i' % total)
        progress = 0
        for confidence, vects in vectors.items():
            for vect in vects:
                if core.config.globalVariables['path']:
                    vect = vect.replace('/', '%2F')
                loggerVector = vect
                progress += 1
                logger.run('Progress: %i/%i\r' % (progress, total))
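                # High-confidence (10) vectors are verified by measuring reflection efficiency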
                if confidence == 10:
                    if not GET:
                        vect = unquote(vect)
                    efficiencies = checker(
                        url, paramsCopy, headers, GET, delay, vect, positions, timeout, encoding)
                    if not efficiencies:
                        for i in range(len(occurences)):
                            efficiencies.append(0)
                    bestEfficiency = max(efficiencies)
                    if bestEfficiency == 100 or (vect[0] == '\\' and bestEfficiency >= 95):
                        logger.red_line()
                        logger.good('Payload: %s' % loggerVector)
                        logger.info('Efficiency: %i' % bestEfficiency)
                        logger.info('Confidence: %i' % confidence)
                        if not skip:
                            choice = input(
                                '%s Would you like to continue scanning? [y/N] ' % que).lower()
                            if choice != 'y':
                                quit()
                    elif bestEfficiency > minEfficiency:
                        logger.red_line()
                        logger.good('Payload: %s' % loggerVector)
                        logger.info('Efficiency: %i' % bestEfficiency)
                        logger.info('Confidence: %i' % confidence)
                else:
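                    # Lower-confidence vectors: skip a/d3/details based payloads, then confirm the rest by rendering the response in the browser engine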
                    if re.search(r'<(a|d3|details)|lt;(a|d3|details)', vect.lower()):
                        continue
                    vect = unquote(vect)
                    if encoding:
                        paramsCopy[paramName] = encoding(vect)
                    else:
                        paramsCopy[paramName] = vect
                    response = requester(url, paramsCopy, headers, GET, delay, timeout).text
                    success = browserEngine(response)
                    if success:
                        logger.red_line()
                        logger.good('Payload: %s' % loggerVector)
                        logger.info('Efficiency: %i' % 100)
                        logger.info('Confidence: %i' % 10)
                        if not skip:
                            choice = input(
                                '%s Would you like to continue scanning? [y/N] ' % que).lower()
                            if choice != 'y':
                                quit()
        logger.no_format('')