# XSStrike/modes/scan.py

import copy
import re
from urllib.parse import urlparse, quote, unquote
from core.arjun import arjun
from core.checker import checker
from core.colors import good, bad, end, info, green, red, que
import core.config
from core.config import xsschecker, minEfficiency
from core.dom import dom
from core.filterChecker import filterChecker
from core.generator import generator
from core.htmlParser import htmlParser
from core.requester import requester
from core.utils import getUrl, getParams, getVar
from core.wafDetector import wafDetector
from core.log import setup_logger
logger = setup_logger(__name__)


def scan(target, paramData, encoding, headers, delay, timeout, skipDOM, find, skip):
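    """Scan one target URL: optional DOM-XSS check, then per-parameter reflected XSS testing."""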
    GET, POST = (False, True) if paramData else (True, False)
    # If the user hasn't supplied the root url with http(s), we will handle it
    if not target.startswith('http'):
        try:
            # Try HTTPS first; fall back to plain HTTP if the probe fails
            response = requester('https://' + target, {},
                                 headers, GET, delay, timeout)
            target = 'https://' + target
        except Exception:
            target = 'http://' + target
    logger.debug('Scan target: {}'.format(target))
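    # Fetch the page once up front; this baseline response also feeds the DOM check below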
    response = requester(target, {}, headers, GET, delay, timeout).text
    if not skipDOM:
        logger.run('Checking for DOM vulnerabilities')
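        # dom() scans the response for known DOM sources/sinks and returns highlighted lines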
        highlighted = dom(response)
        if highlighted:
            logger.good('Potentially vulnerable objects found')
            logger.red_line(level='good')
            for line in highlighted:
                logger.no_format(line, level='good')
            logger.red_line(level='good')
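
    # Work out what to attack: host, base URL and the parameter set to fuzz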
    host = urlparse(target).netloc  # Extracts host out of the url
    logger.debug('Host to scan: {}'.format(host))
    url = getUrl(target, GET)
    logger.debug('Url to scan: {}'.format(url))
    params = getParams(target, paramData, GET)
    logger.debug_json('Scan parameters:', params)
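    # When parameter discovery is requested (find=True), let the bundled Arjun module guess names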
    if find:
        params = arjun(url, GET, headers, delay, timeout)

    if not params:
        logger.error('No parameters to test.')
        quit()
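
    # Send the xsschecker string through the first parameter once to fingerprint any WAF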
    WAF = wafDetector(
        url, {list(params.keys())[0]: xsschecker}, headers, GET, delay, timeout)
    if WAF:
        logger.error('WAF detected: %s%s%s' % (green, WAF, end))
    else:
        logger.good('WAF Status: %sOffline%s' % (green, end))

    for paramName in params.keys():
        paramsCopy = copy.deepcopy(params)
        logger.info('Testing parameter: %s' % paramName)
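        # Inject the harmless xsschecker token first to locate reflections in the response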
        if encoding:
            paramsCopy[paramName] = encoding(xsschecker)
        else:
            paramsCopy[paramName] = xsschecker
        response = requester(url, paramsCopy, headers, GET, delay, timeout)
        occurences = htmlParser(response, encoding)
        positions = occurences.keys()
        logger.debug('Scan occurences: {}'.format(occurences))
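        # occurences maps reflection positions to parsed context details; empty means no reflection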
        if not occurences:
            logger.error('No reflection found')
            continue
        logger.info('Reflections found: %i' % len(occurences))
        logger.run('Analysing reflections')
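        # filterChecker probes which characters survive filtering at each reflection point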
        efficiencies = filterChecker(
            url, paramsCopy, headers, GET, delay, occurences, timeout, encoding)
        logger.debug('Scan efficiencies: {}'.format(efficiencies))
        logger.run('Generating payloads')
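        # generator builds context-aware payloads, keyed by a confidence score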
        vectors = generator(occurences, response.text)
        total = sum(len(v) for v in vectors.values())
        if total == 0:
            logger.error('No vectors were crafted.')
            continue
        logger.info('Payloads generated: %i' % total)
        progress = 0
        for confidence, vects in vectors.items():
            for vect in vects:
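                # In path-injection mode the payload rides in the URL path, so escape slashes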
                if core.config.globalVariables['path']:
                    vect = vect.replace('/', '%2F')
                loggerVector = vect
                progress += 1
                logger.run('Progress: %i/%i\r' % (progress, total))
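                # POST bodies are sent raw, so strip the URL-encoding baked into the vector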
                if not GET:
                    vect = unquote(vect)
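                # Re-test this concrete payload at every known reflection position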
                efficiencies = checker(
                    url, paramsCopy, headers, GET, delay, vect, positions, timeout, encoding)
                if not efficiencies:
                    efficiencies = [0] * len(occurences)
                bestEfficiency = max(efficiencies)
                # A perfect score, or >= 95 for a backslash-prefixed vector, is a confirmed hit
                if bestEfficiency == 100 or (vect[0] == '\\' and bestEfficiency >= 95):
                    logger.red_line()
                    logger.good('Payload: %s' % loggerVector)
                    logger.info('Efficiency: %i' % bestEfficiency)
                    logger.info('Confidence: %i' % confidence)
                    if not skip:
                        choice = input(
                            '%s Would you like to continue scanning? [y/N] ' % que).lower()
                        if choice != 'y':
                            quit()
                elif bestEfficiency > minEfficiency:
                    logger.red_line()
                    logger.good('Payload: %s' % loggerVector)
                    logger.info('Efficiency: %i' % bestEfficiency)
                    logger.info('Confidence: %i' % confidence)
        logger.no_format('')
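
# A minimal usage sketch (argument values here are illustrative assumptions;
# in XSStrike proper these come from the command-line handling in xsstrike.py):
#
#   scan('http://example.com/search?q=query', None, None,
#        {'User-Agent': 'xsstrike'}, delay=0, timeout=10,
#        skipDOM=False, find=False, skip=False)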