More Modular Structure, Fixes #13
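This refactor moves the single-target scan, fuzzing, crawling, and bruteforce logic out of xsstrike.py into a new modes package, so each mode is importable on its own. A minimal sketch of the resulting dispatch, assuming the argparse-derived variables (fuzz, recursive, args_file, and friends) from the original script; the dispatch() wrapper itself is hypothetical, for illustration only:

# Hypothetical wrapper around the new control flow at the bottom of xsstrike.py.
from modes.bruteforcer import bruteforcer
from modes.scan import scan
from modes.singleFuzz import singleFuzz


def dispatch(target, paramData, payloadList, verbose, encoding, headers,
             delay, timeout, skipDOM, find, skip, fuzz, recursive, args_file):
    if fuzz:  # --fuzzer
        singleFuzz(target, paramData, verbose, encoding, headers, delay, timeout)
    elif not recursive:  # single target: bruteforce if a payload file was given
        if args_file:
            bruteforcer(target, paramData, payloadList, verbose, encoding,
                        headers, delay, timeout)
        else:
            scan(target, paramData, verbose, encoding, headers, delay,
                 timeout, skipDOM, find, skip)
    # the recursive branch (photon crawl + modes.crawl) stays in xsstrike.py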
modes/bruteforcer.py (new file, 34 lines)
@@ -0,0 +1,34 @@
import copy
from urllib.parse import urlparse, unquote

from core.colors import run, good, bad, green, end
from core.requester import requester
from core.utils import getUrl, getParams, verboseOutput

def bruteforcer(target, paramData, payloadList, verbose, encoding, headers, delay, timeout):
    GET, POST = (False, True) if paramData else (True, False)
    host = urlparse(target).netloc  # Extracts host out of the url
    verboseOutput(host, 'host', verbose)
    url = getUrl(target, GET)
    verboseOutput(url, 'url', verbose)
    params = getParams(target, paramData, GET)
    if not params:
        print('%s No parameters to test.' % bad)
        quit()
    verboseOutput(params, 'params', verbose)
    for paramName in params.keys():
        progress = 1
        paramsCopy = copy.deepcopy(params)
        for payload in payloadList:
            print('%s Bruteforcing %s[%s%s%s]%s: %i/%i' % (run, green, end, paramName, green, end, progress, len(payloadList)), end='\r')
            if encoding:
                payload = encoding(unquote(payload))
            paramsCopy[paramName] = payload
            response = requester(url, paramsCopy, headers,
                                 GET, delay, timeout).text
            if encoding:
                payload = encoding(payload)
            if payload in response:
                print('%s %s' % (good, payload))
            progress += 1
    print()
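A quick way to exercise the new module on its own; the target URL, payloads, and headers below are illustrative placeholders, not part of the commit:

# Hypothetical standalone driver for modes.bruteforcer (values are made up).
from modes.bruteforcer import bruteforcer

payloadList = ['<svg onload=alert(1)>', '"><script>alert(1)</script>']
bruteforcer('http://example.com/search.php?q=query',  # assumed test target
            '',                  # empty paramData -> GET mode, per the tuple unpacking above
            payloadList,
            False,               # verbose
            None,                # encoding; e.g. core.encoders.base64 to encode payloads
            {'User-Agent': 'XSStrike'},
            0,                   # delay between requests (seconds)
            10)                  # timeout (seconds)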
modes/crawl.py (new file, 66 lines)
@@ -0,0 +1,66 @@
import copy
import re

from core.colors import red, good, green, end
from core.config import xsschecker
from core.dom import dom
from core.filterChecker import filterChecker
from core.generator import generator
from core.htmlParser import htmlParser
from core.requester import requester

def crawl(scheme, host, main_url, form, domURL, verbose, blindXSS, blindPayload, headers, delay, timeout, skipDOM, encoding):
    if domURL and not skipDOM:
        response = requester(domURL, {}, headers, True, delay, timeout).text
        highlighted = dom(response)
        if highlighted:
            print('%s Potentially vulnerable objects found at %s' %
                  (good, domURL))
            print(red + ('-' * 60) + end)
            for line in highlighted:
                print(line)
            print(red + ('-' * 60) + end)
    if form:
        for each in form.values():
            url = each['action']
            if url:
                if url.startswith(main_url):
                    pass
                elif url.startswith('//') and url[2:].startswith(host):
                    url = scheme + '://' + url[2:]
                elif url.startswith('/'):
                    url = scheme + '://' + host + url
                elif re.match(r'\w', url[0]):
                    url = scheme + '://' + host + '/' + url
                method = each['method']
                GET = True if method == 'get' else False
                inputs = each['inputs']
                paramData = {}
                for one in inputs:
                    paramData[one['name']] = one['value']
                for paramName in paramData.keys():
                    paramsCopy = copy.deepcopy(paramData)
                    paramsCopy[paramName] = xsschecker
                    response = requester(
                        url, paramsCopy, headers, GET, delay, timeout)
                    parsedResponse = htmlParser(response, encoding)
                    occurences = parsedResponse[0]
                    positions = parsedResponse[1]
                    efficiencies = filterChecker(
                        url, paramsCopy, headers, GET, delay, occurences, timeout, encoding)
                    vectors = generator(occurences, response.text)
                    if vectors:
                        for confidence, vects in vectors.items():
                            try:
                                payload = list(vects)[0]
                                print('%s Vulnerable webpage: %s%s%s' %
                                      (good, green, url, end))
                                print('%s Vector for %s%s%s: %s' %
                                      (good, green, paramName, end, payload))
                                break
                            except IndexError:
                                pass
                    if blindXSS and blindPayload:
                        paramsCopy[paramName] = blindPayload
                        requester(url, paramsCopy, headers,
                                  GET, delay, timeout)
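The chain of startswith checks above rebuilds absolute URLs from form action attributes. A self-contained restatement of those rules; the absolutize() helper is illustrative, not part of the commit:

# Standalone restatement of crawl()'s form-action normalization rules.
import re

def absolutize(scheme, host, main_url, url):
    if url.startswith(main_url):                  # already absolute on this site
        return url
    elif url.startswith('//') and url[2:].startswith(host):
        return scheme + '://' + url[2:]           # protocol-relative -> add scheme
    elif url.startswith('/'):
        return scheme + '://' + host + url        # root-relative path
    elif re.match(r'\w', url[0]):
        return scheme + '://' + host + '/' + url  # bare relative path
    return url

assert absolutize('https', 'example.com', 'https://example.com',
                  '/login') == 'https://example.com/login'
assert absolutize('https', 'example.com', 'https://example.com',
                  'search.php') == 'https://example.com/search.php'

Note the ordering: the '//' check must run before the '/' check, or protocol-relative URLs would be mangled into root-relative ones.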
modes/scan.py (new file, 114 lines)
@@ -0,0 +1,114 @@
import copy
from urllib.parse import urlparse, quote, unquote

from core.arjun import arjun
from core.checker import checker
from core.colors import good, bad, end, info, green, run, red, que
from core.config import xsschecker, minEfficiency
from core.dom import dom
from core.filterChecker import filterChecker
from core.generator import generator
from core.htmlParser import htmlParser
from core.requester import requester
from core.utils import getUrl, getParams, verboseOutput
from core.wafDetector import wafDetector

def scan(target, paramData, verbose, encoding, headers, delay, timeout, skipDOM, find, skip):
    GET, POST = (False, True) if paramData else (True, False)
    # If the user hasn't supplied the root url with http(s), we will handle it
    if not target.startswith('http'):
        try:
            response = requester('https://' + target, {},
                                 headers, GET, delay, timeout)
            target = 'https://' + target
        except:
            target = 'http://' + target
    response = requester(target, {}, headers, GET, delay, timeout).text
    if not skipDOM:
        print('%s Checking for DOM vulnerabilities' % run)
        highlighted = dom(response)
        if highlighted:
            print('%s Potentially vulnerable objects found' % good)
            print(red + ('-' * 60) + end)
            for line in highlighted:
                print(line)
            print(red + ('-' * 60) + end)
    host = urlparse(target).netloc  # Extracts host out of the url
    verboseOutput(host, 'host', verbose)
    url = getUrl(target, GET)
    verboseOutput(url, 'url', verbose)
    params = getParams(target, paramData, GET)
    verboseOutput(params, 'params', verbose)
    if find:
        params = arjun(url, GET, headers, delay, timeout)
    if not params:
        print('%s No parameters to test.' % bad)
        quit()
    WAF = wafDetector(
        url, {list(params.keys())[0]: xsschecker}, headers, GET, delay, timeout)
    if WAF:
        print('%s WAF detected: %s%s%s' % (bad, green, WAF, end))
    else:
        print('%s WAF Status: %sOffline%s' % (good, green, end))

    for paramName in params.keys():
        paramsCopy = copy.deepcopy(params)
        print('%s Testing parameter: %s' % (info, paramName))
        if encoding:
            paramsCopy[paramName] = encoding(xsschecker)
        else:
            paramsCopy[paramName] = xsschecker
        response = requester(url, paramsCopy, headers, GET, delay, timeout)
        parsedResponse = htmlParser(response, encoding)
        occurences = parsedResponse[0]
        verboseOutput(occurences, 'occurences', verbose)
        positions = parsedResponse[1]
        verboseOutput(positions, 'positions', verbose)
        if not occurences:
            print('%s No reflection found' % bad)
            continue
        else:
            print('%s Reflections found: %s' % (info, len(occurences)))
        print('%s Analysing reflections' % run)
        efficiencies = filterChecker(
            url, paramsCopy, headers, GET, delay, occurences, timeout, encoding)
        verboseOutput(efficiencies, 'efficiencies', verbose)
        print('%s Generating payloads' % run)
        vectors = generator(occurences, response.text)
        verboseOutput(vectors, 'vectors', verbose)
        total = 0
        for v in vectors.values():
            total += len(v)
        if total == 0:
            print('%s No vectors were crafted' % bad)
            continue
        print('%s Payloads generated: %i' % (info, total))
        progress = 0
        for confidence, vects in vectors.items():
            for vect in vects:
                progress += 1
                if not GET:
                    vect = unquote(vect)
                efficiencies = checker(
                    url, paramsCopy, headers, GET, delay, vect, positions, timeout, encoding)
                if not GET:
                    vect = quote(vect)
                if not efficiencies:
                    for i in range(len(occurences)):
                        efficiencies.append(0)
                bestEfficiency = max(efficiencies)
                if bestEfficiency == 100 or (vect[0] == '\\' and bestEfficiency >= 95):
                    print(('%s-%s' % (red, end)) * 60)
                    print('%s Payload: %s' % (good, vect))
                    print('%s Efficiency: %i' % (info, bestEfficiency))
                    print('%s Confidence: %i' % (info, confidence))
                    if not skip:
                        choice = input(
                            '%s Would you like to continue scanning? [y/N] ' % que).lower()
                        if choice != 'y':
                            quit()
                elif bestEfficiency > minEfficiency:
                    print(('%s-%s' % (red, end)) * 60)
                    print('%s Payload: %s' % (good, vect))
                    print('%s Efficiency: %i' % (info, bestEfficiency))
                    print('%s Confidence: %i' % (info, confidence))
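The two thresholds at the end of scan() decide how a tested vector is reported. A small sketch of that rule with assumed numbers; the real minEfficiency lives in core.config and may differ:

# Sketch of scan()'s reporting rule; efficiency values here are invented.
minEfficiency = 90  # assumed; imported from core.config in the real module


def verdict(vect, efficiencies):
    bestEfficiency = max(efficiencies) if efficiencies else 0
    if bestEfficiency == 100 or (vect[0] == '\\' and bestEfficiency >= 95):
        return 'report, then prompt to continue unless --skip was passed'
    elif bestEfficiency > minEfficiency:
        return 'report and keep going'
    return 'silently discarded'


print(verdict('<svg onload=alert(1)>', [100, 42]))          # perfect reflection
print(verdict('\\u003csvg onload=alert(1)\\u003e', [96]))   # escaped-vector rule
print(verdict('<svg onload=confirm(1)>', [70]))             # below threshold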
modes/singleFuzz.py (new file, 42 lines)
@@ -0,0 +1,42 @@
import copy
from urllib.parse import urlparse

from core.colors import bad, green, end, good, info
from core.config import xsschecker
from core.fuzzer import fuzzer
from core.requester import requester
from core.utils import getUrl, getParams, verboseOutput
from core.wafDetector import wafDetector

def singleFuzz(target, paramData, verbose, encoding, headers, delay, timeout):
    GET, POST = (False, True) if paramData else (True, False)
    # If the user hasn't supplied the root url with http(s), we will handle it
    if not target.startswith('http'):
        try:
            response = requester('https://' + target, {},
                                 headers, GET, delay, timeout)
            target = 'https://' + target
        except:
            target = 'http://' + target
    host = urlparse(target).netloc  # Extracts host out of the url
    verboseOutput(host, 'host', verbose)
    url = getUrl(target, GET)
    verboseOutput(url, 'url', verbose)
    params = getParams(target, paramData, GET)
    verboseOutput(params, 'params', verbose)
    if not params:
        print('%s No parameters to test.' % bad)
        quit()
    WAF = wafDetector(
        url, {list(params.keys())[0]: xsschecker}, headers, GET, delay, timeout)
    if WAF:
        print('%s WAF detected: %s%s%s' % (bad, green, WAF, end))
    else:
        print('%s WAF Status: %sOffline%s' % (good, green, end))

    for paramName in params.keys():
        print('%s Fuzzing parameter: %s' % (info, paramName))
        paramsCopy = copy.deepcopy(params)
        paramsCopy[paramName] = xsschecker
        fuzzer(url, paramsCopy, headers, GET,
               delay, timeout, WAF, encoding)
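As with the other modes, singleFuzz can now be imported and called directly; an illustrative invocation with an assumed target and headers:

# Hypothetical direct call into the new fuzzing mode (values are made up).
from modes.singleFuzz import singleFuzz

singleFuzz('example.com/page.php?item=1',  # scheme is prepended automatically, per above
           '',                             # empty paramData -> GET mode
           True,                           # verbose
           None,                           # encoding
           {'User-Agent': 'XSStrike'},
           0,                              # delay
           10)                             # timeout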
xsstrike.py (233 changed lines)
@@ -4,27 +4,21 @@ from __future__ import print_function
 
 # Let's import whatever we need from standard lib
 import argparse
-import copy
-import re
 
 # ... and from core lib
 import core.config
-from core.arjun import arjun
-from core.checker import checker
-from core.colors import bad, end, good, green, info, que, red, run, white
-from core.config import blindPayload, minEfficiency, xsschecker
-from core.dom import dom
+from core.colors import end, info, red, run, white, bad
+from core.config import blindPayload
 from core.encoders import base64
-from core.filterChecker import filterChecker
-from core.fuzzer import fuzzer
-from core.generator import generator
-from core.htmlParser import htmlParser
 from core.photon import photon
 from core.prompt import prompt
-from core.requester import requester
 from core.updater import updater
-from core.utils import extractHeaders, getParams, getUrl, verboseOutput
-from core.wafDetector import wafDetector
+from core.utils import extractHeaders, verboseOutput
+
+from modes.bruteforcer import bruteforcer
+from modes.crawl import crawl
+from modes.scan import scan
+from modes.singleFuzz import singleFuzz
 
 # Just a fancy ass banner
 print('''%s
@@ -33,7 +27,7 @@ print('''%s
 
 try:
     import concurrent.futures
-    from urllib.parse import quote, unquote, urlparse
+    from urllib.parse import urlparse
 except ImportError:  # throws error in python2
     print('%s XSStrike isn\'t compatible with python2.\n Use python > 3.4 to run XSStrike.' % bad)
     quit()
@@ -126,208 +120,13 @@ if not target: # if the user hasn't supplied a url
     print('\n' + parser.format_help().lower())
     quit()
 
-
-def singleTarget(target, paramData, verbose, encoding):
-    GET, POST = (False, True) if paramData else (True, False)
-    # If the user hasn't supplied the root url with http(s), we will handle it
-    if not target.startswith('http'):
-        try:
-            response = requester('https://' + target, {},
-                                 headers, GET, delay, timeout)
-            target = 'https://' + target
-        except:
-            target = 'http://' + target
-    response = requester(target, {}, headers, GET, delay, timeout).text
-    if not skipDOM:
-        print('%s Checking for DOM vulnerabilities' % run)
-        highlighted = dom(response)
-        if highlighted:
-            print('%s Potentially vulnerable objects found' % good)
-            print(red + ('-' * 60) + end)
-            for line in highlighted:
-                print(line)
-            print(red + ('-' * 60) + end)
-    host = urlparse(target).netloc  # Extracts host out of the url
-    verboseOutput(host, 'host', verbose)
-    url = getUrl(target, GET)
-    verboseOutput(url, 'url', verbose)
-    params = getParams(target, paramData, GET)
-    verboseOutput(params, 'params', verbose)
-    if find:
-        params = arjun(url, GET, headers, delay, timeout)
-    if not params:
-        print('%s No parameters to test.' % bad)
-        quit()
-    WAF = wafDetector(
-        url, {list(params.keys())[0]: xsschecker}, headers, GET, delay, timeout)
-    if WAF:
-        print('%s WAF detected: %s%s%s' % (bad, green, WAF, end))
-    else:
-        print('%s WAF Status: %sOffline%s' % (good, green, end))
-
-    if fuzz:
-        for paramName in params.keys():
-            print('%s Fuzzing parameter: %s' % (info, paramName))
-            paramsCopy = copy.deepcopy(params)
-            paramsCopy[paramName] = xsschecker
-            fuzzer(url, paramsCopy, headers, GET,
-                   delay, timeout, WAF, encoding)
-        quit()
-
-    for paramName in params.keys():
-        paramsCopy = copy.deepcopy(params)
-        print('%s Testing parameter: %s' % (info, paramName))
-        if encoding:
-            paramsCopy[paramName] = encoding(xsschecker)
-        else:
-            paramsCopy[paramName] = xsschecker
-        response = requester(url, paramsCopy, headers, GET, delay, timeout)
-        parsedResponse = htmlParser(response, encoding)
-        occurences = parsedResponse[0]
-        verboseOutput(occurences, 'occurences', verbose)
-        positions = parsedResponse[1]
-        verboseOutput(positions, 'positions', verbose)
-        if not occurences:
-            print('%s No reflection found' % bad)
-            continue
-        else:
-            print('%s Reflections found: %s' % (info, len(occurences)))
-        print('%s Analysing reflections' % run)
-        efficiencies = filterChecker(
-            url, paramsCopy, headers, GET, delay, occurences, timeout, encoding)
-        verboseOutput(efficiencies, 'efficiencies', verbose)
-        print('%s Generating payloads' % run)
-        vectors = generator(occurences, response.text)
-        verboseOutput(vectors, 'vectors', verbose)
-        total = 0
-        for v in vectors.values():
-            total += len(v)
-        if total == 0:
-            print('%s No vectors were crafted' % bad)
-            continue
-        print('%s Payloads generated: %i' % (info, total))
-        progress = 0
-        for confidence, vects in vectors.items():
-            for vect in vects:
-                progress += 1
-                if not GET:
-                    vect = unquote(vect)
-                efficiencies = checker(
-                    url, paramsCopy, headers, GET, delay, vect, positions, timeout, encoding)
-                if not GET:
-                    vect = quote(vect)
-                if not efficiencies:
-                    for i in range(len(occurences)):
-                        efficiencies.append(0)
-                bestEfficiency = max(efficiencies)
-                if bestEfficiency == 100 or (vect[0] == '\\' and bestEfficiency >= 95):
-                    print(('%s-%s' % (red, end)) * 60)
-                    print('%s Payload: %s' % (good, vect))
-                    print('%s Efficiency: %i' % (info, bestEfficiency))
-                    print('%s Confidence: %i' % (info, confidence))
-                    if not skip:
-                        choice = input(
-                            '%s Would you like to continue scanning? [y/N] ' % que).lower()
-                        if choice != 'y':
-                            quit()
-                elif bestEfficiency > minEfficiency:
-                    print(('%s-%s' % (red, end)) * 60)
-                    print('%s Payload: %s' % (good, vect))
-                    print('%s Efficiency: %i' % (info, bestEfficiency))
-                    print('%s Confidence: %i' % (info, confidence))
-
-
-def multiTargets(scheme, host, main_url, form, domURL, verbose, blindXSS, blindPayload, headers, delay, timeout):
-    if domURL and not skipDOM:
-        response = requester(domURL, {}, headers, True, delay, timeout).text
-        highlighted = dom(response)
-        if highlighted:
-            print('%s Potentially vulnerable objects found at %s' %
-                  (good, domURL))
-            print(red + ('-' * 60) + end)
-            for line in highlighted:
-                print(line)
-            print(red + ('-' * 60) + end)
-    if form:
-        for each in form.values():
-            url = each['action']
-            if url:
-                if url.startswith(main_url):
-                    pass
-                elif url.startswith('//') and url[2:].startswith(host):
-                    url = scheme + '://' + url[2:]
-                elif url.startswith('/'):
-                    url = scheme + '://' + host + url
-                elif re.match(r'\w', url[0]):
-                    url = scheme + '://' + host + '/' + url
-                method = each['method']
-                GET = True if method == 'get' else False
-                inputs = each['inputs']
-                paramData = {}
-                for one in inputs:
-                    paramData[one['name']] = one['value']
-                for paramName in paramData.keys():
-                    paramsCopy = copy.deepcopy(paramData)
-                    paramsCopy[paramName] = xsschecker
-                    response = requester(
-                        url, paramsCopy, headers, GET, delay, timeout)
-                    parsedResponse = htmlParser(response, encoding)
-                    occurences = parsedResponse[0]
-                    positions = parsedResponse[1]
-                    efficiencies = filterChecker(
-                        url, paramsCopy, headers, GET, delay, occurences, timeout, encoding)
-                    vectors = generator(occurences, response.text)
-                    if vectors:
-                        for confidence, vects in vectors.items():
-                            try:
-                                payload = list(vects)[0]
-                                print('%s Vulnerable webpage: %s%s%s' %
-                                      (good, green, url, end))
-                                print('%s Vector for %s%s%s: %s' %
-                                      (good, green, paramName, end, payload))
-                                break
-                            except IndexError:
-                                pass
-                    if blindXSS and blindPayload:
-                        paramsCopy[paramName] = blindPayload
-                        requester(url, paramsCopy, headers,
-                                  GET, delay, timeout)
-
-
-def bruteforcer(target, paramData, payloadList, verbose, encoding):
-    GET, POST = (False, True) if paramData else (True, False)
-    host = urlparse(target).netloc  # Extracts host out of the url
-    verboseOutput(host, 'host', verbose)
-    url = getUrl(target, GET)
-    verboseOutput(url, 'url', verbose)
-    params = getParams(target, paramData, GET)
-    if not params:
-        print('%s No parameters to test.' % bad)
-        quit()
-    verboseOutput(params, 'params', verbose)
-    for paramName in params.keys():
-        progress = 1
-        paramsCopy = copy.deepcopy(params)
-        for payload in payloadList:
-            print ('%s Progress: %i/%i' % (run, progress, len(payloadList)), end='\r')
-            if encoding:
-                payload = encoding(unquote(payload))
-            paramsCopy[paramName] = payload
-            response = requester(url, paramsCopy, headers,
-                                 GET, delay, timeout).text
-            if encoding:
-                payload = encoding(payload)
-            if payload in response:
-                print('%s %s' % (good, payload))
-            progress += 1
-    print ('')
-
-
-if not recursive:
+if fuzz:
+    singleFuzz(target, paramData, verbose, encoding, headers, delay, timeout)
+elif not recursive:
     if args_file:
-        bruteforcer(target, paramData, payloadList, verbose, encoding)
+        bruteforcer(target, paramData, payloadList, verbose, encoding, headers, delay, timeout)
     else:
-        singleTarget(target, paramData, verbose, encoding)
+        scan(target, paramData, verbose, encoding, headers, delay, timeout, skipDOM, find, skip)
 else:
     print('%s Crawling the target' % run)
     scheme = urlparse(target).scheme
@@ -346,8 +145,8 @@ else:
     for i in range(difference):
         domURLs.append(0)
     threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=threadCount)
-    futures = (threadpool.submit(multiTargets, scheme, host, main_url, form, domURL, verbose,
-                                 blindXSS, blindPayload, headers, delay, timeout) for form, domURL in zip(forms, domURLs))
+    futures = (threadpool.submit(crawl, scheme, host, main_url, form, domURL, verbose,
+                                 blindXSS, blindPayload, headers, delay, timeout, skipDOM, encoding) for form, domURL in zip(forms, domURLs))
     for i, _ in enumerate(concurrent.futures.as_completed(futures)):
         if i + 1 == len(forms) or (i + 1) % threadCount == 0:
             print('%s Progress: %i/%i' % (info, i + 1, len(forms)), end='\r')
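The crawl fan-out above is the one piece that stays in xsstrike.py, and the pattern generalizes. A self-contained sketch with a stub in place of crawl(); all names and values here are invented for illustration:

# Generic restatement of the thread-pool fan-out used above.
import concurrent.futures


def crawl_stub(form, domURL):
    return form, domURL  # stands in for crawl(scheme, host, ...)


forms = ['form1', 'form2', 'form3']
domURLs = ['http://a/', 'http://b/', 0]  # padded with 0s to match len(forms)
threadCount = 2

threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=threadCount)
futures = (threadpool.submit(crawl_stub, form, domURL)
           for form, domURL in zip(forms, domURLs))
for i, _ in enumerate(concurrent.futures.as_completed(futures)):
    if i + 1 == len(forms) or (i + 1) % threadCount == 0:
        print('Progress: %i/%i' % (i + 1, len(forms)), end='\r')
print()  # move off the carriage-return progress line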