stable release
@@ -6,7 +6,7 @@ from core.config import blindParams, xsschecker, threadCount
 def checky(param, paraNames, url, headers, GET, delay):
     if param not in paraNames:
-        response = requester(url, {param : xsschecker}, headers, GET, delay).text
+        response = requester(url, {param : xsschecker}, headers, GET, delay, timeout).text
         if '\'%s\'' % xsschecker in response or '"%s"' % xsschecker in response or ' %s ' % xsschecker in response:
             paraNames[param] = ''
             print('%s Valid parameter found : %s%s%s' % (good, green, param, end))

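The check above treats a parameter as valid when the probe string comes back wrapped in quotes or spaces, i.e. when it visibly influenced the response. A minimal stand-alone illustration of that test, using a made-up probe value rather than the project's real xsschecker string:

probe = 'd3t3ctm3'  # placeholder probe value, not the real xsschecker string
response = '<input name="q" value="d3t3ctm3">'

# Reflection wrapped in quotes or spaces means the parameter influenced the page
if '\'%s\'' % probe in response or '"%s"' % probe in response or ' %s ' % probe in response:
    print('Valid parameter found')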
@@ -6,10 +6,10 @@ from urllib.parse import quote_plus
 from core.requester import requester
 from core.utils import replacer, fillHoles

-def checker(url, params, headers, GET, delay, payload, positions):
+def checker(url, params, headers, GET, delay, payload, positions, timeout):
     checkString = 'st4r7s' + payload
     paramsCopy = copy.deepcopy(params)
-    response = requester(url, replacer(paramsCopy, xsschecker, checkString), headers, GET, delay).text.lower()
+    response = requester(url, replacer(paramsCopy, xsschecker, checkString), headers, GET, delay, timeout).text.lower()
     reflectedPositions = []
     for match in re.finditer('st4r7s', response):
         reflectedPositions.append(match.start())
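For context, the loop above records every offset at which the 'st4r7s' marker survives in the response; those offsets feed the efficiency scoring later on. A small self-contained sketch of the same position-finding step (the sample response string is invented):

import re

response = '<p>st4r7s</p><div>st4r7s</div>'.lower()
reflectedPositions = []
for match in re.finditer('st4r7s', response):
    reflectedPositions.append(match.start())
print(reflectedPositions)  # [3, 18]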
@@ -16,5 +16,5 @@ else:
     info = '\033[93m[!]\033[0m'
     que = '\033[94m[?]\033[0m'
     bad = '\033[91m[-]\033[0m'
-    good = '\033[32m[+]\033[0m'
+    good = '\033[92m[+]\033[0m'
     run = '\033[97m[~]\033[0m'
@@ -4,7 +4,7 @@ minEfficiency = 90

 delay = 0
 threadCount = 10
-timeout = 7
+timeout = 10

 specialAttributes = ['srcdoc', 'src']

@@ -3,7 +3,7 @@ from core.checker import checker
 from core.config import xsschecker
 from core.requester import requester

-def filterChecker(url, params, headers, GET, delay, occurences):
+def filterChecker(url, params, headers, GET, delay, occurences, timeout):
     positions = {}
     environments = set(['<', '>'])
     sortedEfficiencies = {}
@@ -25,7 +25,7 @@ def filterChecker(url, params, headers, GET, delay, occurences):
         if environment == '':
             efficiencies = [100 for i in range(len(occurences))]
         else:
-            efficiencies = checker(url, params, headers, GET, delay, environment, positions)
+            efficiencies = checker(url, params, headers, GET, delay, environment, positions, timeout)
         if len(efficiencies) < len(occurences):
             for i in range(len(occurences) - len(efficiencies)):
                 efficiencies.append(0)

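The padding branch above keeps the efficiency list aligned with the reflection list: if the probe returns fewer scores than there are occurrences, the missing entries are scored 0. A minimal sketch with made-up values:

occurences = {0: 'reflection A', 1: 'reflection B', 2: 'reflection C'}
efficiencies = [100, 90]  # one score missing

if len(efficiencies) < len(occurences):
    for i in range(len(occurences) - len(efficiencies)):
        efficiencies.append(0)

print(efficiencies)  # [100, 90, 0]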
@@ -16,7 +16,7 @@ def counter(string):
             count += 1
     return count

-def fuzzer(url, params, headers, GET, delay, WAF):
+def fuzzer(url, params, headers, GET, delay, timeout, WAF):
     for fuzz in fuzzes:
         if delay == 0:
             delay = 6
@@ -24,7 +24,7 @@ def fuzzer(url, params, headers, GET, delay, WAF):
         sleep(t)
         paramsCopy = copy.deepcopy(params)
         try:
-            response = requester(url, replacer(paramsCopy, xsschecker, fuzz), headers, GET, delay/2)
+            response = requester(url, replacer(paramsCopy, xsschecker, fuzz), headers, GET, delay/2, timeout)
         except:
             print ('\n%s WAF is dropping suspicious requests.' % bad)
             if delay == 0:

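The try/except around the fuzzing request matters more now that a timeout is enforced: a payload that the WAF drops, or that simply hangs past the timeout, raises an exception instead of stalling the scan. A hedged, stand-alone sketch of that pattern (the URL and parameter name are placeholders, not project code):

import requests

def probe(url, payload, timeout):
    try:
        # A blocked or hanging request surfaces as an exception after `timeout` seconds
        return requests.get(url, params={'q': payload}, timeout=timeout, verify=False)
    except requests.exceptions.RequestException:
        return None  # treated as "request dropped"

if probe('http://example.com/search', '<svg onload=confirm()>', timeout=10) is None:
    print('WAF is dropping suspicious requests.')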
@@ -8,7 +8,7 @@ from core.zetanize import zetanize
 from core.requester import requester
 from core.utils import getUrl, getParams

-def photon(seedUrl, headers, level, threadCount):
+def photon(seedUrl, headers, level, threadCount, delay, timeout):
     forms = [] # web forms
     processed = set() # urls that have been crawled
     storage = set() # urls that belong to the target i.e. in-scope
@@ -26,7 +26,7 @@ def photon(seedUrl, headers, level, threadCount):
             for name, value in params.items():
                 inps.append({'name': name, 'value': value})
             forms.append({0: {'action': url, 'method': 'get', 'inputs': inps}})
-        response = requester(url, params, headers, True, 0).text
+        response = requester(url, params, headers, True, delay, timeout).text
         forms.append(zetanize(response))
         matches = findall(r'<[aA].*href=["\']{0,1}(.*?)["\']', response)
         for link in matches: # iterate over the matches
@@ -47,4 +47,4 @@ def photon(seedUrl, headers, level, threadCount):
         futures = (threadpool.submit(rec, url) for url in urls)
         for i, _ in enumerate(concurrent.futures.as_completed(futures)):
             pass
-        return [forms, processed]
+    return [forms, processed]
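With the new signature the crawler receives the user's delay and timeout directly, so every request made while crawling honours them. A hedged usage sketch (the target URL, header value and import path are illustrative assumptions):

from core.photon import photon

# forms: parsed web forms, processed: every in-scope URL that was crawled
forms, processed = photon('http://example.com', {'User-Agent': '$'},
                          level=2, threadCount=10, delay=0, timeout=10)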
@@ -5,7 +5,7 @@ import requests

 warnings.filterwarnings('ignore') # Disable SSL related warnings

-def requester(url, data, headers, GET, delay):
+def requester(url, data, headers, GET, delay, timeout):
     time.sleep(delay)
     user_agents = ['Mozilla/5.0 (X11; Linux i686; rv:60.0) Gecko/20100101 Firefox/60.0',
                    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.113 Safari/537.36'
@@ -15,7 +15,7 @@ def requester(url, data, headers, GET, delay):
     elif headers['User-Agent'] == '$':
         headers['User-Agent'] = random.choice(user_agents)
     if GET:
-        response = requests.get(url, params=data, headers=headers, verify=False)
+        response = requests.get(url, params=data, headers=headers, timeout=timeout, verify=False)
     else:
-        response = requests.post(url, data=data, headers=headers, verify=False)
+        response = requests.post(url, data=data, headers=headers, timeout=timeout, verify=False)
     return response

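The net effect of the requester change is that the timeout finally reaches the underlying requests call; previously a non-responsive target could hang a thread indefinitely. A minimal sketch of the same pass-through outside the project (function name and URL are illustrative only):

import requests

def fetch(url, data, GET=True, timeout=10):
    # Mirrors the pattern above: the timeout is forwarded to requests
    if GET:
        return requests.get(url, params=data, timeout=timeout, verify=False)
    return requests.post(url, data=data, timeout=timeout, verify=False)

try:
    fetch('http://example.com', {'q': 'test'}, timeout=7)
except requests.exceptions.Timeout:
    print('target did not respond within 7 seconds')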
@@ -1,10 +1,10 @@
 import re
 from core.requester import requester

-def wafDetector(url, params, headers, GET, delay):
-    noise = '<script>alert(1)</script>' #a payload which is noisy enough to provoke the WAF
+def wafDetector(url, params, headers, GET, delay, timeout):
+    noise = '<script>alert("XSS")</script>' #a payload which is noisy enough to provoke the WAF
     params['xss'] = noise
-    response = requester(url, params, headers, GET, delay) # Opens the noise injected payload
+    response = requester(url, params, headers, GET, delay, timeout) # Opens the noise injected payload
     code = str(response.status_code)
     response_headers = str(response.headers)
     response_text = response.text.lower()

xsstrike.py
@@ -9,6 +9,8 @@ print('''%s
 \tXSStrike %sv3.0-beta
 %s''' % (red, white, end))

+changes = '''stable release'''
+
 try:
     from urllib.parse import quote_plus, unquote, urlparse
 except ImportError: # throws error in python2
@@ -47,7 +49,7 @@ parser.add_argument('-u', '--url', help='url', dest='target')
 parser.add_argument('--data', help='post data', dest='data')
 parser.add_argument('--fuzzer', help='fuzzer', dest='fuzz', action='store_true')
 parser.add_argument('--update', help='update', dest='update', action='store_true')
-parser.add_argument('--timeout', help='timeout', dest='timeout', action='store_true')
+parser.add_argument('--timeout', help='timeout', dest='timeout', type=int)
 parser.add_argument('--params', help='find params', dest='find', action='store_true')
 parser.add_argument('--crawl', help='crawl', dest='recursive', action='store_true')
 parser.add_argument('-l', '--level', help='level of crawling', dest='level', type=int)
@@ -71,8 +73,8 @@ skipDOM = args.skipDOM
 skipPOC = args.skipPOC
 level = args.level or 2
 delay = args.delay or core.config.delay
-threadCount = args.threads or core.config.threadCount
 timeout = args.timeout or core.config.timeout
+threadCount = args.threads or core.config.threadCount

 if args.update: # if the user has supplied --update argument
     updater()

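The switch from action='store_true' to type=int is what makes the flag usable: with store_true, args.timeout is a boolean, so "args.timeout or core.config.timeout" evaluates to True whenever the flag is passed instead of a number of seconds. A small illustration of the corrected behaviour (values are examples):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--timeout', help='timeout', dest='timeout', type=int)
args = parser.parse_args(['--timeout', '5'])

timeout = args.timeout or 10  # 10 stands in for core.config.timeout
print(timeout)  # 5 -- with action='store_true' this would have been True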
@@ -110,10 +112,10 @@ def singleTarget(target, paramData):
     url = getUrl(target, paramData, GET)
     params = getParams(target, paramData, GET)
     if args.find:
-        params = arjun(url, GET, headers, delay)
+        params = arjun(url, GET, headers, delay, timeout)
         if not params:
             quit()
-    WAF = wafDetector(url, {list(params.keys())[0] : xsschecker}, headers, GET, delay)
+    WAF = wafDetector(url, {list(params.keys())[0] : xsschecker}, headers, GET, delay, timeout)
     if WAF:
         print ('%s WAF detected: %s%s%s' % (bad, green, WAF, end))
     else:
@@ -124,14 +126,14 @@ def singleTarget(target, paramData):
         print ('%s Fuzzing parameter: %s' % (info, paramName))
         paramsCopy = copy.deepcopy(params)
         paramsCopy[paramName] = xsschecker
-        fuzzer(url, paramsCopy, headers, GET, delay, WAF)
+        fuzzer(url, paramsCopy, headers, GET, delay, timeout, WAF)
         quit()

     for paramName in params.keys():
         paramsCopy = copy.deepcopy(params)
         print ('%s Testing parameter: %s' % (info, paramName))
         paramsCopy[paramName] = xsschecker
-        response = requester(url, paramsCopy, headers, GET, delay)
+        response = requester(url, paramsCopy, headers, GET, delay, timeout)
         parsedResponse = htmlParser(response)
         occurences = parsedResponse[0]
         positions = parsedResponse[1]
@@ -141,7 +143,7 @@ def singleTarget(target, paramData):
         else:
             print ('%s Reflections found: %s' % (info, len(occurences)))
             print ('%s Analysing reflections' % run)
-            efficiencies = filterChecker(url, paramsCopy, headers, GET, delay, occurences)
+            efficiencies = filterChecker(url, paramsCopy, headers, GET, delay, occurences, timeout)
             print ('%s Generating payloads' % run)
             vectors = generator(occurences, response.text)
             total = 0
@@ -158,7 +160,7 @@ def singleTarget(target, paramData):
                 print ('%s Payloads tried [%i/%i]' % (run, progress, total), end='\r')
                 if not GET:
                     vect = unquote(vect)
-                efficiencies = checker(url, paramsCopy, headers, GET, delay, vect, positions)
+                efficiencies = checker(url, paramsCopy, headers, GET, delay, vect, positions, timeout)
                 if not efficiencies:
                     for i in range(len(occurences)):
                         efficiencies.append(0)
@@ -210,11 +212,11 @@ def multiTargets(scheme, host, main_url, form, domURL):
             for paramName in paramData.keys():
                 paramsCopy = copy.deepcopy(paramData)
                 paramsCopy[paramName] = xsschecker
-                response = requester(url, paramsCopy, headers, GET, delay)
+                response = requester(url, paramsCopy, headers, GET, delay, timeout)
                 parsedResponse = htmlParser(response)
                 occurences = parsedResponse[0]
                 positions = parsedResponse[1]
-                efficiencies = filterChecker(url, paramsCopy, headers, GET, delay, occurences)
+                efficiencies = filterChecker(url, paramsCopy, headers, GET, delay, occurences, timeout)
                 vectors = generator(occurences, response.text)
                 if vectors:
                     for confidence, vects in vectors.items():
@@ -234,7 +236,7 @@ else:
     scheme = urlparse(target).scheme
     host = urlparse(target).netloc
    main_url = scheme + '://' + host
-    crawlingResult = photon(target, headers, level, threadCount)
+    crawlingResult = photon(target, headers, level, threadCount, delay, timeout)
     forms = crawlingResult[0]
     domURLs = list(crawlingResult[1])
     difference = abs(len(domURLs) - len(forms))