Handle dynamic number of reflections (Fixes #78)

This commit is contained in:
Somdev Sangwan
2018-10-30 16:28:56 +05:30
committed by GitHub
parent 0dfb25bc50
commit 15179d9886
5 changed files with 54 additions and 18 deletions

View File

@@ -1,21 +1,28 @@
import re
import copy
from fuzzywuzzy import fuzz
from core.utils import replacer
from core.config import xsschecker
from urllib.parse import quote_plus
from core.requester import requester
from core.utils import replacer, fillHoles
def checker(url, params, headers, GET, delay, payload):
checkString = 'st4r7' + payload
def checker(url, params, headers, GET, delay, payload, positions):
checkString = 'st4r7s' + payload
paramsCopy = copy.deepcopy(params)
response = requester(url, replacer(paramsCopy, xsschecker, checkString), headers, GET, delay).text.lower()
reflectedPositions = []
for match in re.finditer('st4r7s', response):
reflectedPositions.append(match.start())
filledPositions = fillHoles(positions, reflectedPositions)
# Iterating over the reflections
efficiencies = []
for m in re.finditer('st4r7', response):
reflected = response[m.start():m.start()+len(checkString)]
for position in reflectedPositions:
if position:
reflected = response[position:position+len(checkString)]
efficiency = fuzz.partial_ratio(reflected, checkString.lower())
if reflected[-1] == '\\':
efficiency += 1
efficiencies.append(efficiency)
else:
efficiencies.append(0)
return efficiencies

View File

@@ -4,13 +4,15 @@ from core.config import xsschecker
from core.requester import requester
def filterChecker(url, params, headers, GET, delay, occurences):
positions = {}
environments = set(['<', '>'])
sortedEfficiencies = {}
for i in range(len(occurences) + 10):
sortedEfficiencies[i] = {}
for occurence in occurences.values():
for i, occurence in zip(range(len(occurences)), occurences.values()):
environments.add(occurence['context'][1])
location = occurence['context'][0]
positions[str(i)] = occurence['position']
if location == 'comment':
environments.add('-->')
elif location == 'script':
@@ -19,9 +21,9 @@ def filterChecker(url, params, headers, GET, delay, occurences):
if environment == '':
efficiencies = [100 for i in range(len(occurences))]
else:
efficiencies = checker(url, params, headers, GET, delay, environment)
if not efficiencies:
for i in range(len(occurences)):
efficiencies = checker(url, params, headers, GET, delay, environment, positions)
if len(efficiencies) < len(occurences):
for i in range(len(occurences) - len(efficiencies)):
efficiencies.append(0)
for i, efficiency in zip(range(len(efficiencies)), efficiencies):
sortedEfficiencies[i][environment] = efficiency

View File

@@ -7,6 +7,9 @@ def htmlParser(response):
locations = [] # contexts in which the input is reflected
attributes = [] # attribute names
environments = [] # strings needed to break out of the context
positions = []
for match in re.finditer(xsschecker, response):
positions.append(match.start())
parts = response.split(xsschecker)
parts.remove(parts[0]) # remove first element since it doesn't contain xsschecker
parts = [xsschecker + s for s in parts] # add xsschecker in front of all elements
@@ -66,9 +69,10 @@ def htmlParser(response):
loc += 1
num += 1
occurences = {}
for i, loc, env, tag, attr in zip(range(len(locations)), locations, environments, tags, attributes):
for i, loc, env, tag, attr, position in zip(range(len(locations)), locations, environments, tags, attributes, positions):
occurences[i] = {}
if loc == 'comment':
value = '-->'
occurences[i]['position'] = position
occurences[i]['context'] = [loc, env, tag, attr]
return occurences
return [occurences, positions]

View File

@@ -2,6 +2,25 @@ import re
import random
from core.config import xsschecker
def closest(number, numbers):
    """Return ``{index: value}`` for the entry of *numbers* whose value is
    nearest to *number*.

    Args:
        number: the target integer to compare against.
        numbers: a mapping of index -> candidate value (insertion-ordered,
            so ties resolve to the earliest entry).

    Returns:
        A single-item dict ``{index: value}`` of the closest entry.

    Bug fix: the original seeded the running minimum with
    ``abs(first_value)`` instead of ``abs(number - first_value)`` and
    started the best-match as ``{}``, so whenever no candidate beat that
    arbitrary seed the function returned an empty dict. Selecting the true
    minimum over the items fixes that while keeping tie-breaking behavior
    (first entry wins).
    """
    index, value = min(numbers.items(), key=lambda item: abs(number - item[1]))
    return {index: value}
def fillHoles(original, new):
    """Align a list of observed reflection positions against the expected ones.

    Walks *original* (expected positions; values may be numeric strings) and
    *new* (positions actually found) in lockstep. Where the found position,
    adjusted by the accumulated drift, matches the expected one it is kept;
    otherwise a ``0`` placeholder is inserted before it and the drift is
    updated by the gap. Returns the padded list of positions.
    """
    drift = 0
    aligned = []
    for expected, found in zip(original, new):
        if found + drift == int(expected):
            aligned.append(found)
            continue
        # Missing reflection: mark the hole with 0, keep the found position,
        # and remember how far off we now are.
        aligned.extend([0, found])
        drift += int(expected) - found
    return aligned
def stripper(string, substring, direction='right'):
done = False
strippedString = ''

View File

@@ -132,7 +132,9 @@ def singleTarget(target, paramData):
print ('%s Testing parameter: %s' % (info, paramName))
paramsCopy[paramName] = xsschecker
response = requester(url, paramsCopy, headers, GET, delay).text
occurences = htmlParser(response)
parsedResponse = htmlParser(response)
occurences = parsedResponse[0]
positions = parsedResponse[1]
if not occurences:
print ('%s No reflection found' % bad)
continue
@@ -156,7 +158,7 @@ def singleTarget(target, paramData):
print ('%s Payloads tried [%i/%i]' % (run, progress, total), end='\r')
if not GET:
vect = unquote(vect)
efficiencies = checker(url, paramsCopy, headers, GET, delay, vect)
efficiencies = checker(url, paramsCopy, headers, GET, delay, vect, positions)
if not efficiencies:
for i in range(len(occurences)):
efficiencies.append(0)
@@ -213,7 +215,9 @@ def multiTargets(scheme, host, main_url, form):
paramsCopy[paramName] = xsschecker
response = requester(url, paramsCopy, headers, GET, delay).text
try:
occurences = htmlParser(response)
parsedResponse = htmlParser(response)
occurences = parsedResponse[0]
positions = parsedResponse[1]
efficiencies = filterChecker(url, paramsCopy, headers, GET, delay, occurences)
vectors = generator(occurences, response)
if vectors: