Handle dynamic number of reflections (Fixes #78)

This commit is contained in:
Somdev Sangwan
2018-10-30 16:28:56 +05:30
committed by GitHub
parent 0dfb25bc50
commit 15179d9886
5 changed files with 54 additions and 18 deletions

View File

@@ -1,21 +1,28 @@
import re import re
import copy import copy
from fuzzywuzzy import fuzz from fuzzywuzzy import fuzz
from core.utils import replacer
from core.config import xsschecker from core.config import xsschecker
from urllib.parse import quote_plus from urllib.parse import quote_plus
from core.requester import requester from core.requester import requester
from core.utils import replacer, fillHoles
def checker(url, params, headers, GET, delay, payload): def checker(url, params, headers, GET, delay, payload, positions):
checkString = 'st4r7' + payload checkString = 'st4r7s' + payload
paramsCopy = copy.deepcopy(params) paramsCopy = copy.deepcopy(params)
response = requester(url, replacer(paramsCopy, xsschecker, checkString), headers, GET, delay).text.lower() response = requester(url, replacer(paramsCopy, xsschecker, checkString), headers, GET, delay).text.lower()
reflectedPositions = []
for match in re.finditer('st4r7s', response):
reflectedPositions.append(match.start())
filledPositions = fillHoles(positions, reflectedPositions)
# Iterating over the reflections # Iterating over the reflections
efficiencies = [] efficiencies = []
for m in re.finditer('st4r7', response): for position in reflectedPositions:
reflected = response[m.start():m.start()+len(checkString)] if position:
efficiency = fuzz.partial_ratio(reflected, checkString.lower()) reflected = response[position:position+len(checkString)]
if reflected[-1] == '\\': efficiency = fuzz.partial_ratio(reflected, checkString.lower())
efficiency += 1 if reflected[-1] == '\\':
efficiencies.append(efficiency) efficiency += 1
efficiencies.append(efficiency)
else:
efficiencies.append(0)
return efficiencies return efficiencies

View File

@@ -4,13 +4,15 @@ from core.config import xsschecker
from core.requester import requester from core.requester import requester
def filterChecker(url, params, headers, GET, delay, occurences): def filterChecker(url, params, headers, GET, delay, occurences):
positions = {}
environments = set(['<', '>']) environments = set(['<', '>'])
sortedEfficiencies = {} sortedEfficiencies = {}
for i in range(len(occurences) + 10): for i in range(len(occurences) + 10):
sortedEfficiencies[i] = {} sortedEfficiencies[i] = {}
for occurence in occurences.values(): for i, occurence in zip(range(len(occurences)), occurences.values()):
environments.add(occurence['context'][1]) environments.add(occurence['context'][1])
location = occurence['context'][0] location = occurence['context'][0]
positions[str(i)] = occurence['position']
if location == 'comment': if location == 'comment':
environments.add('-->') environments.add('-->')
elif location == 'script': elif location == 'script':
@@ -19,9 +21,9 @@ def filterChecker(url, params, headers, GET, delay, occurences):
if environment == '': if environment == '':
efficiencies = [100 for i in range(len(occurences))] efficiencies = [100 for i in range(len(occurences))]
else: else:
efficiencies = checker(url, params, headers, GET, delay, environment) efficiencies = checker(url, params, headers, GET, delay, environment, positions)
if not efficiencies: if len(efficiencies) < len(occurences):
for i in range(len(occurences)): for i in range(len(occurences) - len(efficiencies)):
efficiencies.append(0) efficiencies.append(0)
for i, efficiency in zip(range(len(efficiencies)), efficiencies): for i, efficiency in zip(range(len(efficiencies)), efficiencies):
sortedEfficiencies[i][environment] = efficiency sortedEfficiencies[i][environment] = efficiency

View File

@@ -7,6 +7,9 @@ def htmlParser(response):
locations = [] # contexts in which the input is reflected locations = [] # contexts in which the input is reflected
attributes = [] # attribute names attributes = [] # attribute names
environments = [] # strings needed to break out of the context environments = [] # strings needed to break out of the context
positions = []
for match in re.finditer(xsschecker, response):
positions.append(match.start())
parts = response.split(xsschecker) parts = response.split(xsschecker)
parts.remove(parts[0]) # remove first element since it doesn't contain xsschecker parts.remove(parts[0]) # remove first element since it doesn't contain xsschecker
parts = [xsschecker + s for s in parts] # add xsschecker in front of all elements parts = [xsschecker + s for s in parts] # add xsschecker in front of all elements
@@ -66,9 +69,10 @@ def htmlParser(response):
loc += 1 loc += 1
num += 1 num += 1
occurences = {} occurences = {}
for i, loc, env, tag, attr in zip(range(len(locations)), locations, environments, tags, attributes): for i, loc, env, tag, attr, position in zip(range(len(locations)), locations, environments, tags, attributes, positions):
occurences[i] = {} occurences[i] = {}
if loc == 'comment': if loc == 'comment':
value = '-->' value = '-->'
occurences[i]['position'] = position
occurences[i]['context'] = [loc, env, tag, attr] occurences[i]['context'] = [loc, env, tag, attr]
return occurences return [occurences, positions]

View File

@@ -2,6 +2,25 @@ import re
import random import random
from core.config import xsschecker from core.config import xsschecker
def closest(number, numbers):
    """Return the entry of *numbers* whose value is nearest to *number*.

    Args:
        number: the target integer position.
        numbers: mapping of index -> integer position.

    Returns:
        A single-entry dict ``{index: value}`` for the closest value, or
        ``{}`` only when *numbers* is empty.
    """
    # Seed with +inf so the first comparison always records a candidate.
    # (The previous seed of abs(first_value) was wrong: it compared against
    # the first value's magnitude instead of its distance from *number*,
    # which could make the function return {} even for non-empty input.)
    difference = [float('inf'), {}]
    for index, value in numbers.items():
        diff = abs(number - value)
        if diff < difference[0]:
            difference = [diff, {index: value}]
    return difference[1]
def fillHoles(original, new):
    """Align the freshly observed reflection offsets in *new* against the
    expected offsets in *original*.

    Walks both sequences in lockstep; whenever an observed offset does not
    line up with the expected one (after accounting for the drift introduced
    by earlier mismatches), a ``0`` placeholder is emitted before it and the
    running drift is updated.

    Args:
        original: expected positions (values convertible via ``int``).
        new: observed positions (ints).

    Returns:
        list of ints — observed positions with ``0`` markers for holes.
    """
    drift = 0
    aligned = []
    for expected, observed in zip(original, new):
        if int(expected) == observed + drift:
            aligned.append(observed)
            continue
        # Expected reflection is missing: mark the hole, keep the observed
        # value, and fold the positional gap into the running drift.
        aligned.append(0)
        aligned.append(observed)
        drift += int(expected) - observed
    return aligned
def stripper(string, substring, direction='right'): def stripper(string, substring, direction='right'):
done = False done = False
strippedString = '' strippedString = ''

View File

@@ -132,7 +132,9 @@ def singleTarget(target, paramData):
print ('%s Testing parameter: %s' % (info, paramName)) print ('%s Testing parameter: %s' % (info, paramName))
paramsCopy[paramName] = xsschecker paramsCopy[paramName] = xsschecker
response = requester(url, paramsCopy, headers, GET, delay).text response = requester(url, paramsCopy, headers, GET, delay).text
occurences = htmlParser(response) parsedResponse = htmlParser(response)
occurences = parsedResponse[0]
positions = parsedResponse[1]
if not occurences: if not occurences:
print ('%s No reflection found' % bad) print ('%s No reflection found' % bad)
continue continue
@@ -156,7 +158,7 @@ def singleTarget(target, paramData):
print ('%s Payloads tried [%i/%i]' % (run, progress, total), end='\r') print ('%s Payloads tried [%i/%i]' % (run, progress, total), end='\r')
if not GET: if not GET:
vect = unquote(vect) vect = unquote(vect)
efficiencies = checker(url, paramsCopy, headers, GET, delay, vect) efficiencies = checker(url, paramsCopy, headers, GET, delay, vect, positions)
if not efficiencies: if not efficiencies:
for i in range(len(occurences)): for i in range(len(occurences)):
efficiencies.append(0) efficiencies.append(0)
@@ -213,7 +215,9 @@ def multiTargets(scheme, host, main_url, form):
paramsCopy[paramName] = xsschecker paramsCopy[paramName] = xsschecker
response = requester(url, paramsCopy, headers, GET, delay).text response = requester(url, paramsCopy, headers, GET, delay).text
try: try:
occurences = htmlParser(response) parsedResponse = htmlParser(response)
occurences = parsedResponse[0]
positions = parsedResponse[1]
efficiencies = filterChecker(url, paramsCopy, headers, GET, delay, occurences) efficiencies = filterChecker(url, paramsCopy, headers, GET, delay, occurences)
vectors = generator(occurences, response) vectors = generator(occurences, response)
if vectors: if vectors: