Ability to encode payloads, Fixed a bug in bruteforcer

Somdev Sangwan authored on 2018-11-13 16:47:00 +05:30, committed by GitHub
parent bbf9201356
commit 65abbd265f
6 changed files with 58 additions and 22 deletions

core/checker.py

@@ -1,13 +1,16 @@
 import re
 import copy
 from fuzzywuzzy import fuzz
+from core.encoders import base64
 from core.config import xsschecker
-from urllib.parse import quote_plus
 from core.requester import requester
 from core.utils import replacer, fillHoles
+from urllib.parse import quote_plus, unquote
 
-def checker(url, params, headers, GET, delay, payload, positions, timeout):
+def checker(url, params, headers, GET, delay, payload, positions, timeout, encoding):
     checkString = 'st4r7s' + payload + '3nd'
+    if encoding:
+        checkString = encoding(unquote(checkString))
     paramsCopy = copy.deepcopy(params)
     response = requester(url, replacer(paramsCopy, xsschecker, checkString), headers, GET, delay, timeout).text.lower()
     reflectedPositions = []
@@ -27,7 +30,9 @@ def checker(url, params, headers, GET, delay, payload, positions, timeout):
             pass
         if position:
             reflected = response[position:position+len(checkString)]
-            efficiency = fuzz.partial_ratio(reflected, checkString.lower())
+            if encoding:
+                checkString = encoding(checkString.lower())
+            efficiency = fuzz.partial_ratio(reflected, checkString)
             if reflected[:-2] == ('\\%s' % checkString.replace('st4r7s', '').replace('3nd', '')):
                 efficiency = 90
             allEfficiencies.append(efficiency)
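For context, here is a minimal, self-contained sketch of what the new encoding hook in checker() does: the st4r7s...3nd probe string is URL-decoded and run through the user-chosen encoder before it is injected, and that encoded form is what gets fuzzy-matched against the reflection. The encoder lambda and payload below are illustrative stand-ins, not taken from this diff:

import base64 as b64
from urllib.parse import unquote

def encode_probe(payload, encoding):
    # Mirrors the new checker() logic: wrap the payload in the st4r7s/3nd
    # markers, then run it through the chosen encoder (if any).
    checkString = 'st4r7s' + payload + '3nd'
    if encoding:
        checkString = encoding(unquote(checkString))
    return checkString

# Stand-in for core.encoders.base64 (illustrative only):
encoder = lambda s: b64.b64encode(s.encode('utf-8')).decode('utf-8')
print(encode_probe('"><svg onload=alert()>', encoder))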

core/encoders.py (new file)

@@ -0,0 +1,8 @@
+import re
+import base64 as b64
+
+def base64(string):
+    if re.match(r'^[A-Za-z0-9+\/=]+$', string) and (len(string) % 4) == 0:
+        return b64.b64decode(string.encode('utf-8')).decode('utf-8')
+    else:
+        return b64.b64encode(string.encode('utf-8')).decode('utf-8')
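The helper is intentionally bidirectional: input that already looks like valid base64 (base64 charset only, length divisible by 4) is decoded, anything else is encoded. A quick interactive check, assuming the module is importable as core.encoders:

>>> from core.encoders import base64
>>> base64('alert()')        # not valid base64, so it gets encoded
'YWxlcnQoKQ=='
>>> base64('YWxlcnQoKQ==')   # valid base64, so it gets decoded back
'alert()'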

core/filterChecker.py

@@ -3,7 +3,7 @@ from core.checker import checker
 from core.config import xsschecker
 from core.requester import requester
 
-def filterChecker(url, params, headers, GET, delay, occurences, timeout):
+def filterChecker(url, params, headers, GET, delay, occurences, timeout, encoding):
     positions = {}
     environments = set(['<', '>'])
     sortedEfficiencies = {}
@@ -25,7 +25,7 @@ def filterChecker(url, params, headers, GET, delay, occurences, timeout):
         if environment == '':
             efficiencies = [100 for i in range(len(occurences))]
         else:
-            efficiencies = checker(url, params, headers, GET, delay, environment, positions, timeout)
+            efficiencies = checker(url, params, headers, GET, delay, environment, positions, timeout, encoding)
         if len(efficiencies) < len(occurences):
             for i in range(len(occurences) - len(efficiencies)):
                 efficiencies.append(0)

core/fuzzer.py

@@ -3,9 +3,9 @@ import requests
 from time import sleep
 from random import randint
 from core.utils import replacer
-from urllib.parse import quote_plus
 from core.requester import requester
 from core.config import fuzzes, xsschecker
+from urllib.parse import quote_plus, unquote
 from core.colors import end, red, white, green, yellow, run, bad, good, info, que
 
 def counter(string):
@@ -16,15 +16,18 @@ def counter(string):
         count += 1
     return count
 
-def fuzzer(url, params, headers, GET, delay, timeout, WAF):
+def fuzzer(url, params, headers, GET, delay, timeout, WAF, encoding):
     for fuzz in fuzzes:
         if delay == 0:
-            delay = 6
+            delay = 0
         t = delay + randint(delay, delay * 2) + counter(fuzz)
         sleep(t)
         paramsCopy = copy.deepcopy(params)
         try:
-            response = requester(url, replacer(paramsCopy, xsschecker, fuzz), headers, GET, delay/2, timeout)
+            if encoding:
+                fuzz = encoding(unquote(fuzz))
+            data = replacer(paramsCopy, xsschecker, fuzz)
+            response = requester(url, data, headers, GET, delay/2, timeout)
         except:
             print ('\n%s WAF is dropping suspicious requests.' % bad)
             if delay == 0:
@@ -42,6 +45,8 @@ def fuzzer(url, params, headers, GET, delay, timeout, WAF):
             except:
                 print ('\n%s Looks like WAF has blocked our IP Address. Sorry!' % bad)
                 break
+        if encoding:
+            fuzz = encoding(fuzz)
         if fuzz.lower() in response.text.lower(): # if fuzz string is reflected in the response
             result = ('%s[passed] %s' % (green, end))
         elif str(response.status_code)[:1] != '2': # if the server returned an error (Maybe WAF blocked it)
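Note how the fuzzer now uses the encoder twice: once on the URL-decoded fuzz string before it is sent, and once more before the reflection check. Because the base64 helper round-trips (it decodes anything that already looks like base64), the second call effectively restores the plain-text fuzz, so the check looks for the decoded form in the response. A small sketch of that round trip; bidirectional_b64 is a stand-in for core.encoders.base64 and the fuzz string is illustrative:

import re
import base64 as b64
from urllib.parse import unquote

def bidirectional_b64(string):
    # Stand-in for core.encoders.base64: decodes valid base64, encodes everything else.
    if re.match(r'^[A-Za-z0-9+/=]+$', string) and len(string) % 4 == 0:
        return b64.b64decode(string.encode('utf-8')).decode('utf-8')
    return b64.b64encode(string.encode('utf-8')).decode('utf-8')

fuzz = '<fuzz-string>'
sent = bidirectional_b64(unquote(fuzz))  # what gets injected into the request
check = bidirectional_b64(sent)          # round-trips back to plain text
assert check == unquote(fuzz)            # the reflection check compares this decoded form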

core/htmlParser.py

@@ -1,10 +1,13 @@
 import re
 from core.config import badTags
+from core.encoders import base64
 from core.config import xsschecker
 
-def htmlParser(response):
+def htmlParser(response, encoding):
     rawResponse = response
     response = response.text
+    if encoding:
+        response = response.replace(encoding(xsschecker), xsschecker)
     tags = [] # tags in which the input is reflected
     locations = [] # contexts in which the input is reflected
     attributes = [] # attribute names
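With encoding enabled, the xsschecker marker reaches the page in its encoded form, so the parser first rewrites the encoded marker back to the plain one; the rest of the tag/attribute detection then keeps matching on the familiar string. A small illustration, where the marker value and page snippet are assumptions, not taken from this diff:

import base64 as b64

xsschecker = 'v3dm0s'  # assumed marker value from core.config; the real value may differ
encoded = b64.b64encode(xsschecker.encode('utf-8')).decode('utf-8')

response = '<input value="%s">' % encoded          # page reflecting the encoded marker
response = response.replace(encoded, xsschecker)   # same normalisation htmlParser() now applies
print(response)                                    # <input value="v3dm0s">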

xsstrike.py

@@ -32,6 +32,7 @@ from core.prompt import prompt
 from core.fuzzer import fuzzer
 from core.updater import updater
 from core.checker import checker
+from core.encoders import base64
 from core.generator import generator
 from core.requester import requester
 from core.htmlParser import htmlParser
@@ -44,6 +45,7 @@ from core.utils import getUrl, getParams, flattenParams, extractHeaders, verbose
 parser = argparse.ArgumentParser()
 parser.add_argument('-u', '--url', help='url', dest='target')
 parser.add_argument('--data', help='post data', dest='data')
+parser.add_argument('-e', '--encode', help='encode payloads', dest='encode')
 parser.add_argument('--fuzzer', help='fuzzer', dest='fuzz', action='store_true')
 parser.add_argument('--update', help='update', dest='update', action='store_true')
 parser.add_argument('--timeout', help='timeout', dest='timeout', type=int)
@@ -66,6 +68,7 @@ else:
 find = args.find
 fuzz = args.fuzz
+encode = args.encode
 target = args.target
 paramData = args.data
 verbose = args.verbose
@@ -84,6 +87,11 @@ if args.file:
         for line in f:
             payloadList.append(line.rstrip('\n'))
 
+encoding = False
+if encode:
+    if encode == 'base64':
+        encoding = base64
+
 if args.update: # if the user has supplied --update argument
     updater()
     quit() # quitting because files have been changed
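In practice the new flag is used like this; 'base64' is the only encoder wired up so far, and any other value silently leaves encoding set to False. The invocation below is a hypothetical example, assuming the entry script is xsstrike.py and XSStrike's package layout:

# python xsstrike.py -u "http://example.com/search.php?q=query" -e base64
from core.encoders import base64  # requires XSStrike's core package on the path

encode = 'base64'   # what args.encode holds for the command above
encoding = False
if encode:
    if encode == 'base64':
        encoding = base64   # the encoder callable threaded through the rest of the code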
@@ -92,7 +100,7 @@ if not target: # if the user hasn't supplied a url
     print('\n' + parser.format_help().lower())
     quit()
 
-def singleTarget(target, paramData, verbose):
+def singleTarget(target, paramData, verbose, encoding):
     if paramData:
         GET, POST = False, True
     else:
@@ -137,15 +145,18 @@ def singleTarget(target, paramData, verbose):
             print ('%s Fuzzing parameter: %s' % (info, paramName))
             paramsCopy = copy.deepcopy(params)
             paramsCopy[paramName] = xsschecker
-            fuzzer(url, paramsCopy, headers, GET, delay, timeout, WAF)
+            fuzzer(url, paramsCopy, headers, GET, delay, timeout, WAF, encoding)
         quit()
 
     for paramName in params.keys():
         paramsCopy = copy.deepcopy(params)
         print ('%s Testing parameter: %s' % (info, paramName))
-        paramsCopy[paramName] = xsschecker
+        if encoding:
+            paramsCopy[paramName] = encoding(xsschecker)
+        else:
+            paramsCopy[paramName] = xsschecker
         response = requester(url, paramsCopy, headers, GET, delay, timeout)
-        parsedResponse = htmlParser(response)
+        parsedResponse = htmlParser(response, encoding)
         occurences = parsedResponse[0]
         verboseOutput(occurences, 'occurences', verbose)
         positions = parsedResponse[1]
@@ -156,7 +167,7 @@ def singleTarget(target, paramData, verbose):
         else:
             print ('%s Reflections found: %s' % (info, len(occurences)))
         print ('%s Analysing reflections' % run)
-        efficiencies = filterChecker(url, paramsCopy, headers, GET, delay, occurences, timeout)
+        efficiencies = filterChecker(url, paramsCopy, headers, GET, delay, occurences, timeout, encoding)
         verboseOutput(efficiencies, 'efficiencies', verbose)
         print ('%s Generating payloads' % run)
         vectors = generator(occurences, response.text)
@@ -175,7 +186,7 @@ def singleTarget(target, paramData, verbose):
                 print ('%s Payloads tried [%i/%i]' % (run, progress, total), end='\r')
                 if not GET:
                     vect = unquote(vect)
-                efficiencies = checker(url, paramsCopy, headers, GET, delay, vect, positions, timeout)
+                efficiencies = checker(url, paramsCopy, headers, GET, delay, vect, positions, timeout, encoding)
                 if not efficiencies:
                     for i in range(len(occurences)):
                         efficiencies.append(0)
@@ -231,10 +242,10 @@ def multiTargets(scheme, host, main_url, form, domURL, verbose):
                     paramsCopy = copy.deepcopy(paramData)
                     paramsCopy[paramName] = xsschecker
                     response = requester(url, paramsCopy, headers, GET, delay, timeout)
-                    parsedResponse = htmlParser(response)
+                    parsedResponse = htmlParser(response, encoding)
                     occurences = parsedResponse[0]
                     positions = parsedResponse[1]
-                    efficiencies = filterChecker(url, paramsCopy, headers, GET, delay, occurences, timeout)
+                    efficiencies = filterChecker(url, paramsCopy, headers, GET, delay, occurences, timeout, encoding)
                     vectors = generator(occurences, response.text)
                     if vectors:
                         for confidence, vects in vectors.items():
@@ -247,30 +258,34 @@ def multiTargets(scheme, host, main_url, form, domURL, verbose):
                 pass
 
-def brute(target, paramData, payloadList, verbose):
+def brute(target, paramData, payloadList, verbose, encoding):
     if paramData:
         GET, POST = False, True
     else:
         GET, POST = True, False
     host = urlparse(target).netloc # Extracts host out of the url
     verboseOutput(host, 'host', verbose)
-    url = getUrl(target, paramData, GET)
+    url = getUrl(target, GET)
     verboseOutput(url, 'url', verbose)
     params = getParams(target, paramData, GET)
     verboseOutput(params, 'params', verbose)
     for paramName in params.keys():
         paramsCopy = copy.deepcopy(params)
         for payload in payloadList:
+            if encoding:
+                payload = encoding(unquote(payload))
             paramsCopy[paramName] = payload
             response = requester(url, paramsCopy, headers, GET, delay, timeout).text
+            if encoding:
+                payload = encoding(payload)
             if payload in response:
                 print ('%s %s' % (good, payload))
 
 if not args.recursive:
     if args.file:
-        brute(target, paramData, payloadList, verbose)
+        brute(target, paramData, payloadList, verbose, encoding)
     else:
-        singleTarget(target, paramData, verbose)
+        singleTarget(target, paramData, verbose, encoding)
 else:
     print ('%s Crawling the target' % run)
     scheme = urlparse(target).scheme
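Looking back at the brute() changes above, two things happen: getUrl() is now called without paramData, which appears to be the bruteforcer bug referenced in the commit message, and each payload is encoded before injection while the reflection check uses the round-tripped (decoded) form, mirroring the fuzzer. A minimal sketch of that send/check pairing, assuming the bidirectional base64 helper from core.encoders; payload_pair is a hypothetical helper, not part of this diff:

from urllib.parse import unquote
from core.encoders import base64  # the bidirectional helper added in this commit

def payload_pair(payload, encoding=base64):
    # What brute() sends vs. what it searches for in the response body.
    sent = encoding(unquote(payload))   # encoded form placed in the parameter
    expected = encoding(sent)           # round-trips back to the plain payload
    return sent, expected

sent, expected = payload_pair('"><script>alert()</script>')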