bug fix round #1

This commit is contained in:
s0md3v
2022-03-20 14:51:41 +05:30
parent e03c2dbb05
commit ba5c75bee7
5 changed files with 26 additions and 52 deletions

View File

@@ -1,42 +0,0 @@
import concurrent.futures
import re
from core.colors import green, end
from core.config import blindParams, xsschecker, threadCount
from core.requester import requester
from core.log import setup_logger
logger = setup_logger(__name__)
def checky(param, paraNames, url, headers, GET, delay, timeout):
    """Probe one candidate parameter; record it in paraNames if reflected.

    Sends the xsschecker token as the parameter's value and marks the
    parameter valid when the token comes back quoted or space-delimited
    in the response body. Mutates paraNames in place (used as a shared
    result dict across worker threads).
    """
    if param in paraNames:
        return  # already confirmed earlier, no need to re-request
    logger.debug('Checking param: {}'.format(param))
    response = requester(url, {param: xsschecker}, headers, GET, delay, timeout).text
    # Reflection counts only when the token appears as a discrete value:
    # single-quoted, double-quoted, or surrounded by spaces.
    markers = ('\'%s\'' % xsschecker, '"%s"' % xsschecker, ' %s ' % xsschecker)
    if any(marker in response for marker in markers):
        paraNames[param] = ''
        logger.good('Valid parameter found: %s%s', green, param)
def arjun(url, GET, headers, delay, timeout):
    """Discover likely-valid request parameters for *url*.

    Scrapes <input name=...> fields from the page as heuristic candidates,
    prioritizes them at the front of the blindParams wordlist, then probes
    every blindParams entry concurrently via checky().

    Returns:
        dict: parameter name -> '' for every parameter confirmed reflected.
    """
    paraNames = {}
    response = requester(url, {}, headers, GET, delay, timeout).text
    matches = re.findall(
        r'<input.*?name=\'(.*?)\'.*?>|<input.*?name="(.*?)".*?>', response)
    for match in matches:
        # re.findall yields a 2-tuple per match (one group per alternation
        # branch); exactly one group is non-empty. The previous code read
        # match[1] unconditionally — always '' for single-quoted name='...'
        # attributes — behind an unreachable `except UnicodeDecodeError`
        # (tuple indexing never raises it). Pick whichever group matched.
        foundParam = match[0] or match[1]
        if not foundParam:
            continue  # empty name attribute: nothing to prioritize
        logger.good('Heuristics found a potentially valid parameter: %s%s%s. Priortizing it.' % (
            green, foundParam, end))
        if foundParam not in blindParams:
            blindParams.insert(0, foundParam)
    threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=threadCount)
    futures = (threadpool.submit(checky, param, paraNames, url,
                                 headers, GET, delay, timeout) for param in blindParams)
    # Log progress at each threadCount boundary and once at the very end.
    for i, _ in enumerate(concurrent.futures.as_completed(futures)):
        if i + 1 == len(blindParams) or (i + 1) % threadCount == 0:
            logger.info('Progress: %i/%i\r' % (i + 1, len(blindParams)))
    return paraNames

View File

@@ -34,13 +34,13 @@ def dom(response):
for part in parts: for part in parts:
if source in part: if source in part:
controlledVariables.add(re.search(r'[a-zA-Z$_][a-zA-Z0-9$_]+', part).group().replace('$', '\$')) controlledVariables.add(re.search(r'[a-zA-Z$_][a-zA-Z0-9$_]+', part).group().replace('$', '\$'))
sourceFound = True
line = line.replace(source, yellow + source + end) line = line.replace(source, yellow + source + end)
for controlledVariable in controlledVariables: for controlledVariable in controlledVariables:
allControlledVariables.add(controlledVariable) allControlledVariables.add(controlledVariable)
for controlledVariable in allControlledVariables: for controlledVariable in allControlledVariables:
matches = list(filter(None, re.findall(r'\b%s\b' % controlledVariable, line))) matches = list(filter(None, re.findall(r'\b%s\b' % controlledVariable, line)))
if matches: if matches:
sourceFound = True
line = re.sub(r'\b%s\b' % controlledVariable, yellow + controlledVariable + end, line) line = re.sub(r'\b%s\b' % controlledVariable, yellow + controlledVariable + end, line)
pattern = re.finditer(sinks, newLine) pattern = re.finditer(sinks, newLine)
for grp in pattern: for grp in pattern:
@@ -54,7 +54,7 @@ def dom(response):
num += 1 num += 1
except MemoryError: except MemoryError:
pass pass
if sinkFound and sourceFound: if sinkFound or sourceFound:
return highlighted return highlighted
else: else:
return [] return []

View File

@@ -2,7 +2,6 @@ import copy
import re import re
from urllib.parse import urlparse, quote, unquote from urllib.parse import urlparse, quote, unquote
from core.arjun import arjun
from core.checker import checker from core.checker import checker
from core.colors import end, green, que from core.colors import end, green, que
import core.config import core.config
@@ -19,7 +18,7 @@ from core.log import setup_logger
logger = setup_logger(__name__) logger = setup_logger(__name__)
def scan(target, paramData, encoding, headers, delay, timeout, skipDOM, find, skip): def scan(target, paramData, encoding, headers, delay, timeout, skipDOM, skip):
GET, POST = (False, True) if paramData else (True, False) GET, POST = (False, True) if paramData else (True, False)
# If the user hasn't supplied the root url with http(s), we will handle it # If the user hasn't supplied the root url with http(s), we will handle it
if not target.startswith('http'): if not target.startswith('http'):
@@ -47,8 +46,6 @@ def scan(target, paramData, encoding, headers, delay, timeout, skipDOM, find, sk
logger.debug('Url to scan: {}'.format(url)) logger.debug('Url to scan: {}'.format(url))
params = getParams(target, paramData, GET) params = getParams(target, paramData, GET)
logger.debug_json('Scan parameters:', params) logger.debug_json('Scan parameters:', params)
if find:
params = arjun(url, GET, headers, delay, timeout)
if not params: if not params:
logger.error('No parameters to test.') logger.error('No parameters to test.')
quit() quit()

22
test.html Normal file
View File

@@ -0,0 +1,22 @@
<!DOCTYPE html>
<!-- Test fixture: four divs, each pairing a <pre> (visible source text)
     with a <script> executing the same statement. Each script feeds a URL
     query parameter (a-d) into a different DOM sink, so a scanner's
     DOM source/sink detector should flag all four. Do not "fix" the
     deliberately unsafe sinks — they are the point of the fixture. -->
<html>
<body>
<!-- sink: document.writeln, source: URL searchParams "a" -->
<div>
<pre>document.writeln(new URL(window.location.href).searchParams.get("a"))</pre>
<script>document.writeln(new URL(window.location.href).searchParams.get("a"))</script>
</div>
<!-- sink: document.write, source: URLSearchParams "b" -->
<div>
<pre>document.write(new URLSearchParams(window.location.search).get("b"))</pre>
<script>document.write(new URLSearchParams(window.location.search).get("b"))</script>
</div>
<!-- sink: eval, source: URLSearchParams "c" -->
<div>
<pre>eval(new URLSearchParams(window.location.search).get("c") || "")</pre>
<script>eval(new URLSearchParams(window.location.search).get("c") || "")</script>
</div>
<!-- sink: element.innerHTML assignment, source: URLSearchParams "d" -->
<div>
<div id="xss-d"></div>
<pre>document.querySelector("#xss-d").innerHTML = new URLSearchParams(window.location.search).get("d")</pre>
<script>document.querySelector("#xss-d").innerHTML = new URLSearchParams(window.location.search).get("d")</script>
</div>
</body>
</html>

View File

@@ -49,8 +49,6 @@ parser.add_argument('--timeout', help='timeout',
dest='timeout', type=int, default=core.config.timeout) dest='timeout', type=int, default=core.config.timeout)
parser.add_argument('--proxy', help='use prox(y|ies)', parser.add_argument('--proxy', help='use prox(y|ies)',
dest='proxy', action='store_true') dest='proxy', action='store_true')
parser.add_argument('--params', help='find params',
dest='find', action='store_true')
parser.add_argument('--crawl', help='crawl', parser.add_argument('--crawl', help='crawl',
dest='recursive', action='store_true') dest='recursive', action='store_true')
parser.add_argument('--json', help='treat post data as json', parser.add_argument('--json', help='treat post data as json',
@@ -95,7 +93,6 @@ fuzz = args.fuzz
update = args.update update = args.update
timeout = args.timeout timeout = args.timeout
proxy = args.proxy proxy = args.proxy
find = args.find
recursive = args.recursive recursive = args.recursive
args_file = args.args_file args_file = args.args_file
args_seeds = args.args_seeds args_seeds = args.args_seeds
@@ -174,7 +171,7 @@ elif not recursive and not args_seeds:
if args_file: if args_file:
bruteforcer(target, paramData, payloadList, encoding, headers, delay, timeout) bruteforcer(target, paramData, payloadList, encoding, headers, delay, timeout)
else: else:
scan(target, paramData, encoding, headers, delay, timeout, skipDOM, find, skip) scan(target, paramData, encoding, headers, delay, timeout, skipDOM, skip)
else: else:
if target: if target:
seedList.append(target) seedList.append(target)