diff --git a/core/arjun.py b/core/arjun.py
deleted file mode 100644
index dd4a037..0000000
--- a/core/arjun.py
+++ /dev/null
@@ -1,42 +0,0 @@
-import concurrent.futures
-import re
-
-from core.colors import green, end
-from core.config import blindParams, xsschecker, threadCount
-from core.requester import requester
-from core.log import setup_logger
-
-logger = setup_logger(__name__)
-
-
-def checky(param, paraNames, url, headers, GET, delay, timeout):
-    if param not in paraNames:
-        logger.debug('Checking param: {}'.format(param))
-        response = requester(url, {param: xsschecker},
-                             headers, GET, delay, timeout).text
-        if '\'%s\'' % xsschecker in response or '"%s"' % xsschecker in response or ' %s ' % xsschecker in response:
-            paraNames[param] = ''
-            logger.good('Valid parameter found: %s%s', green, param)
-
-
-def arjun(url, GET, headers, delay, timeout):
-    paraNames = {}
-    response = requester(url, {}, headers, GET, delay, timeout).text
-    matches = re.findall(
-        r'<input.*?name=\'(.*?)\'|<input.*?name="(.*?)"', response)
-    for match in matches:
-        try:
-            foundParam = match[1]
-        except UnicodeDecodeError:
-            continue
-        logger.good('Heuristics found a potentially valid parameter: %s%s%s. Priortizing it.' % (
-            green, foundParam, end))
-        if foundParam not in blindParams:
-            blindParams.insert(0, foundParam)
-    threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=threadCount)
-    futures = (threadpool.submit(checky, param, paraNames, url,
-                                 headers, GET, delay, timeout) for param in blindParams)
-    for i, _ in enumerate(concurrent.futures.as_completed(futures)):
-        if i + 1 == len(blindParams) or (i + 1) % threadCount == 0:
-            logger.info('Progress: %i/%i\r' % (i + 1, len(blindParams)))
-    return paraNames
diff --git a/core/dom.py b/core/dom.py
index 20f3fa8..dcd7315 100644
--- a/core/dom.py
+++ b/core/dom.py
@@ -34,13 +34,13 @@ def dom(response):
                                 for part in parts:
                                     if source in part:
                                         controlledVariables.add(re.search(r'[a-zA-Z$_][a-zA-Z0-9$_]+', part).group().replace('$', '\$'))
-                            sourceFound = True
                             line = line.replace(source, yellow + source + end)
                 for controlledVariable in controlledVariables:
                     allControlledVariables.add(controlledVariable)
                 for controlledVariable in allControlledVariables:
                     matches = list(filter(None, re.findall(r'\b%s\b' % controlledVariable, line)))
                     if matches:
+                        sourceFound = True
                         line = re.sub(r'\b%s\b' % controlledVariable, yellow + controlledVariable + end, line)
                 pattern = re.finditer(sinks, newLine)
                 for grp in pattern:
@@ -54,7 +54,7 @@ def dom(response):
                 num += 1
         except MemoryError:
             pass
-    if sinkFound and sourceFound:
+    if sinkFound or sourceFound:
         return highlighted
     else:
         return []
diff --git a/modes/scan.py b/modes/scan.py
index a7b69b7..912d68d 100644
--- a/modes/scan.py
+++ b/modes/scan.py
@@ -2,7 +2,6 @@ import copy
 import re
 from urllib.parse import urlparse, quote, unquote
 
-from core.arjun import arjun
 from core.checker import checker
 from core.colors import end, green, que
 import core.config
@@ -19,7 +18,7 @@ from core.log import setup_logger
 logger = setup_logger(__name__)
 
 
-def scan(target, paramData, encoding, headers, delay, timeout, skipDOM, find, skip):
+def scan(target, paramData, encoding, headers, delay, timeout, skipDOM, skip):
     GET, POST = (False, True) if paramData else (True, False)
     # If the user hasn't supplied the root url with http(s), we will handle it
     if not target.startswith('http'):
@@ -47,8 +46,6 @@ def scan(target, paramData, encoding, headers, delay, timeout, skipDOM, find, sk
     logger.debug('Url to scan: {}'.format(url))
     params = getParams(target, paramData, GET)
     logger.debug_json('Scan parameters:', params)
-    if find:
-        params = arjun(url, GET, headers, delay, timeout)
     if not params:
         logger.error('No parameters to test.')
         quit()
diff --git a/test.html b/test.html
new file mode 100644
index 0000000..685199f
--- /dev/null
+++ b/test.html
@@ -0,0 +1,22 @@
+<html>
+    <head>
+    </head>
+    <body>
+        <script>
+            document.writeln(new URL(window.location.href).searchParams.get("a"))
+        </script>
+
+        <script>
+            document.write(new URLSearchParams(window.location.search).get("b"))
+        </script>
+
+        <script>
+            eval(new URLSearchParams(window.location.search).get("c") || "")
+        </script>
+
+        <div id="xss-d"></div>
+        <script>
+            document.querySelector("#xss-d").innerHTML = new URLSearchParams(window.location.search).get("d")
+        </script>
+ + diff --git a/xsstrike.py b/xsstrike.py index c05b2a4..1aab695 100644 --- a/xsstrike.py +++ b/xsstrike.py @@ -49,8 +49,6 @@ parser.add_argument('--timeout', help='timeout', dest='timeout', type=int, default=core.config.timeout) parser.add_argument('--proxy', help='use prox(y|ies)', dest='proxy', action='store_true') -parser.add_argument('--params', help='find params', - dest='find', action='store_true') parser.add_argument('--crawl', help='crawl', dest='recursive', action='store_true') parser.add_argument('--json', help='treat post data as json', @@ -95,7 +93,6 @@ fuzz = args.fuzz update = args.update timeout = args.timeout proxy = args.proxy -find = args.find recursive = args.recursive args_file = args.args_file args_seeds = args.args_seeds @@ -174,7 +171,7 @@ elif not recursive and not args_seeds: if args_file: bruteforcer(target, paramData, payloadList, encoding, headers, delay, timeout) else: - scan(target, paramData, encoding, headers, delay, timeout, skipDOM, find, skip) + scan(target, paramData, encoding, headers, delay, timeout, skipDOM, skip) else: if target: seedList.append(target)