3.1.4 (Faster crawling & negligible DOM XSS false positives)

- Negligible DOM XSS false positives
- x10 faster crawling, by:
    - removing the additional per-URL request previously used to detect DOM XSS
    - skipping repeated testing of the same parameter
  (both speed-ups are sketched below, after the relevant file diffs)
Somdev Sangwan, 2019-04-08 13:56:10 +05:30 (committed by GitHub)
6 changed files with 64 additions and 48 deletions
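The first speed-up moves the DOM check into the crawler itself: photon already holds each page's response, so dom() can run on it directly instead of the scanner re-requesting every crawled URL afterwards. A minimal sketch of the idea, with requester and dom_scan as illustrative stand-ins for core.requester and core.dom, not the project's actual functions:

def requester(url):
    # Stand-in for core.requester: pretend to fetch the page over the network.
    print('GET %s' % url)
    return '<script>document.write(location.hash)</script>'

def dom_scan(response):
    # Stand-in for core.dom.dom: flag a response that wires a source to a sink.
    return ['document.write(location.hash)'] if 'document.write' in response else []

# One request per URL: the crawler's own response feeds the DOM check, where
# 3.1.3 issued a second request per URL later just to run the check.
for url in ['http://example.com/', 'http://example.com/about']:
    response = requester(url)
    for line in dom_scan(response):
        print('Potentially vulnerable object at %s: %s' % (url, line))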

CHANGELOG.md

@@ -1,3 +1,9 @@
+### 3.1.4
+- Negligible DOM XSS false positives
+- x10 Faster crawling by
+    - Removing additional request for detecting DOM XSS
+    - Skipping testing of a parameter multiple times
+
 ### 3.1.3
 - Removed browser engine emulation
 - Fixed a few bugs

core/config.py

@@ -1,4 +1,4 @@
-changes = '''Removed browser engine emulation;Fixed a few bugs;Added a plugin to scan for outdated JS libraries;Improved crawling and DOM scanning'''
+changes = '''Negligible DOM XSS false positives;x10 faster crawling'''
 globalVariables = {} # it holds variables during runtime for collaboration across modules
 defaultEditor = 'nano'
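globalVariables works as shared mutable state because Python caches modules: every `import core.config` returns the same module object, so every importer reads and mutates one dict. A self-contained illustration of the pattern; the fake module registered below exists only for this demo:

import sys
import types

# Simulate core/config.py: register a module in the import cache so that
# two later imports resolve to the same object.
config = types.ModuleType('config')
config.globalVariables = {}
sys.modules['config'] = config

import config as module_a  # e.g. xsstrike.py writes runtime state...
module_a.globalVariables['checkedForms'] = {'http://example.com/search': ['q']}

import config as module_b  # ...and e.g. modes/crawl.py reads the same dict
print(module_b.globalVariables['checkedForms'])
print(module_a.globalVariables is module_b.globalVariables)  # True: one shared dict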

core/dom.py

@@ -49,4 +49,7 @@ def dom(response):
                 num += 1
         except MemoryError:
             pass
-    return highlighted
+    if (yellow and red) in highlighted:
+        return highlighted
+    else:
+        return []
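The new ending makes dom() return its findings only when the scan actually flagged something, instead of always returning the list; in XSStrike's output yellow apparently marks sources and red marks sinks. Note that `yellow and red` evaluates to just `red` (both are non-empty ANSI strings) and `in` on a list tests element equality, so the shipped guard is narrower than it may look. The apparent intent, restated with explicit flags over toy source/sink patterns (the real pattern lists are far longer):

import re

sources = re.compile(r'location\.(hash|search)|document\.(URL|cookie|referrer)')
sinks = re.compile(r'eval|innerHTML|document\.write')

def dom_sketch(script):
    # Paraphrase of dom()'s guard: collect numbered lines that hit a source or
    # a sink, but report nothing unless both kinds appeared at least once.
    highlighted, source_found, sink_found = [], False, False
    for num, line in enumerate(script.splitlines(), 1):
        hit = False
        if sources.search(line):
            source_found = hit = True
        if sinks.search(line):
            sink_found = hit = True
        if hit:
            highlighted.append('%-3i %s' % (num, line.strip()))
    return highlighted if (source_found and sink_found) else []

print(dom_sketch('var x = location.hash;\ndocument.write(x);'))  # two findings
print(dom_sketch('var x = location.hash;'))  # []: a source alone is likely noise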

core/photon.py

@@ -1,18 +1,18 @@
+import re
 import concurrent.futures
-from re import findall
 from urllib.parse import urlparse
-from plugins.retireJs import retireJs
+from core.dom import dom
+from core.log import setup_logger
 from core.utils import getUrl, getParams
 from core.requester import requester
 from core.zetanize import zetanize
-from core.log import setup_logger
+from plugins.retireJs import retireJs
 logger = setup_logger(__name__)

-def photon(seedUrl, headers, level, threadCount, delay, timeout):
+def photon(seedUrl, headers, level, threadCount, delay, timeout, skipDOM):
     forms = [] # web forms
     processed = set() # urls that have been crawled
     storage = set() # urls that belong to the target i.e. in-scope
@@ -20,6 +20,7 @@ def photon(seedUrl, headers, level, threadCount, delay, timeout):
     host = urlparse(seedUrl).netloc # extract the host e.g. example.com
     main_url = schema + '://' + host # join scheme and host to make the root url
     storage.add(seedUrl) # add the url to storage
+    checkedDOMs = []

     def rec(target):
         processed.add(target)
@@ -38,8 +39,18 @@ def photon(seedUrl, headers, level, threadCount, delay, timeout):
             forms.append({0: {'action': url, 'method': 'get', 'inputs': inps}})
         response = requester(url, params, headers, True, delay, timeout).text
         retireJs(url, response)
+        if not skipDOM:
+            highlighted = dom(response)
+            clean_highlighted = ''.join([re.sub(r'^\d+\s+', '', line) for line in highlighted])
+            if highlighted and clean_highlighted not in checkedDOMs:
+                checkedDOMs.append(clean_highlighted)
+                logger.good('Potentially vulnerable objects found at %s' % url)
+                logger.red_line(level='good')
+                for line in highlighted:
+                    logger.no_format(line, level='good')
+                logger.red_line(level='good')
         forms.append(zetanize(response))
-        matches = findall(r'<[aA].*href=["\']{0,1}(.*?)["\']', response)
+        matches = re.findall(r'<[aA].*href=["\']{0,1}(.*?)["\']', response)
         for link in matches:  # iterate over the matches
             # remove everything after a "#" to deal with in-page anchors
             link = link.split('#')[0]
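checkedDOMs de-duplicates DOM findings across the crawl: each highlighted line begins with the line number dom() prepends, so the numbers are stripped before comparison and the same vulnerable code is reported only once, even when it sits at a different position on another URL. A standalone sketch of that bookkeeping, with print standing in for the logger:

import re

checkedDOMs = []  # fingerprints of findings already reported this crawl

def report_once(url, highlighted):
    # Strip the "NN  " line-number prefix so identical code at different
    # positions still produces the same fingerprint.
    clean = ''.join(re.sub(r'^\d+\s+', '', line) for line in highlighted)
    if highlighted and clean not in checkedDOMs:
        checkedDOMs.append(clean)
        print('Potentially vulnerable objects found at %s' % url)
        for line in highlighted:
            print(line)

report_once('http://example.com/a',
            ['3   var x = location.hash', '7   document.write(x)'])   # reported
report_once('http://example.com/b',
            ['12  var x = location.hash', '16  document.write(x)'])   # suppressed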

modes/crawl.py

@@ -1,9 +1,9 @@
 import copy
 import re
+import core.config
 from core.colors import red, good, green, end
 from core.config import xsschecker
-from core.dom import dom
 from core.filterChecker import filterChecker
 from core.generator import generator
 from core.htmlParser import htmlParser
@@ -13,16 +13,7 @@ from core.log import setup_logger
 logger = setup_logger(__name__)

-def crawl(scheme, host, main_url, form, domURL, blindXSS, blindPayload, headers, delay, timeout, skipDOM, encoding):
-    if domURL and not skipDOM:
-        response = requester(domURL, {}, headers, True, delay, timeout).text
-        highlighted = dom(response)
-        if highlighted:
-            logger.good('Potentially vulnerable objects found at %s' % domURL)
-            logger.red_line(level='good')
-            for line in highlighted:
-                logger.no_format(line, level='good')
-            logger.red_line(level='good')
+def crawl(scheme, host, main_url, form, blindXSS, blindPayload, headers, delay, timeout, encoding):
     if form:
         for each in form.values():
             url = each['action']
@@ -35,6 +26,8 @@ def crawl(scheme, host, main_url, form, domURL, blindXSS, blindPayload, headers,
                     url = scheme + '://' + host + url
                 elif re.match(r'\w', url[0]):
                     url = scheme + '://' + host + '/' + url
+                if url not in core.config.globalVariables['checkedForms']:
+                    core.config.globalVariables['checkedForms'][url] = []
                 method = each['method']
                 GET = True if method == 'get' else False
                 inputs = each['inputs']
@@ -42,6 +35,8 @@ def crawl(scheme, host, main_url, form, domURL, blindXSS, blindPayload, headers,
                 for one in inputs:
                     paramData[one['name']] = one['value']
                 for paramName in paramData.keys():
-                    paramsCopy = copy.deepcopy(paramData)
-                    paramsCopy[paramName] = xsschecker
-                    response = requester(
+                    if paramName not in core.config.globalVariables['checkedForms'][url]:
+                        core.config.globalVariables['checkedForms'][url].append(paramName)
+                        paramsCopy = copy.deepcopy(paramData)
+                        paramsCopy[paramName] = xsschecker
+                        response = requester(
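The two hunks above implement the "skip testing a parameter multiple times" item: a shared registry maps each resolved form action URL to the parameter names already fuzzed, so a form embedded on many crawled pages is attacked once per parameter rather than once per page. The guard in isolation, with local stand-ins for the shared dict and the xsschecker marker (the marker value here is an assumed placeholder):

import copy

checkedForms = {}  # stand-in for core.config.globalVariables['checkedForms']

def fuzz_form(url, paramData):
    if url not in checkedForms:
        checkedForms[url] = []
    for paramName in paramData.keys():
        if paramName not in checkedForms[url]:
            checkedForms[url].append(paramName)
            paramsCopy = copy.deepcopy(paramData)
            paramsCopy[paramName] = 'v3dm0s'  # assumed stand-in for xsschecker
            print('testing %s -> %s' % (url, paramsCopy))

fuzz_form('http://example.com/search', {'q': ''})   # tested
fuzz_form('http://example.com/search', {'q': ''})   # skipped: q already covered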

xsstrike.py

@@ -6,7 +6,7 @@ from core.colors import end, red, white, bad, info
 # Just a fancy ass banner
 print('''%s
-\tXSStrike %sv3.1.3
+\tXSStrike %sv3.1.4
 %s''' % (red, white, end))

 try:
@@ -133,6 +133,7 @@ else:
 core.config.globalVariables['headers'] = headers
 core.config.globalVariables['checkedScripts'] = set()
+core.config.globalVariables['checkedForms'] = {}
 core.config.globalVariables['definitions'] = json.loads('\n'.join(reader(sys.path[0] + '/db/definitions.json')))

 if path:
@@ -181,7 +182,7 @@ else:
         host = urlparse(target).netloc
         main_url = scheme + '://' + host
         crawlingResult = photon(target, headers, level,
-                                threadCount, delay, timeout)
+                                threadCount, delay, timeout, skipDOM)
         forms = crawlingResult[0]
         domURLs = list(crawlingResult[1])
         difference = abs(len(domURLs) - len(forms))
@@ -192,8 +193,8 @@ else:
         for i in range(difference):
             domURLs.append(0)
         threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=threadCount)
-        futures = (threadpool.submit(crawl, scheme, host, main_url, form, domURL,
-                                     blindXSS, blindPayload, headers, delay, timeout, skipDOM, encoding) for form, domURL in zip(forms, domURLs))
+        futures = (threadpool.submit(crawl, scheme, host, main_url, form,
+                                     blindXSS, blindPayload, headers, delay, timeout, encoding) for form, domURL in zip(forms, domURLs))
         for i, _ in enumerate(concurrent.futures.as_completed(futures)):
             if i + 1 == len(forms) or (i + 1) % threadCount == 0:
                 logger.info('Progress: %i/%i\r' % (i + 1, len(forms)))
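One detail of the scheduler above: photon returns forms and DOM-flagged URLs as lists of different lengths, and zip() truncates to the shorter iterable, so the shorter list is padded with 0 placeholders first; crawl() no longer receives domURL after this commit, but the padding keeps every form scheduled. A compact illustration, assuming a padding branch symmetric to the domURLs.append(0) loop shown in the hunk:

forms = ['form1', 'form2', 'form3']
domURLs = ['http://example.com/a']  # fewer DOM-flagged URLs than forms

# Pad the shorter list so zip() doesn't silently drop work items.
difference = abs(len(domURLs) - len(forms))
if len(domURLs) > len(forms):
    forms.extend([0] * difference)
else:
    domURLs.extend([0] * difference)

for form, domURL in zip(forms, domURLs):
    print(form, domURL)  # all three forms survive; the 0s are mere placeholders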