Add files via upload

This commit is contained in:
Somdev Sangwan
2019-04-08 13:48:44 +05:30
committed by GitHub
parent 7684889caa
commit 538ecea109
6 changed files with 61 additions and 48 deletions

View File

@@ -1,3 +1,6 @@
### 3.1.4
### 3.1.3
- Removed browser engine emulation
- Fixed a few bugs

View File

@@ -1,4 +1,4 @@
changes = '''Removed browser engine emulation;Fixed a few bugs;Added a plugin to scan for outdated JS libraries;Improved crawling and DOM scanning''' changes = '''Negligible DOM XSS false positives;x10 faster crawling'''
globalVariables = {} # it holds variables during runtime for collaboration across modules
defaultEditor = 'nano'

View File

@@ -49,4 +49,7 @@ def dom(response):
num += 1
except MemoryError:
pass
return highlighted if (yellow and red) in highlighted:
return highlighted
else:
return []

View File

@@ -1,18 +1,18 @@
import re
import concurrent.futures
from re import findall
from urllib.parse import urlparse
from core.dom import dom
from plugins.retireJs import retireJs from core.log import setup_logger
from core.utils import getUrl, getParams
from core.requester import requester
from core.zetanize import zetanize
from core.log import setup_logger from plugins.retireJs import retireJs
logger = setup_logger(__name__)
def photon(seedUrl, headers, level, threadCount, delay, timeout): def photon(seedUrl, headers, level, threadCount, delay, timeout, skipDOM):
forms = [] # web forms
processed = set() # urls that have been crawled
storage = set() # urls that belong to the target i.e. in-scope
@@ -20,6 +20,7 @@ def photon(seedUrl, headers, level, threadCount, delay, timeout):
host = urlparse(seedUrl).netloc # extract the host e.g. example.com
main_url = schema + '://' + host # join scheme and host to make the root url
storage.add(seedUrl) # add the url to storage
checkedDOMs = []
def rec(target):
processed.add(target)
@@ -38,8 +39,18 @@ def photon(seedUrl, headers, level, threadCount, delay, timeout):
forms.append({0: {'action': url, 'method': 'get', 'inputs': inps}})
response = requester(url, params, headers, True, delay, timeout).text
retireJs(url, response)
if not skipDOM:
highlighted = dom(response)
clean_highlighted = ''.join([re.sub(r'^\d+\s+', '', line) for line in highlighted])
if highlighted and clean_highlighted not in checkedDOMs:
checkedDOMs.append(clean_highlighted)
logger.good('Potentially vulnerable objects found at %s' % url)
logger.red_line(level='good')
for line in highlighted:
logger.no_format(line, level='good')
logger.red_line(level='good')
forms.append(zetanize(response))
matches = findall(r'<[aA].*href=["\']{0,1}(.*?)["\']', response) matches = re.findall(r'<[aA].*href=["\']{0,1}(.*?)["\']', response)
for link in matches: # iterate over the matches
# remove everything after a "#" to deal with in-page anchors
link = link.split('#')[0]

View File

@@ -1,9 +1,9 @@
import copy
import re
import core.config
from core.colors import red, good, green, end
from core.config import xsschecker
from core.dom import dom
from core.filterChecker import filterChecker
from core.generator import generator
from core.htmlParser import htmlParser
@@ -13,16 +13,7 @@ from core.log import setup_logger
logger = setup_logger(__name__)
def crawl(scheme, host, main_url, form, domURL, blindXSS, blindPayload, headers, delay, timeout, skipDOM, encoding): def crawl(scheme, host, main_url, form, blindXSS, blindPayload, headers, delay, timeout, encoding):
if domURL and not skipDOM:
response = requester(domURL, {}, headers, True, delay, timeout).text
highlighted = dom(response)
if highlighted:
logger.good('Potentially vulnerable objects found at %s' % domURL)
logger.red_line(level='good')
for line in highlighted:
logger.no_format(line, level='good')
logger.red_line(level='good')
if form:
for each in form.values():
url = each['action']
@@ -35,6 +26,8 @@ def crawl(scheme, host, main_url, form, domURL, blindXSS, blindPayload, headers,
url = scheme + '://' + host + url
elif re.match(r'\w', url[0]):
url = scheme + '://' + host + '/' + url
if url not in core.config.globalVariables['checkedForms']:
core.config.globalVariables['checkedForms'][url] = []
method = each['method']
GET = True if method == 'get' else False
inputs = each['inputs']
@@ -42,28 +35,30 @@ def crawl(scheme, host, main_url, form, domURL, blindXSS, blindPayload, headers,
for one in inputs:
paramData[one['name']] = one['value']
for paramName in paramData.keys():
paramsCopy = copy.deepcopy(paramData) if paramName not in core.config.globalVariables['checkedForms'][url]:
paramsCopy[paramName] = xsschecker core.config.globalVariables['checkedForms'][url].append(paramName)
response = requester( paramsCopy = copy.deepcopy(paramData)
url, paramsCopy, headers, GET, delay, timeout) paramsCopy[paramName] = xsschecker
parsedResponse = htmlParser(response, encoding) response = requester(
occurences = parsedResponse[0] url, paramsCopy, headers, GET, delay, timeout)
positions = parsedResponse[1] parsedResponse = htmlParser(response, encoding)
efficiencies = filterChecker( occurences = parsedResponse[0]
url, paramsCopy, headers, GET, delay, occurences, timeout, encoding) positions = parsedResponse[1]
vectors = generator(occurences, response.text) efficiencies = filterChecker(
if vectors: url, paramsCopy, headers, GET, delay, occurences, timeout, encoding)
for confidence, vects in vectors.items(): vectors = generator(occurences, response.text)
try: if vectors:
payload = list(vects)[0] for confidence, vects in vectors.items():
logger.vuln('Vulnerable webpage: %s%s%s' % try:
(green, url, end)) payload = list(vects)[0]
logger.vuln('Vector for %s%s%s: %s' % logger.vuln('Vulnerable webpage: %s%s%s' %
(green, paramName, end, payload)) (green, url, end))
break logger.vuln('Vector for %s%s%s: %s' %
except IndexError: (green, paramName, end, payload))
pass break
if blindXSS and blindPayload: except IndexError:
paramsCopy[paramName] = blindPayload pass
requester(url, paramsCopy, headers, if blindXSS and blindPayload:
GET, delay, timeout) paramsCopy[paramName] = blindPayload
requester(url, paramsCopy, headers,
GET, delay, timeout)

View File

@@ -6,7 +6,7 @@ from core.colors import end, red, white, bad, info
# Just a fancy ass banner
print('''%s
\tXSStrike %sv3.1.3 \tXSStrike %sv3.1.4
%s''' % (red, white, end))
try:
@@ -133,6 +133,7 @@ else:
core.config.globalVariables['headers'] = headers
core.config.globalVariables['checkedScripts'] = set()
core.config.globalVariables['checkedForms'] = {}
core.config.globalVariables['definitions'] = json.loads('\n'.join(reader(sys.path[0] + '/db/definitions.json')))
if path:
@@ -181,7 +182,7 @@ else:
host = urlparse(target).netloc
main_url = scheme + '://' + host
crawlingResult = photon(target, headers, level, crawlingResult = photon(target, headers, level,
threadCount, delay, timeout) threadCount, delay, timeout, skipDOM)
forms = crawlingResult[0]
domURLs = list(crawlingResult[1])
difference = abs(len(domURLs) - len(forms))
@@ -192,8 +193,8 @@ else:
for i in range(difference):
domURLs.append(0)
threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=threadCount)
futures = (threadpool.submit(crawl, scheme, host, main_url, form, domURL, futures = (threadpool.submit(crawl, scheme, host, main_url, form,
blindXSS, blindPayload, headers, delay, timeout, skipDOM, encoding) for form, domURL in zip(forms, domURLs)) blindXSS, blindPayload, headers, delay, timeout, encoding) for form, domURL in zip(forms, domURLs))
for i, _ in enumerate(concurrent.futures.as_completed(futures)):
if i + 1 == len(forms) or (i + 1) % threadCount == 0:
logger.info('Progress: %i/%i\r' % (i + 1, len(forms)))