Add files via upload
CHANGELOG.md
@@ -1,3 +1,6 @@
+### 3.1.4
+- Negligible DOM XSS false positives
+- x10 faster crawling
 ### 3.1.3
 - Removed browser engine emulation
 - Fixed a few bugs
core/config.py
@@ -1,4 +1,4 @@
-changes = '''Removed browser engine emulation;Fixed a few bugs;Added a plugin to scan for outdated JS libraries;Improved crawling and DOM scanning'''
+changes = '''Negligible DOM XSS false positives;x10 faster crawling'''
 globalVariables = {} # it holds variables during runtime for collaboration across modules

 defaultEditor = 'nano'
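The changes literal doubles as the in-app release-note text, with entries packed into one ';'-delimited string. A minimal sketch of how such a string expands back into the bullets listed in the changelog above; how xsstrike.py actually renders it is an assumption here, not shown in this diff:

# Hypothetical rendering of the ';'-delimited release notes; the actual
# display logic in XSStrike's updater may differ.
changes = '''Negligible DOM XSS false positives;x10 faster crawling'''
for entry in changes.split(';'):
    print('- ' + entry)
# prints:
# - Negligible DOM XSS false positives
# - x10 faster crawling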
core/dom.py
@@ -49,4 +49,7 @@ def dom(response):
                 num += 1
     except MemoryError:
         pass
-    return highlighted
+    if (yellow and red) in highlighted:
+        return highlighted
+    else:
+        return []
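The new return guard lines up with the "Negligible DOM XSS false positives" release note: dom() now only reports its highlights when the colour check passes. One Python subtlety is worth spelling out: `and` returns its second operand when the first is truthy, so with the non-empty colour strings from core.colors the expression `(yellow and red) in highlighted` reduces to `red in highlighted`, a whole-element membership test against the list of highlighted lines. A minimal sketch, assuming the usual ANSI codes:

# Minimal sketch, assuming yellow and red are the non-empty ANSI colour
# strings defined in core.colors.
yellow, red = '\033[93m', '\033[91m'

print(yellow and red)  # '\x1b[91m': 'and' yields its second operand here
highlighted = ['45  document.write(location.hash)']
print((yellow and red) in highlighted)  # False: membership compares whole lines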
core/photon.py
@@ -1,18 +1,18 @@
+import re
 import concurrent.futures
-from re import findall
 from urllib.parse import urlparse

-from plugins.retireJs import retireJs
+from core.dom import dom
+from core.log import setup_logger
 from core.utils import getUrl, getParams
 from core.requester import requester
 from core.zetanize import zetanize
-from core.log import setup_logger
+from plugins.retireJs import retireJs

 logger = setup_logger(__name__)


-def photon(seedUrl, headers, level, threadCount, delay, timeout):
+def photon(seedUrl, headers, level, threadCount, delay, timeout, skipDOM):
     forms = [] # web forms
     processed = set() # urls that have been crawled
     storage = set() # urls that belong to the target i.e. in-scope
     schema = urlparse(seedUrl).scheme # extract the scheme e.g. http or https
@@ -20,6 +20,7 @@ def photon(seedUrl, headers, level, threadCount, delay, timeout):
     host = urlparse(seedUrl).netloc # extract the host e.g. example.com
     main_url = schema + '://' + host # join scheme and host to make the root url
     storage.add(seedUrl) # add the url to storage
+    checkedDOMs = []

     def rec(target):
         processed.add(target)
@@ -38,8 +39,18 @@ def photon(seedUrl, headers, level, threadCount, delay, timeout):
             forms.append({0: {'action': url, 'method': 'get', 'inputs': inps}})
         response = requester(url, params, headers, True, delay, timeout).text
         retireJs(url, response)
+        if not skipDOM:
+            highlighted = dom(response)
+            clean_highlighted = ''.join([re.sub(r'^\d+\s+', '', line) for line in highlighted])
+            if highlighted and clean_highlighted not in checkedDOMs:
+                checkedDOMs.append(clean_highlighted)
+                logger.good('Potentially vulnerable objects found at %s' % url)
+                logger.red_line(level='good')
+                for line in highlighted:
+                    logger.no_format(line, level='good')
+                logger.red_line(level='good')
         forms.append(zetanize(response))
-        matches = findall(r'<[aA].*href=["\']{0,1}(.*?)["\']', response)
+        matches = re.findall(r'<[aA].*href=["\']{0,1}(.*?)["\']', response)
         for link in matches: # iterate over the matches
             # remove everything after a "#" to deal with in-page anchors
             link = link.split('#')[0]
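With 3.1.4 the DOM XSS check happens inline while crawling: photon() now takes skipDOM and, unless it is set, runs dom() over every crawled response. The checkedDOMs list keeps one entry per distinct finding so the same snippet is not reported for every URL that embeds it. Since dom() apparently prefixes each highlighted line with its line number (the num counter in core/dom.py), the re.sub strips that prefix before comparing. A standalone sketch of that dedup logic, with names borrowed from the diff:

import re

checkedDOMs = []  # mirrors the list photon() keeps per scan

def already_reported(highlighted):
    # Strip the "NN  " line-number prefix dom() puts on each highlighted
    # line, so identical code at different offsets compares equal.
    clean = ''.join(re.sub(r'^\d+\s+', '', line) for line in highlighted)
    if clean in checkedDOMs:
        return True
    checkedDOMs.append(clean)
    return False

print(already_reported(['12  document.write(location.hash)']))  # False: first sighting
print(already_reported(['97  document.write(location.hash)']))  # True: duplicate code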
modes/crawl.py
@@ -1,9 +1,9 @@
 import copy
 import re

+import core.config
 from core.colors import red, good, green, end
 from core.config import xsschecker
-from core.dom import dom
 from core.filterChecker import filterChecker
 from core.generator import generator
 from core.htmlParser import htmlParser
@@ -13,16 +13,7 @@ from core.log import setup_logger
 logger = setup_logger(__name__)


-def crawl(scheme, host, main_url, form, domURL, blindXSS, blindPayload, headers, delay, timeout, skipDOM, encoding):
-    if domURL and not skipDOM:
-        response = requester(domURL, {}, headers, True, delay, timeout).text
-        highlighted = dom(response)
-        if highlighted:
-            logger.good('Potentially vulnerable objects found at %s' % domURL)
-            logger.red_line(level='good')
-            for line in highlighted:
-                logger.no_format(line, level='good')
-            logger.red_line(level='good')
+def crawl(scheme, host, main_url, form, blindXSS, blindPayload, headers, delay, timeout, encoding):
     if form:
         for each in form.values():
             url = each['action']
@@ -35,6 +26,8 @@ def crawl(scheme, host, main_url, form, domURL, blindXSS, blindPayload, headers,
             url = scheme + '://' + host + url
         elif re.match(r'\w', url[0]):
             url = scheme + '://' + host + '/' + url
+        if url not in core.config.globalVariables['checkedForms']:
+            core.config.globalVariables['checkedForms'][url] = []
         method = each['method']
         GET = True if method == 'get' else False
         inputs = each['inputs']
@@ -42,6 +35,8 @@ def crawl(scheme, host, main_url, form, domURL, blindXSS, blindPayload, headers,
         for one in inputs:
             paramData[one['name']] = one['value']
         for paramName in paramData.keys():
-            paramsCopy = copy.deepcopy(paramData)
-            paramsCopy[paramName] = xsschecker
-            response = requester(
+            if paramName not in core.config.globalVariables['checkedForms'][url]:
+                core.config.globalVariables['checkedForms'][url].append(paramName)
+                paramsCopy = copy.deepcopy(paramData)
+                paramsCopy[paramName] = xsschecker
+                response = requester(
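crawl() loses the per-URL DOM check (now handled in photon()) and gains process-wide form dedup: core.config.globalVariables['checkedForms'], initialised in xsstrike.py below, maps each form action URL to the parameter names that have already been fuzzed, so a form reachable from many crawled pages is tested once per parameter. The bookkeeping, reduced to a standalone sketch:

# Standalone sketch of the checkedForms dedup; in XSStrike the dict lives
# in core.config.globalVariables and is shared by every crawl() worker.
checkedForms = {}

def should_fuzz(url, param):
    if url not in checkedForms:
        checkedForms[url] = []
    if param in checkedForms[url]:
        return False  # already tested this parameter of this form
    checkedForms[url].append(param)
    return True

print(should_fuzz('http://example.com/search', 'q'))  # True
print(should_fuzz('http://example.com/search', 'q'))  # False: duplicate skipped

Since crawl() runs inside a thread pool, this shared dict is mutated concurrently; the pattern apparently relies on CPython treating individual dict and list operations as atomic.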
xsstrike.py
@@ -6,7 +6,7 @@ from core.colors import end, red, white, bad, info

 # Just a fancy ass banner
 print('''%s
-\tXSStrike %sv3.1.3
+\tXSStrike %sv3.1.4
 %s''' % (red, white, end))

 try:
@@ -133,6 +133,7 @@ else:

 core.config.globalVariables['headers'] = headers
 core.config.globalVariables['checkedScripts'] = set()
+core.config.globalVariables['checkedForms'] = {}
 core.config.globalVariables['definitions'] = json.loads('\n'.join(reader(sys.path[0] + '/db/definitions.json')))

 if path:
@@ -181,7 +182,7 @@ else:
         host = urlparse(target).netloc
         main_url = scheme + '://' + host
         crawlingResult = photon(target, headers, level,
-                                threadCount, delay, timeout)
+                                threadCount, delay, timeout, skipDOM)
         forms = crawlingResult[0]
         domURLs = list(crawlingResult[1])
         difference = abs(len(domURLs) - len(forms))
@@ -192,8 +193,8 @@ else:
             for i in range(difference):
                 domURLs.append(0)
         threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=threadCount)
-        futures = (threadpool.submit(crawl, scheme, host, main_url, form, domURL,
-                                     blindXSS, blindPayload, headers, delay, timeout, skipDOM, encoding) for form, domURL in zip(forms, domURLs))
+        futures = (threadpool.submit(crawl, scheme, host, main_url, form,
+                                     blindXSS, blindPayload, headers, delay, timeout, encoding) for form, domURL in zip(forms, domURLs))
         for i, _ in enumerate(concurrent.futures.as_completed(futures)):
             if i + 1 == len(forms) or (i + 1) % threadCount == 0:
                 logger.info('Progress: %i/%i\r' % (i + 1, len(forms)))
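Here skipDOM is rerouted: it now feeds photon() rather than crawl(), matching the signature changes above. Note that the submit call still zips forms with domURLs even though domURL is no longer an argument; after the padding loop the zip merely pairs each form with a slot of equal length. The dispatch-and-progress pattern itself, reduced to a runnable sketch with a stub standing in for crawl():

import concurrent.futures

def crawl_stub(form):
    return form  # stand-in for XSStrike's crawl()

forms = ['login', 'search', 'comment']
threadCount = 2
threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=threadCount)
futures = (threadpool.submit(crawl_stub, form) for form in forms)
# as_completed() yields futures as they finish; progress prints every
# threadCount completions and once more for the final task.
for i, _ in enumerate(concurrent.futures.as_completed(futures)):
    if i + 1 == len(forms) or (i + 1) % threadCount == 0:
        print('Progress: %i/%i' % (i + 1, len(forms)))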