Logging functionality (#193)

* Add files via upload

* Add files via upload

* Logging functionality (Resolves #146)

* Created customized logger and setup file

* Start replacing prints

* Add a custom StreamHandler that allows '\r' as the line terminator and update more prints (see the logging sketch after the commit metadata)

* Remove setup.py

* Add logger functionality to write red separator lines and to log records without formatting (helper sketch below)

* Allow setting the logging level when logging without formatting, and use the debug level instead of verboseOutput

* Replace utils logger function calls

* Fixes

* Import missing info color

* Move the xsstrike.py imports so the loggers are initialized properly, and add a logger method that dumps debug data as JSON

* Minor fix
Somdev Sangwan
2019-01-21 04:57:55 +05:30
committed by GitHub
parent 7907db26be
commit 98c6b347b4
15 changed files with 386 additions and 146 deletions
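The diff below pulls setup_logger from the new core.log module and calls custom helpers such as logger.run(), logger.good(), logger.red_line(), logger.no_format() and logger.debug_json(). core/log.py itself is not shown in this file's diff, so the following is only a minimal sketch of how such a module could be built on the standard logging library; the level numbers, the handler class name (CarriageReturnStreamHandler) and the format string are assumptions, not the commit's actual implementation.

import logging
import sys

# Assumed numeric values for the custom levels; only the method names
# (run/good) are confirmed by the diff below.
RUN_LEVEL = 22
GOOD_LEVEL = 25
logging.addLevelName(RUN_LEVEL, 'RUN')
logging.addLevelName(GOOD_LEVEL, 'GOOD')


class CarriageReturnStreamHandler(logging.StreamHandler):
    """Let a record whose message ends with a carriage return terminate with
    it, so the next record overwrites the line in place (progress output)."""

    def emit(self, record):
        if isinstance(record.msg, str) and record.msg.endswith('\r'):
            record.msg = record.msg.rstrip('\r')  # move the '\r' into the terminator
            self.terminator = '\r'
        else:
            self.terminator = '\n'
        super().emit(record)


def _run(self, msg, *args, **kwargs):
    if self.isEnabledFor(RUN_LEVEL):
        self._log(RUN_LEVEL, msg, args, **kwargs)


def _good(self, msg, *args, **kwargs):
    if self.isEnabledFor(GOOD_LEVEL):
        self._log(GOOD_LEVEL, msg, args, **kwargs)


logging.Logger.run = _run
logging.Logger.good = _good


def setup_logger(name):
    """Return a module-level logger, mirroring the per-file
    logger = setup_logger(__name__) calls added in this commit."""
    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)
    if not logger.handlers:  # avoid stacking handlers on repeated imports
        handler = CarriageReturnStreamHandler(sys.stdout)
        handler.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
        logger.addHandler(handler)
    return logger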
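Likewise, a rough sketch of the unformatted-output helpers referenced in the commit message: red separator lines, records logged without formatting, and JSON debug dumps. Only the call signatures are taken from the diff; the bodies and defaults are guesses.

import json
import logging

from core.colors import red, end  # colour codes already used throughout XSStrike


def no_format(self, msg, level='info'):
    # Forward the bare message at the level named by `level` ('info', 'run',
    # 'good', ...), relying on the custom level names registered in the setup
    # sketch. A real implementation would presumably also bypass the handler's
    # formatter; that part is omitted here.
    self.log(logging.getLevelName(level.upper()), msg)


def red_line(self, level='info', amount=60):
    # Emit a red separator line as an unformatted record.
    self.no_format(red + ('-' * amount) + end, level=level)


def debug_json(self, msg, data):
    # Pretty-print a dict (e.g. the scan parameters) at DEBUG level.
    self.debug('{} {}'.format(msg, json.dumps(data, indent=2, sort_keys=True)))


logging.Logger.no_format = no_format
logging.Logger.red_line = red_line
logging.Logger.debug_json = debug_json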


@@ -5,7 +5,7 @@ from urllib.parse import urlparse, quote, unquote
from core.arjun import arjun
from core.browserEngine import browserEngine
from core.checker import checker
-from core.colors import good, bad, end, info, green, run, red, que
+from core.colors import good, bad, end, info, green, red, que
import core.config
from core.config import xsschecker, minEfficiency
from core.dom import dom
@@ -13,10 +13,14 @@ from core.filterChecker import filterChecker
from core.generator import generator
from core.htmlParser import htmlParser
from core.requester import requester
-from core.utils import getUrl, getParams, verboseOutput
+from core.utils import getUrl, getParams
from core.wafDetector import wafDetector
+from core.log import setup_logger
-def scan(target, paramData, verbose, encoding, headers, delay, timeout, skipDOM, find, skip):
+logger = setup_logger(__name__)
+def scan(target, paramData, encoding, headers, delay, timeout, skipDOM, find, skip):
GET, POST = (False, True) if paramData else (True, False)
# If the user hasn't supplied the root url with http(s), we will handle it
if not target.startswith('http'):
@@ -26,37 +30,38 @@ def scan(target, paramData, verbose, encoding, headers, delay, timeout, skipDOM,
target = 'https://' + target
except:
target = 'http://' + target
+logger.debug('Scan target: {}'.format(target))
response = requester(target, {}, headers, GET, delay, timeout).text
if not skipDOM:
-print('%s Checking for DOM vulnerabilities' % run)
+logger.run('Checking for DOM vulnerabilities')
highlighted = dom(response)
if highlighted:
-print('%s Potentially vulnerable objects found' % good)
-print(red + ('-' * 60) + end)
+logger.good('Potentially vulnerable objects found')
+logger.red_line(level='good')
for line in highlighted:
-print(line)
-print(red + ('-' * 60) + end)
+logger.no_format(line, level='good')
+logger.red_line(level='good')
host = urlparse(target).netloc # Extracts host out of the url
-verboseOutput(host, 'host', verbose)
+logger.debug('Host to scan: {}'.format(host))
url = getUrl(target, GET)
-verboseOutput(url, 'url', verbose)
+logger.debug('Url to scan: {}'.format(url))
params = getParams(target, paramData, GET)
-verboseOutput(params, 'params', verbose)
+logger.debug_json('Scan parameters:', params)
if find:
params = arjun(url, GET, headers, delay, timeout)
if not params:
-print('%s No parameters to test.' % bad)
+logger.error('No parameters to test.')
quit()
WAF = wafDetector(
url, {list(params.keys())[0]: xsschecker}, headers, GET, delay, timeout)
if WAF:
-print('%s WAF detected: %s%s%s' % (bad, green, WAF, end))
+logger.error('WAF detected: %s%s%s' % (green, WAF, end))
else:
-print('%s WAF Status: %sOffline%s' % (good, green, end))
+logger.good('WAF Status: %sOffline%s' % (green, end))
for paramName in params.keys():
paramsCopy = copy.deepcopy(params)
-print('%s Testing parameter: %s' % (info, paramName))
+logger.info('Testing parameter: %s' % paramName)
if encoding:
paramsCopy[paramName] = encoding(xsschecker)
else:
@@ -64,36 +69,36 @@ def scan(target, paramData, verbose, encoding, headers, delay, timeout, skipDOM,
response = requester(url, paramsCopy, headers, GET, delay, timeout)
parsedResponse = htmlParser(response, encoding)
occurences = parsedResponse[0]
-verboseOutput(occurences, 'occurences', verbose)
+logger.debug('Scan occurences: {}'.format(occurences))
positions = parsedResponse[1]
-verboseOutput(positions, 'positions', verbose)
+logger.debug('Scan positions: {}'.format(positions))
if not occurences:
-print('%s No reflection found' % bad)
+logger.error('No reflection found')
continue
else:
-print('%s Reflections found: %s' % (info, len(occurences)))
-print('%s Analysing reflections' % run)
+logger.info('Reflections found: %i' % len(occurences))
+logger.run('Analysing reflections')
efficiencies = filterChecker(
url, paramsCopy, headers, GET, delay, occurences, timeout, encoding)
-verboseOutput(efficiencies, 'efficiencies', verbose)
-print('%s Generating payloads' % run)
+logger.debug('Scan efficiencies: {}'.format(efficiencies))
+logger.run('Generating payloads')
vectors = generator(occurences, response.text)
-verboseOutput(vectors, 'vectors', verbose)
total = 0
for v in vectors.values():
total += len(v)
if total == 0:
-print('%s No vectors were crafted' % bad)
+logger.error('No vectors were crafted.')
continue
-print('%s Payloads generated: %i' % (info, total))
+logger.info('Payloads generated: %i' % total)
progress = 0
for confidence, vects in vectors.items():
for vect in vects:
if core.config.globalVariables['path']:
vect = vect.replace('/', '%2F')
-printVector = vect
+loggerVector = vect
progress += 1
-print ('%s Progress: %i/%i' % (run, progress, total), end='\r')
+logger.run('Progress: %i/%i\r' % (progress, total))
if confidence == 10:
if not GET:
vect = unquote(vect)
@@ -104,20 +109,20 @@ def scan(target, paramData, verbose, encoding, headers, delay, timeout, skipDOM,
efficiencies.append(0)
bestEfficiency = max(efficiencies)
if bestEfficiency == 100 or (vect[0] == '\\' and bestEfficiency >= 95):
-print(('%s-%s' % (red, end)) * 60)
-print('%s Payload: %s' % (good, printVector))
-print('%s Efficiency: %i' % (info, bestEfficiency))
-print('%s Confidence: %i' % (info, confidence))
+logger.red_line()
+logger.good('Payload: %s' % loggerVector)
+logger.info('Efficiency: %i' % bestEfficiency)
+logger.info('Confidence: %i' % confidence)
if not skip:
choice = input(
'%s Would you like to continue scanning? [y/N] ' % que).lower()
if choice != 'y':
quit()
elif bestEfficiency > minEfficiency:
-print(('%s-%s' % (red, end)) * 60)
-print('%s Payload: %s' % (good, printVector))
-print('%s Efficiency: %i' % (info, bestEfficiency))
-print('%s Confidence: %i' % (info, confidence))
+logger.red_line()
+logger.good('Payload: %s' % loggerVector)
+logger.info('Efficiency: %i' % bestEfficiency)
+logger.info('Confidence: %i' % confidence)
else:
if re.search(r'<(a|d3|details)|lt;(a|d3|details)', vect.lower()):
continue
@@ -129,13 +134,13 @@ def scan(target, paramData, verbose, encoding, headers, delay, timeout, skipDOM,
response = requester(url, paramsCopy, headers, GET, delay, timeout).text
success = browserEngine(response)
if success:
-print(('%s-%s' % (red, end)) * 60)
-print('%s Payload: %s' % (good, printVector))
-print('%s Efficiency: %i' % (info, 100))
-print('%s Confidence: %i' % (info, 10))
+logger.red_line()
+logger.good('Payload: %s' % loggerVector)
+logger.info('Efficiency: %i' % 100)
+logger.info('Confidence: %i' % 10)
if not skip:
choice = input(
'%s Would you like to continue scanning? [y/N] ' % que).lower()
if choice != 'y':
quit()
-print ('')
+logger.no_format('')
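For reference, the reworked progress output relies on the carriage-return handling described in the commit message: a run-level record that ends in '\r' keeps overwriting the same console line, and the trailing no_format('') record finally moves the cursor to a fresh line. A tiny usage sketch (the payload count is illustrative):

from core.log import setup_logger

logger = setup_logger(__name__)
total = 250  # illustrative payload count
for progress in range(1, total + 1):
    logger.run('Progress: %i/%i\r' % (progress, total))  # overwrites the same line
logger.no_format('')  # empty record to move past the progress line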