Verbose switch, Fixes #71, Fixes #93

Author: Somdev Sangwan
Date: 2018-11-13 12:43:47 +05:30
Committed by: GitHub
Parent: 35a11487f5
Commit: bbf9201356

3 changed files with 48 additions and 33 deletions

Changed file 1 of 3

@@ -19,7 +19,7 @@ def photon(seedUrl, headers, level, threadCount, delay, timeout):
     def rec(target):
         processed.add(target)
         print ('%s Parsing %s' % (run, target))
-        url = getUrl(target, '', True)
+        url = getUrl(target, True)
         params = getParams(target, '', True)
         if '=' in target:
             inps = []
@@ -47,4 +47,4 @@ def photon(seedUrl, headers, level, threadCount, delay, timeout):
         futures = (threadpool.submit(rec, url) for url in urls)
         for i, _ in enumerate(concurrent.futures.as_completed(futures)):
             pass
     return [forms, processed]

Changed file 2 of 3

@@ -1,6 +1,19 @@
 import re
+import json
 import random
 from core.config import xsschecker
+from core.colors import info, red, end
+
+def verboseOutput(data, name, verbose):
+    if verbose:
+        print ('%s %s %s%s%s' % (info, name, red, ('-' * 50), end))
+        if str(type(data)) == '<class \'dict\'>':
+            try:
+                print (json.dumps(data, indent=2))
+            except TypeError:
+                print (data)
+        print (data)
+        print ('%s%s%s' % (red, ('-' * 60), end))
 
 def closest(number, numbers):
     difference = [abs(list(numbers.values())[0]), {}]
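
A note on the try/except TypeError in the new verboseOutput helper above: json.dumps raises TypeError when a dict holds values it cannot serialize, so the helper falls back to a plain print in that case. A minimal illustration of that mechanism (the example dicts are made up, not taken from XSStrike):

import json

printable = {'q': 'query'}
print(json.dumps(printable, indent=2))      # dict with plain values: pretty-printed as JSON

unprintable = {'resp': object()}            # e.g. a dict holding a non-serializable object
try:
    print(json.dumps(unprintable, indent=2))
except TypeError:                           # json.dumps rejects non-serializable values
    print(unprintable)                      # fall back to Python's own formatting, as verboseOutput does

Every verboseOutput call added in this commit is gated on the verbose flag, so output is unchanged unless the new switch is passed.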
@@ -55,7 +68,7 @@ def replacer(dic, toReplace, replaceWith):
             dic[key] = replaceWith
     return dic
 
-def getUrl(url, data, GET):
+def getUrl(url, GET):
     if GET:
         return url.split('?')[0]
     else:
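
The getUrl signature above loses the unused data parameter, matching the updated call site in photon's rec() earlier in this commit. Only the GET branch is visible in the diff; a minimal sketch of that branch, with the non-GET path left out:

def getUrl(url, GET):
    if GET:
        return url.split('?')[0]    # for GET requests, keep only the part before the query string
    # non-GET branch not shown in this hunk

print(getUrl('http://example.com/search?q=query', True))   # -> http://example.com/search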

Changed file 3 of 3

@@ -22,7 +22,6 @@ import sys
 import copy
 import argparse
 import requests
-import webbrowser
 import concurrent.futures
 
 import core.config
@@ -39,7 +38,7 @@ from core.htmlParser import htmlParser
 from core.wafDetector import wafDetector
 from core.filterChecker import filterChecker
 from core.config import xsschecker, minEfficiency
-from core.utils import getUrl, getParams, flattenParams, extractHeaders
+from core.utils import getUrl, getParams, flattenParams, extractHeaders, verboseOutput
 
 # Processing command line arguments
 parser = argparse.ArgumentParser()
@@ -55,9 +54,9 @@ parser.add_argument('-l', '--level', help='level of crawling', dest='level', typ
 parser.add_argument('--headers', help='add headers', dest='headers', action='store_true')
 parser.add_argument('-t', '--threads', help='number of threads', dest='threads', type=int)
 parser.add_argument('-d', '--delay', help='delay between requests', dest='delay', type=int)
-parser.add_argument('--skip-poc', help='skip poc generation', dest='skipPOC', action='store_true')
-parser.add_argument('--skip-dom', help='skip dom checking', dest='skipDOM', action='store_true')
 parser.add_argument('--skip', help='don\'t ask to continue', dest='skip', action='store_true')
+parser.add_argument('--skip-dom', help='skip dom checking', dest='skipDOM', action='store_true')
+parser.add_argument('-v', '--vectors', help='verbose output', dest='verbose', action='store_true')
 args = parser.parse_args()
 
 if args.headers:
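
The new -v switch is a plain store_true flag, so args.verbose becomes a boolean that gets threaded through the scan functions further down. A minimal sketch of the parsing behaviour, reusing the add_argument line from the hunk above (the rest of XSStrike's parser is omitted):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-v', '--vectors', help='verbose output', dest='verbose', action='store_true')

print(parser.parse_args([]).verbose)       # False -> verboseOutput() calls stay silent
print(parser.parse_args(['-v']).verbose)   # True  -> debug dumps are printed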
@@ -69,8 +68,8 @@ find = args.find
 fuzz = args.fuzz
 target = args.target
 paramData = args.data
+verbose = args.verbose
 skipDOM = args.skipDOM
-skipPOC = args.skipPOC
 level = args.level or 2
 delay = args.delay or core.config.delay
 timeout = args.timeout or core.config.timeout
@@ -93,7 +92,7 @@ if not target: # if the user hasn't supplied a url
     print('\n' + parser.format_help().lower())
     quit()
 
-def singleTarget(target, paramData):
+def singleTarget(target, paramData, verbose):
     if paramData:
         GET, POST = False, True
     else:
@@ -107,24 +106,22 @@ def singleTarget(target, paramData):
             target = 'https://' + target
         except:
             target = 'http://' + target
-    try:
-        response = requester(target, {}, headers, GET, delay, timeout).text
-        if not skipDOM:
-            print ('%s Checking for DOM vulnerabilities' % run)
-            highlighted = dom(response)
-            if highlighted:
-                print ('%s Potentially vulnerable objects found' % good)
-                print (red + ('-' * 60) + end)
-                for line in highlighted:
-                    print (line)
-                print (red + ('-' * 60) + end)
-    except Exception as e:
-        print ('%s Unable to connect to the target' % bad)
-        print ('%s Error: %s' % (bad, e))
-        quit()
+    response = requester(target, {}, headers, GET, delay, timeout).text
+    if not skipDOM:
+        print ('%s Checking for DOM vulnerabilities' % run)
+        highlighted = dom(response)
+        if highlighted:
+            print ('%s Potentially vulnerable objects found' % good)
+            print (red + ('-' * 60) + end)
+            for line in highlighted:
+                print (line)
+            print (red + ('-' * 60) + end)
     host = urlparse(target).netloc  # Extracts host out of the url
-    url = getUrl(target, paramData, GET)
+    verboseOutput(host, 'host', verbose)
+    url = getUrl(target, GET)
+    verboseOutput(url, 'url', verbose)
     params = getParams(target, paramData, GET)
+    verboseOutput(params, 'params', verbose)
     if args.find:
         params = arjun(url, GET, headers, delay, timeout)
         if not params:
@@ -150,7 +147,9 @@ def singleTarget(target, paramData):
         response = requester(url, paramsCopy, headers, GET, delay, timeout)
         parsedResponse = htmlParser(response)
         occurences = parsedResponse[0]
+        verboseOutput(occurences, 'occurences', verbose)
         positions = parsedResponse[1]
+        verboseOutput(positions, 'positions', verbose)
         if not occurences:
             print ('%s No reflection found' % bad)
             continue
@@ -158,8 +157,10 @@ def singleTarget(target, paramData):
         print ('%s Reflections found: %s' % (info, len(occurences)))
         print ('%s Analysing reflections' % run)
         efficiencies = filterChecker(url, paramsCopy, headers, GET, delay, occurences, timeout)
+        verboseOutput(efficiencies, 'efficiencies', verbose)
         print ('%s Generating payloads' % run)
         vectors = generator(occurences, response.text)
+        verboseOutput(vectors, 'vectors', verbose)
         total = 0
         for v in vectors.values():
             total += len(v)
@@ -185,9 +186,6 @@ def singleTarget(target, paramData):
                 print ('%s Efficiency: %i' % (info, bestEfficiency))
                 print ('%s Confidence: %i' % (info, confidence))
                 if not args.skip:
-                    if GET and not skipPOC:
-                        flatParams = flattenParams(paramName, paramsCopy, vect)
-                        webbrowser.open(url + flatParams)
                     choice = input('%s Would you like to continue scanning? [y/N] ' % que).lower()
                     if choice != 'y':
                         quit()
@@ -197,7 +195,7 @@ def singleTarget(target, paramData):
             print ('%s Efficiency: %i' % (info, bestEfficiency))
             print ('%s Confidence: %i' % (info, confidence))
 
-def multiTargets(scheme, host, main_url, form, domURL):
+def multiTargets(scheme, host, main_url, form, domURL, verbose):
     signatures = set()
     if domURL and not skipDOM:
         response = requests.get(domURL).text
@@ -249,14 +247,17 @@ def multiTargets(scheme, host, main_url, form, domURL):
             pass
 
-def brute(target, paramData, payloadList):
+def brute(target, paramData, payloadList, verbose):
     if paramData:
         GET, POST = False, True
     else:
         GET, POST = True, False
     host = urlparse(target).netloc  # Extracts host out of the url
+    verboseOutput(host, 'host', verbose)
     url = getUrl(target, paramData, GET)
+    verboseOutput(url, 'url', verbose)
     params = getParams(target, paramData, GET)
+    verboseOutput(params, 'params', verbose)
     for paramName in params.keys():
         paramsCopy = copy.deepcopy(params)
         for payload in payloadList:
@@ -267,12 +268,13 @@ def brute(target, paramData, payloadList):
 if not args.recursive:
     if args.file:
-        brute(target, paramData, payloadList)
+        brute(target, paramData, payloadList, verbose)
     else:
-        singleTarget(target, paramData)
+        singleTarget(target, paramData, verbose)
 else:
     print ('%s Crawling the target' % run)
     scheme = urlparse(target).scheme
+    verboseOutput(scheme, 'scheme', verbose)
     host = urlparse(target).netloc
     main_url = scheme + '://' + host
     crawlingResult = photon(target, headers, level, threadCount, delay, timeout)
@@ -286,7 +288,7 @@ else:
     for i in range(difference):
         domURLs.append(0)
     threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=threadCount)
-    futures = (threadpool.submit(multiTargets, scheme, host, main_url, form, domURL) for form, domURL in zip(forms, domURLs))
+    futures = (threadpool.submit(multiTargets, scheme, host, main_url, form, domURL, verbose) for form, domURL in zip(forms, domURLs))
     for i, _ in enumerate(concurrent.futures.as_completed(futures)):
         if i + 1 == len(forms) or (i + 1) % threadCount == 0:
             print('%s Progress: %i/%i' % (info, i + 1, len(forms)), end='\r')
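
Appending verbose to the threadpool.submit call works because ThreadPoolExecutor.submit(fn, *args) forwards any extra positional arguments to the callable, here multiTargets. A small self-contained sketch of that pattern (the worker function and values are illustrative, not from XSStrike):

import concurrent.futures

def worker(form, domURL, verbose):
    if verbose:
        print('scanning', form, domURL)
    return form

threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=2)
futures = [threadpool.submit(worker, form, domURL, True) for form, domURL in zip(['form1', 'form2'], ['url1', 0])]
for i, _ in enumerate(concurrent.futures.as_completed(futures)):
    print('progress: %i/%i' % (i + 1, len(futures)))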