Clearer argument handling, pep8, import order, fewer unused vars (#123)

* Proposal for less redundant argument handling, autopep8 formatting, sorted imports, etc.
* dest labels in sync with the local target vars and safe names (see the sketch after this list)
* only one special case handled before the transfer of values to local vars (the headers prompt)
* some initial comments - there was a request for help on documentation ;-)
* a few one-liners replacing if/else variable setters
* left the simple script style as-is (might be a preference of the author and users)
* Adapted the code for static checks and removed the unused imports sys and requests; many unused variables remain
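
In code, the pattern reads like this (a minimal, trimmed sketch assembled from two options in the diff below; core.config is the project's config module, everything else is standard argparse):

import argparse

import core.config  # project config module supplying the defaults below

parser = argparse.ArgumentParser()
# dest matches the local variable name, and the default comes straight from
# core.config, so no `args.timeout or core.config.timeout` fallback is needed
parser.add_argument('--timeout', help='timeout', dest='timeout',
                    type=int, default=core.config.timeout)
parser.add_argument('-t', '--threads', help='number of threads',
                    dest='threadCount', type=int, default=core.config.threadCount)
args = parser.parse_args()

# the transfer to local variables is now purely mechanical
timeout = args.timeout
threadCount = args.threadCount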
Stefan Hagen
2018-11-15 10:37:38 +01:00
committed by Somdev Sangwan
parent 02938d3822
commit 15bc638708
6 changed files with 115 additions and 126 deletions

.travis.yml

@@ -13,3 +13,4 @@ before_script:
- flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
script:
- python xsstrike.py -u "https://public-firing-range.appspot.com/reflected/index.html" --crawl
- echo "No" > input.txt && python xsstrike.py -u https://public-firing-range.appspot.com/reflected/parameter/body?q=query < input.txt

core/checker.py

@@ -1,11 +1,10 @@
import re
import copy
from fuzzywuzzy import fuzz
from core.encoders import base64
from core.config import xsschecker
from core.requester import requester
from core.utils import replacer, fillHoles
from urllib.parse import quote_plus, unquote
from urllib.parse import unquote
def checker(url, params, headers, GET, delay, payload, positions, timeout, encoding):
checkString = 'st4r7s' + payload + '3nd'

core/filterChecker.py

@@ -1,7 +1,4 @@
from core.utils import replacer
from core.checker import checker
from core.config import xsschecker
from core.requester import requester
def filterChecker(url, params, headers, GET, delay, occurences, timeout, encoding):
positions = {}

core/fuzzer.py

@@ -1,12 +1,11 @@
import copy
import requests
from time import sleep
from random import randint
from core.utils import replacer
from core.requester import requester
from core.config import fuzzes, xsschecker
from urllib.parse import quote_plus, unquote
from core.colors import end, red, white, green, yellow, run, bad, good, info, que
from urllib.parse import unquote
from core.colors import end, red, green, yellow, bad, good, info
def counter(string):
special = '\'"=/:*&)(}{][><'
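
The hunk shows only counter's signature and its table of special characters; the body below is a hypothetical sketch of such a helper (an assumption, not part of the commit): tally how many of those characters a string contains.

def counter(string):
    # Hypothetical body, assumed from the signature and the constant above:
    # count XSS-relevant special characters appearing in the string.
    special = '\'"=/:*&)(}{][><'
    return sum(1 for char in string if char in special)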

core/htmlParser.py

@@ -1,6 +1,5 @@
import re
from core.config import badTags
from core.encoders import base64
from core.config import xsschecker
def htmlParser(response, encoding):

xsstrike.py

@@ -2,7 +2,30 @@
from __future__ import print_function
from core.colors import end, red, white, green, yellow, run, bad, good, info, que
# Let's import whatever we need from standard lib
import argparse
import concurrent.futures
import copy
import re
# ... and from core lib
import core.config
from core.arjun import arjun
from core.checker import checker
from core.colors import bad, end, good, green, info, que, red, run, white
from core.config import blindPayload, minEfficiency, xsschecker
from core.dom import dom
from core.encoders import base64
from core.filterChecker import filterChecker
from core.fuzzer import fuzzer
from core.generator import generator
from core.htmlParser import htmlParser
from core.photon import photon
from core.prompt import prompt
from core.requester import requester
from core.updater import updater
from core.utils import extractHeaders, getParams, getUrl, verboseOutput
from core.wafDetector import wafDetector
# Just a fancy ass banner
print('''%s
@@ -15,91 +38,71 @@ except ImportError: # throws error in python2
print('%s XSStrike isn\'t compatible with python2.\n Use python > 3.4 to run XSStrike.' % bad)
quit()
# Let's import whatever we need
import re
import os
import sys
import copy
import argparse
import requests
import concurrent.futures
import core.config
from core.dom import dom
from core.arjun import arjun
from core.photon import photon
from core.prompt import prompt
from core.fuzzer import fuzzer
from core.updater import updater
from core.checker import checker
from core.encoders import base64
from core.generator import generator
from core.requester import requester
from core.htmlParser import htmlParser
from core.wafDetector import wafDetector
from core.filterChecker import filterChecker
from core.config import xsschecker, minEfficiency, blindPayload
from core.utils import getUrl, getParams, flattenParams, extractHeaders, verboseOutput
# Processing command line arguments
# Processing command line arguments, where dest var names will be mapped to local vars with the same name
parser = argparse.ArgumentParser()
parser.add_argument('-u', '--url', help='url', dest='target')
parser.add_argument('--data', help='post data', dest='data')
parser.add_argument('--data', help='post data', dest='paramData')
parser.add_argument('-e', '--encode', help='encode payloads', dest='encode')
parser.add_argument('--fuzzer', help='fuzzer', dest='fuzz', action='store_true')
parser.add_argument('--update', help='update', dest='update', action='store_true')
parser.add_argument('--timeout', help='timeout', dest='timeout', type=int)
parser.add_argument('--timeout', help='timeout', dest='timeout', type=int, default=core.config.timeout)
parser.add_argument('--proxy', help='use prox(y|ies)', dest='proxy', action='store_true')
parser.add_argument('--params', help='find params', dest='find', action='store_true')
parser.add_argument('--crawl', help='crawl', dest='recursive', action='store_true')
parser.add_argument('-f', '--file', help='load payloads from a file', dest='file')
parser.add_argument('-l', '--level', help='level of crawling', dest='level', type=int)
parser.add_argument('--headers', help='add headers', dest='headers', action='store_true')
parser.add_argument('-t', '--threads', help='number of threads', dest='threads', type=int)
parser.add_argument('-d', '--delay', help='delay between requests', dest='delay', type=int)
parser.add_argument('-f', '--file', help='load payloads from a file', dest='args_file')
parser.add_argument('-l', '--level', help='level of crawling', dest='level', type=int, default=2)
parser.add_argument('--headers', help='add headers', dest='add_headers', action='store_true')
parser.add_argument('-t', '--threads', help='number of threads', dest='threadCount', type=int, default=core.config.threadCount)
parser.add_argument('-d', '--delay', help='delay between requests', dest='delay', type=int, default=core.config.delay)
parser.add_argument('--skip', help='don\'t ask to continue', dest='skip', action='store_true')
parser.add_argument('--skip-dom', help='skip dom checking', dest='skipDOM', action='store_true')
parser.add_argument('-v', '--vectors', help='verbose output', dest='verbose', action='store_true')
parser.add_argument('--blind', help='inject blind XSS payload while crawling', dest='blindXSS', action='store_true')
args = parser.parse_args()
if args.headers:
if args.add_headers:
headers = extractHeaders(prompt())
else:
from core.config import headers
find = args.find
fuzz = args.fuzz
encode = args.encode
# Pull all parameter values of dict from argparse namespace into local variables of name == key
# The following works, but the static checkers are too static ;-) locals().update(vars(args))
target = args.target
paramData = args.data
verbose = args.verbose
paramData = args.paramData
encode = args.encode
fuzz = args.fuzz
update = args.update
timeout = args.timeout
proxy = args.proxy
find = args.find
recursive = args.recursive
args_file = args.args_file
level = args.level
add_headers = args.add_headers
threadCount = args.threadCount
delay = args.delay
skip = args.skip
skipDOM = args.skipDOM
level = args.level or 2
verbose = args.verbose
blindXSS = args.blindXSS
delay = args.delay or core.config.delay
timeout = args.timeout or core.config.timeout
threadCount = args.threads or core.config.threadCount
if args.file:
if args.file == 'default':
if args_file:
if args_file == 'default':
payloadList = core.config.payloads
else:
payloadList = []
with open(args.file, 'r') as f:
with open(args_file, 'r') as f:
for line in f:
payloadList.append(line.strip('\n').encode('utf-8').decode('utf-8'))
payloadList = list(filter(None, payloadList))
encoding = False
if encode:
if encode == 'base64':
encoding = base64
encoding = base64 if encode and encode == 'base64' else False
if not args.proxy:
if not proxy:
core.config.proxies = {}
if args.update: # if the user has supplied --update argument
if update: # if the user has supplied --update argument
updater()
quit() # quitting because files have been changed
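
The locals().update(vars(args)) one-liner dismissed in the comment above is worth a note: at module level it does work, because locals() is globals() there, but flake8/pyflakes cannot see names created dynamically, so every later use of timeout would be flagged as undefined. A minimal sketch of the trade-off (the default value 7 is arbitrary, not the project's):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--timeout', dest='timeout', type=int, default=7)
args = parser.parse_args()

# Dynamic transfer: works at module scope (locals() is globals() there),
# but static checkers cannot prove that `timeout` exists afterwards.
# locals().update(vars(args))

# Explicit transfer: one line per option, trivially checkable.
timeout = args.timeout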
@@ -107,15 +110,11 @@ if not target: # if the user hasn't supplied a url
print('\n' + parser.format_help().lower())
quit()
def singleTarget(target, paramData, verbose, encoding):
if paramData:
GET, POST = False, True
else:
GET, POST = True, False
GET, POST = (False, True) if paramData else (True, False)
# If the user hasn't supplied the root url with http(s), we will handle it
if target.startswith('http'):
target = target
else:
if not target.startswith('http'):
try:
response = requester('https://' + target, {}, headers, GET, delay, timeout)
target = 'https://' + target
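
The four-line if/else setters collapse into a conditional expression with tuple unpacking; a standalone sketch of the pattern (request_flags is just an illustrative wrapper, not a function from the commit):

def request_flags(paramData):
    # POST when post data was supplied, GET otherwise: one line replaces
    # the old four-line if/else block.
    GET, POST = (False, True) if paramData else (True, False)
    return GET, POST

assert request_flags(None) == (True, False)        # no data -> plain GET
assert request_flags({'q': 'x'}) == (False, True)  # data present -> POST

The later GET = True if method == 'get' else False could shrink one step further to GET = (method == 'get').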
@@ -137,7 +136,7 @@ def singleTarget(target, paramData, verbose, encoding):
verboseOutput(url, 'url', verbose)
params = getParams(target, paramData, GET)
verboseOutput(params, 'params', verbose)
if args.find:
if find:
params = arjun(url, GET, headers, delay, timeout)
if not params:
print('%s No parameters to test.' % bad)
@@ -204,7 +203,7 @@ def singleTarget(target, paramData, verbose, encoding):
print('%s Payload: %s' % (good, vect))
print('%s Efficiency: %i' % (info, bestEfficiency))
print('%s Confidence: %i' % (info, confidence))
if not args.skip:
if not skip:
choice = input('%s Would you like to continue scanning? [y/N] ' % que).lower()
if choice != 'y':
quit()
@@ -214,8 +213,9 @@ def singleTarget(target, paramData, verbose, encoding):
print('%s Efficiency: %i' % (info, bestEfficiency))
print('%s Confidence: %i' % (info, confidence))
def multiTargets(scheme, host, main_url, form, domURL, verbose, blindXSS, blindPayload, headers, delay, timeout):
signatures = set()
signatures = set() # TODO(NN) unused
if domURL and not skipDOM:
response = requester(domURL, {}, headers, True, delay, timeout).text
highlighted = dom(response)
@@ -238,10 +238,7 @@ def multiTargets(scheme, host, main_url, form, domURL, verbose, blindXSS, blindP
elif re.match(r'\w', url[0]):
url = scheme + '://' + host + '/' + url
method = each['method']
if method == 'get':
GET = True
else:
GET = False
GET = True if method == 'get' else False
inputs = each['inputs']
paramData = {}
for one in inputs:
@@ -270,10 +267,7 @@ def multiTargets(scheme, host, main_url, form, domURL, verbose, blindXSS, blindP
def bruteforcer(target, paramData, payloadList, verbose, encoding):
if paramData:
GET, POST = False, True
else:
GET, POST = True, False
GET, POST = (False, True) if paramData else (True, False)
host = urlparse(target).netloc # Extracts host out of the url
verboseOutput(host, 'host', verbose)
url = getUrl(target, GET)
@@ -299,8 +293,8 @@ def bruteforcer(target, paramData, payloadList, verbose, encoding):
progress += 1
print ('')
if not args.recursive:
if args.file:
if not recursive:
if args_file:
bruteforcer(target, paramData, payloadList, verbose, encoding)
else:
singleTarget(target, paramData, verbose, encoding)
@@ -325,4 +319,4 @@ else:
for i, _ in enumerate(concurrent.futures.as_completed(futures)):
if i + 1 == len(forms) or (i + 1) % threadCount == 0:
print('%s Progress: %i/%i' % (info, i + 1, len(forms)), end='\r')
print ('')
print()