import copy
import re

import core.config
from core.colors import green, end
from core.config import xsschecker
from core.filterChecker import filterChecker
from core.generator import generator
from core.htmlParser import htmlParser
from core.log import setup_logger
from core.requester import requester

logger = setup_logger(__name__)


def crawl(scheme, host, main_url, form, blindXSS, blindPayload, headers, delay, timeout, encoding):
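    """Fuzz every form parsed from a crawled page for reflected XSS.

    `form` is expected to map form indices to dicts carrying an 'action'
    URL, a 'method' and a list of 'inputs' (name/value pairs), as produced
    by the crawler. When blindXSS is enabled, blindPayload is also sent
    through every tested parameter.
    """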
    if form:
        for each in form.values():
            url = each['action']
            if url:
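                # Normalise the form's action into an absolute URL on the target host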
                if url.startswith(main_url):
                    pass
                elif url.startswith('//') and url[2:].startswith(host):
                    url = scheme + '://' + url[2:]
                elif url.startswith('/'):
                    url = scheme + '://' + host + url
                elif re.match(r'\w', url[0]):
                    url = scheme + '://' + host + '/' + url
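                # globalVariables['checkedForms'] records already-tested form URLs
                # and parameters, so the same form is never fuzzed twice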
                if url not in core.config.globalVariables['checkedForms']:
                    core.config.globalVariables['checkedForms'][url] = []
                method = each['method']
                GET = (method == 'get')
                inputs = each['inputs']
                paramData = {}
                for one in inputs:
                    paramData[one['name']] = one['value']
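                # Fuzz one parameter at a time, keeping the others at their default values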
                for paramName in paramData.keys():
                    if paramName not in core.config.globalVariables['checkedForms'][url]:
                        core.config.globalVariables['checkedForms'][url].append(paramName)
                        paramsCopy = copy.deepcopy(paramData)
                        paramsCopy[paramName] = xsschecker
                        response = requester(
                            url, paramsCopy, headers, GET, delay, timeout)
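                        # Find where the probe string reflects in the response and
                        # how well each reflection context survives the site's filters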
                        parsedResponse = htmlParser(response, encoding)
                        occurences = parsedResponse[0]
                        positions = parsedResponse[1]
                        efficiencies = filterChecker(
                            url, paramsCopy, headers, GET, delay, occurences, timeout, encoding)
                        vectors = generator(occurences, response.text)
                        if vectors:
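                            # Log the first payload from the first non-empty confidence bucket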
                            for confidence, vects in vectors.items():
                                try:
                                    payload = list(vects)[0]
                                    logger.vuln('Vulnerable webpage: %s%s%s' %
                                                (green, url, end))
                                    logger.vuln('Vector for %s%s%s: %s' %
                                                (green, paramName, end, payload))
                                    break
                                except IndexError:
                                    pass
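                        # Optionally deliver the blind XSS payload through the same parameter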
                        if blindXSS and blindPayload:
                            paramsCopy[paramName] = blindPayload
                            requester(url, paramsCopy, headers,
                                      GET, delay, timeout)
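

# Minimal usage sketch (illustrative only, not part of the project): the form
# dict shape below is inferred from how crawl() reads 'action', 'method' and
# 'inputs' above. In the real tool this function is driven by the crawler, and
# core.config.globalVariables is primed by the CLI entry point; helpers such as
# requester may expect more of that shared state than is set here.
if __name__ == '__main__':
    # Prime only the key this module reads; the entry point normally does this.
    core.config.globalVariables.setdefault('checkedForms', {})
    sampleForm = {
        0: {
            'action': '/search',
            'method': 'get',
            'inputs': [{'name': 'q', 'value': ''}],
        }
    }
    crawl('https', 'example.com', 'https://example.com', sampleForm,
          blindXSS=False, blindPayload='', headers={}, delay=0, timeout=10,
          encoding=None)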