Add files via upload

Somdev Sangwan
2019-04-06 20:45:10 +05:30
committed by GitHub
parent e66cfddecd
commit d7f2a1bbf1
9 changed files with 1822 additions and 57 deletions

core/dom.py

@@ -7,7 +7,7 @@ def dom(response):
     highlighted = []
     sources = r'''document\.(URL|documentURI|URLUnencoded|baseURI|cookie|referrer)|location\.(href|search|hash|pathname)|window\.name|history\.(pushState|replaceState)(local|session)Storage'''
     sinks = r'''eval|evaluate|execCommand|assign|navigate|getResponseHeaderopen|showModalDialog|Function|set(Timeout|Interval|Immediate)|execScript|crypto.generateCRMFRequest|ScriptElement\.(src|text|textContent|innerText)|.*?\.onEventName|document\.(write|writeln)|.*?\.innerHTML|Range\.createContextualFragment|(document|window)\.location'''
-    scripts = re.findall(r'(?i)(?s)<scrip[^>]*(.*?)</script>', response)
+    scripts = re.findall(r'(?i)(?s)<script[^>]*>(.*?)</script>', response)
     for script in scripts:
         script = script.split('\n')
         num = 1
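The one-line change above fixes the script-extraction regex: the old pattern stops short of the opening tag's closing `>`, so its capture group swallows part of the tag itself. A standalone illustration of the difference (not part of the commit):

import re

html = '<script type="text/javascript">var x = location.hash;</script>'

# Old pattern: '<scrip' plus '[^>]*' never consumes the closing '>',
# so the capture group starts inside the opening tag.
print(re.findall(r'(?i)(?s)<scrip[^>]*(.*?)</script>', html))
# ['>var x = location.hash;']

# Fixed pattern: matches the whole opening tag and captures only the body.
print(re.findall(r'(?i)(?s)<script[^>]*>(.*?)</script>', html))
# ['var x = location.hash;']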

core/photon.py

@@ -3,6 +3,7 @@ from re import findall
 from urllib.parse import urlparse
+from plugins.retireJs import retireJs
 from core.utils import getUrl, getParams
 from core.requester import requester
 from core.zetanize import zetanize
@@ -36,6 +37,7 @@ def photon(seedUrl, headers, level, threadCount, delay, timeout):
                     inps.append({'name': name, 'value': value})
             forms.append({0: {'action': url, 'method': 'get', 'inputs': inps}})
         response = requester(url, params, headers, True, delay, timeout).text
+        retireJs(url, response)
         forms.append(zetanize(response))
         matches = findall(r'<[aA].*href=["\']{0,1}(.*?)["\']', response)
         for link in matches:  # iterate over the matches
@@ -53,9 +55,11 @@ def photon(seedUrl, headers, level, threadCount, delay, timeout):
                     storage.add(main_url + '/' + link)
     for x in range(level):
         urls = storage - processed  # urls to crawl = all urls - urls that have been crawled
+        # for url in urls:
+        #     rec(url)
         threadpool = concurrent.futures.ThreadPoolExecutor(
             max_workers=threadCount)
         futures = (threadpool.submit(rec, url) for url in urls)
-        for i, _ in enumerate(concurrent.futures.as_completed(futures)):
+        for i in concurrent.futures.as_completed(futures):
             pass
     return [forms, processed]
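For context, the loop above fans each URL in the crawl frontier out to a thread pool and blocks until the whole level is done. A minimal self-contained sketch of the same pattern, with a dummy rec():

import concurrent.futures

def rec(url):
    print('crawling', url)  # stand-in for XSStrike's real recursive crawler

urls = {'http://example.com/a', 'http://example.com/b'}
threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=2)
futures = (threadpool.submit(rec, url) for url in urls)
for future in concurrent.futures.as_completed(futures):
    pass  # draining as_completed() just waits for every future to finish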

core/requester.py

@@ -5,8 +5,7 @@ from urllib3.exceptions import ProtocolError
 import warnings
 import core.config
-from core.config import globalVariables
-from core.utils import converter
+from core.utils import converter, getVar
 from core.log import setup_logger
 logger = setup_logger(__name__)
@@ -15,9 +14,9 @@ warnings.filterwarnings('ignore') # Disable SSL related warnings
 def requester(url, data, headers, GET, delay, timeout):
-    if core.config.globalVariables['jsonData']:
+    if getVar('jsonData'):
         data = converter(data)
-    elif core.config.globalVariables['path']:
+    elif getVar('path'):
         url = converter(data, url)
         data = []
         GET, POST = True, False
@@ -37,7 +36,7 @@ def requester(url, data, headers, GET, delay, timeout):
     if GET:
         response = requests.get(url, params=data, headers=headers,
                                 timeout=timeout, verify=False, proxies=core.config.proxies)
-    elif core.config.globalVariables['jsonData']:
+    elif getVar('jsonData'):
         response = requests.get(url, json=data, headers=headers,
                                 timeout=timeout, verify=False, proxies=core.config.proxies)
     else:

core/utils.py

@@ -163,7 +163,7 @@ def getParams(url, data, GET):
         if data[:1] == '?':
             data = data[1:]
     elif data:
-        if core.config.globalVariables['jsonData'] or core.config.globalVariables['path']:
+        if getVar('jsonData') or getVar('path'):
             params = data
         else:
             try:
@@ -197,6 +197,51 @@ def writer(obj, path):
 def reader(path):
     with open(path, 'r') as f:
-        result = [line.strip(
+        result = [line.rstrip(
             '\n').encode('utf-8').decode('utf-8') for line in f]
     return result
+
+
+def js_extractor(response):
+    """Extract js files from the response body"""
+    scripts = []
+    matches = re.findall(r'<(?:script|SCRIPT).*?(?:src|SRC)=([^\s>]+)', response)
+    for match in matches:
+        match = match.replace('\'', '').replace('"', '').replace('`', '')
+        scripts.append(match)
+    return scripts
+
+
+def handle_anchor(parent_url, url):
+    if parent_url.count('/') > 2:
+        replacable = re.search(r'/[^/]*?$', parent_url).group()
+        if replacable != '/':
+            parent_url = parent_url.replace(replacable, '')
+    scheme = urlparse(parent_url).scheme
+    if url[:4] == 'http':
+        return url
+    elif url[:2] == '//':
+        return scheme + ':' + url
+    elif url[:1] == '/':
+        return parent_url + url
+    else:
+        if parent_url.endswith('/') or url.startswith('/'):
+            return parent_url + url
+        else:
+            return parent_url + '/' + url
+
+
+def deJSON(data):
+    return data.replace('\\\\', '\\')
+
+
+def getVar(name):
+    return core.config.globalVariables[name]
+
+
+def updateVar(name, data, mode=None):
+    if mode:
+        if mode == 'append':
+            core.config.globalVariables[name].append(data)
+        elif mode == 'add':
+            core.config.globalVariables[name].add(data)
+    else:
+        core.config.globalVariables[name] = data
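The new helpers are small enough to exercise on their own. An illustrative session against the functions added above (outputs shown as comments):

from core.utils import js_extractor, handle_anchor

page = '<script src="/static/jquery-1.6.2.min.js"></script>'
print(js_extractor(page))
# ['/static/jquery-1.6.2.min.js']

# handle_anchor() resolves a script reference against the page that served it.
# Note the quirk: root-relative paths resolve against the parent directory of
# the page, not the host root.
print(handle_anchor('http://example.com/shop/cart', '/static/app.js'))
# http://example.com/shop/static/app.js
print(handle_anchor('http://example.com/shop/cart', '//cdn.example.com/app.js'))
# http://cdn.example.com/app.js

# getVar()/updateVar() are thin wrappers over core.config.globalVariables,
# giving the rest of the codebase one shared, mutable configuration store.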

db/definitions.json  (new file, 1519 lines; diff suppressed because it is too large)
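The suppressed file is XSStrike's bundled copy of the retire.js vulnerability database. Its contents aren't reproduced here, but the shape that the scan()/check() helpers in plugins/retireJs.py (below) expect can be sketched. A minimal, illustrative entry; the regexes and values are stand-ins, not a verbatim excerpt:

example_definitions = {
    "jquery": {
        "extractors": {
            # each extractor type maps to regexes whose first group captures the version
            "uri": [r"jquery-([0-9][0-9.a-z_\-]+)(\.min)?\.js"],
            "filecontent": [r"\* jQuery JavaScript Library v([0-9][0-9.a-z_\-]+)"],
            "hashes": {}  # sha1-of-file -> version
        },
        "vulnerabilities": [
            {
                "below": "1.6.3",
                "severity": "medium",
                "identifiers": {"CVE": ["CVE-2011-4969"]},
                "info": ["https://nvd.nist.gov/vuln/detail/CVE-2011-4969"]
            }
        ]
    }
}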

modes/scan.py

@@ -3,7 +3,6 @@ import re
 from urllib.parse import urlparse, quote, unquote
 from core.arjun import arjun
-from core.browserEngine import browserEngine
 from core.checker import checker
 from core.colors import good, bad, end, info, green, red, que
 import core.config
@@ -33,7 +32,6 @@ def scan(target, paramData, encoding, headers, delay, timeout, skipDOM, find, skip):
     logger.debug('Scan target: {}'.format(target))
     response = requester(target, {}, headers, GET, delay, timeout).text
     if not skipDOM:
         logger.run('Checking for DOM vulnerabilities')
         highlighted = dom(response)
@@ -96,53 +94,36 @@ def scan(target, paramData, encoding, headers, delay, timeout, skipDOM, find, skip):
     progress = 0
     for confidence, vects in vectors.items():
         for vect in vects:
+            if core.config.globalVariables['path']:
+                vect = vect.replace('/', '%2F')
+            loggerVector = vect
             progress += 1
-            loggerVector = vect
             logger.run('Progress: %i/%i\r' % (progress, total))
-            if confidence == 10:
-                if not GET:
-                    vect = unquote(vect)
-                efficiencies = checker(
-                    url, paramsCopy, headers, GET, delay, vect, positions, timeout, encoding)
-                if not efficiencies:
-                    for i in range(len(occurences)):
-                        efficiencies.append(0)
-                bestEfficiency = max(efficiencies)
-                if bestEfficiency == 100 or (vect[0] == '\\' and bestEfficiency >= 95):
-                    logger.red_line()
-                    logger.good('Payload: %s' % loggerVector)
-                    logger.info('Efficiency: %i' % bestEfficiency)
-                    logger.info('Confidence: %i' % confidence)
-                    if not skip:
-                        choice = input(
-                            '%s Would you like to continue scanning? [y/N] ' % que).lower()
-                        if choice != 'y':
-                            quit()
-                elif bestEfficiency > minEfficiency:
-                    logger.red_line()
-                    logger.good('Payload: %s' % loggerVector)
-                    logger.info('Efficiency: %i' % bestEfficiency)
-                    logger.info('Confidence: %i' % confidence)
-            else:
-                if re.search(r'<(a|d3|details)|lt;(a|d3|details)', vect.lower()):
-                    continue
-                vect = unquote(vect)
-                if encoding:
-                    paramsCopy[paramName] = encoding(vect)
-                else:
-                    paramsCopy[paramName] = vect
-                response = requester(url, paramsCopy, headers, GET, delay, timeout).text
-                success = browserEngine(response)
-                if success:
-                    logger.red_line()
-                    logger.good('Payload: %s' % loggerVector)
-                    logger.info('Efficiency: %i' % 100)
-                    logger.info('Confidence: %i' % 10)
-                    if not skip:
-                        choice = input(
-                            '%s Would you like to continue scanning? [y/N] ' % que).lower()
-                        if choice != 'y':
-                            quit()
+            if not GET:
+                vect = unquote(vect)
+            if encoding:
+                paramsCopy[paramName] = encoding(vect)
+            else:
+                paramsCopy[paramName] = vect
+            if not GET:
+                vect = quote(vect)
+            efficiencies = checker(
+                url, paramsCopy, headers, GET, delay, vect, positions, timeout, encoding)
+            if not efficiencies:
+                for i in range(len(occurences)):
+                    efficiencies.append(0)
+            bestEfficiency = max(efficiencies)
+            if bestEfficiency == 100 or (vect[0] == '\\' and bestEfficiency >= 95):
+                logger.red_line()
+                logger.good('Payload: %s' % loggerVector)
+                logger.info('Efficiency: %i' % bestEfficiency)
+                logger.info('Confidence: %i' % confidence)
+                if not skip:
+                    choice = input(
+                        '%s Would you like to continue scanning? [y/N] ' % que).lower()
+                    if choice != 'y':
+                        quit()
+            elif bestEfficiency > minEfficiency:
+                logger.red_line()
+                logger.good('Payload: %s' % loggerVector)
+                logger.info('Efficiency: %i' % bestEfficiency)
+                logger.info('Confidence: %i' % confidence)
     logger.no_format('')
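One detail worth spelling out in the rewritten loop: for POST requests the payload is URL-decoded before being placed in the request body, then a re-quoted copy is handed to checker() so reflections can still be matched. Roughly, with illustrative values:

from urllib.parse import quote, unquote

vect = '%3Cscript%3Ealert%28%29%3C%2Fscript%3E'
body_value = unquote(vect)  # what gets sent: <script>alert()</script>
marker = quote(body_value)  # what checker() receives for comparison
print(body_value)
print(marker)  # %3Cscript%3Ealert%28%29%3C/script%3E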

plugins/__init__.py  (new file, empty)
plugins/retireJs.py  (new file, 210 lines):
import re
import json
import hashlib
from urllib.parse import urlparse

from core.requester import requester
from core.utils import deJSON, js_extractor, handle_anchor, getVar, updateVar
from core.log import setup_logger

logger = setup_logger(__name__)


def is_defined(o):
    return o is not None


def scan(data, extractor, definitions, matcher=None):
    matcher = matcher or _simple_match
    detected = []
    for component in definitions:
        extractors = definitions[component].get(
            "extractors", None).get(
            extractor, None)
        if (not is_defined(extractors)):
            continue
        for i in extractors:
            match = matcher(i, data)
            if (match):
                detected.append({"version": match,
                                 "component": component,
                                 "detection": extractor})
    return detected


def _simple_match(regex, data):
    regex = deJSON(regex)
    match = re.search(regex, data)
    return match.group(1) if match else None
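# e.g. _simple_match(r'jquery-([0-9.]+)(\.min)?\.js', 'jquery-1.6.2.min.js')
# returns '1.6.2'; the version is whatever the first capture group grabs.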
def _replacement_match(regex, data):
    try:
        regex = deJSON(regex)
        group_parts_of_regex = r'^\/(.*[^\\])\/([^\/]+)\/$'
        ar = re.search(group_parts_of_regex, regex)
        search_for_regex = "(" + ar.group(1) + ")"
        match = re.search(search_for_regex, data)
        ver = None
        if (match):
            ver = re.sub(ar.group(1), ar.group(2), match.group(0))
            return ver
        return None
    except:
        return None
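# _replacement_match() serves the "filecontentreplace" extractors, which are
# stored as '/pattern/replacement/' strings: the pattern is searched for in
# the data and the replacement is applied to the match to derive the version.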
def _scanhash(hash, definitions):
    for component in definitions:
        hashes = definitions[component].get("extractors", None).get("hashes", None)
        if (not is_defined(hashes)):
            continue
        for i in hashes:
            if (i == hash):
                return [{"version": hashes[i],
                         "component": component,
                         "detection": 'hash'}]
    return []


def check(results, definitions):
    for r in results:
        result = r
        if (not is_defined(definitions[result.get("component", None)])):
            continue
        vulns = definitions[
            result.get(
                "component",
                None)].get(
            "vulnerabilities",
            None)
        for i in range(len(vulns)):
            if (not _is_at_or_above(result.get("version", None),
                                    vulns[i].get("below", None))):
                if (is_defined(vulns[i].get("atOrAbove", None)) and not _is_at_or_above(
                        result.get("version", None), vulns[i].get("atOrAbove", None))):
                    continue
                vulnerability = {"info": vulns[i].get("info", None)}
                if (vulns[i].get("severity", None)):
                    vulnerability["severity"] = vulns[i].get("severity", None)
                if (vulns[i].get("identifiers", None)):
                    vulnerability["identifiers"] = vulns[
                        i].get("identifiers", None)
                result["vulnerabilities"] = result.get(
                    "vulnerabilities", None) or []
                result["vulnerabilities"].append(vulnerability)
    return results


def unique(ar):
    return list(set(ar))


def _is_at_or_above(version1, version2):
    # print "[",version1,",", version2,"]"
    v1 = re.split(r'[.-]', version1)
    v2 = re.split(r'[.-]', version2)
    l = len(v1) if len(v1) > len(v2) else len(v2)
    for i in range(l):
        v1_c = _to_comparable(v1[i] if len(v1) > i else None)
        v2_c = _to_comparable(v2[i] if len(v2) > i else None)
        # print v1_c, "vs", v2_c
        if (not isinstance(v1_c, type(v2_c))):
            return isinstance(v1_c, int)
        if (v1_c > v2_c):
            return True
        if (v1_c < v2_c):
            return False
    return True
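# Worked examples: _is_at_or_above('1.11.0', '1.9.1') is True (11 > 9 at the
# second segment) and _is_at_or_above('1.8.3', '1.9.1') is False. Versions are
# split on '.' and '-'; when segment types differ (say '0' vs 'beta'), the
# left-hand version wins only if its segment is the numeric one.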
def _to_comparable(n):
    if (not is_defined(n)):
        return 0
    if (re.search(r'^[0-9]+$', n)):
        return int(str(n), 10)
    return n


def _replace_version(jsRepoJsonAsText):
    return re.sub(r'[.0-9]*', '[0-9][0-9.a-z_\-]+', jsRepoJsonAsText)


def is_vulnerable(results):
    for r in results:
        if ('vulnerabilities' in r):
            # print r
            return True
    return False


def scan_uri(uri, definitions):
    result = scan(uri, 'uri', definitions)
    return check(result, definitions)


def scan_filename(fileName, definitions):
    result = scan(fileName, 'filename', definitions)
    return check(result, definitions)


def scan_file_content(content, definitions):
    result = scan(content, 'filecontent', definitions)
    if (len(result) == 0):
        result = scan(content, 'filecontentreplace', definitions, _replacement_match)
    if (len(result) == 0):
        result = _scanhash(
            hashlib.sha1(
                content.encode('utf8')).hexdigest(),
            definitions)
    return check(result, definitions)
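# Detection falls back in stages: plain 'filecontent' regexes first, then
# 'filecontentreplace' rules, then a SHA-1 of the whole file looked up
# against known release hashes.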
def main_scanner(uri, response):
    definitions = getVar('definitions')
    uri_scan_result = scan_uri(uri, definitions)
    filecontent = response
    filecontent_scan_result = scan_file_content(filecontent, definitions)
    uri_scan_result.extend(filecontent_scan_result)
    result = {}
    if uri_scan_result:
        result['component'] = uri_scan_result[0]['component']
        result['version'] = uri_scan_result[0]['version']
        result['vulnerabilities'] = []
        vulnerabilities = set()
        for i in uri_scan_result:
            k = set()
            try:
                for j in i['vulnerabilities']:
                    vulnerabilities.add(str(j))
            except KeyError:
                pass
        for vulnerability in vulnerabilities:
            result['vulnerabilities'].append(json.loads(vulnerability.replace('\'', '"')))
    return result


def retireJs(url, response):
    scripts = js_extractor(response)
    for script in scripts:
        if script not in getVar('checkedScripts'):
            updateVar('checkedScripts', script, 'add')
            uri = handle_anchor(url, script)
            response = requester(uri, '', getVar('headers'), True, getVar('delay'), getVar('timeout')).text
            result = main_scanner(uri, response)
            if result:
                logger.red_line()
                print(json.dumps(result, indent=4))
                logger.red_line()
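Taken together: retireJs() is the single entry point the crawler calls (see the core/photon.py hunk above). It pulls script URLs out of the page, fetches each one only once (the checkedScripts set dedupes across the whole crawl), runs the URI, file-content and hash detectors over it, and pretty-prints any hit. The output for a vulnerable library would look roughly like this (illustrative values, not captured output):

{
    "component": "jquery",
    "version": "1.6.2",
    "vulnerabilities": [
        {
            "severity": "medium",
            "identifiers": {"CVE": ["CVE-2011-4969"]},
            "info": ["https://nvd.nist.gov/vuln/detail/CVE-2011-4969"]
        }
    ]
}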

xsstrike.py

@@ -25,6 +25,8 @@ except ImportError: # throws error in python2
     quit()

 # Let's import whatever we need from standard lib
+import sys
+import json
 import argparse

 # ... and configurations core lib
@@ -129,6 +131,10 @@ elif type(args.add_headers) == str:
 else:
     from core.config import headers

+core.config.globalVariables['headers'] = headers
+core.config.globalVariables['checkedScripts'] = set()
+core.config.globalVariables['definitions'] = json.loads('\n'.join(reader(sys.path[0] + '/db/definitions.json')))
+
 if path:
     paramData = converter(target, target)
 elif jsonData:
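A side note on the definitions-loading idiom above, since it looks roundabout: reader() (added to core/utils.py in this same commit) strips the trailing newline from each line, so joining on '\n' reconstructs the JSON text before parsing. Outside the codebase, the plain-file equivalent would simply be:

import json
import sys

with open(sys.path[0] + '/db/definitions.json') as f:
    definitions = json.load(f)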