#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Corsy — CLI driver for the CORS misconfiguration scanner.

Parses options, builds the target URL list (from -u/-i or stdin),
fans the active tests out over a thread pool and reports findings.
"""

# Standard library
import argparse
import json
import sys

# Project modules
from core.tests import active_tests
# NOTE: the original imported create_url_list twice; the duplicate is removed.
from core.utils import host, prompt, format_result, extractHeaders, create_url_list, create_stdin_list
from core.colors import bad, end, red, run, good, grey, green, white, yellow
# Startup banner: coloured program name and version tag.
# Same output as the original triple-quoted literal (leading and
# trailing newline included).
print('\n%sCORSY %s{%sv1.0-beta%s}%s\n' % (green, white, grey, white, end))
try:
    # Both of these exist only on Python 3 (>= 3.2 / 3.0 respectively);
    # an ImportError means the interpreter is too old to run corsy.
    import concurrent.futures
    from urllib.parse import urlparse
except ImportError:
    print(' %s corsy needs Python > 3.4 to run.' % bad)
    # sys.exit(1) instead of quit(): quit() is an interactive helper that
    # may be absent (python -S) and exits with status 0 even on failure.
    sys.exit(1)
# Command-line interface. Each entry: (flag, keyword arguments) — fed to
# add_argument in order, so generated help output matches the original.
parser = argparse.ArgumentParser()
for _flag, _spec in (
    ('-u', {'help': 'target url', 'dest': 'target'}),
    ('-o', {'help': 'json output file', 'dest': 'json_file'}),
    ('-i', {'help': 'input file urls/subdomains', 'dest': 'inp_file'}),
    ('-t', {'help': 'thread count', 'dest': 'threads', 'type': int, 'default': 2}),
    ('-d', {'help': 'request delay', 'dest': 'delay', 'type': float, 'default': 0}),
    ('-q', {'help': "don't print help tips", 'dest': 'quiet', 'action': 'store_true'}),
    # bare --headers stores const=True (prompt later); --headers "..." stores the string
    ('--headers', {'help': 'add headers', 'dest': 'header_dict', 'nargs': '?', 'const': True}),
    ('-v', {'help': 'verbose', 'dest': 'verbose', 'type': str, 'default': False}),
):
    parser.add_argument(_flag, **_spec)
args = parser.parse_args()
# Promote the parsed CLI options to the module-level names used below.
(delay, quiet, target, threads,
 inp_file, json_file, header_dict, verbose) = (
    args.delay, args.quiet, args.target, args.threads,
    args.inp_file, args.json_file, args.header_dict, args.verbose,
)
# -v takes a string; any non-empty value enables verbose output.
if verbose:
    # Fixed typo in the user-facing message ("verbos" -> "verbose").
    print('verbose is enabled')
# Resolve the request headers:
#   --headers with no value  -> const=True  -> prompt the user interactively
#   --headers "Name: value"  -> parse the supplied string
#   flag omitted             -> browser-like default header set
# isinstance() replaces the type(x) == bool/str comparisons (same behavior,
# idiomatic type check).
if isinstance(header_dict, bool):
    header_dict = extractHeaders(prompt())
elif isinstance(header_dict, str):
    header_dict = extractHeaders(header_dict)
else:
    header_dict = {
        'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:70.0) Gecko/20100101 Firefox/70.0',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
        'Accept-Language': 'en-US,en;q=0.5',
        'Accept-Encoding': 'gzip',
        'DNT': '1',
        'Connection': 'close',
    }
# Build the target list. When stdin is a pipe (not a TTY), accept URLs
# piped from other tools such as httprobe; otherwise use -u / -i input.
if sys.stdin.isatty():
    urls = create_url_list(target, inp_file)
else:
    urls = create_stdin_list(target, sys.stdin)
def cors(target, header_dict, delay):
    """Run the active CORS test battery against a single URL.

    The URL is rebuilt as scheme://netloc/path — query string and
    fragment are discarded before testing — then handed to
    core.tests.active_tests together with the host root.
    """
    root = host(target)
    parsed = urlparse(target)
    rebuilt = '%s://%s%s' % (parsed.scheme, parsed.netloc, parsed.path)
    return active_tests(rebuilt, root, parsed.scheme, header_dict, delay)
if urls:
    if len(urls) > 1:
        # Rough estimate: ~1.75 s per target.
        print(' %s Estimated scan time: %i secs' % (run, round(len(urls) * 1.75)))
    results = []
    # Context manager joins the worker threads on exit — the original
    # executor was never shut down, leaking threads on an exception.
    with concurrent.futures.ThreadPoolExecutor(max_workers=threads) as threadpool:
        futures = (threadpool.submit(cors, url, header_dict, delay) for url in urls)
        for each in concurrent.futures.as_completed(futures):
            result = each.result()
            results.append(result)
            if result:
                # result maps a URL to its finding details.
                for i in result:
                    finding = result[i]  # hoist the repeated dict lookup
                    print(' %s %s' % (good, i))
                    print(' %s-%s Class: %s' % (yellow, end, finding['class']))
                    if not quiet:
                        # Verbose tips, suppressed by -q.
                        print(' %s-%s Description: %s' % (yellow, end, finding['description']))
                        print(' %s-%s Severity: %s' % (yellow, end, finding['severity']))
                        print(' %s-%s Exploitation: %s' % (yellow, end, finding['exploitation']))
                    print(' %s-%s ACAO Header: %s' % (yellow, end, finding['acao header']))
                    print(' %s-%s ACAC Header: %s\n' % (yellow, end, finding['acac header']))
    results = format_result(results)
    if results:
        # -o: persist the findings as pretty-printed JSON.
        if json_file:
            with open(json_file, 'w+') as file:
                json.dump(results, file, indent=4)
    else:
        print(' %s No misconfigurations found.' % bad)
else:
    print(' %s No valid URLs to test.' % bad)