v2.1.6 build

This commit is contained in:
s0md3v
2022-09-11 02:57:13 +05:30
parent 01c755d9c2
commit 14da1394a7
8 changed files with 50 additions and 16 deletions

View File

@@ -1,3 +1,8 @@
#### 2.1.6
- Fixed multiple crash-level bugs
- Various improvements to output in multi-target mode
- Export results as they come in multi-target mode
#### 2.1.5
- Fixed header comparison (will fix infinite bruteforce on some targets)
- Fixed catastrophic backtracking in some regexes (arjun used to get stuck)

View File

@@ -1 +1 @@
__version__ = '2.1.5' __version__ = '2.1.6'

View File

@@ -10,7 +10,7 @@ import arjun.core.config as mem
from arjun.core.bruter import bruter from arjun.core.bruter import bruter
from arjun.core.exporter import exporter from arjun.core.exporter import exporter
from arjun.core.requester import requester from arjun.core.requester import requester
from arjun.core.anomaly import define from arjun.core.anomaly import define, compare
from arjun.core.utils import fetch_params, stable_request, random_str, slicer, confirm, populate, reader, nullify, prepare_requests, compatible_path from arjun.core.utils import fetch_params, stable_request, random_str, slicer, confirm, populate, reader, nullify, prepare_requests, compatible_path
from arjun.plugins.heuristic import heuristic from arjun.plugins.heuristic import heuristic
@@ -24,12 +24,12 @@ parser.add_argument('-o', '-oJ', help='Path for json output file.', dest='json_f
parser.add_argument('-oT', help='Path for text output file.', dest='text_file') parser.add_argument('-oT', help='Path for text output file.', dest='text_file')
parser.add_argument('-oB', help='Port for output to Burp Suite Proxy. Default port is 8080.', dest='burp_port', nargs='?', const=8080) parser.add_argument('-oB', help='Port for output to Burp Suite Proxy. Default port is 8080.', dest='burp_port', nargs='?', const=8080)
parser.add_argument('-d', help='Delay between requests in seconds. (default: 0)', dest='delay', type=float, default=0) parser.add_argument('-d', help='Delay between requests in seconds. (default: 0)', dest='delay', type=float, default=0)
parser.add_argument('-t', help='Number of concurrent threads. (default: 2)', dest='threads', type=int, default=2) parser.add_argument('-t', help='Number of concurrent threads. (default: 5)', dest='threads', type=int, default=5)
parser.add_argument('-w', help='Wordlist file path. (default: {arjundir}/db/large.txt)', dest='wordlist', default=arjun_dir+'/db/large.txt') parser.add_argument('-w', help='Wordlist file path. (default: {arjundir}/db/large.txt)', dest='wordlist', default=arjun_dir+'/db/large.txt')
parser.add_argument('-m', help='Request method to use: GET/POST/XML/JSON. (default: GET)', dest='method', default='GET') parser.add_argument('-m', help='Request method to use: GET/POST/XML/JSON. (default: GET)', dest='method', default='GET')
parser.add_argument('-i', help='Import target URLs from file.', dest='import_file', nargs='?', const=True) parser.add_argument('-i', help='Import target URLs from file.', dest='import_file', nargs='?', const=True)
parser.add_argument('-T', help='HTTP request timeout in seconds. (default: 15)', dest='timeout', type=float, default=15) parser.add_argument('-T', help='HTTP request timeout in seconds. (default: 15)', dest='timeout', type=float, default=15)
parser.add_argument('-c', help='Chunk size. The number of parameters to be sent at once', type=int, dest='chunks', default=300) parser.add_argument('-c', help='Chunk size. The number of parameters to be sent at once', type=int, dest='chunks', default=500)
parser.add_argument('-q', help='Quiet mode. No output.', dest='quiet', action='store_true') parser.add_argument('-q', help='Quiet mode. No output.', dest='quiet', action='store_true')
parser.add_argument('--headers', help='Add headers. Separate multiple headers with a new line.', dest='headers', nargs='?', const=True) parser.add_argument('--headers', help='Add headers. Separate multiple headers with a new line.', dest='headers', nargs='?', const=True)
parser.add_argument('--passive', help='Collect parameter names from passive sources like wayback, commoncrawl and otx.', dest='passive', nargs='?', const='-') parser.add_argument('--passive', help='Collect parameter names from passive sources like wayback, commoncrawl and otx.', dest='passive', nargs='?', const='-')
@@ -102,7 +102,7 @@ def narrower(request, factors, param_groups):
return anomalous_params return anomalous_params
def initialize(request, wordlist): def initialize(request, wordlist, single_url=False):
""" """
handles parameter finding process for a single request object handles parameter finding process for a single request object
returns 'skipped' (on error), list on success returns 'skipped' (on error), list on success
@@ -118,27 +118,37 @@ def initialize(request, wordlist):
else: else:
fuzz = random_str(6) fuzz = random_str(6)
response_1 = requester(request, {fuzz: fuzz[::-1]}) response_1 = requester(request, {fuzz: fuzz[::-1]})
if single_url:
print('%s Analysing HTTP response for anomalies' % run) print('%s Analysing HTTP response for anomalies' % run)
fuzz = random_str(6) fuzz = random_str(6)
response_2 = requester(request, {fuzz: fuzz[::-1]}) response_2 = requester(request, {fuzz: fuzz[::-1]})
if type(response_1) == str or type(response_2) == str: if type(response_1) == str or type(response_2) == str:
return 'skipped' return 'skipped'
factors = define(response_1, response_2, fuzz, fuzz[::-1], wordlist) factors = define(response_1, response_2, fuzz, fuzz[::-1], wordlist)
if single_url:
print('%s Analysing HTTP response for potential parameter names' % run) print('%s Analysing HTTP response for potential parameter names' % run)
found = heuristic(response_1.text, wordlist) found = heuristic(response_1.text, wordlist)
if found: if found:
num = len(found) num = len(found)
s = 's' if num > 1 else '' s = 's' if num > 1 else ''
print('%s Heuristic scanner found %i parameter%s: %s' % (good, num, s, ', '.join(found))) print('%s Heuristic scanner found %i parameter%s: %s' % (good, num, s, ', '.join(found)))
if single_url:
print('%s Logicforcing the URL endpoint' % run) print('%s Logicforcing the URL endpoint' % run)
populated = populate(wordlist) populated = populate(wordlist)
param_groups = slicer(populated, int(len(wordlist)/mem.var['chunks'])) param_groups = slicer(populated, int(len(wordlist)/mem.var['chunks']))
prev_chunk_count = len(param_groups)
last_params = [] last_params = []
while True: while True:
param_groups = narrower(request, factors, param_groups) param_groups = narrower(request, factors, param_groups)
if len(param_groups) > prev_chunk_count:
response_3 = requester(request, {fuzz: fuzz[::-1]})
if compare(response_3, factors, [fuzz]) != '':
print('%s Target is misbehaving. Try the --stable switch.' % bad)
return []
if mem.var['kill']: if mem.var['kill']:
return 'skipped' return 'skipped'
param_groups = confirm(param_groups, last_params) param_groups = confirm(param_groups, last_params)
prev_chunk_count = len(param_groups)
if not param_groups: if not param_groups:
break break
confirmed_params = [] confirmed_params = []
@@ -147,7 +157,7 @@ def initialize(request, wordlist):
if reason: if reason:
name = list(param.keys())[0] name = list(param.keys())[0]
confirmed_params.append(name) confirmed_params.append(name)
print('%s name: %s, factor: %s' % (res, name, reason)) print('%s parameter detected: %s, based on: %s' % (res, name, reason))
return confirmed_params return confirmed_params
@@ -169,12 +179,17 @@ def main():
final_result[url]['params'] = these_params final_result[url]['params'] = these_params
final_result[url]['method'] = request['method'] final_result[url]['method'] = request['method']
final_result[url]['headers'] = request['headers'] final_result[url]['headers'] = request['headers']
exporter(final_result)
else:
print('%s No parameters were discovered.' % info)
elif type(request) == list: elif type(request) == list:
# in case of multiple targets # in case of multiple targets
count = 0
for each in request: for each in request:
count += 1
url = each['url'] url = each['url']
mem.var['kill'] = False mem.var['kill'] = False
print('%s Scanning: %s' % (run, url)) print('%s Scanning %d/%d: %s' % (run, count, len(request), url))
these_params = initialize(each, list(wordlist)) these_params = initialize(each, list(wordlist))
if these_params == 'skipped': if these_params == 'skipped':
print('%s Skipped %s due to errors' % (bad, url)) print('%s Skipped %s due to errors' % (bad, url))
@@ -183,12 +198,16 @@ def main():
final_result[url]['params'] = these_params final_result[url]['params'] = these_params
final_result[url]['method'] = each['method'] final_result[url]['method'] = each['method']
final_result[url]['headers'] = each['headers'] final_result[url]['headers'] = each['headers']
print('%s Parameters found: %s' % (good, ', '.join(final_result[url]))) exporter(final_result)
print('%s Parameters found: %s\n' % (good, ', '.join(final_result[url]['params'])))
if not mem.var['json_file']:
final_result = {}
continue
else:
print('%s No parameters were discovered.\n' % info)
except KeyboardInterrupt: except KeyboardInterrupt:
exit() exit()
exporter(final_result)
if __name__ == '__main__': if __name__ == '__main__':
main() main()

View File

@@ -57,6 +57,8 @@ def compare(response, factors, params):
detects anomalies by comparing a HTTP response against a rule list detects anomalies by comparing a HTTP response against a rule list
returns string, list (anomaly, list of parameters that caused it) returns string, list (anomaly, list of parameters that caused it)
""" """
if response == '':
return ('', [])
these_headers = list(response.headers.keys()) these_headers = list(response.headers.keys())
these_headers.sort() these_headers.sort()
if factors['same_code'] and response.status_code != factors['same_code']: if factors['same_code'] and response.status_code != factors['same_code']:
@@ -67,7 +69,7 @@ def compare(response, factors, params):
if factors['same_redirect'] and urlparse(response.headers.get('Location', '')).path != factors['same_redirect']: if factors['same_redirect'] and urlparse(response.headers.get('Location', '')).path != factors['same_redirect']:
return ('redirection', params) return ('redirection', params)
elif factors['same_redirect'] and 'Location' in response.headers: elif factors['same_redirect'] and 'Location' in response.headers:
if urlparse(response.headers.get['Location']).path != factors['same_redirect']: if urlparse(response.headers.get('Location', '')).path != factors['same_redirect']:
return ('redirection', params) return ('redirection', params)
if factors['same_body'] and response.text != factors['same_body']: if factors['same_body'] and response.text != factors['same_body']:
return ('body length', params) return ('body length', params)
@@ -87,6 +89,8 @@ def compare(response, factors, params):
return ('param name reflection', params) return ('param name reflection', params)
if factors['value_missing']: if factors['value_missing']:
for value in params.values(): for value in params.values():
if type(value) != str:
continue
if value in response.text and re.search(r'[\'"\s]%s[\'"\s]' % value, response.text): if value in response.text and re.search(r'[\'"\s]%s[\'"\s]' % value, response.text):
return ('param value reflection', params) return ('param value reflection', params)
return ('', []) return ('', [])

View File

@@ -33,7 +33,7 @@ def text_export(result):
""" """
exports results to a text file, one url per line exports results to a text file, one url per line
""" """
with open(mem.var['text_file'], 'w+', encoding='utf8') as text_file: with open(mem.var['text_file'], 'a+', encoding='utf8') as text_file:
for url, data in result.items(): for url, data in result.items():
clean_url = url.lstrip('/') clean_url = url.lstrip('/')
if data['method'] == 'JSON': if data['method'] == 'JSON':

View File

@@ -15,7 +15,7 @@ def requester(request, payload={}):
central function for making http requests central function for making http requests
returns str on error otherwise response object of requests library returns str on error otherwise response object of requests library
""" """
if 'include' in request and request['include']: if len(request.get('include', '')) != 0:
payload.update(request['include']) payload.update(request['include'])
if mem.var['stable']: if mem.var['stable']:
mem.var['delay'] = random.choice(range(6, 12)) mem.var['delay'] = random.choice(range(6, 12))

View File

@@ -122,6 +122,8 @@ def get_params(include):
if include.startswith('{'): if include.startswith('{'):
try: try:
params = json.loads(str(include).replace('\'', '"')) params = json.loads(str(include).replace('\'', '"'))
if type(params) != dict:
return {}
return params return params
except json.decoder.JSONDecodeError: except json.decoder.JSONDecodeError:
return {} return {}

View File

@@ -3,6 +3,8 @@ import re
from arjun.core.utils import extract_js from arjun.core.utils import extract_js
re_not_junk = re.compile(r'^[A-Za-z0-9_]+$') re_not_junk = re.compile(r'^[A-Za-z0-9_]+$')
def is_not_junk(param): def is_not_junk(param):
return (re_not_junk.match(param) is not None) return (re_not_junk.match(param) is not None)
@@ -11,6 +13,8 @@ re_input_names = re.compile(r'''(?i)<input.+?name=["']?([^"'\s>]+)''')
re_input_ids = re.compile(r'''(?i)<input.+?id=["']?([^"'\s>]+)''') re_input_ids = re.compile(r'''(?i)<input.+?id=["']?([^"'\s>]+)''')
re_empty_vars = re.compile(r'''(?:[;\n]|\bvar|\blet)(\w+)\s*=\s*(?:['"`]{1,2}|true|false|null)''') re_empty_vars = re.compile(r'''(?:[;\n]|\bvar|\blet)(\w+)\s*=\s*(?:['"`]{1,2}|true|false|null)''')
re_map_keys = re.compile(r'''['"](\w+?)['"]\s*:\s*['"`]''') re_map_keys = re.compile(r'''['"](\w+?)['"]\s*:\s*['"`]''')
def heuristic(response, wordlist): def heuristic(response, wordlist):
potential_params = [] potential_params = []