## 2.2.0 build
**Changelog:**

```diff
@@ -1,3 +1,9 @@
+#### 2.2.0
+- Ability to detect parameters that respond to a certain value e.g. "?debug=yes"
+- Added "required parameter" detection
+- Heuristic can now extract words out of json/text responses
+- Fixed -oB option description
+
 #### 2.1.6
 - Fixed multiple crash-level bugs
 - Various improvements to output in multi-target mode
```
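The first bullet refers to the new `special.json` probes added further down in this commit: parameter/value pairs such as `debug=yes` that only change the response when the value is "right". A minimal sketch of the idea, not Arjun's actual implementation (the `requests` library and the target URL are assumptions here; Arjun's real comparison is factor-based rather than a plain string diff):

```python
import requests

# Hypothetical target URL, for illustration only.
URL = 'http://example.com/endpoint'

def responds_to_value(url, param, value):
    """Return True if sending param=value changes the response body,
    i.e. the parameter reacts to that specific value."""
    baseline = requests.get(url).text
    probed = requests.get(url, params={param: value}).text
    return probed != baseline

print(responds_to_value(URL, 'debug', 'yes'))
```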
**README credits:**

```diff
@@ -75,3 +75,4 @@ Optionally, you can use the `--help` argument to explore Arjun on your own.
 
 ##### Credits
 The parameter names wordlist is created by extracting top parameter names from [CommonCrawl](http://commoncrawl.org) dataset and merging best words from [SecLists](https://github.com/danielmiessler/SecLists) and [param-miner](https://github.com/PortSwigger/param-miner) wordlists into that.
+`special.json` wordlist is taken from [data-payloads](https://github.com/yehgdotnet/data-payloads).
```
**Version string:**

```diff
@@ -1 +1 @@
-__version__ = '2.1.6'
+__version__ = '2.2.0'
```
**CLI and scan driver:**

```diff
@@ -4,6 +4,7 @@
 from arjun.core.colors import green, end, info, bad, good, run, res
 
 import argparse
+import json
 
 from urllib.parse import urlparse
 import arjun.core.config as mem
```
```diff
@@ -26,7 +27,7 @@ parser.add_argument('-oB', help='Port for output to Burp Suite Proxy. Default po
 parser.add_argument('-d', help='Delay between requests in seconds. (default: 0)', dest='delay', type=float, default=0)
 parser.add_argument('-t', help='Number of concurrent threads. (default: 5)', dest='threads', type=int, default=5)
 parser.add_argument('-w', help='Wordlist file path. (default: {arjundir}/db/large.txt)', dest='wordlist', default=arjun_dir+'/db/large.txt')
-parser.add_argument('-m', help='Request method to use: GET/POST/XML/JSON. (default: GET)', dest='method', default='GET')
+parser.add_argument('-m', help='Request method to use: GET/POST/XML/JSON/HEADERS. (default: GET)', dest='method', default='GET')
 parser.add_argument('-i', help='Import target URLs from file.', dest='import_file', nargs='?', const=True)
 parser.add_argument('-T', help='HTTP request timeout in seconds. (default: 15)', dest='timeout', type=float, default=15)
 parser.add_argument('-c', help='Chunk size. The number of parameters to be sent at once', type=int, dest='chunks', default=500)
```
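Judging by the updated help string, `-m` now also accepts `HEADERS`, letting the scan target HTTP headers in addition to GET/POST/XML/JSON parameters; an invocation like `arjun -u http://example.com -m HEADERS` (placeholder URL) should exercise it.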
```diff
@@ -127,14 +128,19 @@ def initialize(request, wordlist, single_url=False):
     factors = define(response_1, response_2, fuzz, fuzz[::-1], wordlist)
     if single_url:
         print('%s Analysing HTTP response for potential parameter names' % run)
-    found = heuristic(response_1.text, wordlist)
+    found, words_exist = heuristic(response_1, wordlist)
     if found:
         num = len(found)
-        s = 's' if num > 1 else ''
-        print('%s Heuristic scanner found %i parameter%s: %s' % (good, num, s, ', '.join(found)))
+        if words_exist:
+            print('%s Heuristic scanner found %i parameters' % (good, num))
+        else:
+            s = 's' if num > 1 else ''
+            print('%s Heuristic scanner found %i parameter%s: %s' % (good, num, s, ', '.join(found)))
     if single_url:
         print('%s Logicforcing the URL endpoint' % run)
     populated = populate(wordlist)
+    with open(f'{arjun_dir}/db/special.json', 'r') as f:
+        populated.update(json.load(f))
     param_groups = slicer(populated, int(len(wordlist)/mem.var['chunks']))
     prev_chunk_count = len(param_groups)
     last_params = []
```
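One detail about the new `json.load` call: `special.json` (the new file further down) repeats keys such as `debug` many times, and Python's JSON parser keeps only the last value per duplicated key, so `populated.update(...)` merges in a single value per parameter name. See the short demonstration after the file listing below.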
```diff
@@ -157,7 +163,8 @@ def initialize(request, wordlist, single_url=False):
             if reason:
                 name = list(param.keys())[0]
                 confirmed_params.append(name)
-                print('%s parameter detected: %s, based on: %s' % (res, name, reason))
+                if single_url:
+                    print('%s parameter detected: %s, based on: %s' % (res, name, reason))
     return confirmed_params
 
 
```
```diff
@@ -171,7 +178,7 @@ def main():
         # in case of a single target
         mem.var['kill'] = False
         url = request['url']
-        these_params = initialize(request, wordlist)
+        these_params = initialize(request, wordlist, single_url=True)
         if these_params == 'skipped':
             print('%s Skipped %s due to errors' % (bad, request['url']))
         elif these_params:
```
```diff
@@ -179,6 +186,7 @@ def main():
             final_result[url]['params'] = these_params
             final_result[url]['method'] = request['method']
             final_result[url]['headers'] = request['headers']
+            print('%s Parameters found: %s' % (good, ', '.join(final_result[url]['params'])))
             exporter(final_result)
         else:
             print('%s No parameters were discovered.' % info)
```
**Response comparison (`compare`):**

```diff
@@ -89,7 +89,7 @@ def compare(response, factors, params):
         return ('param name reflection', params)
     if factors['value_missing']:
         for value in params.values():
-            if type(value) != str:
+            if type(value) != str or len(value) != 6:
                 continue
             if value in response.text and re.search(r'[\'"\s]%s[\'"\s]' % value, response.text):
                 return ('param value reflection', params)
```
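The added `len(value) != 6` guard restricts the value-reflection check to 6-character strings, which appears to match the length of the random probe values Arjun fills parameters with; the short `special.json` values (e.g. `"yes"`, `"on"`) are skipped, presumably to avoid false reflection hits on such common words.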
**Burp exporter (`burp_export`):**

```diff
@@ -18,8 +18,8 @@ def burp_export(result):
     exports results to Burp Suite by sending request to Burp proxy
     """
     proxies = {
-        'http': 'http://' + mem.var['burp_port'],
-        'https': 'https://' + mem.var['burp_port']
+        'http': 'http://127.0.0.1:' + mem.var['burp_port'],
+        'https': 'https://127.0.0.1:' + mem.var['burp_port']
     }
     for url, data in result.items():
         if data['method'] == 'GET':
```
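For context, this dict is the standard `requests` proxy mapping, and the fix pins the export target to the local Burp listener instead of producing a malformed `http://<port>` URL. A minimal sketch of how such a mapping is consumed (port 8080 is an assumption; Arjun takes the real port from `-oB` via `mem.var['burp_port']`):

```python
import requests

# Assumed local Burp listener for illustration.
proxies = {
    'http': 'http://127.0.0.1:8080',
    'https': 'https://127.0.0.1:8080',
}

# verify=False is typical when routing TLS through an intercepting proxy.
requests.get('http://example.com/', proxies=proxies, verify=False)
```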
**New file: `arjun/db/special.json`** (153 lines):

```json
{
    "debug": "yes",
    "debug": "true",
    "debug": "1",
    "debug": "on",
    "test": "yes",
    "test": "true",
    "test": "1",
    "test": "on",
    "source": "yes",
    "source": "true",
    "source": "1",
    "source": "on",
    "admin": "yes",
    "admin": "true",
    "admin": "1",
    "admin": "on",
    "show": "yes",
    "show": "true",
    "show": "1",
    "show": "on",
    "bot": "yes",
    "bot": "1",
    "bot": "on",
    "antibot": "off",
    "antibot": "0",
    "antibot": "no",
    "antibot": "none",
    "antibot": "nil",
    "antirobot": "off",
    "antirobot": "0",
    "antirobot": "no",
    "antirobot": "none",
    "antirobot": "nil",
    "env": "staging",
    "env": "test",
    "env": "testing",
    "env": "pre",
    "env": "pre-staging",
    "env": "daily",
    "env": "uat",
    "anticrawl": "off",
    "anticrawl": "0",
    "anticrawl": "none",
    "anticrawl": "no",
    "anticrawl": "nil",
    "captcha": "off",
    "captcha": "0",
    "captcha": "none",
    "captcha": "no",
    "captcha": "nil",
    "signing": "off",
    "signing": "0",
    "signing": "none",
    "signing": "no",
    "signing": "nil",
    "signature": "off",
    "signature": "0",
    "signature": "none",
    "signature": "no",
    "signature": "nil",
    "enc": "off",
    "enc": "0",
    "enc": "none",
    "enc": "no",
    "enc": "nil",
    "encryption": "off",
    "encryption": "0",
    "encryption": "none",
    "encryption": "no",
    "encryption": "nil",
    "automation": "on",
    "automation": "1",
    "automation": "yes",
    "waf": "disabled",
    "waf": "disable",
    "waf": "off",
    "waf": "0",
    "waf": "no",
    "security": "disabled",
    "security": "disable",
    "security": "0",
    "security": "no",
    "isdebug": "yes",
    "isdebug": "true",
    "isdebug": "1",
    "isdebug": "on",
    "istest": "yes",
    "istest": "true",
    "istest": "1",
    "istest": "on",
    "isadmin": "yes",
    "isadmin": "true",
    "isadmin": "1",
    "isadmin": "on",
    "isbot": "yes",
    "isbot": "1",
    "isbot": "on",
    "isenv": "staging",
    "isenv": "test",
    "isenv": "testing",
    "isenv": "pre",
    "isenv": "pre-staging",
    "isenv": "daily",
    "isenv": "uat",
    "hascaptcha": "off",
    "hascaptcha": "0",
    "hascaptcha": "none",
    "hascaptcha": "no",
    "hascaptcha": "nil",
    "hassigning": "off",
    "hassigning": "0",
    "hassigning": "none",
    "hassigning": "no",
    "hassigning": "nil",
    "hassignature": "off",
    "hassignature": "0",
    "hassignature": "none",
    "hassignature": "no",
    "hassignature": "nil",
    "isenc": "off",
    "isenc": "0",
    "isenc": "none",
    "isenc": "no",
    "isenc": "nil",
    "isencryption": "off",
    "isencryption": "0",
    "isencryption": "none",
    "isencryption": "no",
    "isencryption": "nil",
    "hasautomation": "on",
    "hasautomation": "1",
    "hasautomation": "yes",
    "haswaf": "disabled",
    "haswaf": "disable",
    "haswaf": "off",
    "haswaf": "0",
    "haswaf": "no",
    "issecurity": "disabled",
    "issecurity": "disable",
    "hassecurity": "0",
    "hassecurity": "no",
    "disable": "waf",
    "disable": "security",
    "disabled": "waf",
    "disabled": "security",
    "dosinglesignon": "1",
    "singlesignon": "1",
    "hassinglesignon": "1",
    "dosso": "1",
    "sso": "1",
    "hassso": "1"
}
```
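Strictly speaking, this file relies on duplicate JSON keys. That is legal syntax but collapses on parsing: Python's `json.load`/`json.loads` keeps only the last value seen for each key. A quick demonstration:

```python
import json

# Duplicate keys are syntactically valid JSON, but only the last
# occurrence of each key survives parsing in Python.
doc = '{"debug": "yes", "debug": "true", "debug": "1", "debug": "on"}'
print(json.loads(doc))  # {'debug': 'on'}
```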
**Heuristic plugin:**

```diff
@@ -1,23 +1,32 @@
 import re
 
+from arjun.core.colors import info
+import arjun.core.config as mem
 from arjun.core.utils import extract_js
 
-re_not_junk = re.compile(r'^[A-Za-z0-9_]+$')
-
-
-def is_not_junk(param):
-    return (re_not_junk.match(param) is not None)
-
-
 # TODO: for map keys, javascript tolerates { param: "value" }
+re_words = re.compile(r'[A-Za-z][A-Za-z0-9_]*')
+re_not_junk = re.compile(r'^[A-Za-z0-9_]+$')
 re_input_names = re.compile(r'''(?i)<input.+?name=["']?([^"'\s>]+)''')
 re_input_ids = re.compile(r'''(?i)<input.+?id=["']?([^"'\s>]+)''')
 re_empty_vars = re.compile(r'''(?:[;\n]|\bvar|\blet)(\w+)\s*=\s*(?:['"`]{1,2}|true|false|null)''')
 re_map_keys = re.compile(r'''['"](\w+?)['"]\s*:\s*['"`]''')
 
 
-def heuristic(response, wordlist):
+def is_not_junk(param):
+    return (re_not_junk.match(param) is not None)
+
+
+def heuristic(raw_response, wordlist):
+    words_exist = False
     potential_params = []
 
+    headers, response = raw_response.headers, raw_response.text
+    if headers.get('content-type', '').startswith(('application/json', 'text/plain')):
+        if len(response) < 200:
+            if ('required' or 'missing' or 'not found' or 'requires') in response.lower() and ('param' or 'parameter' or 'field') in response.lower():
+                if not mem.var['quiet']:
+                    print('%s The endpoint seems to require certain parameters to function. Check the response and use the --include option appropriately for better results.' % info)
+        words_exist = True
+        potential_params = re_words.findall(response)
     # Parse Inputs
     input_names = re_input_names.findall(response)
     potential_params += input_names
```
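Two details in this hunk deserve a close look. First, the keyword test is buggy as committed: `('required' or 'missing' or 'not found' or 'requires')` evaluates to just `'required'`, so only that one keyword is ever checked (and likewise only `'param'` in the second group). A form that actually tests every keyword, as a sketch rather than Arjun's code:

```python
def mentions_required_param(text: str) -> bool:
    """What the committed condition appears to intend: does the body
    mention a missing/required parameter? (Sketch, not Arjun's code.)"""
    lowered = text.lower()
    return (any(w in lowered for w in ('required', 'missing', 'not found', 'requires'))
            and any(w in lowered for w in ('param', 'parameter', 'field')))

print(mentions_required_param('{"error": "missing field: user"}'))  # True
```

Second, the new word extraction for small JSON/text bodies is simply `re_words.findall` over the raw body:

```python
import re

# Same pattern the plugin compiles as re_words.
re_words = re.compile(r'[A-Za-z][A-Za-z0-9_]*')

body = '{"error": "required parameter user_id is missing"}'
print(re_words.findall(body))
# ['error', 'required', 'parameter', 'user_id', 'is', 'missing']
```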
```diff
@@ -34,7 +43,7 @@ def heuristic(response, wordlist):
     potential_params += map_keys
 
     if len(potential_params) == 0:
-        return []
+        return [], words_exist
 
     found = set()
     for word in potential_params:
```
```diff
@@ -45,4 +54,4 @@ def heuristic(response, wordlist):
             wordlist.remove(word)
             wordlist.insert(0, word)
 
-    return list(found)
+    return list(found), words_exist
```
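Both exits of `heuristic` now return a `(found, words_exist)` tuple, so every caller has to unpack two values, as the `initialize` hunk above already does with `found, words_exist = heuristic(response_1, wordlist)`.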