Files
Arjun/arjun.py

260 lines
10 KiB
Python
Raw Normal View History

2018-11-09 20:32:08 +05:30
#!/usr/bin/env python3
from __future__ import print_function
from core.colors import green, white, end, info, bad, good, run

# ASCII-art banner (green, reset at the end)
print('''%s _
/_| _ '
( |/ /(//) v1.6
_/ %s
''' % (green, end))

# concurrent.futures appeared in Python 3.2; use its absence to detect
# unsupported interpreters early.
try:
    import concurrent.futures
except ImportError:
    print('%s Please use Python > 3.2 to run Arjun.' % bad)
    quit()

import re
import json
import time
import argparse

import core.config
from core.prompt import prompt
from core.requester import requester
from core.utils import e, d, stabilize, randomString, slicer, joiner, unityExtracter, getParams, removeTags, extractHeaders

parser = argparse.ArgumentParser() # defines the parser
# Arguments that can be supplied
parser.add_argument('-u', help='target url', dest='url')
parser.add_argument('-o', help='path for the output file', dest='output_file')
parser.add_argument('-d', help='request delay', dest='delay', type=float, default=0)
parser.add_argument('-t', help='number of threads', dest='threads', type=int, default=2)
parser.add_argument('-f', help='wordlist path', dest='wordlist', default='./db/params.txt')
parser.add_argument('--urls', help='file containing target urls', dest='url_file')
parser.add_argument('--get', help='use get method', dest='GET', action='store_true')
parser.add_argument('--post', help='use post method', dest='POST', action='store_true')
# --headers with no value prompts interactively (const=True); with a value,
# the raw header string is parsed by extractHeaders below.
parser.add_argument('--headers', help='add headers', dest='headers', nargs='?', const=True)
parser.add_argument('--json', help='treat post data as json', dest='jsonData', action='store_true')
parser.add_argument('--stable', help='prefer stability over speed', dest='stable', action='store_true')
parser.add_argument('--include', help='include this data in every request', dest='include', default={})
args = parser.parse_args() # arguments to be parsed

url = args.url
delay = args.delay
stable = args.stable
include = args.include
headers = args.headers
jsonData = args.jsonData
url_file = args.url_file
wordlist = args.wordlist
threadCount = args.threads
# --stable or an explicit delay implies strictly sequential requests
if stable or delay:
    threadCount = 1

# Expose parsed CLI options globally (read e.g. by quickBruter)
core.config.globalVariables = vars(args)

if type(headers) == bool:
    # --headers passed with no value: prompt the user interactively
    headers = extractHeaders(prompt())
elif type(headers) == str:
    headers = extractHeaders(headers)
else:
    # Default browser-like headers when none were supplied
    headers = {'User-Agent' : 'Mozilla/5.0 (X11; Linux x86_64; rv:69.0) Gecko/20100101 Firefox/69.0',
    'Accept' : 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
    'Accept-Language' : 'en-US,en;q=0.5',
    'Accept-Encoding' : 'gzip, deflate',
    'Connection' : 'keep-alive',
    'Upgrade-Insecure-Requests' : '1'}

if jsonData:
    headers['Content-type'] = 'application/json'

# GET is the default method unless --post/--json was explicitly requested
if not (args.GET or args.POST or args.jsonData) or args.GET:
    GET = True
else:
    GET = False

include = getParams(include)

# Load the candidate parameter wordlist, one name per line
paramList = []
try:
    with open(wordlist, 'r', encoding="utf8") as file:
        for line in file:
            paramList.append(line.strip('\n'))
except FileNotFoundError:
    print('%s The specified file for parameters doesn\'t exist' % bad)
    quit()

# Optionally load multiple target URLs from --urls
urls = []
if url_file:
    try:
        with open(url_file, 'r', encoding="utf8") as file:
            for line in file:
                urls.append(line.strip('\n'))
    except FileNotFoundError:
        print('%s The specified file for URLs doesn\'t exist' % bad)
        quit()

if not url and not url_file:
    print('%s No URL specified.' % bad)
    quit()
def heuristic(response, paramList):
    """Scan an HTML response for likely parameter names and move them to
    the front of paramList (mutated in place; returns None).

    Two sources are inspected:
      1. ``name=`` attributes of ``<input>`` tags, discovered per ``<form>``
      2. JavaScript variables initialized to an empty string
    """
    done = []  # names already prioritized, to avoid duplicates
    forms = re.findall(r'(?i)(?s)<form.*?</form.*?>', response)
    for form in forms:
        method = re.search(r'(?i)method=[\'"](.*?)[\'"]', form)
        # Fix: a <form> without a method attribute made method None and
        # method.group(1) below raised AttributeError; default to GET,
        # the browser's own default for method-less forms.
        methodName = method.group(1) if method else 'GET'
        # NOTE(review): inputs are collected from the whole response rather
        # than just this form — preserved from the original behavior.
        inputs = re.findall(r'(?i)(?s)<input.*?>', response)
        for inp in inputs:
            inpName = re.search(r'(?i)name=[\'"](.*?)[\'"]', inp)
            if inpName:
                inpName = d(e(inpName.group(1)))
                if inpName not in done:
                    if inpName in paramList:
                        paramList.remove(inpName)
                    done.append(inpName)
                    paramList.insert(0, inpName)
                    print('%s Heuristic found a potential %s parameter: %s%s%s' % (good, methodName, green, inpName, end))
                    print('%s Prioritizing it' % info)
    # Empty JS string variables (var x = '' / "" / ``) are often parameters
    # the page expects to receive.
    emptyJSvars = re.finditer(r'var\s+([^=]+)\s*=\s*[\'"`][\'"`]', response)
    for each in emptyJSvars:
        inpName = each.group(1)
        done.append(inpName)
        paramList.insert(0, inpName)
        print('%s Heuristic found a potential parameter: %s%s%s' % (good, green, inpName, end))
        print('%s Prioritizing it' % info)
2018-11-09 20:32:08 +05:30
2019-04-17 20:12:16 +05:30
def quickBruter(params, originalResponse, originalCode, reflections, factors, include, delay, headers, url, GET):
    """Probe a batch of candidate parameters with a single request.

    Returns the batch (``params``, truthy) when the response differs from
    the baseline in status code, HTML length, plain-text length, or
    reflection count — i.e. at least one parameter in the batch had an
    effect. Returns False when nothing changed.

    Raises ConnectionError on HTTP 429 unless --stable was passed, in
    which case it sleeps 30s and returns the batch so it is retried.
    """
    joined = joiner(params, include)
    newResponse = requester(url, joined, headers, GET, delay)
    if newResponse.status_code == 429:  # rate limited by the target
        if core.config.globalVariables['stable']:
            # Fix: the '%s' placeholder was never filled in the original
            # ('% info' was missing), so a literal '%s' was printed.
            print('%s Hit rate limit, stabilizing the connection..' % info)
            time.sleep(30)
            return params  # hand the batch back so it gets retried
        else:
            print('%s Target has rate limiting in place, please use --stable switch' % bad)
            raise ConnectionError
    if newResponse.status_code != originalCode:
        return params
    if factors['sameHTML'] and len(newResponse.text) != len(originalResponse):
        return params
    if factors['samePlainText'] and len(removeTags(originalResponse)) != len(removeTags(newResponse.text)):
        return params
    # Reflection check: a value echoed a different number of times than the
    # baseline means the server picked up one of the injected parameters.
    for param, value in joined.items():
        if param not in include and newResponse.text.count(value) != reflections:
            return params
    return False
2019-04-17 20:12:16 +05:30
def narrower(oldParamList, url, include, headers, GET, delay, originalResponse, originalCode, reflections, factors, threadCount):
    """Binary-search narrowing step: run quickBruter on every chunk of
    oldParamList concurrently and return the half-sized sub-chunks of the
    chunks that showed an effect.
    """
    newParamList = []
    # Fix: the executor was created but never shut down; the context
    # manager joins the worker threads before returning.
    with concurrent.futures.ThreadPoolExecutor(max_workers=threadCount) as threadpool:
        futures = (threadpool.submit(quickBruter, part, originalResponse, originalCode, reflections, factors, include, delay, headers, url, GET) for part in oldParamList)
        for i, future in enumerate(concurrent.futures.as_completed(futures)):
            # Evaluate the future once instead of twice as the original did
            outcome = future.result()
            if outcome:
                newParamList.extend(slicer(outcome))
            print('%s Processing: %i/%-6i' % (info, i + 1, len(oldParamList)), end='\r')
    return newParamList
def initialize(url, include, headers, GET, delay, paramList, threadCount):
    """Scan one target URL for valid parameters.

    Establishes a baseline (status code, HTML/plain-text length, reflection
    count for a random value), lets heuristic() prioritize likely names,
    then narrows paramList chunk-by-chunk with quickBruter and verifies
    each surviving parameter individually.

    Returns the list of confirmed parameter names, or {} when the URL
    cannot be stabilized.
    """
    url = stabilize(url)
    if not url:
        return {}
    else:
        print('%s Analysing the content of the webpage' % run)
        firstResponse = requester(url, include, headers, GET, delay)
        print('%s Analysing behaviour for a non-existent parameter' % run)
        # Send a random parameter with its reversed value to measure how
        # many times an arbitrary value is reflected in the page.
        originalFuzz = randomString(6)
        data = {originalFuzz : originalFuzz[::-1]}
        data.update(include)
        response = requester(url, data, headers, GET, delay)
        reflections = response.text.count(originalFuzz[::-1])
        print('%s Reflections: %s%i%s' % (info, green, reflections, end))
        originalResponse = response.text
        originalCode = response.status_code
        print('%s Response Code: %s%i%s' % (info, green, originalCode, end))
        newLength = len(response.text)
        plainText = removeTags(originalResponse)
        plainTextLength = len(plainText)
        print('%s Content Length: %s%i%s' % (info, green, newLength, end))
        print('%s Plain-text Length: %s%i%s' % (info, green, plainTextLength, end))
        # Decide which comparison factor is stable enough to trust:
        # identical HTML length, else identical plain-text length.
        factors = {'sameHTML': False, 'samePlainText': False}
        if len(firstResponse.text) == len(originalResponse):
            factors['sameHTML'] = True
        elif len(removeTags(firstResponse.text)) == len(plainText):
            factors['samePlainText'] = True
        print('%s Parsing webpage for potential parameters' % run)
        heuristic(firstResponse.text, paramList)
        fuzz = randomString(8)
        data = {fuzz : fuzz[::-1]}
        data.update(include)
        print('%s Performing heuristic level checks' % run)
        # Split the wordlist into chunks of 25 (slicer halves into 2 parts
        # of paramList/50 each — see slicer's second argument) and narrow
        # repeatedly until single candidates fall out via unityExtracter.
        toBeChecked = slicer(paramList, 50)
        foundParamsTemp = []
        while True:
            toBeChecked = narrower(toBeChecked, url, include, headers, GET, delay, originalResponse, originalCode, reflections, factors, threadCount)
            toBeChecked = unityExtracter(toBeChecked, foundParamsTemp)
            if not toBeChecked:
                break
        # Re-test each surviving candidate on its own to weed out chunks
        # that only changed the response in combination.
        foundParams = []
        for param in foundParamsTemp:
            exists = quickBruter([param], originalResponse, originalCode, reflections, factors, include, delay, headers, url, GET)
            if exists:
                foundParams.append(param)
        print('%s Scan Completed ' % info)
        for each in foundParams:
            print('%s Valid parameter found: %s%s%s' % (good, green, each, end))
        if not foundParams:
            print('%s Unable to verify existence of parameters detected by heuristic.' % bad)
        return foundParams
2019-07-02 10:06:49 +05:30
# Driver: scan the single -u URL, or every URL from --urls, and collect
# results per URL in finalResult.
finalResult = {}
try:
    if url:
        finalResult[url] = []
        try:
            finalResult[url] = initialize(url, include, headers, GET, delay, paramList, threadCount)
        except ConnectionError:
            # Single-target mode: rate limiting is fatal
            print('%s Target has rate limiting in place, please use --stable switch.' % bad)
            quit()
    elif urls:
        for url in urls:
            finalResult[url] = []
            print('%s Scanning: %s' % (run, url))
            try:
                # list(paramList) copies the wordlist because initialize()
                # mutates it (heuristic reordering)
                finalResult[url] = initialize(url, include, headers, GET, delay, list(paramList), threadCount)
                if finalResult[url]:
                    print('%s Parameters found: %s' % (good, ', '.join(finalResult[url])))
            except ConnectionError:
                # Multi-target mode: skip this URL and keep scanning the rest
                print('%s Target has rate limiting in place, please use --stable switch.' % bad)
                pass
except KeyboardInterrupt:
    print('%s Exiting.. ' % bad)
    quit()

# Finally, export to json
if args.output_file and finalResult:
    print('%s Saving output to JSON file in %s' % (info, args.output_file))
    with open(str(args.output_file), 'w+', encoding="utf8") as json_output:
        json.dump(finalResult, json_output, sort_keys=True, indent=4)