Arjun/arjun.py

#!/usr/bin/env python3
import re
import sys
import requests
import argparse
import concurrent.futures
import json

from urllib.parse import unquote
from core.prompt import prompt
from core.requester import requester
from core.colors import red, green, white, end, info, bad, good, run
from core.utils import e, d, stabilize, randomString, slicer, joiner, unityExtracter, getParams, flattenParams, removeTags

print('''%s    _
   /_| _ '
  (  |/ /(//) %sv1.3%s
      _/      %s''' % (green, white, green, end))
parser = argparse.ArgumentParser()  # defines the parser
# Arguments that can be supplied
parser.add_argument('-u', help='target url', dest='url')
parser.add_argument('-d', help='request delay', dest='delay', type=int)
parser.add_argument('-t', help='number of threads', dest='threads', type=int)
parser.add_argument('-f', help='file path', dest='file')
parser.add_argument('-o', help='path for the output file', dest='output_file')
parser.add_argument('--get', help='use get method', dest='GET', action='store_true')
parser.add_argument('--post', help='use post method', dest='POST', action='store_true')
parser.add_argument('--headers', help='http headers prompt', dest='headers', action='store_true')
parser.add_argument('--include', help='include this data in every request', dest='include')
args = parser.parse_args()  # parsed arguments
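
# Example invocations (illustrative only, not from the original source;
# the flags are defined above):
#   python3 arjun.py -u https://api.example.com/endpoint --get
#   python3 arjun.py -u https://api.example.com/endpoint --post -t 10 -o result.json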

url = args.url
file = args.file or './db/params.txt'
headers = args.headers
delay = args.delay or 0
include = args.include or {}
threadCount = args.threads or 2
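
# Turn raw 'Header: value' lines (pasted at the interactive prompt)
# into a dict, trimming a trailing comma from each value if present.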
def extractHeaders(headers):
    sortedHeaders = {}
    matches = re.findall(r'(.*):\s(.*)', headers)
    for match in matches:
        header = match[0]
        value = match[1]
        try:
            if value[-1] == ',':
                value = value[:-1]
            sortedHeaders[header] = value
        except IndexError:
            pass
    return sortedHeaders

if headers:
    headers = extractHeaders(prompt())
else:
    headers = {}

if args.GET:
    GET = True
else:
    GET = False

include = getParams(include)

paramList = []
try:
    with open(file, 'r') as file:
        for line in file:
            paramList.append(line.strip('\n'))
except FileNotFoundError:
    print('%s The specified file doesn\'t exist' % bad)
    quit()
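
# Scan the page's HTML for <form>/<input> elements and move any input
# names found to the front of the wordlist so they get tested first.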
def heuristic(response, paramList):
    done = []
    forms = re.findall(r'(?i)(?s)<form.*?</form.*?>', response)
    for form in forms:
        method = re.search(r'(?i)method=[\'"](.*?)[\'"]', form)
        inputs = re.findall(r'(?i)(?s)<input.*?>', response)
        for inp in inputs:
            inpName = re.search(r'(?i)name=[\'"](.*?)[\'"]', inp)
            if inpName:
                inpType = re.search(r'(?i)type=[\'"](.*?)[\'"]', inp)
                inpValue = re.search(r'(?i)value=[\'"](.*?)[\'"]', inp)
                inpName = d(e(inpName.group(1)))
                if inpName not in done:
                    if inpName in paramList:
                        paramList.remove(inpName)
                    done.append(inpName)
                    paramList.insert(0, inpName)
                    print('%s Heuristic found a potential parameter: %s%s%s' % (good, green, inpName, end))
                    print('%s Prioritizing it' % good)

url = stabilize(url)

print('%s Analysing the content of the webpage' % run)
firstResponse = requester(url, include, headers, GET, delay)

print('%s Now let\'s see how the target deals with a non-existent parameter' % run)
originalFuzz = randomString(6)
data = {originalFuzz: originalFuzz[::-1]}
data.update(include)
response = requester(url, data, headers, GET, delay)
reflections = response.text.count(originalFuzz[::-1])
print('%s Reflections: %s%i%s' % (info, green, reflections, end))

originalResponse = response.text
originalCode = response.status_code
print('%s Response Code: %s%i%s' % (info, green, originalCode, end))

newLength = len(response.text)
plainText = removeTags(originalResponse)
plainTextLength = len(plainText)
print('%s Content Length: %s%i%s' % (info, green, newLength, end))
print('%s Plain-text Length: %s%i%s' % (info, green, plainTextLength, end))
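
# Decide which comparison factors are stable: if the two baseline responses
# have the same HTML length, raw content length is a reliable signal;
# failing that, try the length of the tag-stripped plain text instead.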
factors = {'sameHTML': False, 'samePlainText': False}
if len(firstResponse.text) == len(originalResponse):
    factors['sameHTML'] = True
elif len(removeTags(firstResponse.text)) == len(plainText):
    factors['samePlainText'] = True

print('%s Parsing webpage for potential parameters' % run)
heuristic(firstResponse.text, paramList)

fuzz = randomString(8)
data = {fuzz: fuzz[::-1]}
data.update(include)
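
# Probe a whole chunk of candidate parameters in a single request. Any change
# in status code, HTML length, or plain-text length (whichever factors are
# stable) means the chunk holds at least one interesting parameter.
# Assumption: joiner() from core.utils merges the chunk with the --include
# data into one request payload; its implementation isn't shown here.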
def quickBruter(params, originalResponse, originalCode, factors, include, delay, headers, url, GET):
    newResponse = requester(url, joiner(params, include), headers, GET, delay)
    if newResponse.status_code != originalCode:
        return params
    elif not factors['sameHTML'] and len(newResponse.text) != len(originalResponse):
        return params
    elif not factors['samePlainText'] and len(removeTags(originalResponse)) != len(removeTags(newResponse.text)):
        return params
    else:
        return False
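
# Test a single parameter with a fresh random value and report the reason
# the response differed from the baseline, or None if it looks unchanged.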
def bruter(param, originalResponse, originalCode, factors, include, reflections, delay, headers, url, GET):
    fuzz = randomString(6)
    data = {param: fuzz}
    data.update(include)
    response = requester(url, data, headers, GET, delay)
    newReflections = response.text.count(fuzz)
    reason = False
    if response.status_code != originalCode:
        reason = 'Different response code'
    elif reflections != newReflections:
        reason = 'Different number of reflections'
    elif not factors['sameHTML'] and len(response.text) != len(originalResponse):
        reason = 'Different content length'
    elif not factors['samePlainText'] and len(removeTags(response.text)) != len(removeTags(originalResponse)):
        reason = 'Different plain-text content length'
    if reason:
        return {param: reason}
    else:
        return None

print('%s Performing heuristic level checks' % run)

def narrower(oldParamList):
    newParamList = []
    potentialParameters = 0
    threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=threadCount)
    futures = (threadpool.submit(quickBruter, part, originalResponse, originalCode, factors, include, delay, headers, url, GET) for part in oldParamList)
    for i, result in enumerate(concurrent.futures.as_completed(futures)):
        if result.result():
            potentialParameters += 1
            newParamList.extend(slicer(result.result()))
        print('%s Processing: %i/%-6i' % (info, i + 1, len(oldParamList)), end='\r')
    return newParamList
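
# Recursive narrowing, binary-search style: start with the wordlist cut into
# chunks, keep only chunks that changed the response, split those further,
# and repeat until every surviving chunk is a single confirmed name.
# Assumption: slicer() splits a list into the given number of parts (2 by
# default) and unityExtracter() moves single-item chunks into foundParams,
# returning the chunks that still need splitting; both live in core.utils.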
toBeChecked = slicer(paramList, 25)
foundParams = []
while True:
    toBeChecked = narrower(toBeChecked)
    toBeChecked = unityExtracter(toBeChecked, foundParams)
    if not toBeChecked:
        break

if foundParams:
    print('%s Heuristic found %i potential parameters.' % (info, len(foundParams)))
    paramList = foundParams
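
# Confirmation pass: probe each surviving parameter individually and record
# why it is believed to be valid.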
finalResult = []
jsonResult = []

threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=threadCount)
futures = (threadpool.submit(bruter, param, originalResponse, originalCode, factors, include, reflections, delay, headers, url, GET) for param in foundParams)
for i, result in enumerate(concurrent.futures.as_completed(futures)):
    if result.result():
        finalResult.append(result.result())
    print('%s Progress: %i/%i' % (info, i + 1, len(paramList)), end='\r')
print('%s Scan Completed' % info)

for each in finalResult:
    for param, reason in each.items():
        print('%s Valid parameter found: %s%s%s' % (good, green, param, end))
        print('%s Reason: %s' % (info, reason))
        jsonResult.append({'param': param, 'reason': reason})

# Finally, export the results to JSON
if args.output_file and jsonResult:
    print('Saving output to JSON file at %s' % args.output_file)
    with open(str(args.output_file), 'w') as json_output:
        json.dump({'results': jsonResult}, json_output, sort_keys=True, indent=4)