Arjun 1.3 (Improved logic and --include option)

This commit is contained in:
Somdev Sangwan
2019-03-03 16:18:27 +05:30
committed by GitHub
3 changed files with 88 additions and 37 deletions

View File

@@ -10,11 +10,11 @@ from urllib.parse import unquote
from core.prompt import prompt
from core.requester import requester
from core.colors import red, green, white, end, info, bad, good, run
from core.utils import e, d, stabilize, flattenParams, randomString, slicer, joiner, unityExtracter
from core.utils import e, d, stabilize, randomString, slicer, joiner, unityExtracter, getParams, flattenParams, removeTags
print ('''%s _
/_| _ '
( |/ /(//) %sv1.2-beta%s
( |/ /(//) %sv1.3%s
_/ %s''' % (green, white, green, end))
@@ -27,12 +27,14 @@ parser.add_argument('-f', help='file path', dest='file')
parser.add_argument('--get', help='use get method', dest='GET', action='store_true')
parser.add_argument('--post', help='use post method', dest='POST', action='store_true')
parser.add_argument('--headers', help='http headers prompt', dest='headers', action='store_true')
parser.add_argument('--include', help='include this data in every request', dest='include')
args = parser.parse_args() #arguments to be parsed
url = args.url
file = args.file or './db/params.txt'
headers = args.headers
delay = args.delay or 0
include = args.include or {}
threadCount = args.threads or 2
def extractHeaders(headers):
@@ -51,13 +53,15 @@ def extractHeaders(headers):
if headers:
headers = extractHeaders(prompt())
else:
headers = {}
if args.GET:
GET = True
else:
GET = False
headers = {}
include = getParams(include)
paramList = []
try:
@@ -92,59 +96,70 @@ def heuristic(response, paramList):
url = stabilize(url)
print ('%s Analysing the content of the webpage' % run)
firstResponse = requester(url, '', headers, GET, delay)
firstResponse = requester(url, include, headers, GET, delay)
print ('%s Now lets see how target deals with a non-existent parameter' % run)
originalFuzz = randomString(6)
data = {originalFuzz : originalFuzz[::-1]}
data.update(include)
response = requester(url, data, headers, GET, delay)
reflections = response.text.count(originalFuzz[::-1])
print ('%s Reflections: %s%i%s' % (info, green, reflections, end))
originalHTML = response.text
originalResponse = response.text.replace(originalFuzz + '=' + originalFuzz[::-1], '')
originalResponse = response.text
originalCode = response.status_code
print ('%s Response Code: %s%i%s' % (info, green, originalCode, end))
newLength = len(response.text) - len(flattenParams(data))
newLength = len(response.text)
plainText = removeTags(originalResponse)
plainTextLength = len(plainText)
print ('%s Content Length: %s%i%s' % (info, green, newLength, end))
print ('%s Plain-text Length: %s%i%s' % (info, green, plainTextLength, end))
factors = {'sameHTML': False, 'samePlainText': False}
if len(firstResponse.text) == len(originalResponse):
factors['sameHTML'] = True
elif len(removeTags(firstResponse.text)) == len(plainText):
factors['samePlainText'] = True
print ('%s Parsing webpage for potenial parameters' % run)
heuristic(firstResponse.text, paramList)
fuzz = randomString(8)
data = {fuzz : fuzz[::-1]}
responseMulti = requester(url, data, headers, GET, delay)
multiplier = int((len(responseMulti.text.replace(fuzz + '=' + fuzz[::-1], '')) - len(response.text.replace(originalFuzz + '=' + originalFuzz[::-1], ''))) / 2)
print ('%s Content Length Multiplier: %s%i%s' % (info, green, multiplier, end))
data.update(include)
def quickBruter(params, originalResponse, originalCode, delay, headers, url, GET):
newResponse = requester(url, joiner(params), headers, GET, delay)
def quickBruter(params, originalResponse, originalCode, factors, include, delay, headers, url, GET):
newResponse = requester(url, joiner(params, include), headers, GET, delay)
if newResponse.status_code != originalCode:
return params
elif originalResponse and originalResponse != newResponse.text:
elif not factors['sameHTML'] and len(newResponse.text) != (len(originalResponse)):
return params
elif not factors['samePlainText'] and len(removeTags(originalResponse)) != len(removeTags(newResponse.text)):
return params
else:
return False
def bruter(param, originalResponse, originalCode, multiplier, reflections, delay, headers, url, GET):
def bruter(param, originalResponse, originalCode, factors, include, reflections, delay, headers, url, GET):
fuzz = randomString(6)
data = {param : fuzz}
data.update(include)
response = requester(url, data, headers, GET, delay)
newReflections = response.text.count(fuzz)
reason = False
if response.status_code != originalCode:
print ('%s Found a valid parameter: %s%s%s' % (good, green, param, end))
print ('%s Reason: Different response code' % info)
reason = 'Different response code'
elif reflections != newReflections:
print ('%s Found a valid parameter: %s%s%s' % (good, green, param, end))
print ('%s Reason: Different number of reflections' % info)
elif len(response.text.replace(param + '=' + fuzz, '')) != (len(originalResponse.text.replace(originalFuzz + '=' + originalFuzz[::-1], '')) + (len(param) * multiplier)):
print ('%s Found a valid parameter: %s%s%s' % (good, green, param, end))
print ('%s Reason: Different content length' % info)
if firstResponse.text != originalHTML:
originalHTML = False
reason = 'Different number of reflections'
elif not factors['sameHTML'] and len(response.text) != (len(originalResponse)):
reason = 'Different content length'
elif not factors['samePlainText'] and len(removeTags(response.text)) != (len(removeTags(originalResponse))):
reason = 'Different plain-text content length'
if reason:
return {param : reason}
else:
return None
print ('%s Performing heuristic level checks' % run)
@@ -152,7 +167,7 @@ def narrower(oldParamList):
newParamList = []
potenialParameters = 0
threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=threadCount)
futures = (threadpool.submit(quickBruter, part, originalHTML, originalCode, delay, headers, url, GET) for part in oldParamList)
futures = (threadpool.submit(quickBruter, part, originalResponse, originalCode, factors, include, delay, headers, url, GET) for part in oldParamList)
for i, result in enumerate(concurrent.futures.as_completed(futures)):
if result.result():
potenialParameters += 1
@@ -172,8 +187,16 @@ if foundParams:
print ('%s Heuristic found %i potenial parameters.' % (info, len(foundParams)))
paramList = foundParams
finalResult = []
threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=threadCount)
futures = (threadpool.submit(bruter, param, originalResponse, originalCode, multiplier, reflections, delay, headers, url, GET) for param in paramList)
for i, _ in enumerate(concurrent.futures.as_completed(futures)):
futures = (threadpool.submit(bruter, param, originalResponse, originalCode, factors, include, reflections, delay, headers, url, GET) for param in foundParams)
for i, result in enumerate(concurrent.futures.as_completed(futures)):
if result.result():
finalResult.append(result.result())
print('%s Progress: %i/%i' % (info, i + 1, len(paramList)), end='\r')
print('\n%s Scan Completed' % info)
print('%s Scan Completed' % info)
for each in finalResult:
for param, reason in each.items():
print ('%s Valid parameter found: %s%s%s' % (good, green, param, end))
print ('%s Reason: %s' % (info, reason))

View File

@@ -1,10 +1,12 @@
import re
import json
import string
import random
import requests
from core.colors import bad
def unityExtracter(arrayOfArrays, usable):
"extracts the value from single valued list from a list of lists"
remainingArray = []
for array in arrayOfArrays:
if len(array) == 1:
@@ -14,16 +16,20 @@ def unityExtracter(arrayOfArrays, usable):
return remainingArray
def slicer(array, n=2):
    "divides a list into n parts"
    # Split as evenly as possible: the first `extra` chunks get one
    # additional element so every input item lands in exactly one part.
    size, extra = divmod(len(array), n)
    chunks = []
    start = 0
    for index in range(n):
        end = start + size + (1 if index < extra else 0)
        chunks.append(array[start:end])
        start = end
    return chunks
def joiner(array):
def joiner(array, include):
    "converts a list of parameters into parameter and value pair"
    # Give every candidate parameter a random 6-char value, then overlay
    # the user-supplied `include` pairs so they always win on collision.
    pairs = {name: randomString(6) for name in array}
    pairs.update(include)
    return pairs
def stabilize(url):
"picks up the best suiting protocol if not present already"
if 'http' not in url:
try:
requests.get('http://%s' % url) # Makes request to the target with http schema
@@ -42,9 +48,11 @@ def stabilize(url):
return url
def removeTags(html):
    "removes all the html from a webpage source"
    # (?s) lets '.' cross newlines so tags split over lines are stripped too
    tagPattern = re.compile(r'(?s)<.*?>')
    return tagPattern.sub('', html)
def lineComparer(response1, response2):
"compares two webpage and finds the non-matching lines"
response1 = response1.split('\n')
response2 = response2.split('\n')
num = 0
@@ -55,8 +63,17 @@ def lineComparer(response1, response2):
num += 1
return dynamicLines
def randomString(length):
return ''.join(random.choice(string.ascii_lowercase) for i in range(length))
def randomString(n):
    "generates a random string of length n"
    # Lowercase-only keeps the fuzz values safe inside URLs and HTML
    letters = string.ascii_lowercase
    return ''.join(random.choices(letters, k=n))
def e(string):
    "utf encodes a string"
    encoded = string.encode('utf-8')
    return encoded
def d(string):
    "utf decodes a string"
    decoded = string.decode('utf-8')
    return decoded
def flattenParams(params):
flatted = []
@@ -64,8 +81,19 @@ def flattenParams(params):
flatted.append(name + '=' + value)
return '?' + '&'.join(flatted)
def e(string):
return string.encode('utf-8')
def d(string):
return string.decode('utf-8')
def getParams(data):
    """Converts user-supplied --include data into a dict of parameters.

    Accepts a JSON object string (e.g. '{"a": "b"}'), a dict (stringified
    and re-parsed), or a query string such as '?name=value&x=y'.
    Returns a dict of parameter/value pairs, or None when a query-string
    pair is malformed (missing '=').
    """
    try:
        # Swap single quotes for double so python-style dict reprs
        # (e.g. str({}) == "{}") also parse as valid JSON
        return json.loads(str(data).replace('\'', '"'))
    except json.decoder.JSONDecodeError:
        params = {}
        query = data[1:] if data.startswith('?') else data
        for part in query.split('&'):
            pieces = part.split('=')
            if len(pieces) < 2:
                # Bail out on the first malformed pair. The old code set
                # `params = None` and kept looping, which crashed with
                # TypeError on the next valid assignment (e.g. 'b&a=1').
                return None
            params[pieces[0]] = pieces[1]
        return params

View File

@@ -25887,4 +25887,4 @@ zzv
zzw
zzx
zzy
zzz
zzz