Merge pull request #2 from yobo000/master
Add logging & argparse (a modest first attempt)
.gitignore (new file)
@@ -0,0 +1,4 @@
+*.pyc
+.DS_Store
+*.log
+data/*.*
AutoSqli.py
@@ -1,20 +1,22 @@
 #!/usr/bin/python
 #-*-coding:utf-8-*-
+from __future__ import absolute_import, print_function
 import requests
 import time
 import json
 import threading
 import Queue
-from search.baidu import *
+from search import baidu
+import logging
+from config import LOG, API_URL


 class AutoSqli(object):

     """
     Interact with the server started by sqlmapapi, using the sqlmapapi methods

     """

     def __init__(self, server='', target='',data = '',referer = '',cookie = ''):
         super(AutoSqli, self).__init__()
         self.server = server
@@ -28,17 +30,22 @@ class AutoSqli(object):
         self.referer = referer
         self.cookie = cookie
         self.start_time = time.time()
+        self.logger = logging.getLogger('app.run')
+        self.logger.info('Creating an instance of AutoSqli for {0}.'.format(self.target))

     def task_new(self):
-        self.taskid = json.loads(
-            requests.get(self.server + 'task/new').text)['taskid']
-        #print 'Created new task: ' + self.taskid
-        if len(self.taskid) > 0:
-            return True
-        return False
+        try:
+            self.taskid = json.loads(
+                requests.get(self.server + 'task/new').text)['taskid']
+            #print 'Created new task: ' + self.taskid
+            if len(self.taskid) > 0:
+                return True
+            return False
+        except requests.exceptions.ConnectionError:
+            self.logger.error("sqlmapapi.py is not running")

     def task_delete(self):
-        json_kill=requests.get(self.server + 'task/' + self.taskid + '/delete').text
+        json_kill = requests.get(self.server + 'task/' + self.taskid + '/delete').text
         # if json.loads(requests.get(self.server + 'task/' + self.taskid + '/delete').text)['success']:
         #     #print '[%s] Deleted task' % (self.taskid)
         #     return True
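For readers who want to poke at the new error handling in isolation, here is a minimal standalone sketch of the same pattern; it assumes a sqlmapapi server at the default http://127.0.0.1:8775/ and catches requests.exceptions.ConnectionError, which is what requests.get raises when nothing is listening (the helper name new_task is illustrative, not part of the PR):

import json
import logging

import requests

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('app.run')

def new_task(server='http://127.0.0.1:8775/'):
    """Ask sqlmapapi for a new task id; return it, or None when the API server is down."""
    try:
        resp = requests.get(server + 'task/new')
        taskid = json.loads(resp.text)['taskid']
        return taskid if taskid else None
    except requests.exceptions.ConnectionError:
        logger.error("sqlmapapi.py is not running")
        return None

if __name__ == '__main__':
    print(new_task())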
@@ -46,7 +53,7 @@

     def scan_start(self):
         headers = {'Content-Type': 'application/json'}
-        print "starting to scan "+ self.target +".................."
+        self.logger.debug("Starting to scan "+ self.target +"..................")
         payload = {'url': self.target}
         url = self.server + 'scan/' + self.taskid + '/start'
         t = json.loads(
@@ -74,9 +81,10 @@
             #print 'not injection\t'
             pass
         else:
-            f=open('data/injection.txt','a')
+            f = open('data/injection.txt','a')
             f.write(self.target+'\n')
-            print 'injection \t'
+            f.close()
+            self.logger.warning('injection \t')

     def option_set(self):
         headers = {'Content-Type': 'application/json'}
@@ -134,28 +142,46 @@ class myThread(threading.Thread):
         objects=self.q.get()
         result=objects.run()

-if __name__ == '__main__':
-    urls=[]
-    print 'the program starts!'
-    key='inurl:asp?id='
-    pages=3
-    urls=geturl(key,pages)
+
+def main():
+    import argparse
+    parser = argparse.ArgumentParser()
+    parser.add_argument('-n', '--num', default=4, nargs='?', type=int, dest='num', help="Thread num")
+    parser.add_argument('-p', '--page', default=3, nargs='?', type=int, dest='page', help="Search Page num")
+    parser.add_argument('-d', '--log', default=LOG["filename"], nargs='?', type=str, dest='log', help="The path of debug log")
+    args = parser.parse_args()
+    logger = logging.getLogger('app')
+    logger.setLevel(LOG["level"])
+    fh = logging.FileHandler(args.log)
+    fh.setLevel(LOG["level"])
+    formatter = logging.Formatter(LOG['format'], LOG["datefmt"])
+    fh.setFormatter(formatter)
+    sh = logging.StreamHandler()
+    sh.setLevel(LOG["level"])
+    sh.setFormatter(formatter)
+    logger.addHandler(fh)
+    logger.addHandler(sh)
+    urls = []
+    logger.info('the program starts!')
+    pages = args.page
+    key = 'inurl:asp?id='
+    urls = baidu.geturl(key, pages)
     #print urls
-    workQueue=Queue.Queue()
+    workQueue = Queue.Queue()
     for tar in urls:
-        s = AutoSqli('http://127.0.0.1:8775', tar)
+        s = AutoSqli(API_URL, tar)
         workQueue.put(s)
     threads = []
-    nloops = range(4) #threads Num
+    nloops = range(args.num) #threads Num
     for i in nloops:
-        t = myThread(workQueue,i)
+        t = myThread(workQueue, i)
         t.start()
         threads.append(t)
     for i in nloops:
         threads[i].join()
-    print "Exiting Main Thread"
+    logger.info("Exiting Main Thread")
+
+
+if __name__ == '__main__':
+    main()
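A note on why the module-level loggers ('app.run' in AutoSqli and 'app.baidu' in the search package) need no handlers of their own: records propagate up the dotted name to the 'app' logger that main() configures. A minimal sketch of that wiring, with the LOG values inlined instead of imported from config.py:

import logging

LOG = {  # inlined stand-in for the values config.py exports
    "level": logging.DEBUG,
    "format": '[%(asctime)s] %(levelname)-8s %(name)-12s %(message)s',
    "datefmt": '%Y-%m-%d %H:%M:%S',
}

app = logging.getLogger('app')
app.setLevel(LOG["level"])
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter(LOG["format"], LOG["datefmt"]))
app.addHandler(handler)

# Child loggers pick up the handler through propagation.
logging.getLogger('app.run').info('record from AutoSqli')
logging.getLogger('app.baidu').info('record from the search module')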
@@ -8,7 +8,7 @@

 **Usage:**
 - In the sqlmap directory, run `python sqlmapapi.py -s` to start the API server listening.
-- Run AutoSqli.py
+- Run AutoSqli.py with `python AutoSqli.py`; the available options can be listed with `-h`.

 **Tips:**
 * Note that the search keyword is customized in the code: `key='inurl:asp?id='`
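To make the `-h` hint concrete, the new flags parse roughly as below (argument definitions copied from the AutoSqli.py hunk, with the config.py default for the log path inlined; the sample invocations are illustrative):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-n', '--num', default=4, nargs='?', type=int, dest='num', help="Thread num")
parser.add_argument('-p', '--page', default=3, nargs='?', type=int, dest='page', help="Search Page num")
parser.add_argument('-d', '--log', default='autosqli.log', nargs='?', type=str, dest='log', help="The path of debug log")

print(parser.parse_args([]))                      # Namespace(log='autosqli.log', num=4, page=3)
print(parser.parse_args(['-n', '8', '-p', '5']))  # Namespace(log='autosqli.log', num=8, page=5)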
config.py (new file)
@@ -0,0 +1,18 @@
+#!/bin/env python
+# -*- coding=utf-8 -*-
+import logging
+
+API_URL = "http://127.0.0.1:8775"
+
+LEVELS = {'debug': logging.DEBUG,
+          'info': logging.INFO,
+          'warning': logging.WARNING,
+          'error': logging.ERROR,
+          'critical': logging.CRITICAL}
+
+LOG = {
+    "level" : LEVELS["debug"],
+    "filename" : "autosqli.log",
+    "format" : '[%(asctime)s] %(levelname)-8s %(name)-12s %(message)s',
+    "datefmt" : '%Y-%m-%d %H:%M:%S'
+}
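Only the 'debug' entry of LEVELS is referenced by LOG above; the mapping is there so the level can be chosen by name. A small sketch of how such a lookup could be used (the level_from_name helper is hypothetical, not part of the PR):

import logging

LEVELS = {'debug': logging.DEBUG,
          'info': logging.INFO,
          'warning': logging.WARNING,
          'error': logging.ERROR,
          'critical': logging.CRITICAL}

def level_from_name(name, default=logging.DEBUG):
    """Map a level name such as 'info' to its logging constant, falling back to DEBUG."""
    return LEVELS.get(name.lower(), default)

logging.getLogger('app').setLevel(level_from_name('info'))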
@@ -1,6 +0,0 @@
-http://www.lamarche.com.tw/production_detail.php?shop_category=64&sn=248
-http://www.70jj.com/shop/index.php?shop_id=1
-http://www.cosmax.com.hk/products_detail.php?product_id=17
-http://www.etron.com/en/products/u3hc_detial.php?Product_ID=5
-http://www.fembooks.com.tw/indexstore.php?product_id=5423
-http://www.guangzhouflower.net.cn/product.php?pid=12
@@ -1,20 +0,0 @@
-http://www.99166.com/zjinfo.asp?id=5
-http://www.yh8z.com/Secondary/guding.asp?Id=68&Parent_ID=18&Type_Class=news&GS_Class=22
-http://www.gdkszx.com.cn/ksxx/kszc_show.asp?id=2205
-http://www.smxs.gov.cn/viewtexti.asp?id=275079&npage=6
-http://www.juancheng.gov.cn/wsbs-view.asp?id=9285
-http://rc.sz.zj.cn/company.asp?id=4291
-http://www.law-lib.com/fxj/fxj.asp?id=940
-http://www.kfws.gov.cn/Article_read.asp?id=2289
-http://www.zjghtcm.com/new_show.asp?id=1178
-http://www.medsci.cn/sci/journal.asp?id=0bc61099
-http://www.dylaw.gov.cn/zhongc/web60/classshow.asp?id=51848&classid=15
-http://club.kdnet.net/dispbbs.asp?id=11095423&boardid=1
-http://people.rednet.cn/PeopleShow.asp?ID=2410432
-http://www.dhzsxx.com/ShowNews.asp?id=1591
-http://www.chinawutong.com/co/huoyuan_01/index.asp?id=213633
-http://news.chinaxinge.com/shownews.asp?id=53866&sjm=49600b363e048e05
-http://www.gxxgty.com/news_show.asp?id=1583
-http://szb.keq0475.com/Qnews.asp?ID=49506
-http://www.cyfy.cn/kssz.asp?id=42
-http://www.szkweekly.com/List.asp?ID=54284
Binary file not shown.
search/baidu.py
@@ -1,11 +1,16 @@
 #coding: utf-8
+from __future__ import unicode_literals
+
 import urllib2
 import string
 import urllib
 import re
 import random
+import logging

-user_agents = ['Mozilla/5.0 (Windows NT 6.1; WOW64; rv:23.0) Gecko/20130406 Firefox/23.0', \
+__all__ = ["geturl"]
+
+USER_AGENTS = ['Mozilla/5.0 (Windows NT 6.1; WOW64; rv:23.0) Gecko/20130406 Firefox/23.0', \
 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:18.0) Gecko/20100101 Firefox/18.0', \
 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/533+ \
 (KHTML, like Gecko) Element Browser 5.0', \
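A side note on the new `__all__ = ["geturl"]` line: it only restricts star imports such as the old `from search.baidu import *`; the explicit `from search import baidu` that AutoSqli.py now uses is unaffected. A tiny sketch of the difference (the module name mymod is illustrative):

# mymod.py (illustrative)
__all__ = ["geturl"]

def geturl(keyword, pages):
    return []

def baidu_search(keyword, pn):
    return ""

# In another file:
#   from mymod import *     # binds only geturl
#   import mymod            # mymod.baidu_search is still reachable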
@@ -18,26 +23,31 @@
 Chrome/28.0.1468.0 Safari/537.36', \
 'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0; Trident/5.0; TheWorld)']

-def baidu_search(keyword,pn):
-    p= {'wd': keyword}
-    res=urllib2.urlopen(("http://www.baidu.com/s?"+urllib.urlencode(p)+"&pn={0}&cl=3&rn=10").format(pn)) # rn is the number of results per page; pn is the index of the first result on the current page
-    html=res.read()
+logger = logging.getLogger('app.baidu')
+
+def baidu_search(keyword, pn):
+    p = {'wd': keyword}
+    res = urllib2.urlopen(("http://www.baidu.com/s?"+urllib.urlencode(p)+"&pn={0}&cl=3&rn=10").format(pn)) # rn is the number of results per page; pn is the index of the first result on the current page
+    html = res.read()
     return html
-def getList(regex,text): # deduplicate the collected urls and store them in a list
+
+def getList(regex, text): # deduplicate the collected urls and store them in a list
     arr = []
     res = re.findall(regex, text)
     if res:
         for r in res:
             arr.append(r)
     return arr
-def getMatch(regex,text): # matching helper
+
+def getMatch(regex, text): # matching helper
     res = re.findall(regex, text)
     if res:
         return res[0]
     return ''

 def is_get(url): # is this a GET-style link that sqlmap can recognize
-    regex=r'(\S*?)\?.*=.*'
-    res=re.match(regex,url)
+    regex = r'(\S*?)\?.*=.*'
+    res = re.match(regex,url)
     if res:
         #print res.group(1)
         return res.group(1)
@@ -46,12 +56,12 @@ def is_get(url):
 # def Deduplication():
 #     regex=r'\S'

-def geturl(keyword,pages): # collect urls
+def geturl(keyword, pages): # collect urls
     targets = []
-    hosts=[]
+    hosts = []
     for page in range(0,int(pages)):
-        pn=(page+1)*10
-        html = baidu_search(keyword,pn)
+        pn = (page+1)*10
+        html = baidu_search(keyword, pn)
         content = unicode(html, 'utf-8','ignore')
         arrList = getList(u"<div class=\"f13\">(.*)</div>", content) # split the page into result blocks
         #print arrList
@@ -61,28 +71,29 @@ def geturl(keyword, pages):
         for item in arrList:
             regex = u"data-tools='\{\"title\":\"(.*)\",\"url\":\"(.*)\"\}'"
             link = getMatch(regex,item)
-            url=link[1] # the Baidu-rewritten url
+            url = link[1] # the Baidu-rewritten url
             try:
-                domain=urllib2.Request(url)
-                r=random.randint(0,11)
-                domain.add_header('User-agent', user_agents[r])
-                domain.add_header('connection','keep-alive')
-                response=urllib2.urlopen(domain)
-                uri=response.geturl() # the real url behind the redirect
-                urs=is_get(uri) # is it a traditional GET-style url
+                domain = urllib2.Request(url)
+                r = random.randint(0, len(USER_AGENTS) - 1)
+                domain.add_header('User-agent', USER_AGENTS[r])
+                domain.add_header('connection', 'keep-alive')
+                response = urllib2.urlopen(domain)
+                uri = response.geturl() # the real url behind the redirect
+                urs = is_get(uri) # is it a traditional GET-style url
                 if (uri in targets) or (urs in hosts) :
                     continue
                 else:
                     targets.append(uri)
                     hosts.append(urs)
-                    f1=open('data/targets.txt','a') # store the url links
+                    f1 = open('data/targets.txt','a') # store the url links
                     f1.write(uri+'\n')
                     f1.close()
             except:
                 continue
-    print "urls have been grabed already!!!"
+    logger.info("urls have been grabbed already!!!")
     return targets
+
+
+if __name__ == '__main__':
+    pass
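To make the deduplication in geturl concrete: is_get keeps only the part of a url before the '?', so two search results that differ only in their query parameters collapse onto the same host entry and the later one is skipped. A self-contained sketch of that idea (not a line-for-line copy of geturl; the sample urls are made up):

import re

def is_get(url):
    """Return the part before '?' for a GET-style url sqlmap can take, else None."""
    m = re.match(r'(\S*?)\?.*=.*', url)
    return m.group(1) if m else None

targets, hosts = [], []
for uri in ['http://example.com/item.asp?id=1',
            'http://example.com/item.asp?id=2',      # same script, different id -> skipped
            'http://example.com/static/page.html']:  # no query string -> ignored here
    base = is_get(uri)
    if uri in targets or (base and base in hosts):
        continue
    if base:
        targets.append(uri)
        hosts.append(base)

print(targets)  # ['http://example.com/item.asp?id=1']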
search/baidu.pyc (binary file not shown)