update ipquery
.vscode/settings.json (vendored, new file): 6 additions
@@ -0,0 +1,6 @@
+{
+    "python.linting.flake8Enabled": true,
+    "python.linting.pylintEnabled": false,
+    "python.linting.enabled": true,
+    "python.pythonPath": "C:\\Python37\\python.exe"
+}
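The new workspace settings turn Python linting on, select flake8 instead of pylint, and pin the interpreter to C:\Python37\python.exe. A minimal sketch, not part of the commit, of running the equivalent check outside VS Code; it assumes flake8 is installed for that interpreter and that the changed module is named ipquery.py, which is only a guess from the commit title:

    # Hypothetical check mirroring the settings above; ipquery.py is an assumed filename.
    import subprocess

    result = subprocess.run(
        [r"C:\Python37\python.exe", "-m", "flake8", "ipquery.py"],
        capture_output=True,
        text=True,
    )
    print(result.stdout or "no flake8 findings")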
@@ -2,13 +2,16 @@
 from urllib.parse import urlparse
 import socket
 import requests
+import urllib3
+from bs4 import BeautifulSoup


 def attack(URL):
+    # 百度查询模块
     url = URL
     URL = urlparse(URL).netloc
     if URL == '':
         URL = url
-        #print('IP查询目标:' + URL)
     else:
         pass
     try:
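The hunk below prefixes the Baidu output with 百度结果 ("Baidu result"), tightens the bare except clauses to except Exception, and appends an IPIP lookup block. A minimal standalone sketch, not part of the commit, of the Baidu location query that attack() keeps using; the endpoint, the resource_id value 6006, and the data[0].location response shape are taken from the diff and assumed correct rather than independently verified:

    # Hypothetical helper illustrating the lookup used inside attack().
    import requests

    def baidu_location(ip: str) -> str:
        payload = {'query': ip, 'resource_id': '6006'}
        r = requests.get(
            "https://sp0.baidu.com/8aQDcjqpAAV3otqbppnN2DJv/api.php",
            params=payload,
            timeout=5,
        )
        # location is a human-readable region string in the API response
        return r.json().get('data')[0].get('location')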
@@ -16,18 +19,79 @@ def attack(URL):
         payload = {'query': ip, 'resource_id': '6006'}
         r = requests.get("https://sp0.baidu.com/8aQDcjqpAAV3otqbppnN2DJv/api.php", params=payload)

-        print(r.json().get('data')[0].get('location'))
-    except:
+        print('百度结果:',r.json().get('data')[0].get('location'))
+    except Exception:
         try:
             ip = socket.gethostbyname(URL)
             print('IP查询目标:' + ip)
             payload = {'query': ip, 'resource_id': '6006'}
             r = requests.get("https://sp0.baidu.com/8aQDcjqpAAV3otqbppnN2DJv/api.php", params=payload)
-            print(r.json().get('data')[0].get('location'))
-        except:
+            print('百度结果:',r.json().get('data')[0].get('location'))
+        except Exception:
             print('获取IP地址错误:'+URL)

+    # IPIP查询模块
+    print('IPIP结果:')
+    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+    url = 'https://www.ipip.net/ip.html'
+    headers = {"User-Agent": 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.9 Safari/537.36'}
+    data = {'ip': URL}
+    re = requests.post(url,headers=headers,data=data,verify=False,timeout=5)
+    soup=BeautifulSoup(re.content,"lxml")
+
+    def information_one(table):
+        length = len(table.find_all('td'))
+        data2 = table.find_all('td')[2].get_text().strip()
+        information2 = table.find_all('span')[3].get_text().strip()
+        print(data2, ':', information2)
+        m = 5
+        x = 4
+        while x < length:
+            data = table.find_all('td')[x].get_text().strip()
+            information = table.find_all('span')[m].get_text().strip()
+            print(data, ':', information)
+            x = x + 2
+            m = m + 1
+
+    def information_two(table):
+        data = table.find_all('td')[0].get_text().strip()
+        information = table.find_all('span')[1].get_text().strip()
+        if '威胁情报' in data:
+            print(data, ':', information)
+        else:
+            pass
+
+    def information_three(table):
+        data1 = table.find_all('th')[0].get_text().strip()
+        information1 = table.find_all('td')[0].get_text().strip()
+        data2 = table.find_all('th')[1].get_text().strip()
+        information2 = table.find_all('td')[1].get_text().strip()
+        data3 = table.find_all('th')[2].get_text().strip()
+        information3 = table.find_all('td')[2].get_text().strip()
+        if '纯真IP库数据' in data2:
+            print(data1, ':', information1)
+            print(data2, ':', information2)
+            print(data3, ':', information3)
+        else:
+            pass
+    for x in range(0, 7):
+        try:
+            table = soup.find_all('div')[6].find_all('table')[x]
+        except Exception:
+            pass
+        try:
+            information_one(table)
+        except Exception:
+            pass
+        try:
+            information_two(table)
+        except Exception:
+            pass
+        try:
+            information_three(table)
+        except Exception:
+            pass


 if __name__ == "__main__":
     attack()
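For context, attack() takes one positional URL argument, and the unchanged __main__ block above still calls attack() with no argument, so running the file directly would raise a TypeError. A minimal usage sketch, not part of the commit, assuming the changed module can be imported as ipquery (the filename is not shown in this view):

    # Hypothetical import; the module name ipquery is assumed from the commit title.
    from ipquery import attack

    attack('https://www.example.com')   # netloc is extracted via urlparse
    attack('8.8.8.8')                   # bare hosts and IPs fall back to the raw input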