2.1.0 build
This commit is contained in:
@@ -1,5 +1,28 @@
|
||||
import re
|
||||
from core.utils import reader, parse_request
|
||||
|
||||
def reader(path, mode='string'):
    """
    Read a file from disk.

    Args:
        path: filesystem path of the file to read
        mode: 'lines' to get a list of the file's non-empty lines
              (trailing newlines stripped); any other value returns
              the whole file as one string

    Returns:
        list[str] when mode == 'lines', otherwise str
    """
    with open(path, 'r', encoding='utf-8') as file:
        if mode == 'lines':
            # filter(None, ...) drops lines that are empty after
            # stripping the trailing newline
            return list(filter(None, (line.rstrip('\n') for line in file)))
        # file.read() is the idiomatic equivalent of joining every line
        return file.read()
def parse_request(string):
    """
    Parse a raw HTTP request into its components.

    Returns a dict with keys 'method', 'path', 'headers' (run through
    parse_headers) and 'data'; any part the pattern does not capture
    comes back as None.
    """
    parsed = re.search(r'(?:([a-zA-Z0-9]+) ([^ ]+) [^ ]+\n)?([\s\S]+\n)\n?([\s\S]+)?', string)
    return {
        'method': parsed.group(1),
        'path': parsed.group(2),
        'headers': parse_headers(parsed.group(3)),
        'data': parsed.group(4),
    }
|
||||
|
||||
burp_regex = re.compile(r'''(?m)^ <url><!\[CDATA\[(.+?)\]\]></url>
|
||||
<host ip="[^"]*">[^<]+</host>
|
||||
@@ -15,44 +38,59 @@ burp_regex = re.compile(r'''(?m)^ <url><!\[CDATA\[(.+?)\]\]></url>
|
||||
|
||||
|
||||
def burp_import(path):
    """
    Import targets from a Burp Suite XML export.

    Args:
        path: path to the Burp export file

    Returns:
        list of request dicts (url, method, extension, headers,
        include, code, length, mime), keeping only entries whose
        response MIME type is HTML or JSON
    """
    requests = []
    content = reader(path)
    for match in re.finditer(burp_regex, content):
        # group(4) holds the raw request; split it into its parts
        request = parse_request(match.group(4))
        headers = request['headers']
        # group(7) is the response MIME type reported by Burp
        if match.group(7) in ('HTML', 'JSON'):
            requests.append({
                'url': match.group(1),
                'method': match.group(2),
                'extension': match.group(3),
                'headers': headers,
                'include': request['data'],
                'code': match.group(5),
                'length': match.group(6),
                'mime': match.group(7),
            })
    return requests
def urls_import(path, method, headers, include):
    """
    Import targets from a newline delimited text file of URLs.

    Args:
        path: path to the URL list file
        method: HTTP method attached to every generated request
        headers: headers dict attached to every generated request
        include: data payload attached to every generated request

    Returns:
        list of request dicts (url, method, headers, data)
    """
    # reader(..., mode='lines') already drops empty lines
    return [
        {
            'url': url,
            'method': method,
            'headers': headers,
            'data': include,
        }
        for url in reader(path, mode='lines')
    ]
def request_import(path):
    """
    Import a target from a raw HTTP request file.

    Args:
        path: path to the raw request file

    Returns:
        dict produced by parse_request (method, path, headers, data)
    """
    return parse_request(reader(path))
def importer(path, method, headers, include):
|
||||
"""
|
||||
main importer function that calls other import functions
|
||||
"""
|
||||
with open(path, 'r', encoding='utf-8') as file:
|
||||
for line in file:
|
||||
if line.startswith('<?xml'):
|
||||
|
||||
Reference in New Issue
Block a user