Use URLs from a file as seeds (Resolves #135)

This commit is contained in:
Somdev Sangwan
2018-11-17 23:17:50 +05:30
committed by GitHub
parent b9bf006e2c
commit 0a053d351d

View File

@@ -50,6 +50,8 @@ parser.add_argument('--params', help='find params',
dest='find', action='store_true')
parser.add_argument('--crawl', help='crawl',
dest='recursive', action='store_true')
parser.add_argument(
'--seeds', help='load crawling seeds from a file', dest='args_seeds')
parser.add_argument(
'-f', '--file', help='load payloads from a file', dest='args_file')
parser.add_argument('-l', '--level', help='level of crawling',
@@ -87,6 +89,7 @@ proxy = args.proxy
find = args.find
recursive = args.recursive
args_file = args.args_file
args_seeds = args.args_seeds
level = args.level
add_headers = args.add_headers
threadCount = args.threadCount
@@ -107,6 +110,14 @@ if args_file:
'\n').encode('utf-8').decode('utf-8'))
payloadList = list(filter(None, payloadList))
seedList = []
if args_seeds:
with open(args_seeds, 'r') as f:
for line in f:
seedList.append(line.strip(
'\n').encode('utf-8').decode('utf-8'))
seedList = list(filter(None, seedList))
encoding = base64 if encode and encode == 'base64' else False
if not proxy:
@@ -116,18 +127,21 @@ if update: # if the user has supplied --update argument
updater()
quit() # quitting because files have been changed
if not target: # if the user hasn't supplied a url
if not target and not args_seeds: # if the user hasn't supplied a url
print('\n' + parser.format_help().lower())
quit()
if fuzz:
singleFuzz(target, paramData, verbose, encoding, headers, delay, timeout)
elif not recursive:
elif not recursive and not args_seeds:
if args_file:
bruteforcer(target, paramData, payloadList, verbose, encoding, headers, delay, timeout)
else:
scan(target, paramData, verbose, encoding, headers, delay, timeout, skipDOM, find, skip)
else:
if target:
seedList.append(target)
for target in seedList:
print('%s Crawling the target' % run)
scheme = urlparse(target).scheme
verboseOutput(scheme, 'scheme', verbose)