use urls from a file as seeds (Resolves #135)
s0md3v authored Nov 17, 2018
1 parent b9bf006 commit 0a053d3
Showing 1 changed file with 39 additions and 25 deletions.
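
In short, this change adds a --seeds flag that loads crawling seeds from a newline-delimited file; a single target URL supplied on the command line is crawled alongside them. A hedged usage sketch, assuming a file named seeds.txt (the filename is illustrative, not from the commit):

    python xsstrike.py --seeds seeds.txt

Note that supplying --seeds now routes execution into the crawl branch even without --crawl, since the single-target branch is additionally guarded by not args_seeds (see the diff below).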
xsstrike.py
@@ -50,6 +50,8 @@
                     dest='find', action='store_true')
 parser.add_argument('--crawl', help='crawl',
                     dest='recursive', action='store_true')
+parser.add_argument(
+    '--seeds', help='load crawling seeds from a file', dest='args_seeds')
 parser.add_argument(
     '-f', '--file', help='load payloads from a file', dest='args_file')
 parser.add_argument('-l', '--level', help='level of crawling',
@@ -87,6 +89,7 @@
 find = args.find
 recursive = args.recursive
 args_file = args.args_file
+args_seeds = args.args_seeds
 level = args.level
 add_headers = args.add_headers
 threadCount = args.threadCount
@@ -107,6 +110,14 @@
                 '\n').encode('utf-8').decode('utf-8'))
     payloadList = list(filter(None, payloadList))
 
+seedList = []
+if args_seeds:
+    with open(args_seeds, 'r') as f:
+        for line in f:
+            seedList.append(line.strip(
+                '\n').encode('utf-8').decode('utf-8'))
+    seedList = list(filter(None, seedList))
+
 encoding = base64 if encode and encode == 'base64' else False
 
 if not proxy:
@@ -116,38 +127,41 @@
     updater()
     quit() # quitting because files have been changed
 
-if not target: # if the user hasn't supplied a url
+if not target and not args_seeds: # if the user hasn't supplied a url
     print('\n' + parser.format_help().lower())
     quit()
 
 if fuzz:
     singleFuzz(target, paramData, verbose, encoding, headers, delay, timeout)
-elif not recursive:
+elif not recursive and not args_seeds:
     if args_file:
         bruteforcer(target, paramData, payloadList, verbose, encoding, headers, delay, timeout)
     else:
         scan(target, paramData, verbose, encoding, headers, delay, timeout, skipDOM, find, skip)
 else:
-    print('%s Crawling the target' % run)
-    scheme = urlparse(target).scheme
-    verboseOutput(scheme, 'scheme', verbose)
-    host = urlparse(target).netloc
-    main_url = scheme + '://' + host
-    crawlingResult = photon(target, headers, level,
-                            threadCount, delay, timeout)
-    forms = crawlingResult[0]
-    domURLs = list(crawlingResult[1])
-    difference = abs(len(domURLs) - len(forms))
-    if len(domURLs) > len(forms):
-        for i in range(difference):
-            forms.append(0)
-    elif len(forms) > len(domURLs):
-        for i in range(difference):
-            domURLs.append(0)
-    threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=threadCount)
-    futures = (threadpool.submit(crawl, scheme, host, main_url, form, domURL, verbose,
-                                 blindXSS, blindPayload, headers, delay, timeout, skipDOM, encoding) for form, domURL in zip(forms, domURLs))
-    for i, _ in enumerate(concurrent.futures.as_completed(futures)):
-        if i + 1 == len(forms) or (i + 1) % threadCount == 0:
-            print('%s Progress: %i/%i' % (info, i + 1, len(forms)), end='\r')
-    print()
+    if target:
+        seedList.append(target)
+    for target in seedList:
+        print('%s Crawling the target' % run)
+        scheme = urlparse(target).scheme
+        verboseOutput(scheme, 'scheme', verbose)
+        host = urlparse(target).netloc
+        main_url = scheme + '://' + host
+        crawlingResult = photon(target, headers, level,
+                                threadCount, delay, timeout)
+        forms = crawlingResult[0]
+        domURLs = list(crawlingResult[1])
+        difference = abs(len(domURLs) - len(forms))
+        if len(domURLs) > len(forms):
+            for i in range(difference):
+                forms.append(0)
+        elif len(forms) > len(domURLs):
+            for i in range(difference):
+                domURLs.append(0)
+        threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=threadCount)
+        futures = (threadpool.submit(crawl, scheme, host, main_url, form, domURL, verbose,
+                                     blindXSS, blindPayload, headers, delay, timeout, skipDOM, encoding) for form, domURL in zip(forms, domURLs))
+        for i, _ in enumerate(concurrent.futures.as_completed(futures)):
+            if i + 1 == len(forms) or (i + 1) % threadCount == 0:
+                print('%s Progress: %i/%i' % (info, i + 1, len(forms)), end='\r')
+        print()
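
Pulled out of the diff, the new seed handling reduces to the self-contained sketch below. It assumes args_seeds and target are already parsed as in the script, and it omits the .encode('utf-8').decode('utf-8') round-trip from the original, which is a no-op on Python 3 strings; the comments are ours, not the author's.

    # Minimal sketch of the seed handling introduced by this commit.
    seedList = []
    if args_seeds:                                 # path passed via --seeds
        with open(args_seeds, 'r') as f:
            for line in f:
                seedList.append(line.strip('\n'))  # one URL per line
        seedList = list(filter(None, seedList))    # drop blank lines
    if target:                                     # a single target URL, if supplied,
        seedList.append(target)                    # is crawled along with the seeds
    for target in seedList:
        ...                                        # crawl and scan each seed in turn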
