From 0a053d351ded91acc11c01ae22ffa2f1125ac8d1 Mon Sep 17 00:00:00 2001
From: Somdev Sangwan
Date: Sat, 17 Nov 2018 23:17:50 +0530
Subject: [PATCH] use urls from a file as seeds (Resolves #135)

---
 xsstrike.py | 64 ++++++++++++++++++++++++++++++++---------------------
 1 file changed, 39 insertions(+), 25 deletions(-)

diff --git a/xsstrike.py b/xsstrike.py
index d6eae68c..10046d5c 100644
--- a/xsstrike.py
+++ b/xsstrike.py
@@ -50,6 +50,8 @@
                     dest='find', action='store_true')
 parser.add_argument('--crawl', help='crawl', dest='recursive',
                     action='store_true')
+parser.add_argument(
+    '--seeds', help='load crawling seeds from a file', dest='args_seeds')
 parser.add_argument(
     '-f', '--file', help='load payloads from a file', dest='args_file')
 parser.add_argument('-l', '--level', help='level of crawling',
@@ -87,6 +89,7 @@
 find = args.find
 recursive = args.recursive
 args_file = args.args_file
+args_seeds = args.args_seeds
 level = args.level
 add_headers = args.add_headers
 threadCount = args.threadCount
@@ -107,6 +110,14 @@
                     '\n').encode('utf-8').decode('utf-8'))
         payloadList = list(filter(None, payloadList))
 
+seedList = []
+if args_seeds:
+    with open(args_seeds, 'r') as f:
+        for line in f:
+            seedList.append(line.strip(
+                '\n').encode('utf-8').decode('utf-8'))
+    seedList = list(filter(None, seedList))
+
 encoding = base64 if encode and encode == 'base64' else False
 
 if not proxy:
@@ -116,38 +127,41 @@
     updater()
     quit()  # quitting because files have been changed
 
-if not target:  # if the user hasn't supplied a url
+if not target and not args_seeds:  # if the user hasn't supplied a url
     print('\n' + parser.format_help().lower())
     quit()
 
 if fuzz:
     singleFuzz(target, paramData, verbose, encoding, headers, delay, timeout)
-elif not recursive:
+elif not recursive and not args_seeds:
     if args_file:
         bruteforcer(target, paramData, payloadList, verbose, encoding, headers, delay, timeout)
     else:
         scan(target, paramData, verbose, encoding, headers, delay, timeout, skipDOM, find, skip)
 else:
-    print('%s Crawling the target' % run)
-    scheme = urlparse(target).scheme
-    verboseOutput(scheme, 'scheme', verbose)
-    host = urlparse(target).netloc
-    main_url = scheme + '://' + host
-    crawlingResult = photon(target, headers, level,
-                            threadCount, delay, timeout)
-    forms = crawlingResult[0]
-    domURLs = list(crawlingResult[1])
-    difference = abs(len(domURLs) - len(forms))
-    if len(domURLs) > len(forms):
-        for i in range(difference):
-            forms.append(0)
-    elif len(forms) > len(domURLs):
-        for i in range(difference):
-            domURLs.append(0)
-    threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=threadCount)
-    futures = (threadpool.submit(crawl, scheme, host, main_url, form, domURL, verbose,
-                                 blindXSS, blindPayload, headers, delay, timeout, skipDOM, encoding) for form, domURL in zip(forms, domURLs))
-    for i, _ in enumerate(concurrent.futures.as_completed(futures)):
-        if i + 1 == len(forms) or (i + 1) % threadCount == 0:
-            print('%s Progress: %i/%i' % (info, i + 1, len(forms)), end='\r')
-    print()
+    if target:
+        seedList.append(target)
+    for target in seedList:
+        print('%s Crawling the target' % run)
+        scheme = urlparse(target).scheme
+        verboseOutput(scheme, 'scheme', verbose)
+        host = urlparse(target).netloc
+        main_url = scheme + '://' + host
+        crawlingResult = photon(target, headers, level,
+                                threadCount, delay, timeout)
+        forms = crawlingResult[0]
+        domURLs = list(crawlingResult[1])
+        difference = abs(len(domURLs) - len(forms))
+        if len(domURLs) > len(forms):
+            for i in range(difference):
+                forms.append(0)
+        elif len(forms) > len(domURLs):
+            for i in range(difference):
+                domURLs.append(0)
+        threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=threadCount)
+        futures = (threadpool.submit(crawl, scheme, host, main_url, form, domURL, verbose,
+                                     blindXSS, blindPayload, headers, delay, timeout, skipDOM, encoding) for form, domURL in zip(forms, domURLs))
+        for i, _ in enumerate(concurrent.futures.as_completed(futures)):
+            if i + 1 == len(forms) or (i + 1) % threadCount == 0:
+                print('%s Progress: %i/%i' % (info, i + 1, len(forms)), end='\r')
+        print()
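
Usage note (illustrative, not part of the patch): with this change, crawling seeds can be loaded from a file containing one URL per line, e.g. python xsstrike.py --seeds urls.txt --crawl, where urls.txt is only an example filename. A minimal standalone sketch of the seed-loading pattern the third hunk introduces, assuming such a file exists locally:

    # Minimal sketch (not XSStrike code): read one URL per line,
    # strip the trailing newline, and drop blank lines.
    seedList = []
    with open('urls.txt', 'r') as f:  # 'urls.txt' is a hypothetical example file
        for line in f:
            seedList.append(line.strip('\n'))
    seedList = list(filter(None, seedList))  # remove empty entries
    print(seedList)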