Skip to content

Commit

Permalink
More Modular Structure, Fixes #13
Browse files Browse the repository at this point in the history
  • Loading branch information
s0md3v authored Nov 17, 2018
1 parent 5fe6577 commit b9bf006
Show file tree
Hide file tree
Showing 5 changed files with 272 additions and 217 deletions.
34 changes: 34 additions & 0 deletions modes/bruteforcer.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
import copy
from urllib.parse import urlparse, unquote

from core.colors import run, good, bad, green, end
from core.requester import requester
from core.utils import getUrl, getParams, verboseOutput

def bruteforcer(target, paramData, payloadList, verbose, encoding, headers, delay, timeout):
    """Brute-force mode: fire every payload in payloadList at each parameter
    of the target and print the payloads that come back reflected in the
    response body.

    target      -- URL to attack
    paramData   -- POST body string; falsy means the query string is used (GET)
    payloadList -- iterable of payload strings to try
    verbose     -- enables verboseOutput() dumps of intermediate data
    encoding    -- optional payload-encoding callable (or falsy for none)
    headers, delay, timeout -- forwarded to every requester() call
    """
    GET, POST = (False, True) if paramData else (True, False)
    host = urlparse(target).netloc  # Extracts host out of the url
    verboseOutput(host, 'host', verbose)
    url = getUrl(target, GET)
    verboseOutput(url, 'url', verbose)
    params = getParams(target, paramData, GET)
    if not params:
        print('%s No parameters to test.' % bad)
        quit()
    verboseOutput(params, 'params', verbose)
    total = len(payloadList)
    for paramName in params.keys():
        # Fuzz one parameter at a time; the others keep their original values
        paramsCopy = copy.deepcopy(params)
        for progress, payload in enumerate(payloadList, start=1):
            print('%s Bruteforcing %s[%s%s%s]%s: %i/%i' % (run, green, end, paramName, green, end, progress, total), end='\r')
            if encoding:
                payload = encoding(unquote(payload))
            paramsCopy[paramName] = payload
            response = requester(url, paramsCopy, headers,
                                 GET, delay, timeout).text
            if encoding:
                payload = encoding(payload)
            if payload in response:
                print('%s %s' % (good, payload))
        # Newline so the \r progress line isn't overwritten by later output
        print()
66 changes: 66 additions & 0 deletions modes/crawl.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
import copy
import re

from core.colors import red, good, green, end
from core.config import xsschecker
from core.dom import dom
from core.filterChecker import filterChecker
from core.generator import generator
from core.htmlParser import htmlParser
from core.requester import requester

def crawl(scheme, host, main_url, form, domURL, verbose, blindXSS, blindPayload, headers, delay, timeout, skipDOM, encoding):
    """Probe one crawled page for XSS.

    Runs the static DOM-sink check on domURL (unless skipDOM is set), then
    fuzzes every input of every form discovered on the page, printing any
    vulnerable URL/parameter/vector combination it finds. When blindXSS is
    enabled, also submits blindPayload to each parameter as a final
    fire-and-forget request.
    """
    if domURL and not skipDOM:
        # Fetch the page once and highlight potential DOM XSS sources/sinks
        response = requester(domURL, {}, headers, True, delay, timeout).text
        highlighted = dom(response)
        if highlighted:
            print('%s Potentially vulnerable objects found at %s' %
                  (good, domURL))
            print(red + ('-' * 60) + end)
            for line in highlighted:
                print(line)
            print(red + ('-' * 60) + end)
    if form:
        for each in form.values():
            url = each['action']
            if url:
                # Normalise the form's action attribute into an absolute URL
                if url.startswith(main_url):
                    pass  # already absolute and on-target
                elif url.startswith('//') and url[2:].startswith(host):
                    url = scheme + '://' + url[2:]  # protocol-relative
                elif url.startswith('/'):
                    url = scheme + '://' + host + url  # root-relative path
                elif re.match(r'\w', url[0]):
                    url = scheme + '://' + host + '/' + url  # bare relative path
                method = each['method']
                GET = True if method == 'get' else False
                inputs = each['inputs']
                paramData = {}
                for one in inputs:
                    paramData[one['name']] = one['value']
                for paramName in paramData.keys():
                    # Fuzz one parameter at a time, leaving the others at
                    # their scraped default values
                    paramsCopy = copy.deepcopy(paramData)
                    paramsCopy[paramName] = xsschecker
                    response = requester(
                        url, paramsCopy, headers, GET, delay, timeout)
                    parsedResponse = htmlParser(response, encoding)
                    occurences = parsedResponse[0]
                    # NOTE(review): positions is assigned but never read here
                    positions = parsedResponse[1]
                    # NOTE(review): efficiencies result is unused; presumably the
                    # call is kept for the requests it performs — confirm
                    efficiencies = filterChecker(
                        url, paramsCopy, headers, GET, delay, occurences, timeout, encoding)
                    vectors = generator(occurences, response.text)
                    if vectors:
                        for confidence, vects in vectors.items():
                            try:
                                # Report only the first crafted vector, from the
                                # first confidence level that has one
                                payload = list(vects)[0]
                                print('%s Vulnerable webpage: %s%s%s' %
                                      (good, green, url, end))
                                print('%s Vector for %s%s%s: %s' %
                                      (good, green, paramName, end, payload))
                                break
                            except IndexError:
                                pass  # empty vector set at this confidence level
                    if blindXSS and blindPayload:
                        # Fire-and-forget: the response is not inspected
                        paramsCopy[paramName] = blindPayload
                        requester(url, paramsCopy, headers,
                                  GET, delay, timeout)
114 changes: 114 additions & 0 deletions modes/scan.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,114 @@
import copy
from urllib.parse import urlparse, quote, unquote

from core.arjun import arjun
from core.checker import checker
from core.colors import good, bad, end, info, green, run, red, que
from core.config import xsschecker, minEfficiency
from core.dom import dom
from core.filterChecker import filterChecker
from core.generator import generator
from core.htmlParser import htmlParser
from core.requester import requester
from core.utils import getUrl, getParams, verboseOutput
from core.wafDetector import wafDetector

def scan(target, paramData, verbose, encoding, headers, delay, timeout, skipDOM, find, skip):
    """Main scan mode: test one target for DOM and reflected XSS.

    target    -- URL to scan; scheme is probed (https first) if missing
    paramData -- POST body string; falsy means the query string is fuzzed (GET)
    verbose   -- enables verboseOutput() dumps of intermediate data
    encoding  -- optional payload-encoding callable (or falsy for none)
    headers, delay, timeout -- forwarded to every requester() call
    skipDOM   -- skip the static DOM-sink check
    find      -- discover parameters with arjun() instead of parsing target
    skip      -- don't prompt to continue after a maximum-efficiency payload
    """
    GET, POST = (False, True) if paramData else (True, False)
    # If the user hasn't supplied the root url with http(s), we will handle it
    if not target.startswith('http'):
        try:
            # Probe https first; fall back to http if the request fails.
            requester('https://' + target, {}, headers, GET, delay, timeout)
            target = 'https://' + target
        except Exception:  # narrowed from bare except: let SystemExit/KeyboardInterrupt through
            target = 'http://' + target
    response = requester(target, {}, headers, GET, delay, timeout).text
    if not skipDOM:
        print('%s Checking for DOM vulnerabilities' % run)
        highlighted = dom(response)
        if highlighted:
            print('%s Potentially vulnerable objects found' % good)
            print(red + ('-' * 60) + end)
            for line in highlighted:
                print(line)
            print(red + ('-' * 60) + end)
    host = urlparse(target).netloc  # Extracts host out of the url
    verboseOutput(host, 'host', verbose)
    url = getUrl(target, GET)
    verboseOutput(url, 'url', verbose)
    params = getParams(target, paramData, GET)
    verboseOutput(params, 'params', verbose)
    if find:
        # Parameter discovery replaces whatever was parsed from the target
        params = arjun(url, GET, headers, delay, timeout)
    if not params:
        print('%s No parameters to test.' % bad)
        quit()
    WAF = wafDetector(
        url, {list(params.keys())[0]: xsschecker}, headers, GET, delay, timeout)
    if WAF:
        print('%s WAF detected: %s%s%s' % (bad, green, WAF, end))
    else:
        print('%s WAF Status: %sOffline%s' % (good, green, end))

    for paramName in params.keys():
        # Fuzz one parameter at a time; the others keep their original values
        paramsCopy = copy.deepcopy(params)
        print('%s Testing parameter: %s' % (info, paramName))
        paramsCopy[paramName] = encoding(xsschecker) if encoding else xsschecker
        response = requester(url, paramsCopy, headers, GET, delay, timeout)
        parsedResponse = htmlParser(response, encoding)
        occurences = parsedResponse[0]
        verboseOutput(occurences, 'occurences', verbose)
        positions = parsedResponse[1]
        verboseOutput(positions, 'positions', verbose)
        if not occurences:
            print('%s No reflection found' % bad)
            continue
        print('%s Reflections found: %s' % (info, len(occurences)))
        print('%s Analysing reflections' % run)
        efficiencies = filterChecker(
            url, paramsCopy, headers, GET, delay, occurences, timeout, encoding)
        verboseOutput(efficiencies, 'efficiencies', verbose)
        print('%s Generating payloads' % run)
        vectors = generator(occurences, response.text)
        verboseOutput(vectors, 'vectors', verbose)
        total = sum(len(v) for v in vectors.values())  # was a manual accumulation loop
        if total == 0:
            print('%s No vectors were crafted' % bad)
            continue
        print('%s Payloads generated: %i' % (info, total))
        # NOTE: a dead `progress` counter (incremented, never displayed) was removed
        for confidence, vects in vectors.items():
            for vect in vects:
                if not GET:
                    # POST payloads are tested decoded, then re-encoded for display
                    vect = unquote(vect)
                efficiencies = checker(
                    url, paramsCopy, headers, GET, delay, vect, positions, timeout, encoding)
                if not GET:
                    vect = quote(vect)
                if not efficiencies:
                    efficiencies = [0] * len(occurences)  # was a manual append loop
                bestEfficiency = max(efficiencies)
                if bestEfficiency == 100 or (vect[0] == '\\' and bestEfficiency >= 95):
                    print(('%s-%s' % (red, end)) * 60)
                    print('%s Payload: %s' % (good, vect))
                    print('%s Efficiency: %i' % (info, bestEfficiency))
                    print('%s Confidence: %i' % (info, confidence))
                    if not skip:
                        choice = input(
                            '%s Would you like to continue scanning? [y/N] ' % que).lower()
                        if choice != 'y':
                            quit()
                elif bestEfficiency > minEfficiency:
                    print(('%s-%s' % (red, end)) * 60)
                    print('%s Payload: %s' % (good, vect))
                    print('%s Efficiency: %i' % (info, bestEfficiency))
                    print('%s Confidence: %i' % (info, confidence))
42 changes: 42 additions & 0 deletions modes/singleFuzz.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
import copy
from urllib.parse import urlparse

from core.colors import bad, green, end, good, info
from core.config import xsschecker
from core.fuzzer import fuzzer
from core.requester import requester
from core.utils import getUrl, getParams, verboseOutput
from core.wafDetector import wafDetector

def singleFuzz(target, paramData, verbose, encoding, headers, delay, timeout):
    """Fuzzer mode: detect the WAF, then run fuzzer() once per parameter
    of the target with the xsschecker probe string substituted in.

    target    -- URL to fuzz; scheme is probed (https first) if missing
    paramData -- POST body string; falsy means the query string is used (GET)
    verbose   -- enables verboseOutput() dumps of intermediate data
    encoding  -- optional payload-encoding callable (or falsy for none)
    headers, delay, timeout -- forwarded to every requester() call
    """
    GET, POST = (False, True) if paramData else (True, False)
    # If the user hasn't supplied the root url with http(s), we will handle it
    if not target.startswith('http'):
        try:
            # Probe https first; fall back to http if the request fails.
            requester('https://' + target, {}, headers, GET, delay, timeout)
            target = 'https://' + target
        except Exception:  # narrowed from bare except: let SystemExit/KeyboardInterrupt through
            target = 'http://' + target
    host = urlparse(target).netloc  # Extracts host out of the url
    verboseOutput(host, 'host', verbose)
    url = getUrl(target, GET)
    verboseOutput(url, 'url', verbose)
    params = getParams(target, paramData, GET)
    verboseOutput(params, 'params', verbose)
    if not params:
        print('%s No parameters to test.' % bad)
        quit()
    WAF = wafDetector(
        url, {list(params.keys())[0]: xsschecker}, headers, GET, delay, timeout)
    if WAF:
        print('%s WAF detected: %s%s%s' % (bad, green, WAF, end))
    else:
        print('%s WAF Status: %sOffline%s' % (good, green, end))

    for paramName in params.keys():
        print('%s Fuzzing parameter: %s' % (info, paramName))
        # Fuzz one parameter at a time; the others keep their original values
        paramsCopy = copy.deepcopy(params)
        paramsCopy[paramName] = xsschecker
        fuzzer(url, paramsCopy, headers, GET,
               delay, timeout, WAF, encoding)
Loading

0 comments on commit b9bf006

Please sign in to comment.