diff --git a/README.md b/README.md index ef86560..89508eb 100644 --- a/README.md +++ b/README.md @@ -59,7 +59,7 @@ creating, retrieving, updating, or deleting sites, scan commands, scans, etc. * Engine: If you plan on compiling masscan on an engine, you'll need at least 1024 MB of memory. It fails to build with only 512 MB. If you do not want to build masscan, set `install_masscan_on_engine` to `False` in -`ansible-playbooks/group_vars/all` +`ansible-playbooks/group_vars/all.yml` * Console: 512 MB of memory was the smallest amount successfully tested, however, if you plan on processing large scan files (using the scripts found in `console/scan_results`: `masscan_json_to_csv.py`, `nmap_to_csv.py` or @@ -120,7 +120,7 @@ The recommendation is to deploy the console first. #### Update Console Ansible Variables -Edit any variables in `ansible-playbooks/group_vars/all` before running playbook. Note the time zone variables: +Edit any variables in `ansible-playbooks/group_vars/all.yml` before running playbook. Note the time zone variables: * `timezone_server` - Set this to be the timezone you want the server to be in, usually UTC. * `timezone_django` - Set this to be your local timezone. It makes dealing with dates, times, and scheduling easier. @@ -192,7 +192,7 @@ python3 manage.py changepassword admin Edit any variables in these files before running playbook: -* `ansible-playbooks/group_vars/all` +* `ansible-playbooks/group_vars/all.yml` * `ansible-playbooks/roles/engine/vars/main.yml` #### Ensure proper user permissions diff --git a/ansible-playbooks/group_vars/all b/ansible-playbooks/group_vars/all.yml similarity index 100% rename from ansible-playbooks/group_vars/all rename to ansible-playbooks/group_vars/all.yml diff --git a/ansible-playbooks/roles/console/vars/main.yml b/ansible-playbooks/roles/console/vars/main.yml index 4b464a3..e0f719b 100644 --- a/ansible-playbooks/roles/console/vars/main.yml +++ b/ansible-playbooks/roles/console/vars/main.yml @@ -1,6 +1,6 @@ --- enable_ufw_firewall: true -reboot_box: true +reboot_box: false install_packages: - autossh @@ -11,8 +11,8 @@ install_packages: - nmap - postfix - postgresql-{{ postgresql_version }} - - python-dev - python3-pip + - python-dev - python-virtualenv - python3-psycopg2 # python3-psycopg2 required for Ansible. 
- redis diff --git a/ansible-playbooks/roles/engine/vars/main.yml b/ansible-playbooks/roles/engine/vars/main.yml index a81ec2d..9af3441 100644 --- a/ansible-playbooks/roles/engine/vars/main.yml +++ b/ansible-playbooks/roles/engine/vars/main.yml @@ -1,6 +1,6 @@ --- enable_ufw_firewall: true -reboot_box: true +reboot_box: false install_packages: - autossh diff --git a/console/django_scantron/__init__.py b/console/django_scantron/__init__.py index cf38ef0..a29c76c 100644 --- a/console/django_scantron/__init__.py +++ b/console/django_scantron/__init__.py @@ -1 +1 @@ -__version__ = "1.45" +__version__ = "1.46.0" diff --git a/console/django_scantron/api/serializers.py b/console/django_scantron/api/serializers.py index 8dfed9f..2cf8d3f 100644 --- a/console/django_scantron/api/serializers.py +++ b/console/django_scantron/api/serializers.py @@ -48,7 +48,7 @@ def validate(self, attrs): globally_excluded_targets = attrs["globally_excluded_targets"] target_extractor = extract_targets.TargetExtractor( - targets_string=globally_excluded_targets, private_ips_allowed=True, sort_targets=True + targets_string=globally_excluded_targets, sort_targets=True ) targets_dict = target_extractor.targets_dict @@ -56,6 +56,8 @@ def validate(self, attrs): invalid_targets = ",".join(targets_dict["invalid_targets"]) raise serializers.ValidationError(f"Invalid globally excluded targets provided: {invalid_targets}") + attrs["globally_excluded_targets"] = targets_dict["as_nmap"] + return attrs class Meta: @@ -87,28 +89,28 @@ def validate(self, attrs): if "targets" in attrs: targets = attrs["targets"] - target_extractor = extract_targets.TargetExtractor( - targets_string=targets, private_ips_allowed=True, sort_targets=True - ) + target_extractor = extract_targets.TargetExtractor(targets_string=targets, sort_targets=True) targets_dict = target_extractor.targets_dict if targets_dict["invalid_targets"]: invalid_targets = ",".join(targets_dict["invalid_targets"]) raise serializers.ValidationError(f"Invalid targets provided: {invalid_targets}") + attrs["targets"] = targets_dict["as_nmap"] + # Excluded targets if "excluded_targets" in attrs: excluded_targets = attrs["excluded_targets"] - target_extractor = extract_targets.TargetExtractor( - targets_string=excluded_targets, private_ips_allowed=True, sort_targets=True - ) + target_extractor = extract_targets.TargetExtractor(targets_string=excluded_targets, sort_targets=True) targets_dict = target_extractor.targets_dict if targets_dict["invalid_targets"]: invalid_targets = ",".join(targets_dict["invalid_targets"]) raise serializers.ValidationError(f"Invalid excluded targets provided: {invalid_targets}") + attrs["excluded_targets"] = targets_dict["as_nmap"] + # Email scan alerts and email address. if ("email_scan_alerts" in attrs) and ("email_alert_addresses" in attrs): @@ -116,7 +118,7 @@ def validate(self, attrs): email_alert_addresses = attrs["email_alert_addresses"] if email_scan_alerts and not email_alert_addresses: - raise serializers.ValidationError(f"Provide an email address if enabling 'Email scan alerts'") + raise serializers.ValidationError("Provide an email address if enabling 'Email scan alerts'") # Check for valid email addresseses string. 
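These serializer hunks do more than validate: submitted targets are now rewritten with the extractor's normalized nmap-style output before being stored. A minimal sketch of that round trip, assuming the console virtualenv where `extract_targets.py` and its dependencies (`fqdn`, `tld`, `requests`) are importable:

```python
# Sketch only: mirrors what the updated serializers do with user-submitted targets.
from extract_targets import TargetExtractor  # console/extract_targets.py

te = TargetExtractor(targets_string="scanme.nmap.org 8.8.8.8 7.7.7.0/24", sort_targets=True)
targets_dict = te.targets_dict

if targets_dict["invalid_targets"]:
    raise ValueError(f"Invalid targets provided: {','.join(targets_dict['invalid_targets'])}")

# The normalized, space-delimited string is what now gets saved on the model.
print(targets_dict["as_nmap"])  # "8.8.8.8 7.7.7.0/24 scanme.nmap.org"
```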
if "email_alert_addresses" in attrs: @@ -134,7 +136,7 @@ def validate(self, attrs): email_scan_diff_addresses = attrs["email_scan_diff_addresses"] if email_scan_diff and not email_scan_diff_addresses: - raise serializers.ValidationError(f"Provide an email address if enabling 'Email nmap scan diff'") + raise serializers.ValidationError("Provide an email address if enabling 'Email nmap scan diff'") # Check for valid email addresseses string. if "email_scan_diff_addresses" in attrs: diff --git a/console/django_scantron/models.py b/console/django_scantron/models.py index 46e010d..84a4b56 100644 --- a/console/django_scantron/models.py +++ b/console/django_scantron/models.py @@ -111,7 +111,7 @@ def clean(self): # Globally excluded targets. target_extractor = extract_targets.TargetExtractor( - targets_string=self.globally_excluded_targets, private_ips_allowed=True, sort_targets=True + targets_string=self.globally_excluded_targets, sort_targets=True ) targets_dict = target_extractor.targets_dict @@ -225,9 +225,7 @@ def clean(self): raise ValidationError("Select a single scan engine or scan engine pool.") # Targets - target_extractor = extract_targets.TargetExtractor( - targets_string=self.targets, private_ips_allowed=True, sort_targets=True - ) + target_extractor = extract_targets.TargetExtractor(targets_string=self.targets, sort_targets=True) targets_dict = target_extractor.targets_dict if targets_dict["invalid_targets"]: @@ -237,9 +235,7 @@ def clean(self): self.targets = targets_dict["as_nmap"] # Excluded targets - target_extractor = extract_targets.TargetExtractor( - targets_string=self.excluded_targets, private_ips_allowed=True, sort_targets=True - ) + target_extractor = extract_targets.TargetExtractor(targets_string=self.excluded_targets, sort_targets=True) targets_dict = target_extractor.targets_dict if targets_dict["invalid_targets"]: diff --git a/console/email_validation_utils.py b/console/email_validation_utils.py index 133ec7b..bce2f8a 100644 --- a/console/email_validation_utils.py +++ b/console/email_validation_utils.py @@ -1,5 +1,5 @@ """ -Email validation methods for models.py and DRF's serializers.py. Not kept in utility.py because of Django project +Email validation methods for models.py and DRF's serializers.py. Not kept in utility.py because of Django project loading issues. """ # Standard Python libraries. diff --git a/console/extract_targets.py b/console/extract_targets.py index ab9ca1e..0acfd54 100755 --- a/console/extract_targets.py +++ b/console/extract_targets.py @@ -1,63 +1,275 @@ -"""Extracts FQDNs, IPv4, and IPv6 addresses from a string or file.""" +"""Extract FQDNs, IPv4, and IPv6 networks/addresses from a string or file.""" # Standard Python libraries. import argparse import ipaddress +import json import os -import pprint import sys # Third party Python libraries. - +import fqdn +import requests +import tld # Custom Python libraries. -import fqdn +__version__ = "1.0.0" -class TargetExtractor: - def __init__(self, targets_string=None, targets_file=None, private_ips_allowed=False, sort_targets=False): - self.targets_string = str(targets_string).strip() - self.targets_file = targets_file - self.private_ips_allowed = private_ips_allowed - self.sort_targets = sort_targets - # Read targets from file as string. 
- if self.targets_file: - with open(self.targets_file, "r") as fh: - self.targets_string = fh.read().strip() +def is_ip_address(ip): + """Returns True/False if a string is a valid IPv4 or IPv6 address.""" - self.targets_dict = self.extract_targets(self.targets_string) + ip = str(ip) + + try: + ipaddress.ip_address(ip) + return True + + except ValueError: + return False - def is_ip_address(self, ip): - """Takes an IP address returns True/False if it is a valid IPv4 or IPv6 address.""" - ip = str(ip) +def is_ipv4_address(ip): + """Returns True/False if a string is a valid IPv4 address.""" - try: - ipaddress.ip_address(ip) + ip = str(ip) + + try: + if ipaddress.ip_address(ip).version == 4: return True - except ValueError: + else: return False - def is_ip_network(self, address, strict=False): - """Takes an address returns True/False if it is a valid network.""" + except ValueError as e: + print(f"{e}") + - address = str(address) +def is_ipv6_address(ip): + """Returns True/False if a string is a valid IPv6 address.""" - try: - ipaddress.ip_network(address, strict) + ip = str(ip) + + try: + if ipaddress.ip_address(ip).version == 6: return True - except ValueError: + else: return False - def is_valid_fqdn(self, domain): - """Test if a provided domain is a valid FQDN.""" + except ValueError as e: + print(f"{e}") + + +def is_ip_network(network, strict=False): + """Returns True/False if a string is a valid network.""" + + network = str(network) + + try: + ipaddress.ip_network(network, strict) + return True + + except ValueError: + return False + + +def is_valid_fqdn(domain): + """Return True/False if a provided domain is a valid FQDN, not necessarily if it contains a valid top level domain.""" + + domain_is_valid_fqdn = fqdn.FQDN(domain).is_valid + + return domain_is_valid_fqdn + + +def domain_has_valid_fqdn(domain): + """Return True/False if a FQDN has a valid top level domain (TLD).""" + + try: + tld.get_tld(domain, fix_protocol=True) + return True + + except tld.exceptions.TldDomainNotFound: + return False + + +def retrieve_cloudflare_ip_networks( + retrieve_new_data=False, cloudflare_filename="cloudflare_ip_networks.txt", write_to_disk=True +): + """Retrieve the IPv4 and IPv6 ranges for Cloudflare servers. + + https://www.cloudflare.com/ips/ + """ + + cloudflare_dict = { + "list_of_strings": set(), + "list_of_ipaddress_objects": set(), + } + + # If cloudflare_filename already exists and fresh data isn't requested. + if os.path.exists(cloudflare_filename) and not retrieve_new_data: + + print(f"File already exists: {cloudflare_filename}") + + with open(cloudflare_filename, "r") as fh: + for ip_network in fh.readlines(): + cloudflare_dict["list_of_ipaddress_objects"].add(ipaddress.ip_network(ip_network.strip())) + + else: + + for ip_version in ["4", "6"]: + + print(f"Retrieving Cloudflare IPv{ip_version} networks") + + url = f"https://www.cloudflare.com/ips-v{ip_version}" + response = requests.get(url, timeout=2, verify=True) + + if response.status_code == 200: + text = response.text + + for ip_network in text.strip().split("\n"): + cloudflare_dict["list_of_ipaddress_objects"].add(ipaddress.ip_network(ip_network)) + + else: + print("Cloudflare IP networks could not be retrieved.") + + # Return a list of sorted IPv4 and IPv6 networks. 
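A quick illustration of the new module-level helpers, and of why `domain_has_valid_fqdn()` backs up the character-level check (sketch; the `192.168.1.999` behavior is the one the comments later in this diff describe):

```python
from extract_targets import is_ip_address, is_ip_network, is_valid_fqdn, domain_has_valid_fqdn

print(is_ip_address("8.8.8.8"))        # True
print(is_ip_address("192.168.1.999"))  # False -- fat-fingered octet
print(is_ip_network("7.7.7.0/24"))     # True

# Character rules alone accept any FQDN-shaped string; the TLD lookup rejects it.
print(is_valid_fqdn("192.168.1.999"))           # True -- syntactically FQDN-like
print(domain_has_valid_fqdn("192.168.1.999"))   # False -- "999" is not a TLD
print(domain_has_valid_fqdn("rackspace.comm"))  # False -- neither is "comm"
```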
+    # See https://docs.python.org/3/library/ipaddress.html#ipaddress.get_mixed_type_key
+    cloudflare_dict["list_of_ipaddress_objects"] = sorted(
+        cloudflare_dict["list_of_ipaddress_objects"], key=lambda obj: ipaddress.get_mixed_type_key(obj)
+    )
+
+    # Convert ipaddress objects to strings.
+    cloudflare_dict["list_of_strings"] = [str(obj) for obj in cloudflare_dict["list_of_ipaddress_objects"]]
+
+    # Only write to disk if fresh data is requested.
+    if write_to_disk and retrieve_new_data:
+        print(f"Writing Cloudflare IP networks to disk: {cloudflare_filename}")
+        with open(cloudflare_filename, "w") as fh:
+            for ip_network in cloudflare_dict["list_of_strings"]:
+                fh.write(f"{ip_network}\n")
+
+    # print(f"cloudflare_dict: {cloudflare_dict}")
+
+    return cloudflare_dict
+
+
+def retrieve_amazon_cloudfront_ip_ranges(
+    retrieve_new_data=False, aws_cloudfront_filename="aws_cloudfront_ip_networks.txt", write_to_disk=True
+):
+    """Retrieve the IPv4 and IPv6 ranges for AWS' CloudFront servers.
+
+    https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/LocationsOfEdgeServers.html
+    """
+
+    cloudfront_dict = {
+        "list_of_strings": set(),
+        "list_of_ipaddress_objects": set(),
+    }
+
+    # If aws_cloudfront_filename already exists and fresh data isn't requested.
+    if os.path.exists(aws_cloudfront_filename) and not retrieve_new_data:
+
+        print(f"File already exists: {aws_cloudfront_filename}")
+
+        with open(aws_cloudfront_filename, "r") as fh:
+            for ip_network in fh.readlines():
+                cloudfront_dict["list_of_ipaddress_objects"].add(ipaddress.ip_network(ip_network.strip()))
+
+    else:
+
+        print("Retrieving IPv4 and IPv6 network ranges for AWS' CloudFront servers.")
+
+        url = "https://ip-ranges.amazonaws.com/ip-ranges.json"
+        response = requests.get(url, verify=True)
+
+        if response.status_code == 200:
+            json_data = response.json()
+
+            for service in json_data["prefixes"]:
+                if service["service"] == "CLOUDFRONT":
+                    cloudfront_dict["list_of_ipaddress_objects"].add(ipaddress.ip_network(service["ip_prefix"]))
+
+            for service in json_data["ipv6_prefixes"]:
+                if service["service"] == "CLOUDFRONT":
+                    cloudfront_dict["list_of_ipaddress_objects"].add(ipaddress.ip_network(service["ipv6_prefix"]))
+
+        else:
+            print("CloudFront IP networks could not be retrieved.")
+
+    # Return a list of sorted IPv4 and IPv6 networks.
+    # See https://docs.python.org/3/library/ipaddress.html#ipaddress.get_mixed_type_key
+    cloudfront_dict["list_of_ipaddress_objects"] = sorted(
+        cloudfront_dict["list_of_ipaddress_objects"], key=lambda obj: ipaddress.get_mixed_type_key(obj)
+    )
+
+    # Convert ipaddress objects to strings.
+    cloudfront_dict["list_of_strings"] = [str(obj) for obj in cloudfront_dict["list_of_ipaddress_objects"]]
+
+    # Only write to disk if fresh data is requested.
+    if write_to_disk and retrieve_new_data:
+        print(f"Writing CloudFront IP networks to disk: {aws_cloudfront_filename}")
+        with open(aws_cloudfront_filename, "w") as fh:
+            for ip_network in cloudfront_dict["list_of_strings"]:
+                fh.write(f"{ip_network}\n")
+
+    # print(f"cloudfront_dict: {cloudfront_dict}")
+
+    return cloudfront_dict
+
+
+def retrieve_cdn_ip_networks(retrieve_new_data=False):
+    """Create a list of CDN IPv4 and IPv6 networks."""
+
+    # Collect all CDN networks.
+    cdn_ip_networks = []
+
+    # Retrieve AWS' CloudFront CDN IP networks.
+    cloudfront_dict = retrieve_amazon_cloudfront_ip_ranges(retrieve_new_data)
+    cdn_ip_networks += cloudfront_dict["list_of_ipaddress_objects"]
+
+    # Retrieve Cloudflare's CDN IP networks.
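Both retrieval functions above follow the same cache-then-fetch pattern: read the local text file when it exists, otherwise pull the published ranges and optionally persist them. A usage sketch (assumes outbound HTTPS access):

```python
from extract_targets import retrieve_amazon_cloudfront_ip_ranges

# First run: fetch ip-ranges.amazonaws.com and write aws_cloudfront_ip_networks.txt.
cloudfront_dict = retrieve_amazon_cloudfront_ip_ranges(retrieve_new_data=True)

# Subsequent runs: read the cached file, no HTTP request made.
cloudfront_dict = retrieve_amazon_cloudfront_ip_ranges()

print(len(cloudfront_dict["list_of_strings"]))  # number of published CloudFront prefixes
```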
+ cloudflare_dict = retrieve_cloudflare_ip_networks(retrieve_new_data) + cdn_ip_networks += cloudflare_dict["list_of_ipaddress_objects"] + + # Return a list of sorted IPv4 and IPv6 networks. + # See https://docs.python.org/3/library/ipaddress.html#ipaddress.get_mixed_type_key + cdn_ip_networks = sorted(cdn_ip_networks, key=lambda obj: ipaddress.get_mixed_type_key(obj)) + + return cdn_ip_networks + + +class TargetExtractor: + def __init__( + self, + delimiter="", + targets_string=None, + targets_file=None, + exclude_private_ips=False, + sort_targets=False, + exclude_cdn_ip_networks=False, + retrieve_new_cdn_ip_data=False, + write_to_disk=False, + ): + self.delimiter = delimiter + self.targets_string = str(targets_string).strip() + self.targets_file = targets_file + self.exclude_private_ips = exclude_private_ips + self.sort_targets = sort_targets + self.exclude_cdn_ip_networks = exclude_cdn_ip_networks + self.retrieve_new_cdn_ip_data = retrieve_new_cdn_ip_data + self.write_to_disk = write_to_disk + + if self.exclude_cdn_ip_networks: + self.cdn_ip_networks = retrieve_cdn_ip_networks(self.retrieve_new_cdn_ip_data) - is_valid_fqdn = fqdn.FQDN(domain).is_valid + # Read targets from file as string. + if self.targets_file: + with open(self.targets_file, "r") as fh: + self.targets_string = fh.read().strip() - return is_valid_fqdn + self.targets_dict = self.extract_targets(self.targets_string) # def expand_range_of_ips(self, start_ip, end_ip): # """Takes an IP range and returns all the IPs in that range. @@ -85,94 +297,72 @@ def is_valid_fqdn(self, domain): # return ip_range - def is_ipv4_address(self, ip): - """Takes an IP address and returns True/False if it is a valid IPv4 address.""" - - ip = str(ip) - - try: - if ipaddress.ip_address(ip).version == 4: - return True - - else: - return False - - except ValueError as e: - print(f"{e}") - - def is_ipv6_address(self, ip): - """Takes an IP address and returns True/False if it is a valid IPv6 address.""" - - ip = str(ip) - - try: - if ipaddress.ip_address(ip).version == 6: - return True - - else: - return False - - except ValueError as e: - print(f"{e}") - def update_disallowed_target(self, targets_dict, target): - """Update disallowed target list and count.""" + """Update disallowed target list.""" - targets_dict["disallowed_targets"].append(str(target)) + targets_dict["disallowed_targets"].add(target) def extract_targets(self, targets_string): - """Extracts valid IPv4 IP addresses from a string.""" + """Extracts valid domains and IPv4/IPv6 addresses/networks from a string.""" - # Dictionary to track valid and invalid targets. - # fmt:off + # Dictionary to track valid, invalid, and disallowed targets. All sets are eventually converted to lists. targets_dict = { - "ip_addresses": { - "as_list": [], + "ipv4_addresses": { + "as_list": set(), + "as_csv": "", + "as_nmap": "", + "total": 0, + }, + "ipv4_networks": { + "as_list": set(), + "as_csv": "", + "as_nmap": "", + "total": 0, + }, + "ipv6_addresses": { + "as_list": set(), "as_csv": "", "as_nmap": "", "total": 0, }, - "ip_networks": { - "as_list": [], + "ipv6_networks": { + "as_list": set(), "as_csv": "", "as_nmap": "", "total": 0, }, "domains": { - "as_list": [], + "as_list": set(), "as_csv": "", "as_nmap": "", "total": 0, }, - "invalid_targets": [], + "invalid_targets": set(), "invalid_targets_total": 0, - "disallowed_targets": [], + "disallowed_targets": set(), "disallowed_targets_total": 0, "as_list": [], "as_csv": "", "as_nmap": "", "total": 0, } - # fmt:on - # Split on spaces. 
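The constructor now exposes the new knobs directly; `delimiter`, for example, lets callers pass CSV-style input instead of whitespace-separated targets. A short sketch:

```python
from extract_targets import TargetExtractor

te = TargetExtractor(
    targets_string="8.8.8.8,1.1.1.1,scanme.nmap.org",
    delimiter=",",  # default "" falls back to a whitespace split()
    exclude_private_ips=True,
    sort_targets=True,
)
print(te.targets_dict["as_csv"])  # "1.1.1.1,8.8.8.8,scanme.nmap.org"
```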
-        target_list = targets_string.split()
+        # Split on delimiter if provided.
+        if self.delimiter:
+            print(f'Using delimiter: "{self.delimiter}"')
+            target_list = targets_string.split(self.delimiter)
+        else:
+            target_list = targets_string.split()
 
         for target in target_list:
 
-            # Convert to a ipaddress object if it is an IP address.
-            if self.is_ip_address(target):
+            # Check if target is an IP address.
+            if is_ip_address(target):
+                # If so, convert it to an ipaddress.ip_address object.
                 ip_address = ipaddress.ip_address(target)
 
-                # Ensure they are not RFC1918.
-                # Cloud metadata IPs are covered under this as well: 169.254.169.254
-                if ip_address.is_private and not self.private_ips_allowed:
-                    print(f"IP address is a private IP: {ip_address}")
-                    self.update_disallowed_target(targets_dict, ip_address)
-                    continue
-
-                elif ip_address.is_multicast:
+                if ip_address.is_multicast:
                     print(f"IP address is a multicast IP: {ip_address}")
                     self.update_disallowed_target(targets_dict, ip_address)
                     continue
@@ -182,110 +372,236 @@ def extract_targets(self, targets_string):
                 elif ip_address.is_loopback:
                     print(f"IP address is a loopback IP: {ip_address}")
                     self.update_disallowed_target(targets_dict, ip_address)
                     continue
 
+                # Cloud metadata IPs are covered under this as well: 169.254.169.254
                 elif ip_address.is_link_local:
                     print(f"IP address is a link local IP: {ip_address}")
                     self.update_disallowed_target(targets_dict, ip_address)
                     continue
 
-                # Double check and make sure IP is a public (global) IP if private IPs are not allowed.
-                if not ip_address.is_global and not self.private_ips_allowed:
+                # Lastly, check if it is an RFC1918 IP address. This check is saved for last because
+                # 169.254.169.254 would otherwise be flagged as private (which it technically is) instead of
+                # link local.
+                if ip_address.is_private and self.exclude_private_ips:
+                    print(f"IP address is a private IP: {ip_address}")
+                    self.update_disallowed_target(targets_dict, ip_address)
+                    continue
+
+                # Double-check that the IP is global (public) when private IPs are
+                # excluded...probably redundant.
+                if not ip_address.is_global and self.exclude_private_ips:
                     print(f"IP address is not a public IP: {ip_address}")
+                    self.update_disallowed_target(targets_dict, ip_address)
                     continue
 
-                if self.is_ipv4_address(ip_address):
-                    targets_dict["ip_addresses"]["as_list"].append(ip_address)
-                elif self.is_ipv6_address(ip_address):
-                    targets_dict["ip_addresses"]["as_list"].append(ip_address)
+                # Check if IP is in a CDN network.
+                if self.exclude_cdn_ip_networks:
+
+                    cdn_ip_found = False
+
+                    # Not efficient to loop through each CDN network, but necessary to test if an ip_address
+                    # (ipaddress.ip_address object) is in a network (ipaddress.IPv4Network or ipaddress.IPv6Network).
+                    # Note that self.cdn_ip_networks is a mix of IPv4 and IPv6 networks.
+                    for cdn_ip_network in self.cdn_ip_networks:
+
+                        if ip_address in cdn_ip_network:
+                            print(f"IP address {ip_address} is in CDN network: {cdn_ip_network}")
+                            self.update_disallowed_target(targets_dict, ip_address)
+                            # A bare "continue" would only resume this inner self.cdn_ip_networks loop, not the
+                            # parent target_list loop, so set cdn_ip_found and break to bail out properly.
+                            cdn_ip_found = True
+                            break
+
+                    if cdn_ip_found:
+                        continue
+
+                # At this point, the IP address is legit.
+                if is_ipv4_address(ip_address):
+                    targets_dict["ipv4_addresses"]["as_list"].add(ip_address)
+                elif is_ipv6_address(ip_address):
+                    targets_dict["ipv6_addresses"]["as_list"].add(ip_address)
                 else:
                     print(f"Unknown IP address type: {ip_address}")
 
             # Check if it is an IP network.
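The flat loop above works on a mixed list because of how the stdlib defines containment: an `in` test between mismatched IP versions simply returns False rather than raising. A standalone sketch with sample Cloudflare ranges (treat the specific prefixes as assumptions):

```python
import ipaddress

# Example CDN ranges: one IPv4, one IPv6.
cdn_ip_networks = [
    ipaddress.ip_network("103.31.4.0/22"),
    ipaddress.ip_network("2405:b500::/32"),
]

ip_address = ipaddress.ip_address("103.31.4.105")

for cdn_ip_network in cdn_ip_networks:
    # A version mismatch evaluates to False, so IPv4 and IPv6 networks can share one list.
    if ip_address in cdn_ip_network:
        print(f"{ip_address} is in CDN network: {cdn_ip_network}")  # hits 103.31.4.0/22
        break
```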
-            elif self.is_ip_network(target):
+            elif is_ip_network(target):
+
+                # Convert to an ipaddress.ip_network object.
+                ip_network = ipaddress.ip_network(target, strict=False)
 
                 # Ignore private networks if they are not allowed.
-                if not self.private_ips_allowed and ipaddress.ip_network(target).is_private:
-                    print(f"IP network is private and private networks are not allowed: {target}")
+                if ip_network.is_private and self.exclude_private_ips:
+                    print(f"IP network is private: {target}")
+                    self.update_disallowed_target(targets_dict, ip_network)
                     continue
 
-                targets_dict["ip_networks"]["as_list"].append(target)
+                # IPv4 network.
+                if type(ip_network) == ipaddress.IPv4Network:
+                    targets_dict["ipv4_networks"]["as_list"].add(target)
+                # IPv6 network.
+                else:
+                    targets_dict["ipv6_networks"]["as_list"].add(target)
 
-            # Check if it is a FQDN.
-            elif self.is_valid_fqdn(target):
-                targets_dict["domains"]["as_list"].append(target.strip("."))
+            # Check if it is a FQDN with a valid top level domain (TLD).
+            # Without the TLD check, it would categorize fat-fingered IP addresses (192.168.1.999) as valid FQDNs
+            # based on the characters allowable in a FQDN.
+            elif is_valid_fqdn(target) and domain_has_valid_fqdn(target):
+                targets_dict["domains"]["as_list"].add(target.strip("."))
 
             # Not a valid target.
             else:
-                print(f"Invalid target type: {target}")
-                targets_dict["invalid_targets"].append(target)
+                # print(f"Invalid target type: {target}")
+                targets_dict["invalid_targets"].add(target)
 
         print("=" * 10)
 
-        for target_type in ["ip_addresses", "ip_networks", "domains"]:
+        # Loop through each category and perform some cleanup maintenance.
+        for target_type in ["ipv4_addresses", "ipv4_networks", "ipv6_addresses", "ipv6_networks", "domains"]:
 
-            # Remove duplicates.
-            targets_dict[target_type]["as_list"] = list(set(targets_dict[target_type]["as_list"]))
+            temp_list_of_objects = targets_dict[target_type]["as_list"]
 
-            temp_list = []
-
-            # Standardize object type to string.
-            for target in targets_dict[target_type]["as_list"]:
-                temp_list.append(str(target))
-
-            # Sort within each individual target type: "ip_addresses", "ip_networks", or "domains"
+            # Sort within each individual target type: "ipv4_addresses", "ipv4_networks", "ipv6_addresses",
+            # "ipv6_networks", "domains"
             if self.sort_targets:
                 try:
-                    temp_list.sort()
+                    # Calling sorted() on the set returns a sorted list.
+                    temp_list_of_objects = sorted(temp_list_of_objects)
                 except Exception as e:
                     print(f"Exception sorting targets in '{target_type}': {e}")
 
-            targets_dict[target_type]["as_list"] = temp_list
-            targets_dict[target_type]["as_csv"] = ",".join(temp_list)
-            targets_dict[target_type]["as_nmap"] = " ".join(temp_list)
+            # Convert objects to strings.
+            temp_list_of_strings = [str(obj) for obj in temp_list_of_objects]
 
-            targets_dict[target_type]["total"] = len(temp_list)
+            # Re-assign to the corresponding keys.
+            targets_dict[target_type]["as_list"] = temp_list_of_strings
+            targets_dict[target_type]["as_csv"] = ",".join(temp_list_of_strings)
+            targets_dict[target_type]["as_nmap"] = " ".join(temp_list_of_strings)
 
-            # Extend array with target_type's list. If requested, will sort later.
-            targets_dict["as_list"].extend(temp_list)
+            # For IP networks, calculate the number of targets in the network. At this point in the logic,
+            # ip_network has been vetted to be either an IPv4 or IPv6 network (see is_ip_network() function).
+ if target_type in ["ipv4_networks", "ipv6_networks"]: - targets_dict["total"] += len(temp_list) + for ip_network in temp_list_of_objects: - # Sort for combined "as_list" targets. - if self.sort_targets: - try: - targets_dict["as_list"].sort() - except Exception as e: - print(f"Exception sorting targets: {e}") + ip_network = ipaddress.ip_network(ip_network, strict=False) - # Remove invalid duplicate targets. - targets_dict["invalid_targets"] = list(set(targets_dict["invalid_targets"])) - targets_dict["invalid_targets_total"] = len(targets_dict["invalid_targets"]) + # IPv4 network. Only need to check the network type here. + if type(ip_network) == ipaddress.IPv4Network: + targets_in_ip_subnet = ip_network.num_addresses + # IPv6 network. No need to check the network type here, if it is not IPv4, it has to be IPv6. + else: + targets_in_ip_subnet = ipaddress.IPv6Network(ip_network).num_addresses - # Remove disallowed duplicate targets. - targets_dict["disallowed_targets"] = list(set(targets_dict["disallowed_targets"])) - targets_dict["disallowed_targets_total"] = len(targets_dict["disallowed_targets"]) + targets_dict[target_type]["total"] += targets_in_ip_subnet + targets_dict["total"] += targets_in_ip_subnet + else: + targets_dict[target_type]["total"] = len(temp_list_of_strings) + targets_dict["total"] += len(temp_list_of_strings) + + # Extend array with target_type's list. This is a kind of soft sort by putting them in order of the + # target_type for loop ("ipv4_addresses", "ipv4_networks", "ipv6_addresses", "ipv6_networks", "domains"). + # The traditional sorted() will not work with the various object types. + targets_dict["as_list"].extend(temp_list_of_strings) + + # Convert to a csv delimited string. targets_dict["as_csv"] = ",".join(targets_dict["as_list"]) + + # Convert to a space-delimited string. targets_dict["as_nmap"] = " ".join(targets_dict["as_list"]) + # Housekeeping for invalid_targets. + # Convert from set to list. + targets_dict["invalid_targets"] = list(targets_dict["invalid_targets"]) + targets_dict["invalid_targets_total"] = len(targets_dict["invalid_targets"]) + + # Convert invalid_targets objects to strings. + targets_dict["invalid_targets"] = [str(obj) for obj in targets_dict["invalid_targets"]] + + # Housekeeping for disallowed_targets. + # Convert from set to list. + targets_dict["disallowed_targets"] = list(targets_dict["disallowed_targets"]) + targets_dict["disallowed_targets_total"] = len(targets_dict["disallowed_targets"]) + + # Convert disallowed_targets objects to strings. + targets_dict["disallowed_targets"] = [str(obj) for obj in targets_dict["disallowed_targets"]] + + # At this stage, all the values in invalid_targets and disallowed_targets are strings. Thus, sorting may not be + # perfect looking, but we can do it anyway. + if self.sort_targets: + for target_type in ["invalid_targets", "disallowed_targets"]: + try: + targets_dict[target_type].sort() + except Exception as e: + print(f"Exception sorting targets in '{target_type}': {e}") + + # Write output to disk. 
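Note the semantics change hiding in these totals: a network now contributes its full address count via `num_addresses`, not 1. For example:

```python
import ipaddress

print(ipaddress.ip_network("7.7.7.0/24").num_addresses)  # 256
print(ipaddress.ip_network("fc00::/7").num_addresses)    # 2 ** 121
```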
+ if self.write_to_disk: + print("Writing targets_dict to disk") + with open("targets_dict.json", "w") as fh: + fh.write(json.dumps(targets_dict, indent=4)) + return targets_dict if __name__ == "__main__": parser = argparse.ArgumentParser(description="Extract IPs and domains from a string or file.") - - group = parser.add_mutually_exclusive_group(required=True) - group.add_argument("-f", dest="targets_file", action="store", help="File with targets.") + parser.add_argument( + "-d", + "--delimiter", + dest="delimiter", + action="store", + default="", + type=str, + help="Delimiter to find targets using Python's split().", + ) + parser.add_argument( + "-i", + "--exclude-cdn-ip-networks", + dest="exclude_cdn_ip_networks", + action="store_true", + required=False, + default=False, + help="Exclude IPs belonging to CDNs like AWS' CloudFront, Cloudflare, etc.", + ) + parser.add_argument( + "-n", + "--retrieve-latest-cdn-data", + dest="retrieve_new_cdn_ip_data", + action="store_true", + required=False, + default=False, + help=( + "Retrieve new CDN IP data from AWS' CloudFront, Cloudflare, etc. instead of utilizing previous data stored " + "on local files." + ), + ) parser.add_argument( "-p", - dest="private_ips_allowed", + "--exclude-private-ips", + dest="exclude_private_ips", + action="store_true", + default=False, + help="Exclude private RFC1918 IPs (192.168.1.1) and networks (192.168.1.0/24).", + ) + parser.add_argument("-s", "--sort", dest="sort_targets", action="store_true", default=False, help="Sort targets") + parser.add_argument( + "-w", + "--write-to-disk", + dest="write_to_disk", action="store_true", + required=False, default=False, - help="Private RFC1918 IPs (192.168.1.1) and networks (192.168.1.0/24) are allowed.", + help="Write the targets_dict to disk.", + ) + + group = parser.add_mutually_exclusive_group(required=True) + group.add_argument( + "-f", "--targets-file", dest="targets_file", action="store", help="File containing potential targets." ) - parser.add_argument("-s", dest="sort_targets", action="store_true", default=False, help="Sort targets") group.add_argument( "-t", + "--target-string", dest="targets_string", action="store", help="String of targets '8.8.8.8 4.4.4.4 scanme.nmap.org ::ffff:c0a8:101'", @@ -297,9 +613,9 @@ def extract_targets(self, targets_string): print("[!] Specify a valid file containing targets.") sys.exit(1) - # args.targets_string = "100.12.43.55 1.2.3.4 1.5.4.8 22.22.224.24 2.2.2.2 127.0.0.1 2001:978:1:2::d 7.7.7.0/24 4.4.4.4 . : % ^ 2.2.3.) 1.84.5.2555 224.0.1.10 169.254.169.254 2.2.2.3 2.2.2.4" + # args.targets_string = "rackspace.com rackspace.comm 100.12.43.55 1.2.3.4 1.5.4.8 22.22.224.24 2.2.2.2 127.0.0.1 2001:978:1:2::d 7.7.7.0/24 4.4.4.4 . : % ^ 2.2.3.) 
1.84.5.2555 224.0.1.10 169.254.169.254 2.2.2.3 2.2.2.4 13.228.69.5 2405:b500:ffff:ffff:ffff:ffff:ffff:fff3 103.31.4.105"
 
     te = TargetExtractor(**vars(args))
     targets_dict = te.targets_dict
 
-    pprint.pprint(targets_dict)
+    print(json.dumps(targets_dict, indent=4))
diff --git a/console/requirements/base.txt b/console/requirements/base.txt
index 377a773..bb2234e 100644
--- a/console/requirements/base.txt
+++ b/console/requirements/base.txt
@@ -53,7 +53,10 @@ python-libnmap==0.7.2
 redis==3.5.3
 
 # python-rq - Redis Queue
-rq==1.8.1
+rq==1.9.0
+
+# Validate top level domains
+tld==0.12.6
 
 # XML to JSON
 xmljson==0.2.1
diff --git a/console/tests_for_extract_targets.txt b/console/tests_for_extract_targets.txt
new file mode 100644
index 0000000..cc62bec
--- /dev/null
+++ b/console/tests_for_extract_targets.txt
@@ -0,0 +1,33 @@
+# Valid Domains
+rackspace.com
+scanme.nmap.org
+
+# Invalid domains
+rackspace.comm
+
+# IPv4
+127.0.0.1 # loopback
+224.0.1.10 # multicast
+169.254.169.254 # link local
+1.2.3.4
+8.8.8.8
+99.99.99.999 # Fat-fingered IP
+
+# IPv4 Networks
+7.7.7.0/24
+10.1.1.0/24 # RFC1918 private IP
+192.168.1.0/24 # RFC1918 private IP
+172.16.1.0/24 # RFC1918 private IP
+
+# IPv6
+::1 # Loopback
+2001:4860:4860::8888 # Google DNS
+
+# IPv6 Networks
+2345:425:2CA1:0000:0000:567:5673:23b5/126
+fc00::/7 # private
+
+# CDN IPs
+13.228.69.5 # AWS CloudFront
+103.31.4.105 # Cloudflare
+2405:b500:ffff:ffff:ffff:ffff:ffff:fff3 # Cloudflare
diff --git a/engine/engine b/engine/engine
index 016a5b4..968bb12 100755
Binary files a/engine/engine and b/engine/engine differ
diff --git a/engine/engine.py b/engine/engine.py
index 7a0aa78..7660ead 100644
--- a/engine/engine.py
+++ b/engine/engine.py
@@ -20,7 +20,7 @@ import time
 import urllib.request
 
-__version__ = "1.03"
+__version__ = "1.0.4"
 
 # Disable SSL/TLS verification.
 ssl._create_default_https_context = ssl._create_unverified_context
@@ -524,11 +524,11 @@ def go(self):
         sys.exit(0)
 
     if not shutil.which("nmap"):
-        print(f"Path for nmap cannot be found. Exiting...")
+        print("Path for nmap cannot be found. Exiting...")
         sys.exit(0)
 
     if not shutil.which("masscan"):
-        print(f"Path for masscan cannot be found. Exiting...")
+        print("Path for masscan cannot be found. Exiting...")
         sys.exit(0)
 
     if not os.path.isdir("./logs"):
diff --git a/engine/standalone_engine_binary.txt b/engine/standalone_engine_binary.txt
index 4723028..7b62586 100644
--- a/engine/standalone_engine_binary.txt
+++ b/engine/standalone_engine_binary.txt
@@ -1,3 +1,3 @@
-Scantron engine version: 1.03
-MD5 af814d01c63926ba7927b5dc6aae894f engine
-SHA-256 80e5689497c639bde8d5472f47254b63b55004a279b84f8055ca20c01ec7bf84 engine
+Scantron engine version: 1.0.4
+MD5 10ce119d0a943012aa3096b7bf27cfbe engine
+SHA-256 22cf9fa28b58741564eef3d8e40f84bcd80864cd277d50e924c8ea2092e098da engine
diff --git a/scantron_api_client/scantron_api_client.py b/scantron_api_client/scantron_api_client.py
index 06a5976..5c9a331 100644
--- a/scantron_api_client/scantron_api_client.py
+++ b/scantron_api_client/scantron_api_client.py
@@ -1,5 +1,6 @@
 # Standard Python libraries.
 import datetime
+import ipaddress
 import json
 import sys
 import time
@@ -7,12 +8,156 @@
 # Third party Python libraries.
 import requests
+from requests_toolbelt.utils import dump
 
 # Custom Python libraries.
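The new extract_targets flags map one-to-one onto the constructor arguments. A hypothetical run against a few of the values from `tests_for_extract_targets.txt` (sketch; the CLI line in the comment assumes you are in `console/`):

```python
# Rough CLI equivalent:
#   python3 extract_targets.py -s -p -t "8.8.8.8 rackspace.comm 169.254.169.254"
from extract_targets import TargetExtractor

te = TargetExtractor(
    targets_string="8.8.8.8 rackspace.comm 169.254.169.254",
    exclude_private_ips=True,
    sort_targets=True,
)
print(te.targets_dict["invalid_targets"])     # ['rackspace.comm'] -- bad TLD
print(te.targets_dict["disallowed_targets"])  # ['169.254.169.254'] -- link local
print(te.targets_dict["as_nmap"])             # "8.8.8.8"
```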
-import utility -__version__ = "0.0.6" +__version__ = "1.0.7" + + +def debug_requests_response(response): + """Provide debug print info for a requests response object.""" + + data = dump.dump_all(response) + print(data.decode("utf-8")) + + +def get_timestamp(): + """Generates a timestamp.""" + + now = time.localtime() + timestamp = time.strftime("%Y%m%d_%H%M%S", now) + + return timestamp + + +def get_iso_8601_timestamp_no_second(): + """Generates an ISO 8601 standardized timestamp. + https://en.wikipedia.org/wiki/ISO_8601""" + + now = time.localtime() + timestamp = time.strftime("%Y-%m-%dT%H:%M", now) + + return timestamp + + +def expand_range_of_ips(start_ip, end_ip): + """Takes an IP range and returns all the IPs in that range. + # http://cmikavac.net/2011/09/11/how-to-generate-an-ip-range-list-in-python/ + """ + + ip_range = [] + + if (ipaddress.ip_address(start_ip).version == 6) or (ipaddress.ip_address(end_ip).version == 6): + print("IPv6 IP range not supported in this function: {} - {}".format(start_ip, end_ip)) + return ip_range + + start = list(map(int, start_ip.split("."))) + end = list(map(int, end_ip.split("."))) + temp = start + + ip_range.append(start_ip) + while temp != end: + start[3] += 1 + for i in (3, 2, 1): + if temp[i] == 256: + temp[i] = 0 + temp[i - 1] += 1 + ip_range.append(".".join(map(str, temp))) + + return ip_range + + +def http_status_code(http_code): + """Contains a database of all known HTTP status codes and their corresponding plain text description.  For use in + both program output as well as parsing for specific issue types. + + Args: + http_code (int): A number containing the HTTP status code to lookup + + Returns: + string: Returns a description of the status code. + """ + + http_codes = { + 200: "OK", + 201: "OK: Created", + 202: "OK: Accepted", + 203: "OK: Non-Authoritative Information", + 204: "OK: No Content", + 205: "OK: Reset Content", + 206: "OK: Partial Content", + 207: "OK: Multi-Status", + 208: "OK: Already Reported", + 226: "OK: IM Used", + 300: "Redirected: Multiple Choices", + 301: "Redirected: Moved Permanently", + 302: "Redirected: Found", + 303: "Redirected: See Other", + 304: "Redirected: Not Modified", + 305: "Redirected: Use Proxy", + 306: "Redirected: Switch Proxy", + 307: "Redirected: Temporary Redirect", + 308: "Redirected: Permanent Redirect", + 400: "Client Error: Bad Request", + 401: "Client Error: Unauthorized", + 402: "Client Error: Payment Required", + 403: "Client Error: Forbidden", + 404: "Client Error: Not Found", + 405: "Client Error: Method Not Allowed", + 406: "Client Error: Not Acceptable", + 407: "Client Error: Proxy Authentication Required", + 408: "Client Error: Request Timeout", + 409: "Client Error: Conflict", + 410: "Client Error: Gone", + 411: "Client Error: Length Required", + 412: "Client Error: Precondition Failled", + 413: "Client Error: Payload Too Large", + 414: "Client Error: URI Too Large", + 415: "Client Error: Unsupported Media Type", + 416: "Client Error: Range Not Satisfiable", + 417: "Client Error: Expectation Failed", + 418: "Client Error: I'm a teapot", + 421: "Client Error: Misdirected Request", + 422: "Client Error: Un-processable Entity", + 423: "Client Error: Locked", + 424: "Client Error: Failed Dependency", + 426: "Client Error: Upgrade Required", + 428: "Client Error: Precondition Required", + 429: "Client Error: Too Many Requests", + 431: "Client Error: Request Header Fields Too Large", + 440: "Client Error: Login Time-Out", + 444: "Client Error: No Response", + 449: "Client Error: Retry 
With", + 451: "Client Error: Unavailable For Legal Reasons", + 495: "Client Error: SSL Certificate Error", + 496: "Client Error: SSL Certificate Required", + 497: "Client Error: HTTP Request Sent to HTTPS Port", + 499: "Client Error: Client Closed Request", + 500: "Server Error: Internal Server Error", + 501: "Server Error: Not Implemented", + 502: "Server Error: Bad Gateway", + 503: "Server Error: Service Unavailable", + 504: "Server Error: Gateway Timeout", + 505: "Server Error: HTTP Version Not Supported", + 507: "Server Error: Insufficient Storage", + 508: "Server Error: Loop Detected", + 510: "Server Error: Not Extended", + 511: "Server Error: Network Authentication Required", + 520: "Server Error: Unknown Error when connecting to server behind load balancer", + 521: "Server Error: Web Server behind load balancer is down", + 522: "Server Error: Connection Timed Out to server behind load balancer", + 523: "Server Error: Server behind load balancer is unreachable", + 524: "Server Error: TCP handshake with server behind load balancer completed but timed out", + 525: "Server Error: Load balancer could not negotiate a SSL/TLS handshake with server behind load balancer", + 526: "Server Error: Server behind load balancer returned invalid SSL/TLS cert to load balancer", + 527: "Server Error: Load balancer request timed out/failed after WAN connection was established to origin server", + } + + http_status = http_codes.get(http_code, "NA") + + return http_status class ScantronClient: @@ -113,7 +258,7 @@ def scantron_api_query(self, endpoint, **kwargs): ) if response.status_code != 200: - utility.debug_requests_response(response) + debug_requests_response(response) break @@ -128,7 +273,7 @@ def scantron_api_query(self, endpoint, **kwargs): ) if response.status_code != 201: - utility.debug_requests_response(response) + debug_requests_response(response) break @@ -143,7 +288,7 @@ def scantron_api_query(self, endpoint, **kwargs): ) if response.status_code != 200: - utility.debug_requests_response(response) + debug_requests_response(response) break @@ -158,7 +303,7 @@ def scantron_api_query(self, endpoint, **kwargs): ) if response.status_code != 200: - utility.debug_requests_response(response) + debug_requests_response(response) break @@ -173,7 +318,7 @@ def scantron_api_query(self, endpoint, **kwargs): ) if response.status_code != 204: - utility.debug_requests_response(response) + debug_requests_response(response) break @@ -196,7 +341,7 @@ def scantron_api_query(self, endpoint, **kwargs): print("Packet loss when attempting to reach the Scantron API.") if self.debug_print: - utility.debug_requests_response(response) + debug_requests_response(response) return response @@ -307,18 +452,18 @@ def create_globally_excluded_target(self, payload): def retrieve_globally_excluded_target(self, globally_excluded_target_id): """Retrieve globally excluded target.""" return self.scantron_api_query( - f"/api/globally excluded targets/{globally_excluded_target_id}", method="GET" + f"/api/globally_excluded_targets/{globally_excluded_target_id}", method="GET" ).json() def update_globally_excluded_target(self, globally_excluded_target_id, payload): """Update globally excluded target for specific globally excluded target ID.""" return self.scantron_api_query( - f"/api/globally excluded targets/{globally_excluded_target_id}", method="PATCH", payload=payload + f"/api/globally_excluded_targets/{globally_excluded_target_id}", method="PATCH", payload=payload ) def delete_globally_excluded_target(self, 
globally_excluded_target_id): """Delete a globally excluded target.""" - return self.scantron_api_query(f"/api/globally excluded targets/{globally_excluded_target_id}", method="DELETE") + return self.scantron_api_query(f"/api/globally_excluded_targets/{globally_excluded_target_id}", method="DELETE") # Globally Excluded Targets- Miscellaneous functions. def retrieve_globally_excluded_targets(self): @@ -657,8 +802,8 @@ def retrieve_all_masscan_targets_with_a_specific_port_and_protocol_from_scan_id( scan_results_json = self.retrieve_scan_results(scan_id, file_type) masscan_dict = self.generate_masscan_dict_from_masscan_result(scan_results_json) - all_targets_with_a_specific_port_and_protocol_dict = self.retrieve_all_masscan_targets_with_a_specific_port_and_protocol( - masscan_dict, port, protocol + all_targets_with_a_specific_port_and_protocol_dict = ( + self.retrieve_all_masscan_targets_with_a_specific_port_and_protocol(masscan_dict, port, protocol) ) # Add scan ID to returned dictionary. diff --git a/scantron_api_client/utility.py b/scantron_api_client/utility.py deleted file mode 100644 index 446f3e6..0000000 --- a/scantron_api_client/utility.py +++ /dev/null @@ -1,155 +0,0 @@ -""" -Various utility methods. -""" -# Standard Python libraries. -import ipaddress -import time - -# Third party Python libraries. -from requests_toolbelt.utils import dump - -# Custom Python libraries. - - -def debug_requests_response(response): - """Provide debug print info for a requests response object.""" - - data = dump.dump_all(response) - print(data.decode("utf-8")) - - -def get_timestamp(): - """Generates a timestamp.""" - - now = time.localtime() - timestamp = time.strftime("%Y%m%d_%H%M%S", now) - - return timestamp - - -def get_iso_8601_timestamp_no_second(): - """Generates an ISO 8601 standardized timestamp. - https://en.wikipedia.org/wiki/ISO_8601""" - - now = time.localtime() - timestamp = time.strftime("%Y-%m-%dT%H:%M", now) - - return timestamp - - -def expand_range_of_ips(start_ip, end_ip): - """Takes an IP range and returns all the IPs in that range. - # http://cmikavac.net/2011/09/11/how-to-generate-an-ip-range-list-in-python/ - """ - - ip_range = [] - - if (ipaddress.ip_address(start_ip).version == 6) or (ipaddress.ip_address(end_ip).version == 6): - print("IPv6 IP range not supported in this function: {} - {}".format(start_ip, end_ip)) - return ip_range - - start = list(map(int, start_ip.split("."))) - end = list(map(int, end_ip.split("."))) - temp = start - - ip_range.append(start_ip) - while temp != end: - start[3] += 1 - for i in (3, 2, 1): - if temp[i] == 256: - temp[i] = 0 - temp[i - 1] += 1 - ip_range.append(".".join(map(str, temp))) - - return ip_range - - -def http_status_code(http_code): - """Contains a database of all known HTTP status codes and their corresponding plain text description.  For use in - both program output as well as parsing for specific issue types. - - Args: - http_code (int): A number containing the HTTP status code to lookup - - Returns: - string: Returns a description of the status code. 
- """ - - http_codes = { - 200: "OK", - 201: "OK: Created", - 202: "OK: Accepted", - 203: "OK: Non-Authoritative Information", - 204: "OK: No Content", - 205: "OK: Reset Content", - 206: "OK: Partial Content", - 207: "OK: Multi-Status", - 208: "OK: Already Reported", - 226: "OK: IM Used", - 300: "Redirected: Multiple Choices", - 301: "Redirected: Moved Permanently", - 302: "Redirected: Found", - 303: "Redirected: See Other", - 304: "Redirected: Not Modified", - 305: "Redirected: Use Proxy", - 306: "Redirected: Switch Proxy", - 307: "Redirected: Temporary Redirect", - 308: "Redirected: Permanent Redirect", - 400: "Client Error: Bad Request", - 401: "Client Error: Unauthorized", - 402: "Client Error: Payment Required", - 403: "Client Error: Forbidden", - 404: "Client Error: Not Found", - 405: "Client Error: Method Not Allowed", - 406: "Client Error: Not Acceptable", - 407: "Client Error: Proxy Authentication Required", - 408: "Client Error: Request Timeout", - 409: "Client Error: Conflict", - 410: "Client Error: Gone", - 411: "Client Error: Length Required", - 412: "Client Error: Precondition Failled", - 413: "Client Error: Payload Too Large", - 414: "Client Error: URI Too Large", - 415: "Client Error: Unsupported Media Type", - 416: "Client Error: Range Not Satisfiable", - 417: "Client Error: Expectation Failed", - 418: "Client Error: I'm a teapot", - 421: "Client Error: Misdirected Request", - 422: "Client Error: Un-processable Entity", - 423: "Client Error: Locked", - 424: "Client Error: Failed Dependency", - 426: "Client Error: Upgrade Required", - 428: "Client Error: Precondition Required", - 429: "Client Error: Too Many Requests", - 431: "Client Error: Request Header Fields Too Large", - 440: "Client Error: Login Time-Out", - 444: "Client Error: No Response", - 449: "Client Error: Retry With", - 451: "Client Error: Unavailable For Legal Reasons", - 495: "Client Error: SSL Certificate Error", - 496: "Client Error: SSL Certificate Required", - 497: "Client Error: HTTP Request Sent to HTTPS Port", - 499: "Client Error: Client Closed Request", - 500: "Server Error: Internal Server Error", - 501: "Server Error: Not Implemented", - 502: "Server Error: Bad Gateway", - 503: "Server Error: Service Unavailable", - 504: "Server Error: Gateway Timeout", - 505: "Server Error: HTTP Version Not Supported", - 507: "Server Error: Insufficient Storage", - 508: "Server Error: Loop Detected", - 510: "Server Error: Not Extended", - 511: "Server Error: Network Authentication Required", - 520: "Server Error: Unknown Error when connecting to server behind load balancer", - 521: "Server Error: Web Server behind load balancer is down", - 522: "Server Error: Connection Timed Out to server behind load balancer", - 523: "Server Error: Server behind load balancer is unreachable", - 524: "Server Error: TCP handshake with server behind load balancer completed but timed out", - 525: "Server Error: Load balancer could not negotiate a SSL/TLS handshake with server behind load balancer", - 526: "Server Error: Server behind load balancer returned invalid SSL/TLS cert to load balancer", - 527: "Server Error: Load balancer request timed out/failed after WAN connection was established to origin server", - } - - http_status = http_codes.get(http_code, "NA") - - return http_status