Skip to content

Commit

Permalink
Merge pull request #241 from rackerlabs/issue-238
Browse files Browse the repository at this point in the history
Console/Engine updates
  • Loading branch information
derpadoo authored Oct 28, 2020
2 parents 1d7782a + a5468c7 commit e016860
Show file tree
Hide file tree
Showing 12 changed files with 71 additions and 95 deletions.
3 changes: 3 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -332,10 +332,13 @@ virtualenv -p python3.6 .venv
source .venv/bin/activate
pip install pyinstaller
pyinstaller --onefile engine.py --name engine
mv dist/engine .

./engine -v > standalone_engine_binary.txt
echo -e "MD5 `md5sum engine`" >> standalone_engine_binary.txt
echo -e "SHA-256 `sha256sum engine`" >> standalone_engine_binary.txt

rm -rf __pycache__ build dist engine.spec .venv
```

### Engine Execution
Expand Down
36 changes: 6 additions & 30 deletions ansible-playbooks/roles/console/tasks/main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -408,6 +408,7 @@
owner: nobody
group: nogroup
recurse: yes
mode: 0777 # Not ideal, but only way to allow Django's www-data user to move files within scan_results.
tags: update_code

- name: Restart NFS service.
Expand Down Expand Up @@ -435,18 +436,17 @@
group: root
mode: 0755

- name: Make nmap_to_csv.sh and nmap_to_csv.py executable.
- name: Change Python scripts owner and file permissions.
file:
path: "/home/{{ non_root_user }}/console/scan_results/{{ item }}"
owner: root
group: root
mode: 0700
owner: "{{ non_root_user }}"
group: "{{ non_root_user }}"
mode: 0755
with_items:
- nmap_to_csv.sh
- nmap_to_csv.py
- masscan_json_to_csv.sh
- masscan_json_to_csv.py
- xml_to_json_nmap_results.py
tags: update_code

- name: chown-ing console files to root in "/home/{{ non_root_user }}/console"
file:
Expand Down Expand Up @@ -476,30 +476,6 @@
job: "{{ scantron_dir }}/scan_scheduler.sh"
user: root

- name: Add crontab entry for nmap_to_csv.sh
cron:
name: Convert nmap scan files for big data analytics platform ingestion every minute.
disabled: false
minute: "*"
hour: "*"
day: "*"
month: "*"
weekday: "*"
job: "{{ scantron_dir }}/scan_results/nmap_to_csv.sh"
user: root

- name: Add crontab entry for masscan_json_to_csv.sh
cron:
name: Convert masscan json scan files for big data analytics platform ingestion every minute.
disabled: false
minute: "*"
hour: "*"
day: "*"
month: "*"
weekday: "*"
job: "{{ scantron_dir }}/scan_results/masscan_json_to_csv.sh"
user: root

- name: Disable MAILTO for root's crontab.
cronvar:
name: MAILTO
Expand Down
2 changes: 1 addition & 1 deletion console/django_scantron/__init__.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__version__ = "1.38"
__version__ = "1.39"
30 changes: 28 additions & 2 deletions console/django_scantron/api/views.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
# Standard Python libraries.
import datetime
import os
import pytz

# Third party Python libraries.
Expand Down Expand Up @@ -163,8 +164,33 @@ def partial_update(self, request, pk=None, **kwargs):

return JsonResponse(response_dict)

# Update the scheduled_scan_dict with the most recent scan_status state from the PUT request. When
# originally querying above, the old state is passed to utility.py unless it is updated.
# Setup folder directories.
scan_results_dir = "/home/scantron/console/scan_results"
pending_files_dir = os.path.join(scan_results_dir, "pending")
completed_files_dir = os.path.join(scan_results_dir, "complete")
cancelled_files_dir = os.path.join(scan_results_dir, "cancelled")

if new_scan_status == "cancelled":
# Move scan files to the "cancelled" directory for historical purposes.
utility.move_wildcard_files(
f"{scheduled_scan_dict['result_file_base_name']}*", pending_files_dir, cancelled_files_dir
)

if new_scan_status == "completed":
# Move files from "pending" directory to "complete" directory.
utility.move_wildcard_files(
f"{scheduled_scan_dict['result_file_base_name']}*", pending_files_dir, completed_files_dir
)

# Django-compliant, pre-formatted datetimestamp.
now_datetime = get_current_time()
ScheduledScan.objects.filter(scan_engine=request.user).filter(pk=pk).update(
completed_time=now_datetime
)

# Update the scheduled_scan_dict with the most recent scan_status state from the PATCH request. When
# originally querying above, the old state would be passed to utility.py since it hasn't officially been
# updated by Django's .update() yet.
scheduled_scan_dict["scan_status"] = new_scan_status

# Create a redis connection object.
Expand Down
2 changes: 1 addition & 1 deletion console/django_scantron/templates/base.html
Original file line number Diff line number Diff line change
Expand Up @@ -303,7 +303,7 @@ <h1>
"ordering": true,
"info": true,
"autoWidth": true,
"order": [[ 6, "desc"]],
"order": [[ 0, "desc"]],
"pageLength": 100
});
});
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ <h3>
<th>Scan Command</th>
<th>Scan Status</th>
<th>Completion Time</th>
<th>Results File</th>
<th>Result Files</th>
</tr>
</thead>

Expand Down
8 changes: 0 additions & 8 deletions console/scan_results/masscan_json_to_csv.sh

This file was deleted.

8 changes: 0 additions & 8 deletions console/scan_results/nmap_to_csv.sh

This file was deleted.

25 changes: 21 additions & 4 deletions console/utility.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,10 @@
Utility methods for other scripts to use.
"""
# Standard Python libraries.
import fnmatch
import logging
import os
import shutil
from logging import handlers

# Third party Python libraries.
Expand All @@ -11,7 +14,7 @@

# Custom Python libraries.
import django_connector

from scan_results import nmap_to_csv, masscan_json_to_csv

# Setup logging configuration.
logger = logging.getLogger("rq.worker")
Expand All @@ -37,7 +40,15 @@
)
# fmt: on

# https://github.com/pennersr/django-allauth/blob/7b81531bc89ae98dc6f687611743db5b36cda9a2/allauth/account/adapter.py#L448

def move_wildcard_files(wildcard_filename, source_directory, destination_directory):
    """Move every file in source_directory whose name matches the fnmatch-style
    pattern wildcard_filename (supports * and ?) into destination_directory.

    Non-matching entries are left untouched.
    """

    for entry in os.listdir(source_directory):
        # Skip anything that doesn't match the wildcard pattern.
        if not fnmatch.fnmatch(entry, wildcard_filename):
            continue
        shutil.move(os.path.join(source_directory, entry), os.path.join(destination_directory, entry))


def process_scan_status_change(scheduled_scan_dict):
Expand Down Expand Up @@ -90,5 +101,11 @@ def process_scan_status_change(scheduled_scan_dict):

logger.info(f"Successfully sent email for Scheduled Scan ID: {scheduled_scan_id}")

# 2 Do other stuff
# TODO
# 2) Convert scan results to .csv for big data analytics.
# Calling the functions here instead of relying on a cron job that runs every minute. The scripts also move the
# .xml files from console/scan_results/complete to console/scan_results/processed
if scan_status == "completed":
if scan_binary == "nmap":
nmap_to_csv.main()
elif scan_binary == "masscan":
masscan_json_to_csv.main()
Binary file modified engine/engine
Binary file not shown.
44 changes: 7 additions & 37 deletions engine/engine.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,6 @@

# Standard Python libraries.
import argparse
import datetime
import fnmatch
import json
import logging
import os
Expand All @@ -22,7 +20,7 @@
import time
import urllib.request

__version__ = "1.02"
__version__ = "1.03"

# Disable SSL/TLS verification.
ssl._create_default_https_context = ssl._create_unverified_context
Expand All @@ -37,13 +35,6 @@
SCAN_PROCESS_DICT = {}


def get_current_time():
    """Return the current local time as a Django-compliant, pre-formatted
    "YYYY-MM-DD HH:MM:SS" datetimestamp string."""

    return datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")


def build_masscan_command(scan_command, target_file, excluded_target_file, json_file, http_useragent):
"""Builds the masscan command."""

Expand All @@ -59,16 +50,6 @@ def build_masscan_command(scan_command, target_file, excluded_target_file, json_
return masscan_command


def move_wildcard_files(wildcard_filename, source_directory, destination_directory):
    """Move files matching the fnmatch pattern wildcard_filename (* and ? are
    supported) from source_directory to destination_directory."""

    matching = [f for f in os.listdir(source_directory) if fnmatch.fnmatch(f, wildcard_filename)]

    for file_name in matching:
        source_path = os.path.join(source_directory, file_name)
        destination_path = os.path.join(destination_directory, file_name)
        shutil.move(source_path, destination_path)


def check_for_scan_jobs():
"""Check for new scans through the API."""

Expand Down Expand Up @@ -179,8 +160,6 @@ def scan_job_handler(scan_job_dict):

# Setup folder directories.
pending_files_dir = os.path.join(scan_results_dir, "pending")
completed_files_dir = os.path.join(scan_results_dir, "complete")
cancelled_files_dir = os.path.join(scan_results_dir, "cancelled")

if scan_binary not in supported_scan_binaries:
ROOT_LOGGER.error(f"Invalid scan binary specified: {scan_binary}")
Expand Down Expand Up @@ -248,9 +227,6 @@ def scan_job_handler(scan_job_dict):
SCAN_PROCESS_DICT.pop(scan_binary_process_id)

if scan_status == "cancel":
# Move scan files to the "cancelled" directory for historical purposes.
move_wildcard_files(f"{result_file_base_name}*", pending_files_dir, cancelled_files_dir)

updated_scan_status = "cancelled"

elif scan_status == "pause":
Expand Down Expand Up @@ -400,15 +376,9 @@ def scan_job_handler(scan_job_dict):
# check ensures the scan status of a masscan process isn't "paused".
if SCAN_PROCESS_DICT[scan_binary_process_id]["scan_status"] == "started":

# Move files from "pending" directory to "complete" directory.
move_wildcard_files(f"{result_file_base_name}*", pending_files_dir, completed_files_dir)

# Update completed_time, scan_status, and result_file_base_name.
now_datetime = get_current_time()
# Update scan_status.
update_info = {
"completed_time": now_datetime,
"scan_status": "completed",
"result_file_base_name": result_file_base_name,
}

update_scan_information(scan_job, update_info)
Expand Down Expand Up @@ -544,11 +514,7 @@ def go(self):
help="Configuration file. Defaults to 'engine_config.json'",
)
parser.add_argument(
"-v",
dest="version",
action="store_true",
required=False,
help="Print engine version",
"-v", dest="version", action="store_true", required=False, help="Print engine version",
)

args = parser.parse_args()
Expand All @@ -565,6 +531,10 @@ def go(self):
print(f"Path for masscan cannot be found. Exiting...")
sys.exit(0)

if not os.path.isdir("./logs"):
print("./logs directory does not exist, creating it.")
os.mkdir("./logs", mode=0o700)

# Log level is controlled in engine_config.json and assigned after reading that file.
# Setup file logging
log_file_handler = logging.FileHandler(os.path.join("logs", "engine.log"))
Expand Down
6 changes: 3 additions & 3 deletions engine/standalone_engine_binary.txt
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
Scantron engine version: 1.02
MD5 d3230abc4947e5bcf57adf531db3707b engine
SHA-256 9e0cd1c160802ddc5abcdf1171efb7aa583842511bcb8811ceed5dac747a25d0 engine
Scantron engine version: 1.03
MD5 af814d01c63926ba7927b5dc6aae894f engine
SHA-256 80e5689497c639bde8d5472f47254b63b55004a279b84f8055ca20c01ec7bf84 engine

0 comments on commit e016860

Please sign in to comment.