diff --git a/CHANGELOG.rst b/CHANGELOG.rst index d2b3343..f798eed 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -194,3 +194,14 @@ in a sort of pre-release mode. * Contributions from @da667 - thank you! * Added cyberchef container (#235) * Updated base OS to Ubuntu 24.04 (#234) + +3.6.0 (2026-01-24) +################## + +* Moved to using **Docker Compose Version 2** in start-dalton.sh +* Added functionality for users to set username, along with simple shared auth (see dalton.conf) +* Updated queue page to display user who submitted the job (if so configured) +* Fixed support for Suricata Socket Control in Suricata version 8 and later. + The necessary Python libraries for suricatasc are no longer included with the Suricata + source beginning with Suricata version 8. +* Updated docker-compose to have more recent versions of the Suricata and Zeek agents by default diff --git a/README.rst b/README.rst index c0d64a6..25e76ca 100644 --- a/README.rst +++ b/README.rst @@ -23,7 +23,7 @@ or this which does the same thing: .. code:: text - docker-compose build && docker-compose up -d + docker compose build && docker compose up -d Then navigate to ``http:///dalton/`` @@ -157,7 +157,8 @@ Requirements ============ - `Docker `__ -- `Docker Compose `__ +- `Docker Compose V2 `__. + Note that this should be `Docker Compose Version 2 `__ - Internet connection (to build) Installing and Running Dalton @@ -175,7 +176,7 @@ or this which does the same thing: .. code:: bash - docker-compose build && docker-compose up -d + docker compose build && docker compose up -d To specify or add what agents (specific sensors and versions) are built and run, edit the docker-compose.yml file as appropriate. 
See also diff --git a/VERSION b/VERSION index 5ae69bd..40c341b 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -3.2.5 +3.6.0 diff --git a/app/dalton.py b/app/dalton.py index 1824882..2b573ca 100644 --- a/app/dalton.py +++ b/app/dalton.py @@ -40,7 +40,7 @@ import traceback import zipfile from distutils.version import LooseVersion -from functools import lru_cache +from functools import lru_cache, wraps from logging.handlers import RotatingFileHandler from threading import Thread @@ -91,6 +91,8 @@ def setup_dalton_logging(): RULECAT_SCRIPT = dalton_config.get("dalton", "rulecat_script") MAX_PCAP_FILES = dalton_config.getint("dalton", "max_pcap_files") DEBUG = dalton_config.getboolean("dalton", "debug") + AUTH_PREFIX = dalton_config.get("dalton", "auth_prefix") + AUTH_MAX = dalton_config.getint("dalton", "auth_max") # options for flowsynth FS_BIN_PATH = dalton_config.get( @@ -105,6 +107,7 @@ def setup_dalton_logging(): "Problem parsing config file '%s': %s" % (dalton_config_filename, e) ) + if DEBUG or ("CONTROLLER_DEBUG" in os.environ and int(os.getenv("CONTROLLER_DEBUG"))): logger.setLevel(logging.DEBUG) DEBUG = True @@ -532,28 +535,63 @@ def delete_old_job_files(): return str(total_deleted) +def check_user(f): + @wraps(f) + def check_user_fun(*args, **kwargs): + if AUTH_PREFIX == 'disabled': + # auth disabled + return f(*args, **kwargs) + user = None + try: + user = request.cookies.get('dalton_user') + except Exception: + user = None + if user is None or len(user) == 0 or not user.startswith(AUTH_PREFIX) or len(user) > AUTH_MAX: + return redirect(url_for('dalton_blueprint.set_user')) + return f(*args, **kwargs) + return check_user_fun + + @dalton_blueprint.route("/") +@check_user def index(): - logger.debug("ENVIRON:\n%s" % request.environ) - # make sure redirect is set to use http or https as appropriate - rurl = url_for("dalton_blueprint.page_index", _external=True) - if rurl.startswith("http"): - if "HTTP_X_FORWARDED_PROTO" in request.environ: - # if original 
request was https, make sure redirect uses https - rurl = rurl.replace("http", request.environ["HTTP_X_FORWARDED_PROTO"]) + return redirect(url_for("dalton_blueprint.page_index")) + +@dalton_blueprint.route("/dalton/logout", methods=["GET"]) +@dalton_blueprint.route("/dalton/logout/", methods=["GET"]) +@dalton_blueprint.route("/logout", methods=["GET"]) +@dalton_blueprint.route("/logout/", methods=["GET"]) +def logout(): + response = redirect(url_for('dalton_blueprint.set_user')) + response.set_cookie('dalton_user', "") + return response + + +@dalton_blueprint.route("/dalton/setuser", methods=["GET", "POST"]) +def set_user(): + if AUTH_PREFIX == 'disabled': + # auth disabled + return redirect(url_for('dalton_blueprint.page_index')) + + user = None + try: + if request.method == 'POST': + user = request.form.get("username") else: - logger.warning( - "Could not find request.environ['HTTP_X_FORWARDED_PROTO']. Make sure the web server (proxy) is configured to send it." - ) - else: - # this shouldn't be the case with '_external=True' passed to url_for() - logger.warning("URL does not start with 'http': %s" % rurl) - return redirect(rurl) + user = request.cookies.get('dalton_user') + except Exception: + user = None + if user is None or len(user) == 0 or not user.startswith(AUTH_PREFIX) or len(user) > AUTH_MAX: + return render_template("/dalton/setuser.html", user="") + + response = redirect(url_for('dalton_blueprint.page_index')) + response.set_cookie('dalton_user', user, max_age=432000) + return response @dalton_blueprint.route("/dalton") @dalton_blueprint.route("/dalton/") -# @login_required() +@check_user def page_index(): """the default homepage for Dalton""" return render_template("/dalton/index.html", page="") @@ -562,7 +600,7 @@ def page_index(): # 'sensor' value includes forward slashes so this isn't a RESTful endpoint # and 'sensor' value must be passed as a GET parameter @dalton_blueprint.route("/dalton/controller_api/request_engine_conf", methods=["GET"]) -# 
@auth_required() +@check_user def api_get_engine_conf_file(): try: sensor = request.args["sensor"] @@ -966,7 +1004,7 @@ def post_job_results(jobid): @dalton_blueprint.route("/dalton/controller_api/job_status/", methods=["GET"]) -# @login_required() +@check_user def get_ajax_job_status_msg(jobid): """return the job status msg (as a string)""" redis = get_redis() @@ -1001,7 +1039,7 @@ def get_ajax_job_status_msg(jobid): @dalton_blueprint.route( "/dalton/controller_api/job_status_code/", methods=["GET"] ) -# @login_required() +@check_user def get_ajax_job_status_code(jobid): """return the job status code (AS A STRING! -- you need to cast the return value as an int if you want to use it as an int)""" redis = get_redis() @@ -1080,7 +1118,7 @@ def clear_old_agents(redis): @dalton_blueprint.route("/dalton/sensor", methods=["GET"]) -# @login_required() +@check_user def page_sensor_default(return_dict=False): """the default sensor page""" redis = get_redis() @@ -1146,6 +1184,7 @@ def validate_jobid(jid): @dalton_blueprint.route("/dalton/coverage/job/", methods=["GET"]) +@check_user def page_coverage_jid(jid, error=None): redis = get_redis() @@ -1237,7 +1276,7 @@ def page_coverage_jid(jid, error=None): @dalton_blueprint.route("/dalton/coverage//", methods=["GET"]) -# @login_required() +@check_user def page_coverage_default(sensor_tech, error=None): """the default coverage wizard page""" redis = get_redis() @@ -1348,7 +1387,7 @@ def page_coverage_default(sensor_tech, error=None): @dalton_blueprint.route("/dalton/job/") -# @auth_required() +@check_user def page_show_job(jid): redis = get_redis() tech = redis.get("%s-tech" % jid) @@ -1673,8 +1712,7 @@ def submit_job(): @dalton_blueprint.route("/dalton/coverage/summary", methods=["POST"]) -# @auth_required() -# ^^ can change and add resource and group permissions if we want to restrict who can submit jobs +@check_user def page_coverage_summary(): """Handle job submission from UI.""" # user submitting a job to Dalton via the web 
interface @@ -1685,8 +1723,10 @@ def page_coverage_summary(): prod_ruleset_name = None - # get the user who submitted the job .. not implemented - user = "undefined" + # get the user who submitted the job + user = request.cookies.get('dalton_user') + if user is None: + user = "" # generate job_id based of pcap filenames and timestamp digest.update(str(datetime.datetime.now()).encode("utf-8")) @@ -2964,29 +3004,14 @@ def page_coverage_summary(): # make sure redirect is set to use http or https as appropriate if bSplitCap: # TODO: something better than just redirect to queue page - rurl = url_for("dalton_blueprint.page_queue_default", _external=True) - else: - rurl = url_for( - "dalton_blueprint.page_show_job", jid=jid, _external=True - ) - if rurl.startswith("http"): - if "HTTP_X_FORWARDED_PROTO" in request.environ: - # if original request was https, make sure redirect uses https - rurl = rurl.replace( - "http", request.environ["HTTP_X_FORWARDED_PROTO"] - ) - else: - logger.warning( - "Could not find request.environ['HTTP_X_FORWARDED_PROTO']. Make sure the web server (proxy) is configured to send it." 
- ) + rurl = url_for("dalton_blueprint.page_queue_default") else: - # this shouldn't be the case with '_external=True' passed to url_for() - logger.warning("URL does not start with 'http': %s" % rurl) + rurl = url_for("dalton_blueprint.page_show_job", jid=jid) return redirect(rurl) @dalton_blueprint.route("/dalton/queue") -# @login_required() +@check_user def page_queue_default(): """the default queue page""" redis = get_redis() @@ -3059,8 +3084,16 @@ def page_queue_default(): job["jid"] = jid job["tech"] = "%s" % redis.get("%s-tech" % jid) job["time"] = "%s" % redis.get("%s-submission_time" % jid) - job["user"] = "%s" % redis.get("%s-user" % jid) job["status"] = status_msg + # strip out auth prefix for display on queue page + user = redis.get("%s-user" % jid) + if user is None: + pass # handled by template + elif user.startswith(AUTH_PREFIX): + user = user[len(AUTH_PREFIX):] + elif '_' in user: + user = user.split('_', 1)[1] + job["user"] = user alert_count = get_alert_count(redis, jid) if status != STAT_CODE_DONE: job["alert_count"] = "-" @@ -3076,11 +3109,12 @@ def page_queue_default(): queued_jobs=queued_jobs, running_jobs=running_jobs, num_jobs=num_jobs_to_show, + non_empty_user_count=len([x['user'] for x in queue if x['user'] != "" and x["user"] is not None]), ) @dalton_blueprint.route("/dalton/about") -# @login_required() +@check_user def page_about_default(): """the about/help page""" # Need to `import app` here, not at the top of the file. 
@@ -3211,7 +3245,7 @@ def controller_api_get_job_data(redis, jid, requested_data): @dalton_blueprint.route( "/dalton/controller_api/v2///", methods=["GET"] ) -# @auth_required() +@check_user def controller_api_get_request(jid, requested_data, raw): logger.debug( f"controller_api_get_request() called, raw: {'True' if raw == 'raw' else 'False'}" @@ -3249,6 +3283,7 @@ def controller_api_get_request(jid, requested_data, raw): @dalton_blueprint.route( "/dalton/controller_api/get-current-sensors/", methods=["GET"] ) +@check_user def controller_api_get_current_sensors(engine): """Returns a list of current active sensors""" redis = get_redis() @@ -3300,6 +3335,7 @@ def controller_api_get_current_sensors(engine): @dalton_blueprint.route( "/dalton/controller_api/get-current-sensors-json-full", methods=["GET"] ) +@check_user def controller_api_get_current_sensors_json_full(): """Returns json with details about all the current active sensors""" sensors = page_sensor_default(return_dict=True) @@ -3312,6 +3348,7 @@ def controller_api_get_current_sensors_json_full(): @dalton_blueprint.route("/dalton/controller_api/get-max-pcap-files", methods=["GET"]) +@check_user def controller_api_get_max_pcap_files(): """Returns the config value of max_pcap_files (the number of pcap or compressed that can be uploaded per job). diff --git a/app/templates/dalton/queue.html b/app/templates/dalton/queue.html index 1cab5e3..a248253 100644 --- a/app/templates/dalton/queue.html +++ b/app/templates/dalton/queue.html @@ -23,7 +23,9 @@
Show Recent: - + {% if non_empty_user_count > 0 %} + + {% endif %} @@ -32,7 +34,9 @@
Show Recent: {% for job in queue %}
- + {% if non_empty_user_count > 0 %} + + {% endif %}
Job IDUserAlert Count Queue Submission Time
{{ job.jid }}{{ job.user }} {% if job.alert_count is number %} {{ job.alert_count|int }} diff --git a/app/templates/dalton/setuser.html b/app/templates/dalton/setuser.html new file mode 100644 index 0000000..1f7acbe --- /dev/null +++ b/app/templates/dalton/setuser.html @@ -0,0 +1,14 @@ +{% block body %} + +{% endblock %} diff --git a/dalton-agent/Dockerfiles/Dockerfile_suricata b/dalton-agent/Dockerfiles/Dockerfile_suricata index cbb19d3..5721e7a 100644 --- a/dalton-agent/Dockerfiles/Dockerfile_suricata +++ b/dalton-agent/Dockerfiles/Dockerfile_suricata @@ -24,6 +24,11 @@ RUN apt-get update -y && \ # for debugging agent #RUN apt-get install -y less nano +# get suricatasc; needed for Suricata 8 and later because it is no longer included with the Suricata source +WORKDIR /opt +ADD https://github.com/jasonish/python-suricatasc/archive/refs/heads/main.tar.gz suricatasc.tar.gz +RUN tar -zxf suricatasc.tar.gz + # download, build, and install Suricata from source RUN mkdir -p /src/suricata-${SURI_VERSION} WORKDIR /src diff --git a/dalton-agent/dalton-agent.conf b/dalton-agent/dalton-agent.conf index 885dbf2..ae4a017 100644 --- a/dalton-agent/dalton-agent.conf +++ b/dalton-agent/dalton-agent.conf @@ -70,6 +70,8 @@ USE_SURICATA_SOCKET_CONTROL = True # Location of Suricata Socket Control Python Module (should be # included with Suricata source), if not in PYTHONPATH. These are # utilized by the Agent to interact with the Suricata Unix socket. +# If built from the Dockerfile, the Dalton Agent should handle this, +# even for Suri 8 and later where suricatasc was removed from source. SURICATA_SC_PYTHON_MODULE = /src/suricata-REPLACE_AT_DOCKER_BUILD-VERSION/python # File name of the socket used for Suricata socket control. Must be full path. 
diff --git a/dalton-agent/dalton-agent.py b/dalton-agent/dalton-agent.py index 1efb279..7974293 100755 --- a/dalton-agent/dalton-agent.py +++ b/dalton-agent/dalton-agent.py @@ -366,6 +366,9 @@ def hash_file(filenames): ) ) ) + else: + # downloaded by Dockerfile + sys.path.insert(0, '/opt/python-suricatasc-main') # Used as Suricata default-log-dir when in SC mode os.makedirs(os.path.dirname(SURICATA_SOCKET_NAME), exist_ok=True) @@ -657,10 +660,14 @@ class DaltonError(Exception): try: import suricatasc except Exception: - logger.error( - f"Unable to import 'suricatasc' module (SURICATA_SC_PYTHON_MODULE set to '{SURICATA_SC_PYTHON_MODULE}'). Suricata Socket Control will be disabled." - ) - USE_SURICATA_SOCKET_CONTROL = False + sys.path.insert(0, '/opt/python-suricatasc-main') + try: + import suricatasc + except Exception: + logger.error( + f"Unable to import 'suricatasc' module (SURICATA_SC_PYTHON_MODULE set to '{SURICATA_SC_PYTHON_MODULE}'). Suricata Socket Control will be disabled." + ) + USE_SURICATA_SOCKET_CONTROL = False # **************************************** diff --git a/dalton.conf b/dalton.conf index 93a939e..0ef02d3 100644 --- a/dalton.conf +++ b/dalton.conf @@ -54,6 +54,22 @@ agent_purge_time = 20 # API Keys valid for Dalton Agents (currently not used) api_keys = bmV2ZXIgdW5kZXJlc3RpbWF0ZSB5b3VyIG9wcG9uZW50,ZXhwZWN0IHRoZSB1bmV4cGVjdGVk,dGFrZSBpdCBvdXRzaWRl,UGFpbiBkb24ndCBodXJ0 +# If you want to force users to set a username to use Dalton, set 'auth_prefix' to something other than 'disabled'. +# If the auth_prefix value is the empty string, then any username will be accepted (unless it is longer than the +# auth_max value). +# If not 'disabled' or the empty string, then the auth_prefix value should end with an underscore ('_') and the users +# must prefix their chosen username with the auth_prefix value in order to access Dalton. In this case the "auth_prefix" +# value acts as a shared secret, providing cursory, "poor man's" authentication. 
+ +# Prefix for valid dalton_user cookie value +auth_prefix = disabled +#auth_prefix = +#auth_prefix = sharedsecret_ + +# Max length for dalton_user value (including auth_prefix) +auth_max = 24 + + # location of mergecap binary; needed to combine multiple pcaps for Suricata jobs mergecap_binary = /usr/bin/mergecap diff --git a/docker-compose.yml b/docker-compose.yml index 3d60ac6..766893a 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,5 +1,3 @@ -version: '3' - services: controller: build: @@ -73,7 +71,7 @@ services: # to "--enable-rust". e.g.: # - ENABLE_RUST=--enable-rust -# Suricata current (latest) from source +# Suricata current (latest) from source; will be v8.x until late 2026 agent-suricata-current: build: context: ./dalton-agent @@ -89,22 +87,38 @@ services: - AGENT_DEBUG=${AGENT_DEBUG} restart: always -# Suricata 6.0.20 from source, with Rust support - agent-suricata-6.0.20: +# Suricata 7.0.14 from source, with Rust support + agent-suricata-7.0.14: build: context: ./dalton-agent dockerfile: Dockerfiles/Dockerfile_suricata args: - - SURI_VERSION=6.0.20 + - SURI_VERSION=7.0.14 - http_proxy=${http_proxy} - https_proxy=${https_proxy} - no_proxy=${no_proxy} - image: suricata-6.0.20:latest - container_name: suricata-6.0.20 + image: suricata-7.0.14:latest + container_name: suricata-7.0.14 environment: - AGENT_DEBUG=${AGENT_DEBUG} restart: always +# Suricata 6.0.20 from source, with Rust support +# agent-suricata-6.0.20: +# build: +# context: ./dalton-agent +# dockerfile: Dockerfiles/Dockerfile_suricata +# args: +# - SURI_VERSION=6.0.20 +# - http_proxy=${http_proxy} +# - https_proxy=${https_proxy} +# - no_proxy=${no_proxy} +# image: suricata-6.0.20:latest +# container_name: suricata-6.0.20 +# environment: +# - AGENT_DEBUG=${AGENT_DEBUG} +# restart: always + # Suricata 5.0.6 from source, with Rust support # agent-suricata-5.0.7: # build: @@ -423,30 +437,31 @@ services: ####### Zeek Agents ####### ########################### -# Zeek current (latest) 
from source +# Zeek 7.0.11 agent-zeek-current: build: context: ./dalton-agent dockerfile: Dockerfiles/Dockerfile_zeek args: - - ZEEK_VERSION=7.0.1 + - ZEEK_VERSION=7.0.11 image: zeek-current:latest container_name: zeek-current volumes: - ./rulesets/zeek:/opt/dalton-agent/zeek_scripts:ro restart: always - agent-zeek-6.0.6: - build: - context: ./dalton-agent - dockerfile: Dockerfiles/Dockerfile_zeek - args: - - ZEEK_VERSION=6.0.6 - image: zeek-6.0.6 - container_name: zeek-6.0.6 - volumes: - - ./rulesets/zeek:/opt/dalton-agent/zeek_scripts:ro - restart: always +# Zeek 6.0.6 +# agent-zeek-6.0.6: +# build: +# context: ./dalton-agent +# dockerfile: Dockerfiles/Dockerfile_zeek +# args: +# - ZEEK_VERSION=6.0.6 +# image: zeek-6.0.6 +# container_name: zeek-6.0.6 +# volumes: +# - ./rulesets/zeek:/opt/dalton-agent/zeek_scripts:ro +# restart: always ########################### ## Cyberchef Integration ## diff --git a/pyproject.toml b/pyproject.toml index a540935..989c05a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,7 +15,7 @@ dependencies = [ "ruamel.yaml<0.18.0", "idstools==0.6.5", "flowsynth>=1.4.1", - "Werkzeug==3.1.3", + "Werkzeug==3.1.5", "itsdangerous==2.2.0", ] authors = [ diff --git a/start-dalton.sh b/start-dalton.sh index d817817..cfb952c 100755 --- a/start-dalton.sh +++ b/start-dalton.sh @@ -3,4 +3,4 @@ # build the docker containers and start them up cd "${0%/*}" -docker-compose build && docker-compose up -d +docker compose build && docker compose up -d