pcap_analyzers adjustments + new playbook for PCAP files + upgraded Suricata to v7 #2325

Merged: 7 commits, May 17, 2024 (diff below shows changes from 2 commits)
api_app/analyzers_manager/file_analyzers/hfinger.py (11 additions, 1 deletion)

@@ -16,7 +16,17 @@ class Hfinger(FileAnalyzer):
     fingerprint_report_mode: int = 2

     def run(self):
-        return hfinger_analyze(self.filepath, self.fingerprint_report_mode)
+        reports = dict()
+        reports["extraction"] = hfinger_analyze(
+            self.filepath, self.fingerprint_report_mode
+        )
+        fingerprints = set()
+        for report in reports["extraction"]:
+            fingerprint = report.get("fingerprint", "")
+            if fingerprint:
+                fingerprints.add(fingerprint)
+        reports["fingerprints_summary"] = list(fingerprints)
+        return reports

     @classmethod
     def update(cls) -> bool:
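The reworked run() keeps the raw per-request hfinger output under "extraction" and adds a deduplicated "fingerprints_summary" list. A minimal sketch of the returned shape, assuming hfinger_analyze yields one dict per HTTP request carrying a "fingerprint" key; the sample values and the "uri" field are illustrative only, not taken from this PR:

# Illustrative sketch of the dict returned by the new Hfinger.run()
example_report = {
    "extraction": [
        {"fingerprint": "A1|2.3|...", "uri": "/index.html"},   # hypothetical entry
        {"fingerprint": "A1|2.3|...", "uri": "/favicon.ico"},  # duplicate fingerprint
    ],
    "fingerprints_summary": ["A1|2.3|..."],  # duplicates collapsed by the set
}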
New file (118 additions): data migration adding the PCAP_Analysis playbook
@@ -0,0 +1,118 @@
from django.db import migrations
from django.db.models.fields.related_descriptors import (
    ForwardManyToOneDescriptor,
    ForwardOneToOneDescriptor,
    ManyToManyDescriptor,
)

plugin = {
    "id": 10,
    "analyzers": ["Hfinger", "Suricata"],
    "connectors": [],
    "pivots": [],
    "for_organization": False,
    "name": "PCAP_Analysis",
    "description": "A Playbook containing the analyzers that support PCAP analysis",
    "disabled": False,
    "type": ["file"],
    "runtime_configuration": {
        "pivots": {},
        "analyzers": {},
        "connectors": {},
        "visualizers": {},
    },
    "scan_mode": 2,
    "scan_check_time": "1 00:00:00",
    "tlp": "RED",
    "starting": True,
    "owner": None,
    "tags": [],
    "model": "playbooks_manager.PlaybookConfig",
}

params = []

values = []


def _get_real_obj(Model, field, value):
    def _get_obj(Model, other_model, value):
        if isinstance(value, dict):
            real_vals = {}
            for key, real_val in value.items():
                real_vals[key] = _get_real_obj(other_model, key, real_val)
            value = other_model.objects.get_or_create(**real_vals)[0]
        # it is just the primary key serialized
        else:
            if isinstance(value, int):
                if Model.__name__ == "PluginConfig":
                    value = other_model.objects.get(name=plugin["name"])
                else:
                    value = other_model.objects.get(pk=value)
            else:
                value = other_model.objects.get(name=value)
        return value

    if (
        type(getattr(Model, field))
        in [ForwardManyToOneDescriptor, ForwardOneToOneDescriptor]
        and value
    ):
        other_model = getattr(Model, field).get_queryset().model
        value = _get_obj(Model, other_model, value)
    elif type(getattr(Model, field)) in [ManyToManyDescriptor] and value:
        other_model = getattr(Model, field).rel.model
        value = [_get_obj(Model, other_model, val) for val in value]
    return value


def _create_object(Model, data):
    mtm, no_mtm = {}, {}
    for field, value in data.items():
        value = _get_real_obj(Model, field, value)
        if type(getattr(Model, field)) is ManyToManyDescriptor:
            mtm[field] = value
        else:
            no_mtm[field] = value
    try:
        o = Model.objects.get(**no_mtm)
    except Model.DoesNotExist:
        o = Model(**no_mtm)
        o.full_clean()
        o.save()
        for field, value in mtm.items():
            attribute = getattr(o, field)
            if value is not None:
                attribute.set(value)
        return False
    return True


def migrate(apps, schema_editor):
    Parameter = apps.get_model("api_app", "Parameter")
    PluginConfig = apps.get_model("api_app", "PluginConfig")
    python_path = plugin.pop("model")
    Model = apps.get_model(*python_path.split("."))
    if not Model.objects.filter(name=plugin["name"]).exists():
        exists = _create_object(Model, plugin)
        if not exists:
            for param in params:
                _create_object(Parameter, param)
            for value in values:
                _create_object(PluginConfig, value)


def reverse_migrate(apps, schema_editor):
    python_path = plugin.pop("model")
    Model = apps.get_model(*python_path.split("."))
    Model.objects.get(name=plugin["name"]).delete()


class Migration(migrations.Migration):
    atomic = False
    dependencies = [
        ("api_app", "0062_alter_parameter_python_module"),
        ("playbooks_manager", "0042_add_blint_to_free_analyzers_and_static_analyzers"),
    ]

    operations = [migrations.RunPython(migrate, reverse_migrate)]
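Once this migration is applied, the playbook should be queryable from Django. A quick verification sketch, assuming the model path playbooks_manager.PlaybookConfig from the plugin dict above resolves to playbooks_manager.models.PlaybookConfig and that you run it in the uwsgi container's Django shell:

# Sketch: confirm the PCAP_Analysis playbook exists after migrating.
# Open a shell first, e.g.: docker exec -ti intelowl_uwsgi python3 manage.py shell
from playbooks_manager.models import PlaybookConfig  # import path assumed

playbook = PlaybookConfig.objects.get(name="PCAP_Analysis")
print(sorted(playbook.analyzers.values_list("name", flat=True)))
# expected: ['Hfinger', 'Suricata']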
docs/source/Contribute.md (1 addition, 1 deletion)

@@ -412,7 +412,7 @@ In the Pull Request remember to provide some real world examples (screenshots an
 To allow other people to use your configuration, that is now stored in your local database, you have to export it and create a data migration
 You can use the django management command `dumpplugin` to automatically create the migration file for your new analyzer (you will find it under `api_app/playbook_manager/migrations`).

-Example: `docker exec -ti intelowl_uwsgi python3 manage.py dumpplugin PluginConfig <new_analyzer_name>`
+Example: `docker exec -ti intelowl_uwsgi python3 manage.py dumpplugin PlaybookConfig <new_analyzer_name>`

 ## How to modify a plugin
docs/source/Installation.md (1 addition, 1 deletion)

@@ -12,7 +12,7 @@ In some systems you could find pre-installed older versions. Please check this a
 <p class="admonition-title">Note</p>
 <ul>
 <li>The project uses public docker images that are available on <a href="https://hub.docker.com/repository/docker/intelowlproject/intelowl">Docker Hub</a></li>
-<li>IntelOwl is tested and supported to work in a Debian distro. Other Linux-based OS <i>should</i> work but that has not been tested much. It <i>may</i> also run on Windows, but that is not officially supported.</li>
+<li>IntelOwl is tested and supported to work in a Debian distro. More precisely we suggest using Ubuntu. Other Linux-based OS <i>should</i> work but that has not been tested much. It <i>may</i> also run on Windows, but that is not officially supported.</li>
 <li>Before installing remember that you must comply with the <a href="https://github.com/certego/IntelOwl/blob/master/LICENSE">LICENSE</a> and the <a href="https://github.com/certego/IntelOwl/blob/master/.github/legal_notice.md">Legal Terms</a></li>
 </ul>
 </div>
integrations/pcap_analyzers/Dockerfile (7 additions, 6 deletions)

@@ -1,11 +1,10 @@
-# https://github.com/jasonish/docker-suricata/tree/master/6.0
-FROM jasonish/suricata:6.0
+# https://github.com/jasonish/docker-suricata/tree/master/7.0
+FROM jasonish/suricata:7.0

 ENV PROJECT_PATH /opt/deploy
 ENV LOG_PATH /var/log/intel_owl/pcap_analyzers
 ENV USER pcap_analyzers-user
-# Python 3.8 is required to have Flask work correctly. Base leverage Python 3.6
-RUN dnf -y remove python3 && dnf -y install python3.8 && dnf clean all && useradd -ms /bin/bash ${USER}
+RUN dnf -y install python3-pip && dnf clean all && useradd -ms /bin/bash ${USER}

 # Build Flask REST API
 WORKDIR ${PROJECT_PATH}/pcap_analyzers-flask
@@ -19,8 +18,10 @@ COPY config/suricata/rules/* /var/lib/suricata/rules

 RUN pip3 install -r requirements.txt --no-cache-dir \
     && chown -R ${USER}:${USER} . /etc/suricata /var/lib/suricata \
-    && touch /var/log/cron.log \
-    && chmod 0644 /etc/cron.d/suricata /var/log/cron.log
+    && mkdir -p ${LOG_PATH}/suricata/ \
+    && touch /var/log/cron.log ${LOG_PATH}/suricata/suricata.log \
+    && chmod 0666 ${LOG_PATH}/suricata \
+    && chmod 0644 /etc/cron.d/suricata /var/log/cron.log ${LOG_PATH}/suricata/suricata.log

 # Serve Flask application using gunicorn
 EXPOSE 4004
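After rebuilding on suricata:7.0, one quick sanity check is that the Flask/gunicorn service still binds its port (4004 per the EXPOSE directive above). A minimal probe, assuming the container publishes that port on localhost:

# Sketch: verify the pcap_analyzers service is reachable after the rebuild.
import socket

with socket.create_connection(("127.0.0.1", 4004), timeout=5):  # host/port mapping assumed
    print("pcap_analyzers is listening on port 4004")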
integrations/pcap_analyzers/config/suricata/etc/suricata.yaml (2 additions, 2 deletions)

@@ -49,7 +49,7 @@ vars:
 # The default logging directory. Any log or output file will be
 # placed here if it's not specified with a full path name. This can be
 # overridden with the -l command line parameter.
-default-log-dir: /var/log/intel_owl/suricata/
+default-log-dir: /var/log/intel_owl/pcap_analyzers/suricata/

 # Global stats configuration
 stats:
@@ -494,7 +494,7 @@ logging:
   - file:
       enabled: no
       level: info
-      filename: /var/log/intel_owl/suricata/suricata.log
+      filename: /var/log/intel_owl/pcap_analyzers/suricata/suricata.log
       # type: json
   - syslog:
       enabled: no
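Because default-log-dir now points under /var/log/intel_owl/pcap_analyzers/suricata/, anything consuming Suricata output must read from the new location. A hedged sketch that iterates alert events, assuming this deployment keeps Suricata's default EVE JSON output name, eve.json:

# Sketch: read Suricata alert events from the relocated log directory.
# The eve.json filename is Suricata's default EVE output; whether it is
# enabled here is an assumption, not something this diff shows.
import json

EVE_PATH = "/var/log/intel_owl/pcap_analyzers/suricata/eve.json"

with open(EVE_PATH) as fh:
    for line in fh:
        event = json.loads(line)
        if event.get("event_type") == "alert":
            print(event["alert"]["signature"], event.get("src_ip"))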
integrations/pcap_analyzers/entrypoint.sh (2 additions, 2 deletions)

@@ -1,6 +1,6 @@
 #!/bin/sh
-mkdir -p ${LOG_PATH}
-touch ${LOG_PATH}/gunicorn_access.log ${LOG_PATH}/gunicorn_errors.log
+mkdir -p ${LOG_PATH} ${LOG_PATH}/suricata
+touch ${LOG_PATH}/gunicorn_access.log ${LOG_PATH}/gunicorn_errors.log ${LOG_PATH}/suricata/suricata.log
 chown -R pcap_analyzers-user:pcap_analyzers-user ${LOG_PATH}
 su pcap_analyzers-user -s /bin/bash
 suricata-update update-sources
start (3 additions, 4 deletions)

@@ -103,7 +103,7 @@ check_parameters "$@" && shift 2
 load_env "docker/.env"
 current_version=${REACT_APP_INTELOWL_VERSION/"v"/""}

-docker_analyzers=("tor_analyzers" "malware_tools_analyzers" "cyberchef" "pcap_analyzers" "phoneinfoga")
+docker_analyzers=("pcap_analyzers" "tor_analyzers" "malware_tools_analyzers" "cyberchef" "phoneinfoga")

 declare -A path_mapping=(["default"]="docker/default.yml" ["postgres"]="docker/postgres.override.yml" ["rabbitmq"]="docker/rabbitmq.override.yml" ["test"]="docker/test.override.yml" ["ci"]="docker/ci.override.yml" ["custom"]="docker/custom.override.yml" ["traefik"]="docker/traefik.override.yml" ["multi_queue"]="docker/multi-queue.override.yml" ["test_multi_queue"]="docker/test.multi-queue.override.yml" ["flower"]="docker/flower.override.yml" ["test_flower"]="docker/test.flower.override.yml" ["elastic"]="docker/elasticsearch.override.yml" ["https"]="docker/https.override.yml" ["nfs"]="docker/nfs.override.yml" ["redis"]="docker/redis.override.yml")
 for value in "${docker_analyzers[@]}"; do
@@ -160,12 +160,11 @@ while [[ $# -gt 0 ]]; do
         shift 1
         ;;
     --pcap_analyzers)
-        params["pcap_analyzers"]=true
+        analyzers["pcap_analyzers"]=true
         shift 1
         ;;
-
     --phoneinfoga)
-        params["phoneinfoga"]=true
+        analyzers["phoneinfoga"]=true
         shift 1
         ;;
     --multi_queue)