Merge pull request #124 from nttcom/feature-#59_black
#59 Set up black to format the code
Takuma Tsubaki authored Jul 3, 2023
2 parents 7e3fa51 + a112144 commit fce16cf
Showing 21 changed files with 86 additions and 95 deletions.
10 changes: 10 additions & 0 deletions .github/workflows/black.yml
@@ -0,0 +1,10 @@
name: black

on: pull_request

jobs:
lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: psf/black@stable
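The workflow above runs the psf/black action on every pull request; by default it checks formatting without rewriting files, so the job fails whenever a file would be reformatted. As a rough, illustrative sketch of the normalization being enforced (assuming black is installed and that its black.format_str / black.Mode API is available; the sample source line is taken from the config diff below):

import black

# black rewrites single-quoted strings to double quotes, which is the bulk of
# this commit's diff.
source = "LABEL_ID = ''\n"
print(black.format_str(source, mode=black.Mode()), end="")
# expected output: LABEL_ID = ""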
3 changes: 2 additions & 1 deletion .gitignore
@@ -9,4 +9,5 @@
*.log
*.pcap
*~
Dockerfile.*
Dockerfile.*
__pycache__
36 changes: 18 additions & 18 deletions osect_sensor/Application/edge_cron/common/common_config.py
@@ -1,22 +1,22 @@
LABEL_ID = ''
LABEL_ID = ""
""" 複数スイッチ対応用のラベルID """

PCAP_UPLOADING_FILE_PATH = 'paper/sc_src/input/pcap/uploading/'
PCAP_UPLOADING_FILE_PATH = "paper/sc_src/input/pcap/uploading/"
"""pcapのアップロード先の配置パス"""

PCAP_UPLOADED_FILE_PATH = 'paper/sc_src/input/pcap/uploaded/'
PCAP_UPLOADED_FILE_PATH = "paper/sc_src/input/pcap/uploaded/"
"""pcapのアップロードが終わったファイルを配置するパス"""

PCAP_ANALYZE_FILE_PATH = 'paper/sc_src/input/pcap/analyze/'
PCAP_ANALYZE_FILE_PATH = "paper/sc_src/input/pcap/analyze/"
"""解析中のpcap及び中間ファイルの配置パス"""

PCAP_COMPLETE_FILE_PATH = 'paper/sc_src/input/pcap/complete/'
PCAP_COMPLETE_FILE_PATH = "paper/sc_src/input/pcap/complete/"
"""全ての処理が終わったpcap及び中間ファイルの配置パス"""

PCAP_COMPLETE_ARCHIVES_FILE_PATH = 'paper/sc_src/input/pcap/complete_archives/'
PCAP_COMPLETE_ARCHIVES_FILE_PATH = "paper/sc_src/input/pcap/complete_archives/"
"""全ての処理が終わったpcap及び中間ファイルのアーカイブパス"""

PCAP_SERVER_UPLOADING_FILE_PATH = 'paper/sc_src/input/pcap/server_uploading/'
PCAP_SERVER_UPLOADING_FILE_PATH = "paper/sc_src/input/pcap/server_uploading/"
""" ログ解析が終わったディレクトリをuploadするための一時領域 """

SURICATA_ENABLE = True
@@ -34,24 +34,24 @@
MODBUS_ENABLE = False
""" Modbusトラフィックを取り込むか否か """

BRO_SHELL_COMMAND = '/opt/ot_tools/bro.sh'
BRO_SHELL_COMMAND = "/opt/ot_tools/bro.sh"
"""broのログ取得コマンド"""

P0F_SHELL_COMMAND = '/opt/ot_tools/p0f.sh'
P0F_SHELL_COMMAND = "/opt/ot_tools/p0f.sh"

P0F_AWK_COMMAND = '/opt/ot_tools/p0f-dic/p0f-dic-awk.sh'
P0F_AWK_COMMAND = "/opt/ot_tools/p0f-dic/p0f-dic-awk.sh"

BACNET_SHELL_COMMAND = '/opt/ot_tools/bacnet.sh'
BACNET_SHELL_COMMAND = "/opt/ot_tools/bacnet.sh"

SURICATA_SHELL_COMMAND = '/opt/ot_tools/suricata.sh'
SURICATA_SHELL_COMMAND = "/opt/ot_tools/suricata.sh"
""" SURICATAのログ取得コマンド """

YAF_SHELL_COMMAND = '/opt/ot_tools/yaf.sh'
YAF_SHELL_COMMAND = "/opt/ot_tools/yaf.sh"

SURICATA_YAML = '/opt/ot_tools/suricata.yaml'
SURICATA_YAML = "/opt/ot_tools/suricata.yaml"
""" SURICATAの設定ファイル保存場所 """

ALLOWED_PCAP_EXT = '.pcap,.cap,.pkt'
ALLOWED_PCAP_EXT = ".pcap,.cap,.pkt"
""" アップロード出来るPCAPファイルの拡張子 """

ALLOWED_LOG_EXT = ".tar.zst"
@@ -63,7 +63,7 @@
PCAP_TO_DB_CPU = 5
""" PCAPをログ化する際に使用するCPU数 """

API_URL = 'https://your url/paper/api/v1/createlogdata/post'
API_URL = "https://your url/paper/api/v1/createlogdata/post"
""" SaaS連携用APIのURL """

TIME_OUT_VAL = 120
@@ -78,10 +78,10 @@
PCAP_COMPLETE_ARCHIVES_DELETE_LIMIT_CAPACITY = 150
"""completeとcomplete_archivesの保持容量上限値(GB)"""

CLIENT_CERTIFICATE_PATH = '/etc/ssl/private/client.pem'
CLIENT_CERTIFICATE_PATH = "/etc/ssl/private/client.pem"
"""クライアント認証のための証明書・秘密鍵"""

SEND_VERSION_API_URL = 'https://your url/paper/api/v1/sensor_status/post'
SEND_VERSION_API_URL = "https://your url/paper/api/v1/sensor_status/post"
"""Suricataシグネチャのバージョンを送るURL"""

SEND_REQUST_TIMEOUT = 180
17 changes: 9 additions & 8 deletions osect_sensor/Application/edge_cron/common/common_function.py
@@ -2,23 +2,24 @@
# import dpkt
# import ipaddress
from unicodedata import category

# import re
# import struct


def deleteCc(s):
r = ''
r = ""
for c in s:
if category(c) == 'Cc':
if category(c) == "Cc":
continue
r += c

return r


#def parseDHCP(udpData):
#def parseNBNS(udpData):
#def parseMWBP(udpData):
#def parseSSDP(udpData):
#def parseDHCPv6(udpData):
#def pcap2log(pcapFile, logDir):
# def parseDHCP(udpData):
# def parseNBNS(udpData):
# def parseMWBP(udpData):
# def parseSSDP(udpData):
# def parseDHCPv6(udpData):
# def pcap2log(pcapFile, logDir):
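For context, deleteCc (defined just above) drops Unicode control characters: unicodedata.category returns "Cc" for characters such as NUL, TAB, CR, and LF, and those characters are skipped while everything else is copied through. A minimal, self-contained usage sketch (the sample input is made up for illustration):

from unicodedata import category

def deleteCc(s):
    # same logic as the function above: skip characters in category "Cc"
    r = ""
    for c in s:
        if category(c) == "Cc":
            continue
        r += c
    return r

print(deleteCc("host\x00name\r\n"))  # prints "hostname"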
2 changes: 1 addition & 1 deletion osect_sensor/Application/edge_cron/cron/apps.py
@@ -2,4 +2,4 @@


class CronConfig(AppConfig):
name = 'cron'
name = "cron"
@@ -48,9 +48,7 @@ def exec_complete_to_archives(logger):
logger.info("There is no compression file or directory")

# Delete files under complete_archives whose retention period has expired
file_date_archives_infos = _get_file_date_info(
PCAP_COMPLETE_ARCHIVES_FILE_PATH
)
file_date_archives_infos = _get_file_date_info(PCAP_COMPLETE_ARCHIVES_FILE_PATH)
target_archives_list = _get_target_list(
file_date_archives_infos, PCAP_COMPLETE_ARCHIVES_DELETE_LIMIT_DATE
)
@@ -94,9 +92,9 @@ def _get_file_date_info(target_path):
file_date_infos = [
{
"file_name": file_name,
"file_date": datetime.fromtimestamp(
os.path.getmtime(file_name)
).strftime("%Y%m%d"),
"file_date": datetime.fromtimestamp(os.path.getmtime(file_name)).strftime(
"%Y%m%d"
),
}
for file_name in file_names
]
@@ -113,9 +111,7 @@ def _get_target_list(file_date_infos, limit_date):
:param limit_date: retention date
:return: target_file_list
"""
limit_date = (datetime.now() + timedelta(days=-limit_date)).strftime(
"%Y%m%d"
)
limit_date = (datetime.now() + timedelta(days=-limit_date)).strftime("%Y%m%d")

target_file_list = []
for file_date_info in file_date_infos:
@@ -140,9 +136,7 @@ def _compress_file(target_compression_list):
compression_file = os.path.join(
PCAP_COMPLETE_ARCHIVES_FILE_PATH, "%s.zip" % (file_name)
)
compression_path = os.path.join(
PCAP_COMPLETE_ARCHIVES_FILE_PATH, file_name
)
compression_path = os.path.join(PCAP_COMPLETE_ARCHIVES_FILE_PATH, file_name)

if os.path.isfile(target_compression):
# file
@@ -154,9 +148,7 @@
os.remove(target_compression)
else:
# directory
shutil.make_archive(
compression_path, "zip", root_dir=target_compression
)
shutil.make_archive(compression_path, "zip", root_dir=target_compression)
# delete the directory after compression
shutil.rmtree(target_compression)

@@ -204,9 +196,7 @@ def _get_file_size_info():
file_info_list.append(file_info)
else:
# directory
file_names = glob.glob(
os.path.join(file_path, "**"), recursive=True
)
file_names = glob.glob(os.path.join(file_path, "**"), recursive=True)
total_path_size = sum(
(
os.path.getsize(file_name)
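The retention handling in this file works on dates rendered with strftime("%Y%m%d"): each file's mtime is formatted that way, and a cutoff is computed by subtracting the configured number of days from now (the limit_date line above). Because "%Y%m%d" strings order the same way chronologically and lexicographically, plain string comparison suffices; the actual comparison is collapsed in the diff, so the sketch below only illustrates the idea, with made-up values:

from datetime import datetime, timedelta

retention_days = 30  # e.g. PCAP_COMPLETE_ARCHIVES_DELETE_LIMIT_DATE; value is illustrative
cutoff = (datetime.now() + timedelta(days=-retention_days)).strftime("%Y%m%d")
file_date = "20230601"  # hypothetical mtime formatted with "%Y%m%d"
if file_date < cutoff:
    print(file_date, "is older than the retention window")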
@@ -47,6 +47,7 @@
LABEL_ID,
CLIENT_CERTIFICATE_PATH,
)

# from common.common_function import pcap2log
from django.core.management.base import BaseCommand
from edge_cron.settings import BASE_DIR
@@ -80,7 +81,7 @@ def handle(self, *args, **options):
logger.debug("DEBUG pcap_num=" + str(pcap_num))
logger.debug(str(pcap_list))

#if pcap_num == 0:
# if pcap_num == 0:
# logger.info(
# "There is no target file [" + ", ".join(allowed_ext_list) + "]"
# )
@@ -93,8 +94,8 @@
log_info(start, "end pcap_to_log")

# Move processed PCAP files
#move_pcap_file(analyze_pcap_list)
#log_info(start, "end move_pcap_file")
# move_pcap_file(analyze_pcap_list)
# log_info(start, "end move_pcap_file")

# Move directories containing processed logs to the complete directory
move_pcap_dir(analyze_pcap_dir_list, PCAP_COMPLETE_FILE_PATH)
@@ -117,9 +118,7 @@
mode="w",
) as tar:
for file_name in os.listdir(log_dir):
tar.add(
os.path.join(log_dir, file_name), arcname=file_name
)
tar.add(os.path.join(log_dir, file_name), arcname=file_name)

# Compress (zstandard)
compress_name = dir_name + ALLOWED_LOG_EXT
@@ -141,10 +140,10 @@
end_time = time.perf_counter()

# Do not sleep when the random number is less than the processing time
#processing_time = math.ceil(end_time - start_time)
#sleep_time = max(0, (random.randrange(1, 60, 1) - processing_time))
#time.sleep(sleep_time)
#logger.info("sleep " + str(sleep_time) + "s")
# processing_time = math.ceil(end_time - start_time)
# sleep_time = max(0, (random.randrange(1, 60, 1) - processing_time))
# time.sleep(sleep_time)
# logger.info("sleep " + str(sleep_time) + "s")

try:
send_server(tar_list)
@@ -192,11 +191,11 @@ def wrapper_log_function(func_type, analyze_full_path, dir_name, pcap_name):
elif func_type == 2:
# Log creation via pcap2log
logger.info("pcap to log")
#pcap2log(
# pcap2log(
# PCAP_ANALYZE_FILE_PATH + pcap_name,
# PCAP_ANALYZE_FILE_PATH + dir_name,
#)
#proc.wait()
# )
# proc.wait()
elif func_type == 4:
if SURICATA_ENABLE:
# Process suricata logs
@@ -252,11 +251,7 @@ def get_pcap_list():
extend_pcap_list = pcap_list.extend
for ext in allowed_ext_list:
extend_pcap_list(
sorted(
glob.glob(
PCAP_UPLOADED_FILE_PATH + "**/*" + ext, recursive=True
)
)
sorted(glob.glob(PCAP_UPLOADED_FILE_PATH + "**/*" + ext, recursive=True))
)

return pcap_list, allowed_ext_list
@@ -277,20 +272,20 @@ def pcap_to_log(pcap_list):
# pcap_name = os.path.basename(pcap)
# dir_name = os.path.splitext(os.path.basename(pcap))[0]
dir_name = "realtime-" + datetime.datetime.now().strftime("%Y-%m-%d-%H:%M:%S")
pcap_name = "paper" # ダミー
pcap_name = "paper" # ダミー
analyze_pcap_dir = analyze_full_path + dir_name
analyze_pcap_dir_list.append(analyze_pcap_dir)

#try:
# # pcap move processing
# logger.info("move pcap file")
# analyze_pcap = shutil.move(pcap, PCAP_ANALYZE_FILE_PATH)
# logger.info("end move pcap file")
# analyze_pcap_list.append(analyze_pcap)
# logger.info("end append pcap list")
#except Exception as e:
# logger.error("pcap move error (to analyze directory): " + str(e))
# continue
# try:
# # pcap move processing
# logger.info("move pcap file")
# analyze_pcap = shutil.move(pcap, PCAP_ANALYZE_FILE_PATH)
# logger.info("end move pcap file")
# analyze_pcap_list.append(analyze_pcap)
# logger.info("end append pcap list")
# except Exception as e:
# logger.error("pcap move error (to analyze directory): " + str(e))
# continue

try:
# Create the directory for storing bro logs
@@ -302,17 +297,13 @@

try:
if YAF_ENABLE:
func_type_list = (
[0, 1, 2, 4, 5]
)
func_type_list = [0, 1, 2, 4, 5]
else:
func_type_list = (
[0, 1, 2, 4]
)
func_type_list = [0, 1, 2, 4]
analyze_full_path_list = [analyze_full_path] * len(func_type_list)
dir_name_list = [dir_name] * len(func_type_list)
pcap_name_list = [pcap_name] * len(func_type_list)

with Pool(PCAP_TO_DB_CPU) as pool:
args = list(
zip(
@@ -362,9 +353,7 @@ def move_pcap_dir(log_dir_list, dst_dir):
logger.info("move analyzed log directory [" + pcap_dir + "]")
shutil.move(pcap_dir, dst_dir)
except Exception as e:
logger.error(
"log directory move error (to " + dst_dir + "): " + str(e)
)
logger.error("log directory move error (to " + dst_dir + "): " + str(e))
continue


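Elsewhere in this command the handler tars each analyzed log directory (the tar.add loop above) and then compresses the archive before upload; ALLOWED_LOG_EXT = ".tar.zst" and the "Compress (zstandard)" comment indicate zstd compression, but the compression code itself is collapsed in the diff. A rough sketch of that tar-then-zstd pattern (paths are hypothetical and the zstandard calls are an assumption, not the project's exact code):

import os
import tarfile
import zstandard as zstd

log_dir = "/tmp/example_logs"  # hypothetical directory of analyzed logs
tar_name = "example.tar"

# bundle the directory contents, mirroring the tar.add(...) loop in the diff
with tarfile.open(tar_name, mode="w") as tar:
    for file_name in os.listdir(log_dir):
        tar.add(os.path.join(log_dir, file_name), arcname=file_name)

# compress the tarball to .tar.zst with the zstandard module
with open(tar_name, "rb") as src, open(tar_name + ".zst", "wb") as dst:
    zstd.ZstdCompressor().copy_stream(src, dst)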
4 changes: 2 additions & 2 deletions osect_sensor/Application/edge_cron/manage.py
@@ -2,8 +2,8 @@
import os
import sys

if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'edge_cron.settings')
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "edge_cron.settings")
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
16 changes: 8 additions & 8 deletions osect_sensor/tools/sosreport/plugins/osect_probe_on_docker.py
@@ -1,15 +1,15 @@

import os

from sos.report.plugins import Plugin, IndependentPlugin
from sos.report.plugins import IndependentPlugin, Plugin


class osect_probe_on_docker_logs(Plugin, IndependentPlugin):
def setup(self):
sos_logs_path = os.environ.get('SOS_LOGS_PATH', None)
sos_logs_path = os.environ.get("SOS_LOGS_PATH", None)
if sos_logs_path:
self.add_copy_spec([
'/var/log',
'%s/logs/ottools' % (sos_logs_path),
])

self.add_copy_spec(
[
"/var/log",
"%s/logs/ottools" % (sos_logs_path),
]
)
