Dev to main - Enhancements and Bug Fixes #24

Merged
merged 8 commits into main from dev on Sep 27, 2023
1 change: 1 addition & 0 deletions .gitignore
@@ -7,3 +7,4 @@ __pycache__/
*.pyc
.flake8
.vscode/
+ *.sqlite-journal
33 changes: 25 additions & 8 deletions database_utils.py
@@ -6,6 +6,7 @@
import random
import string
import datetime
+ import hashlib


def connectDatabase(name, verbose):
@@ -357,6 +358,9 @@ def insertHandshake(cursor, verbose, bssid, mac, file):
# Insert file
error += insertFile(cursor, verbose, file)

+ # Get file hash MD5
+ hash = hashlib.md5(open(file,'rb').read()).hexdigest()

# insertHandshake Client and AP CONSTRAINT
ssid = ""
manuf = ""
@@ -382,8 +386,8 @@ def insertHandshake(cursor, verbose, bssid, mac, file):

# print(row[5].replace(' ', ''))
cursor.execute(
- '''INSERT INTO handshake VALUES(?,?,?,?)''',
- (bssid.upper(), mac.upper(), file, ""))
+ '''INSERT INTO handshake VALUES(?,?,?,?,?)''',
+ (bssid.upper(), mac.upper(), file, hash, ""))
return int(error)
except sqlite3.IntegrityError as error:
# errors += 1
@@ -482,10 +486,13 @@ def insertSeenAP(cursor, verbose, bssid, time, tool, signal_rsi,
return int(1)


- def setHashcat(cursor, verbose, bssid, mac, file, hash):
+ def setHashcat(cursor, verbose, bssid, mac, file, hashcat):
try:
- cursor.execute('''INSERT OR REPLACE INTO Handshake VALUES(?,?,?,?)''',
- (bssid.upper(), mac.upper(), file, hash))
+ hashMD5 = hashlib.md5(open(file,'rb').read()).hexdigest()
+ if verbose:
+ print("HASH: ", hashMD5)
+ cursor.execute('''INSERT OR REPLACE INTO Handshake VALUES(?,?,?,?,?)''',
+ (bssid.upper(), mac.upper(), file, hashMD5, hashcat))
return int(0)
except sqlite3.IntegrityError as error:
print("setHashcat" + str(error))
@@ -494,8 +501,12 @@ def setHashcat(cursor, verbose, bssid, mac, file, hash):

def insertFile(cursor, verbose, file):
try:
- cursor.execute('''INSERT OR REPLACE INTO Files VALUES(?,?,?)''',
- (file, "False", datetime.datetime.now()))
+ # Get MD5
+ hash = hashlib.md5(open(file,'rb').read()).hexdigest()
+ if verbose:
+ print("HASH: ", hash)
+ cursor.execute('''INSERT OR REPLACE INTO Files VALUES(?,?,?,?)''',
+ (file, "False", hash, datetime.datetime.now()))
return int(0)
except sqlite3.IntegrityError as error:
print("insertFile" + str(error))
@@ -513,8 +524,14 @@ def setFileProcessed(cursor, verbose, file):


def checkFileProcessed(cursor, verbose, file):
+ if not os.path.exists(file):
+ if verbose:
+ print("File", file, "does not exist")
+ return int(0)

+ hash = hashlib.md5(open(file,'rb').read()).hexdigest()
try:
- sql = "SELECT file from Files where file = '" + file + "' AND processed = 'True';"
+ sql = "SELECT file from Files where hashMD5 = '" + hash + "' AND processed = 'True';"
cursor.execute(sql)

output = cursor.fetchall()
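The changes above key duplicate detection to the file's MD5 digest rather than its path. Here is a minimal sketch of exercising that check against the updated schema; the chunked md5_of_file helper, the database path, and the capture name are illustrative assumptions, not part of this PR:

import hashlib
import sqlite3


def md5_of_file(path, chunk_size=65536):
    # Hash in chunks so large .cap files are not read into memory at once.
    digest = hashlib.md5()
    with open(path, 'rb') as fh:
        for chunk in iter(lambda: fh.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest()


# Usage sketch: ask whether a capture was already processed, by content rather than by name.
conn = sqlite3.connect('wifi.sqlite')  # example path, not taken from the PR
cursor = conn.cursor()
cursor.execute("SELECT file FROM Files WHERE hashMD5 = ? AND processed = 'True'",
               (md5_of_file('scan-01.cap'),))  # example capture name
print("already processed" if cursor.fetchall() else "new capture")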
97 changes: 56 additions & 41 deletions oui.py
@@ -6,6 +6,7 @@
from shutil import copyfile
import csv
import os
+ import time


def load_vendors():
@@ -14,48 +15,62 @@ def load_vendors():
url = 'https://maclookup.app/downloads/csv-database/get-db'
# urlOld = 'https://macaddress.io/database/macaddress.io-db.json'#Notfree
oui = {}
+ script_path = os.path.dirname(os.path.abspath(__file__))
+ fileCSV = script_path + "/mac-vendors-export.csv"

- with tempfile.NamedTemporaryFile(delete=False) as tmp:
- print(tmp.name)
- try:
- import requests
- headersR = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; "
- "Win64; x64; rv:93.0) Gecko/20100101 Firefox/93.0",
- "Accept": "text/html,application/"
- "xhtml+xml,application/xml;"
- "q=0.9,image/avif,image/webp,*/*;q=0.8",
- "Accept-Language":
- "es-ES,es;q=0.8,en-US;q=0.5,en;q=0.3",
- "Accept-Encoding": "gzip, deflate",
- "Upgrade-Insecure-Requests": "1",
- "Sec-Fetch-Dest": "document",
- "Sec-Fetch-Mode": "navigate",
- "Sec-Fetch-Site": "none", "Sec-Fetch-User": "?1",
- "Te": "trailers"}
- response = requests.get(url, headers=headersR)
- tmp.write(response.content)
- tmp.seek(0)
- # error control and copy local (old file)
- except requests.exceptions.RequestException as e:
- # catastrophic error. bail.
- script_path = os.path.dirname(os.path.abspath(__file__))
- print(e)
- print("Copy local file")
- src = script_path + "/mac-vendors-export.csv"
- dst = tmp.name
- copyfile(src, dst)
- tmp.close()
- with open(tmp.name, encoding='cp850') as csv_file:
- csv_reader = csv.reader(csv_file, delimiter=',')
- line_count = 0
- for row in csv_reader:
- if line_count == 0:
- line_count += 1
- else:
- line_count += 1
- oui[row[0].replace(':', '')] = row[1]
- # print(f'Processed {line_count} lines.')
- os.unlink(tmp.name)
+ # Check if the vendor file was downloaded in the last 2 hours
+ redownload = True
+ if os.path.exists(fileCSV):
+ modification_time = os.path.getmtime(fileCSV)
+ current_time = time.time()
+ # Skip the download if the file was modified less than 2 hours ago
+ if current_time - modification_time < 2 * 60 * 60:
+ print("File was downloaded within the last 2 hours - SKIP")
+ redownload = False

+ if redownload: # download again if older than 2 h or the file doesn't exist
+ with tempfile.NamedTemporaryFile(delete=True) as tmp:
+ print(tmp.name)
+ try:
+ import requests
+ headersR = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; "
+ "Win64; x64; rv:93.0) Gecko/20100101 Firefox/93.0",
+ "Accept": "text/html,application/"
+ "xhtml+xml,application/xml;"
+ "q=0.9,image/avif,image/webp,*/*;q=0.8",
+ "Accept-Language":
+ "es-ES,es;q=0.8,en-US;q=0.5,en;q=0.3",
+ "Accept-Encoding": "gzip, deflate",
+ "Upgrade-Insecure-Requests": "1",
+ "Sec-Fetch-Dest": "document",
+ "Sec-Fetch-Mode": "navigate",
+ "Sec-Fetch-Site": "none", "Sec-Fetch-User": "?1",
+ "Te": "trailers"}
+ response = requests.get(url, headers=headersR)
+ tmp.write(response.content)
+ tmp.seek(0)

+ # if downloaded, update the saved copy
+ src = tmp.name
+ print("Copy new file to", fileCSV)
+ copyfile(src, fileCSV)
+ # error control and copy local (old file)
+ except requests.exceptions.RequestException as e:
+ # catastrophic error. bail.
+ print(e)
+ tmp.close()
+ # os.unlink(tmp.name)

+ with open(fileCSV, encoding='cp850') as csv_file:
+ csv_reader = csv.reader(csv_file, delimiter=',')
+ line_count = 0
+ for row in csv_reader:
+ if line_count == 0:
+ line_count += 1
+ else:
+ line_count += 1
+ oui[row[0].replace(':', '')] = row[1]
+ # print(f'Processed {line_count} lines.')

return oui

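The new load_vendors logic caches mac-vendors-export.csv next to the script and only re-downloads it when the local copy is older than two hours. A standalone sketch of that freshness check follows; the needs_refresh helper and the cache path are assumptions for illustration, not code from the PR:

import os
import time


def needs_refresh(path, max_age_seconds=2 * 60 * 60):
    # True when the cached CSV is missing or its mtime is older than the allowed age.
    if not os.path.exists(path):
        return True
    return time.time() - os.path.getmtime(path) >= max_age_seconds


# Usage sketch; the real script derives the path from __file__.
csv_path = "/opt/wifi_db/mac-vendors-export.csv"  # assumed location
if needs_refresh(csv_path):
    print("cached vendor CSV is stale or missing - download a fresh copy")
else:
    print("downloaded within the last 2 hours - skip")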
2 changes: 1 addition & 1 deletion requirements.txt
@@ -1,4 +1,4 @@
ftfy==6.1.1
- nest_asyncio==1.5.7
+ nest_asyncio==1.5.8
pyshark==0.6
requests==2.31.0
73 changes: 49 additions & 24 deletions wifi_db.py
@@ -17,7 +17,7 @@
# import nest_asyncio ; nest_asyncio.apply() ->
# Fix RuntimeError: This event loop is already running”

- VERSION = '1.2'
+ VERSION = '1.3-dev'



@@ -251,35 +251,60 @@ def process_capture(ouiMap, capture, database,
capture = capture[:-1]

captureFormat = capture + ".kismet.netxml"
- database_utils.insertFile(cursor, verbose, captureFormat)
- wifi_db_aircrack.parse_netxml(ouiMap, captureFormat,
- database, verbose)
- database_utils.setFileProcessed(cursor, verbose, captureFormat)

- captureFormat = capture + "kismet.csv"
- database_utils.insertFile(cursor, verbose, captureFormat)
- wifi_db_aircrack.parse_kismet_csv(ouiMap, captureFormat,
- database, verbose)
- database_utils.setFileProcessed(cursor, verbose, captureFormat)
+ print("Parsing file:", captureFormat)
+ if database_utils.checkFileProcessed(cursor,
+ verbose, captureFormat) == 1 and not force:
+ print("File","already processed\n")
+ else:
+ database_utils.insertFile(cursor, verbose, captureFormat)
+ wifi_db_aircrack.parse_netxml(ouiMap, captureFormat,
+ database, verbose)
+ database_utils.setFileProcessed(cursor, verbose, captureFormat)

+ captureFormat = capture + ".kismet.csv"
+ print("Parsing file:", captureFormat)
+ if database_utils.checkFileProcessed(cursor,
+ verbose, captureFormat) == 1 and not force:
+ print("File","already processed\n")
+ else:
+ database_utils.insertFile(cursor, verbose, captureFormat)
+ wifi_db_aircrack.parse_kismet_csv(ouiMap, captureFormat,
+ database, verbose)
+ database_utils.setFileProcessed(cursor, verbose, captureFormat)

captureFormat = capture + ".csv"
- database_utils.insertFile(cursor, verbose, captureFormat)
- wifi_db_aircrack.parse_csv(ouiMap, captureFormat,
- database, verbose)
- database_utils.setFileProcessed(cursor, verbose, captureFormat)
+ print("Parsing file:", captureFormat)
+ if database_utils.checkFileProcessed(cursor,
+ verbose, captureFormat) == 1 and not force:
+ print("File","already processed\n")
+ else:
+ database_utils.insertFile(cursor, verbose, captureFormat)
+ wifi_db_aircrack.parse_csv(ouiMap, captureFormat,
+ database, verbose)
+ database_utils.setFileProcessed(cursor, verbose, captureFormat)

captureFormat = capture + ".log.csv"
- database_utils.insertFile(cursor, verbose, captureFormat)
- wifi_db_aircrack.parse_log_csv(ouiMap, captureFormat,
- database, verbose, fake_lat,
- fake_lon)
- database_utils.setFileProcessed(cursor, verbose, captureFormat)
+ print("Parsing file:", captureFormat)
+ if database_utils.checkFileProcessed(cursor,
+ verbose, captureFormat) == 1 and not force:
+ print("File","already processed\n")
+ else:
+ database_utils.insertFile(cursor, verbose, captureFormat)
+ wifi_db_aircrack.parse_log_csv(ouiMap, captureFormat,
+ database, verbose, fake_lat,
+ fake_lon)
+ database_utils.setFileProcessed(cursor, verbose, captureFormat)

captureFormat = capture + ".cap"
- database_utils.insertFile(cursor, verbose, captureFormat)
- wifi_db_aircrack.parse_cap(captureFormat, database, verbose,
- hcxpcapngtool, tshark)
- database_utils.setFileProcessed(cursor, verbose, captureFormat)
+ print("Parsing file:", captureFormat)
+ if database_utils.checkFileProcessed(cursor,
+ verbose, captureFormat) == 1 and not force:
+ print("File","already processed\n")
+ else:
+ database_utils.insertFile(cursor, verbose, captureFormat)
+ wifi_db_aircrack.parse_cap(captureFormat, database, verbose,
+ hcxpcapngtool, tshark)
+ database_utils.setFileProcessed(cursor, verbose, captureFormat)


if __name__ == "__main__":
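The same guard (check the hash, skip if already processed unless force is set, otherwise insert, parse, and mark processed) is repeated above for each capture format. A sketch of expressing that pattern once, reusing the database_utils functions shown in this PR; the parse_once helper and the lambda wrapper are illustrative only:

import database_utils  # wifi_db module providing the helpers used below


def parse_once(cursor, verbose, force, path, parser):
    # Run `parser` only when the file's hash is not yet marked processed (or force is set).
    print("Parsing file:", path)
    if database_utils.checkFileProcessed(cursor, verbose, path) == 1 and not force:
        print("File already processed\n")
        return
    database_utils.insertFile(cursor, verbose, path)
    parser(path)
    database_utils.setFileProcessed(cursor, verbose, path)


# Usage sketch inside process_capture (the lambda closes over the parser's extra arguments):
# parse_once(cursor, verbose, force, capture + ".kismet.netxml",
#            lambda p: wifi_db_aircrack.parse_netxml(ouiMap, p, database, verbose))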
7 changes: 4 additions & 3 deletions wifi_db_database.sql
@@ -14,7 +14,6 @@ CREATE TABLE IF NOT EXISTS AP
mfpc BOOLEAN,
mfpr BOOLEAN,
firstTimeSeen timestamp,

CONSTRAINT Key1 PRIMARY KEY (bssid)
);

@@ -99,11 +98,12 @@ CREATE TABLE IF NOT EXISTS Handshake
bssid TEXT NOT NULL,
mac TEXT NOT NULL,
file TEXT NOT NULL,
+ hashMD5 TEXT NOT NULL,
hashcat TEXT,
CONSTRAINT Key6 PRIMARY KEY (bssid,mac,file)
CONSTRAINT FRelationship4 FOREIGN KEY (bssid) REFERENCES AP (bssid) ON UPDATE CASCADE ON DELETE CASCADE,
CONSTRAINT FRelationship5 FOREIGN KEY (mac) REFERENCES Client (mac) ON UPDATE CASCADE ON DELETE CASCADE,
- CONSTRAINT FRelationship8 FOREIGN KEY (file) REFERENCES Files (file) ON UPDATE CASCADE ON DELETE CASCADE
+ CONSTRAINT FRelationship8 FOREIGN KEY (file,hashMD5) REFERENCES Files (file,hashMD5) ON UPDATE CASCADE ON DELETE CASCADE
);

CREATE TABLE IF NOT EXISTS Identity
@@ -122,6 +122,7 @@ CREATE TABLE IF NOT EXISTS Files
(
file TEXT NOT NULL,
processed BOOLEAN,
+ hashMD5 TEXT NOT NULL,
time datetime,
- CONSTRAINT Key8 PRIMARY KEY (file)
+ CONSTRAINT Key8 PRIMARY KEY (file,hashMD5)
);
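A small self-contained sketch of the reworked schema pieces, showing how a Files row and a Handshake row line up on the new hashMD5 column. The table definitions are trimmed to the columns shown above (foreign keys omitted) and the sample values are made up:

import datetime
import hashlib
import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript("""
CREATE TABLE Files (
    file TEXT NOT NULL,
    processed BOOLEAN,
    hashMD5 TEXT NOT NULL,
    time datetime,
    CONSTRAINT Key8 PRIMARY KEY (file, hashMD5)
);
CREATE TABLE Handshake (
    bssid TEXT NOT NULL,
    mac TEXT NOT NULL,
    file TEXT NOT NULL,
    hashMD5 TEXT NOT NULL,
    hashcat TEXT,
    CONSTRAINT Key6 PRIMARY KEY (bssid, mac, file)
);
""")

digest = hashlib.md5(b"example capture bytes").hexdigest()  # stand-in for a real .cap file
conn.execute("INSERT INTO Files VALUES (?,?,?,?)",
             ("scan-01.cap", "False", digest, datetime.datetime.now().isoformat()))
conn.execute("INSERT INTO Handshake VALUES (?,?,?,?,?)",
             ("AA:BB:CC:DD:EE:FF", "11:22:33:44:55:66", "scan-01.cap", digest, ""))
print(conn.execute("SELECT file, hashMD5, processed FROM Files").fetchall())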