diff --git a/.gitignore b/.gitignore
index 78a43cf..b5353c1 100644
--- a/.gitignore
+++ b/.gitignore
@@ -7,3 +7,4 @@ __pycache__/
 *.pyc
 .flake8
 .vscode/
+*.sqlite-journal
diff --git a/database_utils.py b/database_utils.py
index dfed362..7766766 100644
--- a/database_utils.py
+++ b/database_utils.py
@@ -6,6 +6,7 @@
 import random
 import string
 import datetime
+import hashlib


 def connectDatabase(name, verbose):
@@ -357,6 +358,9 @@ def insertHandshake(cursor, verbose, bssid, mac, file):
         # Insert file
         error += insertFile(cursor, verbose, file)

+        # Get file hash MD5
+        hash = hashlib.md5(open(file,'rb').read()).hexdigest()
+
         # insertHandshake Client and AP CONSTRAINT
         ssid = ""
         manuf = ""
@@ -382,8 +386,8 @@ def insertHandshake(cursor, verbose, bssid, mac, file):
             # print(row[5].replace(' ', ''))

         cursor.execute(
-            '''INSERT INTO handshake VALUES(?,?,?,?)''',
-            (bssid.upper(), mac.upper(), file, ""))
+            '''INSERT INTO handshake VALUES(?,?,?,?,?)''',
+            (bssid.upper(), mac.upper(), file, hash, ""))
         return int(error)
     except sqlite3.IntegrityError as error:
         # errors += 1
@@ -482,10 +486,13 @@ def insertSeenAP(cursor, verbose, bssid, time, tool, signal_rsi,
         return int(1)


-def setHashcat(cursor, verbose, bssid, mac, file, hash):
+def setHashcat(cursor, verbose, bssid, mac, file, hashcat):
     try:
-        cursor.execute('''INSERT OR REPLACE INTO Handshake VALUES(?,?,?,?)''',
-                       (bssid.upper(), mac.upper(), file, hash))
+        hashMD5 = hashlib.md5(open(file,'rb').read()).hexdigest()
+        if verbose:
+            print("HASH: ", hashMD5)
+        cursor.execute('''INSERT OR REPLACE INTO Handshake VALUES(?,?,?,?,?)''',
+                       (bssid.upper(), mac.upper(), file, hashMD5, hashcat))
         return int(0)
     except sqlite3.IntegrityError as error:
         print("setHashcat" + str(error))
@@ -494,8 +501,12 @@

 def insertFile(cursor, verbose, file):
     try:
-        cursor.execute('''INSERT OR REPLACE INTO Files VALUES(?,?,?)''',
-                       (file, "False", datetime.datetime.now()))
+        # Get MD5
+        hash = hashlib.md5(open(file,'rb').read()).hexdigest()
+        if verbose:
+            print("HASH: ", hash)
+        cursor.execute('''INSERT OR REPLACE INTO Files VALUES(?,?,?,?)''',
+                       (file, "False", hash, datetime.datetime.now()))
         return int(0)
     except sqlite3.IntegrityError as error:
         print("insertFile" + str(error))
@@ -513,8 +524,14 @@ def setFileProcessed(cursor, verbose, file):


 def checkFileProcessed(cursor, verbose, file):
+    if not os.path.exists(file):
+        if verbose:
+            print("File", file, "does not exist")
+        return int(0)
+
+    hash = hashlib.md5(open(file,'rb').read()).hexdigest()
     try:
-        sql = "SELECT file from Files where file = '" + file + "' AND processed = 'True';"
+        sql = "SELECT file from Files where hashMD5 = '" + hash + "' AND processed = 'True';"
         cursor.execute(sql)
         output = cursor.fetchall()

diff --git a/oui.py b/oui.py
index 81a8504..0e75ba9 100644
--- a/oui.py
+++ b/oui.py
@@ -6,6 +6,7 @@
 from shutil import copyfile
 import csv
 import os
+import time


 def load_vendors():
@@ -14,48 +15,62 @@ def load_vendors():
     url = 'https://maclookup.app/downloads/csv-database/get-db'
     # urlOld = 'https://macaddress.io/database/macaddress.io-db.json'#Notfree
     oui = {}
+    script_path = os.path.dirname(os.path.abspath(__file__))
+    fileCSV = script_path + "/mac-vendors-export.csv"

-    with tempfile.NamedTemporaryFile(delete=False) as tmp:
-        print(tmp.name)
-        try:
-            import requests
-            headersR = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; "
-                        "Win64; x64; rv:93.0) Gecko/20100101 Firefox/93.0",
-                        "Accept": "text/html,application/"
-                        "xhtml+xml,application/xml;"
"q=0.9,image/avif,image/webp,*/*;q=0.8", - "Accept-Language": - "es-ES,es;q=0.8,en-US;q=0.5,en;q=0.3", - "Accept-Encoding": "gzip, deflate", - "Upgrade-Insecure-Requests": "1", - "Sec-Fetch-Dest": "document", - "Sec-Fetch-Mode": "navigate", - "Sec-Fetch-Site": "none", "Sec-Fetch-User": "?1", - "Te": "trailers"} - response = requests.get(url, headers=headersR) - tmp.write(response.content) - tmp.seek(0) - # error control and copy local (old file) - except requests.exceptions.RequestException as e: - # catastrophic error. bail. - script_path = os.path.dirname(os.path.abspath(__file__)) - print(e) - print("Copy local file") - src = script_path + "/mac-vendors-export.csv" - dst = tmp.name - copyfile(src, dst) - tmp.close() - with open(tmp.name, encoding='cp850') as csv_file: - csv_reader = csv.reader(csv_file, delimiter=',') - line_count = 0 - for row in csv_reader: - if line_count == 0: - line_count += 1 - else: - line_count += 1 - oui[row[0].replace(':', '')] = row[1] - # print(f'Processed {line_count} lines.') - os.unlink(tmp.name) + # Check if file downloaded in last 2h + redownload = True + if os.path.exists(fileCSV): + modification_time = os.path.getmtime(fileCSV) + current_time = time.time() + # Check if the file was modified more than 24 hours ago + if current_time - modification_time < 2 * 60 * 60: + print("File was download within the last 2 hours - SKIP") + redownload = False + + if redownload: # download again if >2h or file dont exists + with tempfile.NamedTemporaryFile(delete=True) as tmp: + print(tmp.name) + try: + import requests + headersR = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; " + "Win64; x64; rv:93.0) Gecko/20100101 Firefox/93.0", + "Accept": "text/html,application/" + "xhtml+xml,application/xml;" + "q=0.9,image/avif,image/webp,*/*;q=0.8", + "Accept-Language": + "es-ES,es;q=0.8,en-US;q=0.5,en;q=0.3", + "Accept-Encoding": "gzip, deflate", + "Upgrade-Insecure-Requests": "1", + "Sec-Fetch-Dest": "document", + "Sec-Fetch-Mode": "navigate", + "Sec-Fetch-Site": "none", "Sec-Fetch-User": "?1", + "Te": "trailers"} + response = requests.get(url, headers=headersR) + tmp.write(response.content) + tmp.seek(0) + + # if downloaded update the saved + src = tmp.name + print("Copy new file to", fileCSV) + copyfile(src, fileCSV) + # error control and copy local (old file) + except requests.exceptions.RequestException as e: + # catastrophic error. bail. 
+                print(e)
+                tmp.close()
+                #os.unlink(tmp.name)
+
+    with open(fileCSV, encoding='cp850') as csv_file:
+        csv_reader = csv.reader(csv_file, delimiter=',')
+        line_count = 0
+        for row in csv_reader:
+            if line_count == 0:
+                line_count += 1
+            else:
+                line_count += 1
+                oui[row[0].replace(':', '')] = row[1]
+                # print(f'Processed {line_count} lines.')
     return oui
diff --git a/requirements.txt b/requirements.txt
index 0645372..f3cffc1 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,4 +1,4 @@
 ftfy==6.1.1
-nest_asyncio==1.5.7
+nest_asyncio==1.5.8
 pyshark==0.6
 requests==2.31.0
diff --git a/wifi_db.py b/wifi_db.py
index 3b2f716..bf59588 100755
--- a/wifi_db.py
+++ b/wifi_db.py
@@ -17,7 +17,7 @@
 # import nest_asyncio ; nest_asyncio.apply() ->
 # Fix RuntimeError: This event loop is already running”

-VERSION = '1.2'
+VERSION = '1.3-dev'



@@ -251,35 +251,60 @@ def process_capture(ouiMap, capture, database,
            capture = capture[:-1]

        captureFormat = capture + ".kismet.netxml"
-        database_utils.insertFile(cursor, verbose, captureFormat)
-        wifi_db_aircrack.parse_netxml(ouiMap, captureFormat,
-                                      database, verbose)
-        database_utils.setFileProcessed(cursor, verbose, captureFormat)
-
-        captureFormat = capture + "kismet.csv"
-        database_utils.insertFile(cursor, verbose, captureFormat)
-        wifi_db_aircrack.parse_kismet_csv(ouiMap, captureFormat,
-                                          database, verbose)
-        database_utils.setFileProcessed(cursor, verbose, captureFormat)
+        print("Parsing file:", captureFormat)
+        if database_utils.checkFileProcessed(cursor,
+                                             verbose, captureFormat) == 1 and not force:
+            print("File","already processed\n")
+        else:
+            database_utils.insertFile(cursor, verbose, captureFormat)
+            wifi_db_aircrack.parse_netxml(ouiMap, captureFormat,
+                                          database, verbose)
+            database_utils.setFileProcessed(cursor, verbose, captureFormat)
+
+        captureFormat = capture + ".kismet.csv"
+        print("Parsing file:", captureFormat)
+        if database_utils.checkFileProcessed(cursor,
+                                             verbose, captureFormat) == 1 and not force:
+            print("File","already processed\n")
+        else:
+            database_utils.insertFile(cursor, verbose, captureFormat)
+            wifi_db_aircrack.parse_kismet_csv(ouiMap, captureFormat,
+                                              database, verbose)
+            database_utils.setFileProcessed(cursor, verbose, captureFormat)

        captureFormat = capture + ".csv"
-        database_utils.insertFile(cursor, verbose, captureFormat)
-        wifi_db_aircrack.parse_csv(ouiMap, captureFormat,
-                                   database, verbose)
-        database_utils.setFileProcessed(cursor, verbose, captureFormat)
+        print("Parsing file:", captureFormat)
+        if database_utils.checkFileProcessed(cursor,
+                                             verbose, captureFormat) == 1 and not force:
+            print("File","already processed\n")
+        else:
+            database_utils.insertFile(cursor, verbose, captureFormat)
+            wifi_db_aircrack.parse_csv(ouiMap, captureFormat,
+                                       database, verbose)
+            database_utils.setFileProcessed(cursor, verbose, captureFormat)

        captureFormat = capture + ".log.csv"
-        database_utils.insertFile(cursor, verbose, captureFormat)
-        wifi_db_aircrack.parse_log_csv(ouiMap, captureFormat,
-                                       database, verbose, fake_lat,
-                                       fake_lon)
-        database_utils.setFileProcessed(cursor, verbose, captureFormat)
+        print("Parsing file:", captureFormat)
+        if database_utils.checkFileProcessed(cursor,
+                                             verbose, captureFormat) == 1 and not force:
+            print("File","already processed\n")
+        else:
+            database_utils.insertFile(cursor, verbose, captureFormat)
+            wifi_db_aircrack.parse_log_csv(ouiMap, captureFormat,
+                                           database, verbose, fake_lat,
+                                           fake_lon)
+            database_utils.setFileProcessed(cursor, verbose, captureFormat)

        captureFormat = capture + ".cap"
-        database_utils.insertFile(cursor, verbose, captureFormat)
-        wifi_db_aircrack.parse_cap(captureFormat, database, verbose,
-                                   hcxpcapngtool, tshark)
-        database_utils.setFileProcessed(cursor, verbose, captureFormat)
+        print("Parsing file:", captureFormat)
+        if database_utils.checkFileProcessed(cursor,
+                                             verbose, captureFormat) == 1 and not force:
+            print("File","already processed\n")
+        else:
+            database_utils.insertFile(cursor, verbose, captureFormat)
+            wifi_db_aircrack.parse_cap(captureFormat, database, verbose,
+                                       hcxpcapngtool, tshark)
+            database_utils.setFileProcessed(cursor, verbose, captureFormat)


 if __name__ == "__main__":
diff --git a/wifi_db_database.sql b/wifi_db_database.sql
index 3d3f3dc..d0368ad 100644
--- a/wifi_db_database.sql
+++ b/wifi_db_database.sql
@@ -14,7 +14,6 @@ CREATE TABLE IF NOT EXISTS AP
     mfpc BOOLEAN,
     mfpr BOOLEAN,
     firstTimeSeen timestamp,
-
     CONSTRAINT Key1 PRIMARY KEY (bssid)
 );

@@ -99,11 +98,12 @@ CREATE TABLE IF NOT EXISTS Handshake
     bssid TEXT NOT NULL,
     mac TEXT NOT NULL,
     file TEXT NOT NULL,
+    hashMD5 TEXT NOT NULL,
     hashcat TEXT,
     CONSTRAINT Key6 PRIMARY KEY (bssid,mac,file)
     CONSTRAINT FRelationship4 FOREIGN KEY (bssid) REFERENCES AP (bssid) ON UPDATE CASCADE ON DELETE CASCADE,
     CONSTRAINT FRelationship5 FOREIGN KEY (mac) REFERENCES Client (mac) ON UPDATE CASCADE ON DELETE CASCADE,
-    CONSTRAINT FRelationship8 FOREIGN KEY (file) REFERENCES Files (file) ON UPDATE CASCADE ON DELETE CASCADE
+    CONSTRAINT FRelationship8 FOREIGN KEY (file,hashMD5) REFERENCES Files (file,hashMD5) ON UPDATE CASCADE ON DELETE CASCADE
 );

 CREATE TABLE IF NOT EXISTS Identity
@@ -122,6 +122,7 @@ CREATE TABLE IF NOT EXISTS Files
 (
     file TEXT NOT NULL,
     processed BOOLEAN,
+    hashMD5 TEXT NOT NULL,
     time datetime,
-    CONSTRAINT Key8 PRIMARY KEY (file)
+    CONSTRAINT Key8 PRIMARY KEY (file,hashMD5)
 );
\ No newline at end of file
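Note on the new duplicate detection: with this change a capture file is identified by its MD5 content hash (Files.hashMD5) rather than only by its path, so a file that was already parsed is skipped on re-import unless the force option is used. Below is a minimal standalone sketch of that lookup, assuming the Files schema above; the demo_* names are illustrative and not part of wifi_db, and the query is written in parameterized form rather than the string concatenation used in checkFileProcessed.

    import hashlib
    import os
    import sqlite3

    def demo_already_processed(cursor, path):
        # Mirror checkFileProcessed: a missing file is treated as not processed
        if not os.path.exists(path):
            return False
        # Hash the file content, as insertFile/checkFileProcessed now do
        md5 = hashlib.md5(open(path, 'rb').read()).hexdigest()
        # Look the hash up in Files, honouring the processed flag
        cursor.execute(
            "SELECT file FROM Files WHERE hashMD5 = ? AND processed = 'True'",
            (md5,))
        return cursor.fetchone() is not None

    # Example against an in-memory database with the new Files layout
    con = sqlite3.connect(":memory:")
    con.execute("CREATE TABLE Files (file TEXT NOT NULL, processed BOOLEAN, "
                "hashMD5 TEXT NOT NULL, time datetime, "
                "CONSTRAINT Key8 PRIMARY KEY (file,hashMD5))")
    print(demo_already_processed(con.cursor(), "capture-01.cap"))  # False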