diff --git a/scripts/artifacts/Cast.py b/scripts/artifacts/Cast.py index af9cf8d6..b19ad2a4 100755 --- a/scripts/artifacts/Cast.py +++ b/scripts/artifacts/Cast.py @@ -1,3 +1,18 @@ +__artifacts_v2__ = { + "Cast": { + "name": "Cast", + "description": "Parses Cast device information", + "author": "@deagler4n6", + "version": "0.0.2", + "date": "2021-01-11", + "requirements": "none", + "category": "Cast", + "notes": "2023-10-12 - Updated by @KevinPagano3", + "paths": ('*/com.google.android.gms/databases/cast.db*',), + "function": "get_Cast" + } +} + import sqlite3 import textwrap @@ -6,50 +21,59 @@ def get_Cast(files_found, report_folder, seeker, wrap_text, time_offset): - file_found = str(files_found[0]) - db = open_sqlite_db_readonly(file_found) - cursor = db.cursor() - cursor.execute(''' - SELECT - case last_published_timestamp_millis - when 0 then '' - else datetime(last_published_timestamp_millis/1000, 'unixepoch') - end as "Last Published Timestamp", - device_id, - capabilities, - device_version, - friendly_name, - model_name, - receiver_metrics_id, - service_instance_name, - service_address, - service_port, - supported_criteria, - rcn_enabled_status, - hotspot_bssid, - cloud_devcie_id, - case last_discovered_timestamp_millis - when 0 then '' - else datetime(last_discovered_timestamp_millis/1000, 'unixepoch') - end as "Last Discovered Timestamp", - case last_discovered_by_ble_timestamp_millis - when 0 then '' - else datetime(last_discovered_by_ble_timestamp_millis/1000, 'unixepoch') - end as "Last Discovered By BLE Timestamp" - from DeviceInfo - ''') + data_list = [] + + for file_found in files_found: + file_found = str(file_found) + + if file_found.endswith('cast.db'): + db = open_sqlite_db_readonly(file_found) + cursor = db.cursor() + cursor.execute(''' + SELECT + case last_published_timestamp_millis + when 0 then '' + else datetime(last_published_timestamp_millis/1000, 'unixepoch') + end as "Last Published Timestamp", + device_id, + capabilities, + 
device_version, + friendly_name, + model_name, + receiver_metrics_id, + service_instance_name, + service_address, + service_port, + supported_criteria, + rcn_enabled_status, + hotspot_bssid, + cloud_devcie_id, + case last_discovered_timestamp_millis + when 0 then '' + else datetime(last_discovered_timestamp_millis/1000, 'unixepoch') + end as "Last Discovered Timestamp", + case last_discovered_by_ble_timestamp_millis + when 0 then '' + else datetime(last_discovered_by_ble_timestamp_millis/1000, 'unixepoch') + end as "Last Discovered By BLE Timestamp" + from DeviceInfo + ''') - all_rows = cursor.fetchall() - usageentries = len(all_rows) - if usageentries > 0: + all_rows = cursor.fetchall() + usageentries = len(all_rows) + if usageentries > 0: + for row in all_rows: + data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10],row[11],row[12],row[13],row[14],row[15],file_found)) + db.close() + else: + continue # Skip all other files + + if data_list: report = ArtifactHtmlReport('Cast') report.start_artifact_report(report_folder, 'Cast') report.add_script() - data_headers = ('Last Published Timestamp','Device ID (SSDP UDN)','Capabilities','Device Version','Device Friendly Name','Device Model Name','Receiver Metrics ID','Service Instance Name','Device IP Address','Device Port','Supported Criteria','RCN Enabled Status','Hotspot BSSID','Cloud Device ID','Last Discovered Timestamp','Last Discovered By BLE Timestamp') - data_list = [] - for row in all_rows: - data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10],row[11],row[12],row[13],row[14],row[15])) - + data_headers = ('Last Published Timestamp','Device ID (SSDP UDN)','Capabilities','Device Version','Device Friendly Name','Device Model Name','Receiver Metrics ID','Service Instance Name','Device IP Address','Device Port','Supported Criteria','RCN Enabled Status','Hotspot BSSID','Cloud Device ID','Last Discovered Timestamp','Last Discovered By 
BLE Timestamp','Source') + report.write_artifact_data_table(data_headers, data_list, file_found) report.end_artifact_report() @@ -59,13 +83,4 @@ def get_Cast(files_found, report_folder, seeker, wrap_text, time_offset): tlactivity = f'Cast' timeline(report_folder, tlactivity, data_list, data_headers) else: - logfunc('No Cast data available') - - db.close() - -__artifacts__ = { - "Cast": ( - "Cast", - ('*/com.google.android.gms/databases/cast.db'), - get_Cast) -} + logfunc('No Cast data available') \ No newline at end of file diff --git a/scripts/artifacts/SimpleStorage_applaunch.py b/scripts/artifacts/SimpleStorage_applaunch.py index c8ba423c..a0809304 100644 --- a/scripts/artifacts/SimpleStorage_applaunch.py +++ b/scripts/artifacts/SimpleStorage_applaunch.py @@ -1,8 +1,17 @@ -# Module Description: Parses SimpleStorage for application launch -# Author: @KevinPagano3 (Twitter) / stark4n6@infosec.exchange (Mastodon) -# Date: 2022-12-13 -# Artifact version: 0.0.1 -# Much thanks to Josh Hickman (@josh_hickman1) for the research, testing and query +__artifacts_v2__ = { + "SimpleStorage_applaunch": { + "name": "SimpleStorage", + "description": "Parses SimpleStorage for application launch", + "author": "@KevinPagano3", + "version": "0.0.1", + "date": "2022-12-13", + "requirements": "none", + "category": "Android System Intelligence", + "notes": "Much thanks to Josh Hickman (@josh_hickman1) for the research, testing and query", + "paths": ('*/com.google.android.as/databases/SimpleStorage*',), + "function": "get_SimpleStorage_applaunch" + } +} import os import sqlite3 @@ -14,63 +23,59 @@ def get_SimpleStorage_applaunch(files_found, report_folder, seeker, wrap_text, time_offset): + data_list = [] + for file_found in files_found: file_name = str(file_found) - if not os.path.basename(file_name) == 'SimpleStorage': # skip -journal and other files - continue + if file_name.endswith('SimpleStorage'): # skip -journal and other files - db = open_sqlite_db_readonly(file_name) - - 
cursor = db.cursor() - cursor.execute(''' - SELECT DISTINCT - datetime(EchoAppLaunchMetricsEvents.timestampMillis/1000,'unixepoch') AS "Time App Launched", - EchoAppLaunchMetricsEvents.packageName AS "App", - CASE - WHEN EchoAppLaunchMetricsEvents.launchLocationId=1 THEN "Home Screen" - WHEN EchoAppLaunchMetricsEvents.launchLocationId=2 THEN "Suggested Apps (Home Screen)" - WHEN EchoAppLaunchMetricsEvents.launchLocationId=4 THEN "App Drawer" - WHEN EchoAppLaunchMetricsEvents.launchLocationId=7 THEN "Suggested Apps (App Drawer)" - WHEN EchoAppLaunchMetricsEvents.launchLocationId=8 THEN "Search (Top of App Drawer/GSB)" - WHEN EchoAppLaunchMetricsEvents.launchLocationId=12 THEN "Recent Apps/Multi-Tasking Menu" - WHEN EchoAppLaunchMetricsEvents.launchLocationId=1000 THEN "Notification" - ELSE EchoAppLaunchMetricsEvents.launchLocationId - END AS "Launched From" - FROM EchoAppLaunchMetricsEvents - ''') + db = open_sqlite_db_readonly(file_name) + cursor = db.cursor() + cursor.execute(''' + SELECT DISTINCT + datetime(EchoAppLaunchMetricsEvents.timestampMillis/1000,'unixepoch') AS "Time App Launched", + EchoAppLaunchMetricsEvents.packageName AS "App", + CASE + WHEN EchoAppLaunchMetricsEvents.launchLocationId=1 THEN "Home Screen" + WHEN EchoAppLaunchMetricsEvents.launchLocationId=2 THEN "Suggested Apps (Home Screen)" + WHEN EchoAppLaunchMetricsEvents.launchLocationId=4 THEN "App Drawer" + WHEN EchoAppLaunchMetricsEvents.launchLocationId=7 THEN "Suggested Apps (App Drawer)" + WHEN EchoAppLaunchMetricsEvents.launchLocationId=8 THEN "Search (Top of App Drawer/GSB)" + WHEN EchoAppLaunchMetricsEvents.launchLocationId=12 THEN "Recent Apps/Multi-Tasking Menu" + WHEN EchoAppLaunchMetricsEvents.launchLocationId=1000 THEN "Notification" + ELSE EchoAppLaunchMetricsEvents.launchLocationId + END AS "Launched From" + FROM EchoAppLaunchMetricsEvents + ''') - all_rows = cursor.fetchall() - usageentries = len(all_rows) - if usageentries > 0: - description = '' - report = 
ArtifactHtmlReport('SimpleStorage - App Launch') - report.start_artifact_report(report_folder, 'SimpleStorage - App Launch') - report.add_script() - data_headers = ('App Launched Timestamp','App Name','Launched From') - data_list = [] - data_list_stripped = [] - for row in all_rows: - - data_list.append((row[0],row[1],row[2])) - - report.write_artifact_data_table(data_headers, data_list, file_found) - report.end_artifact_report() - - tsvname = f'SimpleStorage - App Launch' - tsv(report_folder, data_headers, data_list, tsvname) - - tlactivity = f'SimpleStorage - App Launch' - timeline(report_folder, tlactivity, data_list, data_headers) - + all_rows = cursor.fetchall() + usageentries = len(all_rows) + if usageentries > 0: + for row in all_rows: + data_list.append((row[0],row[1],row[2], file_found)) + db.close() + else: - logfunc('SimpleStorage - App Launch data available') + continue # Skip all other files + + if data_list: + description = '' + report = ArtifactHtmlReport('SimpleStorage - App Launch') + report.start_artifact_report(report_folder, 'SimpleStorage - App Launch') + report.add_script() + data_headers = ('App Launched Timestamp','App Name','Launched From', 'Source') + + report.write_artifact_data_table(data_headers, data_list, file_found) + report.end_artifact_report() + + tsvname = f'SimpleStorage - App Launch' + tsv(report_folder, data_headers, data_list, tsvname) + + tlactivity = f'SimpleStorage - App Launch' + timeline(report_folder, tlactivity, data_list, data_headers) + + else: + logfunc('No SimpleStorage - App Launch data available') - db.close() -__artifacts__ = { - "SimpleStorage_applaunch": ( - "Android System Intelligence", - ('*/com.google.android.as/databases/SimpleStorage*'), - get_SimpleStorage_applaunch) -} diff --git a/scripts/artifacts/Turbo_AppUsage.py b/scripts/artifacts/Turbo_AppUsage.py index 83feebb1..3b7b8e66 100755 --- a/scripts/artifacts/Turbo_AppUsage.py +++ b/scripts/artifacts/Turbo_AppUsage.py @@ -1,3 +1,18 @@ +__artifacts_v2__ = 
{ + "Turbo_AppUsage": { + "name": "Turbo_AppUsage", + "description": "Parses application usage via Device Health Services", + "author": "@KevinPagano3", + "version": "0.0.1", + "date": "2021-06-29", + "requirements": "none", + "category": "Device Health Services", + "notes": "", + "paths": ('*/com.google.android.apps.turbo/shared_prefs/app_usage_stats.xml',), + "function": "get_Turbo_AppUsage" + } +} + import datetime import struct import xml.etree.ElementTree as ET @@ -33,7 +48,7 @@ def get_Turbo_AppUsage(files_found, report_folder, seeker, wrap_text, time_offse report = ArtifactHtmlReport('Turbo - Application Usage') report.start_artifact_report(report_folder, f'Turbo - Application Usage') report.add_script() - data_headers = ('App Launch Timestamp','App Name','File Path') + data_headers = ('App Launch Timestamp','App Name','Source') report.write_artifact_data_table(data_headers, data_list, file_found) report.end_artifact_report() @@ -45,9 +60,3 @@ def get_Turbo_AppUsage(files_found, report_folder, seeker, wrap_text, time_offse else: logfunc(f'No Turbo - Application Usage data available') -__artifacts__ = { - "Turbo_AppUsage": ( - "Device Health Services", - ('*/com.google.android.apps.turbo/shared_prefs/app_usage_stats.xml'), - get_Turbo_AppUsage) -} \ No newline at end of file diff --git a/scripts/artifacts/Turbo_Battery.py b/scripts/artifacts/Turbo_Battery.py index 78e83380..4145a33e 100755 --- a/scripts/artifacts/Turbo_Battery.py +++ b/scripts/artifacts/Turbo_Battery.py @@ -1,6 +1,19 @@ +__artifacts_v2__ = { + "Turbo_Battery": { + "name": "Turbo_Battery", + "description": "Parses phone battery and Bluetooth device battery history via Device Health Services", + "author": "@KevinPagano3", + "version": "0.0.1", + "date": "2021-06-29", + "requirements": "none", + "category": "Device Health Services", + "notes": "", + "paths": ('*/com.google.android.apps.turbo/databases/turbo.db*','*/com.google.android.apps.turbo/databases/bluetooth.db*'), + "function": "get_Turbo_Battery" + } +} + import sqlite3 -import io 
-import os import textwrap from packaging import version @@ -13,106 +26,105 @@ def get_Turbo_Battery(files_found, report_folder, seeker, wrap_text, time_offset source_file_turbo = '' bluetooth_db = '' turbo_db = '' - + data_list_battery = [] + data_list_bluetooth = [] + for file_found in files_found: - - file_name = str(file_found) - if file_name.lower().endswith('turbo.db'): - turbo_db = str(file_found) - source_file_turbo = file_found.replace(seeker.directory, '') + file_found = str(file_found) + if file_found.lower().endswith('turbo.db'): + turbo_db = str(file_found) + source_file_turbo = file_found.replace(seeker.directory, '') + + db = open_sqlite_db_readonly(turbo_db) + cursor = db.cursor() + cursor.execute(''' + select + case timestamp_millis + when 0 then '' + else datetime(timestamp_millis/1000,'unixepoch') + End as D_T, + battery_level, + case charge_type + when 0 then '' + when 1 then 'Charging Rapidly' + when 2 then 'Charging Slowly' + when 3 then 'Charging Wirelessly' + End as C_Type, + case battery_saver + when 2 then '' + when 1 then 'Enabled' + End as B_Saver, + timezone + from battery_event + ''') - if file_name.lower().endswith('bluetooth.db'): - bluetooth_db = str(file_found) - source_file_bluetooth = file_found.replace(seeker.directory, '') - - db = open_sqlite_db_readonly(turbo_db) - cursor = db.cursor() - cursor.execute(''' - select - case timestamp_millis - when 0 then '' - else datetime(timestamp_millis/1000,'unixepoch') - End as D_T, - battery_level, - case charge_type - when 0 then '' - when 1 then 'Charging Rapidly' - when 2 then 'Charging Slowly' - when 3 then 'Charging Wirelessly' - End as C_Type, - case battery_saver - when 2 then '' - when 1 then 'Enabled' - End as B_Saver, - timezone - from battery_event - ''') + all_rows = cursor.fetchall() + usageentries = len(all_rows) + if usageentries > 0: + for row in all_rows: + data_list_battery.append((row[0],row[1],row[2],row[3],row[4],file_found)) + + db.close() - all_rows = 
cursor.fetchall() - usageentries = len(all_rows) - if usageentries > 0: + if file_found.lower().endswith('bluetooth.db'): + bluetooth_db = str(file_found) + source_file_bluetooth = file_found.replace(seeker.directory, '') + + db = open_sqlite_db_readonly(bluetooth_db) + cursor = db.cursor() + cursor.execute(''' + select + datetime(timestamp_millis/1000,'unixepoch'), + bd_addr, + device_identifier, + battery_level, + volume_level, + time_zone + from battery_event + join device_address on battery_event.device_idx = device_address.device_idx + ''') + + all_rows = cursor.fetchall() + usageentries = len(all_rows) + if usageentries > 0: + for row in all_rows: + data_list_bluetooth.append((row[0],row[1],row[2],row[3],row[4],row[5],file_found)) + db.close() + + else: + continue # Skip all other files + + if data_list_battery: report = ArtifactHtmlReport('Turbo - Phone Battery') report.start_artifact_report(report_folder, 'Turbo - Phone Battery') report.add_script() - data_headers = ('Timestamp','Battery Level','Charge Type','Battery Saver','Timezone') # Don't remove the comma, that is required to make this a tuple as there is only 1 element - data_list = [] - for row in all_rows: - data_list.append((row[0],row[1],row[2],row[3],row[4])) + data_headers = ('Timestamp','Battery Level','Charge Type','Battery Saver','Timezone','Source') # Don't remove the comma, that is required to make this a tuple as there is only 1 element - report.write_artifact_data_table(data_headers, data_list, source_file_turbo) + report.write_artifact_data_table(data_headers, data_list_battery, source_file_turbo) report.end_artifact_report() tsvname = f'Turbo - Phone Battery' - tsv(report_folder, data_headers, data_list, tsvname) + tsv(report_folder, data_headers, data_list_battery, tsvname) tlactivity = f'Turbo - Phone Battery' - timeline(report_folder, tlactivity, data_list, data_headers) + timeline(report_folder, tlactivity, data_list_battery, data_headers) else: logfunc('No Turbo - Phone Battery 
data available') - db.close() - db = open_sqlite_db_readonly(bluetooth_db) - cursor = db.cursor() - cursor.execute(''' - select - datetime(timestamp_millis/1000,'unixepoch'), - bd_addr, - device_identifier, - battery_level, - volume_level, - time_zone - from battery_event - join device_address on battery_event.device_idx = device_address.device_idx - ''') - - all_rows = cursor.fetchall() - usageentries = len(all_rows) - if usageentries > 0: + if data_list_bluetooth: report = ArtifactHtmlReport('Turbo - Bluetooth Device Info') report.start_artifact_report(report_folder, 'Turbo - Bluetooth Device Info') report.add_script() - data_headers = ('Timestamp','BT Device MAC Address','BT Device ID','Battery Level','Volume Level','Timezone') # Don't remove the comma, that is required to make this a tuple as there is only 1 element - data_list = [] - for row in all_rows: - data_list.append((row[0],row[1],row[2],row[3],row[4],row[5])) + data_headers = ('Timestamp','BT Device MAC Address','BT Device ID','Battery Level','Volume Level','Timezone','Source') # Don't remove the comma, that is required to make this a tuple as there is only 1 element - report.write_artifact_data_table(data_headers, data_list, source_file_bluetooth) + report.write_artifact_data_table(data_headers, data_list_bluetooth, source_file_bluetooth) report.end_artifact_report() tsvname = f'Turbo - Bluetooth Device Info' - tsv(report_folder, data_headers, data_list, tsvname) + tsv(report_folder, data_headers, data_list_bluetooth, tsvname) tlactivity = f'Turbo - Bluetooth Device Info' - timeline(report_folder, tlactivity, data_list, data_headers) + timeline(report_folder, tlactivity, data_list_bluetooth, data_headers) else: - logfunc('No Turbo - Bluetooth Device Info data available') - - db.close() - -__artifacts__ = { - "Turbo_Battery": ( - "Device Health Services", - ('*/com.google.android.apps.turbo/databases/turbo.db*','*/com.google.android.apps.turbo/databases/bluetooth.db*'), - get_Turbo_Battery) -} \ No 
newline at end of file + logfunc('No Turbo - Bluetooth Device Info data available') \ No newline at end of file diff --git a/scripts/artifacts/downloads.py b/scripts/artifacts/downloads.py index f0e066b1..7c09f30e 100644 --- a/scripts/artifacts/downloads.py +++ b/scripts/artifacts/downloads.py @@ -1,66 +1,75 @@ -# Module Description: Parses native downloads database -# Author: @KevinPagano3 (Twitter) / stark4n6@infosec.exchange (Mastodon) -# Date: 2023-01-09 -# Artifact version: 0.0.1 +__artifacts_v2__ = { + "Downloads": { + "name": "Downloads", + "description": "Parses native downloads database", + "author": "@KevinPagano3", + "version": "0.0.1", + "date": "2023-01-09", + "requirements": "none", + "category": "Downloads", + "notes": "", + "paths": ('*/data/com.android.providers.downloads/databases/downloads.db*',), + "function": "get_downloads" + } +} import sqlite3 -import os -from datetime import datetime from scripts.artifact_report import ArtifactHtmlReport from scripts.ilapfuncs import logfunc, timeline, tsv, is_platform_windows, open_sqlite_db_readonly def get_downloads(files_found, report_folder, seeker, wrap_text, time_offset): + data_list = [] + for file_found in files_found: file_found = str(file_found) - if not os.path.basename(file_found) == 'downloads.db': # skip -journal and other files - continue - else: - break - db = open_sqlite_db_readonly(file_found) - - #Get file downloads - cursor = db.cursor() - cursor.execute(''' - select - datetime(lastmod/1000,'unixepoch') as "Modified/Downloaded Timestamp", - title, - description, - uri, - _data, - mimetype, - notificationpackage, - current_bytes, - total_bytes, - status, - errorMsg, - etag, - case is_visible_in_downloads_ui - when 0 then 'No' - when 1 then 'Yes' - end, - case deleted - when 0 then '' - when 1 then 'Yes' - end - from downloads - ''') - - all_rows = cursor.fetchall() - usageentries = len(all_rows) - if usageentries > 0: - data_list = [] - for row in all_rows: - - 
data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10],row[11],row[12],row[13])) + if file_found.endswith('downloads.db'): + db = open_sqlite_db_readonly(file_found) + #Get file downloads + cursor = db.cursor() + cursor.execute(''' + select + datetime(lastmod/1000,'unixepoch') as "Modified/Downloaded Timestamp", + title, + description, + uri, + _data, + mimetype, + notificationpackage, + current_bytes, + total_bytes, + status, + errorMsg, + etag, + case is_visible_in_downloads_ui + when 0 then 'No' + when 1 then 'Yes' + end, + case deleted + when 0 then '' + when 1 then 'Yes' + end + from downloads + ''') + all_rows = cursor.fetchall() + usageentries = len(all_rows) + if usageentries > 0: + for row in all_rows: + data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10],row[11],row[12],row[13],file_found)) + db.close() + + else: + continue + + if data_list: description = 'Native downloads' report = ArtifactHtmlReport('Native Downloads') report.start_artifact_report(report_folder, 'Native Downloads', description) report.add_script() - data_headers = ('Modified/Downloaded Timestamp','Title','Description','Provider URI','Save Location','Mime Type','App Provider Package','Current Bytes','Total Bytes','Status','Error Message','ETAG','Visible in Downloads UI','Deleted') + data_headers = ('Modified/Downloaded Timestamp','Title','Description','Provider URI','Save Location','Mime Type','App Provider Package','Current Bytes','Total Bytes','Status','Error Message','ETAG','Visible in Downloads UI','Deleted','Source') report.write_artifact_data_table(data_headers, data_list, file_found,html_escape=False) report.end_artifact_report() @@ -71,13 +80,4 @@ def get_downloads(files_found, report_folder, seeker, wrap_text, time_offset): timeline(report_folder, tlactivity, data_list, data_headers) else: - logfunc('No Native Downloads data available') - - db.close() - -__artifacts__ = { - "Downloads": ( - 
"Downloads", - ('*/data/com.android.providers.downloads/databases/downloads.db*'), - get_downloads) -} \ No newline at end of file + logfunc('No Native Downloads data available') \ No newline at end of file diff --git a/scripts/artifacts/emulatedSmeta.py b/scripts/artifacts/emulatedSmeta.py index 7e0ae366..92dddf03 100755 --- a/scripts/artifacts/emulatedSmeta.py +++ b/scripts/artifacts/emulatedSmeta.py @@ -1,10 +1,32 @@ -import os +__artifacts_v2__ = { + "EmulatedSmeta": { + "name": "Emulated Storage Metadata", + "description": "Parses emulated storage metadata from the media provider external.db database", + "author": "@AlexisBrignoni", + "version": "0.0.2", + "date": "2020-10-19", + "requirements": "none", + "category": "Emulated Storage Metadata", + "notes": "2023-02-10 - Updated by @KevinPagano3", + "paths": ('*/com.google.android.providers.media.module/databases/external.db*','*/com.android.providers.media/databases/external.db*'), + "function": "get_emulatedSmeta" + } +} + import sqlite3 + from scripts.artifact_report import ArtifactHtmlReport from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly def get_emulatedSmeta(files_found, report_folder, seeker, wrap_text, time_offset): + data_list_downloads = [] + data_list_images = [] + data_list_files = [] + data_list_videos = [] + data_list_audio = [] + data_list = [] + for file_found in files_found: file_found = str(file_found) if not file_found.endswith('external.db'): @@ -12,6 +34,7 @@ def get_emulatedSmeta(files_found, report_folder, seeker, wrap_text, time_offset if 'media.module' in file_found: db = open_sqlite_db_readonly(file_found) + # Downloads cursor = db.cursor() cursor.execute(''' SELECT @@ -46,35 +69,19 @@ def get_emulatedSmeta(files_found, report_folder, seeker, wrap_text, time_offset all_rows = cursor.fetchall() usageentries = len(all_rows) if usageentries > 0: - report = ArtifactHtmlReport('Emulated Storage Metadata - Downloads') - report.start_artifact_report(report_folder, 'Emulated Storage 
Metadata - Downloads') - report.add_script() - data_headers = ('Key Timestamp','Date Added','Date Modified','Date Taken','Path','Title','Display Name','Size','Owner Package Name','Bucket Display Name','Referer URI','Download URI','Relative Path','Is Downloaded?','Is Favorited?','Is Trashed?','XMP') - data_list = [] for row in all_rows: if bool(row[0]): keytime = row[0] else: keytime = row[1] - if isinstance(row[15], bytes): xmp = str(row[15])[2:-1] else: xmp = row[15] - data_list.append((keytime, row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10], row[11], row[12], row[13], row[14], xmp)) - - report.write_artifact_data_table(data_headers, data_list, file_found) - report.end_artifact_report() - - tsvname = f'Emulated Storage Metadata - Downloads' - tsv(report_folder, data_headers, data_list, tsvname) - - tlactivity = f'Emulated Storage Metadata - Downloads' - timeline(report_folder, tlactivity, data_list, data_headers) - else: - logfunc('No Emulated Storage Metadata - Downloads data available') + data_list_downloads.append((keytime, row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10], row[11], row[12], row[13], row[14], xmp, file_found)) + # Images cursor.execute(''' SELECT datetime(date_added, 'unixepoch'), @@ -111,30 +118,14 @@ def get_emulatedSmeta(files_found, report_folder, seeker, wrap_text, time_offset all_rows = cursor.fetchall() usageentries = len(all_rows) if usageentries > 0: - report = ArtifactHtmlReport('Emulated Storage Metadata - Images') - report.start_artifact_report(report_folder, 'Emulated Storage Metadata - Images') - report.add_script() - data_headers = ('Key Timestamp','Date Added','Date Modified','Date Taken','Path','Title','Display Name','Size','Latitude','Longitude','Orientation','Owner Package Name','Bucket Display Name','Relative Path','Is Downloaded?','Is Favorited?','Is Trashed?') - data_list = [] for row in all_rows: if bool(row[0]): keytime = row[0] else: keytime 
= row[1] - data_list.append((keytime, row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10], row[11], row[12], row[13], row[14], row[15])) + data_list_images.append((keytime, row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10], row[11], row[12], row[13], row[14], row[15], file_found)) - report.write_artifact_data_table(data_headers, data_list, file_found) - report.end_artifact_report() - - tsvname = f'Emulated Storage Metadata - Images' - tsv(report_folder, data_headers, data_list, tsvname) - - tlactivity = f'Emulated Storage Metadata - Images' - timeline(report_folder, tlactivity, data_list, data_headers) - else: - logfunc('No Emulated Storage Metadata - Images data available') - - + # Files (newer version) cursor.execute(''' SELECT datetime(date_added, 'unixepoch'), @@ -173,29 +164,15 @@ def get_emulatedSmeta(files_found, report_folder, seeker, wrap_text, time_offset all_rows = cursor.fetchall() usageentries = len(all_rows) if usageentries > 0: - report = ArtifactHtmlReport('Emulated Storage Metadata - Files') - report.start_artifact_report(report_folder, 'Emulated Storage Metadata - Files') - report.add_script() - data_headers = ('Key Timestamp','Date Added','Date Modified','Date Taken','Path','Title','Display Name','Size','Latitude','Longitude','Orientation','Owner Package Name','Bucket Display Name','Referer URI','Download URI','Relative Path','Is Downloaded?','Is Favorited?','Is Trashed?') - data_list = [] + for row in all_rows: if bool(row[0]): keytime = row[0] else: keytime = row[1] - data_list.append((keytime, row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10], row[11], row[12], row[13], row[14], row[15], row[16], row[17])) - - report.write_artifact_data_table(data_headers, data_list, file_found) - report.end_artifact_report() - - tsvname = f'Emulated Storage Metadata - Files' - tsv(report_folder, data_headers, data_list, tsvname) - - tlactivity = 
f'Emulated Storage Metadata - Files' - timeline(report_folder, tlactivity, data_list, data_headers) - else: - logfunc('No Emulated Storage Metadata - Files data available') + data_list_files.append((keytime, row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10], row[11], row[12], row[13], row[14], row[15], row[16], row[17], file_found)) + # Videos cursor.execute(''' SELECT datetime(date_added, 'unixepoch'), @@ -232,29 +209,14 @@ def get_emulatedSmeta(files_found, report_folder, seeker, wrap_text, time_offset all_rows = cursor.fetchall() usageentries = len(all_rows) if usageentries > 0: - report = ArtifactHtmlReport('Emulated Storage Metadata - Videos') - report.start_artifact_report(report_folder, 'Emulated Storage Metadata - Videos') - report.add_script() - data_headers = ('Key Timestamp','Date Added','Date Modified','Date Taken','Path','Title','Display Name','Size','Latitude','Longitude','Orientation','Owner Package Name','Bucket Display Name','Relative Path','Is Downloaded?','Is Favorited?','Is Trashed?') - data_list = [] for row in all_rows: if bool(row[0]): keytime = row[0] else: keytime = row[1] - data_list.append((keytime, row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10], row[11], row[12], row[13], row[14], row[15])) - - report.write_artifact_data_table(data_headers, data_list, file_found) - report.end_artifact_report() - - tsvname = f'Emulated Storage Metadata - Videos' - tsv(report_folder, data_headers, data_list, tsvname) - - tlactivity = f'Emulated Storage Metadata - Videos' - timeline(report_folder, tlactivity, data_list, data_headers) - else: - logfunc('No Emulated Storage Metadata - Videos data available') + data_list_videos.append((keytime, row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10], row[11], row[12], row[13], row[14], row[15], file_found)) + # Audio cursor.execute(''' SELECT datetime(date_added, 'unixepoch'), @@ -285,32 +247,17 @@ 
def get_emulatedSmeta(files_found, report_folder, seeker, wrap_text, time_offset all_rows = cursor.fetchall() usageentries = len(all_rows) if usageentries > 0: - report = ArtifactHtmlReport('Emulated Storage Metadata - Audio') - report.start_artifact_report(report_folder, 'Emulated Storage Metadata - Audio') - report.add_script() - data_headers = ('Key Timestamp','Date Added','Date Modified','Date Taken','Path','Title','Display Name','Size','Owner Package Name','Bucket Display Name','Relative Path','Is Downloaded?','Is Favorited?','Is Trashed?') - data_list = [] for row in all_rows: if bool(row[0]): keytime = row[0] else: keytime = row[1] - data_list.append((keytime, row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10], row[11], row[12])) - - report.write_artifact_data_table(data_headers, data_list, file_found) - report.end_artifact_report() - - tsvname = f'Emulated Storage Metadata - Audio' - tsv(report_folder, data_headers, data_list, tsvname) - - tlactivity = f'Emulated Storage Metadata - Audio' - timeline(report_folder, tlactivity, data_list, data_headers) - else: - logfunc('No Emulated Storage Metadata - Audio data available') + data_list_audio.append((keytime, row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10], row[11], row[12], file_found)) db.close() else: + # Files (older version) db = open_sqlite_db_readonly(file_found) cursor = db.cursor() cursor.execute(''' @@ -338,30 +285,116 @@ def get_emulatedSmeta(files_found, report_folder, seeker, wrap_text, time_offset all_rows = cursor.fetchall() usageentries = len(all_rows) if usageentries > 0: - report = ArtifactHtmlReport('Emulated Storage Metadata - Files') - report.start_artifact_report(report_folder, 'Emulated Storage Metadata - Files') - report.add_script() - data_headers = ('Timestamp Added','Timestamp Modified','Timestamp Taken','Path','Title','Display Name','Size','Latitude','Longitude','Orientation','Bucket Display Name','Parent 
Path','Width','Height','ID') - data_list = [] - parent = '' + #parent = '' for row in all_rows: - - data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10], row[11], row[12], row[13])) + data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10], row[11], row[12], row[13], file_found)) - report.write_artifact_data_table(data_headers, data_list, file_found) - report.end_artifact_report() - - tsvname = f'Emulated Storage Metadata - Files' - tsv(report_folder, data_headers, data_list, tsvname) + db.close() - tlactivity = f'Emulated Storage Metadata - Files' - timeline(report_folder, tlactivity, data_list, data_headers) - else: - logfunc('No Emulated Storage Metadata - Files data available') + # Downloads Report + if data_list_downloads: + report = ArtifactHtmlReport('Emulated Storage Metadata - Downloads') + report.start_artifact_report(report_folder, 'Emulated Storage Metadata - Downloads') + report.add_script() + data_headers = ('Key Timestamp','Date Added','Date Modified','Date Taken','Path','Title','Display Name','Size','Owner Package Name','Bucket Display Name','Referer URI','Download URI','Relative Path','Is Downloaded?','Is Favorited?','Is Trashed?','XMP','Source') + + report.write_artifact_data_table(data_headers, data_list_downloads, file_found) + report.end_artifact_report() + + tsvname = f'Emulated Storage Metadata - Downloads' + tsv(report_folder, data_headers, data_list_downloads, tsvname) + + tlactivity = f'Emulated Storage Metadata - Downloads' + timeline(report_folder, tlactivity, data_list_downloads, data_headers) + else: + logfunc('No Emulated Storage Metadata - Downloads data available') + + # Images Report + if data_list_images: + report = ArtifactHtmlReport('Emulated Storage Metadata - Images') + report.start_artifact_report(report_folder, 'Emulated Storage Metadata - Images') + report.add_script() + data_headers = ('Key Timestamp','Date Added','Date 
Modified','Date Taken','Path','Title','Display Name','Size','Latitude','Longitude','Orientation','Owner Package Name','Bucket Display Name','Relative Path','Is Downloaded?','Is Favorited?','Is Trashed?','Source') + + report.write_artifact_data_table(data_headers, data_list_images, file_found) + report.end_artifact_report() + + tsvname = f'Emulated Storage Metadata - Images' + tsv(report_folder, data_headers, data_list_images, tsvname) + + tlactivity = f'Emulated Storage Metadata - Images' + timeline(report_folder, tlactivity, data_list_images, data_headers) + else: + logfunc('No Emulated Storage Metadata - Images data available') + + # Files (newer version) Report + if data_list_files: + report = ArtifactHtmlReport('Emulated Storage Metadata - Files') + report.start_artifact_report(report_folder, 'Emulated Storage Metadata - Files') + report.add_script() + data_headers = ('Key Timestamp','Date Added','Date Modified','Date Taken','Path','Title','Display Name','Size','Latitude','Longitude','Orientation','Owner Package Name','Bucket Display Name','Referer URI','Download URI','Relative Path','Is Downloaded?','Is Favorited?','Is Trashed?','Source') + + report.write_artifact_data_table(data_headers, data_list_files, file_found) + report.end_artifact_report() + + tsvname = f'Emulated Storage Metadata - Files' + tsv(report_folder, data_headers, data_list_files, tsvname) + + tlactivity = f'Emulated Storage Metadata - Files' + timeline(report_folder, tlactivity, data_list_files, data_headers) + else: + logfunc('No Emulated Storage Metadata - Files data available') + + # Videos Report + if data_list_videos: + report = ArtifactHtmlReport('Emulated Storage Metadata - Videos') + report.start_artifact_report(report_folder, 'Emulated Storage Metadata - Videos') + report.add_script() + data_headers = ('Key Timestamp','Date Added','Date Modified','Date Taken','Path','Title','Display Name','Size','Latitude','Longitude','Orientation','Owner Package Name','Bucket Display 
Name','Relative Path','Is Downloaded?','Is Favorited?','Is Trashed?','Source') + + report.write_artifact_data_table(data_headers, data_list_videos, file_found) + report.end_artifact_report() + + tsvname = f'Emulated Storage Metadata - Videos' + tsv(report_folder, data_headers, data_list_videos, tsvname) + + tlactivity = f'Emulated Storage Metadata - Videos' + timeline(report_folder, tlactivity, data_list_videos, data_headers) + else: + logfunc('No Emulated Storage Metadata - Videos data available') + + # Audio Report + if data_list_audio: + report = ArtifactHtmlReport('Emulated Storage Metadata - Audio') + report.start_artifact_report(report_folder, 'Emulated Storage Metadata - Audio') + report.add_script() + data_headers = ('Key Timestamp','Date Added','Date Modified','Date Taken','Path','Title','Display Name','Size','Owner Package Name','Bucket Display Name','Relative Path','Is Downloaded?','Is Favorited?','Is Trashed?','Source') -__artifacts__ = { - "EmulatedSmeta": ( - "Emulated Storage Metadata", - ('*/com.google.android.providers.media.module/databases/external.db*','*/com.android.providers.media/databases/external.db*'), - get_emulatedSmeta) -} \ No newline at end of file + report.write_artifact_data_table(data_headers, data_list_audio, file_found) + report.end_artifact_report() + + tsvname = f'Emulated Storage Metadata - Audio' + tsv(report_folder, data_headers, data_list_audio, tsvname) + + tlactivity = f'Emulated Storage Metadata - Audio' + timeline(report_folder, tlactivity, data_list_audio, data_headers) + else: + logfunc('No Emulated Storage Metadata - Audio data available') + + # Files (older version) + if data_list: + report = ArtifactHtmlReport('Emulated Storage Metadata - Files') + report.start_artifact_report(report_folder, 'Emulated Storage Metadata - Files') + report.add_script() + data_headers = ('Timestamp Added','Timestamp Modified','Timestamp Taken','Path','Title','Display Name','Size','Latitude','Longitude','Orientation','Bucket Display 
Name','Parent Path','Width','Height','ID') + + report.write_artifact_data_table(data_headers, data_list, file_found) + report.end_artifact_report() + + tsvname = f'Emulated Storage Metadata - Files' + tsv(report_folder, data_headers, data_list, tsvname) + + tlactivity = f'Emulated Storage Metadata - Files' + timeline(report_folder, tlactivity, data_list, data_headers) + else: + logfunc('No Emulated Storage Metadata - Files data available') \ No newline at end of file diff --git a/scripts/artifacts/googleCalendar.py b/scripts/artifacts/googleCalendar.py index 47697de1..83ea09eb 100644 --- a/scripts/artifacts/googleCalendar.py +++ b/scripts/artifacts/googleCalendar.py @@ -1,12 +1,21 @@ -# Module Description: Parses provider calendars and events -# Author: @KevinPagano3 (Twitter) / stark4n6@infosec.exchange (Mastodon) -# Date: 2023-01-06 -# Artifact version: 0.0.1 +__artifacts_v2__ = { + "Calendar": { + "name": "Calendar", + "description": "Parses provider calendars and events", + "author": "@KevinPagano3", + "version": "0.0.1", + "date": "2023-01-06", + "requirements": "none", + "category": "Calendar", + "notes": "", + "paths": ('*/data/com.google.android.calendar/databases/cal_v2a*','*/com.android.providers.calendar/databases/calendar.db*'), + "function": "get_calendar" + } +} import zlib import sqlite3 import blackboxprotobuf -import os from datetime import datetime from scripts.artifact_report import ArtifactHtmlReport @@ -14,130 +23,123 @@ def get_calendar(files_found, report_folder, seeker, wrap_text, time_offset): + data_list_events = [] + data_list_calendars = [] + for file_found in files_found: file_found = str(file_found) - if file_found.endswith('-wal'): - continue - elif file_found.endswith('-shm'): - continue - elif file_found.endswith('-journal'): - continue - if os.path.basename(file_found).endswith('calendar.db'): + + if file_found.endswith('calendar.db'): calendarDB = file_found source_calendarDB = file_found.replace(seeker.directory, '') - - if 
os.path.basename(file_found).endswith('cal_v2a'): + + db = open_sqlite_db_readonly(calendarDB) + + #Get provider calendar events + cursor = db.cursor() + cursor.execute(''' + select + datetime(Events.dtstart/1000,'unixepoch') as "Event Start Timestamp", + datetime(Events.dtend/1000,'unixepoch') as "Event End Timestamp", + Events.eventTimezone, + Events.title, + Events.description, + Events.eventLocation, + Events._sync_id, + Events.organizer, + Calendars.calendar_displayName, + case Events.allDay + when 0 then '' + when 1 then 'Yes' + end, + case Events.hasAlarm + when 0 then '' + when 1 then 'Yes' + end + from Events + left join Calendars on Calendars._id = Events.calendar_id + ''') + + all_rows = cursor.fetchall() + usageentries = len(all_rows) + if usageentries > 0: + for row in all_rows: + data_list_events.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10], calendarDB)) + + # Get provider calendars + cursor = db.cursor() + cursor.execute(''' + select + case + when cal_sync8 is NULL then '' + else datetime(cal_sync8/1000,'unixepoch') + end, + name, + calendar_displayName, + account_name, + account_type, + case visible + when 0 then 'No' + when 1 then 'Yes' + end, + calendar_location, + calendar_timezone, + ownerAccount, + case isPrimary + when 0 then '' + when 1 then 'Yes' + end, + calendar_color, + calendar_color_index + from Calendars + ''') + + all_rows = cursor.fetchall() + usageentries = len(all_rows) + if usageentries > 0: + for row in all_rows: + data_list_calendars.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10],row[11], calendarDB)) + + if file_found.endswith('cal_v2a'): g_calendarDB = file_found source_g_calendarDB = file_found.replace(seeker.directory, '') + + else: + continue # Skip all other files - db = open_sqlite_db_readonly(calendarDB) - #Get provider calendar events - cursor = db.cursor() - cursor.execute(''' - select - datetime(Events.dtstart/1000,'unixepoch') as "Event 
Start Timestamp", - datetime(Events.dtend/1000,'unixepoch') as "Event End Timestamp", - Events.eventTimezone, - Events.title, - Events.description, - Events.eventLocation, - Events._sync_id, - Events.organizer, - Calendars.calendar_displayName, - case Events.allDay - when 0 then '' - when 1 then 'Yes' - end, - case Events.hasAlarm - when 0 then '' - when 1 then 'Yes' - end - from Events - left join Calendars on Calendars._id = Events.calendar_id - ''') - - all_rows = cursor.fetchall() - usageentries = len(all_rows) - if usageentries > 0: - data_list = [] - for row in all_rows: - - data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10])) - + if data_list_events: description = 'Calendar - Events' report = ArtifactHtmlReport('Calendar - Events') report.start_artifact_report(report_folder, 'Calendar - Events', description) report.add_script() - data_headers = ('Event Start Timestamp','Event End Timestamp','Event Timezone','Title','Description','Event Location','Sync ID','Organizer','Calendar Display Name','All Day Event','Has Alarm') - report.write_artifact_data_table(data_headers, data_list, source_calendarDB,html_escape=False) + data_headers = ('Event Start Timestamp','Event End Timestamp','Event Timezone','Title','Description','Event Location','Sync ID','Organizer','Calendar Display Name','All Day Event','Has Alarm','Source') + report.write_artifact_data_table(data_headers, data_list_events, source_calendarDB,html_escape=False) report.end_artifact_report() tsvname = 'Calendar - Events' - tsv(report_folder, data_headers, data_list, tsvname) + tsv(report_folder, data_headers, data_list_events, tsvname) tlactivity = 'Calendar - Events' - timeline(report_folder, tlactivity, data_list, data_headers) + timeline(report_folder, tlactivity, data_list_events, data_headers) else: logfunc('No Calendar - Events data available') - #Get provider calendars - cursor = db.cursor() - cursor.execute(''' - select - case - when cal_sync8 is NULL 
then '' - else datetime(cal_sync8/1000,'unixepoch') - end, - name, - calendar_displayName, - account_name, - account_type, - case visible - when 0 then 'No' - when 1 then 'Yes' - end, - calendar_location, - calendar_timezone, - ownerAccount, - case isPrimary - when 0 then '' - when 1 then 'Yes' - end, - calendar_color, - calendar_color_index - from Calendars - ''') - - all_rows = cursor.fetchall() - usageentries = len(all_rows) - if usageentries > 0: - data_list = [] - for row in all_rows: - - data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10],row[11])) - + if data_list_calendars: description = 'Calendar - Calendars' report = ArtifactHtmlReport('Calendar - Calendars') report.start_artifact_report(report_folder, 'Calendar - Calendars', description) report.add_script() - data_headers = ('Created Timestamp','Calendar Name','Calendar Display Name','Account Name','Account Type','Visible','Calendar Location','Timezone','Owner Account','Is Primary','Color','Color Index') - report.write_artifact_data_table(data_headers, data_list, source_calendarDB,html_escape=False) + data_headers = ('Created Timestamp','Calendar Name','Calendar Display Name','Account Name','Account Type','Visible','Calendar Location','Timezone','Owner Account','Is Primary','Color','Color Index','Source') + report.write_artifact_data_table(data_headers, data_list_calendars, source_calendarDB,html_escape=False) report.end_artifact_report() tsvname = 'Calendar - Calendars' - tsv(report_folder, data_headers, data_list, tsvname) + tsv(report_folder, data_headers, data_list_calendars, tsvname) tlactivity = 'Calendar - Calendars' - timeline(report_folder, tlactivity, data_list, data_headers) + timeline(report_folder, tlactivity, data_list_calendars, data_headers) else: - logfunc('No Calendar - Calendars data available') - -__artifacts__ = { - "Calendar": ( - "Calendar", - 
('*/data/com.google.android.calendar/databases/cal_v2a*','*/com.android.providers.calendar/databases/calendar.db*'), - get_calendar) -} \ No newline at end of file + logfunc('No Calendar - Calendars data available') \ No newline at end of file