diff --git a/scripts/artifacts/routineDLocationsLocal.py b/scripts/artifacts/routineDLocationsLocal.py
index d9102bc8..d14d2602 100644
--- a/scripts/artifacts/routineDLocationsLocal.py
+++ b/scripts/artifacts/routineDLocationsLocal.py
@@ -15,13 +15,13 @@ def get_routineDLocationsLocal(files_found, report_folder, seeker):
else:
for file_found in files_found:
file_found = str(file_found)
-
+
if file_found.endswith('Local.sqlite'):
break
-
+
db = open_sqlite_db_readonly(file_found)
cursor = db.cursor()
- if version.parse(iOSversion) >= version.parse("14"): # Tested 14.1
+ if version.parse(iOSversion) >= version.parse("14"): # Tested 14.1
cursor.execute('''
select
datetime(zrtlearnedlocationofinterestvisitmo.zentrydate + 978307200, 'unixepoch'),
@@ -45,32 +45,35 @@ def get_routineDLocationsLocal(files_found, report_folder, seeker):
all_rows = cursor.fetchall()
usageentries = len(all_rows)
- data_list = []
+ data_list = []
if usageentries > 0:
for row in all_rows:
- data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10], row[11], row[12]))
-
+ data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9],
+ row[10], row[11], row[12]))
+
description = 'Significant Locations - Location of Interest Entry (Historical)'
report = ArtifactHtmlReport('Locations')
report.start_artifact_report(report_folder, 'RoutineD Locations Entry', description)
report.add_script()
- data_headers = ('Timestamp','Exit','Entry Time (Minutes)','Latitude', 'Longitude','Confidence','Location Vertical Uncertainty','Location Horizontal Uncertainty','Data Point Count','Place Creation Date','Expiration','Visit latitude', 'Visit Longitude' )
+ data_headers = ('Timestamp', 'Exit', 'Entry Time (Minutes)', 'Latitude', 'Longitude', 'Confidence',
+ 'Location Vertical Uncertainty', 'Location Horizontal Uncertainty', 'Data Point Count',
+ 'Place Creation Date', 'Expiration', 'Visit latitude', 'Visit Longitude')
report.write_artifact_data_table(data_headers, data_list, file_found)
report.end_artifact_report()
-
+
tsvname = 'RoutineD Locations Entry'
tsv(report_folder, data_headers, data_list, tsvname)
-
+
tlactivity = 'RoutineD Locations Entry'
timeline(report_folder, tlactivity, data_list, data_headers)
-
+
kmlactivity = 'RoutineD Locations Entry'
kmlgen(report_folder, kmlactivity, data_list, data_headers)
else:
logfunc('No RoutineD Significant Locations Entry data available')
-
- else: # < ios 14
+
+ else: # < ios 14
cursor.execute('''
select
datetime(zrtlearnedlocationofinterestvisitmo.zentrydate + 978307200, 'unixepoch'),
@@ -96,28 +99,31 @@ def get_routineDLocationsLocal(files_found, report_folder, seeker):
data_list = []
if usageentries > 0:
for row in all_rows:
- data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10], row[11]))
-
+ data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9],
+ row[10], row[11]))
+
description = 'Significant Locations - Location of Interest Entry (Historical)'
report = ArtifactHtmlReport('Locations')
report.start_artifact_report(report_folder, 'RoutineD Locations Entry', description)
report.add_script()
- data_headers = ('Timestamp','Exit','Entry Time (Minutes)','Latitude', 'Longitude','Confidence','Location Uncertainty','Data Point Count','Place Creation Date','Expiration','Visit latitude', 'Visit Longitude' )
+ data_headers = ('Timestamp', 'Exit', 'Entry Time (Minutes)', 'Latitude', 'Longitude', 'Confidence',
+ 'Location Uncertainty', 'Data Point Count', 'Place Creation Date', 'Expiration',
+ 'Visit latitude', 'Visit Longitude')
report.write_artifact_data_table(data_headers, data_list, file_found)
report.end_artifact_report()
-
+
tsvname = 'RoutineD Locations Entry'
tsv(report_folder, data_headers, data_list, tsvname)
-
+
tlactivity = 'RoutineD Locations Entry'
timeline(report_folder, tlactivity, data_list, data_headers)
-
+
kmlactivity = 'RoutineD Locations Entry'
kmlgen(report_folder, kmlactivity, data_list, data_headers)
else:
logfunc('No RoutineD Significant Locations Entry data available')
-
+
if version.parse(iOSversion) >= version.parse("12"):
cursor.execute('''
select
@@ -129,75 +135,33 @@ def get_routineDLocationsLocal(files_found, report_folder, seeker):
zrtlearnedlocationofinterestmo.zlocationlongitude
from
zrtlearnedlocationofinteresttransitionmo
- left join
- zrtlearnedlocationofinterestmo
+ left join zrtlearnedlocationofinterestmo
on zrtlearnedlocationofinterestmo.z_pk = zrtlearnedlocationofinteresttransitionmo.zlocationofinterest
''')
all_rows = cursor.fetchall()
usageentries = len(all_rows)
- data_list = []
+ data_list = []
if usageentries > 0:
for row in all_rows:
data_list.append((row[0], row[1], row[2], row[3], row[4], row[5]))
-
- description = 'Significant Locations - Location of Interest Transition Start (Historical)'
- report = ArtifactHtmlReport('Locations')
- report.start_artifact_report(report_folder, 'RoutineD Transtition Start', description)
- report.add_script()
- data_headers = ('Timestamp','Stop','Creation Date', 'Expiration','Latitude','Longitude' )
- report.write_artifact_data_table(data_headers, data_list, file_found)
- report.end_artifact_report()
-
- tsvname = 'RoutineD Transtition Start'
- tsv(report_folder, data_headers, data_list, tsvname)
-
- tlactivity = 'RoutineD Transtition Start'
- timeline(report_folder, tlactivity, data_list, data_headers)
-
- tlactivity = 'RoutineD Transtition Start'
- timeline(report_folder, tlactivity, data_list, data_headers)
- else:
- logfunc('No RoutineD Significant Locations Transtition Start data available')
-
- if (version.parse(iOSversion) >= version.parse("12")):
- cursor.execute('''
- select
- datetime(zrtlearnedlocationofinteresttransitionmo.zstartdate + 978307200, 'unixepoch'),
- datetime(zrtlearnedlocationofinteresttransitionmo.zstopdate + 978307200, 'unixepoch'),
- datetime(zrtlearnedlocationofinteresttransitionmo.zcreationdate + 978307200, 'unixepoch'),
- datetime(zrtlearnedlocationofinteresttransitionmo.zexpirationdate + 978307200, 'unixepoch'),
- zrtlearnedlocationofinterestmo.zlocationlatitude,
- zrtlearnedlocationofinterestmo.zlocationlongitude
- from
- zrtlearnedlocationofinteresttransitionmo, zrtlearnedlocationofinterestmo
- where zrtlearnedlocationofinterestmo.z_pk = zrtlearnedlocationofinteresttransitionmo.zlocationofinterest
- ''')
-
- all_rows = cursor.fetchall()
- usageentries = len(all_rows)
- data_list = []
- if usageentries > 0:
- for row in all_rows:
- data_list.append((row[0], row[1], row[2], row[3], row[4], row[5]))
-
- description = 'Significant Locations - Location of Interest Transition Stop (Historical)'
+            description = 'Significant Locations - Location of Interest Transition (Historical)'
report = ArtifactHtmlReport('Locations')
- report.start_artifact_report(report_folder, 'RoutineD Transtition Stop', description)
+ report.start_artifact_report(report_folder, 'RoutineD Transition', description)
report.add_script()
- data_headers = ('Timestamp','Stop','Creation Date', 'Expiration','Latitude','Longitude' )
+ data_headers = ('Start', 'Stop', 'Creation Date', 'Expiration', 'Latitude', 'Longitude')
report.write_artifact_data_table(data_headers, data_list, file_found)
report.end_artifact_report()
-
- tsvname = 'RoutineD Transtition Stop'
+
+ tsvname = 'RoutineD Transition'
tsv(report_folder, data_headers, data_list, tsvname)
-
- tlactivity = 'RoutineD Transtition Stop'
+
+ tlactivity = 'RoutineD Transition'
timeline(report_folder, tlactivity, data_list, data_headers)
-
-            tlactivity = 'RoutineD Transtition Stop'
-            timeline(report_folder, tlactivity, data_list, data_headers)
+
+            kmlactivity = 'RoutineD Transition'
+            kmlgen(report_folder, kmlactivity, data_list, data_headers)
else:
- logfunc('No RoutineD Significant Locations Transtition Stop data available')
\ No newline at end of file
+ logfunc('No RoutineD Significant Locations Transition data available')
diff --git a/scripts/artifacts/weatherAppLocations.py b/scripts/artifacts/weatherAppLocations.py
index 0e6c6fa0..9c929449 100644
--- a/scripts/artifacts/weatherAppLocations.py
+++ b/scripts/artifacts/weatherAppLocations.py
@@ -47,4 +47,3 @@ def get_weatherAppLocations(files_found, report_folder, seeker):
else:
logfunc('No data available for Weather App Locations')
-
\ No newline at end of file
diff --git a/scripts/ilapfuncs.py b/scripts/ilapfuncs.py
index 102ef0b6..b41c6db4 100644
--- a/scripts/ilapfuncs.py
+++ b/scripts/ilapfuncs.py
@@ -1,24 +1,21 @@
import csv
import datetime
import os
-import pathlib
import re
-import sys
import codecs
import sqlite3
-import string
import binascii
import math
import simplekml
import shutil
-from bs4 import BeautifulSoup
from PIL import Image
thumbnail_root = '**/Media/PhotoData/Thumbnails/**/'
media_root = '**/Media/'
thumb_size = 256, 256
+
class OutputParameters:
'''Defines the parameters that are common for '''
# static parameters
@@ -28,30 +25,37 @@ class OutputParameters:
def __init__(self, output_folder):
now = datetime.datetime.now()
currenttime = str(now.strftime('%Y-%m-%d_%A_%H%M%S'))
- self.report_folder_base = os.path.join(output_folder, 'iLEAPP_Reports_' + currenttime) # aleapp , aleappGUI, ileap_artifacts, report.py
+ self.report_folder_base = os.path.join(output_folder,
+ 'iLEAPP_Reports_' + currenttime) # aleapp , aleappGUI, ileap_artifacts, report.py
self.temp_folder = os.path.join(self.report_folder_base, 'temp')
- OutputParameters.screen_output_file_path = os.path.join(self.report_folder_base, 'Script Logs', 'Screen Output.html')
- OutputParameters.screen_output_file_path_devinfo = os.path.join(self.report_folder_base, 'Script Logs', 'DeviceInfo.html')
+ OutputParameters.screen_output_file_path = os.path.join(self.report_folder_base, 'Script Logs',
+ 'Screen Output.html')
+ OutputParameters.screen_output_file_path_devinfo = os.path.join(self.report_folder_base, 'Script Logs',
+ 'DeviceInfo.html')
os.makedirs(os.path.join(self.report_folder_base, 'Script Logs'))
os.makedirs(self.temp_folder)
+
def is_platform_windows():
'''Returns True if running on Windows'''
return os.name == 'nt'
+
def sanitize_file_path(filename, replacement_char='_'):
'''
Removes illegal characters (for windows) from the string passed. Does not replace \ or /
'''
return re.sub(r'[*?:"<>|\'\r\n]', replacement_char, filename)
+
def sanitize_file_name(filename, replacement_char='_'):
'''
Removes illegal characters (for windows) from the string passed.
'''
return re.sub(r'[\\/*?:"<>|\'\r\n]', replacement_char, filename)
+
def get_next_unused_name(path):
'''Checks if path exists, if it does, finds an unused name by appending -xx
where xx=00-99. Return value is new path.
@@ -72,24 +76,26 @@ def get_next_unused_name(path):
num += 1
return os.path.join(folder, new_name)
+
def open_sqlite_db_readonly(path):
'''Opens an sqlite db in read-only mode, so original db (and -wal/journal are intact)'''
if is_platform_windows():
- if path.startswith('\\\\?\\UNC\\'): # UNC long path
+ if path.startswith('\\\\?\\UNC\\'): # UNC long path
path = "%5C%5C%3F%5C" + path[4:]
- elif path.startswith('\\\\?\\'): # normal long path
+ elif path.startswith('\\\\?\\'): # normal long path
path = "%5C%5C%3F%5C" + path[4:]
- elif path.startswith('\\\\'): # UNC path
+ elif path.startswith('\\\\'): # UNC path
path = "%5C%5C%3F%5C\\UNC" + path[1:]
- else: # normal path
+ else: # normal path
path = "%5C%5C%3F%5C" + path
- return sqlite3.connect (f"file:{path}?mode=ro", uri=True)
+ return sqlite3.connect(f"file:{path}?mode=ro", uri=True)
+
def does_column_exist_in_db(db, table_name, col_name):
'''Checks if a specific col exists'''
col_name = col_name.lower()
try:
- db.row_factory = sqlite3.Row # For fetching columns by name
+ db.row_factory = sqlite3.Row # For fetching columns by name
query = f"pragma table_info('{table_name}');"
cursor = db.cursor()
cursor.execute(query)
@@ -102,6 +108,7 @@ def does_column_exist_in_db(db, table_name, col_name):
pass
return False
+
def does_table_exist(db, table_name):
'''Checks if a table with specified name exists in an sqlite db'''
try:
@@ -113,9 +120,10 @@ def does_table_exist(db, table_name):
logfunc(f"Query error, query={query} Error={str(ex)}")
return False
+
class GuiWindow:
'''This only exists to hold window handle if script is run from GUI'''
- window_handle = None # static variable
+ window_handle = None # static variable
progress_bar_total = 0
progress_bar_handle = None
@@ -123,7 +131,8 @@ class GuiWindow:
def SetProgressBar(n):
if GuiWindow.progress_bar_handle:
GuiWindow.progress_bar_handle.UpdateBar(n)
-
+
+
def logfunc(message=""):
with open(OutputParameters.screen_output_file_path, 'a', encoding='utf8') as a:
print(message)
@@ -132,28 +141,30 @@ def logfunc(message=""):
if GuiWindow.window_handle:
GuiWindow.window_handle.refresh()
+
def logdevinfo(message=""):
with open(OutputParameters.screen_output_file_path_devinfo, 'a', encoding='utf8') as b:
        b.write(message + '<br>' + OutputParameters.nl)
-
+
+
def tsv(report_folder, data_headers, data_list, tsvname):
report_folder = report_folder.rstrip('/')
report_folder = report_folder.rstrip('\\')
report_folder_base, tail = os.path.split(report_folder)
tsv_report_folder = os.path.join(report_folder_base, '_TSV Exports')
-
+
if os.path.isdir(tsv_report_folder):
pass
else:
os.makedirs(tsv_report_folder)
-
-
- with codecs.open(os.path.join(tsv_report_folder, tsvname +'.tsv'), 'a', 'utf-8-sig') as tsvfile:
+
+ with codecs.open(os.path.join(tsv_report_folder, tsvname + '.tsv'), 'a', 'utf-8-sig') as tsvfile:
tsv_writer = csv.writer(tsvfile, delimiter='\t')
tsv_writer.writerow(data_headers)
for i in data_list:
tsv_writer.writerow(i)
-
+
+
def timeline(report_folder, tlactivity, data_list, data_headers):
report_folder = report_folder.rstrip('/')
report_folder = report_folder.rstrip('\\')
@@ -169,26 +180,28 @@ def timeline(report_folder, tlactivity, data_list, data_headers):
db.commit()
else:
os.makedirs(tl_report_folder)
- #create database
+ # create database
tldb = os.path.join(tl_report_folder, 'tl.db')
- db = sqlite3.connect(tldb, isolation_level = 'exclusive')
+ db = sqlite3.connect(tldb, isolation_level='exclusive')
cursor = db.cursor()
cursor.execute(
- """
+ """
CREATE TABLE data(key TEXT, activity TEXT, datalist TEXT)
"""
- )
+ )
db.commit()
-
+
a = 0
length = (len(data_list))
- while a < length:
- modifiedList = list(map(lambda x, y: x.upper() + ': ' + str(y), data_headers, data_list[a]))
- cursor.executemany("INSERT INTO data VALUES(?,?,?)", [(str(data_list[a][0]), tlactivity.upper(), str(modifiedList))])
+ while a < length:
+ modifiedList = list(map(lambda x, y: x.upper() + ': ' + str(y), data_headers, data_list[a]))
+ cursor.executemany("INSERT INTO data VALUES(?,?,?)",
+ [(str(data_list[a][0]), tlactivity.upper(), str(modifiedList))])
a += 1
db.commit()
db.close()
+
def kmlgen(report_folder, kmlactivity, data_list, data_headers):
report_folder = report_folder.rstrip('/')
report_folder = report_folder.rstrip('\\')
@@ -207,19 +220,25 @@ def kmlgen(report_folder, kmlactivity, data_list, data_headers):
db = sqlite3.connect(latlongdb)
cursor = db.cursor()
cursor.execute(
- """
+ """
CREATE TABLE data(key TEXT, latitude TEXT, longitude TEXT, activity TEXT)
"""
- )
+ )
db.commit()
-
+
kml = simplekml.Kml(open=1)
-
+
a = 0
length = (len(data_list))
while a < length:
modifiedDict = dict(zip(data_headers, data_list[a]))
- times = modifiedDict['Timestamp']
+            if 'Timestamp' in modifiedDict:
+                times = modifiedDict['Timestamp']
+            elif 'Start' in modifiedDict:
+                times = modifiedDict['Start']
+            else:
+                # fall back safely so 'times' is always bound (avoids UnboundLocalError below)
+                times = modifiedDict.get('Update Time', '')
lon = modifiedDict['Longitude']
lat = modifiedDict['Latitude']
if lat:
@@ -232,23 +251,32 @@ def kmlgen(report_folder, kmlactivity, data_list, data_headers):
db.commit()
db.close()
kml.save(os.path.join(kml_report_folder, f'{kmlactivity}.kml'))
-
+
+
''' Returns string of printable characters. Replacing non-printable characters
with '.', or CHR(46)
``'''
+
+
def strings_raw(data):
return "".join([chr(byte) if byte >= 0x20 and byte < 0x7F else chr(46) for byte in data])
+
''' Returns string of printable characters. Works similar to the Linux
`string` function.
'''
+
+
def strings(data):
cleansed = "".join([chr(byte) if byte >= 0x20 and byte < 0x7F else chr(0) for byte in data])
return filter(lambda string: len(string) >= 4, cleansed.split(chr(0)))
+
''' Retuns HTML table of the hexdump of the passed in data.
'''
-def generate_hexdump(data, char_per_row = 5):
+
+
+def generate_hexdump(data, char_per_row=5):
data_hex = binascii.hexlify(data).decode('utf-8')
str_raw = strings_raw(data)
str_hex = ''
@@ -256,9 +284,9 @@ def generate_hexdump(data, char_per_row = 5):
''' Generates offset column
'''
- offset_rows = math.ceil(len(data_hex)/(char_per_row * 2))
- offsets = [i for i in range(0, len(data_hex), char_per_row)][:offset_rows]
-    str_offset = '<br>'.join([ str(hex(s)[2:]).zfill(4).upper() for s in offsets ])
+ offset_rows = math.ceil(len(data_hex) / (char_per_row * 2))
+ offsets = [i for i in range(0, len(data_hex), char_per_row)][:offset_rows]
+    str_offset = '<br>'.join([str(hex(s)[2:]).zfill(4).upper() for s in offsets])
''' Generates hex data column
'''
@@ -294,26 +322,29 @@ def generate_hexdump(data, char_per_row = 5):
'''
+
'''
searching for thumbnails, copy it to report folder and return tag to insert in html
'''
+
+
def generate_thumbnail(imDirectory, imFilename, seeker, report_folder):
- thumb = thumbnail_root+imDirectory+'/'+imFilename+'/'
- thumblist = seeker.search(thumb+'**.JPG', return_on_first_hit=True)
- thumbname = imDirectory.replace('/','_')+'_'+imFilename+'.JPG'
+ thumb = thumbnail_root + imDirectory + '/' + imFilename + '/'
+ thumblist = seeker.search(thumb + '**.JPG', return_on_first_hit=True)
+ thumbname = imDirectory.replace('/', '_') + '_' + imFilename + '.JPG'
pathToThumb = os.path.join(os.path.basename(os.path.abspath(report_folder)), thumbname)
    htmlThumbTag = '<img src="{0}"></img>'.format(pathToThumb)
if thumblist:
- shutil.copyfile(thumblist[0],os.path.join(report_folder, thumbname))
+ shutil.copyfile(thumblist[0], os.path.join(report_folder, thumbname))
else:
- #recreate thumbnail from image
- #TODO: handle videos and HEIC
- files = seeker.search(media_root+imDirectory+'/'+imFilename, return_on_first_hit=True)
+ # recreate thumbnail from image
+ # TODO: handle videos and HEIC
+ files = seeker.search(media_root + imDirectory + '/' + imFilename, return_on_first_hit=True)
if files:
try:
im = Image.open(files[0])
im.thumbnail(thumb_size)
im.save(os.path.join(report_folder, thumbname))
except:
- pass #unsupported format
+ pass # unsupported format
return htmlThumbTag