Mirror of https://github.com/mvt-project/mvt.git, synced 2026-02-15 18:02:44 +00:00

Compare commits (20 commits)

| SHA1 |
|---|
| a5ae729b65 |
| 86a0772eb2 |
| 7d0be9db4f |
| 4e120b2640 |
| dbe9e5db9b |
| 0b00398729 |
| 87034d2c7a |
| 595a2f6536 |
| 8ead44a31e |
| 5c19d02a73 |
| 14ebc9ee4e |
| de53cc07f8 |
| 22e066fc4a |
| 242052b8ec |
| 1df61b5bbf |
| b691de2cc0 |
| 10915f250c |
| c60cef4009 |
| dda798df8e |
| 49108e67e2 |

@@ -1,5 +1,5 @@
 mkdocs==1.6.1
 mkdocs-autorefs==1.4.2
-mkdocs-material==9.6.14
+mkdocs-material==9.6.16
 mkdocs-material-extensions==1.3.1
-mkdocstrings==0.29.1
+mkdocstrings==0.30.0

@@ -20,7 +20,7 @@ classifiers = [
 ]
 dependencies = [
     "click==8.2.1",
-    "rich==14.0.0",
+    "rich==14.1.0",
     "tld==0.13.1",
     "requests==2.32.4",
     "simplejson==3.20.1",

@@ -29,12 +29,12 @@ dependencies = [
     "iOSbackup==0.9.925",
     "adb-shell[usb]==0.4.4",
     "libusb1==3.3.1",
-    "cryptography==45.0.4",
+    "cryptography==45.0.6",
     "PyYAML>=6.0.2",
     "pyahocorasick==2.2.0",
     "betterproto==1.2.5",
     "pydantic==2.11.7",
-    "pydantic-settings==2.9.1",
+    "pydantic-settings==2.10.1",
     "NSKeyedUnArchiver==1.5.2",
     "python-dateutil==2.9.0.post0",
 ]

@@ -21,12 +21,22 @@ class DumpsysADBArtifact(AndroidArtifact):
         stack = [res]
         cur_indent = 0
         in_multiline = False
-        for line in dump_data.strip(b"\n").split(b"\n"):
+        # Normalize line endings to handle both Unix (\n) and Windows (\r\n)
+        normalized_data = dump_data.replace(b"\r\n", b"\n").replace(b"\r", b"\n")
+        for line in normalized_data.strip(b"\n").split(b"\n"):
+            # Skip completely empty lines
+            if not line.strip():
+                continue
+
             # Track the level of indentation
             indent = len(line) - len(line.lstrip())
             if indent < cur_indent:
                 # If the current line is less indented than the previous one, back out
-                stack.pop()
+                while len(stack) > 1 and indent < cur_indent:
+                    stack.pop()
+                    # Check if we were in multiline mode and need to exit it
+                    if in_multiline and not isinstance(stack[-1], list):
+                        in_multiline = False
                 cur_indent = indent
             else:
                 cur_indent = indent

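This hunk normalizes line endings before splitting, so `dumpsys` output captured on Windows (`\r\n`) or with stray `\r` parses the same way as a Unix capture, and blank lines no longer confuse the indentation tracking. A minimal standalone sketch of that normalization step; the helper name `normalize_dump` and the sample bytes are illustrative, not from the repository:

```python
def normalize_dump(dump_data: bytes) -> list[bytes]:
    """Split raw dumpsys bytes into lines regardless of the platform's line endings."""
    # Collapse Windows (\r\n) and bare carriage returns (\r) to plain \n first
    normalized = dump_data.replace(b"\r\n", b"\n").replace(b"\r", b"\n")
    # Drop completely empty lines, mirroring the parser's skip logic
    return [line for line in normalized.strip(b"\n").split(b"\n") if line.strip()]


# Example with mixed line endings (invented sample data)
sample = b"user_keys:\r\n  key1=abc\r\nlast_connection=123\n"
for line in normalize_dump(sample):
    print(line)
```
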
@@ -38,12 +48,30 @@ class DumpsysADBArtifact(AndroidArtifact):
 
             # Annoyingly, some values are multiline and don't have a key on each line
             if in_multiline:
-                if key == "":
+                if key == "" and len(vals) < 2:
                     # If the line is empty, it's the terminator for the multiline value
                     in_multiline = False
                     stack.pop()
+                    current_dict = stack[-1]
+                elif len(vals) >= 2 and (key in self.multiline_fields or key == "}" or vals[1] == b"{"):
+                    # If we encounter a new field while in multiline mode, exit multiline mode
+                    # and process this line as a new field
+                    in_multiline = False
+                    stack.pop()
+                    current_dict = stack[-1]
+                    # Don't continue here - let the line be processed as a new field
                 else:
-                    current_dict.append(line.lstrip())
+                    # When in multiline mode, the top of stack should be a list
+                    if isinstance(stack[-1], list):
+                        stack[-1].append(line.lstrip())
+                    else:
+                        # Something went wrong with the stack, exit multiline mode
+                        in_multiline = False
+                        current_dict = stack[-1]
+                    continue
+
+            # Skip lines that don't have a value after '='
+            if len(vals) < 2:
                 continue
 
             if key == "}":

@@ -133,7 +161,16 @@ class DumpsysADBArtifact(AndroidArtifact):
 
         # TODO: Parse AdbDebuggingManager line in output.
         start_of_json = content.find(b"\n{") + 2
-        end_of_json = content.rfind(b"}\n") - 2
+
+        # Handle both Unix (\n) and Windows (\r\n) line endings
+        end_of_json = content.rfind(b"}\n")
+        if end_of_json == -1:
+            end_of_json = content.rfind(b"}\r\n")
+        if end_of_json == -1:
+            self.log.error("Unable to find end of JSON block in dumpsys output")
+            return
+
+        end_of_json -= 2
         json_content = content[start_of_json:end_of_json].rstrip()
 
         parsed = self.indented_dump_parser(json_content)

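The last DumpsysADBArtifact hunk makes the end-of-block search line-ending aware and logs an error instead of slicing with a bogus index when no terminator is found. A rough standalone sketch of the same lookup; `find_json_block` and the sample bytes are invented for illustration and the slicing details differ slightly from the module's code:

```python
def find_json_block(content: bytes) -> bytes | None:
    """Extract the indented block between the first '\\n{' and the last '}' line."""
    start = content.find(b"\n{")
    if start == -1:
        return None
    start += 2

    # Prefer a Unix terminator, fall back to a Windows one
    end = content.rfind(b"}\n")
    if end == -1:
        end = content.rfind(b"}\r\n")
    if end == -1:
        return None  # no closing brace line found

    return content[start:end].rstrip()


sample = b"header\n{\r\n  key=value\r\n}\r\ntrailer\r\n"
print(find_json_block(sample))
```
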
@@ -51,11 +51,6 @@ ANDROID_DANGEROUS_SETTINGS = [
         "key": "send_action_app_error",
         "safe_value": "1",
     },
-    {
-        "description": "enabled installation of non Google Play apps",
-        "key": "install_non_market_apps",
-        "safe_value": "0",
-    },
     {
         "description": "enabled accessibility services",
         "key": "accessibility_enabled",

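For context, each ANDROID_DANGEROUS_SETTINGS entry pairs a settings key with the value considered safe, and this hunk drops the `install_non_market_apps` entry from that list. A simplified, hypothetical illustration of how such a table can be applied; the `flag_dangerous` helper, the comparison logic, and the sample input are assumptions, not the module's actual implementation:

```python
# Two entries whose fields are visible in the hunk above (the second is the one removed)
DANGEROUS_SETTINGS = [
    {"key": "send_action_app_error", "safe_value": "1"},
    {"key": "install_non_market_apps", "safe_value": "0"},
]


def flag_dangerous(collected: dict[str, str]) -> list[str]:
    """Return the keys whose collected value differs from the known-safe value."""
    return [
        entry["key"]
        for entry in DANGEROUS_SETTINGS
        if collected.get(entry["key"]) not in (None, entry["safe_value"])
    ]


# Hypothetical values pulled from a settings dump
print(flag_dangerous({"send_action_app_error": "0", "install_non_market_apps": "1"}))
```
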
@@ -112,10 +112,18 @@ class Files(AndroidQFModule):
 
     def run(self) -> None:
         if timezone := self._get_device_timezone():
-            device_timezone = zoneinfo.ZoneInfo(timezone)
+            try:
+                device_timezone = zoneinfo.ZoneInfo(timezone)
+            except zoneinfo.ZoneInfoNotFoundError:
+                self.log.warning("Device timezone '%s' not found, using UTC", timezone)
+                device_timezone = datetime.timezone.utc
         else:
             self.log.warning("Unable to determine device timezone, using UTC")
-            device_timezone = zoneinfo.ZoneInfo("UTC")
+            try:
+                device_timezone = zoneinfo.ZoneInfo("UTC")
+            except zoneinfo.ZoneInfoNotFoundError:
+                # Fallback for Windows systems where zoneinfo might not have UTC
+                device_timezone = datetime.timezone.utc
 
         for file in self._get_files_by_pattern("*/files.json"):
             rawdata = self._get_file_content(file).decode("utf-8", errors="ignore")

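This change wraps both ZoneInfo lookups so an unknown or unavailable IANA zone name (common on Windows installs without the tzdata package) degrades to UTC instead of raising. A small sketch of the same fallback pattern; the `resolve_timezone` helper and the zone names passed in are illustrative:

```python
import datetime
import zoneinfo


def resolve_timezone(name: str | None) -> datetime.tzinfo:
    """Return the requested zone, falling back to UTC when it cannot be resolved."""
    if not name:
        return datetime.timezone.utc
    try:
        return zoneinfo.ZoneInfo(name)
    except zoneinfo.ZoneInfoNotFoundError:
        # e.g. Windows without the tzdata package installed
        return datetime.timezone.utc


print(resolve_timezone("Europe/Berlin"))
print(resolve_timezone("Not/AZone"))
```
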
@@ -654,7 +654,8 @@ class Indicators:
            return None
 
        for ioc in self.get_iocs("processes"):
-            parts = file_path.split("/")
+            # Use os-agnostic path splitting to handle both Windows (\) and Unix (/) separators
+            parts = file_path.replace("\\", "/").split("/")
            if ioc["value"] in parts:
                self.log.warning(
                    "Found known suspicious process name mentioned in file at "

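Normalizing backslashes before splitting means indicator matching behaves the same whether the recorded path came from a Unix or a Windows capture. A quick illustration with made-up paths; the `path_parts` helper is not part of the codebase:

```python
def path_parts(file_path: str) -> list[str]:
    """Split a path into components regardless of separator style."""
    return file_path.replace("\\", "/").split("/")


# Both spellings of the same (made-up) path yield the same components
print(path_parts("/data/local/tmp/suspicious_proc"))
print(path_parts("C:\\dump\\data\\local\\tmp\\suspicious_proc"))
print("suspicious_proc" in path_parts("C:\\dump\\suspicious_proc"))
```
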
@@ -1131,5 +1131,9 @@
     {
         "version": "18.5",
         "build": "22F76"
+    },
+    {
+        "version": "18.6",
+        "build": "22G86"
     }
 ]

@@ -95,14 +95,17 @@ class SafariBrowserState(IOSExtraction):
             )
         except sqlite3.OperationalError:
             # Old version iOS <12 likely
-            cur.execute(
-                """
-                    SELECT
-                        title, url, user_visible_url, last_viewed_time, session_data
-                    FROM tabs
-                    ORDER BY last_viewed_time;
-                """
-            )
+            try:
+                cur.execute(
+                    """
+                        SELECT
+                            title, url, user_visible_url, last_viewed_time, session_data
+                        FROM tabs
+                        ORDER BY last_viewed_time;
+                    """
+                )
+            except sqlite3.OperationalError as e:
+                self.log.error(f"Error executing query: {e}")
 
         for row in cur:
             session_entries = []

@@ -116,13 +116,16 @@ class TCC(IOSExtraction):
             )
             db_version = "v2"
         except sqlite3.OperationalError:
-            cur.execute(
-                """SELECT
-                service, client, client_type, allowed,
-                prompt_count
-                FROM access;"""
-            )
-            db_version = "v1"
+            try:
+                cur.execute(
+                    """SELECT
+                    service, client, client_type, allowed,
+                    prompt_count
+                    FROM access;"""
+                )
+                db_version = "v1"
+            except sqlite3.OperationalError as e:
+                self.log.error(f"Error parsing TCC database: {e}")
 
         for row in cur:
             service = row[0]

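Both the SafariBrowserState and TCC hunks follow the same pattern: try the newer schema first, fall back to the older one on sqlite3.OperationalError, and log instead of crashing if even the fallback query fails. A self-contained sketch of that pattern against a throwaway in-memory database; the `fetch_records` helper, the `records` table, and its columns are invented, not the real Safari or TCC schemas:

```python
import logging
import sqlite3

log = logging.getLogger(__name__)


def fetch_records(conn: sqlite3.Connection) -> list[tuple]:
    """Query the new schema first, then the legacy one, logging if both fail."""
    cur = conn.cursor()
    try:
        cur.execute("SELECT name, extra_column FROM records;")  # newer schema
    except sqlite3.OperationalError:
        try:
            cur.execute("SELECT name FROM records;")  # legacy schema
        except sqlite3.OperationalError as exc:
            log.error("Error executing query: %s", exc)
            return []
    return cur.fetchall()


# Throwaway database using only the legacy layout
conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE records (name TEXT);")
conn.execute("INSERT INTO records VALUES ('example');")
print(fetch_records(conn))
```
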