Compare commits

..

19 Commits

Author SHA1 Message Date
Janik Besendorf
00a00ca6e9 Remove resolved TODO about --output requirement in download-apks 2026-03-25 09:03:03 +01:00
Janik Besendorf
2792988626 Support SHA1 and MD5 hash matching in AQF files module 2026-03-25 09:02:09 +01:00
Janik Besendorf
711f7cedc1 Clarify command_line list format matches protobuf schema in tombstone parser 2026-03-25 09:00:44 +01:00
Janik Besendorf
d8c3d1e418 Extract additional timestamps from WebKit ObservedDomains table
Query mostRecentUserInteractionTime and mostRecentWebPushInteractionTime
with fallback to the original 4-column query for older iOS versions.
2026-03-25 08:55:44 +01:00
Janik Besendorf
856c008bc0 Remove confirmed Chrome database path TODOs
Backup IDs verified via SHA-1 of AppDomain-com.google.chrome.ios paths.
2026-03-25 08:55:40 +01:00
Janik Besendorf
5820bc95c1 Replace bare KeyError catch with explicit key check in net_base 2026-03-25 08:44:16 +01:00
Janik Besendorf
7609760cfb Pass branch parameter to GitHub commits API in update checker 2026-03-25 08:44:12 +01:00
Janik Besendorf
7d985f3c97 Refactor b64 encoding in configuration_profiles into helper methods 2026-03-25 08:44:08 +01:00
Janik Besendorf
dcfcf51988 Fix typo in aqf_files.py comment 2026-03-25 08:44:04 +01:00
Janik Besendorf
869f3a110c Narrow bare except to specific exception types in convert_mactime_to_datetime 2026-03-25 08:44:00 +01:00
Janik Besendorf
4495a46688 Remove stale FIXME comment in command.py 2026-03-25 08:43:56 +01:00
Janik Besendorf
d4b1c6bd25 Remove dead commented-out code in webkit_session_resource_log 2026-03-25 08:43:52 +01:00
Janik Besendorf
efbd4bbdc0 Replace split("\n") with splitlines() for platform compatibility 2026-03-24 23:52:04 +01:00
besendorf
f2d9f420f2 Detect uninstall and downgrade in battery daily (#736) 2026-03-16 12:32:54 +01:00
github-actions[bot]
e2f8437831 Add new iOS versions and build numbers (#742)
Co-authored-by: DonnchaC <DonnchaC@users.noreply.github.com>
2026-03-05 05:48:15 +01:00
github-actions[bot]
0134bf80d1 Add new iOS versions and build numbers (#739)
Co-authored-by: DonnchaC <DonnchaC@users.noreply.github.com>
2026-02-19 08:47:07 -05:00
Max-RSF
c8f82f796b Add AQF support for bugreport modules (#741) 2026-02-16 17:11:16 +01:00
github-actions[bot]
61947d17af Add new iOS versions and build numbers (#738) 2026-02-04 20:51:11 +01:00
viktor3002
7173e02a6f Check receiver names for IoCs (#721)
* receiver names are checked if a known malicious app id is a substring

* ruff syntax fixes

---------

Co-authored-by: Viktor <vik@tor.me>
Co-authored-by: besendorf <janik@besendorf.org>
2026-01-10 15:24:20 +01:00
21 changed files with 222 additions and 126 deletions

View File

@@ -14,12 +14,23 @@ class DumpsysBatteryDailyArtifact(AndroidArtifact):
"""
def serialize(self, record: dict) -> Union[dict, list]:
action = record.get("action", "update")
package_name = record["package_name"]
vers = record["vers"]
if vers == "0":
data = f"Recorded uninstall of package {package_name} (vers 0)"
elif action == "downgrade":
prev_vers = record.get("previous_vers", "unknown")
data = f"Recorded downgrade of package {package_name} from vers {prev_vers} to vers {vers}"
else:
data = f"Recorded update of package {package_name} with vers {vers}"
return {
"timestamp": record["from"],
"module": self.__class__.__name__,
"event": "battery_daily",
"data": f"Recorded update of package {record['package_name']} "
f"with vers {record['vers']}",
"data": data,
}
def check_indicators(self) -> None:
@@ -36,6 +47,7 @@ class DumpsysBatteryDailyArtifact(AndroidArtifact):
def parse(self, output: str) -> None:
daily = None
daily_updates = []
package_versions = {} # Track package versions to detect downgrades
for line in output.splitlines():
if line.startswith(" Daily from "):
if len(daily_updates) > 0:
@@ -64,15 +76,44 @@ class DumpsysBatteryDailyArtifact(AndroidArtifact):
break
if not already_seen:
daily_updates.append(
{
"action": "update",
"from": daily["from"],
"to": daily["to"],
"package_name": package_name,
"vers": vers_nr,
}
)
update_record = {
"action": "update",
"from": daily["from"],
"to": daily["to"],
"package_name": package_name,
"vers": vers_nr,
}
# Check for uninstall (version 0)
if vers_nr == "0":
self.log.warning(
"Detected uninstall of package %s (vers 0) on %s",
package_name,
daily["from"],
)
# Check for downgrade
elif package_name in package_versions:
try:
current_vers = int(vers_nr)
previous_vers = int(package_versions[package_name])
if current_vers < previous_vers:
update_record["action"] = "downgrade"
update_record["previous_vers"] = str(previous_vers)
self.log.warning(
"Detected downgrade of package %s from vers %d to vers %d on %s",
package_name,
previous_vers,
current_vers,
daily["from"],
)
except ValueError:
# If version numbers aren't integers, skip comparison
pass
# Update tracking dictionary
package_versions[package_name] = vers_nr
daily_updates.append(update_record)
if len(daily_updates) > 0:
self.results.extend(daily_updates)

View File

@@ -186,7 +186,7 @@ class DumpsysPackagesArtifact(AndroidArtifact):
package = []
in_package_list = False
for line in content.split("\n"):
for line in content.splitlines():
if line.startswith("Packages:"):
in_package_list = True
continue

View File

@@ -8,7 +8,7 @@ from .artifact import AndroidArtifact
class Processes(AndroidArtifact):
def parse(self, entry: str) -> None:
for line in entry.split("\n")[1:]:
for line in entry.splitlines()[1:]:
proc = line.split()
# Skip empty lines

View File

@@ -193,7 +193,7 @@ class TombstoneCrashArtifact(AndroidArtifact):
# eg. "Process uptime: 40s"
tombstone[destination_key] = int(value_clean.rstrip("s"))
elif destination_key == "command_line":
# XXX: Check if command line should be a single string in a list, or a list of strings.
# Wrap in list for consistency with protobuf format (repeated string).
tombstone[destination_key] = [value_clean]
else:
tombstone[destination_key] = value_clean

View File

@@ -117,8 +117,6 @@ def download_apks(ctx, all_apks, virustotal, output, from_file, serial, verbose)
if from_file:
download = DownloadAPKs.from_json(from_file)
else:
# TODO: Do we actually want to be able to run without storing any
# file?
if not output:
log.critical("You need to specify an output folder with --output!")
ctx.exit(1)

View File

@@ -105,15 +105,15 @@ class AQFFiles(AndroidQFModule):
)
self.detected.append(result)
if result.get("sha256", "") == "":
continue
ioc = self.indicators.check_file_hash(result["sha256"])
if ioc:
result["matched_indicator"] = ioc
self.detected.append(result)
# TODO: adds SHA1 and MD5 when available in MVT
for hash_key in ("sha256", "sha1", "md5"):
file_hash = result.get(hash_key, "")
if not file_hash:
continue
ioc = self.indicators.check_file_hash(file_hash)
if ioc:
result["matched_indicator"] = ioc
self.detected.append(result)
break
def run(self) -> None:
if timezone := self._get_device_timezone():
@@ -128,7 +128,7 @@ class AQFFiles(AndroidQFModule):
data = json.loads(rawdata)
except json.decoder.JSONDecodeError:
data = []
for line in rawdata.split("\n"):
for line in rawdata.splitlines():
if line.strip() == "":
continue
data.append(json.loads(line))
@@ -139,7 +139,7 @@ class AQFFiles(AndroidQFModule):
utc_timestamp = datetime.datetime.fromtimestamp(
file_data[ts], tz=datetime.timezone.utc
)
# Convert the UTC timestamp to local tiem on Android device's local timezone
# Convert the UTC timestamp to local time on Android device's local timezone
local_timestamp = utc_timestamp.astimezone(device_timezone)
# HACK: We only output the UTC timestamp in convert_datetime_to_iso, we

View File

@@ -39,7 +39,7 @@ class AQFSettings(SettingsArtifact, AndroidQFModule):
self.results[namespace] = {}
data = self._get_file_content(setting_file)
for line in data.decode("utf-8").split("\n"):
for line in data.decode("utf-8").splitlines():
line = line.strip()
try:
key, value = line.split("=", 1)

View File

@@ -84,13 +84,17 @@ class BugReportModule(MVTModule):
return self._get_file_content(main_content.decode().strip())
except KeyError:
return None
else:
dumpstate_logs = self._get_files_by_pattern("dumpState_*.log")
if not dumpstate_logs:
return None
dumpstate_logs = self._get_files_by_pattern("dumpState_*.log")
if dumpstate_logs:
return self._get_file_content(dumpstate_logs[0])
dumpsys_files = self._get_files_by_pattern("*/dumpsys.txt")
if dumpsys_files:
return self._get_file_content(dumpsys_files[0])
return None
def _get_file_modification_time(self, file_path: str) -> dict:
if self.zip_archive:
file_timetuple = self.zip_archive.getinfo(file_path).date_time

View File

@@ -34,6 +34,20 @@ class DumpsysReceivers(DumpsysReceiversArtifact, BugReportModule):
self.results = results if results else {}
def check_indicators(self) -> None:
for result in self.results:
if self.indicators:
receiver_name = self.results[result][0]["receiver"]
            # return IoC if the stix2 process name is a substring of the receiver name
ioc = self.indicators.check_receiver_prefix(receiver_name)
if ioc:
self.results[result][0]["matched_indicator"] = ioc
self.detected.append(result)
continue
def run(self) -> None:
content = self._get_dumpstate_file()
if not content:

View File

@@ -222,7 +222,6 @@ class Command:
if self.module_name and module.__name__ != self.module_name:
continue
# FIXME: do we need the logger here
module_logger = logging.getLogger(module.__module__)
m = module(

View File

@@ -768,6 +768,30 @@ class Indicators:
return None
def check_receiver_prefix(self, receiver_name: str) -> Union[dict, None]:
"""Check the provided receiver name against the list of indicators.
        An IoC match is detected when an indicator app ID is a substring of the receiver name
        :param receiver_name: Receiver name to check against the list of indicators
        :type receiver_name: str
:returns: Indicator details if matched, otherwise None
"""
if not receiver_name:
return None
for ioc in self.get_iocs("app_ids"):
if ioc["value"].lower() in receiver_name.lower():
self.log.warning(
'Found a known suspicious receiver with name "%s" '
'matching indicators from "%s"',
receiver_name,
ioc["name"],
)
return ioc
return None
def check_android_property_name(self, property_name: str) -> Optional[dict]:
"""Check the android property name against the list of indicators.

View File

@@ -180,10 +180,8 @@ class IndicatorsUpdates:
def _get_remote_file_latest_commit(
self, owner: str, repo: str, branch: str, path: str
) -> int:
# TODO: The branch is currently not taken into consideration.
# How do we specify which branch to look up to the API?
file_commit_url = (
f"https://api.github.com/repos/{owner}/{repo}/commits?path={path}"
f"https://api.github.com/repos/{owner}/{repo}/commits?path={path}&sha={branch}"
)
try:
res = requests.get(file_commit_url, timeout=5)

View File

@@ -119,10 +119,9 @@ def convert_mactime_to_datetime(timestamp: Union[int, float], from_2001: bool =
if from_2001:
timestamp = timestamp + 978307200
# TODO: This is rather ugly. Happens sometimes with invalid timestamps.
try:
return convert_unix_to_utc_datetime(timestamp)
except Exception:
except (OSError, OverflowError, ValueError):
return None

View File

@@ -631,6 +631,10 @@
"build": "16H81",
"version": "12.5.7"
},
{
"version": "12.5.8",
"build": "16H88"
},
{
"build": "17A577",
"version": "13.0"
@@ -899,6 +903,10 @@
"version": "15.8.5",
"build": "19H394"
},
{
"version": "15.8.6",
"build": "19H402"
},
{
"build": "20A362",
"version": "16.0"
@@ -1008,6 +1016,10 @@
"version": "16.7.12",
"build": "20H364"
},
{
"version": "16.7.14",
"build": "20H370"
},
{
"version": "17.0",
"build": "21A327"
@@ -1164,6 +1176,18 @@
"version": "18.7.3",
"build": "22H217"
},
{
"version": "18.7.4",
"build": "22H218"
},
{
"version": "18.7.5",
"build": "22H311"
},
{
"version": "18.7.6",
"build": "22H320"
},
{
"version": "26",
"build": "23A341"
@@ -1179,5 +1203,17 @@
{
"version": "26.2",
"build": "23C55"
},
{
"version": "26.2.1",
"build": "23C71"
},
{
"version": "26.3",
"build": "23D127"
},
{
"version": "26.3.1",
"build": "23D8133"
}
]

View File

@@ -87,6 +87,35 @@ class ConfigurationProfiles(IOSExtraction):
self.detected.append(result)
continue
@staticmethod
def _b64encode_key(d: dict, key: str) -> None:
if key in d:
d[key] = b64encode(d[key])
@staticmethod
def _b64encode_keys(d: dict, keys: list) -> None:
for key in keys:
if key in d:
d[key] = b64encode(d[key])
def _b64encode_plist_bytes(self, plist: dict) -> None:
"""Encode binary plist values to base64 for JSON serialization."""
if "SignerCerts" in plist:
plist["SignerCerts"] = [b64encode(x) for x in plist["SignerCerts"]]
self._b64encode_keys(plist, ["PushTokenDataSentToServerKey", "LastPushTokenHash"])
if "OTAProfileStub" in plist:
stub = plist["OTAProfileStub"]
if "SignerCerts" in stub:
stub["SignerCerts"] = [b64encode(x) for x in stub["SignerCerts"]]
if "PayloadContent" in stub:
self._b64encode_key(stub["PayloadContent"], "EnrollmentIdentityPersistentID")
if "PayloadContent" in plist:
for entry in plist["PayloadContent"]:
self._b64encode_keys(entry, ["PERSISTENT_REF", "IdentityPersistentRef"])
def run(self) -> None:
for conf_file in self._get_backup_files_from_manifest(
domain=CONF_PROFILES_DOMAIN
@@ -115,65 +144,7 @@ class ConfigurationProfiles(IOSExtraction):
except Exception:
conf_plist = {}
# TODO: Tidy up the following code hell.
if "SignerCerts" in conf_plist:
conf_plist["SignerCerts"] = [
b64encode(x) for x in conf_plist["SignerCerts"]
]
if "OTAProfileStub" in conf_plist:
if "SignerCerts" in conf_plist["OTAProfileStub"]:
conf_plist["OTAProfileStub"]["SignerCerts"] = [
b64encode(x)
for x in conf_plist["OTAProfileStub"]["SignerCerts"]
]
if "PayloadContent" in conf_plist["OTAProfileStub"]:
if (
"EnrollmentIdentityPersistentID"
in conf_plist["OTAProfileStub"]["PayloadContent"]
):
conf_plist["OTAProfileStub"]["PayloadContent"][
"EnrollmentIdentityPersistentID"
] = b64encode(
conf_plist["OTAProfileStub"]["PayloadContent"][
"EnrollmentIdentityPersistentID"
]
)
if "PushTokenDataSentToServerKey" in conf_plist:
conf_plist["PushTokenDataSentToServerKey"] = b64encode(
conf_plist["PushTokenDataSentToServerKey"]
)
if "LastPushTokenHash" in conf_plist:
conf_plist["LastPushTokenHash"] = b64encode(
conf_plist["LastPushTokenHash"]
)
if "PayloadContent" in conf_plist:
for content_entry in range(len(conf_plist["PayloadContent"])):
if "PERSISTENT_REF" in conf_plist["PayloadContent"][content_entry]:
conf_plist["PayloadContent"][content_entry][
"PERSISTENT_REF"
] = b64encode(
conf_plist["PayloadContent"][content_entry][
"PERSISTENT_REF"
]
)
if (
"IdentityPersistentRef"
in conf_plist["PayloadContent"][content_entry]
):
conf_plist["PayloadContent"][content_entry][
"IdentityPersistentRef"
] = b64encode(
conf_plist["PayloadContent"][content_entry][
"IdentityPersistentRef"
]
)
self._b64encode_plist_bytes(conf_plist)
self.results.append(
{

View File

@@ -73,7 +73,7 @@ class ShutdownLog(IOSExtraction):
recent_processes = []
times_delayed = 0
delay = 0.0
for line in content.split("\n"):
for line in content.splitlines():
line = line.strip()
if line.startswith("remaining client pid:"):

View File

@@ -11,7 +11,6 @@ from mvt.common.utils import convert_chrometime_to_datetime, convert_datetime_to
from ..base import IOSExtraction
CHROME_FAVICON_BACKUP_IDS = ["55680ab883d0fdcffd94f959b1632e5fbbb18c5b"]
# TODO: Confirm Chrome database path.
CHROME_FAVICON_ROOT_PATHS = [
"private/var/mobile/Containers/Data/Application/*/Library/Application Support/Google/Chrome/Default/Favicons",
]

View File

@@ -13,7 +13,6 @@ from ..base import IOSExtraction
CHROME_HISTORY_BACKUP_IDS = [
"faf971ce92c3ac508c018dce1bef2a8b8e9838f1",
]
# TODO: Confirm Chrome database path.
CHROME_HISTORY_ROOT_PATHS = [
"private/var/mobile/Containers/Data/Application/*/Library/Application Support/Google/Chrome/Default/History", # pylint: disable=line-too-long
]

View File

@@ -79,32 +79,55 @@ class WebkitResourceLoadStatistics(IOSExtraction):
cur = conn.cursor()
try:
# FIXME: table contains extra fields with timestamp here
cur.execute(
"""
SELECT
domainID,
registrableDomain,
lastSeen,
hadUserInteraction
hadUserInteraction,
mostRecentUserInteractionTime,
mostRecentWebPushInteractionTime
from ObservedDomains;
"""
)
has_extra_timestamps = True
except sqlite3.OperationalError:
return
try:
cur.execute(
"""
SELECT
domainID,
registrableDomain,
lastSeen,
hadUserInteraction
from ObservedDomains;
"""
)
has_extra_timestamps = False
except sqlite3.OperationalError:
return
for row in cur:
self.results.append(
{
"domain_id": row[0],
"registrable_domain": row[1],
"last_seen": row[2],
"had_user_interaction": bool(row[3]),
"last_seen_isodate": convert_unix_to_iso(row[2]),
"domain": domain,
"path": path,
}
)
result = {
"domain_id": row[0],
"registrable_domain": row[1],
"last_seen": row[2],
"had_user_interaction": bool(row[3]),
"last_seen_isodate": convert_unix_to_iso(row[2]),
"domain": domain,
"path": path,
}
if has_extra_timestamps:
result["most_recent_user_interaction_time"] = row[4]
result["most_recent_user_interaction_time_isodate"] = (
convert_unix_to_iso(row[4])
)
result["most_recent_web_push_interaction_time"] = row[5]
result["most_recent_web_push_interaction_time_isodate"] = (
convert_unix_to_iso(row[5])
)
self.results.append(result)
if len(self.results) > 0:
self.log.info(

View File

@@ -76,12 +76,6 @@ class WebkitSessionResourceLog(IOSExtraction):
entry["redirect_destination"]
)
# TODO: Currently not used.
# subframe_origins = self._extract_domains(
# entry["subframe_under_origin"])
# subresource_domains = self._extract_domains(
# entry["subresource_under_origin"])
all_origins = set(
[entry["origin"]] + source_domains + destination_domains
)

View File

@@ -311,14 +311,11 @@ class NetBase(IOSExtraction):
self.results = sorted(self.results, key=operator.itemgetter("first_isodate"))
def check_indicators(self) -> None:
# Check for manipulated process records.
# TODO: Catching KeyError for live_isodate for retro-compatibility.
# This is not very good.
try:
# check_manipulated/find_deleted require "live_isodate" and
# "live_proc_id" keys which may be absent in older result formats.
if self.results and "live_isodate" in self.results[0]:
self.check_manipulated()
self.find_deleted()
except KeyError:
pass
if not self.indicators:
return