diff --git a/src/mvt/android/artifacts/mounts.py b/src/mvt/android/artifacts/mounts.py new file mode 100644 index 0000000..6e7b0b6 --- /dev/null +++ b/src/mvt/android/artifacts/mounts.py @@ -0,0 +1,186 @@ +# Mobile Verification Toolkit (MVT) +# Copyright (c) 2021-2023 The MVT Authors. +# Use of this software is governed by the MVT License 1.1 that can be found at +# https://license.mvt.re/1.1/ + +from typing import Any + +from .artifact import AndroidArtifact + +SUSPICIOUS_MOUNT_POINTS = [ + "/system", + "/vendor", + "/product", + "/system_ext", +] + +SUSPICIOUS_OPTIONS = [ + "rw", + "remount", + "noatime", + "nodiratime", +] + +ALLOWLIST_NOATIME = [ + "/system_dlkm", + "/system_ext", + "/product", + "/vendor", + "/vendor_dlkm", +] + + +class Mounts(AndroidArtifact): + """ + This artifact parses mount information from /proc/mounts or similar mount data. + It can detect potentially suspicious mount configurations that may indicate + a rooted or compromised device. + """ + + def parse(self, entry: str) -> None: + """ + Parse mount information from the provided entry. + + Examples: + /dev/block/bootdevice/by-name/system /system ext4 ro,seclabel,relatime 0 0 + /dev/block/dm-12 on / type ext4 (ro,seclabel,noatime) + """ + self.results: list[dict[str, Any]] = [] + + for line in entry.splitlines(): + line = line.strip() + if not line: + continue + + device = None + mount_point = None + filesystem_type = None + mount_options = "" + + if " on " in line and " type " in line: + try: + # Format: device on mount_point type filesystem_type (options) + device_part, rest = line.split(" on ", 1) + device = device_part.strip() + + # Split by 'type' to get mount_point and filesystem info + mount_part, fs_part = rest.split(" type ", 1) + mount_point = mount_part.strip() + + # Parse filesystem and options + if "(" in fs_part and fs_part.endswith(")"): + # Format: filesystem_type (options) + fs_and_opts = fs_part.strip() + paren_idx = fs_and_opts.find("(") + filesystem_type = fs_and_opts[:paren_idx].strip() + mount_options = fs_and_opts[paren_idx + 1 : -1].strip() + else: + # No options in parentheses, just filesystem type + filesystem_type = fs_part.strip() + mount_options = "" + + # Skip if we don't have essential info + if not device or not mount_point or not filesystem_type: + continue + + # Parse options into list + options_list = ( + [opt.strip() for opt in mount_options.split(",") if opt.strip()] + if mount_options + else [] + ) + + # Check if it's a system partition + is_system_partition = mount_point in SUSPICIOUS_MOUNT_POINTS or any( + mount_point.startswith(sp) for sp in SUSPICIOUS_MOUNT_POINTS + ) + + # Check if it's mounted read-write + is_read_write = "rw" in options_list + + mount_entry = { + "device": device, + "mount_point": mount_point, + "filesystem_type": filesystem_type, + "mount_options": mount_options, + "options_list": options_list, + "is_system_partition": is_system_partition, + "is_read_write": is_read_write, + } + + self.results.append(mount_entry) + + except ValueError: + # If parsing fails, skip this line + continue + else: + # Skip lines that don't match expected format + continue + + def check_indicators(self) -> None: + """ + Check for suspicious mount configurations that may indicate root access + or other security concerns. 
+ """ + system_rw_mounts = [] + suspicious_mounts = [] + + for mount in self.results: + mount_point = mount["mount_point"] + options = mount["options_list"] + + # Check for system partitions mounted as read-write + if mount["is_system_partition"] and mount["is_read_write"]: + system_rw_mounts.append(mount) + if mount_point == "/system": + self.log.warning( + "Root detected /system partition is mounted as read-write (rw). " + ) + else: + self.log.warning( + "System partition %s is mounted as read-write (rw). This may indicate system modifications.", + mount_point, + ) + + # Check for other suspicious mount options + suspicious_opts = [opt for opt in options if opt in SUSPICIOUS_OPTIONS] + if suspicious_opts and mount["is_system_partition"]: + if ( + "noatime" in mount["mount_options"] + and mount["mount_point"] in ALLOWLIST_NOATIME + ): + continue + suspicious_mounts.append(mount) + self.log.warning( + "Suspicious mount options found for %s: %s", + mount_point, + ", ".join(suspicious_opts), + ) + + # Log interesting mount information + if mount_point == "/data" or mount_point.startswith("/sdcard"): + self.log.info( + "Data partition: %s mounted as %s with options: %s", + mount_point, + mount["filesystem_type"], + mount["mount_options"], + ) + + self.log.info("Parsed %d mount entries", len(self.results)) + + # Check indicators if available + if not self.indicators: + return + + for mount in self.results: + # Check if any mount points match indicators + ioc = self.indicators.check_file_path(mount.get("mount_point", "")) + if ioc: + mount["matched_indicator"] = ioc + self.detected.append(mount) + + # Check device paths for indicators + ioc = self.indicators.check_file_path(mount.get("device", "")) + if ioc: + mount["matched_indicator"] = ioc + self.detected.append(mount) diff --git a/src/mvt/android/artifacts/tombstone_crashes.py b/src/mvt/android/artifacts/tombstone_crashes.py index 5193ff8..0b8e522 100644 --- a/src/mvt/android/artifacts/tombstone_crashes.py +++ b/src/mvt/android/artifacts/tombstone_crashes.py @@ -70,7 +70,7 @@ class TombstoneCrashResult(pydantic.BaseModel): class TombstoneCrashArtifact(AndroidArtifact): - """ " + """ Parser for Android tombstone crash files. This parser can parse both text and protobuf tombstone crash files. @@ -121,9 +121,7 @@ class TombstoneCrashArtifact(AndroidArtifact): def parse_protobuf( self, file_name: str, file_timestamp: datetime.datetime, data: bytes ) -> None: - """ - Parse Android tombstone crash files from a protobuf object. - """ + """Parse Android tombstone crash files from a protobuf object.""" tombstone_pb = Tombstone().parse(data) tombstone_dict = tombstone_pb.to_dict( betterproto.Casing.SNAKE, include_default_values=True @@ -144,21 +142,23 @@ class TombstoneCrashArtifact(AndroidArtifact): def parse( self, file_name: str, file_timestamp: datetime.datetime, content: bytes ) -> None: - """ - Parse text Android tombstone crash files. 
- """ - - # Split the tombstone file into a dictonary + """Parse text Android tombstone crash files.""" tombstone_dict = { "file_name": file_name, "file_timestamp": convert_datetime_to_iso(file_timestamp), } lines = content.decode("utf-8").splitlines() - for line in lines: + for line_num, line in enumerate(lines, 1): if not line.strip() or TOMBSTONE_DELIMITER in line: continue - for key, destination_key in TOMBSTONE_TEXT_KEY_MAPPINGS.items(): - self._parse_tombstone_line(line, key, destination_key, tombstone_dict) + try: + for key, destination_key in TOMBSTONE_TEXT_KEY_MAPPINGS.items(): + if self._parse_tombstone_line( + line, key, destination_key, tombstone_dict + ): + break + except Exception as e: + raise ValueError(f"Error parsing line {line_num}: {str(e)}") # Validate the tombstone and add it to the results tombstone = TombstoneCrashResult.model_validate(tombstone_dict) @@ -168,7 +168,7 @@ class TombstoneCrashArtifact(AndroidArtifact): self, line: str, key: str, destination_key: str, tombstone: dict ) -> bool: if not line.startswith(f"{key}"): - return None + return False if key == "pid": return self._load_pid_line(line, tombstone) @@ -200,51 +200,50 @@ class TombstoneCrashArtifact(AndroidArtifact): return True def _load_pid_line(self, line: str, tombstone: dict) -> bool: - pid_part, tid_part, name_part = [part.strip() for part in line.split(",")] + try: + parts = line.split(" >>> ") if " >>> " in line else line.split(">>>") + process_info = parts[0] - pid_key, pid_value = pid_part.split(":", 1) - if pid_key != "pid": - raise ValueError(f"Expected key pid, got {pid_key}") - pid_value = int(pid_value.strip()) + # Parse pid, tid, name from process info + info_parts = [p.strip() for p in process_info.split(",")] + for info in info_parts: + key, value = info.split(":", 1) + key = key.strip() + value = value.strip() - tid_key, tid_value = tid_part.split(":", 1) - if tid_key != "tid": - raise ValueError(f"Expected key tid, got {tid_key}") - tid_value = int(tid_value.strip()) + if key == "pid": + tombstone["pid"] = int(value) + elif key == "tid": + tombstone["tid"] = int(value) + elif key == "name": + tombstone["process_name"] = value - name_key, name_value = name_part.split(":", 1) - if name_key != "name": - raise ValueError(f"Expected key name, got {name_key}") - name_value = name_value.strip() - process_name, binary_path = self._parse_process_name(name_value, tombstone) + # Extract binary path if it exists + if len(parts) > 1: + tombstone["binary_path"] = parts[1].strip().rstrip(" <") - tombstone["pid"] = pid_value - tombstone["tid"] = tid_value - tombstone["process_name"] = process_name - tombstone["binary_path"] = binary_path - return True + return True - def _parse_process_name(self, process_name_part, tombstone: dict) -> bool: - process_name, process_path = process_name_part.split(">>>") - process_name = process_name.strip() - binary_path = process_path.strip().split(" ")[0] - return process_name, binary_path + except Exception as e: + raise ValueError(f"Failed to parse PID line: {str(e)}") def _load_signal_line(self, line: str, tombstone: dict) -> bool: - signal, code, _ = [part.strip() for part in line.split(",", 2)] - signal = signal.split("signal ")[1] - signal_code, signal_name = signal.split(" ") - signal_name = signal_name.strip("()") + signal_part, code_part = map(str.strip, line.split(",")[:2]) - code_part = code.split("code ")[1] - code_number, code_name = code_part.split(" ") - code_name = code_name.strip("()") + def parse_part(part: str, prefix: str) -> tuple[int, str]: + 
match = part.split(prefix)[1] + number = int(match.split()[0]) + name = match.split("(")[1].split(")")[0] if "(" in match else "UNKNOWN" + return number, name + + signal_number, signal_name = parse_part(signal_part, "signal ") + code_number, code_name = parse_part(code_part, "code ") tombstone["signal_info"] = { - "code": int(code_number), + "code": code_number, "code_name": code_name, "name": signal_name, - "number": int(signal_code), + "number": signal_number, } return True @@ -256,7 +255,6 @@ class TombstoneCrashArtifact(AndroidArtifact): @staticmethod def _parse_timestamp_string(timestamp: str) -> str: timestamp_parsed = parser.parse(timestamp) - # HACK: Swap the local timestamp to UTC, so keep the original time and avoid timezone conversion. local_timestamp = timestamp_parsed.replace(tzinfo=datetime.timezone.utc) return convert_datetime_to_iso(local_timestamp) diff --git a/src/mvt/android/cli.py b/src/mvt/android/cli.py index 8e9086f..ae225d9 100644 --- a/src/mvt/android/cli.py +++ b/src/mvt/android/cli.py @@ -31,6 +31,8 @@ from mvt.common.help import ( HELP_MSG_HASHES, HELP_MSG_CHECK_IOCS, HELP_MSG_STIX2, + HELP_MSG_DISABLE_UPDATE_CHECK, + HELP_MSG_DISABLE_INDICATOR_UPDATE_CHECK, ) from mvt.common.logo import logo from mvt.common.updates import IndicatorsUpdates @@ -53,12 +55,37 @@ log = logging.getLogger("mvt") CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"]) +def _get_disable_flags(ctx): + """Helper function to safely get disable flags from context.""" + if ctx.obj is None: + return False, False + return ( + ctx.obj.get("disable_version_check", False), + ctx.obj.get("disable_indicator_check", False), + ) + + # ============================================================================== # Main # ============================================================================== @click.group(invoke_without_command=False) -def cli(): - logo() +@click.option( + "--disable-update-check", is_flag=True, help=HELP_MSG_DISABLE_UPDATE_CHECK +) +@click.option( + "--disable-indicator-update-check", + is_flag=True, + help=HELP_MSG_DISABLE_INDICATOR_UPDATE_CHECK, +) +@click.pass_context +def cli(ctx, disable_update_check, disable_indicator_update_check): + ctx.ensure_object(dict) + ctx.obj["disable_version_check"] = disable_update_check + ctx.obj["disable_indicator_check"] = disable_indicator_update_check + logo( + disable_version_check=disable_update_check, + disable_indicator_check=disable_indicator_update_check, + ) # ============================================================================== @@ -166,6 +193,8 @@ def check_adb( module_name=module, serial=serial, module_options=module_options, + disable_version_check=_get_disable_flags(ctx)[0], + disable_indicator_check=_get_disable_flags(ctx)[1], ) if list_modules: @@ -212,6 +241,8 @@ def check_bugreport(ctx, iocs, output, list_modules, module, verbose, bugreport_ ioc_files=iocs, module_name=module, hashes=True, + disable_version_check=_get_disable_flags(ctx)[0], + disable_indicator_check=_get_disable_flags(ctx)[1], ) if list_modules: @@ -274,6 +305,8 @@ def check_backup( "interactive": not non_interactive, "backup_password": cli_load_android_backup_password(log, backup_password), }, + disable_version_check=_get_disable_flags(ctx)[0], + disable_indicator_check=_get_disable_flags(ctx)[1], ) if list_modules: @@ -338,6 +371,8 @@ def check_androidqf( "interactive": not non_interactive, "backup_password": cli_load_android_backup_password(log, backup_password), }, + disable_version_check=_get_disable_flags(ctx)[0], + 
disable_indicator_check=_get_disable_flags(ctx)[1],
     )

     if list_modules:
@@ -372,7 +407,13 @@ def check_androidqf(
 @click.argument("FOLDER", type=click.Path(exists=True))
 @click.pass_context
 def check_iocs(ctx, iocs, list_modules, module, folder):
-    cmd = CmdCheckIOCS(target_path=folder, ioc_files=iocs, module_name=module)
+    cmd = CmdCheckIOCS(
+        target_path=folder,
+        ioc_files=iocs,
+        module_name=module,
+        disable_version_check=_get_disable_flags(ctx)[0],
+        disable_indicator_check=_get_disable_flags(ctx)[1],
+    )
     cmd.modules = BACKUP_MODULES + ADB_MODULES + BUGREPORT_MODULES

     if list_modules:
diff --git a/src/mvt/android/cmd_check_adb.py b/src/mvt/android/cmd_check_adb.py
index c1444f4..3388792 100644
--- a/src/mvt/android/cmd_check_adb.py
+++ b/src/mvt/android/cmd_check_adb.py
@@ -26,6 +26,8 @@ class CmdAndroidCheckADB(Command):
         module_options: Optional[dict] = None,
         hashes: Optional[bool] = False,
         sub_command: Optional[bool] = False,
+        disable_version_check: bool = False,
+        disable_indicator_check: bool = False,
     ) -> None:
         super().__init__(
             target_path=target_path,
@@ -38,6 +40,8 @@ class CmdAndroidCheckADB(Command):
             hashes=hashes,
             sub_command=sub_command,
             log=log,
+            disable_version_check=disable_version_check,
+            disable_indicator_check=disable_indicator_check,
         )

         self.name = "check-adb"
diff --git a/src/mvt/android/cmd_check_androidqf.py b/src/mvt/android/cmd_check_androidqf.py
index 580b5e2..2a7ef20 100644
--- a/src/mvt/android/cmd_check_androidqf.py
+++ b/src/mvt/android/cmd_check_androidqf.py
@@ -45,6 +45,8 @@ class CmdAndroidCheckAndroidQF(Command):
         module_options: Optional[dict] = None,
         hashes: Optional[bool] = False,
         sub_command: Optional[bool] = False,
+        disable_version_check: bool = False,
+        disable_indicator_check: bool = False,
     ) -> None:
         super().__init__(
             target_path=target_path,
@@ -57,6 +59,8 @@ class CmdAndroidCheckAndroidQF(Command):
             hashes=hashes,
             sub_command=sub_command,
             log=log,
+            disable_version_check=disable_version_check,
+            disable_indicator_check=disable_indicator_check,
         )

         self.name = "check-androidqf"
diff --git a/src/mvt/android/cmd_check_backup.py b/src/mvt/android/cmd_check_backup.py
index e366d2b..8bdb05e 100644
--- a/src/mvt/android/cmd_check_backup.py
+++ b/src/mvt/android/cmd_check_backup.py
@@ -39,6 +39,8 @@ class CmdAndroidCheckBackup(Command):
         module_options: Optional[dict] = None,
         hashes: Optional[bool] = False,
         sub_command: Optional[bool] = False,
+        disable_version_check: bool = False,
+        disable_indicator_check: bool = False,
     ) -> None:
         super().__init__(
             target_path=target_path,
@@ -51,6 +53,8 @@ class CmdAndroidCheckBackup(Command):
             hashes=hashes,
             sub_command=sub_command,
             log=log,
+            disable_version_check=disable_version_check,
+            disable_indicator_check=disable_indicator_check,
         )

         self.name = "check-backup"
diff --git a/src/mvt/android/cmd_check_bugreport.py b/src/mvt/android/cmd_check_bugreport.py
index a3d9b3b..7cc827f 100644
--- a/src/mvt/android/cmd_check_bugreport.py
+++ b/src/mvt/android/cmd_check_bugreport.py
@@ -30,6 +30,8 @@ class CmdAndroidCheckBugreport(Command):
         module_options: Optional[dict] = None,
         hashes: Optional[bool] = False,
         sub_command: Optional[bool] = False,
+        disable_version_check: bool = False,
+        disable_indicator_check: bool = False,
     ) -> None:
         super().__init__(
             target_path=target_path,
@@ -42,6 +44,8 @@ class CmdAndroidCheckBugreport(Command):
             hashes=hashes,
             sub_command=sub_command,
             log=log,
+            disable_version_check=disable_version_check,
+            disable_indicator_check=disable_indicator_check,
         )

         self.name = "check-bugreport"
diff --git a/src/mvt/android/modules/androidqf/__init__.py b/src/mvt/android/modules/androidqf/__init__.py
index c6a3e1f..852f73f 100644
--- a/src/mvt/android/modules/androidqf/__init__.py
+++ b/src/mvt/android/modules/androidqf/__init__.py
@@ -9,6 +9,10 @@ from .aqf_processes import AQFProcesses
 from .aqf_settings import AQFSettings
 from .aqf_files import AQFFiles
 from .sms import SMS
+from .files import Files
+from .root_binaries import RootBinaries
+from .mounts import Mounts
+

 ANDROIDQF_MODULES = [
     AQFPackages,
@@ -17,4 +21,7 @@ ANDROIDQF_MODULES = [
     AQFSettings,
     AQFFiles,
     SMS,
+    Files,
+    RootBinaries,
+    Mounts,
 ]
diff --git a/src/mvt/android/modules/androidqf/mounts.py b/src/mvt/android/modules/androidqf/mounts.py
new file mode 100644
index 0000000..1a5ba5c
--- /dev/null
+++ b/src/mvt/android/modules/androidqf/mounts.py
@@ -0,0 +1,74 @@
+# Mobile Verification Toolkit (MVT)
+# Copyright (c) 2021-2023 The MVT Authors.
+# Use of this software is governed by the MVT License 1.1 that can be found at
+# https://license.mvt.re/1.1/
+
+import logging
+import json
+from typing import Optional
+
+from mvt.android.artifacts.mounts import Mounts as MountsArtifact
+
+from .base import AndroidQFModule
+
+
+class Mounts(MountsArtifact, AndroidQFModule):
+    """This module extracts and analyzes mount information from AndroidQF acquisitions."""
+
+    def __init__(
+        self,
+        file_path: Optional[str] = None,
+        target_path: Optional[str] = None,
+        results_path: Optional[str] = None,
+        module_options: Optional[dict] = None,
+        log: logging.Logger = logging.getLogger(__name__),
+        results: Optional[list] = None,
+    ) -> None:
+        super().__init__(
+            file_path=file_path,
+            target_path=target_path,
+            results_path=results_path,
+            module_options=module_options,
+            log=log,
+            results=results,
+        )
+        self.results = []
+
+    def run(self) -> None:
+        """
+        Run the mounts analysis module.
+
+        This module looks for mount information files collected by androidqf
+        and analyzes them for suspicious configurations, particularly focusing
+        on detecting root access indicators like /system mounted as read-write.
+        """
+        mount_files = self._get_files_by_pattern("*/mounts.json")
+
+        if not mount_files:
+            self.log.info("No mount information file found")
+            return
+
+        self.log.info("Found mount information file: %s", mount_files[0])
+
+        try:
+            data = self._get_file_content(mount_files[0]).decode(
+                "utf-8", errors="replace"
+            )
+        except Exception as exc:
+            self.log.error("Failed to read mount information file: %s", exc)
+            return
+
+        # Parse the mount data
+        try:
+            json_data = json.loads(data)
+
+            if isinstance(json_data, list):
+                # AndroidQF format: array of strings like
+                # "/dev/block/dm-12 on / type ext4 (ro,seclabel,noatime)"
+                mount_content = "\n".join(json_data)
+                self.parse(mount_content)
+        except Exception as exc:
+            self.log.error("Failed to parse mount information: %s", exc)
+            return
+
+        self.log.info("Extracted a total of %d mount entries", len(self.results))
diff --git a/src/mvt/android/modules/androidqf/root_binaries.py b/src/mvt/android/modules/androidqf/root_binaries.py
new file mode 100644
index 0000000..c5df729
--- /dev/null
+++ b/src/mvt/android/modules/androidqf/root_binaries.py
@@ -0,0 +1,121 @@
+# Mobile Verification Toolkit (MVT)
+# Copyright (c) 2021-2023 The MVT Authors.
+# Use of this software is governed by the MVT License 1.1 that can be found at +# https://license.mvt.re/1.1/ + +import json +import logging +from typing import Optional + +from .base import AndroidQFModule + + +class RootBinaries(AndroidQFModule): + """This module analyzes root_binaries.json for root binaries found by androidqf.""" + + def __init__( + self, + file_path: Optional[str] = None, + target_path: Optional[str] = None, + results_path: Optional[str] = None, + module_options: Optional[dict] = None, + log: logging.Logger = logging.getLogger(__name__), + results: Optional[list] = None, + ) -> None: + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + module_options=module_options, + log=log, + results=results, + ) + + def serialize(self, record: dict) -> dict: + return { + "timestamp": record.get("timestamp"), + "module": self.__class__.__name__, + "event": "root_binary_found", + "data": f"Root binary found: {record['path']} (binary: {record['binary_name']})", + } + + def check_indicators(self) -> None: + """Check for indicators of device rooting.""" + if not self.results: + return + + # All found root binaries are considered indicators of rooting + for result in self.results: + self.log.warning( + 'Found root binary "%s" at path "%s"', + result["binary_name"], + result["path"], + ) + self.detected.append(result) + + if self.detected: + self.log.warning( + "Device shows signs of rooting with %d root binaries found", + len(self.detected), + ) + + def run(self) -> None: + """Run the root binaries analysis.""" + root_binaries_files = self._get_files_by_pattern("*/root_binaries.json") + + if not root_binaries_files: + self.log.info("No root_binaries.json file found") + return + + rawdata = self._get_file_content(root_binaries_files[0]).decode( + "utf-8", errors="ignore" + ) + + try: + root_binary_paths = json.loads(rawdata) + except json.JSONDecodeError as e: + self.log.error("Failed to parse root_binaries.json: %s", e) + return + + if not isinstance(root_binary_paths, list): + self.log.error("Expected root_binaries.json to contain a list of paths") + return + + # Known root binary names that might be found and their descriptions + # This maps the binary name to a human-readable description + known_root_binaries = { + "su": "SuperUser binary", + "busybox": "BusyBox utilities", + "supersu": "SuperSU root management", + "Superuser.apk": "Superuser app", + "KingoUser.apk": "KingRoot app", + "SuperSu.apk": "SuperSU app", + "magisk": "Magisk root framework", + "magiskhide": "Magisk hide utility", + "magiskinit": "Magisk init binary", + "magiskpolicy": "Magisk policy binary", + } + + for path in root_binary_paths: + if not path or not isinstance(path, str): + continue + + # Extract binary name from path + binary_name = path.split("/")[-1].lower() + + # Check if this matches a known root binary by exact name match + description = "Unknown root binary" + for known_binary in known_root_binaries: + if binary_name == known_binary.lower(): + description = known_root_binaries[known_binary] + break + + result = { + "path": path.strip(), + "binary_name": binary_name, + "description": description, + } + + self.results.append(result) + + self.log.info("Found %d root binaries", len(self.results)) diff --git a/src/mvt/common/cmd_check_iocs.py b/src/mvt/common/cmd_check_iocs.py index 5e12c68..1111b77 100644 --- a/src/mvt/common/cmd_check_iocs.py +++ b/src/mvt/common/cmd_check_iocs.py @@ -24,6 +24,8 @@ class CmdCheckIOCS(Command): module_options: 
Optional[dict] = None, hashes: Optional[bool] = False, sub_command: Optional[bool] = False, + disable_version_check: bool = False, + disable_indicator_check: bool = False, ) -> None: super().__init__( target_path=target_path, @@ -35,6 +37,8 @@ class CmdCheckIOCS(Command): hashes=hashes, sub_command=sub_command, log=log, + disable_version_check=disable_version_check, + disable_indicator_check=disable_indicator_check, ) self.name = "check-iocs" diff --git a/src/mvt/common/command.py b/src/mvt/common/command.py index b6d7aaf..0eb633a 100644 --- a/src/mvt/common/command.py +++ b/src/mvt/common/command.py @@ -34,6 +34,8 @@ class Command: hashes: Optional[bool] = False, sub_command: Optional[bool] = False, log: logging.Logger = logging.getLogger(__name__), + disable_version_check: bool = False, + disable_indicator_check: bool = False, ) -> None: self.name = "" self.modules = [] @@ -45,6 +47,8 @@ class Command: self.serial = serial self.log = log self.sub_command = sub_command + self.disable_version_check = disable_version_check + self.disable_indicator_check = disable_indicator_check # This dictionary can contain options that will be passed down from # the Command to all modules. This can for example be used to pass diff --git a/src/mvt/common/help.py b/src/mvt/common/help.py index 0cca7ab..9695e57 100644 --- a/src/mvt/common/help.py +++ b/src/mvt/common/help.py @@ -15,6 +15,8 @@ HELP_MSG_HASHES = "Generate hashes of all the files analyzed" HELP_MSG_VERBOSE = "Verbose mode" HELP_MSG_CHECK_IOCS = "Compare stored JSON results to provided indicators" HELP_MSG_STIX2 = "Download public STIX2 indicators" +HELP_MSG_DISABLE_UPDATE_CHECK = "Disable MVT version update check" +HELP_MSG_DISABLE_INDICATOR_UPDATE_CHECK = "Disable indicators update check" # IOS Specific HELP_MSG_DECRYPT_BACKUP = "Decrypt an encrypted iTunes backup" diff --git a/src/mvt/common/logo.py b/src/mvt/common/logo.py index 04071c8..048ee22 100644 --- a/src/mvt/common/logo.py +++ b/src/mvt/common/logo.py @@ -12,74 +12,85 @@ from .updates import IndicatorsUpdates, MVTUpdates from .version import MVT_VERSION -def check_updates() -> None: +def check_updates( + disable_version_check: bool = False, disable_indicator_check: bool = False +) -> None: log = logging.getLogger("mvt") + # First we check for MVT version updates. - try: - mvt_updates = MVTUpdates() - latest_version = mvt_updates.check() - except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): - rich_print( - "\t\t[bold]Note: Could not check for MVT updates.[/bold] " - "You may be working offline. Please update MVT regularly." - ) - except Exception as e: - log.error("Error encountered when trying to check latest MVT version: %s", e) - else: - if latest_version: + if not disable_version_check: + try: + mvt_updates = MVTUpdates() + latest_version = mvt_updates.check() + except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): rich_print( - f"\t\t[bold]Version {latest_version} is available! " - "Upgrade mvt with `pip3 install -U mvt` or with `pipx upgrade mvt`[/bold]" + "\t\t[bold]Note: Could not check for MVT updates.[/bold] " + "You may be working offline. Please update MVT regularly." ) - - # Then we check for indicators files updates. - ioc_updates = IndicatorsUpdates() - - # Before proceeding, we check if we have downloaded an indicators index. - # If not, there's no point in proceeding with the updates check. 
- if ioc_updates.get_latest_update() == 0: - rich_print( - "\t\t[bold]You have not yet downloaded any indicators, check " - "the `download-iocs` command![/bold]" - ) - return - - # We only perform this check at a fixed frequency, in order to not - # overburden the user with too many lookups if the command is being run - # multiple times. - should_check, hours = ioc_updates.should_check() - if not should_check: - rich_print( - f"\t\tIndicators updates checked recently, next automatic check " - f"in {int(hours)} hours" - ) - return - - try: - ioc_to_update = ioc_updates.check() - except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): - rich_print( - "\t\t[bold]Note: Could not check for indicator updates.[/bold] " - "You may be working offline. Please update MVT indicators regularly." - ) - except Exception as e: - log.error("Error encountered when trying to check latest MVT indicators: %s", e) - else: - if ioc_to_update: - rich_print( - "\t\t[bold]There are updates to your indicators files! " - "Run the `download-iocs` command to update![/bold]" + except Exception as e: + log.error( + "Error encountered when trying to check latest MVT version: %s", e ) else: - rich_print("\t\tYour indicators files seem to be up to date.") + if latest_version: + rich_print( + f"\t\t[bold]Version {latest_version} is available! " + "Upgrade mvt with `pip3 install -U mvt` or with `pipx upgrade mvt`[/bold]" + ) + + # Then we check for indicators files updates. + if not disable_indicator_check: + ioc_updates = IndicatorsUpdates() + + # Before proceeding, we check if we have downloaded an indicators index. + # If not, there's no point in proceeding with the updates check. + if ioc_updates.get_latest_update() == 0: + rich_print( + "\t\t[bold]You have not yet downloaded any indicators, check " + "the `download-iocs` command![/bold]" + ) + return + + # We only perform this check at a fixed frequency, in order to not + # overburden the user with too many lookups if the command is being run + # multiple times. + should_check, hours = ioc_updates.should_check() + if not should_check: + rich_print( + f"\t\tIndicators updates checked recently, next automatic check " + f"in {int(hours)} hours" + ) + return + + try: + ioc_to_update = ioc_updates.check() + except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): + rich_print( + "\t\t[bold]Note: Could not check for indicator updates.[/bold] " + "You may be working offline. Please update MVT indicators regularly." + ) + except Exception as e: + log.error( + "Error encountered when trying to check latest MVT indicators: %s", e + ) + else: + if ioc_to_update: + rich_print( + "\t\t[bold]There are updates to your indicators files! 
" + "Run the `download-iocs` command to update![/bold]" + ) + else: + rich_print("\t\tYour indicators files seem to be up to date.") -def logo() -> None: +def logo( + disable_version_check: bool = False, disable_indicator_check: bool = False +) -> None: rich_print("\n") rich_print("\t[bold]MVT[/bold] - Mobile Verification Toolkit") rich_print("\t\thttps://mvt.re") rich_print(f"\t\tVersion: {MVT_VERSION}") - check_updates() + check_updates(disable_version_check, disable_indicator_check) rich_print("\n") diff --git a/src/mvt/common/updates.py b/src/mvt/common/updates.py index e782b91..c9c380b 100644 --- a/src/mvt/common/updates.py +++ b/src/mvt/common/updates.py @@ -24,7 +24,11 @@ INDICATORS_CHECK_FREQUENCY = 12 class MVTUpdates: def check(self) -> str: - res = requests.get(settings.PYPI_UPDATE_URL, timeout=15) + try: + res = requests.get(settings.PYPI_UPDATE_URL, timeout=5) + except requests.exceptions.RequestException as e: + log.error("Failed to check for updates, skipping updates: %s", e) + return "" data = res.json() latest_version = data.get("info", {}).get("version", "") @@ -93,7 +97,12 @@ class IndicatorsUpdates: url = self.github_raw_url.format( self.index_owner, self.index_repo, self.index_branch, self.index_path ) - res = requests.get(url, timeout=15) + try: + res = requests.get(url, timeout=5) + except requests.exceptions.RequestException as e: + log.error("Failed to retrieve indicators index from %s: %s", url, e) + return None + if res.status_code != 200: log.error( "Failed to retrieve indicators index located at %s (error %d)", @@ -105,7 +114,12 @@ class IndicatorsUpdates: return yaml.safe_load(res.content) def download_remote_ioc(self, ioc_url: str) -> Optional[str]: - res = requests.get(ioc_url, timeout=15) + try: + res = requests.get(ioc_url, timeout=15) + except requests.exceptions.RequestException as e: + log.error("Failed to download indicators file from %s: %s", ioc_url, e) + return None + if res.status_code != 200: log.error( "Failed to download indicators file from %s (error %d)", @@ -171,7 +185,12 @@ class IndicatorsUpdates: file_commit_url = ( f"https://api.github.com/repos/{owner}/{repo}/commits?path={path}" ) - res = requests.get(file_commit_url, timeout=15) + try: + res = requests.get(file_commit_url, timeout=5) + except requests.exceptions.RequestException as e: + log.error("Failed to get details about file %s: %s", file_commit_url, e) + return -1 + if res.status_code != 200: log.error( "Failed to get details about file %s (error %d)", diff --git a/src/mvt/ios/cli.py b/src/mvt/ios/cli.py index 1d06c96..3cb3421 100644 --- a/src/mvt/ios/cli.py +++ b/src/mvt/ios/cli.py @@ -37,6 +37,8 @@ from mvt.common.help import ( HELP_MSG_CHECK_IOCS, HELP_MSG_STIX2, HELP_MSG_CHECK_IOS_BACKUP, + HELP_MSG_DISABLE_UPDATE_CHECK, + HELP_MSG_DISABLE_INDICATOR_UPDATE_CHECK, ) from .cmd_check_backup import CmdIOSCheckBackup from .cmd_check_fs import CmdIOSCheckFS @@ -53,12 +55,37 @@ MVT_IOS_BACKUP_PASSWORD = "MVT_IOS_BACKUP_PASSWORD" CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"]) +def _get_disable_flags(ctx): + """Helper function to safely get disable flags from context.""" + if ctx.obj is None: + return False, False + return ( + ctx.obj.get("disable_version_check", False), + ctx.obj.get("disable_indicator_check", False), + ) + + # ============================================================================== # Main # ============================================================================== @click.group(invoke_without_command=False) -def cli(): - logo() +@click.option( 
+ "--disable-update-check", is_flag=True, help=HELP_MSG_DISABLE_UPDATE_CHECK +) +@click.option( + "--disable-indicator-update-check", + is_flag=True, + help=HELP_MSG_DISABLE_INDICATOR_UPDATE_CHECK, +) +@click.pass_context +def cli(ctx, disable_update_check, disable_indicator_update_check): + ctx.ensure_object(dict) + ctx.obj["disable_version_check"] = disable_update_check + ctx.obj["disable_indicator_check"] = disable_indicator_update_check + logo( + disable_version_check=disable_update_check, + disable_indicator_check=disable_indicator_update_check, + ) # ============================================================================== @@ -219,6 +246,8 @@ def check_backup( module_name=module, module_options=module_options, hashes=hashes, + disable_version_check=_get_disable_flags(ctx)[0], + disable_indicator_check=_get_disable_flags(ctx)[1], ) if list_modules: @@ -266,6 +295,8 @@ def check_fs(ctx, iocs, output, fast, list_modules, module, hashes, verbose, dum module_name=module, module_options=module_options, hashes=hashes, + disable_version_check=_get_disable_flags(ctx)[0], + disable_indicator_check=_get_disable_flags(ctx)[1], ) if list_modules: @@ -300,7 +331,13 @@ def check_fs(ctx, iocs, output, fast, list_modules, module, hashes, verbose, dum @click.argument("FOLDER", type=click.Path(exists=True)) @click.pass_context def check_iocs(ctx, iocs, list_modules, module, folder): - cmd = CmdCheckIOCS(target_path=folder, ioc_files=iocs, module_name=module) + cmd = CmdCheckIOCS( + target_path=folder, + ioc_files=iocs, + module_name=module, + disable_version_check=_get_disable_flags(ctx)[0], + disable_indicator_check=_get_disable_flags(ctx)[1], + ) cmd.modules = BACKUP_MODULES + FS_MODULES + MIXED_MODULES if list_modules: diff --git a/src/mvt/ios/cmd_check_backup.py b/src/mvt/ios/cmd_check_backup.py index dcdc013..9200964 100644 --- a/src/mvt/ios/cmd_check_backup.py +++ b/src/mvt/ios/cmd_check_backup.py @@ -27,6 +27,8 @@ class CmdIOSCheckBackup(Command): module_options: Optional[dict] = None, hashes: bool = False, sub_command: bool = False, + disable_version_check: bool = False, + disable_indicator_check: bool = False, ) -> None: super().__init__( target_path=target_path, @@ -39,6 +41,8 @@ class CmdIOSCheckBackup(Command): hashes=hashes, sub_command=sub_command, log=log, + disable_version_check=disable_version_check, + disable_indicator_check=disable_indicator_check, ) self.name = "check-backup" diff --git a/src/mvt/ios/cmd_check_fs.py b/src/mvt/ios/cmd_check_fs.py index 605bfd0..78325ba 100644 --- a/src/mvt/ios/cmd_check_fs.py +++ b/src/mvt/ios/cmd_check_fs.py @@ -27,6 +27,8 @@ class CmdIOSCheckFS(Command): module_options: Optional[dict] = None, hashes: bool = False, sub_command: bool = False, + disable_version_check: bool = False, + disable_indicator_check: bool = False, ) -> None: super().__init__( target_path=target_path, @@ -34,11 +36,12 @@ class CmdIOSCheckFS(Command): ioc_files=ioc_files, iocs=iocs, module_name=module_name, - serial=serial, module_options=module_options, hashes=hashes, sub_command=sub_command, log=log, + disable_version_check=disable_version_check, + disable_indicator_check=disable_indicator_check, ) self.name = "check-fs" diff --git a/src/mvt/ios/modules/mixed/webkit_session_resource_log.py b/src/mvt/ios/modules/mixed/webkit_session_resource_log.py index 0ae2545..5acbc81 100644 --- a/src/mvt/ios/modules/mixed/webkit_session_resource_log.py +++ b/src/mvt/ios/modules/mixed/webkit_session_resource_log.py @@ -127,6 +127,24 @@ class WebkitSessionResourceLog(IOSExtraction): 
browsing_stats = file_plist["browsingStatistics"] for item in browsing_stats: + most_recent_interaction, last_seen = None, None + if "mostRecentUserInteraction" in item: + try: + most_recent_interaction = convert_datetime_to_iso( + item["mostRecentUserInteraction"] + ) + except Exception: + self.log.error( + f'Error converting date of Safari resource"most recent interaction": {item["mostRecentUserInteraction"]}' + ) + if "lastSeen" in item: + try: + last_seen = convert_datetime_to_iso(item["lastSeen"]) + except Exception: + self.log.error( + f'Error converting date of Safari resource"last seen": {item["lastSeen"]}' + ) + items.append( { "origin": item.get("PrevalentResourceOrigin", ""), @@ -139,10 +157,8 @@ class WebkitSessionResourceLog(IOSExtraction): "subresourceUnderTopFrameOrigins", "" ), "user_interaction": item.get("hadUserInteraction"), - "most_recent_interaction": convert_datetime_to_iso( - item["mostRecentUserInteraction"] - ), - "last_seen": convert_datetime_to_iso(item["lastSeen"]), + "most_recent_interaction": most_recent_interaction, + "last_seen": last_seen, } ) diff --git a/tests/android_androidqf/test_mounts.py b/tests/android_androidqf/test_mounts.py new file mode 100644 index 0000000..89e5e17 --- /dev/null +++ b/tests/android_androidqf/test_mounts.py @@ -0,0 +1,97 @@ +# Mobile Verification Toolkit (MVT) +# Copyright (c) 2021-2023 The MVT Authors. +# Use of this software is governed by the MVT License 1.1 that can be found at +# https://license.mvt.re/1.1/ + +import logging +from pathlib import Path + +from mvt.common.module import run_module + +from ..utils import get_android_androidqf, list_files + + +class TestAndroidqfMountsArtifact: + def test_parse_mounts_token_checks(self): + """ + Test the artifact-level `parse` method using tolerant token checks. + + Different parser variants may place mount tokens into different dict + keys (for example `mount_options`, `pass_num`, `dump_freq`, etc.). To + avoid brittle assertions we concatenate each parsed entry's values and + look for expected tokens (device names, mount points, options) somewhere + in the combined representation. + """ + from mvt.android.artifacts.mounts import Mounts as MountsArtifact + + m = MountsArtifact() + + mount_lines = [ + "/dev/block/dm-12 on / type ext4 (ro,seclabel,noatime)", + "/dev/block/by-name/system on /system type ext4 (rw,seclabel,noatime)", + "/dev/block/by-name/data on /data type f2fs (rw,nosuid,nodev,noatime)", + ] + mount_content = "\n".join(mount_lines) + + # Parse the mount lines (artifact-level) + m.parse(mount_content) + + # Basic sanity: parser should return one entry per input line + assert len(m.results) == 3, f"Expected 3 parsed mounts, got: {m.results}" + + # Concatenate each entry's values into a single string so token checks + # are tolerant to which dict keys were used by the parser. 
+ def concat_values(entry): + parts = [] + for v in entry.values(): + try: + parts.append(str(v)) + except Exception: + # Skip values that can't be stringified + continue + return " ".join(parts) + + concatenated = [concat_values(e) for e in m.results] + + # Token expectations (tolerant): + # - Root line should include 'dm-12' and 'noatime' (and typically 'ro') + assert any("dm-12" in s and "noatime" in s for s in concatenated), ( + f"No root-like tokens (dm-12 + noatime) found in parsed results: {concatenated}" + ) + + # - System line should include '/system' or 'by-name/system' and 'rw' + assert any( + (("by-name/system" in s or "/system" in s) and "rw" in s) + for s in concatenated + ), ( + f"No system-like tokens (system + rw) found in parsed results: {concatenated}" + ) + + # - Data line should include '/data' or 'by-name/data' and 'rw' + assert any( + (("by-name/data" in s or "/data" in s) and "rw" in s) for s in concatenated + ), f"No data-like tokens (data + rw) found in parsed results: {concatenated}" + + +class TestAndroidqfMountsModule: + def test_androidqf_module_no_mounts_file(self): + """ + When no `mounts.json` is present in the androidqf dataset, the module + should not produce results nor detections. + """ + from mvt.android.modules.androidqf.mounts import Mounts + + data_path = get_android_androidqf() + m = Mounts(target_path=data_path, log=logging) + files = list_files(data_path) + parent_path = Path(data_path).absolute().parent.as_posix() + m.from_folder(parent_path, files) + + run_module(m) + + # The provided androidqf test dataset does not include mounts.json, so + # results should remain empty. + assert len(m.results) == 0, ( + f"Expected no results when mounts.json is absent, got: {m.results}" + ) + assert len(m.detected) == 0, f"Expected no detections, got: {m.detected}" diff --git a/tests/android_androidqf/test_root_binaries.py b/tests/android_androidqf/test_root_binaries.py new file mode 100644 index 0000000..a59ecf5 --- /dev/null +++ b/tests/android_androidqf/test_root_binaries.py @@ -0,0 +1,116 @@ +# Mobile Verification Toolkit (MVT) +# Copyright (c) 2021-2023 The MVT Authors. 
+# Use of this software is governed by the MVT License 1.1 that can be found at +# https://license.mvt.re/1.1/ + +import logging +from pathlib import Path + +import pytest + +from mvt.android.modules.androidqf.root_binaries import RootBinaries +from mvt.common.module import run_module + +from ..utils import get_android_androidqf, list_files + + +@pytest.fixture() +def data_path(): + return get_android_androidqf() + + +@pytest.fixture() +def parent_data_path(data_path): + return Path(data_path).absolute().parent.as_posix() + + +@pytest.fixture() +def file_list(data_path): + return list_files(data_path) + + +@pytest.fixture() +def module(parent_data_path, file_list): + m = RootBinaries(target_path=parent_data_path, log=logging) + m.from_folder(parent_data_path, file_list) + return m + + +class TestAndroidqfRootBinaries: + def test_root_binaries_detection(self, module): + run_module(module) + + # Should find 4 root binaries from the test file + assert len(module.results) == 4 + assert len(module.detected) == 4 + + # Check that all results are detected as indicators + binary_paths = [result["path"] for result in module.results] + assert "/system/bin/su" in binary_paths + assert "/system/xbin/busybox" in binary_paths + assert "/data/local/tmp/magisk" in binary_paths + assert "/system/bin/magiskhide" in binary_paths + + def test_root_binaries_descriptions(self, module): + run_module(module) + + # Check that binary descriptions are correctly identified + su_result = next((r for r in module.results if "su" in r["binary_name"]), None) + assert su_result is not None + assert "SuperUser binary" in su_result["description"] + + busybox_result = next( + (r for r in module.results if "busybox" in r["binary_name"]), None + ) + assert busybox_result is not None + assert "BusyBox utilities" in busybox_result["description"] + + magisk_result = next( + (r for r in module.results if r["binary_name"] == "magisk"), None + ) + assert magisk_result is not None + assert "Magisk root framework" in magisk_result["description"] + + magiskhide_result = next( + (r for r in module.results if "magiskhide" in r["binary_name"]), None + ) + assert magiskhide_result is not None + assert "Magisk hide utility" in magiskhide_result["description"] + + def test_root_binaries_warnings(self, caplog, module): + run_module(module) + + # Check that warnings are logged for each root binary found + assert 'Found root binary "su" at path "/system/bin/su"' in caplog.text + assert ( + 'Found root binary "busybox" at path "/system/xbin/busybox"' in caplog.text + ) + assert ( + 'Found root binary "magisk" at path "/data/local/tmp/magisk"' in caplog.text + ) + assert ( + 'Found root binary "magiskhide" at path "/system/bin/magiskhide"' + in caplog.text + ) + assert "Device shows signs of rooting with 4 root binaries found" in caplog.text + + def test_serialize_method(self, module): + run_module(module) + + # Test that serialize method works correctly + if module.results: + serialized = module.serialize(module.results[0]) + assert serialized["module"] == "RootBinaries" + assert serialized["event"] == "root_binary_found" + assert "Root binary found:" in serialized["data"] + + def test_no_root_binaries_file(self, parent_data_path): + # Test behavior when no root_binaries.json file is present + empty_file_list = [] + m = RootBinaries(target_path=parent_data_path, log=logging) + m.from_folder(parent_data_path, empty_file_list) + + run_module(m) + + assert len(m.results) == 0 + assert len(m.detected) == 0 diff --git 
a/tests/artifacts/androidqf/root_binaries.json b/tests/artifacts/androidqf/root_binaries.json new file mode 100644 index 0000000..37a3ccc --- /dev/null +++ b/tests/artifacts/androidqf/root_binaries.json @@ -0,0 +1,6 @@ +[ + "/system/bin/su", + "/system/xbin/busybox", + "/data/local/tmp/magisk", + "/system/bin/magiskhide" +] diff --git a/tests/common/test_utils.py b/tests/common/test_utils.py index d1058e5..4dbe5c0 100644 --- a/tests/common/test_utils.py +++ b/tests/common/test_utils.py @@ -62,7 +62,7 @@ class TestHashes: def test_hash_from_folder(self): path = os.path.join(get_artifact_folder(), "androidqf") hashes = list(generate_hashes_from_path(path, logging)) - assert len(hashes) == 7 + assert len(hashes) == 8 # Sort the files to have reliable order for tests. hashes = sorted(hashes, key=lambda x: x["file_path"]) assert hashes[0]["file_path"] == os.path.join(path, "backup.ab")
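
Usage note on the new Mounts artifact: parse() accepts the "device on mount_point type fs (options)" lines that androidqf stores in mounts.json, and every parsed entry carries the is_system_partition and is_read_write flags that check_indicators() later relies on. A minimal sketch, assuming the artifact class can be instantiated standalone exactly as the unit test above does:

    from mvt.android.artifacts.mounts import Mounts

    mounts = Mounts()
    mounts.parse(
        "/dev/block/dm-12 on / type ext4 (ro,seclabel,noatime)\n"
        "/dev/block/by-name/system on /system type ext4 (rw,seclabel,noatime)"
    )

    for entry in mounts.results:
        # Each entry exposes the keys built in parse(): device, mount_point,
        # filesystem_type, mount_options, options_list, is_system_partition
        # and is_read_write.
        if entry["is_system_partition"] and entry["is_read_write"]:
            print(f"{entry['mount_point']} is mounted read-write (possible rooting)")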
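
The RootBinaries module treats every path listed in root_binaries.json as a rooting indicator; check_indicators() and serialize() operate only on the result dictionaries, so they can be sketched in isolation. The direct assignment of results below is illustrative only, and it assumes the module base initializes an empty detected list, as the tests above rely on; in a real run the module reads the file through the androidqf plumbing:

    import logging

    from mvt.android.modules.androidqf.root_binaries import RootBinaries

    rb = RootBinaries(log=logging.getLogger("mvt"))
    # Shape matches what run() builds from root_binaries.json.
    rb.results = [
        {"path": "/system/bin/su", "binary_name": "su", "description": "SuperUser binary"},
    ]

    rb.check_indicators()  # logs a warning per binary and populates rb.detected
    print(rb.serialize(rb.detected[0])["data"])
    # Root binary found: /system/bin/su (binary: su)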
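
The new --disable-update-check and --disable-indicator-update-check options live on the click group, so they must be passed before the subcommand. One way to exercise them without a device is click's test runner; the use of check-adb with --list-modules here is an assumption based on the command bodies above, namely that module listing returns before any ADB connection is attempted:

    from click.testing import CliRunner

    from mvt.android.cli import cli

    runner = CliRunner()
    result = runner.invoke(
        cli,
        [
            "--disable-update-check",
            "--disable-indicator-update-check",
            "check-adb",
            "--list-modules",
        ],
    )
    print(result.output)  # banner without either update check, then the module list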