diff --git a/.github/workflows/scripts/update-ios-releases.py b/.github/workflows/scripts/update-ios-releases.py index 9d71fcd..3e6f217 100644 --- a/.github/workflows/scripts/update-ios-releases.py +++ b/.github/workflows/scripts/update-ios-releases.py @@ -12,7 +12,7 @@ from packaging import version def download_apple_rss(feed_url): with urllib.request.urlopen(feed_url) as f: - rss_feed = f.read().decode('utf-8') + rss_feed = f.read().decode("utf-8") print("Downloaded RSS feed from Apple.") return rss_feed @@ -27,7 +27,10 @@ def parse_latest_ios_versions(rss_feed_text): continue import re - build_match = re.match(r"iOS (?P<version>[\d\.]+) (?P<beta>beta )?(\S*)?\((?P<build>.*)\)", title) + + build_match = re.match( + r"iOS (?P<version>[\d\.]+) (?P<beta>beta )?(\S*)?\((?P<build>.*)\)", title + ) if not build_match: print("Could not parse iOS build:", title) continue @@ -62,16 +65,22 @@ def update_mvt(mvt_checkout_path, latest_ios_versions): print("No new iOS versions found.") else: print("Found {} new iOS versions.".format(new_entry_count)) - new_version_list = sorted(current_versions, key=lambda x: version.Version(x["version"])) + new_version_list = sorted( + current_versions, key=lambda x: version.Version(x["version"]) + ) with open(version_path, "w") as version_file: json.dump(new_version_list, version_file, indent=4) def main(): print("Downloading RSS feed...") - mvt_checkout_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../")) + mvt_checkout_path = os.path.abspath( + os.path.join(os.path.dirname(__file__), "../../../") + ) - rss_feed = download_apple_rss("https://developer.apple.com/news/releases/rss/releases.rss") + rss_feed = download_apple_rss( + "https://developer.apple.com/news/releases/rss/releases.rss" + ) latest_ios_version = parse_latest_ios_versions(rss_feed) update_mvt(mvt_checkout_path, latest_ios_version) diff --git a/mvt/android/cli.py b/mvt/android/cli.py index ad2a41f..2368a80 100644 --- a/mvt/android/cli.py +++ b/mvt/android/cli.py @@ -8,10 +8,16 @@ import logging import click from mvt.common.cmd_check_iocs import CmdCheckIOCS -from mvt.common.help import (HELP_MSG_FAST, HELP_MSG_HASHES, HELP_MSG_IOC, - HELP_MSG_LIST_MODULES, HELP_MSG_MODULE, - HELP_MSG_OUTPUT, HELP_MSG_SERIAL, - HELP_MSG_VERBOSE) +from mvt.common.help import ( + HELP_MSG_FAST, + HELP_MSG_HASHES, + HELP_MSG_IOC, + HELP_MSG_LIST_MODULES, + HELP_MSG_MODULE, + HELP_MSG_OUTPUT, + HELP_MSG_SERIAL, + HELP_MSG_VERBOSE, +) from mvt.common.logo import logo from mvt.common.updates import IndicatorsUpdates from mvt.common.utils import init_logging, set_verbose_logging @@ -28,39 +34,54 @@ from .modules.bugreport import BUGREPORT_MODULES init_logging() log = logging.getLogger("mvt") -CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help']) +CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"]) -#============================================================================== +# ============================================================================== # Main -#============================================================================== +# ============================================================================== @click.group(invoke_without_command=False) def cli(): logo() -#============================================================================== +# ============================================================================== # Command: version -#============================================================================== +# ============================================================================== 
@cli.command("version", help="Show the currently installed version of MVT") def version(): return -#============================================================================== +# ============================================================================== # Command: download-apks -#============================================================================== -@cli.command("download-apks", help="Download all or only non-system installed APKs", - context_settings=CONTEXT_SETTINGS) +# ============================================================================== +@cli.command( + "download-apks", + help="Download all or only non-system installed APKs", + context_settings=CONTEXT_SETTINGS, +) @click.option("--serial", "-s", type=str, help=HELP_MSG_SERIAL) -@click.option("--all-apks", "-a", is_flag=True, - help="Extract all packages installed on the phone, including system packages") +@click.option( + "--all-apks", + "-a", + is_flag=True, + help="Extract all packages installed on the phone, including system packages", +) @click.option("--virustotal", "-v", is_flag=True, help="Check packages on VirusTotal") -@click.option("--output", "-o", type=click.Path(exists=False), - help="Specify a path to a folder where you want to store the APKs") -@click.option("--from-file", "-f", type=click.Path(exists=True), - help="Instead of acquiring from phone, load an existing packages.json file for " - "lookups (mainly for debug purposes)") +@click.option( + "--output", + "-o", + type=click.Path(exists=False), + help="Specify a path to a folder where you want to store the APKs", +) +@click.option( + "--from-file", + "-f", + type=click.Path(exists=True), + help="Instead of acquiring from phone, load an existing packages.json file for " + "lookups (mainly for debug purposes)", +) @click.option("--verbose", "-v", is_flag=True, help=HELP_MSG_VERBOSE) @click.pass_context def download_apks(ctx, all_apks, virustotal, output, from_file, serial, verbose): @@ -99,16 +120,24 @@ def download_apks(ctx, all_apks, virustotal, output, from_file, serial, verbose) ctx.exit(1) -#============================================================================== +# ============================================================================== # Command: check-adb -#============================================================================== -@cli.command("check-adb", help="Check an Android device over adb", - context_settings=CONTEXT_SETTINGS) +# ============================================================================== +@cli.command( + "check-adb", + help="Check an Android device over adb", + context_settings=CONTEXT_SETTINGS, +) @click.option("--serial", "-s", type=str, help=HELP_MSG_SERIAL) -@click.option("--iocs", "-i", type=click.Path(exists=True), multiple=True, - default=[], help=HELP_MSG_IOC) -@click.option("--output", "-o", type=click.Path(exists=False), - help=HELP_MSG_OUTPUT) +@click.option( + "--iocs", + "-i", + type=click.Path(exists=True), + multiple=True, + default=[], + help=HELP_MSG_IOC, +) +@click.option("--output", "-o", type=click.Path(exists=False), help=HELP_MSG_OUTPUT) @click.option("--fast", "-f", is_flag=True, help=HELP_MSG_FAST) @click.option("--list-modules", "-l", is_flag=True, help=HELP_MSG_LIST_MODULES) @click.option("--module", "-m", help=HELP_MSG_MODULE) @@ -116,8 +145,13 @@ def download_apks(ctx, all_apks, virustotal, output, from_file, serial, verbose) @click.pass_context def check_adb(ctx, serial, iocs, output, fast, list_modules, module, verbose): set_verbose_logging(verbose) - cmd = 
CmdAndroidCheckADB(results_path=output, ioc_files=iocs, - module_name=module, serial=serial, fast_mode=fast) + cmd = CmdAndroidCheckADB( + results_path=output, + ioc_files=iocs, + module_name=module, + serial=serial, + fast_mode=fast, + ) if list_modules: cmd.list_modules() @@ -128,19 +162,29 @@ def check_adb(ctx, serial, iocs, output, fast, list_modules, module, verbose): cmd.run() if cmd.detected_count > 0: - log.warning("The analysis of the Android device produced %d detections!", - cmd.detected_count) + log.warning( + "The analysis of the Android device produced %d detections!", + cmd.detected_count, + ) -#============================================================================== +# ============================================================================== # Command: check-bugreport -#============================================================================== -@cli.command("check-bugreport", help="Check an Android Bug Report", - context_settings=CONTEXT_SETTINGS) -@click.option("--iocs", "-i", type=click.Path(exists=True), multiple=True, - default=[], help=HELP_MSG_IOC) -@click.option("--output", "-o", type=click.Path(exists=False), - help=HELP_MSG_OUTPUT) +# ============================================================================== +@cli.command( + "check-bugreport", + help="Check an Android Bug Report", + context_settings=CONTEXT_SETTINGS, +) +@click.option( + "--iocs", + "-i", + type=click.Path(exists=True), + multiple=True, + default=[], + help=HELP_MSG_IOC, +) +@click.option("--output", "-o", type=click.Path(exists=False), help=HELP_MSG_OUTPUT) @click.option("--list-modules", "-l", is_flag=True, help=HELP_MSG_LIST_MODULES) @click.option("--module", "-m", help=HELP_MSG_MODULE) @click.option("--verbose", "-v", is_flag=True, help=HELP_MSG_VERBOSE) @@ -148,10 +192,14 @@ def check_adb(ctx, serial, iocs, output, fast, list_modules, module, verbose): @click.pass_context def check_bugreport(ctx, iocs, output, list_modules, module, verbose, bugreport_path): set_verbose_logging(verbose) - # Always generate hashes as bug reports are small. - cmd = CmdAndroidCheckBugreport(target_path=bugreport_path, - results_path=output, ioc_files=iocs, - module_name=module, hashes=True) + # Always generate hashes as bug reports are small. 
+ cmd = CmdAndroidCheckBugreport( + target_path=bugreport_path, + results_path=output, + ioc_files=iocs, + module_name=module, + hashes=True, + ) if list_modules: cmd.list_modules() @@ -162,19 +210,27 @@ def check_bugreport(ctx, iocs, output, list_modules, module, verbose, bugreport_ cmd.run() if cmd.detected_count > 0: - log.warning("The analysis of the Android bug report produced %d detections!", - cmd.detected_count) + log.warning( + "The analysis of the Android bug report produced %d detections!", + cmd.detected_count, + ) -#============================================================================== +# ============================================================================== # Command: check-backup -#============================================================================== -@cli.command("check-backup", help="Check an Android Backup", - context_settings=CONTEXT_SETTINGS) -@click.option("--iocs", "-i", type=click.Path(exists=True), multiple=True, - default=[], help=HELP_MSG_IOC) -@click.option("--output", "-o", type=click.Path(exists=False), - help=HELP_MSG_OUTPUT) +# ============================================================================== +@cli.command( + "check-backup", help="Check an Android Backup", context_settings=CONTEXT_SETTINGS +) +@click.option( + "--iocs", + "-i", + type=click.Path(exists=True), + multiple=True, + default=[], + help=HELP_MSG_IOC, +) +@click.option("--output", "-o", type=click.Path(exists=False), help=HELP_MSG_OUTPUT) @click.option("--list-modules", "-l", is_flag=True, help=HELP_MSG_LIST_MODULES) @click.option("--verbose", "-v", is_flag=True, help=HELP_MSG_VERBOSE) @click.argument("BACKUP_PATH", type=click.Path(exists=True)) @@ -182,8 +238,9 @@ def check_bugreport(ctx, iocs, output, list_modules, module, verbose, bugreport_ def check_backup(ctx, iocs, output, list_modules, verbose, backup_path): set_verbose_logging(verbose) # Always generate hashes as backups are generally small. 
- cmd = CmdAndroidCheckBackup(target_path=backup_path, results_path=output, - ioc_files=iocs, hashes=True) + cmd = CmdAndroidCheckBackup( + target_path=backup_path, results_path=output, ioc_files=iocs, hashes=True + ) if list_modules: cmd.list_modules() @@ -194,30 +251,46 @@ def check_backup(ctx, iocs, output, list_modules, verbose, backup_path): cmd.run() if cmd.detected_count > 0: - log.warning("The analysis of the Android backup produced %d detections!", - cmd.detected_count) + log.warning( + "The analysis of the Android backup produced %d detections!", + cmd.detected_count, + ) -#============================================================================== +# ============================================================================== # Command: check-androidqf -#============================================================================== -@cli.command("check-androidqf", help="Check data collected with AndroidQF", - context_settings=CONTEXT_SETTINGS) -@click.option("--iocs", "-i", type=click.Path(exists=True), multiple=True, - default=[], help=HELP_MSG_IOC) -@click.option("--output", "-o", type=click.Path(exists=False), - help=HELP_MSG_OUTPUT) +# ============================================================================== +@cli.command( + "check-androidqf", + help="Check data collected with AndroidQF", + context_settings=CONTEXT_SETTINGS, +) +@click.option( + "--iocs", + "-i", + type=click.Path(exists=True), + multiple=True, + default=[], + help=HELP_MSG_IOC, +) +@click.option("--output", "-o", type=click.Path(exists=False), help=HELP_MSG_OUTPUT) @click.option("--list-modules", "-l", is_flag=True, help=HELP_MSG_LIST_MODULES) @click.option("--module", "-m", help=HELP_MSG_MODULE) @click.option("--hashes", "-H", is_flag=True, help=HELP_MSG_HASHES) @click.option("--verbose", "-v", is_flag=True, help=HELP_MSG_VERBOSE) @click.argument("ANDROIDQF_PATH", type=click.Path(exists=True)) @click.pass_context -def check_androidqf(ctx, iocs, output, list_modules, module, hashes, verbose, androidqf_path): +def check_androidqf( + ctx, iocs, output, list_modules, module, hashes, verbose, androidqf_path +): set_verbose_logging(verbose) - cmd = CmdAndroidCheckAndroidQF(target_path=androidqf_path, - results_path=output, ioc_files=iocs, - module_name=module, hashes=hashes) + cmd = CmdAndroidCheckAndroidQF( + target_path=androidqf_path, + results_path=output, + ioc_files=iocs, + module_name=module, + hashes=hashes, + ) if list_modules: cmd.list_modules() @@ -228,17 +301,28 @@ def check_androidqf(ctx, iocs, output, list_modules, module, hashes, verbose, an cmd.run() if cmd.detected_count > 0: - log.warning("The analysis of the AndroidQF acquisition produced %d detections!", - cmd.detected_count) + log.warning( + "The analysis of the AndroidQF acquisition produced %d detections!", + cmd.detected_count, + ) -#============================================================================== +# ============================================================================== # Command: check-iocs -#============================================================================== -@cli.command("check-iocs", help="Compare stored JSON results to provided indicators", - context_settings=CONTEXT_SETTINGS) -@click.option("--iocs", "-i", type=click.Path(exists=True), multiple=True, - default=[], help=HELP_MSG_IOC) +# ============================================================================== +@cli.command( + "check-iocs", + help="Compare stored JSON results to provided indicators", + context_settings=CONTEXT_SETTINGS, 
+) +@click.option( + "--iocs", + "-i", + type=click.Path(exists=True), + multiple=True, + default=[], + help=HELP_MSG_IOC, +) @click.option("--list-modules", "-l", is_flag=True, help=HELP_MSG_LIST_MODULES) @click.option("--module", "-m", help=HELP_MSG_MODULE) @click.argument("FOLDER", type=click.Path(exists=True)) @@ -254,11 +338,14 @@ def check_iocs(ctx, iocs, list_modules, module, folder): cmd.run() -#============================================================================== +# ============================================================================== # Command: download-iocs -#============================================================================== -@cli.command("download-iocs", help="Download public STIX2 indicators", - context_settings=CONTEXT_SETTINGS) +# ============================================================================== +@cli.command( + "download-iocs", + help="Download public STIX2 indicators", + context_settings=CONTEXT_SETTINGS, +) def download_indicators(): ioc_updates = IndicatorsUpdates() ioc_updates.update() diff --git a/mvt/android/cmd_check_adb.py b/mvt/android/cmd_check_adb.py index e873ba0..1740718 100644 --- a/mvt/android/cmd_check_adb.py +++ b/mvt/android/cmd_check_adb.py @@ -14,7 +14,6 @@ log = logging.getLogger(__name__) class CmdAndroidCheckADB(Command): - def __init__( self, target_path: Optional[str] = None, @@ -22,11 +21,17 @@ class CmdAndroidCheckADB(Command): ioc_files: Optional[list] = None, module_name: Optional[str] = None, serial: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, ) -> None: - super().__init__(target_path=target_path, results_path=results_path, - ioc_files=ioc_files, module_name=module_name, - serial=serial, fast_mode=fast_mode, log=log) + super().__init__( + target_path=target_path, + results_path=results_path, + ioc_files=ioc_files, + module_name=module_name, + serial=serial, + fast_mode=fast_mode, + log=log, + ) self.name = "check-adb" self.modules = ADB_MODULES diff --git a/mvt/android/cmd_check_androidqf.py b/mvt/android/cmd_check_androidqf.py index d0d81dd..f0fd778 100644 --- a/mvt/android/cmd_check_androidqf.py +++ b/mvt/android/cmd_check_androidqf.py @@ -14,7 +14,6 @@ log = logging.getLogger(__name__) class CmdAndroidCheckAndroidQF(Command): - def __init__( self, target_path: Optional[str] = None, @@ -22,13 +21,19 @@ class CmdAndroidCheckAndroidQF(Command): ioc_files: Optional[list] = None, module_name: Optional[str] = None, serial: Optional[str] = None, - fast_mode: Optional[bool] = False, - hashes: Optional[bool] = False, + fast_mode: bool = False, + hashes: bool = False, ) -> None: - super().__init__(target_path=target_path, results_path=results_path, - ioc_files=ioc_files, module_name=module_name, - serial=serial, fast_mode=fast_mode, hashes=hashes, - log=log) + super().__init__( + target_path=target_path, + results_path=results_path, + ioc_files=ioc_files, + module_name=module_name, + serial=serial, + fast_mode=fast_mode, + hashes=hashes, + log=log, + ) self.name = "check-androidqf" self.modules = ANDROIDQF_MODULES diff --git a/mvt/android/cmd_check_backup.py b/mvt/android/cmd_check_backup.py index a7ab4d4..d310b47 100644 --- a/mvt/android/cmd_check_backup.py +++ b/mvt/android/cmd_check_backup.py @@ -14,9 +14,12 @@ from typing import List, Optional from rich.prompt import Prompt from mvt.android.modules.backup.base import BackupExtraction -from mvt.android.parsers.backup import (AndroidBackupParsingError, - InvalidBackupPassword, parse_ab_header, - parse_backup_file) 
+from mvt.android.parsers.backup import ( + AndroidBackupParsingError, + InvalidBackupPassword, + parse_ab_header, + parse_backup_file, +) from mvt.common.command import Command from .modules.backup import BACKUP_MODULES @@ -25,7 +28,6 @@ log = logging.getLogger(__name__) class CmdAndroidCheckBackup(Command): - def __init__( self, target_path: Optional[str] = None, @@ -33,13 +35,19 @@ class CmdAndroidCheckBackup(Command): ioc_files: Optional[list] = None, module_name: Optional[str] = None, serial: Optional[str] = None, - fast_mode: Optional[bool] = False, - hashes: Optional[bool] = False, + fast_mode: bool = False, + hashes: bool = False, ) -> None: - super().__init__(target_path=target_path, results_path=results_path, - ioc_files=ioc_files, module_name=module_name, - serial=serial, fast_mode=fast_mode, hashes=hashes, - log=log) + super().__init__( + target_path=target_path, + results_path=results_path, + ioc_files=ioc_files, + module_name=module_name, + serial=serial, + fast_mode=fast_mode, + hashes=hashes, + log=log, + ) self.name = "check-backup" self.modules = BACKUP_MODULES @@ -85,16 +93,18 @@ class CmdAndroidCheckBackup(Command): self.target_path = Path(self.target_path).absolute().as_posix() for root, subdirs, subfiles in os.walk(os.path.abspath(self.target_path)): for fname in subfiles: - self.backup_files.append(os.path.relpath(os.path.join(root, fname), - self.target_path)) + self.backup_files.append( + os.path.relpath(os.path.join(root, fname), self.target_path) + ) else: - log.critical("Invalid backup path, path should be a folder or an " - "Android Backup (.ab) file") + log.critical( + "Invalid backup path, path should be a folder or an " + "Android Backup (.ab) file" + ) sys.exit(1) def module_init(self, module: BackupExtraction) -> None: # type: ignore[override] if self.backup_type == "folder": module.from_folder(self.target_path, self.backup_files) else: - module.from_ab(self.target_path, self.backup_archive, - self.backup_files) + module.from_ab(self.target_path, self.backup_archive, self.backup_files) diff --git a/mvt/android/cmd_check_bugreport.py b/mvt/android/cmd_check_bugreport.py index 5126136..123390b 100644 --- a/mvt/android/cmd_check_bugreport.py +++ b/mvt/android/cmd_check_bugreport.py @@ -18,7 +18,6 @@ log = logging.getLogger(__name__) class CmdAndroidCheckBugreport(Command): - def __init__( self, target_path: Optional[str] = None, @@ -26,13 +25,19 @@ class CmdAndroidCheckBugreport(Command): ioc_files: Optional[list] = None, module_name: Optional[str] = None, serial: Optional[str] = None, - fast_mode: Optional[bool] = False, - hashes: Optional[bool] = False, + fast_mode: bool = False, + hashes: bool = False, ) -> None: - super().__init__(target_path=target_path, results_path=results_path, - ioc_files=ioc_files, module_name=module_name, - serial=serial, fast_mode=fast_mode, hashes=hashes, - log=log) + super().__init__( + target_path=target_path, + results_path=results_path, + ioc_files=ioc_files, + module_name=module_name, + serial=serial, + fast_mode=fast_mode, + hashes=hashes, + log=log, + ) self.name = "check-bugreport" self.modules = BUGREPORT_MODULES @@ -55,8 +60,9 @@ class CmdAndroidCheckBugreport(Command): parent_path = Path(self.target_path).absolute().as_posix() for root, _, subfiles in os.walk(os.path.abspath(self.target_path)): for file_name in subfiles: - file_path = os.path.relpath(os.path.join(root, file_name), - parent_path) + file_path = os.path.relpath( + os.path.join(root, file_name), parent_path + ) self.bugreport_files.append(file_path) def 
module_init(self, module: BugReportModule) -> None: # type: ignore[override] diff --git a/mvt/android/cmd_download_apks.py b/mvt/android/cmd_download_apks.py index 3fcc179..66a069c 100644 --- a/mvt/android/cmd_download_apks.py +++ b/mvt/android/cmd_download_apks.py @@ -26,7 +26,7 @@ class DownloadAPKs(AndroidExtraction): def __init__( self, results_path: Optional[str] = None, - all_apks: Optional[bool] = False, + all_apks: bool = False, packages: Optional[list] = None, ) -> None: """Initialize module. @@ -66,27 +66,31 @@ class DownloadAPKs(AndroidExtraction): if "==/" in remote_path: file_name = "_" + remote_path.split("==/")[1].replace(".apk", "") - local_path = os.path.join(self.results_path_apks, - f"{package_name}{file_name}.apk") + local_path = os.path.join( + self.results_path_apks, f"{package_name}{file_name}.apk" + ) name_counter = 0 while True: if not os.path.exists(local_path): break name_counter += 1 - local_path = os.path.join(self.results_path_apks, - f"{package_name}{file_name}_{name_counter}.apk") + local_path = os.path.join( + self.results_path_apks, f"{package_name}{file_name}_{name_counter}.apk" + ) try: self._adb_download(remote_path, local_path) except InsufficientPrivileges: - log.error("Unable to pull package file from %s: insufficient privileges, " - "it might be a system app", remote_path) + log.error( + "Unable to pull package file from %s: insufficient privileges, " + "it might be a system app", + remote_path, + ) self._adb_reconnect() return None except Exception as exc: - log.exception("Failed to pull package file from %s: %s", - remote_path, exc) + log.exception("Failed to pull package file from %s: %s", remote_path, exc) self._adb_reconnect() return None @@ -106,10 +110,10 @@ class DownloadAPKs(AndroidExtraction): self.packages = m.results def pull_packages(self) -> None: - """Download all files of all selected packages from the device. - """ - log.info("Starting extraction of installed APKs at folder %s", - self.results_path) + """Download all files of all selected packages from the device.""" + log.info( + "Starting extraction of installed APKs at folder %s", self.results_path + ) # If the user provided the flag --all-apks we select all packages. packages_selection = [] @@ -123,8 +127,10 @@ class DownloadAPKs(AndroidExtraction): if not package.get("system", False): packages_selection.append(package) - log.info("Selected only %d packages which are not marked as \"system\"", - len(packages_selection)) + log.info( + 'Selected only %d packages which are not marked as "system"', + len(packages_selection), + ) if len(packages_selection) == 0: log.info("No packages were selected for download") @@ -136,19 +142,26 @@ class DownloadAPKs(AndroidExtraction): if not os.path.exists(self.results_path_apks): os.makedirs(self.results_path_apks, exist_ok=True) - for i in track(range(len(packages_selection)), - description=f"Downloading {len(packages_selection)} packages..."): + for i in track( + range(len(packages_selection)), + description=f"Downloading {len(packages_selection)} packages...", + ): package = packages_selection[i] - log.info("[%d/%d] Package: %s", i, len(packages_selection), - package["package_name"]) + log.info( + "[%d/%d] Package: %s", + i, + len(packages_selection), + package["package_name"], + ) # Sometimes the package path contains multiple lines for multiple # apks. We loop through each line and download each file. 
for package_file in package["files"]: device_path = package_file["path"] - local_path = self.pull_package_file(package["package_name"], - device_path) + local_path = self.pull_package_file( + package["package_name"], device_path + ) if not local_path: continue diff --git a/mvt/android/modules/adb/__init__.py b/mvt/android/modules/adb/__init__.py index c9e65e0..6e0c263 100644 --- a/mvt/android/modules/adb/__init__.py +++ b/mvt/android/modules/adb/__init__.py @@ -23,8 +23,24 @@ from .settings import Settings from .sms import SMS from .whatsapp import Whatsapp -ADB_MODULES = [ChromeHistory, SMS, Whatsapp, Processes, Getprop, Settings, - SELinuxStatus, DumpsysBatteryHistory, DumpsysBatteryDaily, - DumpsysReceivers, DumpsysActivities, DumpsysAccessibility, - DumpsysDBInfo, DumpsysFull, DumpsysAppOps, Packages, Logcat, - RootBinaries, Files] +ADB_MODULES = [ + ChromeHistory, + SMS, + Whatsapp, + Processes, + Getprop, + Settings, + SELinuxStatus, + DumpsysBatteryHistory, + DumpsysBatteryDaily, + DumpsysReceivers, + DumpsysActivities, + DumpsysAccessibility, + DumpsysDBInfo, + DumpsysFull, + DumpsysAppOps, + Packages, + Logcat, + RootBinaries, + Files, +] diff --git a/mvt/android/modules/adb/base.py b/mvt/android/modules/adb/base.py index 017a9b6..726b3c6 100644 --- a/mvt/android/modules/adb/base.py +++ b/mvt/android/modules/adb/base.py @@ -16,13 +16,20 @@ from typing import Callable, Optional from adb_shell.adb_device import AdbDeviceTcp, AdbDeviceUsb from adb_shell.auth.keygen import keygen, write_public_keyfile from adb_shell.auth.sign_pythonrsa import PythonRSASigner -from adb_shell.exceptions import (AdbCommandFailureException, DeviceAuthError, - UsbDeviceNotFoundError, UsbReadFailedError) +from adb_shell.exceptions import ( + AdbCommandFailureException, + DeviceAuthError, + UsbDeviceNotFoundError, + UsbReadFailedError, +) from rich.prompt import Prompt from usb1 import USBErrorAccess, USBErrorBusy -from mvt.android.parsers.backup import (InvalidBackupPassword, parse_ab_header, - parse_backup_file) +from mvt.android.parsers.backup import ( + InvalidBackupPassword, + parse_ab_header, + parse_backup_file, +) from mvt.common.module import InsufficientPrivileges, MVTModule ADB_KEY_PATH = os.path.expanduser("~/.android/adbkey") @@ -37,13 +44,18 @@ class AndroidExtraction(MVTModule): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) self.device = None self.serial = None @@ -78,36 +90,49 @@ class AndroidExtraction(MVTModule): try: self.device = AdbDeviceUsb(serial=self.serial) except UsbDeviceNotFoundError: - self.log.critical("No device found. Make sure it is connected and unlocked.") + self.log.critical( + "No device found. Make sure it is connected and unlocked." + ) sys.exit(-1) # Otherwise we try to use the TCP transport. 
else: addr = self.serial.split(":") if len(addr) < 2: - raise ValueError("TCP serial number must follow the format: `address:port`") + raise ValueError( + "TCP serial number must follow the format: `address:port`" + ) - self.device = AdbDeviceTcp(addr[0], int(addr[1]), - default_transport_timeout_s=30.) + self.device = AdbDeviceTcp( + addr[0], int(addr[1]), default_transport_timeout_s=30.0 + ) while True: try: self.device.connect(rsa_keys=[signer], auth_timeout_s=5) except (USBErrorBusy, USBErrorAccess): - self.log.critical("Device is busy, maybe run `adb kill-server` and try again.") + self.log.critical( + "Device is busy, maybe run `adb kill-server` and try again." + ) sys.exit(-1) except DeviceAuthError: - self.log.error("You need to authorize this computer on the Android device. " - "Retrying in 5 seconds...") + self.log.error( + "You need to authorize this computer on the Android device. " + "Retrying in 5 seconds..." + ) time.sleep(5) except UsbReadFailedError: - self.log.error("Unable to connect to the device over USB. " - "Try to unplug, plug the device and start again.") + self.log.error( + "Unable to connect to the device over USB. " + "Try to unplug, plug the device and start again." + ) sys.exit(-1) except OSError as exc: if exc.errno == 113 and self.serial: - self.log.critical("Unable to connect to the device %s: " - "did you specify the correct IP address?", - self.serial) + self.log.critical( + "Unable to connect to the device %s: " + "did you specify the correct IP address?", + self.serial, + ) sys.exit(-1) else: break @@ -144,9 +169,11 @@ class AndroidExtraction(MVTModule): def _adb_root_or_die(self) -> None: """Check if we have a `su` binary, otherwise raise an Exception.""" if not self._adb_check_if_root(): - raise InsufficientPrivileges("This module is optionally available " - "in case the device is already rooted." - " Do NOT root your own device!") + raise InsufficientPrivileges( + "This module is optionally available " + "in case the device is already rooted." + " Do NOT root your own device!" + ) def _adb_command_as_root(self, command): """Execute an adb shell command. @@ -177,7 +204,7 @@ class AndroidExtraction(MVTModule): remote_path: str, local_path: str, progress_callback: Optional[Callable] = None, - retry_root: Optional[bool] = True + retry_root: Optional[bool] = True, ) -> None: """Download a file form the device. @@ -192,41 +219,48 @@ class AndroidExtraction(MVTModule): self.device.pull(remote_path, local_path, progress_callback) except AdbCommandFailureException as exc: if retry_root: - self._adb_download_root(remote_path, local_path, - progress_callback) + self._adb_download_root(remote_path, local_path, progress_callback) else: - raise Exception(f"Unable to download file {remote_path}: {exc}") from exc + raise Exception( + f"Unable to download file {remote_path}: {exc}" + ) from exc def _adb_download_root( self, remote_path: str, local_path: str, - progress_callback: Optional[Callable] = None + progress_callback: Optional[Callable] = None, ) -> None: try: # Check if we have root, if not raise an Exception. self._adb_root_or_die() # We generate a random temporary filename. - allowed_chars = (string.ascii_uppercase - + string.ascii_lowercase - + string.digits) - tmp_filename = "tmp_" + ''.join(random.choices(allowed_chars, k=10)) + allowed_chars = ( + string.ascii_uppercase + string.ascii_lowercase + string.digits + ) + tmp_filename = "tmp_" + "".join(random.choices(allowed_chars, k=10)) # We create a temporary local file. 
new_remote_path = f"/sdcard/{tmp_filename}" # We copy the file from the data folder to /sdcard/. cp_output = self._adb_command_as_root(f"cp {remote_path} {new_remote_path}") - if cp_output.startswith("cp: ") and "No such file or directory" in cp_output: + if ( + cp_output.startswith("cp: ") + and "No such file or directory" in cp_output + ): raise Exception(f"Unable to process file {remote_path}: File not found") if cp_output.startswith("cp: ") and "Permission denied" in cp_output: - raise Exception(f"Unable to process file {remote_path}: Permission denied") + raise Exception( + f"Unable to process file {remote_path}: Permission denied" + ) # We download from /sdcard/ to the local temporary file. # If it doesn't work now, don't try again (retry_root=False) - self._adb_download(new_remote_path, local_path, progress_callback, - retry_root=False) + self._adb_download( + new_remote_path, local_path, progress_callback, retry_root=False + ) # Delete the copy on /sdcard/. self._adb_command(f"rm -rf {new_remote_path}") @@ -234,8 +268,7 @@ class AndroidExtraction(MVTModule): except AdbCommandFailureException as exc: raise Exception(f"Unable to download file {remote_path}: {exc}") from exc - def _adb_process_file(self, remote_path: str, - process_routine: Callable) -> None: + def _adb_process_file(self, remote_path: str, process_routine: Callable) -> None: """Download a local copy of a file which is only accessible as root. This is a wrapper around process_routine. @@ -273,8 +306,10 @@ class AndroidExtraction(MVTModule): self._adb_command(f"rm -f {new_remote_path}") def _generate_backup(self, package_name: str) -> bytes: - self.log.info("Please check phone and accept Android backup prompt. " - "You may need to set a backup password. \a") + self.log.info( + "Please check phone and accept Android backup prompt. " + "You may need to set a backup password. \a" + ) # TODO: Base64 encoding as temporary fix to avoid byte-mangling over # the shell transport... @@ -284,19 +319,19 @@ class AndroidExtraction(MVTModule): header = parse_ab_header(backup_output) if not header["backup"]: - self.log.error("Extracting SMS via Android backup failed. " - "No valid backup data found.") + self.log.error( + "Extracting SMS via Android backup failed. " + "No valid backup data found." + ) return None if header["encryption"] == "none": return parse_backup_file(backup_output, password=None) for _ in range(0, 3): - backup_password = Prompt.ask("Enter backup password", - password=True) + backup_password = Prompt.ask("Enter backup password", password=True) try: - decrypted_backup_tar = parse_backup_file(backup_output, - backup_password) + decrypted_backup_tar = parse_backup_file(backup_output, backup_password) return decrypted_backup_tar except InvalidBackupPassword: self.log.error("You provided the wrong password! 
Please try again...") diff --git a/mvt/android/modules/adb/chrome_history.py b/mvt/android/modules/adb/chrome_history.py index cdd4e6f..b58d265 100644 --- a/mvt/android/modules/adb/chrome_history.py +++ b/mvt/android/modules/adb/chrome_history.py @@ -8,8 +8,7 @@ import os import sqlite3 from typing import Optional, Union -from mvt.common.utils import (convert_chrometime_to_datetime, - convert_datetime_to_iso) +from mvt.common.utils import convert_chrometime_to_datetime, convert_datetime_to_iso from .base import AndroidExtraction @@ -24,13 +23,18 @@ class ChromeHistory(AndroidExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) self.results = [] def serialize(self, record: dict) -> Union[dict, list]: @@ -39,7 +43,7 @@ class ChromeHistory(AndroidExtraction): "module": self.__class__.__name__, "event": "visit", "data": f"{record['id']} - {record['url']} (visit ID: {record['visit_id']}, " - f"redirect source: {record['redirect_source']})" + f"redirect source: {record['redirect_source']})", } def check_indicators(self) -> None: @@ -59,7 +63,8 @@ class ChromeHistory(AndroidExtraction): assert isinstance(self.results, list) # assert results type for mypy conn = sqlite3.connect(db_path) cur = conn.cursor() - cur.execute(""" + cur.execute( + """ SELECT urls.id, urls.url, @@ -69,31 +74,35 @@ class ChromeHistory(AndroidExtraction): FROM urls JOIN visits ON visits.url = urls.id ORDER BY visits.visit_time; - """) + """ + ) for item in cur: - self.results.append({ - "id": item[0], - "url": item[1], - "visit_id": item[2], - "timestamp": item[3], - "isodate": convert_datetime_to_iso( - convert_chrometime_to_datetime(item[3])), - "redirect_source": item[4], - }) + self.results.append( + { + "id": item[0], + "url": item[1], + "visit_id": item[2], + "timestamp": item[3], + "isodate": convert_datetime_to_iso( + convert_chrometime_to_datetime(item[3]) + ), + "redirect_source": item[4], + } + ) cur.close() conn.close() - self.log.info("Extracted a total of %d history items", - len(self.results)) + self.log.info("Extracted a total of %d history items", len(self.results)) def run(self) -> None: self._adb_connect() try: - self._adb_process_file(os.path.join("/", CHROME_HISTORY_PATH), - self._parse_db) + self._adb_process_file( + os.path.join("/", CHROME_HISTORY_PATH), self._parse_db + ) except Exception as exc: self.log.error(exc) diff --git a/mvt/android/modules/adb/dumpsys_accessibility.py b/mvt/android/modules/adb/dumpsys_accessibility.py index 1c6337c..d7f8728 100644 --- a/mvt/android/modules/adb/dumpsys_accessibility.py +++ b/mvt/android/modules/adb/dumpsys_accessibility.py @@ -19,13 +19,18 @@ class DumpsysAccessibility(AndroidExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - 
super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def check_indicators(self) -> None: if not self.indicators: @@ -46,8 +51,10 @@ class DumpsysAccessibility(AndroidExtraction): self.results = parse_dumpsys_accessibility(output) for result in self.results: - self.log.info("Found installed accessibility service \"%s\"", - result.get("service")) + self.log.info( + 'Found installed accessibility service "%s"', result.get("service") + ) - self.log.info("Identified a total of %d accessibility services", - len(self.results)) + self.log.info( + "Identified a total of %d accessibility services", len(self.results) + ) diff --git a/mvt/android/modules/adb/dumpsys_activities.py b/mvt/android/modules/adb/dumpsys_activities.py index 6919991..bf5b421 100644 --- a/mvt/android/modules/adb/dumpsys_activities.py +++ b/mvt/android/modules/adb/dumpsys_activities.py @@ -19,13 +19,18 @@ class DumpsysActivities(AndroidExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) self.results = results if results else {} diff --git a/mvt/android/modules/adb/dumpsys_appops.py b/mvt/android/modules/adb/dumpsys_appops.py index 3ce43df..aeb6805 100644 --- a/mvt/android/modules/adb/dumpsys_appops.py +++ b/mvt/android/modules/adb/dumpsys_appops.py @@ -21,13 +21,18 @@ class DumpsysAppOps(AndroidExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: records = [] @@ -37,13 +42,15 @@ class DumpsysAppOps(AndroidExtraction): for entry in perm["entries"]: if "timestamp" in entry: - records.append({ - "timestamp": entry["timestamp"], - "module": self.__class__.__name__, - "event": entry["access"], - "data": f"{record['package_name']} access to " - f"{perm['name']}: {entry['access']}", - }) + records.append( + { + "timestamp": entry["timestamp"], + "module": self.__class__.__name__, + "event": entry["access"], + "data": f"{record['package_name']} access to " + f"{perm['name']}: {entry['access']}", + } + ) return records @@ -57,10 +64,14 @@ class DumpsysAppOps(AndroidExtraction): continue for perm in result["permissions"]: - if (perm["name"] == "REQUEST_INSTALL_PACKAGES" - and perm["access"] == "allow"): - self.log.info("Package %s with 
REQUEST_INSTALL_PACKAGES " - "permission", result["package_name"]) + if ( + perm["name"] == "REQUEST_INSTALL_PACKAGES" + and perm["access"] == "allow" + ): + self.log.info( + "Package %s with REQUEST_INSTALL_PACKAGES " "permission", + result["package_name"], + ) def run(self) -> None: self._adb_connect() @@ -69,5 +80,6 @@ class DumpsysAppOps(AndroidExtraction): self.results = parse_dumpsys_appops(output) - self.log.info("Extracted a total of %d records from app-ops manager", - len(self.results)) + self.log.info( + "Extracted a total of %d records from app-ops manager", len(self.results) + ) diff --git a/mvt/android/modules/adb/dumpsys_battery_daily.py b/mvt/android/modules/adb/dumpsys_battery_daily.py index 28b9100..8a9c4a6 100644 --- a/mvt/android/modules/adb/dumpsys_battery_daily.py +++ b/mvt/android/modules/adb/dumpsys_battery_daily.py @@ -19,13 +19,18 @@ class DumpsysBatteryDaily(AndroidExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: return { @@ -33,7 +38,7 @@ class DumpsysBatteryDaily(AndroidExtraction): "module": self.__class__.__name__, "event": "battery_daily", "data": f"Recorded update of package {record['package_name']} " - f"with vers {record['vers']}" + f"with vers {record['vers']}", } def check_indicators(self) -> None: @@ -54,5 +59,6 @@ class DumpsysBatteryDaily(AndroidExtraction): self.results = parse_dumpsys_battery_daily(output) - self.log.info("Extracted %d records from battery daily stats", - len(self.results)) + self.log.info( + "Extracted %d records from battery daily stats", len(self.results) + ) diff --git a/mvt/android/modules/adb/dumpsys_battery_history.py b/mvt/android/modules/adb/dumpsys_battery_history.py index 9ed77d9..6e505a4 100644 --- a/mvt/android/modules/adb/dumpsys_battery_history.py +++ b/mvt/android/modules/adb/dumpsys_battery_history.py @@ -19,13 +19,18 @@ class DumpsysBatteryHistory(AndroidExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def check_indicators(self) -> None: if not self.indicators: @@ -45,5 +50,4 @@ class DumpsysBatteryHistory(AndroidExtraction): self.results = parse_dumpsys_battery_history(output) - self.log.info("Extracted %d records from battery history", - len(self.results)) + self.log.info("Extracted %d records from battery history", len(self.results)) diff --git a/mvt/android/modules/adb/dumpsys_dbinfo.py b/mvt/android/modules/adb/dumpsys_dbinfo.py index 01c0561..74cb6b9 
100644 --- a/mvt/android/modules/adb/dumpsys_dbinfo.py +++ b/mvt/android/modules/adb/dumpsys_dbinfo.py @@ -21,13 +21,18 @@ class DumpsysDBInfo(AndroidExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def check_indicators(self) -> None: if not self.indicators: @@ -49,5 +54,7 @@ class DumpsysDBInfo(AndroidExtraction): self.results = parse_dumpsys_dbinfo(output) - self.log.info("Extracted a total of %d records from database information", - len(self.results)) + self.log.info( + "Extracted a total of %d records from database information", + len(self.results), + ) diff --git a/mvt/android/modules/adb/dumpsys_full.py b/mvt/android/modules/adb/dumpsys_full.py index 2e2064b..b9d55e2 100644 --- a/mvt/android/modules/adb/dumpsys_full.py +++ b/mvt/android/modules/adb/dumpsys_full.py @@ -18,13 +18,18 @@ class DumpsysFull(AndroidExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def run(self) -> None: self._adb_connect() diff --git a/mvt/android/modules/adb/dumpsys_receivers.py b/mvt/android/modules/adb/dumpsys_receivers.py index 447df3d..618e421 100644 --- a/mvt/android/modules/adb/dumpsys_receivers.py +++ b/mvt/android/modules/adb/dumpsys_receivers.py @@ -25,13 +25,18 @@ class DumpsysReceivers(AndroidExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) self.results = results if results else {} @@ -42,21 +47,31 @@ class DumpsysReceivers(AndroidExtraction): for intent, receivers in self.results.items(): for receiver in receivers: if intent == INTENT_NEW_OUTGOING_SMS: - self.log.info("Found a receiver to intercept outgoing SMS messages: \"%s\"", - receiver["receiver"]) + self.log.info( + 'Found a receiver to intercept outgoing SMS messages: "%s"', + receiver["receiver"], + ) elif intent == INTENT_SMS_RECEIVED: - self.log.info("Found a receiver to intercept incoming SMS messages: \"%s\"", - receiver["receiver"]) + self.log.info( + 'Found a receiver to intercept 
incoming SMS messages: "%s"', + receiver["receiver"], + ) elif intent == INTENT_DATA_SMS_RECEIVED: - self.log.info("Found a receiver to intercept incoming data SMS message: \"%s\"", - receiver["receiver"]) + self.log.info( + 'Found a receiver to intercept incoming data SMS message: "%s"', + receiver["receiver"], + ) elif intent == INTENT_PHONE_STATE: - self.log.info("Found a receiver monitoring " - "telephony state/incoming calls: \"%s\"", - receiver["receiver"]) + self.log.info( + "Found a receiver monitoring " + 'telephony state/incoming calls: "%s"', + receiver["receiver"], + ) elif intent == INTENT_NEW_OUTGOING_CALL: - self.log.info("Found a receiver monitoring outgoing calls: \"%s\"", - receiver["receiver"]) + self.log.info( + 'Found a receiver monitoring outgoing calls: "%s"', + receiver["receiver"], + ) ioc = self.indicators.check_app_id(receiver["package_name"]) if ioc: diff --git a/mvt/android/modules/adb/files.py b/mvt/android/modules/adb/files.py index 522ad77..0916634 100644 --- a/mvt/android/modules/adb/files.py +++ b/mvt/android/modules/adb/files.py @@ -30,13 +30,18 @@ class Files(AndroidExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) self.full_find = False def serialize(self, record: dict) -> Union[dict, list, None]: @@ -53,12 +58,15 @@ class Files(AndroidExtraction): def check_indicators(self) -> None: for result in self.results: if result.get("is_suid"): - self.log.warning("Found an SUID file in a non-standard directory \"%s\".", - result["path"]) + self.log.warning( + 'Found an SUID file in a non-standard directory "%s".', + result["path"], + ) if self.indicators and self.indicators.check_file_path(result["path"]): - self.log.warning("Found a known suspicous file at path: \"%s\"", - result["path"]) + self.log.warning( + 'Found a known suspicous file at path: "%s"', result["path"] + ) self.detected.append(result) def backup_file(self, file_path: str) -> None: @@ -73,13 +81,13 @@ class Files(AndroidExtraction): local_file_path = os.path.join(local_files_folder, local_file_name) try: - self._adb_download(remote_path=file_path, - local_path=local_file_path) + self._adb_download(remote_path=file_path, local_path=local_file_path) except Exception: pass else: - self.log.info("Downloaded file %s to local copy at %s", - file_path, local_file_path) + self.log.info( + "Downloaded file %s to local copy at %s", file_path, local_file_path + ) def find_files(self, folder: str) -> None: assert isinstance(self.results, list) @@ -92,20 +100,21 @@ class Files(AndroidExtraction): if len(file_line) < 6: self.log.info("Skipping invalid file info - %s", file_line.rstrip()) continue - [unix_timestamp, mode, size, - owner, group, full_path] = file_info + [unix_timestamp, mode, size, owner, group, full_path] = file_info mod_time = convert_unix_to_iso(unix_timestamp) - self.results.append({ - "path": full_path, - "modified_time": mod_time, - "mode": mode, - "is_suid": (int(mode, 8) & stat.S_ISUID) == 2048, - "is_sgid": (int(mode, 8) & stat.S_ISGID) 
== 1024, - "size": size, - "owner": owner, - "group": group, - }) + self.results.append( + { + "path": full_path, + "modified_time": mod_time, + "mode": mode, + "is_suid": (int(mode, 8) & stat.S_ISUID) == 2048, + "is_sgid": (int(mode, 8) & stat.S_ISGID) == 1024, + "size": size, + "owner": owner, + "group": group, + } + ) else: output = self._adb_command(f"find '{folder}' -type f 2> /dev/null") for file_line in output.splitlines(): @@ -123,15 +132,15 @@ class Files(AndroidExtraction): self.find_files(tmp_folder) for entry in self.results: - self.log.info("Found file in tmp folder at path %s", - entry.get("path")) + self.log.info("Found file in tmp folder at path %s", entry.get("path")) self.backup_file(entry.get("path")) for media_folder in ANDROID_MEDIA_FOLDERS: self.find_files(media_folder) - self.log.info("Found %s files in primary Android tmp and media folders", - len(self.results)) + self.log.info( + "Found %s files in primary Android tmp and media folders", len(self.results) + ) if self.fast_mode: self.log.info("Flag --fast was enabled: skipping full file listing") diff --git a/mvt/android/modules/adb/getprop.py b/mvt/android/modules/adb/getprop.py index 5574913..1b793e5 100644 --- a/mvt/android/modules/adb/getprop.py +++ b/mvt/android/modules/adb/getprop.py @@ -20,13 +20,18 @@ class Getprop(AndroidExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) self.results = {} if not results else results @@ -52,10 +57,11 @@ class Getprop(AndroidExtraction): if entry.get("name", "") != "ro.build.version.security_patch": continue patch_date = datetime.strptime(entry["value"], "%Y-%m-%d") - if (datetime.now() - patch_date) > timedelta(days=6*30): - self.log.warning("This phone has not received security updates " - "for more than six months (last update: %s)", - entry["value"]) + if (datetime.now() - patch_date) > timedelta(days=6 * 30): + self.log.warning( + "This phone has not received security updates " + "for more than six months (last update: %s)", + entry["value"], + ) - self.log.info("Extracted %d Android system properties", - len(self.results)) + self.log.info("Extracted %d Android system properties", len(self.results)) diff --git a/mvt/android/modules/adb/logcat.py b/mvt/android/modules/adb/logcat.py index 02c8834..65ad67e 100644 --- a/mvt/android/modules/adb/logcat.py +++ b/mvt/android/modules/adb/logcat.py @@ -18,37 +18,40 @@ class Logcat(AndroidExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + 
) def run(self) -> None: self._adb_connect() # Get the current logcat. - output = self._adb_command("logcat -d -b all \"*:V\"") + output = self._adb_command('logcat -d -b all "*:V"') # Get the locat prior to last reboot. - last_output = self._adb_command("logcat -L -b all \"*:V\"") + last_output = self._adb_command('logcat -L -b all "*:V"') if self.results_path: - logcat_path = os.path.join(self.results_path, - "logcat.txt") + logcat_path = os.path.join(self.results_path, "logcat.txt") with open(logcat_path, "w", encoding="utf-8") as handle: handle.write(output) - self.log.info("Current logcat logs stored at %s", - logcat_path) + self.log.info("Current logcat logs stored at %s", logcat_path) - logcat_last_path = os.path.join(self.results_path, - "logcat_last.txt") + logcat_last_path = os.path.join(self.results_path, "logcat_last.txt") with open(logcat_last_path, "w", encoding="utf-8") as handle: handle.write(last_output) - self.log.info("Logcat logs prior to last reboot stored at %s", - logcat_last_path) + self.log.info( + "Logcat logs prior to last reboot stored at %s", logcat_last_path + ) self._adb_disconnect() diff --git a/mvt/android/modules/adb/packages.py b/mvt/android/modules/adb/packages.py index d3950b1..a6cc613 100644 --- a/mvt/android/modules/adb/packages.py +++ b/mvt/android/modules/adb/packages.py @@ -93,59 +93,65 @@ class Packages(AndroidExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: records = [] timestamps = [ - { - "event": "package_install", - "timestamp": record["timestamp"] - }, + {"event": "package_install", "timestamp": record["timestamp"]}, { "event": "package_first_install", - "timestamp": record["first_install_time"] - }, - { - "event": "package_last_update", - "timestamp": record["last_update_time"] + "timestamp": record["first_install_time"], }, + {"event": "package_last_update", "timestamp": record["last_update_time"]}, ] for timestamp in timestamps: - records.append({ - "timestamp": timestamp["timestamp"], - "module": self.__class__.__name__, - "event": timestamp["event"], - "data": f"{record['package_name']} (system: {record['system']}," - f" third party: {record['third_party']})", - }) + records.append( + { + "timestamp": timestamp["timestamp"], + "module": self.__class__.__name__, + "event": timestamp["event"], + "data": f"{record['package_name']} (system: {record['system']}," + f" third party: {record['third_party']})", + } + ) return records def check_indicators(self) -> None: for result in self.results: if result["package_name"] in ROOT_PACKAGES: - self.log.warning("Found an installed package related to " - "rooting/jailbreaking: \"%s\"", - result["package_name"]) + self.log.warning( + "Found an installed package related to " + 'rooting/jailbreaking: "%s"', + result["package_name"], + ) self.detected.append(result) continue if result["package_name"] in SECURITY_PACKAGES and result["disabled"]: - self.log.warning("Found a security package disabled: 
\"%s\"", - result["package_name"]) + self.log.warning( + 'Found a security package disabled: "%s"', result["package_name"] + ) if result["package_name"] in SYSTEM_UPDATE_PACKAGES and result["disabled"]: - self.log.warning("System OTA update package \"%s\" disabled on the phone", - result["package_name"]) + self.log.warning( + 'System OTA update package "%s" disabled on the phone', + result["package_name"], + ) if not self.indicators: continue @@ -239,22 +245,24 @@ class Packages(AndroidExtraction): for file_path in output.splitlines(): file_path = file_path.strip() - md5 = self._adb_command( - f"md5sum {file_path}").split(" ", maxsplit=1)[0] - sha1 = self._adb_command( - f"sha1sum {file_path}").split(" ", maxsplit=1)[0] - sha256 = self._adb_command( - f"sha256sum {file_path}").split(" ", maxsplit=1)[0] - sha512 = self._adb_command( - f"sha512sum {file_path}").split(" ", maxsplit=1)[0] + md5 = self._adb_command(f"md5sum {file_path}").split(" ", maxsplit=1)[0] + sha1 = self._adb_command(f"sha1sum {file_path}").split(" ", maxsplit=1)[0] + sha256 = self._adb_command(f"sha256sum {file_path}").split(" ", maxsplit=1)[ + 0 + ] + sha512 = self._adb_command(f"sha512sum {file_path}").split(" ", maxsplit=1)[ + 0 + ] - package_files.append({ - "path": file_path, - "md5": md5, - "sha1": sha1, - "sha256": sha256, - "sha512": sha512, - }) + package_files.append( + { + "path": file_path, + "md5": md5, + "sha1": sha1, + "sha256": sha256, + "sha512": sha512, + } + ) return package_files @@ -290,8 +298,7 @@ class Packages(AndroidExtraction): "files": package_files, } - dumpsys_package = self._adb_command( - f"dumpsys package {package_name}") + dumpsys_package = self._adb_command(f"dumpsys package {package_name}") package_details = self.parse_package_for_details(dumpsys_package) new_package.update(package_details) @@ -324,10 +331,12 @@ class Packages(AndroidExtraction): dangerous_permissions_count += 1 if dangerous_permissions_count >= DANGEROUS_PERMISSIONS_THRESHOLD: - self.log.info("Third-party package \"%s\" requested %d " - "potentially dangerous permissions", - result["package_name"], - dangerous_permissions_count) + self.log.info( + 'Third-party package "%s" requested %d ' + "potentially dangerous permissions", + result["package_name"], + dangerous_permissions_count, + ) packages_to_lookup = [] for result in self.results: @@ -335,14 +344,18 @@ class Packages(AndroidExtraction): continue packages_to_lookup.append(result) - self.log.info("Found non-system package with name \"%s\" installed by \"%s\" on %s", - result["package_name"], result["installer"], - result["timestamp"]) + self.log.info( + 'Found non-system package with name "%s" installed by "%s" on %s', + result["package_name"], + result["installer"], + result["timestamp"], + ) if not self.fast_mode: self.check_virustotal(packages_to_lookup) - self.log.info("Extracted at total of %d installed package names", - len(self.results)) + self.log.info( + "Extracted at total of %d installed package names", len(self.results) + ) self._adb_disconnect() diff --git a/mvt/android/modules/adb/processes.py b/mvt/android/modules/adb/processes.py index 10cb1be..7f12e25 100644 --- a/mvt/android/modules/adb/processes.py +++ b/mvt/android/modules/adb/processes.py @@ -17,13 +17,18 @@ class Processes(AndroidExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + 
results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def check_indicators(self) -> None: if not self.indicators: @@ -82,5 +87,4 @@ class Processes(AndroidExtraction): self._adb_disconnect() - self.log.info("Extracted records on a total of %d processes", - len(self.results)) + self.log.info("Extracted records on a total of %d processes", len(self.results)) diff --git a/mvt/android/modules/adb/root_binaries.py b/mvt/android/modules/adb/root_binaries.py index 4629328..0087e02 100644 --- a/mvt/android/modules/adb/root_binaries.py +++ b/mvt/android/modules/adb/root_binaries.py @@ -17,13 +17,18 @@ class RootBinaries(AndroidExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def run(self) -> None: root_binaries = [ @@ -56,6 +61,6 @@ class RootBinaries(AndroidExtraction): continue self.detected.append(root_binary) - self.log.warning("Found root binary \"%s\"", root_binary) + self.log.warning('Found root binary "%s"', root_binary) self._adb_disconnect() diff --git a/mvt/android/modules/adb/selinux_status.py b/mvt/android/modules/adb/selinux_status.py index 6c51e0b..5246d83 100644 --- a/mvt/android/modules/adb/selinux_status.py +++ b/mvt/android/modules/adb/selinux_status.py @@ -19,13 +19,18 @@ class SELinuxStatus(AndroidExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) self.results = {} if not results else results @@ -40,4 +45,4 @@ class SELinuxStatus(AndroidExtraction): if status == "enforcing": self.log.info("SELinux is being regularly enforced") else: - self.log.warning("SELinux status is \"%s\"!", status) + self.log.warning('SELinux status is "%s"!', status) diff --git a/mvt/android/modules/adb/settings.py b/mvt/android/modules/adb/settings.py index 60439d9..1ae9370 100644 --- a/mvt/android/modules/adb/settings.py +++ b/mvt/android/modules/adb/settings.py @@ -53,7 +53,7 @@ ANDROID_DANGEROUS_SETTINGS = [ "description": "enabled installation of non Google Play apps", "key": "install_non_market_apps", "safe_value": "0", - } + }, ] @@ -65,13 +65,18 @@ class Settings(AndroidExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = 
False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) self.results = {} if not results else results @@ -82,8 +87,12 @@ class Settings(AndroidExtraction): # Check if one of the dangerous settings is using an unsafe # value (different than the one specified). if danger["key"] == key and danger["safe_value"] != value: - self.log.warning("Found suspicious setting \"%s = %s\" (%s)", - key, value, danger["description"]) + self.log.warning( + 'Found suspicious setting "%s = %s" (%s)', + key, + value, + danger["description"], + ) break def run(self) -> None: diff --git a/mvt/android/modules/adb/sms.py b/mvt/android/modules/adb/sms.py index 3df55b2..05e6b58 100644 --- a/mvt/android/modules/adb/sms.py +++ b/mvt/android/modules/adb/sms.py @@ -8,8 +8,7 @@ import os import sqlite3 from typing import Optional, Union -from mvt.android.parsers.backup import (AndroidBackupParsingError, - parse_tar_for_sms) +from mvt.android.parsers.backup import AndroidBackupParsingError, parse_tar_for_sms from mvt.common.module import InsufficientPrivileges from mvt.common.utils import check_for_links, convert_unix_to_iso @@ -50,13 +49,18 @@ class SMS(AndroidExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) self.sms_db_type = 0 @@ -66,7 +70,7 @@ class SMS(AndroidExtraction): "timestamp": record["isodate"], "module": self.__class__.__name__, "event": f"sms_{record['direction']}", - "data": f"{record.get('address', 'unknown source')}: \"{body}\"" + "data": f"{record.get('address', 'unknown source')}: \"{body}\"", } def check_indicators(self) -> None: @@ -105,7 +109,7 @@ class SMS(AndroidExtraction): for index, value in enumerate(item): message[names[index]] = value - message["direction"] = ("received" if message["incoming"] == 1 else "sent") + message["direction"] = "received" if message["incoming"] == 1 else "sent" message["isodate"] = convert_unix_to_iso(message["timestamp"]) # Extract links in the message body @@ -117,8 +121,7 @@ class SMS(AndroidExtraction): cur.close() conn.close() - self.log.info("Extracted a total of %d SMS messages", - len(self.results)) + self.log.info("Extracted a total of %d SMS messages", len(self.results)) def _extract_sms_adb(self) -> None: """Use the Android backup command to extract SMS data from the native @@ -135,13 +138,14 @@ class SMS(AndroidExtraction): try: self.results = parse_tar_for_sms(backup_tar) except AndroidBackupParsingError: - self.log.info("Impossible to read SMS from the Android Backup, " - "please extract the SMS and try extracting it with " - "Android Backup Extractor") + self.log.info( + "Impossible to read SMS from the 
Android Backup, " + "please extract the SMS and try extracting it with " + "Android Backup Extractor" + ) return - self.log.info("Extracted a total of %d SMS messages", - len(self.results)) + self.log.info("Extracted a total of %d SMS messages", len(self.results)) def run(self) -> None: self._adb_connect() @@ -149,20 +153,24 @@ class SMS(AndroidExtraction): try: if self._adb_check_file_exists(os.path.join("/", SMS_BUGLE_PATH)): self.sms_db_type = 1 - self._adb_process_file(os.path.join("/", SMS_BUGLE_PATH), - self._parse_db) + self._adb_process_file( + os.path.join("/", SMS_BUGLE_PATH), self._parse_db + ) elif self._adb_check_file_exists(os.path.join("/", SMS_MMSSMS_PATH)): self.sms_db_type = 2 - self._adb_process_file(os.path.join("/", SMS_MMSSMS_PATH), - self._parse_db) + self._adb_process_file( + os.path.join("/", SMS_MMSSMS_PATH), self._parse_db + ) self._adb_disconnect() return except InsufficientPrivileges: pass - self.log.info("No SMS database found. Trying extraction of SMS data " - "using Android backup feature.") + self.log.info( + "No SMS database found. Trying extraction of SMS data " + "using Android backup feature." + ) self._extract_sms_adb() self._adb_disconnect() diff --git a/mvt/android/modules/adb/whatsapp.py b/mvt/android/modules/adb/whatsapp.py index e0de1ae..1d43ee3 100644 --- a/mvt/android/modules/adb/whatsapp.py +++ b/mvt/android/modules/adb/whatsapp.py @@ -24,13 +24,18 @@ class Whatsapp(AndroidExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: text = record["data"].replace("\n", "\\n") @@ -38,7 +43,7 @@ class Whatsapp(AndroidExtraction): "timestamp": record["isodate"], "module": self.__class__.__name__, "event": f"whatsapp_msg_{record['direction']}", - "data": f"\"{text}\"" + "data": f'"{text}"', } def check_indicators(self) -> None: @@ -61,9 +66,11 @@ class Whatsapp(AndroidExtraction): """ conn = sqlite3.connect(db_path) cur = conn.cursor() - cur.execute(""" + cur.execute( + """ SELECT * FROM messages; - """) + """ + ) names = [description[0] for description in cur.description] messages = [] @@ -75,32 +82,30 @@ class Whatsapp(AndroidExtraction): if not message["data"]: continue - message["direction"] = ("send" if message["key_from_me"] == 1 else "received") + message["direction"] = "send" if message["key_from_me"] == 1 else "received" message["isodate"] = convert_unix_to_iso(message["timestamp"]) # If we find links in the messages or if they are empty we add them # to the list. 
- if (check_for_links(message["data"]) - or message["data"].strip() == ""): + if check_for_links(message["data"]) or message["data"].strip() == "": if message.get("thumb_image"): - message["thumb_image"] = base64.b64encode( - message["thumb_image"]) + message["thumb_image"] = base64.b64encode(message["thumb_image"]) messages.append(message) cur.close() conn.close() - self.log.info("Extracted a total of %d WhatsApp messages containing links", - len(messages)) + self.log.info( + "Extracted a total of %d WhatsApp messages containing links", len(messages) + ) self.results = messages def run(self) -> None: self._adb_connect() try: - self._adb_process_file(os.path.join("/", WHATSAPP_PATH), - self._parse_db) + self._adb_process_file(os.path.join("/", WHATSAPP_PATH), self._parse_db) except Exception as exc: self.log.error(exc) diff --git a/mvt/android/modules/androidqf/__init__.py b/mvt/android/modules/androidqf/__init__.py index 568382e..ca749e2 100644 --- a/mvt/android/modules/androidqf/__init__.py +++ b/mvt/android/modules/androidqf/__init__.py @@ -13,6 +13,14 @@ from .processes import Processes from .settings import Settings from .sms import SMS -ANDROIDQF_MODULES = [DumpsysActivities, DumpsysReceivers, DumpsysAccessibility, - DumpsysAppops, Processes, Getprop, Settings, SMS, - DumpsysPackages] +ANDROIDQF_MODULES = [ + DumpsysActivities, + DumpsysReceivers, + DumpsysAccessibility, + DumpsysAppops, + Processes, + Getprop, + Settings, + SMS, + DumpsysPackages, +] diff --git a/mvt/android/modules/androidqf/base.py b/mvt/android/modules/androidqf/base.py index f996cb6..f36758f 100644 --- a/mvt/android/modules/androidqf/base.py +++ b/mvt/android/modules/androidqf/base.py @@ -19,13 +19,18 @@ class AndroidQFModule(MVTModule): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Union[List[Dict[str, Any]], Dict[str, Any], None] = None + results: Union[List[Dict[str, Any]], Dict[str, Any], None] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) self._path = target_path self._files = [] diff --git a/mvt/android/modules/androidqf/dumpsys_accessibility.py b/mvt/android/modules/androidqf/dumpsys_accessibility.py index 48da869..b35ae48 100644 --- a/mvt/android/modules/androidqf/dumpsys_accessibility.py +++ b/mvt/android/modules/androidqf/dumpsys_accessibility.py @@ -19,13 +19,18 @@ class DumpsysAccessibility(AndroidQFModule): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def check_indicators(self) -> None: if not self.indicators: @@ -53,7 +58,9 @@ class DumpsysAccessibility(AndroidQFModule): if not in_accessibility: continue - if 
line.strip().startswith("-------------------------------------------------------------------------------"): # pylint: disable=line-too-long + if line.strip().startswith( + "-------------------------------------------------------------------------------" + ): # pylint: disable=line-too-long break lines.append(line.rstrip()) @@ -61,8 +68,10 @@ class DumpsysAccessibility(AndroidQFModule): self.results = parse_dumpsys_accessibility("\n".join(lines)) for result in self.results: - self.log.info("Found installed accessibility service \"%s\"", - result.get("service")) + self.log.info( + 'Found installed accessibility service "%s"', result.get("service") + ) - self.log.info("Identified a total of %d accessibility services", - len(self.results)) + self.log.info( + "Identified a total of %d accessibility services", len(self.results) + ) diff --git a/mvt/android/modules/androidqf/dumpsys_activities.py b/mvt/android/modules/androidqf/dumpsys_activities.py index 104c228..18db694 100644 --- a/mvt/android/modules/androidqf/dumpsys_activities.py +++ b/mvt/android/modules/androidqf/dumpsys_activities.py @@ -19,13 +19,18 @@ class DumpsysActivities(AndroidQFModule): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) self.results = results if results else {} @@ -56,7 +61,9 @@ class DumpsysActivities(AndroidQFModule): if not in_package: continue - if line.strip().startswith("------------------------------------------------------------------------------"): # pylint: disable=line-too-long + if line.strip().startswith( + "------------------------------------------------------------------------------" + ): # pylint: disable=line-too-long break lines.append(line.rstrip()) diff --git a/mvt/android/modules/androidqf/dumpsys_appops.py b/mvt/android/modules/androidqf/dumpsys_appops.py index 26dc889..e3ff57b 100644 --- a/mvt/android/modules/androidqf/dumpsys_appops.py +++ b/mvt/android/modules/androidqf/dumpsys_appops.py @@ -12,19 +12,23 @@ from .base import AndroidQFModule class DumpsysAppops(AndroidQFModule): - def __init__( self, file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: records = [] @@ -34,13 +38,15 @@ class DumpsysAppops(AndroidQFModule): for entry in perm["entries"]: if "timestamp" in entry: - records.append({ - "timestamp": entry["timestamp"], - "module": self.__class__.__name__, - "event": entry["access"], - "data": f"{record['package_name']} access to " - f"{perm['name']} : 
{entry['access']}", - }) + records.append( + { + "timestamp": entry["timestamp"], + "module": self.__class__.__name__, + "event": entry["access"], + "data": f"{record['package_name']} access to " + f"{perm['name']} : {entry['access']}", + } + ) return records @@ -54,10 +60,14 @@ class DumpsysAppops(AndroidQFModule): continue for perm in result["permissions"]: - if (perm["name"] == "REQUEST_INSTALL_PACKAGES" - and perm["access"] == "allow"): - self.log.info("Package %s with REQUEST_INSTALL_PACKAGES permission", - result["package_name"]) + if ( + perm["name"] == "REQUEST_INSTALL_PACKAGES" + and perm["access"] == "allow" + ): + self.log.info( + "Package %s with REQUEST_INSTALL_PACKAGES permission", + result["package_name"], + ) def run(self) -> None: dumpsys_file = self._get_files_by_pattern("*/dumpsys.txt") @@ -73,11 +83,12 @@ class DumpsysAppops(AndroidQFModule): continue if in_package: - if line.startswith("-------------------------------------------------------------------------------"): # pylint: disable=line-too-long + if line.startswith( + "-------------------------------------------------------------------------------" + ): # pylint: disable=line-too-long break lines.append(line.rstrip()) self.results = parse_dumpsys_appops("\n".join(lines)) - self.log.info("Identified %d applications in AppOps Manager", - len(self.results)) + self.log.info("Identified %d applications in AppOps Manager", len(self.results)) diff --git a/mvt/android/modules/androidqf/dumpsys_packages.py b/mvt/android/modules/androidqf/dumpsys_packages.py index 699ca3a..71e6166 100644 --- a/mvt/android/modules/androidqf/dumpsys_packages.py +++ b/mvt/android/modules/androidqf/dumpsys_packages.py @@ -6,9 +6,11 @@ import logging from typing import Any, Dict, List, Optional, Union -from mvt.android.modules.adb.packages import (DANGEROUS_PERMISSIONS, - DANGEROUS_PERMISSIONS_THRESHOLD, - ROOT_PACKAGES) +from mvt.android.modules.adb.packages import ( + DANGEROUS_PERMISSIONS, + DANGEROUS_PERMISSIONS_THRESHOLD, + ROOT_PACKAGES, +) from mvt.android.parsers.dumpsys import parse_dumpsys_packages from .base import AndroidQFModule @@ -22,34 +24,43 @@ class DumpsysPackages(AndroidQFModule): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[List[Dict[str, Any]]] = None + results: Optional[List[Dict[str, Any]]] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: entries = [] for entry in ["timestamp", "first_install_time", "last_update_time"]: if entry in record: - entries.append({ - "timestamp": record[entry], - "module": self.__class__.__name__, - "event": entry, - "data": f"Package {record['package_name']} " - f"({record['uid']})", - }) + entries.append( + { + "timestamp": record[entry], + "module": self.__class__.__name__, + "event": entry, + "data": f"Package {record['package_name']} " + f"({record['uid']})", + } + ) return entries def check_indicators(self) -> None: for result in self.results: if result["package_name"] in ROOT_PACKAGES: - self.log.warning("Found an installed package related to " - "rooting/jailbreaking: \"%s\"", - 
result["package_name"]) + self.log.warning( + "Found an installed package related to " + 'rooting/jailbreaking: "%s"', + result["package_name"], + ) self.detected.append(result) continue @@ -99,8 +110,10 @@ class DumpsysPackages(AndroidQFModule): dangerous_permissions_count += 1 if dangerous_permissions_count >= DANGEROUS_PERMISSIONS_THRESHOLD: - self.log.info("Found package \"%s\" requested %d potentially dangerous permissions", - result["package_name"], - dangerous_permissions_count) + self.log.info( + 'Found package "%s" requested %d potentially dangerous permissions', + result["package_name"], + dangerous_permissions_count, + ) self.log.info("Extracted details on %d packages", len(self.results)) diff --git a/mvt/android/modules/androidqf/dumpsys_receivers.py b/mvt/android/modules/androidqf/dumpsys_receivers.py index 8614ea0..db91316 100644 --- a/mvt/android/modules/androidqf/dumpsys_receivers.py +++ b/mvt/android/modules/androidqf/dumpsys_receivers.py @@ -7,8 +7,12 @@ import logging from typing import Any, Dict, List, Optional, Union from mvt.android.modules.adb.dumpsys_receivers import ( - INTENT_DATA_SMS_RECEIVED, INTENT_NEW_OUTGOING_CALL, - INTENT_NEW_OUTGOING_SMS, INTENT_PHONE_STATE, INTENT_SMS_RECEIVED) + INTENT_DATA_SMS_RECEIVED, + INTENT_NEW_OUTGOING_CALL, + INTENT_NEW_OUTGOING_SMS, + INTENT_PHONE_STATE, + INTENT_SMS_RECEIVED, +) from mvt.android.parsers import parse_dumpsys_receiver_resolver_table from .base import AndroidQFModule @@ -22,13 +26,18 @@ class DumpsysReceivers(AndroidQFModule): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Union[List[Any], Dict[str, Any], None] = None + results: Union[List[Any], Dict[str, Any], None] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) self.results = results if results else {} @@ -39,21 +48,31 @@ class DumpsysReceivers(AndroidQFModule): for intent, receivers in self.results.items(): for receiver in receivers: if intent == INTENT_NEW_OUTGOING_SMS: - self.log.info("Found a receiver to intercept outgoing SMS messages: \"%s\"", - receiver["receiver"]) + self.log.info( + 'Found a receiver to intercept outgoing SMS messages: "%s"', + receiver["receiver"], + ) elif intent == INTENT_SMS_RECEIVED: - self.log.info("Found a receiver to intercept incoming SMS messages: \"%s\"", - receiver["receiver"]) + self.log.info( + 'Found a receiver to intercept incoming SMS messages: "%s"', + receiver["receiver"], + ) elif intent == INTENT_DATA_SMS_RECEIVED: - self.log.info("Found a receiver to intercept incoming data SMS message: \"%s\"", - receiver["receiver"]) + self.log.info( + 'Found a receiver to intercept incoming data SMS message: "%s"', + receiver["receiver"], + ) elif intent == INTENT_PHONE_STATE: - self.log.info("Found a receiver monitoring " - "telephony state/incoming calls: \"%s\"", - receiver["receiver"]) + self.log.info( + "Found a receiver monitoring " + 'telephony state/incoming calls: "%s"', + receiver["receiver"], + ) elif intent == INTENT_NEW_OUTGOING_CALL: - self.log.info("Found a receiver monitoring outgoing calls: \"%s\"", - receiver["receiver"]) + self.log.info( + 'Found a receiver 
monitoring outgoing calls: "%s"', + receiver["receiver"], + ) ioc = self.indicators.check_app_id(receiver["package_name"]) if ioc: @@ -76,7 +95,9 @@ class DumpsysReceivers(AndroidQFModule): if not in_receivers: continue - if line.strip().startswith("------------------------------------------------------------------------------"): # pylint: disable=line-too-long + if line.strip().startswith( + "------------------------------------------------------------------------------" + ): # pylint: disable=line-too-long break lines.append(line.rstrip()) diff --git a/mvt/android/modules/androidqf/getprop.py b/mvt/android/modules/androidqf/getprop.py index 4866422..3ba63cb 100644 --- a/mvt/android/modules/androidqf/getprop.py +++ b/mvt/android/modules/androidqf/getprop.py @@ -22,7 +22,7 @@ INTERESTING_PROPERTIES = [ "ro.product.locale", "ro.product.vendor.manufacturer", "ro.product.vendor.model", - "ro.product.vendor.name" + "ro.product.vendor.name", ] @@ -34,13 +34,18 @@ class Getprop(AndroidQFModule): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) self.results = [] def check_indicators(self) -> None: @@ -68,9 +73,12 @@ class Getprop(AndroidQFModule): self.log.info("%s: %s", entry["name"], entry["value"]) if entry["name"] == "ro.build.version.security_patch": last_patch = datetime.strptime(entry["value"], "%Y-%m-%d") - if (datetime.now() - last_patch) > timedelta(days=6*31): - self.log.warning("This phone has not received security " - "updates for more than six months " - "(last update: %s)", entry["value"]) + if (datetime.now() - last_patch) > timedelta(days=6 * 31): + self.log.warning( + "This phone has not received security " + "updates for more than six months " + "(last update: %s)", + entry["value"], + ) self.log.info("Extracted a total of %d properties", len(self.results)) diff --git a/mvt/android/modules/androidqf/processes.py b/mvt/android/modules/androidqf/processes.py index fb879ae..2043b0a 100644 --- a/mvt/android/modules/androidqf/processes.py +++ b/mvt/android/modules/androidqf/processes.py @@ -17,13 +17,18 @@ class Processes(AndroidQFModule): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def check_indicators(self) -> None: if not self.indicators: @@ -55,7 +60,7 @@ class Processes(AndroidQFModule): # Sometimes WCHAN is empty. if len(proc) == 8: - proc = proc[:5] + [''] + proc[5:] + proc = proc[:5] + [""] + proc[5:] # Sometimes there is the security label. 
if proc[0].startswith("u:r"): @@ -68,18 +73,20 @@ class Processes(AndroidQFModule): if len(proc) < 9: proc = proc[:5] + [""] + proc[5:] - self.results.append({ - "user": proc[0], - "pid": int(proc[1]), - "ppid": int(proc[2]), - "virtual_memory_size": int(proc[3]), - "resident_set_size": int(proc[4]), - "wchan": proc[5], - "aprocress": proc[6], - "stat": proc[7], - "proc_name": proc[8].strip("[]"), - "label": label, - }) + self.results.append( + { + "user": proc[0], + "pid": int(proc[1]), + "ppid": int(proc[2]), + "virtual_memory_size": int(proc[3]), + "resident_set_size": int(proc[4]), + "wchan": proc[5], + "aprocress": proc[6], + "stat": proc[7], + "proc_name": proc[8].strip("[]"), + "label": label, + } + ) def run(self) -> None: ps_files = self._get_files_by_pattern("*/ps.txt") diff --git a/mvt/android/modules/androidqf/settings.py b/mvt/android/modules/androidqf/settings.py index 17756e7..4e3ca39 100644 --- a/mvt/android/modules/androidqf/settings.py +++ b/mvt/android/modules/androidqf/settings.py @@ -19,18 +19,23 @@ class Settings(AndroidQFModule): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) self.results = {} def run(self) -> None: for setting_file in self._get_files_by_pattern("*/settings_*.txt"): - namespace = setting_file[setting_file.rfind("_")+1:-4] + namespace = setting_file[setting_file.rfind("_") + 1 : -4] self.results[namespace] = {} @@ -48,11 +53,15 @@ class Settings(AndroidQFModule): continue for danger in ANDROID_DANGEROUS_SETTINGS: - if (danger["key"] == key - and danger["safe_value"] != value): - self.log.warning("Found suspicious setting \"%s = %s\" (%s)", - key, value, danger["description"]) + if danger["key"] == key and danger["safe_value"] != value: + self.log.warning( + 'Found suspicious setting "%s = %s" (%s)', + key, + value, + danger["description"], + ) break - self.log.info("Identified %d settings", - sum([len(val) for val in self.results.values()])) + self.log.info( + "Identified %d settings", sum([len(val) for val in self.results.values()]) + ) diff --git a/mvt/android/modules/androidqf/sms.py b/mvt/android/modules/androidqf/sms.py index bb2085e..58c436c 100644 --- a/mvt/android/modules/androidqf/sms.py +++ b/mvt/android/modules/androidqf/sms.py @@ -7,9 +7,13 @@ import getpass import logging from typing import Optional -from mvt.android.parsers.backup import (AndroidBackupParsingError, - InvalidBackupPassword, parse_ab_header, - parse_backup_file, parse_tar_for_sms) +from mvt.android.parsers.backup import ( + AndroidBackupParsingError, + InvalidBackupPassword, + parse_ab_header, + parse_backup_file, + parse_tar_for_sms, +) from .base import AndroidQFModule @@ -22,13 +26,18 @@ class SMS(AndroidQFModule): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - 
super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def check_indicators(self) -> None: if not self.indicators: @@ -56,8 +65,10 @@ class SMS(AndroidQFModule): self.log.critical("Invalid backup password") return except AndroidBackupParsingError: - self.log.critical("Impossible to parse this backup file, please use" - " Android Backup Extractor instead") + self.log.critical( + "Impossible to parse this backup file, please use" + " Android Backup Extractor instead" + ) return if not tardata: @@ -66,9 +77,11 @@ class SMS(AndroidQFModule): try: self.results = parse_tar_for_sms(tardata) except AndroidBackupParsingError: - self.log.info("Impossible to read SMS from the Android Backup, " - "please extract the SMS and try extracting it with " - "Android Backup Extractor") + self.log.info( + "Impossible to read SMS from the Android Backup, " + "please extract the SMS and try extracting it with " + "Android Backup Extractor" + ) return def run(self) -> None: @@ -81,5 +94,4 @@ class SMS(AndroidQFModule): data = handle.read() self.parse_backup(data) - self.log.info("Identified %d SMS in backup data", - len(self.results)) + self.log.info("Identified %d SMS in backup data", len(self.results)) diff --git a/mvt/android/modules/backup/base.py b/mvt/android/modules/backup/base.py index 5e6aa18..7e35dd0 100644 --- a/mvt/android/modules/backup/base.py +++ b/mvt/android/modules/backup/base.py @@ -20,13 +20,18 @@ class BackupExtraction(MVTModule): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) self.ab = None self.backup_path = None self.tar = None @@ -39,7 +44,9 @@ class BackupExtraction(MVTModule): self.backup_path = backup_path self.files = files - def from_ab(self, file_path: Optional[str], tar: Optional[TarFile], files: List[str]) -> None: + def from_ab( + self, file_path: Optional[str], tar: Optional[TarFile], files: List[str] + ) -> None: """ Extract the files """ diff --git a/mvt/android/modules/backup/sms.py b/mvt/android/modules/backup/sms.py index 29eb4b2..4e1a1e8 100644 --- a/mvt/android/modules/backup/sms.py +++ b/mvt/android/modules/backup/sms.py @@ -12,19 +12,23 @@ from mvt.common.utils import check_for_links class SMS(BackupExtraction): - def __init__( self, file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + 
log=log, + results=results, + ) self.results = [] def check_indicators(self) -> None: @@ -55,5 +59,4 @@ class SMS(BackupExtraction): data = self._get_file_content(file) self.results.extend(parse_sms_file(data)) - self.log.info("Extracted a total of %d SMS & MMS messages", - len(self.results)) + self.log.info("Extracted a total of %d SMS & MMS messages", len(self.results)) diff --git a/mvt/android/modules/bugreport/__init__.py b/mvt/android/modules/bugreport/__init__.py index 2a30aef..702caba 100644 --- a/mvt/android/modules/bugreport/__init__.py +++ b/mvt/android/modules/bugreport/__init__.py @@ -13,5 +13,14 @@ from .getprop import Getprop from .packages import Packages from .receivers import Receivers -BUGREPORT_MODULES = [Accessibility, Activities, Appops, BatteryDaily, - BatteryHistory, DBInfo, Getprop, Packages, Receivers] +BUGREPORT_MODULES = [ + Accessibility, + Activities, + Appops, + BatteryDaily, + BatteryHistory, + DBInfo, + Getprop, + Packages, + Receivers, +] diff --git a/mvt/android/modules/bugreport/accessibility.py b/mvt/android/modules/bugreport/accessibility.py index cb906b9..d69ede1 100644 --- a/mvt/android/modules/bugreport/accessibility.py +++ b/mvt/android/modules/bugreport/accessibility.py @@ -19,13 +19,18 @@ class Accessibility(BugReportModule): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def check_indicators(self) -> None: if not self.indicators: @@ -41,8 +46,10 @@ class Accessibility(BugReportModule): def run(self) -> None: content = self._get_dumpstate_file() if not content: - self.log.error("Unable to find dumpstate file. " - "Did you provide a valid bug report archive?") + self.log.error( + "Unable to find dumpstate file. " + "Did you provide a valid bug report archive?" 
+ ) return lines = [] @@ -55,15 +62,19 @@ class Accessibility(BugReportModule): if not in_accessibility: continue - if line.strip().startswith("------------------------------------------------------------------------------"): # pylint: disable=line-too-long + if line.strip().startswith( + "------------------------------------------------------------------------------" + ): # pylint: disable=line-too-long break lines.append(line) self.results = parse_dumpsys_accessibility("\n".join(lines)) for result in self.results: - self.log.info("Found installed accessibility service \"%s\"", - result.get("service")) + self.log.info( + 'Found installed accessibility service "%s"', result.get("service") + ) - self.log.info("Identified a total of %d accessibility services", - len(self.results)) + self.log.info( + "Identified a total of %d accessibility services", len(self.results) + ) diff --git a/mvt/android/modules/bugreport/activities.py b/mvt/android/modules/bugreport/activities.py index 60e7f53..e82b3bb 100644 --- a/mvt/android/modules/bugreport/activities.py +++ b/mvt/android/modules/bugreport/activities.py @@ -19,13 +19,18 @@ class Activities(BugReportModule): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) self.results = results if results else {} @@ -44,8 +49,10 @@ class Activities(BugReportModule): def run(self) -> None: content = self._get_dumpstate_file() if not content: - self.log.error("Unable to find dumpstate file. " - "Did you provide a valid bug report archive?") + self.log.error( + "Unable to find dumpstate file. " + "Did you provide a valid bug report archive?" 
+ ) return lines = [] @@ -58,7 +65,9 @@ class Activities(BugReportModule): if not in_package: continue - if line.strip().startswith("------------------------------------------------------------------------------"): # pylint: disable=line-too-long + if line.strip().startswith( + "------------------------------------------------------------------------------" + ): # pylint: disable=line-too-long break lines.append(line) diff --git a/mvt/android/modules/bugreport/appops.py b/mvt/android/modules/bugreport/appops.py index 23838d0..71417e7 100644 --- a/mvt/android/modules/bugreport/appops.py +++ b/mvt/android/modules/bugreport/appops.py @@ -19,13 +19,18 @@ class Appops(BugReportModule): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: records = [] @@ -35,13 +40,15 @@ class Appops(BugReportModule): for entry in perm["entries"]: if "timestamp" in entry: - records.append({ - "timestamp": entry["timestamp"], - "module": self.__class__.__name__, - "event": entry["access"], - "data": f"{record['package_name']} access to " - f"{perm['name']}: {entry['access']}", - }) + records.append( + { + "timestamp": entry["timestamp"], + "module": self.__class__.__name__, + "event": entry["access"], + "data": f"{record['package_name']} access to " + f"{perm['name']}: {entry['access']}", + } + ) return records @@ -55,16 +62,22 @@ class Appops(BugReportModule): continue for perm in result["permissions"]: - if (perm["name"] == "REQUEST_INSTALL_PACKAGES" - and perm["access"] == "allow"): - self.log.info("Package %s with REQUEST_INSTALL_PACKAGES permission", - result["package_name"]) + if ( + perm["name"] == "REQUEST_INSTALL_PACKAGES" + and perm["access"] == "allow" + ): + self.log.info( + "Package %s with REQUEST_INSTALL_PACKAGES permission", + result["package_name"], + ) def run(self) -> None: content = self._get_dumpstate_file() if not content: - self.log.error("Unable to find dumpstate file. " - "Did you provide a valid bug report archive?") + self.log.error( + "Unable to find dumpstate file. " + "Did you provide a valid bug report archive?" 
+ ) return lines = [] @@ -77,12 +90,15 @@ class Appops(BugReportModule): if not in_appops: continue - if line.strip().startswith("------------------------------------------------------------------------------"): # pylint: disable=line-too-long + if line.strip().startswith( + "------------------------------------------------------------------------------" + ): # pylint: disable=line-too-long break lines.append(line) self.results = parse_dumpsys_appops("\n".join(lines)) - self.log.info("Identified a total of %d packages in App-Ops Manager", - len(self.results)) + self.log.info( + "Identified a total of %d packages in App-Ops Manager", len(self.results) + ) diff --git a/mvt/android/modules/bugreport/base.py b/mvt/android/modules/bugreport/base.py index ee97a33..4ed2026 100644 --- a/mvt/android/modules/bugreport/base.py +++ b/mvt/android/modules/bugreport/base.py @@ -20,20 +20,27 @@ class BugReportModule(MVTModule): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) self.zip_archive: Optional[ZipFile] = None self.extract_path: Optional[str] = None self.extract_files: List[str] = [] self.zip_files: List[str] = [] - def from_folder(self, extract_path: Optional[str], extract_files: List[str]) -> None: + def from_folder( + self, extract_path: Optional[str], extract_files: List[str] + ) -> None: self.extract_path = extract_path self.extract_files = extract_files diff --git a/mvt/android/modules/bugreport/battery_daily.py b/mvt/android/modules/bugreport/battery_daily.py index 8b381f5..0700801 100644 --- a/mvt/android/modules/bugreport/battery_daily.py +++ b/mvt/android/modules/bugreport/battery_daily.py @@ -19,13 +19,18 @@ class BatteryDaily(BugReportModule): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: return { @@ -33,7 +38,7 @@ class BatteryDaily(BugReportModule): "module": self.__class__.__name__, "event": "battery_daily", "data": f"Recorded update of package {record['package_name']} " - f"with vers {record['vers']}" + f"with vers {record['vers']}", } def check_indicators(self) -> None: @@ -50,8 +55,10 @@ class BatteryDaily(BugReportModule): def run(self) -> None: content = self._get_dumpstate_file() if not content: - self.log.error("Unable to find dumpstate file. " - "Did you provide a valid bug report archive?") + self.log.error( + "Unable to find dumpstate file. " + "Did you provide a valid bug report archive?" 
+ ) return lines = [] @@ -80,5 +87,4 @@ class BatteryDaily(BugReportModule): self.results = parse_dumpsys_battery_daily("\n".join(lines)) - self.log.info("Extracted a total of %d battery daily stats", - len(self.results)) + self.log.info("Extracted a total of %d battery daily stats", len(self.results)) diff --git a/mvt/android/modules/bugreport/battery_history.py b/mvt/android/modules/bugreport/battery_history.py index d9d4035..3aafd34 100644 --- a/mvt/android/modules/bugreport/battery_history.py +++ b/mvt/android/modules/bugreport/battery_history.py @@ -19,13 +19,18 @@ class BatteryHistory(BugReportModule): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def check_indicators(self) -> None: if not self.indicators: @@ -41,8 +46,10 @@ class BatteryHistory(BugReportModule): def run(self) -> None: content = self._get_dumpstate_file() if not content: - self.log.error("Unable to find dumpstate file. " - "Did you provide a valid bug report archive?") + self.log.error( + "Unable to find dumpstate file. " + "Did you provide a valid bug report archive?" + ) return lines = [] @@ -63,5 +70,6 @@ class BatteryHistory(BugReportModule): self.results = parse_dumpsys_battery_history("\n".join(lines)) - self.log.info("Extracted a total of %d battery history records", - len(self.results)) + self.log.info( + "Extracted a total of %d battery history records", len(self.results) + ) diff --git a/mvt/android/modules/bugreport/dbinfo.py b/mvt/android/modules/bugreport/dbinfo.py index b3b1b94..01e86a1 100644 --- a/mvt/android/modules/bugreport/dbinfo.py +++ b/mvt/android/modules/bugreport/dbinfo.py @@ -21,13 +21,18 @@ class DBInfo(BugReportModule): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def check_indicators(self) -> None: if not self.indicators: @@ -45,8 +50,10 @@ class DBInfo(BugReportModule): def run(self) -> None: content = self._get_dumpstate_file() if not content: - self.log.error("Unable to find dumpstate file. " - "Did you provide a valid bug report archive?") + self.log.error( + "Unable to find dumpstate file. " + "Did you provide a valid bug report archive?" 
+ ) return in_dbinfo = False @@ -59,12 +66,16 @@ class DBInfo(BugReportModule): if not in_dbinfo: continue - if line.strip().startswith("------------------------------------------------------------------------------"): # pylint: disable=line-too-long + if line.strip().startswith( + "------------------------------------------------------------------------------" + ): # pylint: disable=line-too-long break lines.append(line) self.results = parse_dumpsys_dbinfo("\n".join(lines)) - self.log.info("Extracted a total of %d database connection pool records", - len(self.results)) + self.log.info( + "Extracted a total of %d database connection pool records", + len(self.results), + ) diff --git a/mvt/android/modules/bugreport/getprop.py b/mvt/android/modules/bugreport/getprop.py index 46861d9..b9fe673 100644 --- a/mvt/android/modules/bugreport/getprop.py +++ b/mvt/android/modules/bugreport/getprop.py @@ -20,21 +20,28 @@ class Getprop(BugReportModule): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) self.results = {} if not results else results def run(self) -> None: content = self._get_dumpstate_file() if not content: - self.log.error("Unable to find dumpstate file. " - "Did you provide a valid bug report archive?") + self.log.error( + "Unable to find dumpstate file. " + "Did you provide a valid bug report archive?" 
+ ) return lines = [] @@ -60,10 +67,11 @@ class Getprop(BugReportModule): if entry["name"] == "ro.build.version.security_patch": security_patch = entry["value"] patch_date = datetime.strptime(security_patch, "%Y-%m-%d") - if (datetime.now() - patch_date) > timedelta(days=6*30): - self.log.warning("This phone has not received security updates " - "for more than six months (last update: %s)", - security_patch) + if (datetime.now() - patch_date) > timedelta(days=6 * 30): + self.log.warning( + "This phone has not received security updates " + "for more than six months (last update: %s)", + security_patch, + ) - self.log.info("Extracted %d Android system properties", - len(self.results)) + self.log.info("Extracted %d Android system properties", len(self.results)) diff --git a/mvt/android/modules/bugreport/packages.py b/mvt/android/modules/bugreport/packages.py index 442d76c..53c5431 100644 --- a/mvt/android/modules/bugreport/packages.py +++ b/mvt/android/modules/bugreport/packages.py @@ -6,9 +6,11 @@ import logging from typing import Optional, Union -from mvt.android.modules.adb.packages import (DANGEROUS_PERMISSIONS, - DANGEROUS_PERMISSIONS_THRESHOLD, - ROOT_PACKAGES) +from mvt.android.modules.adb.packages import ( + DANGEROUS_PERMISSIONS, + DANGEROUS_PERMISSIONS_THRESHOLD, + ROOT_PACKAGES, +) from mvt.android.parsers.dumpsys import parse_dumpsys_packages from .base import BugReportModule @@ -22,48 +24,51 @@ class Packages(BugReportModule): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: records = [] timestamps = [ - { - "event": "package_install", - "timestamp": record["timestamp"] - }, + {"event": "package_install", "timestamp": record["timestamp"]}, { "event": "package_first_install", - "timestamp": record["first_install_time"] - }, - { - "event": "package_last_update", - "timestamp": record["last_update_time"] + "timestamp": record["first_install_time"], }, + {"event": "package_last_update", "timestamp": record["last_update_time"]}, ] for timestamp in timestamps: - records.append({ - "timestamp": timestamp["timestamp"], - "module": self.__class__.__name__, - "event": timestamp["event"], - "data": f"Install or update of package {record['package_name']}", - }) + records.append( + { + "timestamp": timestamp["timestamp"], + "module": self.__class__.__name__, + "event": timestamp["event"], + "data": f"Install or update of package {record['package_name']}", + } + ) return records def check_indicators(self) -> None: for result in self.results: if result["package_name"] in ROOT_PACKAGES: - self.log.warning("Found an installed package related to " - "rooting/jailbreaking: \"%s\"", - result["package_name"]) + self.log.warning( + "Found an installed package related to " + 'rooting/jailbreaking: "%s"', + result["package_name"], + ) self.detected.append(result) continue @@ -79,8 +84,10 @@ class Packages(BugReportModule): def run(self) -> None: content = self._get_dumpstate_file() if not content: - 
self.log.error("Unable to find dumpstate file. " - "Did you provide a valid bug report archive?") + self.log.error( + "Unable to find dumpstate file. " + "Did you provide a valid bug report archive?" + ) return in_package = False @@ -115,8 +122,10 @@ class Packages(BugReportModule): dangerous_permissions_count += 1 if dangerous_permissions_count >= DANGEROUS_PERMISSIONS_THRESHOLD: - self.log.info("Found package \"%s\" requested %d potentially dangerous permissions", - result["package_name"], - dangerous_permissions_count) + self.log.info( + 'Found package "%s" requested %d potentially dangerous permissions', + result["package_name"], + dangerous_permissions_count, + ) self.log.info("Extracted details on %d packages", len(self.results)) diff --git a/mvt/android/modules/bugreport/receivers.py b/mvt/android/modules/bugreport/receivers.py index 4d1ece0..1469d6d 100644 --- a/mvt/android/modules/bugreport/receivers.py +++ b/mvt/android/modules/bugreport/receivers.py @@ -25,13 +25,18 @@ class Receivers(BugReportModule): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) self.results = results if results else {} @@ -42,21 +47,31 @@ class Receivers(BugReportModule): for intent, receivers in self.results.items(): for receiver in receivers: if intent == INTENT_NEW_OUTGOING_SMS: - self.log.info("Found a receiver to intercept outgoing SMS messages: \"%s\"", - receiver["receiver"]) + self.log.info( + 'Found a receiver to intercept outgoing SMS messages: "%s"', + receiver["receiver"], + ) elif intent == INTENT_SMS_RECEIVED: - self.log.info("Found a receiver to intercept incoming SMS messages: \"%s\"", - receiver["receiver"]) + self.log.info( + 'Found a receiver to intercept incoming SMS messages: "%s"', + receiver["receiver"], + ) elif intent == INTENT_DATA_SMS_RECEIVED: - self.log.info("Found a receiver to intercept incoming data SMS message: \"%s\"", - receiver["receiver"]) + self.log.info( + 'Found a receiver to intercept incoming data SMS message: "%s"', + receiver["receiver"], + ) elif intent == INTENT_PHONE_STATE: - self.log.info("Found a receiver monitoring " - "telephony state/incoming calls: \"%s\"", - receiver["receiver"]) + self.log.info( + "Found a receiver monitoring " + 'telephony state/incoming calls: "%s"', + receiver["receiver"], + ) elif intent == INTENT_NEW_OUTGOING_CALL: - self.log.info("Found a receiver monitoring outgoing calls: \"%s\"", - receiver["receiver"]) + self.log.info( + 'Found a receiver monitoring outgoing calls: "%s"', + receiver["receiver"], + ) ioc = self.indicators.check_app_id(receiver["package_name"]) if ioc: @@ -67,8 +82,10 @@ class Receivers(BugReportModule): def run(self) -> None: content = self._get_dumpstate_file() if not content: - self.log.error("Unable to find dumpstate file. " - "Did you provide a valid bug report archive?") + self.log.error( + "Unable to find dumpstate file. " + "Did you provide a valid bug report archive?" 
+ ) return in_receivers = False @@ -81,7 +98,9 @@ class Receivers(BugReportModule): if not in_receivers: continue - if line.strip().startswith("------------------------------------------------------------------------------"): # pylint: disable=line-too-long + if line.strip().startswith( + "------------------------------------------------------------------------------" + ): # pylint: disable=line-too-long break lines.append(line) diff --git a/mvt/android/parsers/__init__.py b/mvt/android/parsers/__init__.py index b83e0c7..f86d5b3 100644 --- a/mvt/android/parsers/__init__.py +++ b/mvt/android/parsers/__init__.py @@ -3,9 +3,13 @@ # Use of this software is governed by the MVT License 1.1 that can be found at # https://license.mvt.re/1.1/ -from .dumpsys import (parse_dumpsys_accessibility, - parse_dumpsys_activity_resolver_table, - parse_dumpsys_appops, parse_dumpsys_battery_daily, - parse_dumpsys_battery_history, parse_dumpsys_dbinfo, - parse_dumpsys_receiver_resolver_table) +from .dumpsys import ( + parse_dumpsys_accessibility, + parse_dumpsys_activity_resolver_table, + parse_dumpsys_appops, + parse_dumpsys_battery_daily, + parse_dumpsys_battery_history, + parse_dumpsys_dbinfo, + parse_dumpsys_receiver_resolver_table, +) from .getprop import parse_getprop diff --git a/mvt/android/parsers/backup.py b/mvt/android/parsers/backup.py index cb36943..db0ff76 100644 --- a/mvt/android/parsers/backup.py +++ b/mvt/android/parsers/backup.py @@ -31,15 +31,16 @@ class InvalidBackupPassword(AndroidBackupParsingError): # TODO: Need to clean all the following code and conform it to the coding style. + def to_utf8_bytes(input_bytes): output = [] for byte in input_bytes: - if byte < ord(b'\x80'): + if byte < ord(b"\x80"): output.append(byte) else: - output.append(ord('\xef') | (byte >> 12)) - output.append(ord('\xbc') | ((byte >> 6) & ord('\x3f'))) - output.append(ord('\x80') | (byte & ord('\x3f'))) + output.append(ord("\xef") | (byte >> 12)) + output.append(ord("\xbc") | ((byte >> 6) & ord("\x3f"))) + output.append(ord("\x80") | (byte & ord("\x3f"))) return bytes(output) @@ -55,33 +56,38 @@ def parse_ab_header(data): "backup": True, "compression": (is_compressed == b"1"), "version": int(version), - "encryption": encryption.decode("utf-8") + "encryption": encryption.decode("utf-8"), } - return { - "backup": False, - "compression": None, - "version": None, - "encryption": None - } + return {"backup": False, "compression": None, "version": None, "encryption": None} -def decrypt_master_key(password, user_salt, user_iv, pbkdf2_rounds, - master_key_blob, format_version, checksum_salt): +def decrypt_master_key( + password, + user_salt, + user_iv, + pbkdf2_rounds, + master_key_blob, + format_version, + checksum_salt, +): """Generate AES key from user password uisng PBKDF2 The backup master key is extracted from the master key blog after decryption. """ # Derive key from password using PBKDF2. - kdf = PBKDF2HMAC(algorithm=hashes.SHA1(), length=32, salt=user_salt, - iterations=pbkdf2_rounds) + kdf = PBKDF2HMAC( + algorithm=hashes.SHA1(), length=32, salt=user_salt, iterations=pbkdf2_rounds + ) key = kdf.derive(password.encode("utf-8")) # Decrypt master key blob. cipher = Cipher(algorithms.AES(key), modes.CBC(user_iv)) decryptor = cipher.decryptor() try: - decryted_master_key_blob = decryptor.update(master_key_blob) + decryptor.finalize() + decryted_master_key_blob = ( + decryptor.update(master_key_blob) + decryptor.finalize() + ) # Extract key and IV from decrypted blob. 
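# Illustrative note, not part of the patch (the parsing itself sits outside this
# hunk's context lines): in the Android backup format the decrypted master-key
# blob is a series of one-byte-length-prefixed fields -- IV, master key, then
# checksum -- which is why the code below wraps it in a BytesIO and reads it
# sequentially. Roughly (sketch only, field names assumed):
#
#   blob = io.BytesIO(decryted_master_key_blob)
#   iv_length = ord(blob.read(1))
#   master_iv = blob.read(iv_length)
#   # ...then the master key and the checksum are read the same way.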
key_blob = io.BytesIO(decryted_master_key_blob) @@ -103,8 +109,9 @@ def decrypt_master_key(password, user_salt, user_iv, pbkdf2_rounds, hmac_mk = master_key # Derive checksum to confirm successful backup decryption. - kdf = PBKDF2HMAC(algorithm=hashes.SHA1(), length=32, salt=checksum_salt, - iterations=pbkdf2_rounds) + kdf = PBKDF2HMAC( + algorithm=hashes.SHA1(), length=32, salt=checksum_salt, iterations=pbkdf2_rounds + ) calculated_checksum = kdf.derive(hmac_mk) if master_key_checksum != calculated_checksum: @@ -113,8 +120,7 @@ def decrypt_master_key(password, user_salt, user_iv, pbkdf2_rounds, return master_key, master_iv -def decrypt_backup_data(encrypted_backup, password, encryption_algo, - format_version): +def decrypt_backup_data(encrypted_backup, password, encryption_algo, format_version): """ Generate encryption keyffrom password and do decryption @@ -125,8 +131,14 @@ def decrypt_backup_data(encrypted_backup, password, encryption_algo, if password is None: raise InvalidBackupPassword() - [user_salt, checksum_salt, pbkdf2_rounds, user_iv, - master_key_blob, encrypted_data] = encrypted_backup.split(b"\n", 5) + [ + user_salt, + checksum_salt, + pbkdf2_rounds, + user_iv, + master_key_blob, + encrypted_data, + ] = encrypted_backup.split(b"\n", 5) user_salt = bytes.fromhex(user_salt.decode("utf-8")) checksum_salt = bytes.fromhex(checksum_salt.decode("utf-8")) @@ -135,13 +147,15 @@ def decrypt_backup_data(encrypted_backup, password, encryption_algo, master_key_blob = bytes.fromhex(master_key_blob.decode("utf-8")) # Derive decryption master key from password. - master_key, master_iv = decrypt_master_key(password=password, - user_salt=user_salt, - user_iv=user_iv, - pbkdf2_rounds=pbkdf2_rounds, - master_key_blob=master_key_blob, - format_version=format_version, - checksum_salt=checksum_salt) + master_key, master_iv = decrypt_master_key( + password=password, + user_salt=user_salt, + user_iv=user_iv, + pbkdf2_rounds=pbkdf2_rounds, + master_key_blob=master_key_blob, + format_version=format_version, + checksum_salt=checksum_salt, + ) # Decrypt and unpad backup data using derivied key. 
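# Illustrative note, not part of the patch: the ciphertext decrypted below is the
# last of the six newline-separated fields split out earlier in this function --
# user salt (hex), checksum salt (hex), PBKDF2 round count (ASCII integer),
# user IV (hex), encrypted master-key blob (hex), then the raw AES-CBC encrypted
# tar stream. E.g. (truncated placeholder values, not real data):
#
#   b"8A1F...\nC203...\n10000\n5D9E...\nB7A0...\n<AES-CBC encrypted tar stream>"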
cipher = Cipher(algorithms.AES(master_key), modes.CBC(master_iv)) @@ -160,21 +174,23 @@ def parse_backup_file(data, password=None): if not data.startswith(b"ANDROID BACKUP"): raise AndroidBackupParsingError("Invalid file header") - [_, version, is_compressed, - encryption_algo, tar_data] = data.split(b"\n", 4) + [_, version, is_compressed, encryption_algo, tar_data] = data.split(b"\n", 4) version = int(version) is_compressed = int(is_compressed) if encryption_algo != b"none": - tar_data = decrypt_backup_data(tar_data, password, encryption_algo, - format_version=version) + tar_data = decrypt_backup_data( + tar_data, password, encryption_algo, format_version=version + ) if is_compressed: try: tar_data = zlib.decompress(tar_data) except zlib.error as exc: - raise AndroidBackupParsingError("Impossible to decompress the backup file") from exc + raise AndroidBackupParsingError( + "Impossible to decompress the backup file" + ) from exc return tar_data @@ -189,9 +205,10 @@ def parse_tar_for_sms(data): res = [] with tarfile.open(fileobj=dbytes) as tar: for member in tar.getmembers(): - if (member.name.startswith("apps/com.android.providers.telephony/d_f/") - and (member.name.endswith("_sms_backup") - or member.name.endswith("_mms_backup"))): + if member.name.startswith("apps/com.android.providers.telephony/d_f/") and ( + member.name.endswith("_sms_backup") + or member.name.endswith("_mms_backup") + ): dhandler = tar.extractfile(member) res.extend(parse_sms_file(dhandler.read())) @@ -216,7 +233,7 @@ def parse_sms_file(data): message_links = check_for_links(entry["body"]) entry["isodate"] = convert_unix_to_iso(int(entry["date"]) / 1000) - entry["direction"] = ("sent" if int(entry["date_sent"]) else "received") + entry["direction"] = "sent" if int(entry["date_sent"]) else "received" # Extract links from the body if message_links or entry["body"].strip() == "": diff --git a/mvt/android/parsers/dumpsys.py b/mvt/android/parsers/dumpsys.py index 04a3a66..b8ddb03 100644 --- a/mvt/android/parsers/dumpsys.py +++ b/mvt/android/parsers/dumpsys.py @@ -27,10 +27,12 @@ def parse_dumpsys_accessibility(output: str) -> List[Dict[str, str]]: service = line.split(":")[1].strip() - results.append({ - "package_name": service.split("/")[0], - "service": service, - }) + results.append( + { + "package_name": service.split("/")[0], + "service": service, + } + ) return results @@ -62,8 +64,7 @@ def parse_dumpsys_activity_resolver_table(output: str) -> Dict[str, Any]: break # We detect the action name. 
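# Illustrative note, not part of the patch (excerpt is hypothetical, shaped to
# match this parser rather than copied from a device): in the resolver table,
# intent actions sit at one indentation level and end with ":", while the
# components registered for them are indented deeper, e.g.:
#
#       android.intent.action.MAIN:
#         43fb2a30 com.example.app/.MainActivity filter 43fb2b20
#
# The 6-space / 8-space startswith() checks below key off exactly that layout,
# and line.strip().split(" ")[1] pulls out the "package/component" token.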
- if (line.startswith(" " * 6) and not line.startswith(" " * 8) - and ":" in line): + if line.startswith(" " * 6) and not line.startswith(" " * 8) and ":" in line: intent = line.strip().replace(":", "") results[intent] = [] continue @@ -84,10 +85,12 @@ def parse_dumpsys_activity_resolver_table(output: str) -> Dict[str, Any]: activity = line.strip().split(" ")[1] package_name = activity.split("/")[0] - results[intent].append({ - "package_name": package_name, - "activity": activity, - }) + results[intent].append( + { + "package_name": package_name, + "activity": activity, + } + ) return results @@ -119,19 +122,20 @@ def parse_dumpsys_battery_daily(output: str) -> list: already_seen = False for update in daily_updates: - if (package_name == update["package_name"] - and vers_nr == update["vers"]): + if package_name == update["package_name"] and vers_nr == update["vers"]: already_seen = True break if not already_seen: - daily_updates.append({ - "action": "update", - "from": daily["from"], - "to": daily["to"], - "package_name": package_name, - "vers": vers_nr, - }) + daily_updates.append( + { + "action": "update", + "from": daily["from"], + "to": daily["to"], + "package_name": package_name, + "vers": vers_nr, + } + ) if len(daily_updates) > 0: results.extend(daily_updates) @@ -154,18 +158,20 @@ def parse_dumpsys_battery_history(output: str) -> List[Dict[str, Any]]: event = "" if line.find("+job") > 0: event = "start_job" - uid = line[line.find("+job")+5:line.find(":")] - service = line[line.find(":")+1:].strip('"') + uid = line[line.find("+job") + 5 : line.find(":")] + service = line[line.find(":") + 1 :].strip('"') package_name = service.split("/")[0] elif line.find("-job") > 0: event = "end_job" - uid = line[line.find("-job")+5:line.find(":")] - service = line[line.find(":")+1:].strip('"') + uid = line[line.find("-job") + 5 : line.find(":")] + service = line[line.find(":") + 1 :].strip('"') package_name = service.split("/")[0] elif line.find("+running +wake_lock=") > 0: - uid = line[line.find("+running +wake_lock=")+21:line.find(":")] + uid = line[line.find("+running +wake_lock=") + 21 : line.find(":")] event = "wake" - service = line[line.find("*walarm*:")+9:].split(" ")[0].strip('"').strip() + service = ( + line[line.find("*walarm*:") + 9 :].split(" ")[0].strip('"').strip() + ) if service == "" or "/" not in service: continue @@ -177,20 +183,22 @@ def parse_dumpsys_battery_history(output: str) -> List[Dict[str, Any]]: else: event = "end_top" top_pos = line.find("-top=") - colon_pos = top_pos+line[top_pos:].find(":") - uid = line[top_pos+5:colon_pos] + colon_pos = top_pos + line[top_pos:].find(":") + uid = line[top_pos + 5 : colon_pos] service = "" - package_name = line[colon_pos+1:].strip('"') + package_name = line[colon_pos + 1 :].strip('"') else: continue - results.append({ - "time_elapsed": time_elapsed, - "event": event, - "uid": uid, - "package_name": package_name, - "service": service, - }) + results.append( + { + "time_elapsed": time_elapsed, + "event": event, + "uid": uid, + "package_name": package_name, + "service": service, + } + ) return results @@ -198,8 +206,12 @@ def parse_dumpsys_battery_history(output: str) -> List[Dict[str, Any]]: def parse_dumpsys_dbinfo(output: str) -> List[Dict[str, Any]]: results = [] - rxp = re.compile(r'.*\[([0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3})\].*\[Pid:\((\d+)\)\](\w+).*sql\=\"(.+?)\"') # pylint: disable=line-too-long - rxp_no_pid = re.compile(r'.*\[([0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3})\][ 
]{1}(\w+).*sql\=\"(.+?)\"') # pylint: disable=line-too-long + rxp = re.compile( + r".*\[([0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3})\].*\[Pid:\((\d+)\)\](\w+).*sql\=\"(.+?)\"" + ) # pylint: disable=line-too-long + rxp_no_pid = re.compile( + r".*\[([0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3})\][ ]{1}(\w+).*sql\=\"(.+?)\"" + ) # pylint: disable=line-too-long pool = None in_operations = False @@ -229,21 +241,25 @@ def parse_dumpsys_dbinfo(output: str) -> List[Dict[str, Any]]: continue match = matches[0] - results.append({ - "isodate": match[0], - "action": match[1], - "sql": match[2], - "path": pool, - }) + results.append( + { + "isodate": match[0], + "action": match[1], + "sql": match[2], + "path": pool, + } + ) else: match = matches[0] - results.append({ - "isodate": match[0], - "pid": match[1], - "action": match[2], - "sql": match[3], - "path": pool, - }) + results.append( + { + "isodate": match[0], + "pid": match[1], + "action": match[2], + "sql": match[3], + "path": pool, + } + ) return results @@ -275,8 +291,7 @@ def parse_dumpsys_receiver_resolver_table(output: str) -> Dict[str, Any]: break # We detect the action name. - if (line.startswith(" " * 6) and not line.startswith(" " * 8) - and ":" in line): + if line.startswith(" " * 6) and not line.startswith(" " * 8) and ":" in line: intent = line.strip().replace(":", "") results[intent] = [] continue @@ -297,10 +312,12 @@ def parse_dumpsys_receiver_resolver_table(output: str) -> Dict[str, Any]: receiver = line.strip().split(" ")[1] package_name = receiver.split("/")[0] - results[intent].append({ - "package_name": package_name, - "receiver": receiver, - }) + results[intent].append( + { + "package_name": package_name, + "receiver": receiver, + } + ) return results @@ -366,13 +383,15 @@ def parse_dumpsys_appops(output: str) -> List[Dict[str, Any]]: entry = {} entry["access"] = line.split(":")[0].strip() - entry["type"] = line[line.find("[")+1:line.find("]")] + entry["type"] = line[line.find("[") + 1 : line.find("]")] try: entry["timestamp"] = convert_datetime_to_iso( datetime.strptime( - line[line.find("]")+1:line.find("(")].strip(), - "%Y-%m-%d %H:%M:%S.%f")) + line[line.find("]") + 1 : line.find("(")].strip(), + "%Y-%m-%d %H:%M:%S.%f", + ) + ) except ValueError: # Invalid date format pass @@ -418,13 +437,11 @@ def parse_dumpsys_package_for_details(output: str) -> Dict[str, Any]: permission = lineinfo[0] granted = None if "granted=" in lineinfo[1]: - granted = ("granted=true" in lineinfo[1]) + granted = "granted=true" in lineinfo[1] - details["permissions"].append({ - "name": permission, - "granted": granted, - "type": "install" - }) + details["permissions"].append( + {"name": permission, "granted": granted, "type": "install"} + ) if in_runtime_permissions: if not line.startswith(" " * 8): @@ -434,23 +451,18 @@ def parse_dumpsys_package_for_details(output: str) -> Dict[str, Any]: permission = lineinfo[0] granted = None if "granted=" in lineinfo[1]: - granted = ("granted=true" in lineinfo[1]) + granted = "granted=true" in lineinfo[1] - details["permissions"].append({ - "name": permission, - "granted": granted, - "type": "runtime" - }) + details["permissions"].append( + {"name": permission, "granted": granted, "type": "runtime"} + ) if in_declared_permissions: if not line.startswith(" " * 6): in_declared_permissions = False else: permission = line.strip().split(":")[0] - details["permissions"].append({ - "name": permission, - "type": "declared" - }) + details["permissions"].append({"name": permission, 
"type": "declared"}) if in_requested_permissions: if not line.startswith(" " * 6): in_requested_permissions = False diff --git a/mvt/android/parsers/getprop.py b/mvt/android/parsers/getprop.py index eadf298..70dd19d 100644 --- a/mvt/android/parsers/getprop.py +++ b/mvt/android/parsers/getprop.py @@ -20,10 +20,7 @@ def parse_getprop(output: str) -> List[Dict[str, str]]: if not matches or len(matches[0]) != 2: continue - entry = { - "name": matches[0][0], - "value": matches[0][1] - } + entry = {"name": matches[0][0], "value": matches[0][1]} results.append(entry) return results diff --git a/mvt/common/cmd_check_iocs.py b/mvt/common/cmd_check_iocs.py index a654202..6e01bd3 100644 --- a/mvt/common/cmd_check_iocs.py +++ b/mvt/common/cmd_check_iocs.py @@ -13,7 +13,6 @@ log = logging.getLogger(__name__) class CmdCheckIOCS(Command): - def __init__( self, target_path: Optional[str] = None, @@ -21,11 +20,17 @@ class CmdCheckIOCS(Command): ioc_files: Optional[list] = None, module_name: Optional[str] = None, serial: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, ) -> None: - super().__init__(target_path=target_path, results_path=results_path, - ioc_files=ioc_files, module_name=module_name, - serial=serial, fast_mode=fast_mode, log=log) + super().__init__( + target_path=target_path, + results_path=results_path, + ioc_files=ioc_files, + module_name=module_name, + serial=serial, + fast_mode=fast_mode, + log=log, + ) self.name = "check-iocs" @@ -50,11 +55,15 @@ class CmdCheckIOCS(Command): if iocs_module().get_slug() != name_only: continue - log.info("Loading results from \"%s\" with module %s", - file_name, iocs_module.__name__) + log.info( + 'Loading results from "%s" with module %s', + file_name, + iocs_module.__name__, + ) - m = iocs_module.from_json(file_path, - log=logging.getLogger(iocs_module.__module__)) + m = iocs_module.from_json( + file_path, log=logging.getLogger(iocs_module.__module__) + ) if self.iocs.total_ioc_count > 0: m.indicators = self.iocs m.indicators.log = m.log @@ -67,5 +76,6 @@ class CmdCheckIOCS(Command): total_detections += len(m.detected) if total_detections > 0: - log.warning("The check of the results produced %d detections!", - total_detections) + log.warning( + "The check of the results produced %d detections!", total_detections + ) diff --git a/mvt/common/command.py b/mvt/common/command.py index 3791501..5406c35 100644 --- a/mvt/common/command.py +++ b/mvt/common/command.py @@ -12,14 +12,15 @@ from typing import Optional from mvt.common.indicators import Indicators from mvt.common.module import MVTModule, run_module, save_timeline -from mvt.common.utils import (convert_datetime_to_iso, - generate_hashes_from_path, - get_sha256_from_file_path) +from mvt.common.utils import ( + convert_datetime_to_iso, + generate_hashes_from_path, + get_sha256_from_file_path, +) from mvt.common.version import MVT_VERSION class Command: - def __init__( self, target_path: Optional[str] = None, @@ -27,8 +28,8 @@ class Command: ioc_files: Optional[list] = None, module_name: Optional[str] = None, serial: Optional[str] = None, - fast_mode: Optional[bool] = False, - hashes: Optional[bool] = False, + fast_mode: bool = False, + hashes: bool = False, log: logging.Logger = logging.getLogger(__name__), ) -> None: self.name = "" @@ -62,8 +63,9 @@ class Command: try: os.makedirs(self.results_path) except Exception as exc: - self.log.critical("Unable to create output folder %s: %s", - self.results_path, exc) + self.log.critical( + "Unable to create output folder %s: 
%s", self.results_path, exc + ) sys.exit(1) def _setup_logging(self): @@ -71,10 +73,12 @@ class Command: return logger = logging.getLogger("mvt") - file_handler = logging.FileHandler(os.path.join(self.results_path, - "command.log")) - formatter = logging.Formatter("%(asctime)s - %(name)s - " - "%(levelname)s - %(message)s") + file_handler = logging.FileHandler( + os.path.join(self.results_path, "command.log") + ) + formatter = logging.Formatter( + "%(asctime)s - %(name)s - " "%(levelname)s - %(message)s" + ) file_handler.setLevel(logging.DEBUG) file_handler.setFormatter(formatter) logger.addHandler(file_handler) @@ -84,13 +88,15 @@ class Command: return if len(self.timeline) > 0: - save_timeline(self.timeline, - os.path.join(self.results_path, "timeline.csv")) + save_timeline( + self.timeline, os.path.join(self.results_path, "timeline.csv") + ) if len(self.timeline_detected) > 0: - save_timeline(self.timeline_detected, - os.path.join(self.results_path, - "timeline_detected.csv")) + save_timeline( + self.timeline_detected, + os.path.join(self.results_path, "timeline_detected.csv"), + ) def _store_info(self) -> None: if not self.results_path: @@ -124,7 +130,7 @@ class Command: if self.target_path and (os.environ.get("MVT_HASH_FILES") or self.hashes): info_hash = get_sha256_from_file_path(info_path) - self.log.info("Reference hash of the info.json file: \"%s\"", info_hash) + self.log.info('Reference hash of the info.json file: "%s"', info_hash) def generate_hashes(self) -> None: """ @@ -137,8 +143,7 @@ class Command: self.hash_values.append(file) def list_modules(self) -> None: - self.log.info("Following is the list of available %s modules:", - self.name) + self.log.info("Following is the list of available %s modules:", self.name) for module in self.modules: self.log.info(" - %s", module.__name__) @@ -152,7 +157,6 @@ class Command: raise NotImplementedError def run(self) -> None: - try: self.init() except NotImplementedError: @@ -162,13 +166,15 @@ class Command: if self.module_name and module.__name__ != self.module_name: continue - # FIXME: do we need the logger here + # FIXME: do we need the logger here module_logger = logging.getLogger(module.__module__) - m = module(target_path=self.target_path, - results_path=self.results_path, - fast_mode=self.fast_mode, - log=module_logger) + m = module( + target_path=self.target_path, + results_path=self.results_path, + fast_mode=self.fast_mode, + log=module_logger, + ) if self.iocs.total_ioc_count: m.indicators = self.iocs diff --git a/mvt/common/indicators.py b/mvt/common/indicators.py index a933835..63c4582 100644 --- a/mvt/common/indicators.py +++ b/mvt/common/indicators.py @@ -34,8 +34,7 @@ class Indicators: for ioc_file_name in os.listdir(MVT_INDICATORS_FOLDER): if ioc_file_name.lower().endswith(".stix2"): - self.parse_stix2(os.path.join(MVT_INDICATORS_FOLDER, - ioc_file_name)) + self.parse_stix2(os.path.join(MVT_INDICATORS_FOLDER, ioc_file_name)) def _check_stix2_env_variable(self) -> None: """ @@ -49,8 +48,9 @@ class Indicators: if os.path.isfile(path): self.parse_stix2(path) else: - self.log.error("Path specified with env MVT_STIX2 is not a valid file: %s", - path) + self.log.error( + "Path specified with env MVT_STIX2 is not a valid file: %s", path + ) def _new_collection( self, @@ -58,7 +58,7 @@ class Indicators: name: Optional[str] = None, description: Optional[str] = None, file_name: Optional[str] = None, - file_path: Optional[str] = None + file_path: Optional[str] = None, ) -> dict: return { "id": cid, @@ -78,8 +78,7 @@ class Indicators: 
"count": 0, } - def _add_indicator(self, ioc: str, ioc_coll: dict, - ioc_coll_list: list) -> None: + def _add_indicator(self, ioc: str, ioc_coll: dict, ioc_coll_list: list) -> None: ioc = ioc.strip("'") if ioc not in ioc_coll_list: ioc_coll_list.append(ioc) @@ -91,43 +90,51 @@ class Indicators: if key == "domain-name:value": # We force domain names to lower case. - self._add_indicator(ioc=value.lower(), - ioc_coll=collection, - ioc_coll_list=collection["domains"]) + self._add_indicator( + ioc=value.lower(), + ioc_coll=collection, + ioc_coll_list=collection["domains"], + ) elif key == "process:name": - self._add_indicator(ioc=value, - ioc_coll=collection, - ioc_coll_list=collection["processes"]) + self._add_indicator( + ioc=value, ioc_coll=collection, ioc_coll_list=collection["processes"] + ) elif key == "email-addr:value": # We force email addresses to lower case. - self._add_indicator(ioc=value.lower(), - ioc_coll=collection, - ioc_coll_list=collection["emails"]) + self._add_indicator( + ioc=value.lower(), + ioc_coll=collection, + ioc_coll_list=collection["emails"], + ) elif key == "file:name": - self._add_indicator(ioc=value, - ioc_coll=collection, - ioc_coll_list=collection["file_names"]) + self._add_indicator( + ioc=value, ioc_coll=collection, ioc_coll_list=collection["file_names"] + ) elif key == "file:path": - self._add_indicator(ioc=value, - ioc_coll=collection, - ioc_coll_list=collection["file_paths"]) + self._add_indicator( + ioc=value, ioc_coll=collection, ioc_coll_list=collection["file_paths"] + ) elif key == "file:hashes.sha256": - self._add_indicator(ioc=value, - ioc_coll=collection, - ioc_coll_list=collection["files_sha256"]) + self._add_indicator( + ioc=value, ioc_coll=collection, ioc_coll_list=collection["files_sha256"] + ) elif key == "app:id": - self._add_indicator(ioc=value, - ioc_coll=collection, - ioc_coll_list=collection["app_ids"]) + self._add_indicator( + ioc=value, ioc_coll=collection, ioc_coll_list=collection["app_ids"] + ) elif key == "configuration-profile:id": - self._add_indicator(ioc=value, - ioc_coll=collection, - ioc_coll_list=collection["ios_profile_ids"]) + self._add_indicator( + ioc=value, + ioc_coll=collection, + ioc_coll_list=collection["ios_profile_ids"], + ) elif key == "android-property:name": - self._add_indicator(ioc=value, - ioc_coll=collection, - ioc_coll_list=collection["android_property_names"]) + self._add_indicator( + ioc=value, + ioc_coll=collection, + ioc_coll_list=collection["android_property_names"], + ) def parse_stix2(self, file_path: str) -> None: """Extract indicators from a STIX2 file. @@ -142,8 +149,10 @@ class Indicators: try: data = json.load(handle) except json.decoder.JSONDecodeError: - self.log.critical("Unable to parse STIX2 indicator file. " - "The file is corrupted or in the wrong format!") + self.log.critical( + "Unable to parse STIX2 indicator file. " + "The file is corrupted or in the wrong format!" + ) return malware = {} @@ -163,10 +172,13 @@ class Indicators: collections = [] for mal_id, mal_values in malware.items(): - collection = self._new_collection(mal_id, mal_values.get("name"), - mal_values.get("description"), - os.path.basename(file_path), - file_path) + collection = self._new_collection( + mal_id, + mal_values.get("name"), + mal_values.get("description"), + os.path.basename(file_path), + file_path, + ) collections.append(collection) # We loop through all indicators. 
@@ -192,13 +204,17 @@ class Indicators: break for coll in collections: - self.log.info("Extracted %d indicators for collection with name \"%s\"", - coll["count"], coll["name"]) + self.log.info( + 'Extracted %d indicators for collection with name "%s"', + coll["count"], + coll["name"], + ) self.ioc_collections.extend(collections) - def load_indicators_files(self, files: list, - load_default: Optional[bool] = True) -> None: + def load_indicators_files( + self, files: list, load_default: Optional[bool] = True + ) -> None: """ Load a list of indicators files. """ @@ -206,16 +222,14 @@ class Indicators: if os.path.isfile(file_path): self.parse_stix2(file_path) else: - self.log.warning("No indicators file exists at path %s", - file_path) + self.log.warning("No indicators file exists at path %s", file_path) # Load downloaded indicators and any indicators from env variable. if load_default: self._load_downloaded_indicators() self._check_stix2_env_variable() - self.log.info("Loaded a total of %d unique indicators", - self.total_ioc_count) + self.log.info("Loaded a total of %d unique indicators", self.total_ioc_count) def get_iocs(self, ioc_type: str) -> Iterator[Dict[str, Any]]: for ioc_collection in self.ioc_collections: @@ -249,17 +263,19 @@ class Indicators: # HTTP HEAD request. unshortened = orig_url.unshorten() - self.log.debug("Found a shortened URL %s -> %s", - url, unshortened) + self.log.debug("Found a shortened URL %s -> %s", url, unshortened) if unshortened is None: return None # Now we check for any nested URL shorteners. dest_url = URL(unshortened) if dest_url.check_if_shortened(): - self.log.debug("Original URL %s appears to shorten another " - "shortened URL %s ... checking!", - orig_url.url, dest_url.url) + self.log.debug( + "Original URL %s appears to shorten another " + "shortened URL %s ... checking!", + orig_url.url, + dest_url.url, + ) return self.check_domain(dest_url.url) final_url = dest_url @@ -271,9 +287,12 @@ class Indicators: # match. for ioc in self.get_iocs("domains"): if ioc["value"].lower() in url: - self.log.warning("Maybe found a known suspicious domain %s " - "matching indicators from \"%s\"", - url, ioc["name"]) + self.log.warning( + "Maybe found a known suspicious domain %s " + 'matching indicators from "%s"', + url, + ioc["name"], + ) return ioc # If nothing matched, we can quit here. @@ -285,27 +304,41 @@ class Indicators: # First we check the full domain. if final_url.domain.lower() == ioc["value"]: if orig_url.is_shortened and orig_url.url != final_url.url: - self.log.warning("Found a known suspicious domain %s " - "shortened as %s matching indicators from \"%s\"", - final_url.url, orig_url.url, ioc["name"]) + self.log.warning( + "Found a known suspicious domain %s " + 'shortened as %s matching indicators from "%s"', + final_url.url, + orig_url.url, + ioc["name"], + ) else: - self.log.warning("Found a known suspicious domain %s " - "matching indicators from \"%s\"", - final_url.url, ioc["name"]) + self.log.warning( + "Found a known suspicious domain %s " + 'matching indicators from "%s"', + final_url.url, + ioc["name"], + ) return ioc # Then we just check the top level domain. 
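# Illustrative note, not part of the patch (URL is hypothetical): the full-domain
# check above and the top-level check below rely on the URL helper reformatted
# further down in this diff, roughly:
from mvt.common.url import URL

URL("https://mail.bad-domain.com/x").get_domain()     # -> "mail.bad-domain.com"
URL("https://mail.bad-domain.com/x").get_top_level()  # -> "bad-domain.com"
# so an indicator listing only "bad-domain.com" still matches when the extracted
# URL points at a sub-domain.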
if final_url.top_level.lower() == ioc["value"]: if orig_url.is_shortened and orig_url.url != final_url.url: - self.log.warning("Found a sub-domain with suspicious top " - "level %s shortened as %s matching " - "indicators from \"%s\"", final_url.url, - orig_url.url, ioc["name"]) + self.log.warning( + "Found a sub-domain with suspicious top " + "level %s shortened as %s matching " + 'indicators from "%s"', + final_url.url, + orig_url.url, + ioc["name"], + ) else: - self.log.warning("Found a sub-domain with a suspicious top " - "level %s matching indicators from \"%s\"", - final_url.url, ioc["name"]) + self.log.warning( + "Found a sub-domain with a suspicious top " + 'level %s matching indicators from "%s"', + final_url.url, + ioc["name"], + ) return ioc @@ -344,16 +377,22 @@ class Indicators: proc_name = os.path.basename(process) for ioc in self.get_iocs("processes"): if proc_name == ioc["value"]: - self.log.warning("Found a known suspicious process name \"%s\" " - "matching indicators from \"%s\"", - process, ioc["name"]) + self.log.warning( + 'Found a known suspicious process name "%s" ' + 'matching indicators from "%s"', + process, + ioc["name"], + ) return ioc if len(proc_name) == 16: if ioc["value"].startswith(proc_name): - self.log.warning("Found a truncated known suspicious " - "process name \"%s\" matching indicators from \"%s\"", - process, ioc["name"]) + self.log.warning( + "Found a truncated known suspicious " + 'process name "%s" matching indicators from "%s"', + process, + ioc["name"], + ) return ioc return None @@ -390,9 +429,12 @@ class Indicators: for ioc in self.get_iocs("emails"): if email.lower() == ioc["value"].lower(): - self.log.warning("Found a known suspicious email address \"%s\" " - "matching indicators from \"%s\"", - email, ioc["name"]) + self.log.warning( + 'Found a known suspicious email address "%s" ' + 'matching indicators from "%s"', + email, + ioc["name"], + ) return ioc return None @@ -411,9 +453,12 @@ class Indicators: for ioc in self.get_iocs("file_names"): if ioc["value"] == file_name: - self.log.warning("Found a known suspicious file name \"%s\" " - "matching indicators from \"%s\"", - file_name, ioc["name"]) + self.log.warning( + 'Found a known suspicious file name "%s" ' + 'matching indicators from "%s"', + file_name, + ioc["name"], + ) return ioc return None @@ -439,9 +484,12 @@ class Indicators: # Strip any trailing slash from indicator paths to match # directories. 
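# Illustrative note, not part of the patch: rstrip("/") below lets a
# directory-style indicator match both the directory itself and anything beneath
# it. For a hypothetical IOC value of "/private/var/db/bad-dir/":
#
#   "/private/var/db/bad-dir/evil.plist".startswith("/private/var/db/bad-dir")  # -> True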
if file_path.startswith(ioc["value"].rstrip("/")): - self.log.warning("Found a known suspicious file path \"%s\" " - "matching indicators form \"%s\"", - file_path, ioc["name"]) + self.log.warning( + 'Found a known suspicious file path "%s" ' + 'matching indicators form "%s"', + file_path, + ioc["name"], + ) return ioc return None @@ -462,9 +510,12 @@ class Indicators: for ioc in self.get_iocs("processes"): parts = file_path.split("/") if ioc["value"] in parts: - self.log.warning("Found known suspicious process name mentioned in file at " - "path \"%s\" matching indicators from \"%s\"", - file_path, ioc["name"]) + self.log.warning( + "Found known suspicious process name mentioned in file at " + 'path "%s" matching indicators from "%s"', + file_path, + ioc["name"], + ) return ioc return None @@ -484,9 +535,12 @@ class Indicators: for ioc in self.get_iocs("ios_profile_ids"): if profile_uuid in ioc["value"]: - self.log.warning("Found a known suspicious profile ID \"%s\" " - "matching indicators from \"%s\"", - profile_uuid, ioc["name"]) + self.log.warning( + 'Found a known suspicious profile ID "%s" ' + 'matching indicators from "%s"', + profile_uuid, + ioc["name"], + ) return ioc return None @@ -504,9 +558,12 @@ class Indicators: for ioc in self.get_iocs("files_sha256"): if file_hash.lower() == ioc["value"].lower(): - self.log.warning("Found a known suspicious file with hash \"%s\" " - "matching indicators from \"%s\"", - file_hash, ioc["name"]) + self.log.warning( + 'Found a known suspicious file with hash "%s" ' + 'matching indicators from "%s"', + file_hash, + ioc["name"], + ) return ioc return None @@ -525,9 +582,12 @@ class Indicators: for ioc in self.get_iocs("app_ids"): if app_id.lower() == ioc["value"].lower(): - self.log.warning("Found a known suspicious app with ID \"%s\" " - "matching indicators from \"%s\"", app_id, - ioc["name"]) + self.log.warning( + 'Found a known suspicious app with ID "%s" ' + 'matching indicators from "%s"', + app_id, + ioc["name"], + ) return ioc return None @@ -545,9 +605,12 @@ class Indicators: for ioc in self.get_iocs("android_property_names"): if property_name.lower() == ioc["value"].lower(): - self.log.warning("Found a known suspicious Android property \"%s\" " - "matching indicators from \"%s\"", property_name, - ioc["name"]) + self.log.warning( + 'Found a known suspicious Android property "%s" ' + 'matching indicators from "%s"', + property_name, + ioc["name"], + ) return ioc return None diff --git a/mvt/common/logo.py b/mvt/common/logo.py index dbfe7ad..29e219f 100644 --- a/mvt/common/logo.py +++ b/mvt/common/logo.py @@ -18,8 +18,10 @@ def check_updates() -> None: pass else: if latest_version: - rich_print(f"\t\t[bold]Version {latest_version} is available! " - "Upgrade mvt with `pip3 install -U mvt`[/bold]") + rich_print( + f"\t\t[bold]Version {latest_version} is available! " + "Upgrade mvt with `pip3 install -U mvt`[/bold]" + ) # Then we check for indicators files updates. ioc_updates = IndicatorsUpdates() @@ -27,8 +29,10 @@ def check_updates() -> None: # Before proceeding, we check if we have downloaded an indicators index. # If not, there's no point in proceeding with the updates check. 
if ioc_updates.get_latest_update() == 0: - rich_print("\t\t[bold]You have not yet downloaded any indicators, check " - "the `download-iocs` command![/bold]") + rich_print( + "\t\t[bold]You have not yet downloaded any indicators, check " + "the `download-iocs` command![/bold]" + ) return # We only perform this check at a fixed frequency, in order to not @@ -36,8 +40,10 @@ def check_updates() -> None: # multiple times. should_check, hours = ioc_updates.should_check() if not should_check: - rich_print(f"\t\tIndicators updates checked recently, next automatic check " - f"in {int(hours)} hours") + rich_print( + f"\t\tIndicators updates checked recently, next automatic check " + f"in {int(hours)} hours" + ) return try: @@ -46,8 +52,10 @@ def check_updates() -> None: pass else: if ioc_to_update: - rich_print("\t\t[bold]There are updates to your indicators files! " - "Run the `download-iocs` command to update![/bold]") + rich_print( + "\t\t[bold]There are updates to your indicators files! " + "Run the `download-iocs` command to update![/bold]" + ) else: rich_print("\t\tYour indicators files seem to be up to date.") diff --git a/mvt/common/module.py b/mvt/common/module.py index a692e94..0dc02d1 100644 --- a/mvt/common/module.py +++ b/mvt/common/module.py @@ -35,9 +35,9 @@ class MVTModule: file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Union[List[Dict[str, Any]], Dict[str, Any], None] = None + results: Union[List[Dict[str, Any]], Dict[str, Any], None] = None, ) -> None: """Initialize module. @@ -70,8 +70,7 @@ class MVTModule: with open(json_path, "r", encoding="utf-8") as handle: results = json.load(handle) if log: - log.info("Loaded %d results from \"%s\"", - len(results), json_path) + log.info('Loaded %d results from "%s"', len(results), json_path) return cls(results=results, log=log) def get_slug(self) -> str: @@ -99,20 +98,21 @@ class MVTModule: if self.results: results_file_name = f"{name}.json" - results_json_path = os.path.join(self.results_path, - results_file_name) + results_json_path = os.path.join(self.results_path, results_file_name) with open(results_json_path, "w", encoding="utf-8") as handle: try: json.dump(self.results, handle, indent=4, default=str) except Exception as exc: - self.log.error("Unable to store results of module %s to file %s: %s", - self.__class__.__name__, results_file_name, - exc) + self.log.error( + "Unable to store results of module %s to file %s: %s", + self.__class__.__name__, + results_file_name, + exc, + ) if self.detected: detected_file_name = f"{name}_detected.json" - detected_json_path = os.path.join(self.results_path, - detected_file_name) + detected_json_path = os.path.join(self.results_path, detected_file_name) with open(detected_json_path, "w", encoding="utf-8") as handle: json.dump(self.detected, handle, indent=4, default=str) @@ -151,8 +151,7 @@ class MVTModule: # De-duplicate timeline entries. 
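# Illustrative note, not part of the patch: every timeline entry handled here is
# a plain dict carrying the four keys that save_timeline() writes out below,
# e.g. (values are placeholders):
#
#   {"timestamp": "2023-01-01 00:00:00.000000", "module": "Packages",
#    "event": "package_install",
#    "data": "Install or update of package com.example.app"}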
self.timeline = self._deduplicate_timeline(self.timeline) - self.timeline_detected = self._deduplicate_timeline( - self.timeline_detected) + self.timeline_detected = self._deduplicate_timeline(self.timeline_detected) def run(self) -> None: """Run the main module procedure.""" @@ -165,42 +164,63 @@ def run_module(module: MVTModule) -> None: try: module.run() except NotImplementedError: - module.log.exception("The run() procedure of module %s was not implemented yet!", - module.__class__.__name__) + module.log.exception( + "The run() procedure of module %s was not implemented yet!", + module.__class__.__name__, + ) except InsufficientPrivileges as exc: - module.log.info("Insufficient privileges for module %s: %s", - module.__class__.__name__, exc) + module.log.info( + "Insufficient privileges for module %s: %s", module.__class__.__name__, exc + ) except DatabaseNotFoundError as exc: - module.log.info("There might be no data to extract by module %s: %s", - module.__class__.__name__, exc) + module.log.info( + "There might be no data to extract by module %s: %s", + module.__class__.__name__, + exc, + ) except DatabaseCorruptedError as exc: - module.log.error("The %s module database seems to be corrupted: %s", - module.__class__.__name__, exc) + module.log.error( + "The %s module database seems to be corrupted: %s", + module.__class__.__name__, + exc, + ) except Exception as exc: - module.log.exception("Error in running extraction from module %s: %s", - module.__class__.__name__, exc) + module.log.exception( + "Error in running extraction from module %s: %s", + module.__class__.__name__, + exc, + ) else: try: module.check_indicators() except NotImplementedError: - module.log.info("The %s module does not support checking for indicators", - module.__class__.__name__) + module.log.info( + "The %s module does not support checking for indicators", + module.__class__.__name__, + ) except Exception as exc: - module.log.exception("Error when checking indicators from module %s: %s", - module.__class__.__name__, exc) + module.log.exception( + "Error when checking indicators from module %s: %s", + module.__class__.__name__, + exc, + ) else: if module.indicators and not module.detected: - module.log.info("The %s module produced no detections!", - module.__class__.__name__) + module.log.info( + "The %s module produced no detections!", module.__class__.__name__ + ) try: module.to_timeline() except NotImplementedError: pass except Exception as exc: - module.log.exception("Error when serializing data from module %s: %s", - module.__class__.__name__, exc) + module.log.exception( + "Error when serializing data from module %s: %s", + module.__class__.__name__, + exc, + ) module.save_to_json() @@ -213,15 +233,19 @@ def save_timeline(timeline: list, timeline_path: str) -> None: """ with open(timeline_path, "a+", encoding="utf-8") as handle: - csvoutput = csv.writer(handle, delimiter=",", quotechar="\"", - quoting=csv.QUOTE_ALL, escapechar='\\') + csvoutput = csv.writer( + handle, delimiter=",", quotechar='"', quoting=csv.QUOTE_ALL, escapechar="\\" + ) csvoutput.writerow(["UTC Timestamp", "Plugin", "Event", "Description"]) - for event in sorted(timeline, key=lambda x: x["timestamp"] - if x["timestamp"] is not None else ""): - csvoutput.writerow([ - event.get("timestamp"), - event.get("module"), - event.get("event"), - event.get("data"), - ]) + for event in sorted( + timeline, key=lambda x: x["timestamp"] if x["timestamp"] is not None else "" + ): + csvoutput.writerow( + [ + event.get("timestamp"), + 
event.get("module"), + event.get("event"), + event.get("data"), + ] + ) diff --git a/mvt/common/options.py b/mvt/common/options.py index b5cfb24..b488747 100644 --- a/mvt/common/options.py +++ b/mvt/common/options.py @@ -16,8 +16,10 @@ class MutuallyExclusiveOption(Option): help_msg = kwargs.get("help", "") if self.mutually_exclusive: ex_str = ", ".join(self.mutually_exclusive) - kwargs["help"] = (f"{help_msg} NOTE: This argument is mutually exclusive with arguments" - f"[{ex_str}].") + kwargs["help"] = ( + f"{help_msg} NOTE: This argument is mutually exclusive with arguments" + f"[{ex_str}]." + ) super().__init__(*args, **kwargs) diff --git a/mvt/common/updates.py b/mvt/common/updates.py index 787f61d..2c33f1e 100644 --- a/mvt/common/updates.py +++ b/mvt/common/updates.py @@ -22,7 +22,6 @@ INDICATORS_CHECK_FREQUENCY = 12 class MVTUpdates: - def check(self) -> str: res = requests.get("https://pypi.org/pypi/mvt/json") data = res.json() @@ -35,7 +34,6 @@ class MVTUpdates: class IndicatorsUpdates: - def __init__(self) -> None: self.github_raw_url = "https://raw.githubusercontent.com/{}/{}/{}/{}" @@ -47,10 +45,12 @@ class IndicatorsUpdates: if not os.path.exists(MVT_DATA_FOLDER): os.makedirs(MVT_DATA_FOLDER) - self.latest_update_path = os.path.join(MVT_DATA_FOLDER, - "latest_indicators_update") - self.latest_check_path = os.path.join(MVT_DATA_FOLDER, - "latest_indicators_check") + self.latest_update_path = os.path.join( + MVT_DATA_FOLDER, "latest_indicators_update" + ) + self.latest_check_path = os.path.join( + MVT_DATA_FOLDER, "latest_indicators_check" + ) def get_latest_check(self) -> int: if not os.path.exists(self.latest_check_path): @@ -85,12 +85,16 @@ class IndicatorsUpdates: handle.write(str(timestamp)) def get_remote_index(self) -> Optional[dict]: - url = self.github_raw_url.format(self.index_owner, self.index_repo, - self.index_branch, self.index_path) + url = self.github_raw_url.format( + self.index_owner, self.index_repo, self.index_branch, self.index_path + ) res = requests.get(url) if res.status_code != 200: - log.error("Failed to retrieve indicators index located at %s (error %d)", - url, res.status_code) + log.error( + "Failed to retrieve indicators index located at %s (error %d)", + url, + res.status_code, + ) return None return yaml.safe_load(res.content) @@ -98,8 +102,11 @@ class IndicatorsUpdates: def download_remote_ioc(self, ioc_url: str) -> Optional[str]: res = requests.get(ioc_url) if res.status_code != 200: - log.error("Failed to download indicators file from %s (error %d)", - ioc_url, res.status_code) + log.error( + "Failed to download indicators file from %s (error %d)", + ioc_url, + res.status_code, + ) return None clean_file_name = ioc_url.lstrip("https://").replace("/", "_") @@ -135,28 +142,37 @@ class IndicatorsUpdates: ioc_url = ioc.get("download_url", "") if not ioc_url: - log.error("Could not find a way to download indicator file for %s", - ioc.get("name")) + log.error( + "Could not find a way to download indicator file for %s", + ioc.get("name"), + ) continue ioc_local_path = self.download_remote_ioc(ioc_url) if not ioc_local_path: continue - log.info("Downloaded indicators \"%s\" to %s", - ioc.get("name"), ioc_local_path) + log.info( + 'Downloaded indicators "%s" to %s', ioc.get("name"), ioc_local_path + ) self.set_latest_update() - def _get_remote_file_latest_commit(self, owner: str, repo: str, - branch: str, path: str) -> int: + def _get_remote_file_latest_commit( + self, owner: str, repo: str, branch: str, path: str + ) -> int: # TODO: The branch is 
currently not taken into consideration. # How do we specify which branch to look up to the API? - file_commit_url = f"https://api.github.com/repos/{owner}/{repo}/commits?path={path}" + file_commit_url = ( + f"https://api.github.com/repos/{owner}/{repo}/commits?path={path}" + ) res = requests.get(file_commit_url) if res.status_code != 200: - log.error("Failed to get details about file %s (error %d)", - file_commit_url, res.status_code) + log.error( + "Failed to get details about file %s (error %d)", + file_commit_url, + res.status_code, + ) return -1 details = res.json() @@ -164,13 +180,16 @@ class IndicatorsUpdates: return -1 latest_commit = details[0] - latest_commit_date = latest_commit.get("commit", {}).get("author", {}).get("date", None) + latest_commit_date = ( + latest_commit.get("commit", {}).get("author", {}).get("date", None) + ) if not latest_commit_date: - log.error("Failed to retrieve date of latest update to indicators index file") + log.error( + "Failed to retrieve date of latest update to indicators index file" + ) return -1 - latest_commit_dt = datetime.strptime(latest_commit_date, - '%Y-%m-%dT%H:%M:%SZ') + latest_commit_dt = datetime.strptime(latest_commit_date, "%Y-%m-%dT%H:%M:%SZ") latest_commit_ts = int(latest_commit_dt.timestamp()) return latest_commit_ts @@ -192,10 +211,9 @@ class IndicatorsUpdates: self.set_latest_check() latest_update = self.get_latest_update() - latest_commit_ts = self._get_remote_file_latest_commit(self.index_owner, - self.index_repo, - self.index_branch, - self.index_path) + latest_commit_ts = self._get_remote_file_latest_commit( + self.index_owner, self.index_repo, self.index_branch, self.index_path + ) if latest_update < latest_commit_ts: return True @@ -214,10 +232,9 @@ class IndicatorsUpdates: branch = github.get("branch", "main") path = github.get("path", "") - file_latest_commit_ts = self._get_remote_file_latest_commit(owner, - repo, - branch, - path) + file_latest_commit_ts = self._get_remote_file_latest_commit( + owner, repo, branch, path + ) if latest_update < file_latest_commit_ts: return True diff --git a/mvt/common/url.py b/mvt/common/url.py index 85ba1d9..b75dc53 100644 --- a/mvt/common/url.py +++ b/mvt/common/url.py @@ -254,7 +254,6 @@ SHORTENER_DOMAINS = [ class URL: - def __init__(self, url: str) -> None: if isinstance(url, bytes): url = url.decode() @@ -273,9 +272,11 @@ class URL: :rtype: str """ - return get_tld(self.url, - as_object=True, - fix_protocol=True).parsed_url.netloc.lower().lstrip("www.") + return ( + get_tld(self.url, as_object=True, fix_protocol=True) + .parsed_url.netloc.lower() + .lstrip("www.") + ) def get_top_level(self) -> str: """Get only the top-level domain from a URL. @@ -286,9 +287,7 @@ class URL: :rtype: str """ - return get_tld(self.url, - as_object=True, - fix_protocol=True).fld.lower() + return get_tld(self.url, as_object=True, fix_protocol=True).fld.lower() def check_if_shortened(self) -> bool: """Check if the URL is among list of shortener services. diff --git a/mvt/common/utils.py b/mvt/common/utils.py index 0196555..0e33d47 100644 --- a/mvt/common/utils.py +++ b/mvt/common/utils.py @@ -42,7 +42,7 @@ def convert_datetime_to_iso(date_time: datetime.datetime) -> str: def convert_unix_to_utc_datetime( - timestamp: Union[int, float, str] + timestamp: Union[int, float, str] ) -> datetime.datetime: """Converts a unix epoch timestamp to UTC datetime. 
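# Illustrative note, not part of the patch (helper internals are outside this
# hunk's context lines): the "mactime" handled by the convert_mactime_* helpers
# in the following hunk is Apple's reference-date epoch -- seconds since
# 2001-01-01 00:00:00 UTC -- which sits 978307200 seconds after the Unix epoch:
from datetime import datetime, timezone

apple_epoch_offset = int(datetime(2001, 1, 1, tzinfo=timezone.utc).timestamp())  # 978307200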
@@ -69,8 +69,7 @@ def convert_unix_to_iso(timestamp: Union[int, float, str]) -> str: return "" -def convert_mactime_to_datetime(timestamp: Union[int, float], - from_2001: bool = True): +def convert_mactime_to_datetime(timestamp: Union[int, float], from_2001: bool = True): """Converts Mac Standard Time to a datetime. :param timestamp: MacTime timestamp (either int or float). @@ -111,8 +110,7 @@ def convert_mactime_to_iso(timestamp: int, from_2001: bool = True): """ - return convert_datetime_to_iso( - convert_mactime_to_datetime(timestamp, from_2001)) + return convert_datetime_to_iso(convert_mactime_to_datetime(timestamp, from_2001)) def check_for_links(text: str) -> list: @@ -185,18 +183,20 @@ def generate_hashes_from_path(path: str, log) -> Iterator[dict]: hash_value = get_sha256_from_file_path(path) yield {"file_path": path, "sha256": hash_value} elif os.path.isdir(path): - for (root, _, files) in os.walk(path): + for root, _, files in os.walk(path): for file in files: file_path = os.path.join(root, file) try: sha256 = get_sha256_from_file_path(file_path) except FileNotFoundError: - log.error("Failed to hash the file %s: might be a symlink", - file_path) + log.error( + "Failed to hash the file %s: might be a symlink", file_path + ) continue except PermissionError: - log.error("Failed to hash the file %s: permission denied", - file_path) + log.error( + "Failed to hash the file %s: permission denied", file_path + ) continue yield {"file_path": file_path, "sha256": sha256} diff --git a/mvt/common/virustotal.py b/mvt/common/virustotal.py index ec09122..0ada35c 100644 --- a/mvt/common/virustotal.py +++ b/mvt/common/virustotal.py @@ -23,17 +23,20 @@ class VTQuotaExceeded(Exception): def virustotal_lookup(file_hash: str): if MVT_VT_API_KEY not in os.environ: - raise VTNoKey("No VirusTotal API key provided: to use VirusTotal " - "lookups please provide your API key with " - "`export MVT_VT_API_KEY=`") + raise VTNoKey( + "No VirusTotal API key provided: to use VirusTotal " + "lookups please provide your API key with " + "`export MVT_VT_API_KEY=`" + ) headers = { "User-Agent": "VirusTotal", "Content-Type": "application/json", "x-apikey": os.environ[MVT_VT_API_KEY], } - res = requests.get(f"https://www.virustotal.com/api/v3/files/{file_hash}", - headers=headers) + res = requests.get( + f"https://www.virustotal.com/api/v3/files/{file_hash}", headers=headers + ) if res.status_code == 200: report = res.json() diff --git a/mvt/ios/cli.py b/mvt/ios/cli.py index f7daf0a..b548d5c 100644 --- a/mvt/ios/cli.py +++ b/mvt/ios/cli.py @@ -11,14 +11,23 @@ import click from rich.prompt import Prompt from mvt.common.cmd_check_iocs import CmdCheckIOCS -from mvt.common.help import (HELP_MSG_FAST, HELP_MSG_HASHES, HELP_MSG_IOC, - HELP_MSG_LIST_MODULES, HELP_MSG_MODULE, - HELP_MSG_OUTPUT, HELP_MSG_VERBOSE) +from mvt.common.help import ( + HELP_MSG_FAST, + HELP_MSG_HASHES, + HELP_MSG_IOC, + HELP_MSG_LIST_MODULES, + HELP_MSG_MODULE, + HELP_MSG_OUTPUT, + HELP_MSG_VERBOSE, +) from mvt.common.logo import logo from mvt.common.options import MutuallyExclusiveOption from mvt.common.updates import IndicatorsUpdates -from mvt.common.utils import (generate_hashes_from_path, init_logging, - set_verbose_logging) +from mvt.common.utils import ( + generate_hashes_from_path, + init_logging, + set_verbose_logging, +) from .cmd_check_backup import CmdIOSCheckBackup from .cmd_check_fs import CmdIOSCheckFS @@ -32,41 +41,55 @@ log = logging.getLogger("mvt") # Set this environment variable to a password if needed. 
MVT_IOS_BACKUP_PASSWORD = "MVT_IOS_BACKUP_PASSWORD" -CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help']) +CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"]) -#============================================================================== +# ============================================================================== # Main -#============================================================================== +# ============================================================================== @click.group(invoke_without_command=False) def cli(): logo() -#============================================================================== +# ============================================================================== # Command: version -#============================================================================== +# ============================================================================== @cli.command("version", help="Show the currently installed version of MVT") def version(): return -#============================================================================== +# ============================================================================== # Command: decrypt-backup -#============================================================================== -@cli.command("decrypt-backup", help="Decrypt an encrypted iTunes backup", - context_settings=CONTEXT_SETTINGS) -@click.option("--destination", "-d", required=True, - help="Path to the folder where to store the decrypted backup") -@click.option("--password", "-p", cls=MutuallyExclusiveOption, - help="Password to use to decrypt the backup (or, set " - f"{MVT_IOS_BACKUP_PASSWORD} environment variable)", - mutually_exclusive=["key_file"]) -@click.option("--key-file", "-k", cls=MutuallyExclusiveOption, - type=click.Path(exists=True), - help="File containing raw encryption key to use to decrypt " - "the backup", - mutually_exclusive=["password"]) +# ============================================================================== +@cli.command( + "decrypt-backup", + help="Decrypt an encrypted iTunes backup", + context_settings=CONTEXT_SETTINGS, +) +@click.option( + "--destination", + "-d", + required=True, + help="Path to the folder where to store the decrypted backup", +) +@click.option( + "--password", + "-p", + cls=MutuallyExclusiveOption, + help="Password to use to decrypt the backup (or, set " + f"{MVT_IOS_BACKUP_PASSWORD} environment variable)", + mutually_exclusive=["key_file"], +) +@click.option( + "--key-file", + "-k", + cls=MutuallyExclusiveOption, + type=click.Path(exists=True), + help="File containing raw encryption key to use to decrypt " "the backup", + mutually_exclusive=["password"], +) @click.option("--hashes", "-H", is_flag=True, help=HELP_MSG_HASHES) @click.argument("BACKUP_PATH", type=click.Path(exists=True)) @click.pass_context @@ -75,22 +98,28 @@ def decrypt_backup(ctx, destination, password, key_file, hashes, backup_path): if key_file: if MVT_IOS_BACKUP_PASSWORD in os.environ: - log.info("Ignoring %s environment variable, using --key-file" - "'%s' instead", MVT_IOS_BACKUP_PASSWORD, key_file) + log.info( + "Ignoring %s environment variable, using --key-file" "'%s' instead", + MVT_IOS_BACKUP_PASSWORD, + key_file, + ) backup.decrypt_with_key_file(key_file) elif password: - log.info("Your password may be visible in the process table because it " - "was supplied on the command line!") + log.info( + "Your password may be visible in the process table because it " + "was supplied on the command line!" 
+ ) if MVT_IOS_BACKUP_PASSWORD in os.environ: - log.info("Ignoring %s environment variable, using --password" - "argument instead", MVT_IOS_BACKUP_PASSWORD) + log.info( + "Ignoring %s environment variable, using --password" "argument instead", + MVT_IOS_BACKUP_PASSWORD, + ) backup.decrypt_with_password(password) elif MVT_IOS_BACKUP_PASSWORD in os.environ: - log.info("Using password from %s environment variable", - MVT_IOS_BACKUP_PASSWORD) + log.info("Using password from %s environment variable", MVT_IOS_BACKUP_PASSWORD) backup.decrypt_with_password(os.environ[MVT_IOS_BACKUP_PASSWORD]) else: sekrit = Prompt.ask("Enter backup password", password=True) @@ -112,33 +141,45 @@ def decrypt_backup(ctx, destination, password, key_file, hashes, backup_path): json.dump(info, handle, indent=4) -#============================================================================== +# ============================================================================== # Command: extract-key -#============================================================================== -@cli.command("extract-key", help="Extract decryption key from an iTunes backup", - context_settings=CONTEXT_SETTINGS) -@click.option("--password", "-p", - help="Password to use to decrypt the backup (or, set " - f"{MVT_IOS_BACKUP_PASSWORD} environment variable)") -@click.option("--key-file", "-k", - help="Key file to be written (if unset, will print to STDOUT)", - required=False, - type=click.Path(exists=False, file_okay=True, dir_okay=False, - writable=True)) +# ============================================================================== +@cli.command( + "extract-key", + help="Extract decryption key from an iTunes backup", + context_settings=CONTEXT_SETTINGS, +) +@click.option( + "--password", + "-p", + help="Password to use to decrypt the backup (or, set " + f"{MVT_IOS_BACKUP_PASSWORD} environment variable)", +) +@click.option( + "--key-file", + "-k", + help="Key file to be written (if unset, will print to STDOUT)", + required=False, + type=click.Path(exists=False, file_okay=True, dir_okay=False, writable=True), +) @click.argument("BACKUP_PATH", type=click.Path(exists=True)) def extract_key(password, key_file, backup_path): backup = DecryptBackup(backup_path) if password: - log.info("Your password may be visible in the process table because it " - "was supplied on the command line!") + log.info( + "Your password may be visible in the process table because it " + "was supplied on the command line!" 
+ ) if MVT_IOS_BACKUP_PASSWORD in os.environ: - log.info("Ignoring %s environment variable, using --password " - "argument instead", MVT_IOS_BACKUP_PASSWORD) + log.info( + "Ignoring %s environment variable, using --password " + "argument instead", + MVT_IOS_BACKUP_PASSWORD, + ) elif MVT_IOS_BACKUP_PASSWORD in os.environ: - log.info("Using password from %s environment variable", - MVT_IOS_BACKUP_PASSWORD) + log.info("Using password from %s environment variable", MVT_IOS_BACKUP_PASSWORD) password = os.environ[MVT_IOS_BACKUP_PASSWORD] else: password = Prompt.ask("Enter backup password", password=True) @@ -150,15 +191,23 @@ def extract_key(password, key_file, backup_path): backup.write_key(key_file) -#============================================================================== +# ============================================================================== # Command: check-backup -#============================================================================== -@cli.command("check-backup", help="Extract artifacts from an iTunes backup", - context_settings=CONTEXT_SETTINGS) -@click.option("--iocs", "-i", type=click.Path(exists=True), multiple=True, - default=[], help=HELP_MSG_IOC) -@click.option("--output", "-o", type=click.Path(exists=False), - help=HELP_MSG_OUTPUT) +# ============================================================================== +@cli.command( + "check-backup", + help="Extract artifacts from an iTunes backup", + context_settings=CONTEXT_SETTINGS, +) +@click.option( + "--iocs", + "-i", + type=click.Path(exists=True), + multiple=True, + default=[], + help=HELP_MSG_IOC, +) +@click.option("--output", "-o", type=click.Path(exists=False), help=HELP_MSG_OUTPUT) @click.option("--fast", "-f", is_flag=True, help=HELP_MSG_FAST) @click.option("--list-modules", "-l", is_flag=True, help=HELP_MSG_LIST_MODULES) @click.option("--module", "-m", help=HELP_MSG_MODULE) @@ -166,12 +215,19 @@ def extract_key(password, key_file, backup_path): @click.option("--verbose", "-v", is_flag=True, help=HELP_MSG_VERBOSE) @click.argument("BACKUP_PATH", type=click.Path(exists=True)) @click.pass_context -def check_backup(ctx, iocs, output, fast, list_modules, module, hashes, verbose, backup_path): +def check_backup( + ctx, iocs, output, fast, list_modules, module, hashes, verbose, backup_path +): set_verbose_logging(verbose) - cmd = CmdIOSCheckBackup(target_path=backup_path, results_path=output, - ioc_files=iocs, module_name=module, fast_mode=fast, - hashes=hashes) + cmd = CmdIOSCheckBackup( + target_path=backup_path, + results_path=output, + ioc_files=iocs, + module_name=module, + fast_mode=fast, + hashes=hashes, + ) if list_modules: cmd.list_modules() @@ -182,19 +238,28 @@ def check_backup(ctx, iocs, output, fast, list_modules, module, hashes, verbose, cmd.run() if cmd.detected_count > 0: - log.warning("The analysis of the backup produced %d detections!", - cmd.detected_count) + log.warning( + "The analysis of the backup produced %d detections!", cmd.detected_count + ) -#============================================================================== +# ============================================================================== # Command: check-fs -#============================================================================== -@cli.command("check-fs", help="Extract artifacts from a full filesystem dump", - context_settings=CONTEXT_SETTINGS) -@click.option("--iocs", "-i", type=click.Path(exists=True), multiple=True, - default=[], help=HELP_MSG_IOC) -@click.option("--output", "-o", 
type=click.Path(exists=False), - help=HELP_MSG_OUTPUT) +# ============================================================================== +@cli.command( + "check-fs", + help="Extract artifacts from a full filesystem dump", + context_settings=CONTEXT_SETTINGS, +) +@click.option( + "--iocs", + "-i", + type=click.Path(exists=True), + multiple=True, + default=[], + help=HELP_MSG_IOC, +) +@click.option("--output", "-o", type=click.Path(exists=False), help=HELP_MSG_OUTPUT) @click.option("--fast", "-f", is_flag=True, help=HELP_MSG_FAST) @click.option("--list-modules", "-l", is_flag=True, help=HELP_MSG_LIST_MODULES) @click.option("--module", "-m", help=HELP_MSG_MODULE) @@ -204,9 +269,14 @@ def check_backup(ctx, iocs, output, fast, list_modules, module, hashes, verbose, @click.pass_context def check_fs(ctx, iocs, output, fast, list_modules, module, hashes, verbose, dump_path): set_verbose_logging(verbose) - cmd = CmdIOSCheckFS(target_path=dump_path, results_path=output, - ioc_files=iocs, module_name=module, fast_mode=fast, - hashes=hashes) + cmd = CmdIOSCheckFS( + target_path=dump_path, + results_path=output, + ioc_files=iocs, + module_name=module, + fast_mode=fast, + hashes=hashes, + ) if list_modules: cmd.list_modules() @@ -217,17 +287,28 @@ def check_fs(ctx, iocs, output, fast, list_modules, module, hashes, verbose, dum cmd.run() if cmd.detected_count > 0: - log.warning("The analysis of the iOS filesystem produced %d detections!", - cmd.detected_count) + log.warning( + "The analysis of the iOS filesystem produced %d detections!", + cmd.detected_count, + ) -#============================================================================== +# ============================================================================== # Command: check-iocs -#============================================================================== -@cli.command("check-iocs", help="Compare stored JSON results to provided indicators", - context_settings=CONTEXT_SETTINGS) -@click.option("--iocs", "-i", type=click.Path(exists=True), multiple=True, - default=[], help=HELP_MSG_IOC) +# ============================================================================== +@cli.command( + "check-iocs", + help="Compare stored JSON results to provided indicators", + context_settings=CONTEXT_SETTINGS, +) +@click.option( + "--iocs", + "-i", + type=click.Path(exists=True), + multiple=True, + default=[], + help=HELP_MSG_IOC, +) @click.option("--list-modules", "-l", is_flag=True, help=HELP_MSG_LIST_MODULES) @click.option("--module", "-m", help=HELP_MSG_MODULE) @click.argument("FOLDER", type=click.Path(exists=True)) @@ -243,11 +324,14 @@ def check_iocs(ctx, iocs, list_modules, module, folder): cmd.run() -#============================================================================== +# ============================================================================== # Command: download-iocs -#============================================================================== -@cli.command("download-iocs", help="Download public STIX2 indicators", - context_settings=CONTEXT_SETTINGS) +# ============================================================================== +@cli.command( + "download-iocs", + help="Download public STIX2 indicators", + context_settings=CONTEXT_SETTINGS, +) def download_iocs(): ioc_updates = IndicatorsUpdates() ioc_updates.update() diff --git a/mvt/ios/cmd_check_backup.py b/mvt/ios/cmd_check_backup.py index 53ed6bd..b0fdefb 100644 --- a/mvt/ios/cmd_check_backup.py +++ b/mvt/ios/cmd_check_backup.py @@ -15,7 +15,6 @@ log = 
logging.getLogger(__name__) class CmdIOSCheckBackup(Command): - def __init__( self, target_path: Optional[str] = None, @@ -23,13 +22,19 @@ class CmdIOSCheckBackup(Command): ioc_files: Optional[list] = None, module_name: Optional[str] = None, serial: Optional[str] = None, - fast_mode: Optional[bool] = False, - hashes: Optional[bool] = False, + fast_mode: bool = False, + hashes: bool = False, ) -> None: - super().__init__(target_path=target_path, results_path=results_path, - ioc_files=ioc_files, module_name=module_name, - serial=serial, fast_mode=fast_mode, hashes=hashes, - log=log) + super().__init__( + target_path=target_path, + results_path=results_path, + ioc_files=ioc_files, + module_name=module_name, + serial=serial, + fast_mode=fast_mode, + hashes=hashes, + log=log, + ) self.name = "check-backup" self.modules = BACKUP_MODULES + MIXED_MODULES diff --git a/mvt/ios/cmd_check_fs.py b/mvt/ios/cmd_check_fs.py index cb9175b..6365904 100644 --- a/mvt/ios/cmd_check_fs.py +++ b/mvt/ios/cmd_check_fs.py @@ -15,7 +15,6 @@ log = logging.getLogger(__name__) class CmdIOSCheckFS(Command): - def __init__( self, target_path: Optional[str] = None, @@ -23,13 +22,19 @@ class CmdIOSCheckFS(Command): ioc_files: Optional[list] = None, module_name: Optional[str] = None, serial: Optional[str] = None, - fast_mode: Optional[bool] = False, - hashes: Optional[bool] = False, + fast_mode: bool = False, + hashes: bool = False, ) -> None: - super().__init__(target_path=target_path, results_path=results_path, - ioc_files=ioc_files, module_name=module_name, - serial=serial, fast_mode=fast_mode, hashes=hashes, - log=log) + super().__init__( + target_path=target_path, + results_path=results_path, + ioc_files=ioc_files, + module_name=module_name, + serial=serial, + fast_mode=fast_mode, + hashes=hashes, + log=log, + ) self.name = "check-fs" self.modules = FS_MODULES + MIXED_MODULES diff --git a/mvt/ios/decrypt.py b/mvt/ios/decrypt.py index 07629ef..6a68738 100644 --- a/mvt/ios/decrypt.py +++ b/mvt/ios/decrypt.py @@ -55,13 +55,19 @@ class DecryptBackup: log.critical("The backup does not seem encrypted!") return False - def _process_file(self, relative_path: str, domain: str, item, - file_id: str, item_folder: str) -> None: - self._backup.getFileDecryptedCopy(manifestEntry=item, - targetName=file_id, - targetFolder=item_folder) - log.info("Decrypted file %s [%s] to %s/%s", relative_path, domain, - item_folder, file_id) + def _process_file( + self, relative_path: str, domain: str, item, file_id: str, item_folder: str + ) -> None: + self._backup.getFileDecryptedCopy( + manifestEntry=item, targetName=file_id, targetFolder=item_folder + ) + log.info( + "Decrypted file %s [%s] to %s/%s", + relative_path, + domain, + item_folder, + file_id, + ) def process_backup(self) -> None: if not os.path.exists(self.dest_path): @@ -83,11 +89,12 @@ class DecryptBackup: # This may be a partial backup. Skip files from the manifest # which do not exist locally. - source_file_path = os.path.join(self.backup_path, file_id[0:2], - file_id) + source_file_path = os.path.join(self.backup_path, file_id[0:2], file_id) if not os.path.exists(source_file_path): - log.debug("Skipping file %s. File not found in encrypted backup directory.", - source_file_path) + log.debug( + "Skipping file %s. File not found in encrypted backup directory.", + source_file_path, + ) continue item_folder = os.path.join(self.dest_path, file_id[0:2]) @@ -99,10 +106,10 @@ class DecryptBackup: # Add manifest plist to both keys to handle this. 
item["manifest"] = item["file"] - pool.apply_async(self._process_file, args=(relative_path, - domain, item, - file_id, - item_folder)) + pool.apply_async( + self._process_file, + args=(relative_path, domain, item, file_id, item_folder), + ) except Exception as exc: log.error("Failed to decrypt file %s: %s", relative_path, exc) @@ -112,10 +119,8 @@ class DecryptBackup: # Copying over the root plist files as well. for file_name in os.listdir(self.backup_path): if file_name.endswith(".plist"): - log.info("Copied plist file %s to %s", - file_name, self.dest_path) - shutil.copy(os.path.join(self.backup_path, file_name), - self.dest_path) + log.info("Copied plist file %s to %s", file_name, self.dest_path) + shutil.copy(os.path.join(self.backup_path, file_name), self.dest_path) def decrypt_with_password(self, password: str) -> None: """Decrypts an encrypted iOS backup. @@ -123,22 +128,26 @@ class DecryptBackup: :param password: Password to use to decrypt the original backup """ - log.info("Decrypting iOS backup at path %s with password", - self.backup_path) + log.info("Decrypting iOS backup at path %s with password", self.backup_path) if not os.path.exists(os.path.join(self.backup_path, "Manifest.plist")): - possible = glob.glob(os.path.join( - self.backup_path, "*", "Manifest.plist")) + possible = glob.glob(os.path.join(self.backup_path, "*", "Manifest.plist")) if len(possible) == 1: newpath = os.path.dirname(possible[0]) - log.warning("No Manifest.plist in %s, using %s instead.", - self.backup_path, newpath) + log.warning( + "No Manifest.plist in %s, using %s instead.", + self.backup_path, + newpath, + ) self.backup_path = newpath elif len(possible) > 1: - log.critical("No Manifest.plist in %s, and %d Manifest.plist files in subdirs. " - "Please choose one!", - self.backup_path, len(possible)) + log.critical( + "No Manifest.plist in %s, and %d Manifest.plist files in subdirs. " + "Please choose one!", + self.backup_path, + len(possible), + ) return # Before proceeding, we check whether the backup is indeed encrypted. @@ -146,23 +155,33 @@ class DecryptBackup: return try: - self._backup = iOSbackup(udid=os.path.basename(self.backup_path), - cleartextpassword=password, - backuproot=os.path.dirname(self.backup_path)) + self._backup = iOSbackup( + udid=os.path.basename(self.backup_path), + cleartextpassword=password, + backuproot=os.path.dirname(self.backup_path), + ) except Exception as exc: - if (isinstance(exc, KeyError) - and len(exc.args) > 0 - and exc.args[0] == b"KEY"): + if ( + isinstance(exc, KeyError) + and len(exc.args) > 0 + and exc.args[0] == b"KEY" + ): log.critical("Failed to decrypt backup. Password is probably wrong.") - elif (isinstance(exc, FileNotFoundError) - and os.path.basename(exc.filename) == "Manifest.plist"): - log.critical("Failed to find a valid backup at %s. " - "Did you point to the right backup path?", - self.backup_path) + elif ( + isinstance(exc, FileNotFoundError) + and os.path.basename(exc.filename) == "Manifest.plist" + ): + log.critical( + "Failed to find a valid backup at %s. " + "Did you point to the right backup path?", + self.backup_path, + ) else: log.exception(exc) - log.critical("Failed to decrypt backup. Did you provide the correct password? " - "Did you point to the right backup path?") + log.critical( + "Failed to decrypt backup. Did you provide the correct password? " + "Did you point to the right backup path?" + ) def decrypt_with_key_file(self, key_file: str) -> None: """Decrypts an encrypted iOS backup using a key file. 
@@ -170,8 +189,11 @@ class DecryptBackup: :param key_file: File to read the key bytes to decrypt the backup """ - log.info("Decrypting iOS backup at path %s with key file %s", - self.backup_path, key_file) + log.info( + "Decrypting iOS backup at path %s with key file %s", + self.backup_path, + key_file, + ) # Before proceeding, we check whether the backup is indeed encrypted. if not self.is_encrypted(self.backup_path): @@ -182,17 +204,23 @@ class DecryptBackup: # Key should be 64 hex encoded characters (32 raw bytes) if len(key_bytes) != 64: - log.critical("Invalid key from key file. Did you provide the correct key file?") + log.critical( + "Invalid key from key file. Did you provide the correct key file?" + ) return try: key_bytes_raw = binascii.unhexlify(key_bytes) - self._backup = iOSbackup(udid=os.path.basename(self.backup_path), - derivedkey=key_bytes_raw, - backuproot=os.path.dirname(self.backup_path)) + self._backup = iOSbackup( + udid=os.path.basename(self.backup_path), + derivedkey=key_bytes_raw, + backuproot=os.path.dirname(self.backup_path), + ) except Exception as exc: log.exception(exc) - log.critical("Failed to decrypt backup. Did you provide the correct key file?") + log.critical( + "Failed to decrypt backup. Did you provide the correct key file?" + ) def get_key(self) -> None: """Retrieve and prints the encryption key.""" @@ -200,8 +228,11 @@ class DecryptBackup: return self._decryption_key = self._backup.getDecryptionKey() - log.info("Derived decryption key for backup at path %s is: \"%s\"", - self.backup_path, self._decryption_key) + log.info( + 'Derived decryption key for backup at path %s is: "%s"', + self.backup_path, + self._decryption_key, + ) def write_key(self, key_path: str) -> None: """Save extracted key to file. @@ -214,13 +245,15 @@ class DecryptBackup: return try: - with open(key_path, 'w', encoding="utf-8") as handle: + with open(key_path, "w", encoding="utf-8") as handle: handle.write(self._decryption_key) except Exception as exc: log.exception(exc) log.critical("Failed to write key to file: %s", key_path) return else: - log.info("Wrote decryption key to file: %s. This file is " - "equivalent to a plaintext password. Keep it safe!", - key_path) + log.info( + "Wrote decryption key to file: %s. This file is " + "equivalent to a plaintext password. 
Keep it safe!", + key_path, + ) diff --git a/mvt/ios/modules/backup/backup_info.py b/mvt/ios/modules/backup/backup_info.py index 239fdd8..8bc26e6 100644 --- a/mvt/ios/modules/backup/backup_info.py +++ b/mvt/ios/modules/backup/backup_info.py @@ -22,31 +22,51 @@ class BackupInfo(IOSExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) self.results = {} def run(self) -> None: info_path = os.path.join(self.target_path, "Info.plist") if not os.path.exists(info_path): - raise DatabaseNotFoundError("No Info.plist at backup path, unable to extract device " - "information") + raise DatabaseNotFoundError( + "No Info.plist at backup path, unable to extract device " "information" + ) with open(info_path, "rb") as handle: info = plistlib.load(handle) - fields = ["Build Version", "Device Name", "Display Name", - "GUID", "ICCID", "IMEI", "MEID", "Installed Applications", - "Last Backup Date", "Phone Number", "Product Name", - "Product Type", "Product Version", "Serial Number", - "Target Identifier", "Target Type", "Unique Identifier", - "iTunes Version"] + fields = [ + "Build Version", + "Device Name", + "Display Name", + "GUID", + "ICCID", + "IMEI", + "MEID", + "Installed Applications", + "Last Backup Date", + "Phone Number", + "Product Name", + "Product Type", + "Product Version", + "Serial Number", + "Target Identifier", + "Target Type", + "Unique Identifier", + "iTunes Version", + ] for field in fields: value = info.get(field, None) diff --git a/mvt/ios/modules/backup/configuration_profiles.py b/mvt/ios/modules/backup/configuration_profiles.py index d0497e5..df32eed 100644 --- a/mvt/ios/modules/backup/configuration_profiles.py +++ b/mvt/ios/modules/backup/configuration_profiles.py @@ -13,7 +13,9 @@ from mvt.common.utils import convert_datetime_to_iso from ..base import IOSExtraction -CONF_PROFILES_DOMAIN = "SysSharedContainerDomain-systemgroup.com.apple.configurationprofiles" +CONF_PROFILES_DOMAIN = ( + "SysSharedContainerDomain-systemgroup.com.apple.configurationprofiles" +) class ConfigurationProfiles(IOSExtraction): @@ -24,26 +26,31 @@ class ConfigurationProfiles(IOSExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: if not record["install_date"]: return {} - payload_name = record['plist'].get('PayloadDisplayName') - payload_description = record['plist'].get('PayloadDescription') + payload_name = record["plist"].get("PayloadDisplayName") + 
payload_description = record["plist"].get("PayloadDescription") return { "timestamp": record["install_date"], "module": self.__class__.__name__, "event": "configuration_profile_install", "data": f"{record['plist']['PayloadType']} installed: {record['plist']['PayloadUUID']} " - f"- {payload_name}: {payload_description}" + f"- {payload_name}: {payload_description}", } def check_indicators(self) -> None: @@ -58,10 +65,12 @@ class ConfigurationProfiles(IOSExtraction): # indicator list. ioc = self.indicators.check_profile(result["plist"]["PayloadUUID"]) if ioc: - self.log.warning("Found a known malicious configuration " - "profile \"%s\" with UUID %s", - result['plist']['PayloadDisplayName'], - result['plist']['PayloadUUID']) + self.log.warning( + "Found a known malicious configuration " + 'profile "%s" with UUID %s', + result["plist"]["PayloadDisplayName"], + result["plist"]["PayloadUUID"], + ) result["matched_indicator"] = ioc self.detected.append(result) continue @@ -69,22 +78,26 @@ class ConfigurationProfiles(IOSExtraction): # Highlight suspicious configuration profiles which may be used # to hide notifications. if payload_content["PayloadType"] in ["com.apple.notificationsettings"]: - self.log.warning("Found a potentially suspicious configuration profile " - "\"%s\" with payload type %s", - result['plist']['PayloadDisplayName'], - payload_content['PayloadType']) + self.log.warning( + "Found a potentially suspicious configuration profile " + '"%s" with payload type %s', + result["plist"]["PayloadDisplayName"], + payload_content["PayloadType"], + ) self.detected.append(result) continue def run(self) -> None: for conf_file in self._get_backup_files_from_manifest( - domain=CONF_PROFILES_DOMAIN): + domain=CONF_PROFILES_DOMAIN + ): conf_rel_path = conf_file["relative_path"] # Filter out all configuration files that are not configuration # profiles. - if not conf_rel_path or not os.path.basename( - conf_rel_path).startswith("profile-"): + if not conf_rel_path or not os.path.basename(conf_rel_path).startswith( + "profile-" + ): continue conf_file_path = self._get_backup_file_from_id(conf_file["file_id"]) @@ -100,37 +113,75 @@ class ConfigurationProfiles(IOSExtraction): # TODO: Tidy up the following code hell. 
if "SignerCerts" in conf_plist: - conf_plist["SignerCerts"] = [b64encode(x) for x in conf_plist["SignerCerts"]] + conf_plist["SignerCerts"] = [ + b64encode(x) for x in conf_plist["SignerCerts"] + ] if "OTAProfileStub" in conf_plist: if "SignerCerts" in conf_plist["OTAProfileStub"]: - conf_plist["OTAProfileStub"]["SignerCerts"] = [b64encode(x) for x in conf_plist["OTAProfileStub"]["SignerCerts"]] + conf_plist["OTAProfileStub"]["SignerCerts"] = [ + b64encode(x) + for x in conf_plist["OTAProfileStub"]["SignerCerts"] + ] if "PayloadContent" in conf_plist["OTAProfileStub"]: - if "EnrollmentIdentityPersistentID" in conf_plist["OTAProfileStub"]["PayloadContent"]: - conf_plist["OTAProfileStub"]["PayloadContent"]["EnrollmentIdentityPersistentID"] = b64encode(conf_plist["OTAProfileStub"]["PayloadContent"]["EnrollmentIdentityPersistentID"]) + if ( + "EnrollmentIdentityPersistentID" + in conf_plist["OTAProfileStub"]["PayloadContent"] + ): + conf_plist["OTAProfileStub"]["PayloadContent"][ + "EnrollmentIdentityPersistentID" + ] = b64encode( + conf_plist["OTAProfileStub"]["PayloadContent"][ + "EnrollmentIdentityPersistentID" + ] + ) if "PushTokenDataSentToServerKey" in conf_plist: - conf_plist["PushTokenDataSentToServerKey"] = b64encode(conf_plist["PushTokenDataSentToServerKey"]) + conf_plist["PushTokenDataSentToServerKey"] = b64encode( + conf_plist["PushTokenDataSentToServerKey"] + ) if "LastPushTokenHash" in conf_plist: - conf_plist["LastPushTokenHash"] = b64encode(conf_plist["LastPushTokenHash"]) + conf_plist["LastPushTokenHash"] = b64encode( + conf_plist["LastPushTokenHash"] + ) if "PayloadContent" in conf_plist: for content_entry in range(len(conf_plist["PayloadContent"])): if "PERSISTENT_REF" in conf_plist["PayloadContent"][content_entry]: - conf_plist["PayloadContent"][content_entry]["PERSISTENT_REF"] = b64encode(conf_plist["PayloadContent"][content_entry]["PERSISTENT_REF"]) + conf_plist["PayloadContent"][content_entry][ + "PERSISTENT_REF" + ] = b64encode( + conf_plist["PayloadContent"][content_entry][ + "PERSISTENT_REF" + ] + ) - if "IdentityPersistentRef" in conf_plist["PayloadContent"][content_entry]: - conf_plist["PayloadContent"][content_entry]["IdentityPersistentRef"] = b64encode(conf_plist["PayloadContent"][content_entry]["IdentityPersistentRef"]) + if ( + "IdentityPersistentRef" + in conf_plist["PayloadContent"][content_entry] + ): + conf_plist["PayloadContent"][content_entry][ + "IdentityPersistentRef" + ] = b64encode( + conf_plist["PayloadContent"][content_entry][ + "IdentityPersistentRef" + ] + ) - self.results.append({ - "file_id": conf_file["file_id"], - "relative_path": conf_file["relative_path"], - "domain": conf_file["domain"], - "plist": conf_plist, - "install_date": convert_datetime_to_iso(conf_plist.get("InstallDate")), - }) + self.results.append( + { + "file_id": conf_file["file_id"], + "relative_path": conf_file["relative_path"], + "domain": conf_file["domain"], + "plist": conf_plist, + "install_date": convert_datetime_to_iso( + conf_plist.get("InstallDate") + ), + } + ) - self.log.info("Extracted details about %d configuration profiles", - len(self.results)) + self.log.info( + "Extracted details about %d configuration profiles", len(self.results) + ) diff --git a/mvt/ios/modules/backup/manifest.py b/mvt/ios/modules/backup/manifest.py index 5c4b5ea..917d949 100644 --- a/mvt/ios/modules/backup/manifest.py +++ b/mvt/ios/modules/backup/manifest.py @@ -26,13 +26,18 @@ class Manifest(IOSExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: 
Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def _get_key(self, dictionary, key): """Unserialized plist objects can have keys which are str or byte types @@ -42,8 +47,7 @@ class Manifest(IOSExtraction): :param key: """ - return (dictionary.get(key.encode("utf-8"), None) - or dictionary.get(key, None)) + return dictionary.get(key.encode("utf-8"), None) or dictionary.get(key, None) @staticmethod def _convert_timestamp(timestamp_or_unix_time_int): @@ -62,20 +66,23 @@ class Manifest(IOSExtraction): if "modified" not in record or "status_changed" not in record: return records - for timestamp in set([record["created"], record["modified"], - record["status_changed"]]): + for timestamp in set( + [record["created"], record["modified"], record["status_changed"]] + ): macb = "" macb += "M" if timestamp == record["modified"] else "-" macb += "-" macb += "C" if timestamp == record["status_changed"] else "-" macb += "B" if timestamp == record["created"] else "-" - records.append({ - "timestamp": timestamp, - "module": self.__class__.__name__, - "event": macb, - "data": f"{record['relative_path']} - {record['domain']}" - }) + records.append( + { + "timestamp": timestamp, + "module": self.__class__.__name__, + "event": macb, + "data": f"{record['relative_path']} - {record['domain']}", + } + ) return records @@ -85,10 +92,15 @@ class Manifest(IOSExtraction): continue if result["domain"]: - if (os.path.basename(result["relative_path"]) == "com.apple.CrashReporter.plist" - and result["domain"] == "RootDomain"): - self.log.warning("Found a potentially suspicious " - "\"com.apple.CrashReporter.plist\" file created in RootDomain") + if ( + os.path.basename(result["relative_path"]) + == "com.apple.CrashReporter.plist" + and result["domain"] == "RootDomain" + ): + self.log.warning( + "Found a potentially suspicious " + '"com.apple.CrashReporter.plist" file created in RootDomain' + ) self.detected.append(result) continue @@ -109,8 +121,12 @@ class Manifest(IOSExtraction): ioc = self.indicators.check_domain(part) if ioc: - self.log.warning("Found mention of domain \"%s\" in a backup file with " - "path: %s", ioc["value"], rel_path) + self.log.warning( + 'Found mention of domain "%s" in a backup file with ' + "path: %s", + ioc["value"], + rel_path, + ) result["matched_indicator"] = ioc self.detected.append(result) @@ -119,8 +135,7 @@ class Manifest(IOSExtraction): if not os.path.isfile(manifest_db_path): raise DatabaseNotFoundError("unable to find backup's Manifest.db") - self.log.info("Found Manifest.db database at path: %s", - manifest_db_path) + self.log.info("Found Manifest.db database at path: %s", manifest_db_path) conn = sqlite3.connect(manifest_db_path) cur = conn.cursor() @@ -148,27 +163,33 @@ class Manifest(IOSExtraction): birth = self._get_key(file_metadata, "Birth") last_modified = self._get_key(file_metadata, "LastModified") - last_status_change = self._get_key(file_metadata, - "LastStatusChange") + last_status_change = self._get_key( + file_metadata, "LastStatusChange" + ) - cleaned_metadata.update({ - "created": 
self._convert_timestamp(birth), - "modified": self._convert_timestamp(last_modified), - "status_changed": self._convert_timestamp(last_status_change), - "mode": oct(self._get_key(file_metadata, "Mode")), - "owner": self._get_key(file_metadata, "UserID"), - "size": self._get_key(file_metadata, "Size"), - }) + cleaned_metadata.update( + { + "created": self._convert_timestamp(birth), + "modified": self._convert_timestamp(last_modified), + "status_changed": self._convert_timestamp( + last_status_change + ), + "mode": oct(self._get_key(file_metadata, "Mode")), + "owner": self._get_key(file_metadata, "UserID"), + "size": self._get_key(file_metadata, "Size"), + } + ) except Exception: - self.log.exception("Error reading manifest file metadata for file with ID %s " - "and relative path %s", - file_data["fileID"], - file_data["relativePath"]) + self.log.exception( + "Error reading manifest file metadata for file with ID %s " + "and relative path %s", + file_data["fileID"], + file_data["relativePath"], + ) self.results.append(cleaned_metadata) cur.close() conn.close() - self.log.info("Extracted a total of %d file metadata items", - len(self.results)) + self.log.info("Extracted a total of %d file metadata items", len(self.results)) diff --git a/mvt/ios/modules/backup/profile_events.py b/mvt/ios/modules/backup/profile_events.py index d5dcdbd..758bfa1 100644 --- a/mvt/ios/modules/backup/profile_events.py +++ b/mvt/ios/modules/backup/profile_events.py @@ -21,18 +21,24 @@ class ProfileEvents(IOSExtraction): """ + def __init__( self, file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: return { @@ -40,8 +46,8 @@ class ProfileEvents(IOSExtraction): "module": self.__class__.__name__, "event": "profile_operation", "data": f"Process {record.get('process')} started operation " - f"{record.get('operation')} of profile " - f"{record.get('profile_id')}" + f"{record.get('operation')} of profile " + f"{record.get('profile_id')}", } def check_indicators(self) -> None: @@ -92,21 +98,24 @@ class ProfileEvents(IOSExtraction): def run(self) -> None: for events_file in self._get_backup_files_from_manifest( - relative_path=CONF_PROFILES_EVENTS_RELPATH): - events_file_path = self._get_backup_file_from_id( - events_file["file_id"]) + relative_path=CONF_PROFILES_EVENTS_RELPATH + ): + events_file_path = self._get_backup_file_from_id(events_file["file_id"]) if not events_file_path: continue - self.log.info("Found MCProfileEvents.plist file at %s", - events_file_path) + self.log.info("Found MCProfileEvents.plist file at %s", events_file_path) with open(events_file_path, "rb") as handle: self.results.extend(self.parse_profile_events(handle.read())) for result in self.results: - self.log.info("On %s process \"%s\" started operation \"%s\" of profile \"%s\"", - result.get("timestamp"), result.get("process"), - result.get("operation"), result.get("profile_id")) + self.log.info( + 'On %s process "%s" started operation "%s" of profile "%s"', + 
result.get("timestamp"), + result.get("process"), + result.get("operation"), + result.get("profile_id"), + ) self.log.info("Extracted %d profile events", len(self.results)) diff --git a/mvt/ios/modules/base.py b/mvt/ios/modules/base.py index 6d0d395..a5a01a4 100644 --- a/mvt/ios/modules/base.py +++ b/mvt/ios/modules/base.py @@ -11,8 +11,7 @@ import sqlite3 import subprocess from typing import Iterator, Optional, Union -from mvt.common.module import (DatabaseCorruptedError, DatabaseNotFoundError, - MVTModule) +from mvt.common.module import DatabaseCorruptedError, DatabaseNotFoundError, MVTModule class IOSExtraction(MVTModule): @@ -24,19 +23,25 @@ class IOSExtraction(MVTModule): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) self.is_backup = False self.is_fs_dump = False - def _recover_sqlite_db_if_needed(self, file_path: str, - forced: Optional[bool] = False) -> None: + def _recover_sqlite_db_if_needed( + self, file_path: str, forced: bool = False + ) -> None: """Tries to recover a malformed database by running a .clone command. :param file_path: Path to the malformed database file. @@ -59,30 +64,35 @@ class IOSExtraction(MVTModule): if not recover: return - self.log.info("Database at path %s is malformed. Trying to recover...", - file_path) + self.log.info( + "Database at path %s is malformed. Trying to recover...", file_path + ) if not shutil.which("sqlite3"): - raise DatabaseCorruptedError("failed to recover without sqlite3 binary: please install " - "sqlite3!") + raise DatabaseCorruptedError( + "failed to recover without sqlite3 binary: please install " "sqlite3!" + ) if '"' in file_path: - raise DatabaseCorruptedError(f"database at path '{file_path}' is corrupted. unable to " - "recover because it has a quotation mark (\") in its name") + raise DatabaseCorruptedError( + f"database at path '{file_path}' is corrupted. unable to " + 'recover because it has a quotation mark (") in its name' + ) bak_path = f"{file_path}.bak" shutil.move(file_path, bak_path) - ret = subprocess.call(["sqlite3", bak_path, f".clone \"{file_path}\""], - stdout=subprocess.PIPE, stderr=subprocess.PIPE) + ret = subprocess.call( + ["sqlite3", bak_path, f'.clone "{file_path}"'], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) if ret != 0: raise DatabaseCorruptedError("failed to recover database") self.log.info("Database at path %s recovered successfully!", file_path) def _get_backup_files_from_manifest( - self, - relative_path: Optional[str] = None, - domain: Optional[str] = None + self, relative_path: Optional[str] = None, domain: Optional[str] = None ) -> Iterator[dict]: """Locate files from Manifest.db. @@ -102,16 +112,19 @@ class IOSExtraction(MVTModule): conn = sqlite3.connect(manifest_db_path) cur = conn.cursor() if relative_path and domain: - cur.execute(f"{base_sql} relativePath = ? AND domain = ?;", - (relative_path, domain)) + cur.execute( + f"{base_sql} relativePath = ? 
AND domain = ?;", + (relative_path, domain), + ) else: if relative_path: if "*" in relative_path: - cur.execute(f"{base_sql} relativePath LIKE ?;", - (relative_path.replace("*", "%"),)) + cur.execute( + f"{base_sql} relativePath LIKE ?;", + (relative_path.replace("*", "%"),), + ) else: - cur.execute(f"{base_sql} relativePath = ?;", - (relative_path,)) + cur.execute(f"{base_sql} relativePath = ?;", (relative_path,)) elif domain: cur.execute(f"{base_sql} domain = ?;", (domain,)) except Exception as exc: @@ -133,17 +146,14 @@ class IOSExtraction(MVTModule): def _get_fs_files_from_patterns(self, root_paths: list) -> Iterator[str]: for root_path in root_paths: - for found_path in glob.glob(os.path.join(self.target_path, - root_path)): + for found_path in glob.glob(os.path.join(self.target_path, root_path)): if not os.path.exists(found_path): continue yield found_path def _find_ios_database( - self, - backup_ids: Optional[list] = None, - root_paths: Optional[list] = None + self, backup_ids: Optional[list] = None, root_paths: Optional[list] = None ) -> None: """Try to locate a module's database file from either an iTunes backup or a full filesystem dump. This is intended only for diff --git a/mvt/ios/modules/fs/__init__.py b/mvt/ios/modules/fs/__init__.py index 73cb690..9f15b5b 100644 --- a/mvt/ios/modules/fs/__init__.py +++ b/mvt/ios/modules/fs/__init__.py @@ -15,6 +15,16 @@ from .webkit_indexeddb import WebkitIndexedDB from .webkit_localstorage import WebkitLocalStorage from .webkit_safariviewservice import WebkitSafariViewService -FS_MODULES = [CacheFiles, Filesystem, Netusage, Analytics, AnalyticsIOSVersions, - SafariFavicon, ShutdownLog, IOSVersionHistory, WebkitIndexedDB, - WebkitLocalStorage, WebkitSafariViewService] +FS_MODULES = [ + CacheFiles, + Filesystem, + Netusage, + Analytics, + AnalyticsIOSVersions, + SafariFavicon, + ShutdownLog, + IOSVersionHistory, + WebkitIndexedDB, + WebkitLocalStorage, + WebkitSafariViewService, +] diff --git a/mvt/ios/modules/fs/analytics.py b/mvt/ios/modules/fs/analytics.py index 742418d..401cc34 100644 --- a/mvt/ios/modules/fs/analytics.py +++ b/mvt/ios/modules/fs/analytics.py @@ -27,13 +27,18 @@ class Analytics(IOSExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: return { @@ -54,9 +59,12 @@ class Analytics(IOSExtraction): ioc = self.indicators.check_process(value) if ioc: - self.log.warning("Found mention of a malicious process \"%s\" in %s file at %s", - value, result["artifact"], - result["isodate"]) + self.log.warning( + 'Found mention of a malicious process "%s" in %s file at %s', + value, + result["artifact"], + result["isodate"], + ) new_result = copy.copy(result) new_result["matched_indicator"] = ioc self.detected.append(new_result) @@ -64,9 +72,12 @@ class Analytics(IOSExtraction): ioc = self.indicators.check_domain(value) if ioc: - self.log.warning("Found mention of a malicious domain \"%s\" in %s file at %s", - value, result["artifact"], - 
result["isodate"]) + self.log.warning( + 'Found mention of a malicious domain "%s" in %s file at %s', + value, + result["artifact"], + result["isodate"], + ) new_result = copy.copy(result) new_result["matched_indicator"] = ioc self.detected.append(new_result) @@ -78,7 +89,8 @@ class Analytics(IOSExtraction): cur = conn.cursor() try: - cur.execute(""" + cur.execute( + """ SELECT timestamp, data @@ -93,9 +105,11 @@ class Analytics(IOSExtraction): timestamp, data FROM all_events; - """) + """ + ) except sqlite3.OperationalError: - cur.execute(""" + cur.execute( + """ SELECT timestamp, data @@ -105,7 +119,8 @@ class Analytics(IOSExtraction): timestamp, data FROM soft_failures; - """) + """ + ) for row in cur: if row[0] and row[1]: @@ -131,14 +146,14 @@ class Analytics(IOSExtraction): def process_analytics_dbs(self): for file_path in self._get_fs_files_from_patterns(ANALYTICS_DB_PATH): self.file_path = file_path - self.log.info("Found Analytics database file at path: %s", - file_path) + self.log.info("Found Analytics database file at path: %s", file_path) self._extract_analytics_data() def run(self) -> None: self.process_analytics_dbs() - self.log.info("Extracted %d records from analytics databases", - len(self.results)) + self.log.info( + "Extracted %d records from analytics databases", len(self.results) + ) self.results = sorted(self.results, key=lambda entry: entry["isodate"]) diff --git a/mvt/ios/modules/fs/analytics_ios_versions.py b/mvt/ios/modules/fs/analytics_ios_versions.py index f71dcfa..1320923 100644 --- a/mvt/ios/modules/fs/analytics_ios_versions.py +++ b/mvt/ios/modules/fs/analytics_ios_versions.py @@ -23,13 +23,18 @@ class AnalyticsIOSVersions(IOSExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: return { @@ -68,13 +73,19 @@ class AnalyticsIOSVersions(IOSExtraction): for build, isodate in builds.items(): version = find_version_by_build(build) - self.results.append({ - "isodate": isodate, - "build": build, - "version": version, - }) + self.results.append( + { + "isodate": isodate, + "build": build, + "version": version, + } + ) self.results = sorted(self.results, key=lambda entry: entry["isodate"]) for result in self.results: - self.log.info("iOS version %s (%s) first appeared on %s", - result["version"], result["build"], result["isodate"]) + self.log.info( + "iOS version %s (%s) first appeared on %s", + result["version"], + result["build"], + result["isodate"], + ) diff --git a/mvt/ios/modules/fs/cache_files.py b/mvt/ios/modules/fs/cache_files.py index 16aa9b3..90c8b7a 100644 --- a/mvt/ios/modules/fs/cache_files.py +++ b/mvt/ios/modules/fs/cache_files.py @@ -12,29 +12,35 @@ from ..base import IOSExtraction class CacheFiles(IOSExtraction): - def __init__( self, file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - 
results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: records = [] for item in self.results[record]: - records.append({ - "timestamp": item["isodate"], - "module": self.__class__.__name__, - "event": "cache_response", - "data": f"{record} recorded visit to URL {item['url']}" - }) + records.append( + { + "timestamp": item["isodate"], + "module": self.__class__.__name__, + "event": "cache_response", + "data": f"{record} recorded visit to URL {item['url']}", + } + ) return records @@ -49,7 +55,9 @@ class CacheFiles(IOSExtraction): if ioc: value["matched_indicator"] = ioc if key not in self.detected: - self.detected[key] = [value, ] + self.detected[key] = [ + value, + ] else: self.detected[key].append(value) @@ -69,14 +77,16 @@ class CacheFiles(IOSExtraction): self.results[key_name] = [] for row in cur: - self.results[key_name].append({ - "entry_id": row[0], - "version": row[1], - "hash_value": row[2], - "storage_policy": row[3], - "url": row[4], - "isodate": row[5], - }) + self.results[key_name].append( + { + "entry_id": row[0], + "version": row[1], + "hash_value": row[2], + "storage_policy": row[3], + "url": row[4], + "isodate": row[5], + } + ) def run(self) -> None: self.results = {} diff --git a/mvt/ios/modules/fs/filesystem.py b/mvt/ios/modules/fs/filesystem.py index 7c5d6f3..b8b2a04 100644 --- a/mvt/ios/modules/fs/filesystem.py +++ b/mvt/ios/modules/fs/filesystem.py @@ -22,13 +22,18 @@ class Filesystem(IOSExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: return { @@ -67,8 +72,7 @@ class Filesystem(IOSExtraction): dir_path = os.path.join(root, dir_name) result = { "path": os.path.relpath(dir_path, self.target_path), - "modified": convert_unix_to_iso( - os.stat(dir_path).st_mtime), + "modified": convert_unix_to_iso(os.stat(dir_path).st_mtime), } except Exception: continue @@ -80,8 +84,7 @@ class Filesystem(IOSExtraction): file_path = os.path.join(root, file_name) result = { "path": os.path.relpath(file_path, self.target_path), - "modified": convert_unix_to_iso( - os.stat(file_path).st_mtime), + "modified": convert_unix_to_iso(os.stat(file_path).st_mtime), } except Exception: continue diff --git a/mvt/ios/modules/fs/net_netusage.py b/mvt/ios/modules/fs/net_netusage.py index 1c5837a..a133694 100644 --- a/mvt/ios/modules/fs/net_netusage.py +++ b/mvt/ios/modules/fs/net_netusage.py @@ -11,7 +11,7 @@ from ..net_base import NetBase NETUSAGE_ROOT_PATHS = [ "private/var/networkd/netusage.sqlite", - "private/var/networkd/db/netusage.sqlite" + "private/var/networkd/db/netusage.sqlite", ] @@ -27,13 +27,18 @@ class 
Netusage(NetBase): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def run(self) -> None: for netusage_path in self._get_fs_files_from_patterns(NETUSAGE_ROOT_PATHS): @@ -42,8 +47,11 @@ class Netusage(NetBase): try: self._extract_net_data() except sqlite3.OperationalError as exc: - self.log.info("Skipping this NetUsage database because " - "it seems empty or malformed: %s", exc) + self.log.info( + "Skipping this NetUsage database because " + "it seems empty or malformed: %s", + exc, + ) continue self._find_suspicious_processes() diff --git a/mvt/ios/modules/fs/safari_favicon.py b/mvt/ios/modules/fs/safari_favicon.py index 2f3cb67..f484151 100644 --- a/mvt/ios/modules/fs/safari_favicon.py +++ b/mvt/ios/modules/fs/safari_favicon.py @@ -25,13 +25,18 @@ class SafariFavicon(IOSExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: return { @@ -39,7 +44,7 @@ class SafariFavicon(IOSExtraction): "module": self.__class__.__name__, "event": "safari_favicon", "data": f"Safari favicon from {record['url']} with icon URL " - f"{record['icon_url']} ({record['type']})", + f"{record['icon_url']} ({record['type']})", } def check_indicators(self) -> None: @@ -60,7 +65,8 @@ class SafariFavicon(IOSExtraction): # Fetch valid icon cache. cur = conn.cursor() - cur.execute(""" + cur.execute( + """ SELECT page_url.url, icon_info.url, @@ -68,47 +74,52 @@ class SafariFavicon(IOSExtraction): FROM page_url JOIN icon_info ON page_url.uuid = icon_info.uuid ORDER BY icon_info.timestamp; - """) + """ + ) for row in cur: - self.results.append({ - "url": row[0], - "icon_url": row[1], - "timestamp": row[2], - "isodate": convert_mactime_to_iso(row[2]), - "type": "valid", - "safari_favicon_db_path": file_path, - }) + self.results.append( + { + "url": row[0], + "icon_url": row[1], + "timestamp": row[2], + "isodate": convert_mactime_to_iso(row[2]), + "type": "valid", + "safari_favicon_db_path": file_path, + } + ) # Fetch icons from the rejected icons table. 
- cur.execute(""" + cur.execute( + """ SELECT page_url, icon_url, timestamp FROM rejected_resources ORDER BY timestamp; - """) + """ + ) for row in cur: - self.results.append({ - "url": row[0], - "icon_url": row[1], - "timestamp": row[2], - "isodate": convert_mactime_to_iso(row[2]), - "type": "rejected", - "safari_favicon_db_path": file_path, - }) + self.results.append( + { + "url": row[0], + "icon_url": row[1], + "timestamp": row[2], + "isodate": convert_mactime_to_iso(row[2]), + "type": "rejected", + "safari_favicon_db_path": file_path, + } + ) cur.close() conn.close() def run(self) -> None: for file_path in self._get_fs_files_from_patterns(SAFARI_FAVICON_ROOT_PATHS): - self.log.info("Found Safari favicon cache database at path: %s", - file_path) + self.log.info("Found Safari favicon cache database at path: %s", file_path) self._process_favicon_db(file_path) - self.log.info("Extracted a total of %d favicon records", - len(self.results)) + self.log.info("Extracted a total of %d favicon records", len(self.results)) self.results = sorted(self.results, key=lambda x: x["isodate"]) diff --git a/mvt/ios/modules/fs/shutdownlog.py b/mvt/ios/modules/fs/shutdownlog.py index e0a5041..e14daf9 100644 --- a/mvt/ios/modules/fs/shutdownlog.py +++ b/mvt/ios/modules/fs/shutdownlog.py @@ -23,13 +23,18 @@ class ShutdownLog(IOSExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: return { @@ -37,7 +42,7 @@ class ShutdownLog(IOSExtraction): "module": self.__class__.__name__, "event": "shutdown", "data": f"Client {record['client']} with PID {record['pid']} " - "was running when the device was shut down", + "was running when the device was shut down", } def check_indicators(self) -> None: @@ -54,8 +59,11 @@ class ShutdownLog(IOSExtraction): for ioc in self.indicators.get_iocs("processes"): parts = result["client"].split("/") if ioc in parts: - self.log.warning("Found mention of a known malicious process \"%s\" in " - "shutdown.log", ioc) + self.log.warning( + 'Found mention of a known malicious process "%s" in ' + "shutdown.log", + ioc, + ) result["matched_indicator"] = ioc self.detected.append(result) continue @@ -66,28 +74,32 @@ class ShutdownLog(IOSExtraction): line = line.strip() if line.startswith("remaining client pid:"): - current_processes.append({ - "pid": line[line.find("pid: ")+5:line.find(" (")], - "client": line[line.find("(")+1:line.find(")")], - }) + current_processes.append( + { + "pid": line[line.find("pid: ") + 5 : line.find(" (")], + "client": line[line.find("(") + 1 : line.find(")")], + } + ) elif line.startswith("SIGTERM: "): try: - mac_timestamp = int(line[line.find("[")+1:line.find("]")]) + mac_timestamp = int(line[line.find("[") + 1 : line.find("]")]) except ValueError: try: start = line.find(" @") + 2 - mac_timestamp = int(line[start:start+10]) + mac_timestamp = int(line[start : start + 10]) except Exception: mac_timestamp = 0 isodate = convert_mactime_to_iso(mac_timestamp, 
from_2001=False) for current_process in current_processes: - self.results.append({ - "isodate": isodate, - "pid": current_process["pid"], - "client": current_process["client"], - }) + self.results.append( + { + "isodate": isodate, + "pid": current_process["pid"], + "client": current_process["client"], + } + ) current_processes = [] diff --git a/mvt/ios/modules/fs/version_history.py b/mvt/ios/modules/fs/version_history.py index 5e64868..12e9b70 100644 --- a/mvt/ios/modules/fs/version_history.py +++ b/mvt/ios/modules/fs/version_history.py @@ -25,13 +25,18 @@ class IOSVersionHistory(IOSExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: return { @@ -46,12 +51,15 @@ class IOSVersionHistory(IOSExtraction): with open(found_path, "r", encoding="utf-8") as analytics_log: log_line = json.loads(analytics_log.readline().strip()) - timestamp = datetime.datetime.strptime(log_line["timestamp"], - "%Y-%m-%d %H:%M:%S.%f %z") + timestamp = datetime.datetime.strptime( + log_line["timestamp"], "%Y-%m-%d %H:%M:%S.%f %z" + ) timestamp_utc = timestamp.astimezone(datetime.timezone.utc) - self.results.append({ - "isodate": convert_datetime_to_iso(timestamp_utc), - "os_version": log_line["os_version"], - }) + self.results.append( + { + "isodate": convert_datetime_to_iso(timestamp_utc), + "os_version": log_line["os_version"], + } + ) self.results = sorted(self.results, key=lambda entry: entry["isodate"]) diff --git a/mvt/ios/modules/fs/webkit_base.py b/mvt/ios/modules/fs/webkit_base.py index ac2957b..91c1415 100644 --- a/mvt/ios/modules/fs/webkit_base.py +++ b/mvt/ios/modules/fs/webkit_base.py @@ -35,8 +35,10 @@ class WebkitBase(IOSExtraction): name = name.replace("https_", "https://") url = name.split("_")[0] - self.results.append({ - "folder": key, - "url": url, - "isodate": convert_unix_to_iso(os.stat(found_path).st_mtime), - }) + self.results.append( + { + "folder": key, + "url": url, + "isodate": convert_unix_to_iso(os.stat(found_path).st_mtime), + } + ) diff --git a/mvt/ios/modules/fs/webkit_indexeddb.py b/mvt/ios/modules/fs/webkit_indexeddb.py index cf33ce6..c39a506 100644 --- a/mvt/ios/modules/fs/webkit_indexeddb.py +++ b/mvt/ios/modules/fs/webkit_indexeddb.py @@ -27,13 +27,18 @@ class WebkitIndexedDB(WebkitBase): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: return { @@ -41,10 +46,11 @@ class 
WebkitIndexedDB(WebkitBase): "module": self.__class__.__name__, "event": "webkit_indexeddb", "data": f"IndexedDB folder {record['folder']} containing " - f"file for URL {record['url']}", + f"file for URL {record['url']}", } def run(self) -> None: self._process_webkit_folder(WEBKIT_INDEXEDDB_ROOT_PATHS) - self.log.info("Extracted a total of %d WebKit IndexedDB records", - len(self.results)) + self.log.info( + "Extracted a total of %d WebKit IndexedDB records", len(self.results) + ) diff --git a/mvt/ios/modules/fs/webkit_localstorage.py b/mvt/ios/modules/fs/webkit_localstorage.py index 1d47fea..eac3880 100644 --- a/mvt/ios/modules/fs/webkit_localstorage.py +++ b/mvt/ios/modules/fs/webkit_localstorage.py @@ -25,13 +25,18 @@ class WebkitLocalStorage(WebkitBase): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: return { @@ -39,10 +44,12 @@ class WebkitLocalStorage(WebkitBase): "module": self.__class__.__name__, "event": "webkit_local_storage", "data": f"WebKit Local Storage folder {record['folder']} " - f"containing file for URL {record['url']}", + f"containing file for URL {record['url']}", } def run(self) -> None: self._process_webkit_folder(WEBKIT_LOCALSTORAGE_ROOT_PATHS) - self.log.info("Extracted a total of %d records from WebKit Local Storages", - len(self.results)) + self.log.info( + "Extracted a total of %d records from WebKit Local Storages", + len(self.results), + ) diff --git a/mvt/ios/modules/fs/webkit_safariviewservice.py b/mvt/ios/modules/fs/webkit_safariviewservice.py index b03aabb..b537326 100644 --- a/mvt/ios/modules/fs/webkit_safariviewservice.py +++ b/mvt/ios/modules/fs/webkit_safariviewservice.py @@ -25,15 +25,22 @@ class WebkitSafariViewService(WebkitBase): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def run(self) -> None: self._process_webkit_folder(WEBKIT_SAFARIVIEWSERVICE_ROOT_PATHS) - self.log.info("Extracted a total of %d records from WebKit SafariViewService WebsiteData", - len(self.results)) + self.log.info( + "Extracted a total of %d records from WebKit SafariViewService WebsiteData", + len(self.results), + ) diff --git a/mvt/ios/modules/mixed/__init__.py b/mvt/ios/modules/mixed/__init__.py index 70cb2f4..5bc2dea 100644 --- a/mvt/ios/modules/mixed/__init__.py +++ b/mvt/ios/modules/mixed/__init__.py @@ -26,9 +26,27 @@ from .webkit_resource_load_statistics import WebkitResourceLoadStatistics from .webkit_session_resource_log import 
WebkitSessionResourceLog from .whatsapp import Whatsapp -MIXED_MODULES = [Calls, ChromeFavicon, ChromeHistory, Contacts, FirefoxFavicon, - FirefoxHistory, IDStatusCache, InteractionC, LocationdClients, - OSAnalyticsADDaily, Datausage, SafariBrowserState, SafariHistory, - TCC, SMS, SMSAttachments, WebkitResourceLoadStatistics, - WebkitSessionResourceLog, Whatsapp, Shortcuts, Applications, - Calendar] +MIXED_MODULES = [ + Calls, + ChromeFavicon, + ChromeHistory, + Contacts, + FirefoxFavicon, + FirefoxHistory, + IDStatusCache, + InteractionC, + LocationdClients, + OSAnalyticsADDaily, + Datausage, + SafariBrowserState, + SafariHistory, + TCC, + SMS, + SMSAttachments, + WebkitResourceLoadStatistics, + WebkitSessionResourceLog, + Whatsapp, + Shortcuts, + Applications, + Calendar, +] diff --git a/mvt/ios/modules/mixed/applications.py b/mvt/ios/modules/mixed/applications.py index e5ac8c2..d510263 100644 --- a/mvt/ios/modules/mixed/applications.py +++ b/mvt/ios/modules/mixed/applications.py @@ -21,18 +21,24 @@ APPLICATIONS_DB_PATH = [ class Applications(IOSExtraction): """Extract information from accounts installed on the phone.""" + def __init__( self, file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: if "isodate" in record: @@ -40,7 +46,7 @@ class Applications(IOSExtraction): "timestamp": record["isodate"], "module": self.__class__.__name__, "event": "app_installed", - "data": f"App {record.get('name', '')} version {record.get('bundleShortVersionString', '')} from {record.get('artistName', '')} installed from {record.get('sourceApp', '')}" + "data": f"App {record.get('name', '')} version {record.get('bundleShortVersionString', '')} from {record.get('artistName', '')} installed from {record.get('sourceApp', '')}", } return [] @@ -48,36 +54,54 @@ class Applications(IOSExtraction): for result in self.results: if self.indicators: if "softwareVersionBundleId" not in result: - self.log.warning("Suspicious application identified without softwareVersionBundleId") + self.log.warning( + "Suspicious application identified without softwareVersionBundleId" + ) self.detected.append(result) continue ioc = self.indicators.check_process(result["softwareVersionBundleId"]) if ioc: - self.log.warning("Malicious application %s identified", result["softwareVersionBundleId"]) + self.log.warning( + "Malicious application %s identified", + result["softwareVersionBundleId"], + ) result["matched_indicator"] = ioc self.detected.append(result) continue ioc = self.indicators.check_app_id(result["softwareVersionBundleId"]) if ioc: - self.log.warning("Malicious application %s identified", result["softwareVersionBundleId"]) + self.log.warning( + "Malicious application %s identified", + result["softwareVersionBundleId"], + ) result["matched_indicator"] = ioc self.detected.append(result) continue - if result.get("sourceApp", "com.apple.AppStore") not in ["com.apple.AppStore", "com.apple.dmd", "dmd"]: - self.log.warning("Suspicious app not 
installed from the App Store or MDM: %s", result["softwareVersionBundleId"]) + if result.get("sourceApp", "com.apple.AppStore") not in [ + "com.apple.AppStore", + "com.apple.dmd", + "dmd", + ]: + self.log.warning( + "Suspicious app not installed from the App Store or MDM: %s", + result["softwareVersionBundleId"], + ) self.detected.append(result) def _parse_itunes_timestamp(self, entry: Dict[str, Any]) -> None: """ Parse the iTunes metadata info """ - if entry.get("com.apple.iTunesStore.downloadInfo", {}).get("purchaseDate", None): + if entry.get("com.apple.iTunesStore.downloadInfo", {}).get( + "purchaseDate", None + ): timestamp = datetime.strptime( entry["com.apple.iTunesStore.downloadInfo"]["purchaseDate"], - "%Y-%m-%dT%H:%M:%SZ") + "%Y-%m-%dT%H:%M:%SZ", + ) timestamp_utc = timestamp.astimezone(timezone.utc) entry["isodate"] = convert_datetime_to_iso(timestamp_utc) @@ -124,5 +148,4 @@ class Applications(IOSExtraction): for file_path in self._get_fs_files_from_patterns(APPLICATIONS_DB_PATH): self._parse_itunes_metadata(file_path) - self.log.info("Extracted a total of %d applications", - len(self.results)) + self.log.info("Extracted a total of %d applications", len(self.results)) diff --git a/mvt/ios/modules/mixed/calendar.py b/mvt/ios/modules/mixed/calendar.py index 6a5a866..7bc2716 100644 --- a/mvt/ios/modules/mixed/calendar.py +++ b/mvt/ios/modules/mixed/calendar.py @@ -14,9 +14,7 @@ from ..base import IOSExtraction CALENDAR_BACKUP_IDS = [ "2041457d5fe04d39d0ab481178355df6781e6858", ] -CALENDAR_ROOT_PATHS = [ - "private/var/mobile/Library/Calendar/Calendar.sqlitedb" -] +CALENDAR_ROOT_PATHS = ["private/var/mobile/Library/Calendar/Calendar.sqlitedb"] class Calendar(IOSExtraction): @@ -27,19 +25,24 @@ class Calendar(IOSExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) self.timestamps = [ "start_date", "end_date", "last_modified", "creation_date", - "participant_last_modified" + "participant_last_modified", ] def serialize(self, record: dict) -> Union[dict, list]: @@ -48,13 +51,15 @@ class Calendar(IOSExtraction): if timestamp not in record or not record[timestamp]: continue - records.append({ - "timestamp": record[timestamp], - "module": self.__class__.__name__, - "event": timestamp, - "data": f"Calendar event {record['summary']} ({record['description']}) " - f"(invitation by {record['participant_email']})" - }) + records.append( + { + "timestamp": record[timestamp], + "module": self.__class__.__name__, + "event": timestamp, + "data": f"Calendar event {record['summary']} ({record['description']}) " + f"(invitation by {record['participant_email']})", + } + ) return records def check_indicators(self) -> None: @@ -66,9 +71,11 @@ class Calendar(IOSExtraction): self.detected.append(result) continue - # Custom check for Quadream exploit + # Custom check for Quadream exploit if result["summary"] == "Meeting" and result["description"] == "Notes": - self.log.warning("Potential Quadream exploit event identified: %s", result["uuid"]) + 
self.log.warning( + "Potential Quadream exploit event identified: %s", result["uuid"] + ) self.detected.append(result) def _parse_calendar_db(self): @@ -78,7 +85,8 @@ class Calendar(IOSExtraction): conn = sqlite3.connect(self.file_path) cur = conn.cursor() - cur.execute(""" + cur.execute( + """ SELECT CalendarItem.ROWID as "id", CalendarItem.summary as "summary", @@ -105,7 +113,8 @@ class Calendar(IOSExtraction): Participant.last_modified as "participant_last_modified" FROM CalendarItem LEFT JOIN Participant ON Participant.ROWID = CalendarItem.organizer_id; - """) + """ + ) names = [description[0] for description in cur.description] for item in cur: @@ -125,12 +134,11 @@ class Calendar(IOSExtraction): conn.close() def run(self) -> None: - self._find_ios_database(backup_ids=CALENDAR_BACKUP_IDS, - root_paths=CALENDAR_ROOT_PATHS) - self.log.info("Found calendar database at path: %s", - self.file_path) + self._find_ios_database( + backup_ids=CALENDAR_BACKUP_IDS, root_paths=CALENDAR_ROOT_PATHS + ) + self.log.info("Found calendar database at path: %s", self.file_path) self._parse_calendar_db() - self.log.info("Extracted a total of %d calendar items", - len(self.results)) + self.log.info("Extracted a total of %d calendar items", len(self.results)) diff --git a/mvt/ios/modules/mixed/calls.py b/mvt/ios/modules/mixed/calls.py index c0a167c..18b3857 100644 --- a/mvt/ios/modules/mixed/calls.py +++ b/mvt/ios/modules/mixed/calls.py @@ -14,21 +14,29 @@ from ..base import IOSExtraction CALLS_BACKUP_IDS = [ "5a4935c78a5255723f707230a451d79c540d2741", ] -CALLS_ROOT_PATHS = [ - "private/var/mobile/Library/CallHistoryDB/CallHistory.storedata" -] +CALLS_ROOT_PATHS = ["private/var/mobile/Library/CallHistoryDB/CallHistory.storedata"] class Calls(IOSExtraction): """This module extracts phone calls details""" - def __init__(self, file_path: str = None, target_path: str = None, - results_path: str = None, fast_mode: bool = False, - log: logging.Logger = logging.getLogger(__name__), - results: list = []) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + def __init__( + self, + file_path: str = None, + target_path: str = None, + results_path: str = None, + fast_mode: bool = False, + log: logging.Logger = logging.getLogger(__name__), + results: list = [], + ) -> None: + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: return { @@ -36,31 +44,38 @@ class Calls(IOSExtraction): "module": self.__class__.__name__, "event": "call", "data": f"From {record['number']} using {record['provider']} " - f"during {record['duration']} seconds" + f"during {record['duration']} seconds", } def run(self) -> None: - self._find_ios_database(backup_ids=CALLS_BACKUP_IDS, - root_paths=CALLS_ROOT_PATHS) + self._find_ios_database( + backup_ids=CALLS_BACKUP_IDS, root_paths=CALLS_ROOT_PATHS + ) self.log.info("Found Calls database at path: %s", self.file_path) conn = sqlite3.connect(self.file_path) cur = conn.cursor() - cur.execute(""" + cur.execute( + """ SELECT ZDATE, ZDURATION, ZLOCATION, ZADDRESS, ZSERVICE_PROVIDER FROM ZCALLRECORD; - """) + """ + ) # names = [description[0] for description in cur.description] for row in cur: - self.results.append({ - "isodate": convert_mactime_to_iso(row[0]), - "duration": row[1], - "location": row[2], - "number": row[3].decode("utf-8") if row[3] and 
row[3] is bytes else row[3], - "provider": row[4] - }) + self.results.append( + { + "isodate": convert_mactime_to_iso(row[0]), + "duration": row[1], + "location": row[2], + "number": row[3].decode("utf-8") + if row[3] and row[3] is bytes + else row[3], + "provider": row[4], + } + ) cur.close() conn.close() diff --git a/mvt/ios/modules/mixed/chrome_favicon.py b/mvt/ios/modules/mixed/chrome_favicon.py index 0371a82..4c27c3d 100644 --- a/mvt/ios/modules/mixed/chrome_favicon.py +++ b/mvt/ios/modules/mixed/chrome_favicon.py @@ -7,14 +7,11 @@ import logging import sqlite3 from typing import Optional, Union -from mvt.common.utils import (convert_chrometime_to_datetime, - convert_datetime_to_iso) +from mvt.common.utils import convert_chrometime_to_datetime, convert_datetime_to_iso from ..base import IOSExtraction -CHROME_FAVICON_BACKUP_IDS = [ - "55680ab883d0fdcffd94f959b1632e5fbbb18c5b" -] +CHROME_FAVICON_BACKUP_IDS = ["55680ab883d0fdcffd94f959b1632e5fbbb18c5b"] # TODO: Confirm Chrome database path. CHROME_FAVICON_ROOT_PATHS = [ "private/var/mobile/Containers/Data/Application/*/Library/Application Support/Google/Chrome/Default/Favicons", @@ -29,20 +26,25 @@ class ChromeFavicon(IOSExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: return { "timestamp": record["isodate"], "module": self.__class__.__name__, "event": "new_favicon", - "data": f"{record['icon_url']} from {record['url']}" + "data": f"{record['icon_url']} from {record['url']}", } def check_indicators(self) -> None: @@ -59,16 +61,17 @@ class ChromeFavicon(IOSExtraction): self.detected.append(result) def run(self) -> None: - self._find_ios_database(backup_ids=CHROME_FAVICON_BACKUP_IDS, - root_paths=CHROME_FAVICON_ROOT_PATHS) - self.log.info("Found Chrome favicon cache database at path: %s", - self.file_path) + self._find_ios_database( + backup_ids=CHROME_FAVICON_BACKUP_IDS, root_paths=CHROME_FAVICON_ROOT_PATHS + ) + self.log.info("Found Chrome favicon cache database at path: %s", self.file_path) conn = sqlite3.connect(self.file_path) # Fetch icon cache cur = conn.cursor() - cur.execute(""" + cur.execute( + """ SELECT icon_mapping.page_url, favicons.url, @@ -78,18 +81,22 @@ class ChromeFavicon(IOSExtraction): JOIN favicon_bitmaps ON icon_mapping.icon_id = favicon_bitmaps.icon_id JOIN favicons ON icon_mapping.icon_id = favicons.id ORDER BY icon_mapping.id; - """) + """ + ) records = [] for row in cur: last_timestamp = int(row[2]) or int(row[3]) - records.append({ - "url": row[0], - "icon_url": row[1], - "timestamp": last_timestamp, - "isodate": convert_datetime_to_iso( - convert_chrometime_to_datetime(last_timestamp)), - }) + records.append( + { + "url": row[0], + "icon_url": row[1], + "timestamp": last_timestamp, + "isodate": convert_datetime_to_iso( + convert_chrometime_to_datetime(last_timestamp) + ), + } + ) cur.close() conn.close() diff --git a/mvt/ios/modules/mixed/chrome_history.py b/mvt/ios/modules/mixed/chrome_history.py index 
cbd4464..1997059 100644 --- a/mvt/ios/modules/mixed/chrome_history.py +++ b/mvt/ios/modules/mixed/chrome_history.py @@ -7,8 +7,7 @@ import logging import sqlite3 from typing import Optional, Union -from mvt.common.utils import (convert_chrometime_to_datetime, - convert_datetime_to_iso) +from mvt.common.utils import convert_chrometime_to_datetime, convert_datetime_to_iso from ..base import IOSExtraction @@ -29,13 +28,18 @@ class ChromeHistory(IOSExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: return { @@ -43,8 +47,8 @@ class ChromeHistory(IOSExtraction): "module": self.__class__.__name__, "event": "visit", "data": f"{record['id']} - {record['url']} " - f"(visit ID: {record['visit_id']}, " - f"redirect source: {record['redirect_source']})" + f"(visit ID: {record['visit_id']}, " + f"redirect source: {record['redirect_source']})", } def check_indicators(self) -> None: @@ -58,14 +62,15 @@ class ChromeHistory(IOSExtraction): self.detected.append(result) def run(self) -> None: - self._find_ios_database(backup_ids=CHROME_HISTORY_BACKUP_IDS, - root_paths=CHROME_HISTORY_ROOT_PATHS) - self.log.info("Found Chrome history database at path: %s", - self.file_path) + self._find_ios_database( + backup_ids=CHROME_HISTORY_BACKUP_IDS, root_paths=CHROME_HISTORY_ROOT_PATHS + ) + self.log.info("Found Chrome history database at path: %s", self.file_path) conn = sqlite3.connect(self.file_path) cur = conn.cursor() - cur.execute(""" + cur.execute( + """ SELECT urls.id, urls.url, @@ -75,21 +80,24 @@ class ChromeHistory(IOSExtraction): FROM urls JOIN visits ON visits.url = urls.id ORDER BY visits.visit_time; - """) + """ + ) for item in cur: - self.results.append({ - "id": item[0], - "url": item[1], - "visit_id": item[2], - "timestamp": item[3], - "isodate": convert_datetime_to_iso( - convert_chrometime_to_datetime(item[3])), - "redirect_source": item[4], - }) + self.results.append( + { + "id": item[0], + "url": item[1], + "visit_id": item[2], + "timestamp": item[3], + "isodate": convert_datetime_to_iso( + convert_chrometime_to_datetime(item[3]) + ), + "redirect_source": item[4], + } + ) cur.close() conn.close() - self.log.info("Extracted a total of %d history items", - len(self.results)) + self.log.info("Extracted a total of %d history items", len(self.results)) diff --git a/mvt/ios/modules/mixed/contacts.py b/mvt/ios/modules/mixed/contacts.py index 024ce71..8f2f2cf 100644 --- a/mvt/ios/modules/mixed/contacts.py +++ b/mvt/ios/modules/mixed/contacts.py @@ -25,30 +25,38 @@ class Contacts(IOSExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - 
log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def run(self) -> None: - self._find_ios_database(backup_ids=CONTACTS_BACKUP_IDS, - root_paths=CONTACTS_ROOT_PATHS) + self._find_ios_database( + backup_ids=CONTACTS_BACKUP_IDS, root_paths=CONTACTS_ROOT_PATHS + ) self.log.info("Found Contacts database at path: %s", self.file_path) conn = sqlite3.connect(self.file_path) cur = conn.cursor() try: - cur.execute(""" + cur.execute( + """ SELECT multi.value, person.first, person.middle, person.last, person.organization FROM ABPerson person, ABMultiValue multi WHERE person.rowid = multi.record_id and multi.value not null ORDER by person.rowid ASC; - """) + """ + ) except sqlite3.OperationalError as e: self.log.info("Error while reading the contact table: %s", e) return None @@ -64,5 +72,6 @@ class Contacts(IOSExtraction): cur.close() conn.close() - self.log.info("Extracted a total of %d contacts from the address book", - len(self.results)) + self.log.info( + "Extracted a total of %d contacts from the address book", len(self.results) + ) diff --git a/mvt/ios/modules/mixed/firefox_favicon.py b/mvt/ios/modules/mixed/firefox_favicon.py index 9177e3a..bb6c492 100644 --- a/mvt/ios/modules/mixed/firefox_favicon.py +++ b/mvt/ios/modules/mixed/firefox_favicon.py @@ -27,13 +27,18 @@ class FirefoxFavicon(IOSExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: return { @@ -41,7 +46,7 @@ class FirefoxFavicon(IOSExtraction): "module": self.__class__.__name__, "event": "firefox_history", "data": f"Firefox favicon {record['url']} " - f"when visiting {record['history_url']}", + f"when visiting {record['history_url']}", } def check_indicators(self) -> None: @@ -58,14 +63,15 @@ class FirefoxFavicon(IOSExtraction): self.detected.append(result) def run(self) -> None: - self._find_ios_database(backup_ids=FIREFOX_HISTORY_BACKUP_IDS, - root_paths=FIREFOX_HISTORY_ROOT_PATHS) - self.log.info("Found Firefox favicon database at path: %s", - self.file_path) + self._find_ios_database( + backup_ids=FIREFOX_HISTORY_BACKUP_IDS, root_paths=FIREFOX_HISTORY_ROOT_PATHS + ) + self.log.info("Found Firefox favicon database at path: %s", self.file_path) conn = sqlite3.connect(self.file_path) cur = conn.cursor() - cur.execute(""" + cur.execute( + """ SELECT favicons.id, favicons.url, @@ -78,22 +84,24 @@ class FirefoxFavicon(IOSExtraction): FROM favicons INNER JOIN favicon_sites ON favicon_sites.faviconID = favicons.id INNER JOIN history ON favicon_sites.siteID = history.id; - """) + """ + ) for item in cur: - self.results.append({ - "id": item[0], - "url": item[1], - "width": item[2], - "height": item[3], - "type": item[4], - "isodate": convert_unix_to_iso(item[5]), - "history_id": item[6], - "history_url": item[7] - }) + self.results.append( + { + "id": item[0], + "url": item[1], + "width": item[2], + "height": 
item[3], + "type": item[4], + "isodate": convert_unix_to_iso(item[5]), + "history_id": item[6], + "history_url": item[7], + } + ) cur.close() conn.close() - self.log.info("Extracted a total of %d history items", - len(self.results)) + self.log.info("Extracted a total of %d history items", len(self.results)) diff --git a/mvt/ios/modules/mixed/firefox_history.py b/mvt/ios/modules/mixed/firefox_history.py index 40013d1..70751bf 100644 --- a/mvt/ios/modules/mixed/firefox_history.py +++ b/mvt/ios/modules/mixed/firefox_history.py @@ -31,13 +31,18 @@ class FirefoxHistory(IOSExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: return { @@ -58,14 +63,15 @@ class FirefoxHistory(IOSExtraction): self.detected.append(result) def run(self) -> None: - self._find_ios_database(backup_ids=FIREFOX_HISTORY_BACKUP_IDS, - root_paths=FIREFOX_HISTORY_ROOT_PATHS) - self.log.info("Found Firefox history database at path: %s", - self.file_path) + self._find_ios_database( + backup_ids=FIREFOX_HISTORY_BACKUP_IDS, root_paths=FIREFOX_HISTORY_ROOT_PATHS + ) + self.log.info("Found Firefox history database at path: %s", self.file_path) conn = sqlite3.connect(self.file_path) cur = conn.cursor() - cur.execute(""" + cur.execute( + """ SELECT visits.id, visits.date/1000000, @@ -75,20 +81,22 @@ class FirefoxHistory(IOSExtraction): visits.type FROM visits, history WHERE visits.siteID = history.id; - """) + """ + ) for row in cur: - self.results.append({ - "id": row[0], - "isodate": convert_unix_to_iso(row[1]), - "url": row[2], - "title": row[3], - "i1000000s_local": row[4], - "type": row[5] - }) + self.results.append( + { + "id": row[0], + "isodate": convert_unix_to_iso(row[1]), + "url": row[2], + "title": row[3], + "i1000000s_local": row[4], + "type": row[5], + } + ) cur.close() conn.close() - self.log.info("Extracted a total of %d history items", - len(self.results)) + self.log.info("Extracted a total of %d history items", len(self.results)) diff --git a/mvt/ios/modules/mixed/idstatuscache.py b/mvt/ios/modules/mixed/idstatuscache.py index 17e51af..a8252c4 100644 --- a/mvt/ios/modules/mixed/idstatuscache.py +++ b/mvt/ios/modules/mixed/idstatuscache.py @@ -29,13 +29,18 @@ class IDStatusCache(IOSExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: return { @@ -43,7 +48,7 @@ class IDStatusCache(IOSExtraction): "module": 
self.__class__.__name__, "event": "lookup", "data": f"Lookup of {record['user']} within {record['package']} " - f"(Status {record['idstatus']})" + f"(Status {record['idstatus']})", } def check_indicators(self) -> None: @@ -60,8 +65,10 @@ class IDStatusCache(IOSExtraction): continue if "\\x00\\x00" in result.get("user", ""): - self.log.warning("Found an ID Status Cache entry with suspicious patterns: %s", - result.get("user")) + self.log.warning( + "Found an ID Status Cache entry with suspicious patterns: %s", + result.get("user"), + ) self.detected.append(result) def _extract_idstatuscache_entries(self, file_path): @@ -80,35 +87,36 @@ class IDStatusCache(IOSExtraction): except KeyError: continue - id_status_cache_entries.append({ - "package": app, - "user": entry.replace("\x00", "\\x00"), - "isodate": convert_mactime_to_iso(lookup_date), - "idstatus": id_status, - }) + id_status_cache_entries.append( + { + "package": app, + "user": entry.replace("\x00", "\\x00"), + "isodate": convert_mactime_to_iso(lookup_date), + "idstatus": id_status, + } + ) - entry_counter = collections.Counter([entry["user"] - for entry in - id_status_cache_entries]) + entry_counter = collections.Counter( + [entry["user"] for entry in id_status_cache_entries] + ) for entry in id_status_cache_entries: # Add total count of occurrences to the status cache entry. entry["occurrences"] = entry_counter[entry["user"]] self.results.append(entry) def run(self) -> None: - if self.is_backup: self._find_ios_database(backup_ids=IDSTATUSCACHE_BACKUP_IDS) - self.log.info("Found IDStatusCache plist at path: %s", - self.file_path) + self.log.info("Found IDStatusCache plist at path: %s", self.file_path) self._extract_idstatuscache_entries(self.file_path) elif self.is_fs_dump: for idstatuscache_path in self._get_fs_files_from_patterns( - IDSTATUSCACHE_ROOT_PATHS): + IDSTATUSCACHE_ROOT_PATHS + ): self.file_path = idstatuscache_path - self.log.info("Found IDStatusCache plist at path: %s", - self.file_path) + self.log.info("Found IDStatusCache plist at path: %s", self.file_path) self._extract_idstatuscache_entries(self.file_path) - self.log.info("Extracted a total of %d ID Status Cache entries", - len(self.results)) + self.log.info( + "Extracted a total of %d ID Status Cache entries", len(self.results) + ) diff --git a/mvt/ios/modules/mixed/interactionc.py b/mvt/ios/modules/mixed/interactionc.py index 0f223cc..d4c75df 100644 --- a/mvt/ios/modules/mixed/interactionc.py +++ b/mvt/ios/modules/mixed/interactionc.py @@ -209,7 +209,7 @@ QUERIES = [ LEFT JOIN ZCONTACTS ON ZINTERACTIONS.ZSENDER = ZCONTACTS.Z_PK - """ + """, ] @@ -221,13 +221,18 @@ class InteractionC(IOSExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) self.timestamps = [ "start_date", @@ -254,22 +259,25 @@ class InteractionC(IOSExtraction): if record[timestamp] in processed: continue - records.append({ - "timestamp": record[timestamp], - "module": self.__class__.__name__, - "event": timestamp, - "data": f"[{record['bundle_id']}] 
{record['account']} - " - f"from {record['sender_display_name']} ({record['sender_identifier']}) " - f"to {record.get('recipient_display_name', '')} ({record.get('recipient_identifier', '')}):" - f" {record.get('content', '')}" - }) + records.append( + { + "timestamp": record[timestamp], + "module": self.__class__.__name__, + "event": timestamp, + "data": f"[{record['bundle_id']}] {record['account']} - " + f"from {record['sender_display_name']} ({record['sender_identifier']}) " + f"to {record.get('recipient_display_name', '')} ({record.get('recipient_identifier', '')}):" + f" {record.get('content', '')}", + } + ) processed.append(record[timestamp]) return records def run(self) -> None: - self._find_ios_database(backup_ids=INTERACTIONC_BACKUP_IDS, - root_paths=INTERACTIONC_ROOT_PATHS) + self._find_ios_database( + backup_ids=INTERACTIONC_BACKUP_IDS, root_paths=INTERACTIONC_ROOT_PATHS + ) self.log.info("Found InteractionC database at path: %s", self.file_path) conn = sqlite3.connect(self.file_path) @@ -287,7 +295,9 @@ class InteractionC(IOSExtraction): try: cur.execute(QUERIES[3]) except sqlite3.OperationalError as e: - self.log.info("Error while reading the InteractionC table: %s", e) + self.log.info( + "Error while reading the InteractionC table: %s", e + ) return None names = [description[0] for description in cur.description] @@ -307,5 +317,4 @@ class InteractionC(IOSExtraction): cur.close() conn.close() - self.log.info("Extracted a total of %d InteractionC events", - len(self.results)) + self.log.info("Extracted a total of %d InteractionC events", len(self.results)) diff --git a/mvt/ios/modules/mixed/locationd.py b/mvt/ios/modules/mixed/locationd.py index a726eee..8af9753 100644 --- a/mvt/ios/modules/mixed/locationd.py +++ b/mvt/ios/modules/mixed/locationd.py @@ -16,7 +16,7 @@ LOCATIOND_BACKUP_IDS = [ ] LOCATIOND_ROOT_PATHS = [ "private/var/mobile/Library/Caches/locationd/clients.plist", - "private/var/root/Library/Caches/locationd/clients.plist" + "private/var/root/Library/Caches/locationd/clients.plist", ] @@ -28,13 +28,18 @@ class LocationdClients(IOSExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) self.timestamps = [ "ConsumptionPeriodBegin", @@ -52,12 +57,14 @@ class LocationdClients(IOSExtraction): records = [] for timestamp in self.timestamps: if timestamp in record.keys(): - records.append({ - "timestamp": record[timestamp], - "module": self.__class__.__name__, - "event": timestamp, - "data": f"{timestamp} from {record['package']}" - }) + records.append( + { + "timestamp": record[timestamp], + "module": self.__class__.__name__, + "event": timestamp, + "data": f"{timestamp} from {record['package']}", + } + ) return records @@ -67,12 +74,14 @@ class LocationdClients(IOSExtraction): for result in self.results: parts = result["package"].split("/") - proc_name = parts[len(parts)-1] + proc_name = parts[len(parts) - 1] ioc = self.indicators.check_process(proc_name) if ioc: - self.log.warning("Found a suspicious process name in LocationD 
entry %s", - result["package"]) + self.log.warning( + "Found a suspicious process name in LocationD entry %s", + result["package"], + ) result["matched_indicator"] = ioc self.detected.append(result) continue @@ -80,8 +89,10 @@ class LocationdClients(IOSExtraction): if "BundlePath" in result: ioc = self.indicators.check_file_path(result["BundlePath"]) if ioc: - self.log.warning("Found a suspicious file path in Location D: %s", - result["BundlePath"]) + self.log.warning( + "Found a suspicious file path in Location D: %s", + result["BundlePath"], + ) result["matched_indicator"] = ioc self.detected.append(result) continue @@ -89,8 +100,10 @@ class LocationdClients(IOSExtraction): if "Executable" in result: ioc = self.indicators.check_file_path(result["Executable"]) if ioc: - self.log.warning("Found a suspicious file path in Location D: %s", - result["Executable"]) + self.log.warning( + "Found a suspicious file path in Location D: %s", + result["Executable"], + ) result["matched_indicator"] = ioc self.detected.append(result) continue @@ -98,8 +111,10 @@ class LocationdClients(IOSExtraction): if "Registered" in result: ioc = self.indicators.check_file_path(result["Registered"]) if ioc: - self.log.warning("Found a suspicious file path in Location D: %s", - result["Registered"]) + self.log.warning( + "Found a suspicious file path in Location D: %s", + result["Registered"], + ) result["matched_indicator"] = ioc self.detected.append(result) continue @@ -113,24 +128,25 @@ class LocationdClients(IOSExtraction): result["package"] = key for timestamp in self.timestamps: if timestamp in result.keys(): - result[timestamp] = convert_mactime_to_iso( - result[timestamp]) + result[timestamp] = convert_mactime_to_iso(result[timestamp]) self.results.append(result) def run(self) -> None: if self.is_backup: self._find_ios_database(backup_ids=LOCATIOND_BACKUP_IDS) - self.log.info("Found Locationd Clients plist at path: %s", - self.file_path) + self.log.info("Found Locationd Clients plist at path: %s", self.file_path) self._extract_locationd_entries(self.file_path) elif self.is_fs_dump: for locationd_path in self._get_fs_files_from_patterns( - LOCATIOND_ROOT_PATHS): + LOCATIOND_ROOT_PATHS + ): self.file_path = locationd_path - self.log.info("Found Locationd Clients plist at path: %s", - self.file_path) + self.log.info( + "Found Locationd Clients plist at path: %s", self.file_path + ) self._extract_locationd_entries(self.file_path) - self.log.info("Extracted a total of %d Locationd Clients entries", - len(self.results)) + self.log.info( + "Extracted a total of %d Locationd Clients entries", len(self.results) + ) diff --git a/mvt/ios/modules/mixed/net_datausage.py b/mvt/ios/modules/mixed/net_datausage.py index 84f3a40..60526bf 100644 --- a/mvt/ios/modules/mixed/net_datausage.py +++ b/mvt/ios/modules/mixed/net_datausage.py @@ -28,17 +28,23 @@ class Datausage(NetBase): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def run(self) -> None: - 
self._find_ios_database(backup_ids=DATAUSAGE_BACKUP_IDS, - root_paths=DATAUSAGE_ROOT_PATHS) + self._find_ios_database( + backup_ids=DATAUSAGE_BACKUP_IDS, root_paths=DATAUSAGE_ROOT_PATHS + ) self.log.info("Found DataUsage database at path: %s", self.file_path) self._extract_net_data() diff --git a/mvt/ios/modules/mixed/osanalytics_addaily.py b/mvt/ios/modules/mixed/osanalytics_addaily.py index 464c4c1..7800a5f 100644 --- a/mvt/ios/modules/mixed/osanalytics_addaily.py +++ b/mvt/ios/modules/mixed/osanalytics_addaily.py @@ -28,13 +28,18 @@ class OSAnalyticsADDaily(IOSExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: return { @@ -42,9 +47,9 @@ class OSAnalyticsADDaily(IOSExtraction): "module": self.__class__.__name__, "event": "osanalytics_addaily", "data": f"{record['package']} WIFI IN: {record['wifi_in']}, " - f"WIFI OUT: {record['wifi_out']} - " - f"WWAN IN: {record['wwan_in']}, " - f"WWAN OUT: {record['wwan_out']}", + f"WIFI OUT: {record['wifi_out']} - " + f"WWAN IN: {record['wwan_in']}, " + f"WWAN OUT: {record['wwan_out']}", } def check_indicators(self) -> None: @@ -58,23 +63,30 @@ class OSAnalyticsADDaily(IOSExtraction): self.detected.append(result) def run(self) -> None: - self._find_ios_database(backup_ids=OSANALYTICS_ADDAILY_BACKUP_IDS, - root_paths=OSANALYTICS_ADDAILY_ROOT_PATHS) - self.log.info("Found com.apple.osanalytics.addaily plist at path: %s", - self.file_path) + self._find_ios_database( + backup_ids=OSANALYTICS_ADDAILY_BACKUP_IDS, + root_paths=OSANALYTICS_ADDAILY_ROOT_PATHS, + ) + self.log.info( + "Found com.apple.osanalytics.addaily plist at path: %s", self.file_path + ) with open(self.file_path, "rb") as handle: file_plist = plistlib.load(handle) for app, values in file_plist.get("netUsageBaseline", {}).items(): - self.results.append({ - "package": app, - "ts": convert_datetime_to_iso(values[0]), - "wifi_in": values[1], - "wifi_out": values[2], - "wwan_in": values[3], - "wwan_out": values[4], - }) + self.results.append( + { + "package": app, + "ts": convert_datetime_to_iso(values[0]), + "wifi_in": values[1], + "wifi_out": values[2], + "wwan_in": values[3], + "wwan_out": values[4], + } + ) - self.log.info("Extracted a total of %d com.apple.osanalytics.addaily entries", - len(self.results)) + self.log.info( + "Extracted a total of %d com.apple.osanalytics.addaily entries", + len(self.results), + ) diff --git a/mvt/ios/modules/mixed/safari_browserstate.py b/mvt/ios/modules/mixed/safari_browserstate.py index 0b509ac..ffb26b9 100644 --- a/mvt/ios/modules/mixed/safari_browserstate.py +++ b/mvt/ios/modules/mixed/safari_browserstate.py @@ -29,13 +29,18 @@ class SafariBrowserState(IOSExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) 
-> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) self._session_history_count = 0 @@ -44,7 +49,7 @@ class SafariBrowserState(IOSExtraction): "timestamp": record["last_viewed_timestamp"], "module": self.__class__.__name__, "event": "tab", - "data": f"{record['tab_title']} - {record['tab_url']}" + "data": f"{record['tab_title']} - {record['tab_url']}", } def check_indicators(self) -> None: @@ -75,7 +80,8 @@ class SafariBrowserState(IOSExtraction): cur = conn.cursor() try: - cur.execute(""" + cur.execute( + """ SELECT tabs.title, tabs.url, @@ -85,15 +91,18 @@ class SafariBrowserState(IOSExtraction): FROM tabs JOIN tab_sessions ON tabs.uuid = tab_sessions.tab_uuid ORDER BY tabs.last_viewed_time; - """) + """ + ) except sqlite3.OperationalError: # Old version iOS <12 likely - cur.execute(""" + cur.execute( + """ SELECT title, url, user_visible_url, last_viewed_time, session_data FROM tabs ORDER BY last_viewed_time; - """) + """ + ) for row in cur: session_entries = [] @@ -110,48 +119,68 @@ class SafariBrowserState(IOSExtraction): if "SessionHistoryEntries" in session_data.get("SessionHistory", {}): for session_entry in session_data["SessionHistory"].get( - "SessionHistoryEntries"): + "SessionHistoryEntries" + ): self._session_history_count += 1 data_length = 0 if "SessionHistoryEntryData" in session_entry: - data_length = len(session_entry.get("SessionHistoryEntryData")) + data_length = len( + session_entry.get("SessionHistoryEntryData") + ) - session_entries.append({ - "entry_title": session_entry.get("SessionHistoryEntryOriginalURL"), - "entry_url": session_entry.get("SessionHistoryEntryURL"), - "data_length": data_length, - }) + session_entries.append( + { + "entry_title": session_entry.get( + "SessionHistoryEntryOriginalURL" + ), + "entry_url": session_entry.get( + "SessionHistoryEntryURL" + ), + "data_length": data_length, + } + ) - self.results.append({ - "tab_title": row[0], - "tab_url": row[1], - "tab_visible_url": row[2], - "last_viewed_timestamp": convert_mactime_to_iso(row[3]), - "session_data": session_entries, - "safari_browser_state_db": os.path.relpath(db_path, - self.target_path), - }) + self.results.append( + { + "tab_title": row[0], + "tab_url": row[1], + "tab_visible_url": row[2], + "last_viewed_timestamp": convert_mactime_to_iso(row[3]), + "session_data": session_entries, + "safari_browser_state_db": os.path.relpath( + db_path, self.target_path + ), + } + ) def run(self) -> None: if self.is_backup: for backup_file in self._get_backup_files_from_manifest( - relative_path=SAFARI_BROWSER_STATE_BACKUP_RELPATH): + relative_path=SAFARI_BROWSER_STATE_BACKUP_RELPATH + ): browserstate_path = self._get_backup_file_from_id( - backup_file["file_id"]) + backup_file["file_id"] + ) if not browserstate_path: continue - self.log.info("Found Safari browser state database at path: %s", - browserstate_path) + self.log.info( + "Found Safari browser state database at path: %s", browserstate_path + ) self._process_browser_state_db(browserstate_path) elif self.is_fs_dump: for browserstate_path in self._get_fs_files_from_patterns( - SAFARI_BROWSER_STATE_ROOT_PATHS): - self.log.info("Found Safari browser state database at path: %s", - browserstate_path) + SAFARI_BROWSER_STATE_ROOT_PATHS + ): + self.log.info( + "Found Safari browser state database 
at path: %s", browserstate_path + ) self._process_browser_state_db(browserstate_path) - self.log.info("Extracted a total of %d tab records and %d session history entries", - len(self.results), self._session_history_count) + self.log.info( + "Extracted a total of %d tab records and %d session history entries", + len(self.results), + self._session_history_count, + ) diff --git a/mvt/ios/modules/mixed/safari_history.py b/mvt/ios/modules/mixed/safari_history.py index 9958827..51e0ee5 100644 --- a/mvt/ios/modules/mixed/safari_history.py +++ b/mvt/ios/modules/mixed/safari_history.py @@ -9,8 +9,7 @@ import sqlite3 from typing import Optional, Union from mvt.common.url import URL -from mvt.common.utils import (convert_mactime_to_datetime, - convert_mactime_to_iso) +from mvt.common.utils import convert_mactime_to_datetime, convert_mactime_to_iso from ..base import IOSExtraction @@ -33,13 +32,18 @@ class SafariHistory(IOSExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: return { @@ -47,7 +51,7 @@ class SafariHistory(IOSExtraction): "module": self.__class__.__name__, "event": "safari_history", "data": f"Safari visit to {record['url']} (ID: {record['id']}, " - f"Visit ID: {record['visit_id']})", + f"Visit ID: {record['visit_id']})", } def _find_injections(self): @@ -80,8 +84,11 @@ class SafariHistory(IOSExtraction): if origin_domain == redirect_domain: continue - self.log.info("Found HTTP redirect to different domain: \"%s\" -> \"%s\"", - origin_domain, redirect_domain) + self.log.info( + 'Found HTTP redirect to different domain: "%s" -> "%s"', + origin_domain, + redirect_domain, + ) redirect_time = convert_mactime_to_datetime(redirect["timestamp"]) origin_time = convert_mactime_to_datetime(result["timestamp"]) @@ -89,8 +96,10 @@ class SafariHistory(IOSExtraction): elapsed_ms = elapsed_time.microseconds / 1000 if elapsed_time.seconds == 0: - self.log.warning("Redirect took less than a second! (%d milliseconds)", - elapsed_ms) + self.log.warning( + "Redirect took less than a second! 
(%d milliseconds)", + elapsed_ms, + ) def check_indicators(self) -> None: self._find_injections() @@ -108,7 +117,8 @@ class SafariHistory(IOSExtraction): self._recover_sqlite_db_if_needed(history_path) conn = sqlite3.connect(history_path) cur = conn.cursor() - cur.execute(""" + cur.execute( + """ SELECT history_items.id, history_items.url, @@ -119,20 +129,24 @@ class SafariHistory(IOSExtraction): FROM history_items JOIN history_visits ON history_visits.history_item = history_items.id ORDER BY history_visits.visit_time; - """) + """ + ) for row in cur: - self.results.append({ - "id": row[0], - "url": row[1], - "visit_id": row[2], - "timestamp": row[3], - "isodate": convert_mactime_to_iso(row[3]), - "redirect_source": row[4], - "redirect_destination": row[5], - "safari_history_db": os.path.relpath(history_path, - self.target_path), - }) + self.results.append( + { + "id": row[0], + "url": row[1], + "visit_id": row[2], + "timestamp": row[3], + "isodate": convert_mactime_to_iso(row[3]), + "redirect_source": row[4], + "redirect_destination": row[5], + "safari_history_db": os.path.relpath( + history_path, self.target_path + ), + } + ) cur.close() conn.close() @@ -140,23 +154,21 @@ class SafariHistory(IOSExtraction): def run(self) -> None: if self.is_backup: for history_file in self._get_backup_files_from_manifest( - relative_path=SAFARI_HISTORY_BACKUP_RELPATH): - history_path = self._get_backup_file_from_id( - history_file["file_id"]) + relative_path=SAFARI_HISTORY_BACKUP_RELPATH + ): + history_path = self._get_backup_file_from_id(history_file["file_id"]) if not history_path: continue - self.log.info("Found Safari history database at path: %s", - history_path) + self.log.info("Found Safari history database at path: %s", history_path) self._process_history_db(history_path) elif self.is_fs_dump: for history_path in self._get_fs_files_from_patterns( - SAFARI_HISTORY_ROOT_PATHS): - self.log.info("Found Safari history database at path: %s", - history_path) + SAFARI_HISTORY_ROOT_PATHS + ): + self.log.info("Found Safari history database at path: %s", history_path) self._process_history_db(history_path) - self.log.info("Extracted a total of %d history records", - len(self.results)) + self.log.info("Extracted a total of %d history records", len(self.results)) diff --git a/mvt/ios/modules/mixed/shortcuts.py b/mvt/ios/modules/mixed/shortcuts.py index e8501e3..6655d3f 100644 --- a/mvt/ios/modules/mixed/shortcuts.py +++ b/mvt/ios/modules/mixed/shortcuts.py @@ -30,13 +30,18 @@ class Shortcuts(IOSExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: found_urls = "" @@ -47,17 +52,20 @@ class Shortcuts(IOSExtraction): if record["description"]: desc = record["description"].decode("utf-8", errors="ignore") - return [{ - "timestamp": record["isodate"], - "module": self.__class__.__name__, - "event": "shortcut_created", - "data": f"iOS Shortcut '{record['shortcut_name'].decode('utf-8')}': {desc} {found_urls}" - }, { - 
"timestamp": record["modified_date"], - "module": self.__class__.__name__, - "event": "shortcut_modified", - "data": f"iOS Shortcut '{record['shortcut_name'].decode('utf-8')}': {desc} {found_urls}" - }] + return [ + { + "timestamp": record["isodate"], + "module": self.__class__.__name__, + "event": "shortcut_created", + "data": f"iOS Shortcut '{record['shortcut_name'].decode('utf-8')}': {desc} {found_urls}", + }, + { + "timestamp": record["modified_date"], + "module": self.__class__.__name__, + "event": "shortcut_modified", + "data": f"iOS Shortcut '{record['shortcut_name'].decode('utf-8')}': {desc} {found_urls}", + }, + ] def check_indicators(self) -> None: if not self.indicators: @@ -70,15 +78,17 @@ class Shortcuts(IOSExtraction): self.detected.append(result) def run(self) -> None: - self._find_ios_database(backup_ids=SHORTCUT_BACKUP_IDS, - root_paths=SHORTCUT_ROOT_PATHS) + self._find_ios_database( + backup_ids=SHORTCUT_BACKUP_IDS, root_paths=SHORTCUT_ROOT_PATHS + ) self.log.info("Found Shortcuts database at path: %s", self.file_path) conn = sqlite3.connect(self.file_path) conn.text_factory = bytes cur = conn.cursor() try: - cur.execute(""" + cur.execute( + """ SELECT ZSHORTCUT.Z_PK as "shortcut_id", ZSHORTCUT.ZNAME as "shortcut_name", @@ -88,9 +98,10 @@ class Shortcuts(IOSExtraction): ZSHORTCUTACTIONS.ZDATA as "action_data" FROM ZSHORTCUT LEFT JOIN ZSHORTCUTACTIONS ON ZSHORTCUTACTIONS.ZSHORTCUT == ZSHORTCUT.Z_PK; - """) + """ + ) except sqlite3.OperationalError: - # Table ZSHORTCUT does not exist + # Table ZSHORTCUT does not exist self.log.info("Invalid shortcut database format, skipping...") cur.close() conn.close() @@ -105,8 +116,7 @@ class Shortcuts(IOSExtraction): shortcut[names[index]] = value try: - action_data = plistlib.load(io.BytesIO( - shortcut.pop("action_data", []))) + action_data = plistlib.load(io.BytesIO(shortcut.pop("action_data", []))) actions = [] for action_entry in action_data: action = {} @@ -122,15 +132,18 @@ class Shortcuts(IOSExtraction): action["urls"] = [url.rstrip("',") for url in extracted_urls] actions.append(action) shortcut["parsed_actions"] = len(actions) - shortcut["action_urls"] = list(itertools.chain( - *[action["urls"] for action in actions])) + shortcut["action_urls"] = list( + itertools.chain(*[action["urls"] for action in actions]) + ) except plistlib.InvalidFileException: self.log.debug("Shortcut without action data") shortcut["action_urls"] = None shortcut["parsed_actions"] = 0 shortcut["isodate"] = convert_mactime_to_iso(shortcut.pop("created_date")) - shortcut["modified_date"] = convert_mactime_to_iso(shortcut["modified_date"]) + shortcut["modified_date"] = convert_mactime_to_iso( + shortcut["modified_date"] + ) self.results.append(shortcut) cur.close() diff --git a/mvt/ios/modules/mixed/sms.py b/mvt/ios/modules/mixed/sms.py index c4b4699..78be325 100644 --- a/mvt/ios/modules/mixed/sms.py +++ b/mvt/ios/modules/mixed/sms.py @@ -28,13 +28,18 @@ class SMS(IOSExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + 
results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: text = record["text"].replace("\n", "\\n") @@ -43,7 +48,7 @@ class SMS(IOSExtraction): "module": self.__class__.__name__, "event": "sms_received", "data": f"{record['service']}: {record['guid']} \"{text}\" " - f"from {record['phone_number']} ({record['account']})" + f"from {record['phone_number']} ({record['account']})", } def check_indicators(self) -> None: @@ -61,20 +66,21 @@ class SMS(IOSExtraction): self.detected.append(result) def run(self) -> None: - self._find_ios_database(backup_ids=SMS_BACKUP_IDS, - root_paths=SMS_ROOT_PATHS) + self._find_ios_database(backup_ids=SMS_BACKUP_IDS, root_paths=SMS_ROOT_PATHS) self.log.info("Found SMS database at path: %s", self.file_path) try: conn = sqlite3.connect(self.file_path) cur = conn.cursor() - cur.execute(""" + cur.execute( + """ SELECT message.*, handle.id as "phone_number" FROM message, handle WHERE handle.rowid = message.handle_id; - """) + """ + ) # Force the query early to catch database issues items = list(cur) except sqlite3.DatabaseError as exc: @@ -83,13 +89,15 @@ class SMS(IOSExtraction): self._recover_sqlite_db_if_needed(self.file_path, forced=True) conn = sqlite3.connect(self.file_path) cur = conn.cursor() - cur.execute(""" + cur.execute( + """ SELECT message.*, handle.id as "phone_number" FROM message, handle WHERE handle.rowid = message.handle_id; - """) + """ + ) items = list(cur) else: raise exc @@ -100,9 +108,11 @@ class SMS(IOSExtraction): for index, value in enumerate(item): # We base64 escape some of the attributes that could contain # binary data. - if (names[index] == "attributedBody" - or names[index] == "payload_data" - or names[index] == "message_summary_info") and value: + if ( + names[index] == "attributedBody" + or names[index] == "payload_data" + or names[index] == "message_summary_info" + ) and value: value = b64encode(value).decode() # We store the value of each column under the proper key. @@ -110,8 +120,9 @@ class SMS(IOSExtraction): # We convert Mac's ridiculous timestamp format. message["isodate"] = convert_mactime_to_iso(message["date"]) - message["direction"] = ("sent" if message.get("is_from_me", 0) == 1 - else "received") + message["direction"] = ( + "sent" if message.get("is_from_me", 0) == 1 else "received" + ) # Sometimes "text" is None instead of empty string. if not message.get("text", None): @@ -119,8 +130,10 @@ class SMS(IOSExtraction): alert = "ALERT: State-sponsored attackers may be targeting your iPhone" if message.get("text", "").startswith(alert): - self.log.warning("Apple warning about state-sponsored attack received on the %s", - message["isodate"]) + self.log.warning( + "Apple warning about state-sponsored attack received on the %s", + message["isodate"], + ) else: # Extract links from the SMS message. 
message_links = check_for_links(message.get("text", "")) @@ -131,5 +144,4 @@ class SMS(IOSExtraction): cur.close() conn.close() - self.log.info("Extracted a total of %d SMS messages", - len(self.results)) + self.log.info("Extracted a total of %d SMS messages", len(self.results)) diff --git a/mvt/ios/modules/mixed/sms_attachments.py b/mvt/ios/modules/mixed/sms_attachments.py index 492e1b5..0be6046 100644 --- a/mvt/ios/modules/mixed/sms_attachments.py +++ b/mvt/ios/modules/mixed/sms_attachments.py @@ -28,13 +28,18 @@ class SMSAttachments(IOSExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: return { @@ -42,21 +47,21 @@ class SMSAttachments(IOSExtraction): "module": self.__class__.__name__, "event": "sms_attachment", "data": f"{record['service']}: Attachment " - f"'{record['transfer_name']}' {record['direction']} " - f"from {record['phone_number']} " - f"with {record['total_bytes']} bytes " - f"(is_sticker: {record['is_sticker']}, " - f"has_user_info: {record['has_user_info']})" + f"'{record['transfer_name']}' {record['direction']} " + f"from {record['phone_number']} " + f"with {record['total_bytes']} bytes " + f"(is_sticker: {record['is_sticker']}, " + f"has_user_info: {record['has_user_info']})", } def run(self) -> None: - self._find_ios_database(backup_ids=SMS_BACKUP_IDS, - root_paths=SMS_ROOT_PATHS) + self._find_ios_database(backup_ids=SMS_BACKUP_IDS, root_paths=SMS_ROOT_PATHS) self.log.info("Found SMS database at path: %s", self.file_path) conn = sqlite3.connect(self.file_path) cur = conn.cursor() - cur.execute(""" + cur.execute( + """ SELECT attachment.ROWID as "attachment_id", attachment.*, @@ -68,33 +73,45 @@ class SMSAttachments(IOSExtraction): LEFT JOIN message ON message.ROWID = message_attachment_join.message_id LEFT JOIN handle ON handle.ROWID = message.handle_id; - """) + """ + ) names = [description[0] for description in cur.description] for item in cur: attachment = {} for index, value in enumerate(item): - if (names[index] in ["user_info", "sticker_user_info", - "attribution_info", - "ck_server_change_token_blob", - "sr_ck_server_change_token_blob"]) and value: + if ( + names[index] + in [ + "user_info", + "sticker_user_info", + "attribution_info", + "ck_server_change_token_blob", + "sr_ck_server_change_token_blob", + ] + ) and value: value = b64encode(value).decode() attachment[names[index]] = value - attachment["isodate"] = convert_mactime_to_iso( - attachment["created_date"]) - attachment["start_date"] = convert_mactime_to_iso( - attachment["start_date"]) - attachment["direction"] = ("sent" if attachment["is_outgoing"] == 1 else "received") + attachment["isodate"] = convert_mactime_to_iso(attachment["created_date"]) + attachment["start_date"] = convert_mactime_to_iso(attachment["start_date"]) + attachment["direction"] = ( + "sent" if attachment["is_outgoing"] == 1 else "received" + ) attachment["has_user_info"] = attachment["user_info"] is not None attachment["service"] 
= attachment["service"] or "Unknown" attachment["filename"] = attachment["filename"] or "NULL" - if (attachment["filename"].startswith("/var/tmp/") - and attachment["filename"].endswith("-1") - and attachment["direction"] == "received"): - self.log.warning("Suspicious iMessage attachment %s on %s", - attachment['filename'], attachment['isodate']) + if ( + attachment["filename"].startswith("/var/tmp/") + and attachment["filename"].endswith("-1") + and attachment["direction"] == "received" + ): + self.log.warning( + "Suspicious iMessage attachment %s on %s", + attachment["filename"], + attachment["isodate"], + ) self.detected.append(attachment) self.results.append(attachment) @@ -102,5 +119,4 @@ class SMSAttachments(IOSExtraction): cur.close() conn.close() - self.log.info("Extracted a total of %d SMS attachments", - len(self.results)) + self.log.info("Extracted a total of %d SMS attachments", len(self.results)) diff --git a/mvt/ios/modules/mixed/tcc.py b/mvt/ios/modules/mixed/tcc.py index 65e45ca..1212f75 100644 --- a/mvt/ios/modules/mixed/tcc.py +++ b/mvt/ios/modules/mixed/tcc.py @@ -18,10 +18,7 @@ TCC_ROOT_PATHS = [ "private/var/mobile/Library/TCC/TCC.db", ] -AUTH_VALUE_OLD = { - 0: "denied", - 1: "allowed" -} +AUTH_VALUE_OLD = {0: "denied", 1: "allowed"} AUTH_VALUES = { 0: "denied", 1: "unknown", @@ -52,28 +49,37 @@ class TCC(IOSExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: if "last_modified" in record: if "allowed_value" in record: - msg = (f"Access to {record['service']} by {record['client']} " - f"{record['allowed_value']}") + msg = ( + f"Access to {record['service']} by {record['client']} " + f"{record['allowed_value']}" + ) else: - msg = (f"Access to {record['service']} by {record['client']} " - f"{record['auth_value']}") + msg = ( + f"Access to {record['service']} by {record['client']} " + f"{record['auth_value']}" + ) return { "timestamp": record["last_modified"], "module": self.__class__.__name__, "event": "AccessRequest", - "data": msg + "data": msg, } return {} @@ -93,31 +99,36 @@ class TCC(IOSExtraction): cur = conn.cursor() db_version = "v3" try: - cur.execute("""SELECT + cur.execute( + """SELECT service, client, client_type, auth_value, auth_reason, last_modified - FROM access;""") + FROM access;""" + ) except sqlite3.OperationalError: # v2 version try: - cur.execute("""SELECT + cur.execute( + """SELECT service, client, client_type, allowed, prompt_count, last_modified - FROM access;""") + FROM access;""" + ) db_version = "v2" except sqlite3.OperationalError: - cur.execute("""SELECT + cur.execute( + """SELECT service, client, client_type, allowed, prompt_count - FROM access;""") + FROM access;""" + ) db_version = "v1" for row in cur: service = row[0] client = row[1] client_type = row[2] - client_type_desc = ("bundle_id" if client_type == 0 - else "absolute_path") + client_type_desc = "bundle_id" if client_type == 0 else "absolute_path" if db_version == "v3": auth_value 
= row[3] auth_value_desc = AUTH_VALUES.get(auth_value, "") @@ -126,19 +137,28 @@ class TCC(IOSExtraction): last_modified = convert_unix_to_iso(row[5]) if service in ["kTCCServiceMicrophone", "kTCCServiceCamera"]: - device = "microphone" if service == "kTCCServiceMicrophone" else "camera" - self.log.info("Found client \"%s\" with access %s to %s on %s by %s", - client, auth_value_desc, device, - last_modified, auth_reason_desc) + device = ( + "microphone" if service == "kTCCServiceMicrophone" else "camera" + ) + self.log.info( + 'Found client "%s" with access %s to %s on %s by %s', + client, + auth_value_desc, + device, + last_modified, + auth_reason_desc, + ) - self.results.append({ - "service": service, - "client": client, - "client_type": client_type_desc, - "auth_value": auth_value_desc, - "auth_reason_desc": auth_reason_desc, - "last_modified": last_modified, - }) + self.results.append( + { + "service": service, + "client": client, + "client_type": client_type_desc, + "auth_value": auth_value_desc, + "auth_reason_desc": auth_reason_desc, + "last_modified": last_modified, + } + ) else: allowed_value = row[3] allowed_desc = AUTH_VALUE_OLD.get(allowed_value, "") @@ -151,41 +171,52 @@ class TCC(IOSExtraction): if service == "kTCCServiceMicrophone": device = "microphone" - self.log.info("Found client \"%s\" with access %s to %s at %s", - client, allowed_desc, device, - last_modified) + self.log.info( + 'Found client "%s" with access %s to %s at %s', + client, + allowed_desc, + device, + last_modified, + ) - self.results.append({ - "service": service, - "client": client, - "client_type": client_type_desc, - "allowed_value": allowed_desc, - "prompt_count": prompt_count, - "last_modified": last_modified - }) + self.results.append( + { + "service": service, + "client": client, + "client_type": client_type_desc, + "allowed_value": allowed_desc, + "prompt_count": prompt_count, + "last_modified": last_modified, + } + ) else: if service in ["kTCCServiceMicrophone", "kTCCServiceCamera"]: device = "camera" if service == "kTCCServiceMicrophone": device = "microphone" - self.log.info("Found client \"%s\" with access %s to %s", - client, allowed_desc, device) + self.log.info( + 'Found client "%s" with access %s to %s', + client, + allowed_desc, + device, + ) - self.results.append({ - "service": service, - "client": client, - "client_type": client_type_desc, - "allowed_value": allowed_desc, - "prompt_count": prompt_count - }) + self.results.append( + { + "service": service, + "client": client, + "client_type": client_type_desc, + "allowed_value": allowed_desc, + "prompt_count": prompt_count, + } + ) cur.close() conn.close() def run(self) -> None: - self._find_ios_database(backup_ids=TCC_BACKUP_IDS, - root_paths=TCC_ROOT_PATHS) + self._find_ios_database(backup_ids=TCC_BACKUP_IDS, root_paths=TCC_ROOT_PATHS) self.log.info("Found TCC database at path: %s", self.file_path) self.process_db(self.file_path) diff --git a/mvt/ios/modules/mixed/webkit_resource_load_statistics.py b/mvt/ios/modules/mixed/webkit_resource_load_statistics.py index d4371d8..37d8967 100644 --- a/mvt/ios/modules/mixed/webkit_resource_load_statistics.py +++ b/mvt/ios/modules/mixed/webkit_resource_load_statistics.py @@ -28,13 +28,18 @@ class WebkitResourceLoadStatistics(IOSExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + 
results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) self.results = [] if not results else results @@ -46,7 +51,7 @@ class WebkitResourceLoadStatistics(IOSExtraction): "timestamp": record["last_seen_isodate"], "module": self.__class__.__name__, "event": "visit", - "data": msg + "data": msg, } def check_indicators(self) -> None: @@ -61,8 +66,10 @@ class WebkitResourceLoadStatistics(IOSExtraction): self.detected.append(result) def _process_observations_db(self, db_path: str, domain: str, path: str) -> None: - self.log.info("Found WebKit ResourceLoadStatistics observations.db file at path %s", - db_path) + self.log.info( + "Found WebKit ResourceLoadStatistics observations.db file at path %s", + db_path, + ) self._recover_sqlite_db_if_needed(db_path) @@ -70,46 +77,59 @@ class WebkitResourceLoadStatistics(IOSExtraction): cur = conn.cursor() try: - # FIXME: table contains extra fields with timestamp here - cur.execute(""" + # FIXME: table contains extra fields with timestamp here + cur.execute( + """ SELECT domainID, registrableDomain, lastSeen, hadUserInteraction from ObservedDomains; - """) + """ + ) except sqlite3.OperationalError: return for row in cur: - self.results.append({ - "domain_id": row[0], - "registrable_domain": row[1], - "last_seen": row[2], - "had_user_interaction": bool(row[3]), - "last_seen_isodate": convert_unix_to_iso(row[2]), - "domain": domain, - "path": path - }) + self.results.append( + { + "domain_id": row[0], + "registrable_domain": row[1], + "last_seen": row[2], + "had_user_interaction": bool(row[3]), + "last_seen_isodate": convert_unix_to_iso(row[2]), + "domain": domain, + "path": path, + } + ) if len(self.results) > 0: - self.log.info("Extracted a total of %d records from %s", - len(self.results), db_path) + self.log.info( + "Extracted a total of %d records from %s", len(self.results), db_path + ) def run(self) -> None: if self.is_backup: try: for backup_file in self._get_backup_files_from_manifest( - relative_path=WEBKIT_RESOURCELOADSTATICS_BACKUP_RELPATH): + relative_path=WEBKIT_RESOURCELOADSTATICS_BACKUP_RELPATH + ): db_path = self._get_backup_file_from_id(backup_file["file_id"]) if db_path: - self._process_observations_db(db_path=db_path, domain=backup_file['domain'], path=WEBKIT_RESOURCELOADSTATICS_BACKUP_RELPATH) + self._process_observations_db( + db_path=db_path, + domain=backup_file["domain"], + path=WEBKIT_RESOURCELOADSTATICS_BACKUP_RELPATH, + ) except Exception as exc: self.log.info("Unable to find WebKit observations.db: %s", exc) elif self.is_fs_dump: for db_path in self._get_fs_files_from_patterns( - WEBKIT_RESOURCELOADSTATICS_ROOT_PATHS): + WEBKIT_RESOURCELOADSTATICS_ROOT_PATHS + ): db_rel_path = os.path.relpath(db_path, self.target_path) - self._process_observations_db(db_path=db_path, domain="", path=db_rel_path) + self._process_observations_db( + db_path=db_path, domain="", path=db_rel_path + ) diff --git a/mvt/ios/modules/mixed/webkit_session_resource_log.py b/mvt/ios/modules/mixed/webkit_session_resource_log.py index 160b8d5..96b7cec 100644 --- a/mvt/ios/modules/mixed/webkit_session_resource_log.py +++ b/mvt/ios/modules/mixed/webkit_session_resource_log.py @@ -36,13 +36,18 @@ class WebkitSessionResourceLog(IOSExtraction): file_path: Optional[str] = None, 
target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) self.results = {} if not results else results @@ -68,7 +73,8 @@ class WebkitSessionResourceLog(IOSExtraction): for entry in entries: source_domains = self._extract_domains(entry["redirect_source"]) destination_domains = self._extract_domains( - entry["redirect_destination"]) + entry["redirect_destination"] + ) # TODO: Currently not used. # subframe_origins = self._extract_domains( @@ -77,9 +83,7 @@ class WebkitSessionResourceLog(IOSExtraction): # entry["subresource_under_origin"]) all_origins = set( - [entry["origin"]] - + source_domains - + destination_domains + [entry["origin"]] + source_domains + destination_domains ) ioc = self.indicators.check_domains(all_origins) @@ -91,7 +95,7 @@ class WebkitSessionResourceLog(IOSExtraction): if len(source_domains) > 0: redirect_path += "SOURCE: " for idx, item in enumerate(source_domains): - source_domains[idx] = f"\"{item}\"" + source_domains[idx] = f'"{item}"' redirect_path += ", ".join(source_domains) redirect_path += " -> " @@ -102,12 +106,14 @@ class WebkitSessionResourceLog(IOSExtraction): redirect_path += " -> " redirect_path += "DESTINATION: " for idx, item in enumerate(destination_domains): - destination_domains[idx] = f"\"{item}\"" + destination_domains[idx] = f'"{item}"' redirect_path += ", ".join(destination_domains) - self.log.warning("Found HTTP redirect between suspicious domains: %s", - redirect_path) + self.log.warning( + "Found HTTP redirect between suspicious domains: %s", + redirect_path, + ) def _extract_browsing_stats(self, log_path): items = [] @@ -121,39 +127,52 @@ class WebkitSessionResourceLog(IOSExtraction): browsing_stats = file_plist["browsingStatistics"] for item in browsing_stats: - items.append({ - "origin": item.get("PrevalentResourceOrigin", ""), - "redirect_source": item.get("topFrameUniqueRedirectsFrom", ""), - "redirect_destination": item.get("topFrameUniqueRedirectsTo", ""), - "subframe_under_origin": item.get("subframeUnderTopFrameOrigins", ""), - "subresource_under_origin": item.get("subresourceUnderTopFrameOrigins", ""), - "user_interaction": item.get("hadUserInteraction"), - "most_recent_interaction": convert_datetime_to_iso( - item["mostRecentUserInteraction"]), - "last_seen": convert_datetime_to_iso(item["lastSeen"]), - }) + items.append( + { + "origin": item.get("PrevalentResourceOrigin", ""), + "redirect_source": item.get("topFrameUniqueRedirectsFrom", ""), + "redirect_destination": item.get("topFrameUniqueRedirectsTo", ""), + "subframe_under_origin": item.get( + "subframeUnderTopFrameOrigins", "" + ), + "subresource_under_origin": item.get( + "subresourceUnderTopFrameOrigins", "" + ), + "user_interaction": item.get("hadUserInteraction"), + "most_recent_interaction": convert_datetime_to_iso( + item["mostRecentUserInteraction"] + ), + "last_seen": convert_datetime_to_iso(item["lastSeen"]), + } + ) return items def run(self) -> None: if self.is_backup: for log_file in self._get_backup_files_from_manifest( - 
relative_path=WEBKIT_SESSION_RESOURCE_LOG_BACKUP_RELPATH): + relative_path=WEBKIT_SESSION_RESOURCE_LOG_BACKUP_RELPATH + ): log_path = self._get_backup_file_from_id(log_file["file_id"]) if not log_path: continue - self.log.info("Found Safari browsing session resource log at path: %s", - log_path) + self.log.info( + "Found Safari browsing session resource log at path: %s", log_path + ) self.results[log_path] = self._extract_browsing_stats(log_path) elif self.is_fs_dump: for log_path in self._get_fs_files_from_patterns( - WEBKIT_SESSION_RESOURCE_LOG_ROOT_PATHS): - self.log.info("Found Safari browsing session resource log at path: %s", - log_path) + WEBKIT_SESSION_RESOURCE_LOG_ROOT_PATHS + ): + self.log.info( + "Found Safari browsing session resource log at path: %s", log_path + ) key = os.path.relpath(log_path, self.target_path) self.results[key] = self._extract_browsing_stats(log_path) - self.log.info("Extracted records from %d Safari browsing session resource logs", - len(self.results)) + self.log.info( + "Extracted records from %d Safari browsing session resource logs", + len(self.results), + ) diff --git a/mvt/ios/modules/mixed/whatsapp.py b/mvt/ios/modules/mixed/whatsapp.py index 6ef644e..3287eb7 100644 --- a/mvt/ios/modules/mixed/whatsapp.py +++ b/mvt/ios/modules/mixed/whatsapp.py @@ -27,13 +27,18 @@ class Whatsapp(IOSExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def serialize(self, record: dict) -> Union[dict, list]: text = record.get("ZTEXT", "").replace("\n", "\\n") @@ -45,7 +50,7 @@ class Whatsapp(IOSExtraction): "timestamp": record.get("isodate"), "module": self.__class__.__name__, "event": "message", - "data": f"\'{text}\' from {record.get('ZFROMJID', 'Unknown')}{links_text}", + "data": f"'{text}' from {record.get('ZFROMJID', 'Unknown')}{links_text}", } def check_indicators(self) -> None: @@ -59,8 +64,9 @@ class Whatsapp(IOSExtraction): self.detected.append(result) def run(self) -> None: - self._find_ios_database(backup_ids=WHATSAPP_BACKUP_IDS, - root_paths=WHATSAPP_ROOT_PATHS) + self._find_ios_database( + backup_ids=WHATSAPP_BACKUP_IDS, root_paths=WHATSAPP_ROOT_PATHS + ) self.log.info("Found WhatsApp database at path: %s", self.file_path) conn = sqlite3.connect(self.file_path) @@ -68,7 +74,8 @@ class Whatsapp(IOSExtraction): # Query all messages and join tables which can contain media attachments # and links. 
- cur.execute(""" + cur.execute( + """ SELECT ZWAMESSAGE.*, ZWAMEDIAITEM.ZAUTHORNAME, @@ -82,7 +89,8 @@ class Whatsapp(IOSExtraction): LEFT JOIN ZWAMEDIAITEM ON ZWAMEDIAITEM.ZMESSAGE = ZWAMESSAGE.Z_PK LEFT JOIN ZWAMESSAGEDATAITEM ON ZWAMESSAGEDATAITEM.ZMESSAGE = ZWAMESSAGE.Z_PK; - """) + """ + ) names = [description[0] for description in cur.description] for message_row in cur: @@ -90,26 +98,31 @@ class Whatsapp(IOSExtraction): for index, value in enumerate(message_row): message[names[index]] = value - message["isodate"] = convert_mactime_to_iso( - message.get("ZMESSAGEDATE")) + message["isodate"] = convert_mactime_to_iso(message.get("ZMESSAGEDATE")) message["ZTEXT"] = message["ZTEXT"] if message["ZTEXT"] else "" # Extract links from the WhatsApp message. URLs can be stored in # multiple fields/columns. # Check each of them! message_links = [] - fields_with_links = ["ZTEXT", "ZMATCHEDTEXT", "ZMEDIAURL", - "ZCONTENT1", "ZCONTENT2"] + fields_with_links = [ + "ZTEXT", + "ZMATCHEDTEXT", + "ZMEDIAURL", + "ZCONTENT1", + "ZCONTENT2", + ] for field in fields_with_links: if message.get(field): - message_links.extend(check_for_links( - message.get(field, ""))) + message_links.extend(check_for_links(message.get(field, ""))) # Remove WhatsApp internal media URLs. filtered_links = [] for link in message_links: - if not (link.startswith("https://mmg-fna.whatsapp.net/") - or link.startswith("https://mmg.whatsapp.net/")): + if not ( + link.startswith("https://mmg-fna.whatsapp.net/") + or link.startswith("https://mmg.whatsapp.net/") + ): filtered_links.append(link) # Add all the links found to the record @@ -120,5 +133,4 @@ class Whatsapp(IOSExtraction): cur.close() conn.close() - self.log.info("Extracted a total of %d WhatsApp messages", - len(self.results)) + self.log.info("Extracted a total of %d WhatsApp messages", len(self.results)) diff --git a/mvt/ios/modules/net_base.py b/mvt/ios/modules/net_base.py index 3d7bebc..99a432d 100644 --- a/mvt/ios/modules/net_base.py +++ b/mvt/ios/modules/net_base.py @@ -23,18 +23,24 @@ class NetBase(IOSExtraction): file_path: Optional[str] = None, target_path: Optional[str] = None, results_path: Optional[str] = None, - fast_mode: Optional[bool] = False, + fast_mode: bool = False, log: logging.Logger = logging.getLogger(__name__), - results: Optional[list] = None + results: Optional[list] = None, ) -> None: - super().__init__(file_path=file_path, target_path=target_path, - results_path=results_path, fast_mode=fast_mode, - log=log, results=results) + super().__init__( + file_path=file_path, + target_path=target_path, + results_path=results_path, + fast_mode=fast_mode, + log=log, + results=results, + ) def _extract_net_data(self): conn = sqlite3.connect(self.file_path) cur = conn.cursor() - cur.execute(""" + cur.execute( + """ SELECT ZPROCESS.ZFIRSTTIMESTAMP, ZPROCESS.ZTIMESTAMP, @@ -55,7 +61,8 @@ class NetBase(IOSExtraction): NULL, NULL, NULL, NULL, NULL, NULL, NULL FROM ZPROCESS WHERE Z_PK NOT IN (SELECT ZHASPROCESS FROM ZLIVEUSAGE); - """) + """ + ) for row in cur: # ZPROCESS records can be missing after the JOIN. 
@@ -72,62 +79,73 @@ class NetBase(IOSExtraction): else: live_timestamp = "" - self.results.append({ - "first_isodate": first_isodate, - "isodate": isodate, - "proc_name": row[2], - "bundle_id": row[3], - "proc_id": row[4], - "wifi_in": row[5], - "wifi_out": row[6], - "wwan_in": row[7], - "wwan_out": row[8], - "live_id": row[9], - "live_proc_id": row[10], - "live_isodate": live_timestamp if row[10] else first_isodate, - }) + self.results.append( + { + "first_isodate": first_isodate, + "isodate": isodate, + "proc_name": row[2], + "bundle_id": row[3], + "proc_id": row[4], + "wifi_in": row[5], + "wifi_out": row[6], + "wwan_in": row[7], + "wwan_out": row[8], + "live_id": row[9], + "live_proc_id": row[10], + "live_isodate": live_timestamp if row[10] else first_isodate, + } + ) cur.close() conn.close() - self.log.info("Extracted information on %d processes", - len(self.results)) + self.log.info("Extracted information on %d processes", len(self.results)) def serialize(self, record: dict) -> Union[dict, list]: - record_data = (f"{record['proc_name']} (Bundle ID: {record['bundle_id']}," - f" ID: {record['proc_id']})") - record_data_usage = (record_data + " " - f"WIFI IN: {record['wifi_in']}, " - f"WIFI OUT: {record['wifi_out']} - " - f"WWAN IN: {record['wwan_in']}, " - f"WWAN OUT: {record['wwan_out']}") + record_data = ( + f"{record['proc_name']} (Bundle ID: {record['bundle_id']}," + f" ID: {record['proc_id']})" + ) + record_data_usage = ( + record_data + " " + f"WIFI IN: {record['wifi_in']}, " + f"WIFI OUT: {record['wifi_out']} - " + f"WWAN IN: {record['wwan_in']}, " + f"WWAN OUT: {record['wwan_out']}" + ) - records = [{ - "timestamp": record["live_isodate"], - "module": self.__class__.__name__, - "event": "live_usage", - "data": record_data_usage, - }] + records = [ + { + "timestamp": record["live_isodate"], + "module": self.__class__.__name__, + "event": "live_usage", + "data": record_data_usage, + } + ] # Only included first_usage and current_usage records when a # ZPROCESS entry exists. - if ("MANIPULATED" not in record["proc_name"] - and "MISSING" not in record["proc_name"] - and record["live_proc_id"] is not None): - records.extend([ - { - "timestamp": record["first_isodate"], - "module": self.__class__.__name__, - "event": "first_usage", - "data": record_data, - }, - { - "timestamp": record["isodate"], - "module": self.__class__.__name__, - "event": "current_usage", - "data": record_data, - } - ]) + if ( + "MANIPULATED" not in record["proc_name"] + and "MISSING" not in record["proc_name"] + and record["live_proc_id"] is not None + ): + records.extend( + [ + { + "timestamp": record["first_isodate"], + "module": self.__class__.__name__, + "event": "first_usage", + "data": record_data, + }, + { + "timestamp": record["isodate"], + "module": self.__class__.__name__, + "event": "current_usage", + "data": record_data, + }, + ] + ) return records @@ -140,8 +158,10 @@ class NetBase(IOSExtraction): # If we are instructed to run fast, we skip this. 
if self.fast_mode: - self.log.info("Flag --fast was enabled: skipping extended " - "search for suspicious processes") + self.log.info( + "Flag --fast was enabled: skipping extended " + "search for suspicious processes" + ) return self.log.info("Extended search for suspicious processes ...") @@ -158,8 +178,9 @@ class NetBase(IOSExtraction): for proc in self.results: if not proc["bundle_id"]: - self.log.debug("Found process with no Bundle ID with name: %s", - proc["proc_name"]) + self.log.debug( + "Found process with no Bundle ID with name: %s", proc["proc_name"] + ) binary_path = None for file in files: @@ -170,34 +191,45 @@ class NetBase(IOSExtraction): if binary_path: self.log.debug("Located at %s", binary_path) else: - msg = ("Could not find the binary associated with the " - f"process with name {proc['proc_name']}") + msg = ( + "Could not find the binary associated with the " + f"process with name {proc['proc_name']}" + ) if not proc["proc_name"]: - msg = ("Found process entry with empty 'proc_name': " - f"{proc['live_proc_id']} at {proc['live_isodate']}") + msg = ( + "Found process entry with empty 'proc_name': " + f"{proc['live_proc_id']} at {proc['live_isodate']}" + ) elif len(proc["proc_name"]) == 16: - msg += (" (However, the process name might have " - "been truncated in the database)") + msg += ( + " (However, the process name might have " + "been truncated in the database)" + ) self.log.warning(msg) if not proc["live_proc_id"]: - self.log.info("Found process entry in ZPROCESS but not in ZLIVEUSAGE: %s at %s", - proc['proc_name'], proc['live_isodate']) + self.log.info( + "Found process entry in ZPROCESS but not in ZLIVEUSAGE: %s at %s", + proc["proc_name"], + proc["live_isodate"], + ) def check_manipulated(self): """Check for missing or manipulate DB entries""" # Don't show duplicates for each missing process. missing_process_cache = set() - for result in sorted( - self.results, key=operator.itemgetter("live_isodate")): + for result in sorted(self.results, key=operator.itemgetter("live_isodate")): if result["proc_id"]: continue # Avoid duplicate warnings for same process. if result["live_proc_id"] not in missing_process_cache: missing_process_cache.add(result["live_proc_id"]) - self.log.warning("Found manipulated process entry %s. Entry on %s", - result["live_proc_id"], result["live_isodate"]) + self.log.warning( + "Found manipulated process entry %s. Entry on %s", + result["live_proc_id"], + result["live_isodate"], + ) # Set manipulated proc timestamp so it appears in timeline. result["first_isodate"] = result["isodate"] = result["live_isodate"] @@ -207,7 +239,9 @@ class NetBase(IOSExtraction): def find_deleted(self): """Identify process which may have been deleted from the DataUsage database.""" - results_by_proc = {proc["proc_id"]: proc for proc in self.results if proc["proc_id"]} + results_by_proc = { + proc["proc_id"]: proc for proc in self.results if proc["proc_id"] + } all_proc_id = sorted(results_by_proc.keys()) # Fix issue #108 @@ -218,9 +252,12 @@ class NetBase(IOSExtraction): for proc_id in range(min(all_proc_id), max(all_proc_id)): if proc_id not in all_proc_id: previous_proc = results_by_proc[last_proc_id] - self.log.info("Missing process %d. Previous process at \"%s\" (%s)", - proc_id, previous_proc["first_isodate"], - previous_proc["proc_name"]) + self.log.info( + 'Missing process %d. 
Previous process at "%s" (%s)', + proc_id, + previous_proc["first_isodate"], + previous_proc["proc_name"], + ) missing_procs[proc_id] = { "proc_id": proc_id, @@ -236,15 +273,16 @@ class NetBase(IOSExtraction): for proc_id, proc in missing_procs.items(): # Set default DataUsage keys. result = {key: None for key in self.results[0].keys()} - result["first_isodate"] = result["isodate"] = result["live_isodate"] = proc["prev_proc_first"] + result["first_isodate"] = result["isodate"] = result["live_isodate"] = proc[ + "prev_proc_first" + ] result["proc_name"] = f"MISSING [follows {proc['prev_proc_name']}]" result["proc_id"] = result["live_proc_id"] = proc["proc_id"] result["bundle_id"] = None self.results.append(result) - self.results = sorted(self.results, - key=operator.itemgetter("first_isodate")) + self.results = sorted(self.results, key=operator.itemgetter("first_isodate")) def check_indicators(self) -> None: # Check for manipulated process records. diff --git a/mvt/ios/versions.py b/mvt/ios/versions.py index 74a58e0..6f823ea 100644 --- a/mvt/ios/versions.py +++ b/mvt/ios/versions.py @@ -13,8 +13,7 @@ IPHONE_MODELS = json.loads(pkgutil.get_data("mvt", "ios/data/ios_models.json")) IPHONE_IOS_VERSIONS = json.loads(pkgutil.get_data("mvt", "ios/data/ios_versions.json")) -def get_device_desc_from_id(identifier: str, - devices_list: list = IPHONE_MODELS) -> str: +def get_device_desc_from_id(identifier: str, devices_list: list = IPHONE_MODELS) -> str: for model in devices_list: if identifier == model["identifier"]: return model["description"] @@ -44,7 +43,7 @@ def is_ios_version_outdated(version: str, log: Optional[Logger] = None) -> bool: # Check if it is a build if "." not in version: version = find_version_by_build(version) - # If we can't find it + # If we can't find it if version == "": return False @@ -52,8 +51,10 @@ def is_ios_version_outdated(version: str, log: Optional[Logger] = None) -> bool: current_parsed = packaging.version.parse(version) if current_parsed < latest_parsed: if log: - log.warning("This phone is running an outdated iOS version: %s (latest is %s)", - version, - latest_ios_version()["version"]) + log.warning( + "This phone is running an outdated iOS version: %s (latest is %s)", + version, + latest_ios_version()["version"], + ) return True return False diff --git a/scripts/lint.sh b/scripts/lint.sh new file mode 100755 index 0000000..7e76739 --- /dev/null +++ b/scripts/lint.sh @@ -0,0 +1,14 @@ +#!/bin/sh -e + +export SOURCE="mvt tests" + +export PREFIX="" +if [ -d 'venv' ] ; then + export PREFIX="venv/bin/" +fi + +set -x + +${PREFIX}autoflake --in-place --recursive --exclude venv ${SOURCE} +${PREFIX}isort ${SOURCE} +${PREFIX}black --exclude venv ${SOURCE} diff --git a/tests/android/test_backup_module.py b/tests/android/test_backup_module.py index 8ca642d..1be19f2 100644 --- a/tests/android/test_backup_module.py +++ b/tests/android/test_backup_module.py @@ -15,7 +15,6 @@ from ..utils import get_android_backup_folder class TestBackupModule: - def test_module_folder(self): backup_path = get_android_backup_folder() mod = SMS(target_path=backup_path) diff --git a/tests/android/test_backup_parser.py b/tests/android/test_backup_parser.py index bd309ef..c5e6b0b 100644 --- a/tests/android/test_backup_parser.py +++ b/tests/android/test_backup_parser.py @@ -11,7 +11,6 @@ from ..utils import get_artifact class TestBackupParsing: - def test_parsing_noencryption(self): file = get_artifact("android_backup/backup.ab") with open(file, "rb") as f: @@ -20,7 +19,10 @@ class TestBackupParsing: m 
= hashlib.sha256() m.update(ddata) - assert m.hexdigest() == "ce1ac5009fea5187a9f546b51e1446ba450243ae91d31dc779233ec0937b5d18" + assert ( + m.hexdigest() + == "ce1ac5009fea5187a9f546b51e1446ba450243ae91d31dc779233ec0937b5d18" + ) sms = parse_tar_for_sms(ddata) assert isinstance(sms, list) assert len(sms) == 2 @@ -35,7 +37,10 @@ class TestBackupParsing: m = hashlib.sha256() m.update(ddata) - assert m.hexdigest() == "f365ace1effbc4902c6aeba241ca61544f8a96ad456c1861808ea87b7dd03896" + assert ( + m.hexdigest() + == "f365ace1effbc4902c6aeba241ca61544f8a96ad456c1861808ea87b7dd03896" + ) sms = parse_tar_for_sms(ddata) assert isinstance(sms, list) assert len(sms) == 1 @@ -50,7 +55,10 @@ class TestBackupParsing: m = hashlib.sha256() m.update(ddata) - assert m.hexdigest() == "33e73df2ede9798dcb3a85c06200ee41c8f52dd2f2e50ffafcceb0407bc13e3a" + assert ( + m.hexdigest() + == "33e73df2ede9798dcb3a85c06200ee41c8f52dd2f2e50ffafcceb0407bc13e3a" + ) sms = parse_tar_for_sms(ddata) print(sms) assert isinstance(sms, list) diff --git a/tests/android/test_dumpsys_parser.py b/tests/android/test_dumpsys_parser.py index 807145f..3a767fb 100644 --- a/tests/android/test_dumpsys_parser.py +++ b/tests/android/test_dumpsys_parser.py @@ -3,15 +3,16 @@ # Use of this software is governed by the MVT License 1.1 that can be found at # https://license.mvt.re/1.1/ -from mvt.android.parsers.dumpsys import (parse_dumpsys_appops, - parse_dumpsys_battery_history, - parse_dumpsys_packages) +from mvt.android.parsers.dumpsys import ( + parse_dumpsys_appops, + parse_dumpsys_battery_history, + parse_dumpsys_packages, +) from ..utils import get_artifact class TestDumpsysParsing: - def test_appops_parsing(self): file = get_artifact("android_data/dumpsys_appops.txt") with open(file) as f: diff --git a/tests/android_androidqf/test_dumpsysaccessbility.py b/tests/android_androidqf/test_dumpsysaccessbility.py index a84d46e..918e02e 100644 --- a/tests/android_androidqf/test_dumpsysaccessbility.py +++ b/tests/android_androidqf/test_dumpsysaccessbility.py @@ -3,8 +3,7 @@ # Use of this software is governed by the MVT License 1.1 that can be found at # https://license.mvt.re/1.1/ -from mvt.android.modules.androidqf.dumpsys_accessibility import \ - DumpsysAccessibility +from mvt.android.modules.androidqf.dumpsys_accessibility import DumpsysAccessibility from mvt.common.module import run_module from ..utils import get_android_androidqf diff --git a/tests/android_androidqf/test_dumpsyspackages.py b/tests/android_androidqf/test_dumpsyspackages.py index 1421aaf..a91520a 100644 --- a/tests/android_androidqf/test_dumpsyspackages.py +++ b/tests/android_androidqf/test_dumpsyspackages.py @@ -20,7 +20,10 @@ class TestDumpsysPackagesModule: assert len(m.results) == 2 assert len(m.detected) == 0 assert len(m.timeline) == 6 - assert m.results[0]["package_name"] == "com.samsung.android.provider.filterprovider" + assert ( + m.results[0]["package_name"] + == "com.samsung.android.provider.filterprovider" + ) def test_detection_pkgname(self, indicator_file): data_path = get_android_androidqf() diff --git a/tests/android_androidqf/test_getprop.py b/tests/android_androidqf/test_getprop.py index 0e87c00..34572c6 100644 --- a/tests/android_androidqf/test_getprop.py +++ b/tests/android_androidqf/test_getprop.py @@ -14,9 +14,10 @@ from ..utils import get_artifact_folder class TestAndroidqfGetpropAnalysis: - def test_androidqf_getprop(self): - m = Getprop(target_path=os.path.join(get_artifact_folder(), "androidqf"), log=logging) + m = Getprop( + 
target_path=os.path.join(get_artifact_folder(), "androidqf"), log=logging + ) run_module(m) assert len(m.results) == 10 assert m.results[0]["name"] == "dalvik.vm.appimageformat" @@ -25,7 +26,9 @@ class TestAndroidqfGetpropAnalysis: assert len(m.detected) == 0 def test_androidqf_getprop_detection(self, indicator_file): - m = Getprop(target_path=os.path.join(get_artifact_folder(), "androidqf"), log=logging) + m = Getprop( + target_path=os.path.join(get_artifact_folder(), "androidqf"), log=logging + ) ind = Indicators(log=logging.getLogger()) ind.parse_stix2(indicator_file) ind.ioc_collections[0]["android_property_names"].append("dalvik.vm.heapmaxfree") diff --git a/tests/android_androidqf/test_processes.py b/tests/android_androidqf/test_processes.py index d246073..f1cb53a 100644 --- a/tests/android_androidqf/test_processes.py +++ b/tests/android_androidqf/test_processes.py @@ -14,7 +14,9 @@ from ..utils import get_artifact_folder class TestAndroidqfProcessesAnalysis: def test_androidqf_processes(self): - m = Processes(target_path=os.path.join(get_artifact_folder(), "androidqf"), log=logging) + m = Processes( + target_path=os.path.join(get_artifact_folder(), "androidqf"), log=logging + ) run_module(m) assert len(m.results) == 15 assert len(m.timeline) == 0 diff --git a/tests/android_androidqf/test_sms.py b/tests/android_androidqf/test_sms.py index 0e41a72..6c684b7 100644 --- a/tests/android_androidqf/test_sms.py +++ b/tests/android_androidqf/test_sms.py @@ -14,7 +14,9 @@ from ..utils import get_artifact_folder class TestAndroidqfSMSAnalysis: def test_androidqf_sms(self): - m = SMS(target_path=os.path.join(get_artifact_folder(), "androidqf"), log=logging) + m = SMS( + target_path=os.path.join(get_artifact_folder(), "androidqf"), log=logging + ) run_module(m) assert len(m.results) == 2 assert len(m.timeline) == 0 diff --git a/tests/android_bugreport/test_bugreport.py b/tests/android_bugreport/test_bugreport.py index deb823c..d4563d0 100644 --- a/tests/android_bugreport/test_bugreport.py +++ b/tests/android_bugreport/test_bugreport.py @@ -15,7 +15,6 @@ from ..utils import get_artifact_folder class TestBugreportAnalysis: - def launch_bug_report_module(self, module): fpath = os.path.join(get_artifact_folder(), "android_data/bugreport/") m = module(target_path=fpath) @@ -23,7 +22,9 @@ class TestBugreportAnalysis: parent_path = Path(fpath).absolute().as_posix() for root, subdirs, subfiles in os.walk(os.path.abspath(fpath)): for file_name in subfiles: - folder_files.append(os.path.relpath(os.path.join(root, file_name), parent_path)) + folder_files.append( + os.path.relpath(os.path.join(root, file_name), parent_path) + ) m.from_folder(fpath, folder_files) run_module(m) return m @@ -37,7 +38,10 @@ class TestBugreportAnalysis: def test_packages_module(self): m = self.launch_bug_report_module(Packages) assert len(m.results) == 2 - assert m.results[0]["package_name"] == "com.samsung.android.provider.filterprovider" + assert ( + m.results[0]["package_name"] + == "com.samsung.android.provider.filterprovider" + ) assert m.results[1]["package_name"] == "com.instagram.android" assert len(m.results[0]["permissions"]) == 4 assert len(m.results[1]["permissions"]) == 32 diff --git a/tests/artifacts/generate_stix.py b/tests/artifacts/generate_stix.py index 57db7d9..47750fb 100644 --- a/tests/artifacts/generate_stix.py +++ b/tests/artifacts/generate_stix.py @@ -22,27 +22,47 @@ def generate_test_stix_file(file_path): malware = Malware(name="TestMalware", is_family=False, description="") res.append(malware) for d in 
domains: - i = Indicator(indicator_types=["malicious-activity"], pattern="[domain-name:value='{}']".format(d), pattern_type="stix") + i = Indicator( + indicator_types=["malicious-activity"], + pattern="[domain-name:value='{}']".format(d), + pattern_type="stix", + ) res.append(i) res.append(Relationship(i, "indicates", malware)) for p in processes: - i = Indicator(indicator_types=["malicious-activity"], pattern="[process:name='{}']".format(p), pattern_type="stix") + i = Indicator( + indicator_types=["malicious-activity"], + pattern="[process:name='{}']".format(p), + pattern_type="stix", + ) res.append(i) res.append(Relationship(i, "indicates", malware)) for f in filenames: - i = Indicator(indicator_types=["malicious-activity"], pattern="[file:name='{}']".format(f), pattern_type="stix") + i = Indicator( + indicator_types=["malicious-activity"], + pattern="[file:name='{}']".format(f), + pattern_type="stix", + ) res.append(i) res.append(Relationship(i, "indicates", malware)) for e in emails: - i = Indicator(indicator_types=["malicious-activity"], pattern="[email-addr:value='{}']".format(e), pattern_type="stix") + i = Indicator( + indicator_types=["malicious-activity"], + pattern="[email-addr:value='{}']".format(e), + pattern_type="stix", + ) res.append(i) res.append(Relationship(i, "indicates", malware)) for p in android_property: - i = Indicator(indicator_types=["malicious-activity"], pattern="[android-property:name='{}']".format(p), pattern_type="stix") + i = Indicator( + indicator_types=["malicious-activity"], + pattern="[android-property:name='{}']".format(p), + pattern_type="stix", + ) res.append(i) res.append(Relationship(i, "indicates", malware)) diff --git a/tests/common/test_indicators.py b/tests/common/test_indicators.py index 89cad49..0583d72 100644 --- a/tests/common/test_indicators.py +++ b/tests/common/test_indicators.py @@ -10,7 +10,6 @@ from mvt.common.indicators import Indicators class TestIndicators: - def test_parse_stix2(self, indicator_file): ind = Indicators(log=logging) ind.load_indicators_files([indicator_file], load_default=False) diff --git a/tests/common/test_utils.py b/tests/common/test_utils.py index 3d3f9ca..64ba4aa 100644 --- a/tests/common/test_utils.py +++ b/tests/common/test_utils.py @@ -6,11 +6,14 @@ import logging import os -from mvt.common.utils import (convert_datetime_to_iso, convert_mactime_to_iso, - convert_unix_to_iso, - convert_unix_to_utc_datetime, - generate_hashes_from_path, - get_sha256_from_file_path) +from mvt.common.utils import ( + convert_datetime_to_iso, + convert_mactime_to_iso, + convert_unix_to_iso, + convert_unix_to_utc_datetime, + generate_hashes_from_path, + get_sha256_from_file_path, +) from ..utils import get_artifact_folder @@ -20,7 +23,6 @@ TEST_DATE_MAC = TEST_DATE_EPOCH - 978307200 class TestDateConversions: - def test_convert_unix_to_iso(self): assert convert_unix_to_iso(TEST_DATE_EPOCH) == TEST_DATE_ISO @@ -39,11 +41,12 @@ class TestDateConversions: class TestHashes: - def test_hash_from_file(self): path = os.path.join(get_artifact_folder(), "androidqf", "backup.ab") sha256 = get_sha256_from_file_path(path) - assert sha256 == "f0e32fe8a7fd5ac0e2de19636d123c0072e979396986139ba2bc49ec385dc325" + assert ( + sha256 == "f0e32fe8a7fd5ac0e2de19636d123c0072e979396986139ba2bc49ec385dc325" + ) def test_hash_from_folder(self): path = os.path.join(get_artifact_folder(), "androidqf") @@ -52,6 +55,12 @@ class TestHashes: # Sort the files to have reliable order for tests. 
hashes = sorted(hashes, key=lambda x: x["file_path"]) assert hashes[0]["file_path"] == os.path.join(path, "backup.ab") - assert hashes[0]["sha256"] == "f0e32fe8a7fd5ac0e2de19636d123c0072e979396986139ba2bc49ec385dc325" + assert ( + hashes[0]["sha256"] + == "f0e32fe8a7fd5ac0e2de19636d123c0072e979396986139ba2bc49ec385dc325" + ) assert hashes[1]["file_path"] == os.path.join(path, "dumpsys.txt") - assert hashes[1]["sha256"] == "bac858001784657a43c7cfa771fd1fc4a49428eb6b7c458a1ebf2fdeef78dd86" + assert ( + hashes[1]["sha256"] + == "bac858001784657a43c7cfa771fd1fc4a49428eb6b7c458a1ebf2fdeef78dd86" + ) diff --git a/tests/ios_backup/test_backup_info.py b/tests/ios_backup/test_backup_info.py index 96544aa..bfab0a1 100644 --- a/tests/ios_backup/test_backup_info.py +++ b/tests/ios_backup/test_backup_info.py @@ -10,7 +10,6 @@ from ..utils import get_ios_backup_folder class TestBackupInfoModule: - def test_manifest(self): m = BackupInfo(target_path=get_ios_backup_folder()) run_module(m) diff --git a/tests/ios_backup/test_calendar.py b/tests/ios_backup/test_calendar.py index 75383c6..31622c6 100644 --- a/tests/ios_backup/test_calendar.py +++ b/tests/ios_backup/test_calendar.py @@ -13,7 +13,6 @@ from ..utils import get_ios_backup_folder class TestCalendarModule: - def test_calendar(self): m = Calendar(target_path=get_ios_backup_folder()) run_module(m) diff --git a/tests/ios_backup/test_datausage.py b/tests/ios_backup/test_datausage.py index 208eb07..a2b785e 100644 --- a/tests/ios_backup/test_datausage.py +++ b/tests/ios_backup/test_datausage.py @@ -13,7 +13,6 @@ from ..utils import get_ios_backup_folder class TestDatausageModule: - def test_datausage(self): m = Datausage(target_path=get_ios_backup_folder()) run_module(m) diff --git a/tests/ios_backup/test_manifest.py b/tests/ios_backup/test_manifest.py index 98c5b76..58cd5ab 100644 --- a/tests/ios_backup/test_manifest.py +++ b/tests/ios_backup/test_manifest.py @@ -13,7 +13,6 @@ from ..utils import get_ios_backup_folder class TestManifestModule: - def test_manifest(self): m = Manifest(target_path=get_ios_backup_folder()) run_module(m) diff --git a/tests/ios_backup/test_safari_browserstate.py b/tests/ios_backup/test_safari_browserstate.py index c96b3e0..3e5b084 100644 --- a/tests/ios_backup/test_safari_browserstate.py +++ b/tests/ios_backup/test_safari_browserstate.py @@ -13,7 +13,6 @@ from ..utils import get_ios_backup_folder class TestSafariBrowserStateModule: - def test_parsing(self): m = SafariBrowserState(target_path=get_ios_backup_folder()) m.is_backup = True diff --git a/tests/ios_backup/test_sms.py b/tests/ios_backup/test_sms.py index f2bb745..84ce374 100644 --- a/tests/ios_backup/test_sms.py +++ b/tests/ios_backup/test_sms.py @@ -13,7 +13,6 @@ from ..utils import get_ios_backup_folder class TestSMSModule: - def test_sms(self): m = SMS(target_path=get_ios_backup_folder()) run_module(m) diff --git a/tests/ios_backup/test_tcc.py b/tests/ios_backup/test_tcc.py index b110b0d..5b683b8 100644 --- a/tests/ios_backup/test_tcc.py +++ b/tests/ios_backup/test_tcc.py @@ -13,7 +13,6 @@ from ..utils import get_ios_backup_folder class TestTCCtModule: - def test_tcc(self): m = TCC(target_path=get_ios_backup_folder()) run_module(m) diff --git a/tests/ios_backup/test_webkit_resource_load_statistics.py b/tests/ios_backup/test_webkit_resource_load_statistics.py index c6da371..24789e4 100644 --- a/tests/ios_backup/test_webkit_resource_load_statistics.py +++ b/tests/ios_backup/test_webkit_resource_load_statistics.py @@ -4,14 +4,14 @@ # https://license.mvt.re/1.1/ from 
mvt.common.module import run_module -from mvt.ios.modules.mixed.webkit_resource_load_statistics import \ - WebkitResourceLoadStatistics +from mvt.ios.modules.mixed.webkit_resource_load_statistics import ( + WebkitResourceLoadStatistics, +) from ..utils import get_ios_backup_folder class TestWebkitResourceLoadStatisticsModule: - def test_webkit(self): m = WebkitResourceLoadStatistics(target_path=get_ios_backup_folder()) m.is_backup = True diff --git a/tests/ios_fs/test_filesystem.py b/tests/ios_fs/test_filesystem.py index c313c99..a49aec1 100644 --- a/tests/ios_fs/test_filesystem.py +++ b/tests/ios_fs/test_filesystem.py @@ -13,7 +13,6 @@ from ..utils import get_ios_backup_folder class TestFilesystem: - def test_filesystem(self): m = Filesystem(target_path=get_ios_backup_folder()) run_module(m) @@ -26,7 +25,9 @@ class TestFilesystem: ind = Indicators(log=logging.getLogger()) ind.parse_stix2(indicator_file) # Adds a filename that exist in the folder - ind.ioc_collections[0]["processes"].append("64d0019cb3d46bfc8cce545a8ba54b93e7ea9347") + ind.ioc_collections[0]["processes"].append( + "64d0019cb3d46bfc8cce545a8ba54b93e7ea9347" + ) m.indicators = ind run_module(m) assert len(m.results) == 14 diff --git a/tests/test_check_android_androidqf.py b/tests/test_check_android_androidqf.py index b9ae4d2..dbdf9d8 100644 --- a/tests/test_check_android_androidqf.py +++ b/tests/test_check_android_androidqf.py @@ -13,7 +13,6 @@ from .utils import get_artifact_folder class TestCheckAndroidqfCommand: - def test_check(self): runner = CliRunner() path = os.path.join(get_artifact_folder(), "androidqf") diff --git a/tests/test_check_android_bugreport.py b/tests/test_check_android_bugreport.py index 263009f..181d45d 100644 --- a/tests/test_check_android_bugreport.py +++ b/tests/test_check_android_bugreport.py @@ -13,7 +13,6 @@ from .utils import get_artifact_folder class TestCheckBugreportCommand: - def test_check(self): runner = CliRunner() path = os.path.join(get_artifact_folder(), "android_data/bugreport/") diff --git a/tests/test_check_ios_backup.py b/tests/test_check_ios_backup.py index 3e0fbdd..305fe82 100644 --- a/tests/test_check_ios_backup.py +++ b/tests/test_check_ios_backup.py @@ -11,7 +11,6 @@ from .utils import get_ios_backup_folder class TestCheckBackupCommand: - def test_check(self): runner = CliRunner() path = get_ios_backup_folder() diff --git a/tests/test_ios_versions.py b/tests/test_ios_versions.py index 9f6b74c..125c76a 100644 --- a/tests/test_ios_versions.py +++ b/tests/test_ios_versions.py @@ -7,7 +7,6 @@ from mvt.ios.versions import is_ios_version_outdated class TestIosVersions: - def test_is_ios_version_outdated(self): assert is_ios_version_outdated("20B110") is True assert is_ios_version_outdated("16.3") is True