Compare commits

...

28 Commits

Author SHA1 Message Date
Nex  6936908f86  Bumped version  2022-08-15 10:27:36 +02:00
Nex  f3e5763c6a  Added SECURITY.md  2022-08-14 19:28:30 +02:00
Nex  f438f7b1fb  Fixing unix epoch timestamps conversion to float  2022-08-13 23:37:35 +02:00
Nex  66a157868f  Ensuring all adb connect/disconnect are happening in modules only  2022-08-13 23:12:43 +02:00
Nex  a966b694ea  More line length enforcement  2022-08-13 18:27:54 +02:00
Nex  c9dd3af278  More line length enforcing  2022-08-13 18:24:11 +02:00
Nex  82a60ee07c  Enforcing line length  2022-08-13 17:52:56 +02:00
Nex  8bc5113bd2  Enforcing line length  2022-08-13 17:51:06 +02:00
Nex  00d82f7f00  Enforcing line lenght  2022-08-13 17:50:00 +02:00
Nex  2781f33fb5  Added more date conversion wrappers  2022-08-13 14:04:10 +02:00
Nex  271fe5fbee  Continuing enforcement of line length and simplifying date conversions  2022-08-13 02:14:24 +02:00
Nex  0f503f72b5  Starting to enforce line lengths on mvt-ios  2022-08-12 19:38:57 +02:00
Nex  424b86a261  Fixed typos  2022-08-12 19:25:56 +02:00
Nex  1fe595f4cc  Added CONTRIBUTING.md file  2022-08-12 19:25:11 +02:00
Nex  b8c59f1183  Removed public_indicators.json legacy file  2022-08-12 19:15:17 +02:00
Nex  a935347aed  Trying to enforce line lengths at 80/100  2022-08-12 19:14:05 +02:00
Nex  661d0a8669  Using Union type hints in order to support older versions of Python  2022-08-12 16:29:43 +02:00
Nex  63ff5fd334  Started linting the code  2022-08-12 16:20:16 +02:00
Nex  146b9245ab  Sorted imports  2022-08-11 16:57:08 +02:00
Nex  99d33922be  Conformed ways modules logger is initialized  2022-08-11 16:42:04 +02:00
Nex  c42634af3f  Fixed logging in accessibility module  2022-08-11 14:50:25 +02:00
Nex  6cb59cc3ab  Trying to tidy up ConfigurationProfiles module  2022-08-10 16:44:43 +02:00
Nex  e0481686b7  Fixed test file  2022-08-08 16:47:01 +02:00
Nex  804ade3a40  Conformed browerstate plugin to others with similar structure  2022-08-08 16:44:54 +02:00
tek  c5ccaef0c4  Fixes a bug in Safari Browser State module  2022-08-08 11:20:05 +02:00
Nex  c4416d406a  Avoiding duplicate entries for stix2 files with multiple malware definitions  2022-08-06 14:49:05 +02:00
Nex  6b8a23ae10  Added an attribute list to keep track of executed modules  2022-08-05 13:52:51 +02:00
tek  872d5d766e  Adds product name in iOS backup info module  2022-08-03 16:34:39 +02:00
110 changed files with 1594 additions and 1013 deletions

19
CONTRIBUTING.md Normal file
View File

@@ -0,0 +1,19 @@
# Contributing
Thank you for your interest in contributing to Mobile Verification Toolkit (MVT)! Your help is very much appreciated.
## Where to start
Starting to contribute to a somewhat complex project like MVT might seem intimidating. Unless you have specific ideas for new functionality you would like to submit, a good starting point is to search for `TODO:` and `FIXME:` comments throughout the code. Alternatively, you can check whether any open GitHub issues are marked with the ["help wanted"](https://github.com/mvt-project/mvt/issues?q=is%3Aissue+is%3Aopen+label%3A%22help+wanted%22) tag.
## Code style
When contributing code to this project, please follow these conventions:
- **Indentation**: we use 4-spaces tabs.
- **Quotes**: we use double quotes (`"`) by default. Single quotes (`'`) may be preferred for nested strings, to avoid escaping (`\"`), or inside f-strings.
- **Maximum line length**: we strongly encourage keeping lines within 80 characters and following the [PEP8 indentation guidelines](https://peps.python.org/pep-0008/#indentation) when wrapping. However, if breaking at 80 characters is not possible or would hurt readability, exceptions are tolerated as long as lines stay within a hard maximum of 100 characters.
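
For illustration, here is a minimal sketch of these conventions applied to a hypothetical helper (the function and field names below are illustrative and not part of MVT):

```python
import logging

log = logging.getLogger(__name__)


def summarize_packages(packages: list, installer: str = None) -> dict:
    """Count packages, optionally filtered by installer (illustrative only)."""
    # Double quotes by default; single quotes only inside the f-string.
    label = f"installer: {installer or 'any'}"

    # 4-space indentation, and long expressions wrapped per PEP8 so that
    # lines stay within 80 characters (hard maximum of 100).
    selected = [package for package in packages
                if installer is None
                or package.get("installer") == installer]

    log.info("Selected %d of %d packages (%s)",
             len(selected), len(packages), label)
    return {"total": len(packages), "selected": len(selected)}
```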

View File

@@ -11,3 +11,6 @@ upload:
test-upload:
python3 -m twine upload --repository testpypi dist/*
pylint:
pylint --rcfile=setup.cfg mvt

5
SECURITY.md Normal file
View File

@@ -0,0 +1,5 @@
# Reporting security issues
Thank you for your interest in reporting security issues and vulnerabilities! Security research is of the utmost importance and we take all reports seriously. If you discover an issue, please report it to us right away!
Please DO NOT file a public issue; instead, send your report privately to *nex [at] nex [dot] sx*. You can also send PGP-encrypted emails using [this key](https://keybase.io/nex/pgp_keys.asc?fingerprint=05216f3b86848a303c2fe37dd166f1667359d880).

View File

@@ -58,14 +58,16 @@ def version():
@click.option("--output", "-o", type=click.Path(exists=False),
help="Specify a path to a folder where you want to store the APKs")
@click.option("--from-file", "-f", type=click.Path(exists=True),
help="Instead of acquiring from phone, load an existing packages.json file for lookups (mainly for debug purposes)")
help="Instead of acquiring from phone, load an existing packages.json file for "
"lookups (mainly for debug purposes)")
@click.pass_context
def download_apks(ctx, all_apks, virustotal, output, from_file, serial):
try:
if from_file:
download = DownloadAPKs.from_json(from_file)
else:
# TODO: Do we actually want to be able to run without storing any file?
# TODO: Do we actually want to be able to run without storing any
# file?
if not output:
log.critical("You need to specify an output folder with --output!")
ctx.exit(1)
@@ -130,14 +132,16 @@ def check_adb(ctx, serial, iocs, output, fast, list_modules, module):
@cli.command("check-bugreport", help="Check an Android Bug Report")
@click.option("--iocs", "-i", type=click.Path(exists=True), multiple=True,
default=[], help=HELP_MSG_IOC)
@click.option("--output", "-o", type=click.Path(exists=False), help=HELP_MSG_OUTPUT)
@click.option("--output", "-o", type=click.Path(exists=False),
help=HELP_MSG_OUTPUT)
@click.option("--list-modules", "-l", is_flag=True, help=HELP_MSG_LIST_MODULES)
@click.option("--module", "-m", help=HELP_MSG_MODULE)
@click.argument("BUGREPORT_PATH", type=click.Path(exists=True))
@click.pass_context
def check_bugreport(ctx, iocs, output, list_modules, module, bugreport_path):
cmd = CmdAndroidCheckBugreport(target_path=bugreport_path, results_path=output,
ioc_files=iocs, module_name=module)
cmd = CmdAndroidCheckBugreport(target_path=bugreport_path,
results_path=output, ioc_files=iocs,
module_name=module)
if list_modules:
cmd.list_modules()
@@ -156,14 +160,14 @@ def check_bugreport(ctx, iocs, output, list_modules, module, bugreport_path):
# Command: check-backup
#==============================================================================
@cli.command("check-backup", help="Check an Android Backup")
@click.option("--serial", "-s", type=str, help=HELP_MSG_SERIAL)
@click.option("--iocs", "-i", type=click.Path(exists=True), multiple=True,
default=[], help=HELP_MSG_IOC)
@click.option("--output", "-o", type=click.Path(exists=False), help=HELP_MSG_OUTPUT)
@click.option("--output", "-o", type=click.Path(exists=False),
help=HELP_MSG_OUTPUT)
@click.option("--list-modules", "-l", is_flag=True, help=HELP_MSG_LIST_MODULES)
@click.argument("BACKUP_PATH", type=click.Path(exists=True))
@click.pass_context
def check_backup(ctx, serial, iocs, output, list_modules, backup_path):
def check_backup(ctx, iocs, output, list_modules, backup_path):
cmd = CmdAndroidCheckBackup(target_path=backup_path, results_path=output,
ioc_files=iocs)

View File

@@ -14,12 +14,12 @@ log = logging.getLogger(__name__)
class CmdAndroidCheckADB(Command):
name = "check-adb"
modules = ADB_MODULES
def __init__(self, target_path: str = None, results_path: str = None,
ioc_files: list = [], module_name: str = None, serial: str = None,
fast_mode: bool = False):
ioc_files: list = [], module_name: str = None,
serial: str = None, fast_mode: bool = False):
super().__init__(target_path=target_path, results_path=results_path,
ioc_files=ioc_files, module_name=module_name,
serial=serial, fast_mode=fast_mode, log=log)
self.name = "check-adb"
self.modules = ADB_MODULES

View File

@@ -25,16 +25,16 @@ log = logging.getLogger(__name__)
class CmdAndroidCheckBackup(Command):
name = "check-backup"
modules = BACKUP_MODULES
def __init__(self, target_path: str = None, results_path: str = None,
ioc_files: list = [], module_name: str = None, serial: str = None,
fast_mode: bool = False):
ioc_files: list = [], module_name: str = None,
serial: str = None, fast_mode: bool = False):
super().__init__(target_path=target_path, results_path=results_path,
ioc_files=ioc_files, module_name=module_name,
serial=serial, fast_mode=fast_mode, log=log)
self.name = "check-backup"
self.modules = BACKUP_MODULES
self.backup_type = None
self.backup_archive = None
self.backup_files = []
@@ -58,8 +58,8 @@ class CmdAndroidCheckBackup(Command):
except InvalidBackupPassword:
log.critical("Invalid backup password")
sys.exit(1)
except AndroidBackupParsingError as e:
log.critical("Impossible to parse this backup file: %s", e)
except AndroidBackupParsingError as exc:
log.critical("Impossible to parse this backup file: %s", exc)
log.critical("Please use Android Backup Extractor (ABE) instead")
sys.exit(1)
@@ -73,13 +73,16 @@ class CmdAndroidCheckBackup(Command):
self.target_path = Path(self.target_path).absolute().as_posix()
for root, subdirs, subfiles in os.walk(os.path.abspath(self.target_path)):
for fname in subfiles:
self.backup_files.append(os.path.relpath(os.path.join(root, fname), self.target_path))
self.backup_files.append(os.path.relpath(os.path.join(root, fname),
self.target_path))
else:
log.critical("Invalid backup path, path should be a folder or an Android Backup (.ab) file")
log.critical("Invalid backup path, path should be a folder or an "
"Android Backup (.ab) file")
sys.exit(1)
def module_init(self, module: Callable) -> None:
if self.backup_type == "folder":
module.from_folder(self.target_path, self.backup_files)
else:
module.from_ab(self.target_path, self.backup_archive, self.backup_files)
module.from_ab(self.target_path, self.backup_archive,
self.backup_files)

View File

@@ -18,16 +18,16 @@ log = logging.getLogger(__name__)
class CmdAndroidCheckBugreport(Command):
name = "check-bugreport"
modules = BUGREPORT_MODULES
def __init__(self, target_path: str = None, results_path: str = None,
ioc_files: list = [], module_name: str = None, serial: str = None,
fast_mode: bool = False):
ioc_files: list = [], module_name: str = None,
serial: str = None, fast_mode: bool = False):
super().__init__(target_path=target_path, results_path=results_path,
ioc_files=ioc_files, module_name=module_name,
serial=serial, fast_mode=fast_mode, log=log)
self.name = "check-bugreport"
self.modules = BUGREPORT_MODULES
self.bugreport_format = None
self.bugreport_archive = None
self.bugreport_files = []
@@ -41,9 +41,11 @@ class CmdAndroidCheckBugreport(Command):
elif os.path.isdir(self.target_path):
self.bugreport_format = "dir"
parent_path = Path(self.target_path).absolute().as_posix()
for root, subdirs, subfiles in os.walk(os.path.abspath(self.target_path)):
for root, _, subfiles in os.walk(os.path.abspath(self.target_path)):
for file_name in subfiles:
self.bugreport_files.append(os.path.relpath(os.path.join(root, file_name), parent_path))
file_path = os.path.relpath(os.path.join(root, file_name),
parent_path)
self.bugreport_files.append(file_path)
def module_init(self, module: Callable) -> None:
if self.bugreport_format == "zip":

View File

@@ -78,13 +78,14 @@ class DownloadAPKs(AndroidExtraction):
try:
self._adb_download(remote_path, local_path)
except InsufficientPrivileges:
log.error("Unable to pull package file from %s: insufficient privileges, it might be a system app",
log.error("Unable to pull package file from %s: insufficient "
"privileges, it might be a system app",
remote_path)
self._adb_reconnect()
return None
except Exception as e:
except Exception as exc:
log.exception("Failed to pull package file from %s: %s",
remote_path, e)
remote_path, exc)
self._adb_reconnect()
return None
@@ -121,8 +122,8 @@ class DownloadAPKs(AndroidExtraction):
if not package.get("system", False):
packages_selection.append(package)
log.info("Selected only %d packages which are not marked as \"system\"",
len(packages_selection))
log.info("Selected only %d packages which are not marked as "
"\"system\"", len(packages_selection))
if len(packages_selection) == 0:
log.info("No packages were selected for download")
@@ -141,8 +142,8 @@ class DownloadAPKs(AndroidExtraction):
log.info("[%d/%d] Package: %s", i, len(packages_selection),
package["package_name"])
# Sometimes the package path contains multiple lines for multiple apks.
# We loop through each line and download each file.
# Sometimes the package path contains multiple lines for multiple
# apks. We loop through each line and download each file.
for package_file in package["files"]:
device_path = package_file["path"]
local_path = self.pull_package_file(package["package_name"],

View File

@@ -25,8 +25,6 @@ from mvt.android.parsers.backup import (InvalidBackupPassword, parse_ab_header,
parse_backup_file)
from mvt.common.module import InsufficientPrivileges, MVTModule
log = logging.getLogger(__name__)
ADB_KEY_PATH = os.path.expanduser("~/.android/adbkey")
ADB_PUB_KEY_PATH = os.path.expanduser("~/.android/adbkey.pub")
@@ -36,7 +34,8 @@ class AndroidExtraction(MVTModule):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
@@ -74,13 +73,15 @@ class AndroidExtraction(MVTModule):
try:
self.device = AdbDeviceUsb(serial=self.serial)
except UsbDeviceNotFoundError:
log.critical("No device found. Make sure it is connected and unlocked.")
self.log.critical("No device found. Make sure it is connected "
"and unlocked.")
sys.exit(-1)
# Otherwise we try to use the TCP transport.
else:
addr = self.serial.split(":")
if len(addr) < 2:
raise ValueError("TCP serial number must follow the format: `address:port`")
raise ValueError("TCP serial number must follow the format: "
"`address:port`")
self.device = AdbDeviceTcp(addr[0], int(addr[1]),
default_transport_timeout_s=30.)
@@ -89,18 +90,22 @@ class AndroidExtraction(MVTModule):
try:
self.device.connect(rsa_keys=[signer], auth_timeout_s=5)
except (USBErrorBusy, USBErrorAccess):
log.critical("Device is busy, maybe run `adb kill-server` and try again.")
self.log.critical("Device is busy, maybe run `adb kill-server` "
"and try again.")
sys.exit(-1)
except DeviceAuthError:
log.error("You need to authorize this computer on the Android device. Retrying in 5 seconds...")
self.log.error("You need to authorize this computer on the "
"Android device. Retrying in 5 seconds...")
time.sleep(5)
except UsbReadFailedError:
log.error("Unable to connect to the device over USB. Try to unplug, plug the device and start again.")
self.log.error("Unable to connect to the device over USB. "
"Try to unplug, plug the device and start again.")
sys.exit(-1)
except OSError as e:
if e.errno == 113 and self.serial:
log.critical("Unable to connect to the device %s: did you specify the correct IP addres?",
self.serial)
except OSError as exc:
if exc.errno == 113 and self.serial:
self.log.critical("Unable to connect to the device %s: "
"did you specify the correct IP addres?",
self.serial)
sys.exit(-1)
else:
break
@@ -111,7 +116,7 @@ class AndroidExtraction(MVTModule):
def _adb_reconnect(self) -> None:
"""Reconnect to device using adb."""
log.info("Reconnecting ...")
self.log.info("Reconnecting ...")
self._adb_disconnect()
self._adb_connect()
@@ -136,7 +141,9 @@ class AndroidExtraction(MVTModule):
def _adb_root_or_die(self) -> None:
"""Check if we have a `su` binary, otherwise raise an Exception."""
if not self._adb_check_if_root():
raise InsufficientPrivileges("This module is optionally available in case the device is already rooted. Do NOT root your own device!")
raise InsufficientPrivileges("This module is optionally available "
"in case the device is already rooted."
" Do NOT root your own device!")
def _adb_command_as_root(self, command):
"""Execute an adb shell command.
@@ -157,8 +164,6 @@ class AndroidExtraction(MVTModule):
# TODO: Need to support checking files without root privileges as well.
# Connect to the device over adb.
self._adb_connect()
# Check if we have root, if not raise an Exception.
self._adb_root_or_die()
@@ -171,17 +176,19 @@ class AndroidExtraction(MVTModule):
:param remote_path: Path to download from the device
:param local_path: Path to where to locally store the copy of the file
:param progress_callback: Callback for download progress bar (Default value = None)
:param progress_callback: Callback for download progress bar
(Default value = None)
:param retry_root: Default value = True)
"""
try:
self.device.pull(remote_path, local_path, progress_callback)
except AdbCommandFailureException as e:
except AdbCommandFailureException as exc:
if retry_root:
self._adb_download_root(remote_path, local_path, progress_callback)
self._adb_download_root(remote_path, local_path,
progress_callback)
else:
raise Exception(f"Unable to download file {remote_path}: {e}")
raise Exception(f"Unable to download file {remote_path}: {exc}") from exc
def _adb_download_root(self, remote_path: str, local_path: str,
progress_callback: Callable = None) -> None:
@@ -190,27 +197,31 @@ class AndroidExtraction(MVTModule):
self._adb_root_or_die()
# We generate a random temporary filename.
tmp_filename = "tmp_" + ''.join(random.choices(string.ascii_uppercase + string.ascii_lowercase + string.digits, k=10))
allowed_chars = (string.ascii_uppercase
+ string.ascii_lowercase
+ string.digits)
tmp_filename = "tmp_" + ''.join(random.choices(allowed_chars, k=10))
# We create a temporary local file.
new_remote_path = f"/sdcard/{tmp_filename}"
# We copy the file from the data folder to /sdcard/.
cp = self._adb_command_as_root(f"cp {remote_path} {new_remote_path}")
if cp.startswith("cp: ") and "No such file or directory" in cp:
cp_output = self._adb_command_as_root(f"cp {remote_path} {new_remote_path}")
if cp_output.startswith("cp: ") and "No such file or directory" in cp_output:
raise Exception(f"Unable to process file {remote_path}: File not found")
elif cp.startswith("cp: ") and "Permission denied" in cp:
if cp_output.startswith("cp: ") and "Permission denied" in cp_output:
raise Exception(f"Unable to process file {remote_path}: Permission denied")
# We download from /sdcard/ to the local temporary file.
# If it doesn't work now, don't try again (retry_root=False)
self._adb_download(new_remote_path, local_path, retry_root=False)
self._adb_download(new_remote_path, local_path, progress_callback,
retry_root=False)
# Delete the copy on /sdcard/.
self._adb_command(f"rm -rf {new_remote_path}")
except AdbCommandFailureException as e:
raise Exception(f"Unable to download file {remote_path}: {e}")
except AdbCommandFailureException as exc:
raise Exception(f"Unable to download file {remote_path}: {exc}") from exc
def _adb_process_file(self, remote_path: str,
process_routine: Callable) -> None:
@@ -223,7 +234,6 @@ class AndroidExtraction(MVTModule):
"""
# Connect to the device over adb.
self._adb_connect()
# Check if we have root, if not raise an Exception.
self._adb_root_or_die()
@@ -234,10 +244,10 @@ class AndroidExtraction(MVTModule):
new_remote_path = f"/sdcard/Download/{local_name}"
# We copy the file from the data folder to /sdcard/.
cp = self._adb_command_as_root(f"cp {remote_path} {new_remote_path}")
if cp.startswith("cp: ") and "No such file or directory" in cp:
cp_output = self._adb_command_as_root(f"cp {remote_path} {new_remote_path}")
if cp_output.startswith("cp: ") and "No such file or directory" in cp_output:
raise Exception(f"Unable to process file {remote_path}: File not found")
elif cp.startswith("cp: ") and "Permission denied" in cp:
if cp_output.startswith("cp: ") and "Permission denied" in cp_output:
raise Exception(f"Unable to process file {remote_path}: Permission denied")
# We download from /sdcard/ to the local temporary file.
@@ -250,35 +260,41 @@ class AndroidExtraction(MVTModule):
tmp.close()
# Delete the copy on /sdcard/.
self._adb_command(f"rm -f {new_remote_path}")
# Disconnect from the device.
self._adb_disconnect()
def _generate_backup(self, package_name: str) -> bytes:
self.log.warning("Please check phone and accept Android backup prompt. You may need to set a backup password. \a")
self.log.warning("Please check phone and accept Android backup prompt. "
"You may need to set a backup password. \a")
# TODO: Base64 encoding as temporary fix to avoid byte-mangling over the shell transport...
backup_output_b64 = self._adb_command("/system/bin/bu backup -nocompress '{}' | base64".format(
package_name))
# TODO: Base64 encoding as temporary fix to avoid byte-mangling over
# the shell transport...
cmd = f"/system/bin/bu backup -nocompress '{package_name}' | base64"
backup_output_b64 = self._adb_command(cmd)
backup_output = base64.b64decode(backup_output_b64)
header = parse_ab_header(backup_output)
if not header["backup"]:
self.log.error("Extracting SMS via Android backup failed. No valid backup data found.")
return
self.log.error("Extracting SMS via Android backup failed. "
"No valid backup data found.")
return None
if header["encryption"] == "none":
return parse_backup_file(backup_output, password=None)
for password_retry in range(0, 3):
backup_password = Prompt.ask("Enter backup password", password=True)
for _ in range(0, 3):
backup_password = Prompt.ask("Enter backup password",
password=True)
try:
decrypted_backup_tar = parse_backup_file(backup_output, backup_password)
decrypted_backup_tar = parse_backup_file(backup_output,
backup_password)
return decrypted_backup_tar
except InvalidBackupPassword:
self.log.error("You provided the wrong password! Please try again...")
self.log.error("You provided the wrong password! "
"Please try again...")
self.log.warn("All attempts to decrypt backup with password failed!")
return None
def run(self) -> None:
"""Run the main procedure."""
raise NotImplementedError

View File

@@ -6,14 +6,13 @@
import logging
import os
import sqlite3
from typing import Union
from mvt.common.utils import (convert_chrometime_to_unix,
convert_timestamp_to_iso)
from mvt.common.utils import (convert_chrometime_to_datetime,
convert_datetime_to_iso)
from .base import AndroidExtraction
log = logging.getLogger(__name__)
CHROME_HISTORY_PATH = "data/data/com.android.chrome/app_chrome/Default/History"
@@ -22,17 +21,19 @@ class ChromeHistory(AndroidExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
return {
"timestamp": record["isodate"],
"module": self.__class__.__name__,
"event": "visit",
"data": f"{record['id']} - {record['url']} (visit ID: {record['visit_id']}, redirect source: {record['redirect_source']})"
"data": f"{record['id']} - {record['url']} (visit ID: {record['visit_id']}, "
f"redirect source: {record['redirect_source']})"
}
def check_indicators(self) -> None:
@@ -69,18 +70,23 @@ class ChromeHistory(AndroidExtraction):
"url": item[1],
"visit_id": item[2],
"timestamp": item[3],
"isodate": convert_timestamp_to_iso(convert_chrometime_to_unix(item[3])),
"isodate": convert_datetime_to_iso(convert_chrometime_to_datetime(item[3])),
"redirect_source": item[4],
})
cur.close()
conn.close()
log.info("Extracted a total of %d history items", len(self.results))
self.log.info("Extracted a total of %d history items",
len(self.results))
def run(self) -> None:
self._adb_connect()
try:
self._adb_process_file(os.path.join("/", CHROME_HISTORY_PATH),
self._parse_db)
except Exception as e:
self.log.error(e)
except Exception as exc:
self.log.error(exc)
self._adb_disconnect()

View File

@@ -9,15 +9,14 @@ from mvt.android.parsers import parse_dumpsys_accessibility
from .base import AndroidExtraction
log = logging.getLogger(__name__)
class DumpsysAccessibility(AndroidExtraction):
"""This module extracts stats on accessibility."""
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
@@ -41,6 +40,8 @@ class DumpsysAccessibility(AndroidExtraction):
self.results = parse_dumpsys_accessibility(output)
for result in self.results:
log.info("Found installed accessibility service \"%s\"", result.get("service"))
self.log.info("Found installed accessibility service \"%s\"",
result.get("service"))
self.log.info("Identified a total of %d accessibility services", len(self.results))
self.log.info("Identified a total of %d accessibility services",
len(self.results))

View File

@@ -9,15 +9,14 @@ from mvt.android.parsers import parse_dumpsys_activity_resolver_table
from .base import AndroidExtraction
log = logging.getLogger(__name__)
class DumpsysActivities(AndroidExtraction):
"""This module extracts details on receivers for risky activities."""
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)

View File

@@ -4,13 +4,12 @@
# https://license.mvt.re/1.1/
import logging
from typing import Union
from mvt.android.parsers.dumpsys import parse_dumpsys_appops
from .base import AndroidExtraction
log = logging.getLogger(__name__)
class DumpsysAppOps(AndroidExtraction):
"""This module extracts records from App-op Manager."""
@@ -19,12 +18,13 @@ class DumpsysAppOps(AndroidExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
records = []
for perm in record["permissions"]:
if "entries" not in perm:
@@ -36,7 +36,8 @@ class DumpsysAppOps(AndroidExtraction):
"timestamp": entry["timestamp"],
"module": self.__class__.__name__,
"event": entry["access"],
"data": f"{record['package_name']} access to {perm['name']}: {entry['access']}",
"data": f"{record['package_name']} access to "
f"{perm['name']}: {entry['access']}",
})
return records
@@ -51,9 +52,10 @@ class DumpsysAppOps(AndroidExtraction):
continue
for perm in result["permissions"]:
if perm["name"] == "REQUEST_INSTALL_PACKAGES" and perm["access"] == "allow":
self.log.info("Package %s with REQUEST_INSTALL_PACKAGES permission",
result["package_name"])
if (perm["name"] == "REQUEST_INSTALL_PACKAGES"
and perm["access"] == "allow"):
self.log.info("Package %s with REQUEST_INSTALL_PACKAGES "
"permission", result["package_name"])
def run(self) -> None:
self._adb_connect()

View File

@@ -4,30 +4,31 @@
# https://license.mvt.re/1.1/
import logging
from typing import Union
from mvt.android.parsers import parse_dumpsys_battery_daily
from .base import AndroidExtraction
log = logging.getLogger(__name__)
class DumpsysBatteryDaily(AndroidExtraction):
"""This module extracts records from battery daily updates."""
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
return {
"timestamp": record["from"],
"module": self.__class__.__name__,
"event": "battery_daily",
"data": f"Recorded update of package {record['package_name']} with vers {record['vers']}"
"data": f"Recorded update of package {record['package_name']} "
f"with vers {record['vers']}"
}
def check_indicators(self) -> None:

View File

@@ -9,15 +9,14 @@ from mvt.android.parsers import parse_dumpsys_battery_history
from .base import AndroidExtraction
log = logging.getLogger(__name__)
class DumpsysBatteryHistory(AndroidExtraction):
"""This module extracts records from battery history events."""
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
@@ -40,4 +39,5 @@ class DumpsysBatteryHistory(AndroidExtraction):
self.results = parse_dumpsys_battery_history(output)
self.log.info("Extracted %d records from battery history", len(self.results))
self.log.info("Extracted %d records from battery history",
len(self.results))

View File

@@ -9,8 +9,6 @@ from mvt.android.parsers import parse_dumpsys_dbinfo
from .base import AndroidExtraction
log = logging.getLogger(__name__)
class DumpsysDBInfo(AndroidExtraction):
"""This module extracts records from battery daily updates."""
@@ -19,7 +17,8 @@ class DumpsysDBInfo(AndroidExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)

View File

@@ -8,15 +8,14 @@ import os
from .base import AndroidExtraction
log = logging.getLogger(__name__)
class DumpsysFull(AndroidExtraction):
"""This module extracts stats on battery consumption by processes."""
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
@@ -30,6 +29,6 @@ class DumpsysFull(AndroidExtraction):
with open(output_path, "w", encoding="utf-8") as handle:
handle.write(output)
log.info("Full dumpsys output stored at %s", output_path)
self.log.info("Full dumpsys output stored at %s", output_path)
self._adb_disconnect()

View File

@@ -9,8 +9,6 @@ from mvt.android.parsers import parse_dumpsys_receiver_resolver_table
from .base import AndroidExtraction
log = logging.getLogger(__name__)
INTENT_NEW_OUTGOING_SMS = "android.provider.Telephony.NEW_OUTGOING_SMS"
INTENT_SMS_RECEIVED = "android.provider.Telephony.SMS_RECEIVED"
INTENT_DATA_SMS_RECEIVED = "android.intent.action.DATA_SMS_RECEIVED"
@@ -23,7 +21,8 @@ class DumpsysReceivers(AndroidExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
@@ -37,26 +36,31 @@ class DumpsysReceivers(AndroidExtraction):
for intent, receivers in self.results.items():
for receiver in receivers:
if intent == INTENT_NEW_OUTGOING_SMS:
self.log.info("Found a receiver to intercept outgoing SMS messages: \"%s\"",
self.log.info("Found a receiver to intercept "
"outgoing SMS messages: \"%s\"",
receiver["receiver"])
elif intent == INTENT_SMS_RECEIVED:
self.log.info("Found a receiver to intercept incoming SMS messages: \"%s\"",
self.log.info("Found a receiver to intercept "
"incoming SMS messages: \"%s\"",
receiver["receiver"])
elif intent == INTENT_DATA_SMS_RECEIVED:
self.log.info("Found a receiver to intercept incoming data SMS message: \"%s\"",
self.log.info("Found a receiver to intercept "
"incoming data SMS message: \"%s\"",
receiver["receiver"])
elif intent == INTENT_PHONE_STATE:
self.log.info("Found a receiver monitoring telephony state/incoming calls: \"%s\"",
self.log.info("Found a receiver monitoring "
"telephony state/incoming calls: \"%s\"",
receiver["receiver"])
elif intent == INTENT_NEW_OUTGOING_CALL:
self.log.info("Found a receiver monitoring outgoing calls: \"%s\"",
self.log.info("Found a receiver monitoring "
"outgoing calls: \"%s\"",
receiver["receiver"])
ioc = self.indicators.check_app_id(receiver["package_name"])
if ioc:
receiver["matched_indicator"] = ioc
self.detected.append({intent: receiver})
continue
ioc = self.indicators.check_app_id(receiver["package_name"])
if ioc:
receiver["matched_indicator"] = ioc
self.detected.append({intent: receiver})
continue
def run(self) -> None:
self._adb_connect()

View File

@@ -3,17 +3,15 @@
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/
import datetime
import logging
import os
import stat
from typing import Union
from mvt.common.utils import convert_timestamp_to_iso
from mvt.common.utils import convert_unix_to_iso
from .base import AndroidExtraction
log = logging.getLogger(__name__)
ANDROID_TMP_FOLDERS = [
"/tmp/",
"/data/local/tmp/",
@@ -29,13 +27,14 @@ class Files(AndroidExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
self.full_find = False
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
if "modified_time" in record:
return {
"timestamp": record["modified_time"],
@@ -44,14 +43,17 @@ class Files(AndroidExtraction):
"data": record["path"],
}
return None
def check_indicators(self) -> None:
for result in self.results:
if result.get("is_suid"):
self.log.warning("Found an SUID file in a non-standard directory \"%s\".",
result["path"])
self.log.warning("Found an SUID file in a non-standard "
"directory \"%s\".", result["path"])
if self.indicators and self.indicators.check_file_path(result["path"]):
self.log.warning("Found a known suspicous file at path: \"%s\"", result["path"])
self.log.warning("Found a known suspicous file at path: \"%s\"",
result["path"])
self.detected.append(result)
def backup_file(self, file_path: str) -> None:
@@ -73,11 +75,13 @@ class Files(AndroidExtraction):
def find_files(self, folder: str) -> None:
if self.full_find:
output = self._adb_command(f"find '{folder}' -type f -printf '%T@ %m %s %u %g %p\n' 2> /dev/null")
cmd = f"find '{folder}' -type f -printf '%T@ %m %s %u %g %p\n' 2> /dev/null"
output = self._adb_command(cmd)
for file_line in output.splitlines():
[unix_timestamp, mode, size, owner, group, full_path] = file_line.rstrip().split(" ", 5)
mod_time = convert_timestamp_to_iso(datetime.datetime.utcfromtimestamp(int(float(unix_timestamp))))
[unix_timestamp, mode, size,
owner, group, full_path] = file_line.rstrip().split(" ", 5)
mod_time = convert_unix_to_iso(unix_timestamp)
self.results.append({
"path": full_path,
@@ -97,7 +101,8 @@ class Files(AndroidExtraction):
def run(self) -> None:
self._adb_connect()
output = self._adb_command("find '/' -maxdepth 1 -printf '%T@ %m %s %u %g %p\n' 2> /dev/null")
cmd = "find '/' -maxdepth 1 -printf '%T@ %m %s %u %g %p\n' 2> /dev/null"
output = self._adb_command(cmd)
if output or output.strip().splitlines():
self.full_find = True
@@ -119,7 +124,8 @@ class Files(AndroidExtraction):
if self.fast_mode:
self.log.info("Flag --fast was enabled: skipping full file listing")
else:
self.log.info("Processing full file listing. This may take a while...")
self.log.info("Processing full file listing. "
"This may take a while...")
self.find_files("/")
self.log.info("Found %s total files", len(self.results))

View File

@@ -10,15 +10,14 @@ from mvt.android.parsers import parse_getprop
from .base import AndroidExtraction
log = logging.getLogger(__name__)
class Getprop(AndroidExtraction):
"""This module extracts device properties from getprop command."""
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
@@ -37,7 +36,9 @@ class Getprop(AndroidExtraction):
if security_patch:
patch_date = datetime.strptime(security_patch, "%Y-%m-%d")
if (datetime.now() - patch_date) > timedelta(days=6*30):
self.log.warning("This phone has not received security updates for more than "
"six months (last update: %s)", security_patch)
self.log.warning("This phone has not received security updates "
"for more than six months (last update: %s)",
security_patch)
self.log.info("Extracted %d Android system properties", len(self.results))
self.log.info("Extracted %d Android system properties",
len(self.results))

View File

@@ -8,15 +8,14 @@ import os
from .base import AndroidExtraction
log = logging.getLogger(__name__)
class Logcat(AndroidExtraction):
"""This module extracts details on installed packages."""
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
@@ -35,15 +34,15 @@ class Logcat(AndroidExtraction):
with open(logcat_path, "w", encoding="utf-8") as handle:
handle.write(output)
log.info("Current logcat logs stored at %s",
logcat_path)
self.log.info("Current logcat logs stored at %s",
logcat_path)
logcat_last_path = os.path.join(self.results_path,
"logcat_last.txt")
with open(logcat_last_path, "w", encoding="utf-8") as handle:
handle.write(last_output)
log.info("Logcat logs prior to last reboot stored at %s",
logcat_last_path)
self.log.info("Logcat logs prior to last reboot stored at %s",
logcat_last_path)
self._adb_disconnect()

View File

@@ -4,6 +4,7 @@
# https://license.mvt.re/1.1/
import logging
from typing import Union
from rich.console import Console
from rich.progress import track
@@ -14,8 +15,6 @@ from mvt.common.virustotal import VTNoKey, VTQuotaExceeded, virustotal_lookup
from .base import AndroidExtraction
log = logging.getLogger(__name__)
DANGEROUS_PERMISSIONS_THRESHOLD = 10
DANGEROUS_PERMISSIONS = [
"android.permission.ACCESS_COARSE_LOCATION",
@@ -74,26 +73,37 @@ class Packages(AndroidExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
records = []
timestamps = [
{"event": "package_install", "timestamp": record["timestamp"]},
{"event": "package_first_install", "timestamp": record["first_install_time"]},
{"event": "package_last_update", "timestamp": record["last_update_time"]},
{
"event": "package_install",
"timestamp": record["timestamp"]
},
{
"event": "package_first_install",
"timestamp": record["first_install_time"]
},
{
"event": "package_last_update",
"timestamp": record["last_update_time"]
},
]
for ts in timestamps:
for timestamp in timestamps:
records.append({
"timestamp": ts["timestamp"],
"timestamp": timestamp["timestamp"],
"module": self.__class__.__name__,
"event": ts["event"],
"data": f"{record['package_name']} (system: {record['system']}, third party: {record['third_party']})",
"event": timestamp["event"],
"data": f"{record['package_name']} (system: {record['system']},"
f" third party: {record['third_party']})",
})
return records
@@ -101,7 +111,8 @@ class Packages(AndroidExtraction):
def check_indicators(self) -> None:
for result in self.results:
if result["package_name"] in ROOT_PACKAGES:
self.log.warning("Found an installed package related to rooting/jailbreaking: \"%s\"",
self.log.warning("Found an installed package related to "
"rooting/jailbreaking: \"%s\"",
result["package_name"])
self.detected.append(result)
continue
@@ -132,14 +143,14 @@ class Packages(AndroidExtraction):
total_hashes = len(hashes)
detections = {}
for i in track(range(total_hashes), description=f"Looking up {total_hashes} files..."):
progress_desc = f"Looking up {total_hashes} files..."
for i in track(range(total_hashes), description=progress_desc):
try:
results = virustotal_lookup(hashes[i])
except VTNoKey as e:
log.info(e)
except VTNoKey:
return
except VTQuotaExceeded as e:
log.error("Unable to continue: %s", e)
except VTQuotaExceeded as exc:
print("Unable to continue: %s", exc)
break
if not results:
@@ -224,10 +235,10 @@ class Packages(AndroidExtraction):
for file_path in output.splitlines():
file_path = file_path.strip()
md5 = self._adb_command(f"md5sum {file_path}").split(" ")[0]
sha1 = self._adb_command(f"sha1sum {file_path}").split(" ")[0]
sha256 = self._adb_command(f"sha256sum {file_path}").split(" ")[0]
sha512 = self._adb_command(f"sha512sum {file_path}").split(" ")[0]
md5 = self._adb_command(f"md5sum {file_path}").split(" ", maxsplit=1)[0]
sha1 = self._adb_command(f"sha1sum {file_path}").split(" ", maxsplit=1)[0]
sha256 = self._adb_command(f"sha256sum {file_path}").split(" ", maxsplit=1)[0]
sha512 = self._adb_command(f"sha512sum {file_path}").split(" ", maxsplit=1)[0]
package_files.append({
"path": file_path,
@@ -304,8 +315,10 @@ class Packages(AndroidExtraction):
dangerous_permissions_count += 1
if dangerous_permissions_count >= DANGEROUS_PERMISSIONS_THRESHOLD:
self.log.info("Third-party package \"%s\" requested %d potentially dangerous permissions",
result["package_name"], dangerous_permissions_count)
self.log.info("Third-party package \"%s\" requested %d "
"potentially dangerous permissions",
result["package_name"],
dangerous_permissions_count)
packages_to_lookup = []
for result in self.results:
@@ -313,8 +326,9 @@ class Packages(AndroidExtraction):
continue
packages_to_lookup.append(result)
self.log.info("Found non-system package with name \"%s\" installed by \"%s\" on %s",
result["package_name"], result["installer"], result["timestamp"])
self.log.info("Found non-system package with name \"%s\" installed "
"by \"%s\" on %s", result["package_name"],
result["installer"], result["timestamp"])
if not self.fast_mode:
self.check_virustotal(packages_to_lookup)

View File

@@ -7,15 +7,14 @@ import logging
from .base import AndroidExtraction
log = logging.getLogger(__name__)
class Processes(AndroidExtraction):
"""This module extracts details on running processes."""
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
@@ -63,4 +62,5 @@ class Processes(AndroidExtraction):
self._adb_disconnect()
log.info("Extracted records on a total of %d processes", len(self.results))
self.log.info("Extracted records on a total of %d processes",
len(self.results))

View File

@@ -7,15 +7,14 @@ import logging
from .base import AndroidExtraction
log = logging.getLogger(__name__)
class RootBinaries(AndroidExtraction):
"""This module extracts the list of installed packages."""
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)

View File

@@ -7,8 +7,6 @@ import logging
from .base import AndroidExtraction
log = logging.getLogger(__name__)
class SELinuxStatus(AndroidExtraction):
"""This module checks if SELinux is being enforced."""
@@ -17,7 +15,8 @@ class SELinuxStatus(AndroidExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)

View File

@@ -7,9 +7,6 @@ import logging
from .base import AndroidExtraction
log = logging.getLogger(__name__)
ANDROID_DANGEROUS_SETTINGS = [
{
"description": "disabled Google Play Services apps verification",
@@ -64,7 +61,8 @@ class Settings(AndroidExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
@@ -72,7 +70,7 @@ class Settings(AndroidExtraction):
self.results = {} if not results else results
def check_indicators(self) -> None:
for namespace, settings in self.results.items():
for _, settings in self.results.items():
for key, value in settings.items():
for danger in ANDROID_DANGEROUS_SETTINGS:
# Check if one of the dangerous settings is using an unsafe

View File

@@ -6,16 +6,15 @@
import logging
import os
import sqlite3
from typing import Union
from mvt.android.parsers.backup import (AndroidBackupParsingError,
parse_tar_for_sms)
from mvt.common.module import InsufficientPrivileges
from mvt.common.utils import check_for_links, convert_timestamp_to_iso
from mvt.common.utils import check_for_links, convert_unix_to_iso
from .base import AndroidExtraction
log = logging.getLogger(__name__)
SMS_BUGLE_PATH = "data/data/com.google.android.apps.messaging/databases/bugle_db"
SMS_BUGLE_QUERY = """
SELECT
@@ -48,12 +47,15 @@ class SMS(AndroidExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
self.sms_db_type = 0
def serialize(self, record: dict) -> Union[dict, list]:
body = record["body"].replace("\n", "\\n")
return {
"timestamp": record["isodate"],
@@ -84,9 +86,9 @@ class SMS(AndroidExtraction):
conn = sqlite3.connect(db_path)
cur = conn.cursor()
if self.SMS_DB_TYPE == 1:
if self.sms_db_type == 1:
cur.execute(SMS_BUGLE_QUERY)
elif self.SMS_DB_TYPE == 2:
elif self.sms_db_type == 2:
cur.execute(SMS_MMSMS_QUERY)
names = [description[0] for description in cur.description]
@@ -97,7 +99,7 @@ class SMS(AndroidExtraction):
message[names[index]] = value
message["direction"] = ("received" if message["incoming"] == 1 else "sent")
message["isodate"] = convert_timestamp_to_iso(message["timestamp"])
message["isodate"] = convert_unix_to_iso(message["timestamp"])
# If we find links in the messages or if they are empty we add
# them to the list of results.
@@ -107,11 +109,12 @@ class SMS(AndroidExtraction):
cur.close()
conn.close()
log.info("Extracted a total of %d SMS messages containing links", len(self.results))
self.log.info("Extracted a total of %d SMS messages containing links",
len(self.results))
def _extract_sms_adb(self) -> None:
"""Use the Android backup command to extract SMS data from the native SMS
app.
"""Use the Android backup command to extract SMS data from the native
SMS app.
It is crucial to use the under-documented "-nocompress" flag to disable
the non-standard Java compression algorithm. This module only supports
@@ -124,24 +127,34 @@ class SMS(AndroidExtraction):
try:
self.results = parse_tar_for_sms(backup_tar)
except AndroidBackupParsingError:
self.log.info("Impossible to read SMS from the Android Backup, please extract "
"the SMS and try extracting it with Android Backup Extractor")
self.log.info("Impossible to read SMS from the Android Backup, "
"please extract the SMS and try extracting it with "
"Android Backup Extractor")
return
log.info("Extracted a total of %d SMS messages containing links", len(self.results))
self.log.info("Extracted a total of %d SMS messages containing links",
len(self.results))
def run(self) -> None:
self._adb_connect()
try:
if (self._adb_check_file_exists(os.path.join("/", SMS_BUGLE_PATH))):
self.SMS_DB_TYPE = 1
self._adb_process_file(os.path.join("/", SMS_BUGLE_PATH), self._parse_db)
elif (self._adb_check_file_exists(os.path.join("/", SMS_MMSSMS_PATH))):
self.SMS_DB_TYPE = 2
self._adb_process_file(os.path.join("/", SMS_MMSSMS_PATH), self._parse_db)
if self._adb_check_file_exists(os.path.join("/", SMS_BUGLE_PATH)):
self.sms_db_type = 1
self._adb_process_file(os.path.join("/", SMS_BUGLE_PATH),
self._parse_db)
elif self._adb_check_file_exists(os.path.join("/", SMS_MMSSMS_PATH)):
self.sms_db_type = 2
self._adb_process_file(os.path.join("/", SMS_MMSSMS_PATH),
self._parse_db)
self._adb_disconnect()
return
except InsufficientPrivileges:
pass
self.log.warn("No SMS database found. Trying extraction of SMS data using "
"Android backup feature.")
self.log.warn("No SMS database found. Trying extraction of SMS data "
"using Android backup feature.")
self._extract_sms_adb()
self._adb_disconnect()

View File

@@ -7,13 +7,12 @@ import base64
import logging
import os
import sqlite3
from typing import Union
from mvt.common.utils import check_for_links, convert_timestamp_to_iso
from mvt.common.utils import check_for_links, convert_unix_to_iso
from .base import AndroidExtraction
log = logging.getLogger(__name__)
WHATSAPP_PATH = "data/data/com.whatsapp/databases/msgstore.db"
@@ -22,12 +21,13 @@ class Whatsapp(AndroidExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
text = record["data"].replace("\n", "\\n")
return {
"timestamp": record["isodate"],
@@ -71,22 +71,30 @@ class Whatsapp(AndroidExtraction):
continue
message["direction"] = ("send" if message["key_from_me"] == 1 else "received")
message["isodate"] = convert_timestamp_to_iso(message["timestamp"])
message["isodate"] = convert_unix_to_iso(message["timestamp"])
# If we find links in the messages or if they are empty we add them to the list.
# If we find links in the messages or if they are empty we add them
# to the list.
if check_for_links(message["data"]) or message["data"].strip() == "":
if (message.get('thumb_image') is not None):
message['thumb_image'] = base64.b64encode(message['thumb_image'])
if message.get("thumb_image"):
message["thumb_image"] = base64.b64encode(message["thumb_image"])
messages.append(message)
cur.close()
conn.close()
log.info("Extracted a total of %d WhatsApp messages containing links", len(messages))
self.log.info("Extracted a total of %d WhatsApp messages "
"containing links", len(messages))
self.results = messages
def run(self) -> None:
self._adb_connect()
try:
self._adb_process_file(os.path.join("/", WHATSAPP_PATH), self._parse_db)
except Exception as e:
self.log.error(e)
self._adb_process_file(os.path.join("/", WHATSAPP_PATH),
self._parse_db)
except Exception as exc:
self.log.error(exc)
self._adb_disconnect()

View File

@@ -4,6 +4,7 @@
# https://license.mvt.re/1.1/
import fnmatch
import logging
import os
from tarfile import TarFile
@@ -12,7 +13,19 @@ from mvt.common.module import MVTModule
class BackupExtraction(MVTModule):
"""This class provides a base for all backup extractios modules"""
ab = None
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
self.ab = None
self.backup_path = None
self.tar = None
self.files = []
def from_folder(self, backup_path: str, files: list) -> None:
"""

View File

@@ -12,7 +12,8 @@ from mvt.android.parsers.backup import parse_sms_file
class SMS(BackupExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
@@ -30,15 +31,17 @@ class SMS(BackupExtraction):
self.detected.append(message)
def run(self) -> None:
for file in self._get_files_by_pattern("apps/com.android.providers.telephony/d_f/*_sms_backup"):
sms_path = "apps/com.android.providers.telephony/d_f/*_sms_backup"
for file in self._get_files_by_pattern(sms_path):
self.log.info("Processing SMS backup file at %s", file)
data = self._get_file_content(file)
self.results.extend(parse_sms_file(data))
for file in self._get_files_by_pattern("apps/com.android.providers.telephony/d_f/*_mms_backup"):
mms_path = "apps/com.android.providers.telephony/d_f/*_mms_backup"
for file in self._get_files_by_pattern(mms_path):
self.log.info("Processing MMS backup file at %s", file)
data = self._get_file_content(file)
self.results.extend(parse_sms_file(data))
self.log.info("Extracted a total of %d SMS & MMS messages containing links",
len(self.results))
self.log.info("Extracted a total of %d SMS & MMS messages "
"containing links", len(self.results))

View File

@@ -9,15 +9,14 @@ from mvt.android.parsers import parse_dumpsys_accessibility
from .base import BugReportModule
log = logging.getLogger(__name__)
class Accessibility(BugReportModule):
"""This module extracts stats on accessibility."""
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
@@ -36,7 +35,8 @@ class Accessibility(BugReportModule):
def run(self) -> None:
content = self._get_dumpstate_file()
if not content:
self.log.error("Unable to find dumpstate file. Did you provide a valid bug report archive?")
self.log.error("Unable to find dumpstate file. Did you provide a "
"valid bug report archive?")
return
lines = []
@@ -56,6 +56,8 @@ class Accessibility(BugReportModule):
self.results = parse_dumpsys_accessibility("\n".join(lines))
for result in self.results:
log.info("Found installed accessibility service \"%s\"", result.get("service"))
self.log.info("Found installed accessibility service \"%s\"",
result.get("service"))
self.log.info("Identified a total of %d accessibility services", len(self.results))
self.log.info("Identified a total of %d accessibility services",
len(self.results))

View File

@@ -9,15 +9,14 @@ from mvt.android.parsers import parse_dumpsys_activity_resolver_table
from .base import BugReportModule
log = logging.getLogger(__name__)
class Activities(BugReportModule):
"""This module extracts details on receivers for risky activities."""
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
@@ -39,7 +38,8 @@ class Activities(BugReportModule):
def run(self) -> None:
content = self._get_dumpstate_file()
if not content:
self.log.error("Unable to find dumpstate file. Did you provide a valid bug report archive?")
self.log.error("Unable to find dumpstate file. Did you provide a "
"valid bug report archive?")
return
lines = []

View File

@@ -4,25 +4,25 @@
# https://license.mvt.re/1.1/
import logging
from typing import Union
from mvt.android.parsers import parse_dumpsys_appops
from .base import BugReportModule
log = logging.getLogger(__name__)
class Appops(BugReportModule):
"""This module extracts information on package from App-Ops Manager."""
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
records = []
for perm in record["permissions"]:
if "entries" not in perm:
@@ -34,7 +34,8 @@ class Appops(BugReportModule):
"timestamp": entry["timestamp"],
"module": self.__class__.__name__,
"event": entry["access"],
"data": f"{record['package_name']} access to {perm['name']}: {entry['access']}",
"data": f"{record['package_name']} access to "
f"{perm['name']}: {entry['access']}",
})
return records
@@ -49,13 +50,16 @@ class Appops(BugReportModule):
continue
for perm in result["permissions"]:
if perm["name"] == "REQUEST_INSTALL_PACKAGES" and perm["access"] == "allow":
self.log.info("Package %s with REQUEST_INSTALL_PACKAGES permission", result["package_name"])
if (perm["name"] == "REQUEST_INSTALL_PACKAGES"
and perm["access"] == "allow"):
self.log.info("Package %s with REQUEST_INSTALL_PACKAGES permission",
result["package_name"])
def run(self) -> None:
content = self._get_dumpstate_file()
if not content:
self.log.error("Unable to find dumpstate file. Did you provide a valid bug report archive?")
self.log.error("Unable to find dumpstate file. Did you provide a "
"valid bug report archive?")
return
lines = []

View File

@@ -10,13 +10,22 @@ from zipfile import ZipFile
from mvt.common.module import MVTModule
log = logging.getLogger(__name__)
class BugReportModule(MVTModule):
"""This class provides a base for all Android Bug Report modules."""
zip_archive = None
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
self.zip_archive = None
self.extract_path = None
self.extract_files = []
self.zip_files = []
def from_folder(self, extract_path: str, extract_files: str) -> None:
self.extract_path = extract_path

View File

@@ -4,30 +4,31 @@
# https://license.mvt.re/1.1/
import logging
from typing import Union
from mvt.android.parsers import parse_dumpsys_battery_daily
from .base import BugReportModule
log = logging.getLogger(__name__)
class BatteryDaily(BugReportModule):
"""This module extracts records from battery daily updates."""
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
return {
"timestamp": record["from"],
"module": self.__class__.__name__,
"event": "battery_daily",
"data": f"Recorded update of package {record['package_name']} with vers {record['vers']}"
"data": f"Recorded update of package {record['package_name']} "
f"with vers {record['vers']}"
}
def check_indicators(self) -> None:
@@ -44,7 +45,8 @@ class BatteryDaily(BugReportModule):
def run(self) -> None:
content = self._get_dumpstate_file()
if not content:
self.log.error("Unable to find dumpstate file. Did you provide a valid bug report archive?")
self.log.error("Unable to find dumpstate file. Did you provide a "
"valid bug report archive?")
return
lines = []

View File

@@ -9,15 +9,14 @@ from mvt.android.parsers import parse_dumpsys_battery_history
from .base import BugReportModule
log = logging.getLogger(__name__)
class BatteryHistory(BugReportModule):
"""This module extracts records from battery daily updates."""
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
@@ -36,7 +35,8 @@ class BatteryHistory(BugReportModule):
def run(self) -> None:
content = self._get_dumpstate_file()
if not content:
self.log.error("Unable to find dumpstate file. Did you provide a valid bug report archive?")
self.log.error("Unable to find dumpstate file. Did you provide "
"a valid bug report archive?")
return
lines = []

View File

@@ -9,8 +9,6 @@ from mvt.android.parsers import parse_dumpsys_dbinfo
from .base import BugReportModule
log = logging.getLogger(__name__)
class DBInfo(BugReportModule):
"""This module extracts records from battery daily updates."""
@@ -19,7 +17,8 @@ class DBInfo(BugReportModule):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
@@ -40,7 +39,8 @@ class DBInfo(BugReportModule):
def run(self) -> None:
content = self._get_dumpstate_file()
if not content:
self.log.error("Unable to find dumpstate file. Did you provide a valid bug report archive?")
self.log.error("Unable to find dumpstate file. Did you provide a "
"valid bug report archive?")
return
in_dbinfo = False

View File

@@ -10,15 +10,14 @@ from mvt.android.parsers import parse_getprop
from .base import BugReportModule
log = logging.getLogger(__name__)
class Getprop(BugReportModule):
"""This module extracts device properties from getprop command."""
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
@@ -28,7 +27,8 @@ class Getprop(BugReportModule):
def run(self) -> None:
content = self._get_dumpstate_file()
if not content:
self.log.error("Unable to find dumpstate file. Did you provide a valid bug report archive?")
self.log.error("Unable to find dumpstate file. Did you provide a "
"valid bug report archive?")
return
lines = []
@@ -53,7 +53,9 @@ class Getprop(BugReportModule):
if security_patch:
patch_date = datetime.strptime(security_patch, "%Y-%m-%d")
if (datetime.now() - patch_date) > timedelta(days=6*30):
self.log.warning("This phone has not received security updates for more than "
"six months (last update: %s)", security_patch)
self.log.warning("This phone has not received security updates "
"for more than six months (last update: %s)",
security_patch)
self.log.info("Extracted %d Android system properties", len(self.results))
self.log.info("Extracted %d Android system properties",
len(self.results))

View File
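A small sketch of the staleness check performed on the security patch level above, assuming the value comes from the standard `ro.build.version.security_patch` property (the property name is not visible in this hunk); the sample date is made up.

```python
from datetime import datetime, timedelta

# Hypothetical value of the device's security patch level property.
security_patch = "2021-11-01"

patch_date = datetime.strptime(security_patch, "%Y-%m-%d")
if (datetime.now() - patch_date) > timedelta(days=6 * 30):
    print("This phone has not received security updates for more than "
          f"six months (last update: {security_patch})")
```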

@@ -5,6 +5,7 @@
import logging
import re
from typing import Union
from mvt.android.modules.adb.packages import (DANGEROUS_PERMISSIONS,
DANGEROUS_PERMISSIONS_THRESHOLD,
@@ -12,33 +13,41 @@ from mvt.android.modules.adb.packages import (DANGEROUS_PERMISSIONS,
from .base import BugReportModule
log = logging.getLogger(__name__)
class Packages(BugReportModule):
"""This module extracts details on receivers for risky activities."""
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
records = []
timestamps = [
{"event": "package_install", "timestamp": record["timestamp"]},
{"event": "package_first_install", "timestamp": record["first_install_time"]},
{"event": "package_last_update", "timestamp": record["last_update_time"]},
{
"event": "package_install",
"timestamp": record["timestamp"]
},
{
"event": "package_first_install",
"timestamp": record["first_install_time"]
},
{
"event": "package_last_update",
"timestamp": record["last_update_time"]
},
]
for ts in timestamps:
for timestamp in timestamps:
records.append({
"timestamp": ts["timestamp"],
"timestamp": timestamp["timestamp"],
"module": self.__class__.__name__,
"event": ts["event"],
"event": timestamp["event"],
"data": f"Install or update of package {record['package_name']}",
})
@@ -47,7 +56,8 @@ class Packages(BugReportModule):
def check_indicators(self) -> None:
for result in self.results:
if result["package_name"] in ROOT_PACKAGES:
self.log.warning("Found an installed package related to rooting/jailbreaking: \"%s\"",
self.log.warning("Found an installed package related to "
"rooting/jailbreaking: \"%s\"",
result["package_name"])
self.detected.append(result)
continue
@@ -147,7 +157,8 @@ class Packages(BugReportModule):
def run(self) -> None:
content = self._get_dumpstate_file()
if not content:
self.log.error("Unable to find dumpstate file. Did you provide a valid bug report archive?")
self.log.error("Unable to find dumpstate file. Did you provide a "
"valid bug report archive?")
return
in_package = False
@@ -182,7 +193,8 @@ class Packages(BugReportModule):
dangerous_permissions_count += 1
if dangerous_permissions_count >= DANGEROUS_PERMISSIONS_THRESHOLD:
self.log.info("Found package \"%s\" requested %d potentially dangerous permissions",
result["package_name"], dangerous_permissions_count)
self.log.info("Found package \"%s\" requested %d potentially "
"dangerous permissions", result["package_name"],
dangerous_permissions_count)
self.log.info("Extracted details on %d packages", len(self.results))

View File

@@ -9,8 +9,6 @@ from mvt.android.parsers import parse_dumpsys_receiver_resolver_table
from .base import BugReportModule
log = logging.getLogger(__name__)
INTENT_NEW_OUTGOING_SMS = "android.provider.Telephony.NEW_OUTGOING_SMS"
INTENT_SMS_RECEIVED = "android.provider.Telephony.SMS_RECEIVED"
INTENT_DATA_SMS_RECEIVED = "android.intent.action.DATA_SMS_RECEIVED"
@@ -23,7 +21,8 @@ class Receivers(BugReportModule):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
@@ -37,31 +36,37 @@ class Receivers(BugReportModule):
for intent, receivers in self.results.items():
for receiver in receivers:
if intent == INTENT_NEW_OUTGOING_SMS:
self.log.info("Found a receiver to intercept outgoing SMS messages: \"%s\"",
self.log.info("Found a receiver to intercept "
"outgoing SMS messages: \"%s\"",
receiver["receiver"])
elif intent == INTENT_SMS_RECEIVED:
self.log.info("Found a receiver to intercept incoming SMS messages: \"%s\"",
self.log.info("Found a receiver to intercept "
"incoming SMS messages: \"%s\"",
receiver["receiver"])
elif intent == INTENT_DATA_SMS_RECEIVED:
self.log.info("Found a receiver to intercept incoming data SMS message: \"%s\"",
self.log.info("Found a receiver to intercept "
"incoming data SMS message: \"%s\"",
receiver["receiver"])
elif intent == INTENT_PHONE_STATE:
self.log.info("Found a receiver monitoring telephony state/incoming calls: \"%s\"",
self.log.info("Found a receiver monitoring "
"telephony state/incoming calls: \"%s\"",
receiver["receiver"])
elif intent == INTENT_NEW_OUTGOING_CALL:
self.log.info("Found a receiver monitoring outgoing calls: \"%s\"",
self.log.info("Found a receiver monitoring "
"outgoing calls: \"%s\"",
receiver["receiver"])
ioc = self.indicators.check_app_id(receiver["package_name"])
if ioc:
receiver["matched_indicator"] = ioc
self.detected.append({intent: receiver})
continue
ioc = self.indicators.check_app_id(receiver["package_name"])
if ioc:
receiver["matched_indicator"] = ioc
self.detected.append({intent: receiver})
continue
def run(self) -> None:
content = self._get_dumpstate_file()
if not content:
self.log.error("Unable to find dumpstate file. Did you provide a valid bug report archive?")
self.log.error("Unable to find dumpstate file. Did you provide a "
"valid bug report archive?")
return
in_receivers = False

View File

@@ -3,7 +3,6 @@
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/
import datetime
import io
import json
import tarfile
@@ -13,7 +12,7 @@ from cryptography.hazmat.primitives import hashes, padding
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
from mvt.common.utils import check_for_links, convert_timestamp_to_iso
from mvt.common.utils import check_for_links, convert_unix_to_iso
PBKDF2_KEY_SIZE = 32
@@ -30,6 +29,8 @@ class InvalidBackupPassword(AndroidBackupParsingError):
pass
# TODO: Need to clean all the following code and conform it to the coding style.
def to_utf8_bytes(input_bytes):
output = []
for byte in input_bytes:
@@ -49,7 +50,7 @@ def parse_ab_header(data):
'encryption': "none", 'version': 4}
"""
if data.startswith(b"ANDROID BACKUP"):
[magic_header, version, is_compressed, encryption, tar_data] = data.split(b"\n", 4)
[_, version, is_compressed, encryption, _] = data.split(b"\n", 4)
return {
"backup": True,
"compression": (is_compressed == b"1"),
@@ -65,13 +66,15 @@ def parse_ab_header(data):
}
def decrypt_master_key(password, user_salt, user_iv, pbkdf2_rounds, master_key_blob, format_version, checksum_salt):
def decrypt_master_key(password, user_salt, user_iv, pbkdf2_rounds,
master_key_blob, format_version, checksum_salt):
"""Generate AES key from user password uisng PBKDF2
The backup master key is extracted from the master key blog after decryption.
"""
# Derive key from password using PBKDF2.
kdf = PBKDF2HMAC(algorithm=hashes.SHA1(), length=32, salt=user_salt, iterations=pbkdf2_rounds)
kdf = PBKDF2HMAC(algorithm=hashes.SHA1(), length=32, salt=user_salt,
iterations=pbkdf2_rounds)
key = kdf.derive(password.encode("utf-8"))
# Decrypt master key blob.
@@ -90,8 +93,8 @@ def decrypt_master_key(password, user_salt, user_iv, pbkdf2_rounds, master_key_b
master_key_checksum_length = ord(key_blob.read(1))
master_key_checksum = key_blob.read(master_key_checksum_length)
except TypeError:
raise InvalidBackupPassword()
except TypeError as exc:
raise InvalidBackupPassword() from exc
# Handle quirky encoding of master key bytes in Android original Java crypto code.
if format_version > 1:
@@ -100,7 +103,8 @@ def decrypt_master_key(password, user_salt, user_iv, pbkdf2_rounds, master_key_b
hmac_mk = master_key
# Derive checksum to confirm successful backup decryption.
kdf = PBKDF2HMAC(algorithm=hashes.SHA1(), length=32, salt=checksum_salt, iterations=pbkdf2_rounds)
kdf = PBKDF2HMAC(algorithm=hashes.SHA1(), length=32, salt=checksum_salt,
iterations=pbkdf2_rounds)
calculated_checksum = kdf.derive(hmac_mk)
if master_key_checksum != calculated_checksum:
@@ -109,7 +113,8 @@ def decrypt_master_key(password, user_salt, user_iv, pbkdf2_rounds, master_key_b
return master_key, master_iv
def decrypt_backup_data(encrypted_backup, password, encryption_algo, format_version):
def decrypt_backup_data(encrypted_backup, password, encryption_algo,
format_version):
"""
Generate encryption key from password and do decryption
@@ -120,7 +125,9 @@ def decrypt_backup_data(encrypted_backup, password, encryption_algo, format_vers
if password is None:
raise InvalidBackupPassword()
[user_salt, checksum_salt, pbkdf2_rounds, user_iv, master_key_blob, encrypted_data] = encrypted_backup.split(b"\n", 5)
[user_salt, checksum_salt, pbkdf2_rounds, user_iv,
master_key_blob, encrypted_data] = encrypted_backup.split(b"\n", 5)
user_salt = bytes.fromhex(user_salt.decode("utf-8"))
checksum_salt = bytes.fromhex(checksum_salt.decode("utf-8"))
pbkdf2_rounds = int(pbkdf2_rounds)
@@ -128,9 +135,13 @@ def decrypt_backup_data(encrypted_backup, password, encryption_algo, format_vers
master_key_blob = bytes.fromhex(master_key_blob.decode("utf-8"))
# Derive decryption master key from password.
master_key, master_iv = decrypt_master_key(password=password, user_salt=user_salt, user_iv=user_iv,
pbkdf2_rounds=pbkdf2_rounds, master_key_blob=master_key_blob,
format_version=format_version, checksum_salt=checksum_salt)
master_key, master_iv = decrypt_master_key(password=password,
user_salt=user_salt,
user_iv=user_iv,
pbkdf2_rounds=pbkdf2_rounds,
master_key_blob=master_key_blob,
format_version=format_version,
checksum_salt=checksum_salt)
# Decrypt and unpad backup data using derived key.
cipher = Cipher(algorithms.AES(master_key), modes.CBC(master_iv))
@@ -149,18 +160,21 @@ def parse_backup_file(data, password=None):
if not data.startswith(b"ANDROID BACKUP"):
raise AndroidBackupParsingError("Invalid file header")
[magic_header, version, is_compressed, encryption_algo, tar_data] = data.split(b"\n", 4)
[_, version, is_compressed,
encryption_algo, tar_data] = data.split(b"\n", 4)
version = int(version)
is_compressed = int(is_compressed)
if encryption_algo != b"none":
tar_data = decrypt_backup_data(tar_data, password, encryption_algo, format_version=version)
tar_data = decrypt_backup_data(tar_data, password, encryption_algo,
format_version=version)
if is_compressed:
try:
tar_data = zlib.decompress(tar_data)
except zlib.error:
raise AndroidBackupParsingError("Impossible to decompress the backup file")
except zlib.error as exc:
raise AndroidBackupParsingError("Impossible to decompress the backup file") from exc
return tar_data
@@ -171,13 +185,15 @@ def parse_tar_for_sms(data):
Returns an array of SMS
"""
dbytes = io.BytesIO(data)
tar = tarfile.open(fileobj=dbytes)
res = []
for member in tar.getmembers():
if member.name.startswith("apps/com.android.providers.telephony/d_f/") and \
(member.name.endswith("_sms_backup") or member.name.endswith("_mms_backup")):
dhandler = tar.extractfile(member)
res.extend(parse_sms_file(dhandler.read()))
with tarfile.open(fileobj=dbytes) as tar:
for member in tar.getmembers():
if (member.name.startswith("apps/com.android.providers.telephony/d_f/")
and (member.name.endswith("_sms_backup")
or member.name.endswith("_mms_backup"))):
dhandler = tar.extractfile(member)
res.extend(parse_sms_file(dhandler.read()))
return res
@@ -192,18 +208,18 @@ def parse_sms_file(data):
json_data = json.loads(data)
for entry in json_data:
# Adapt MMS format to SMS format
# Adapt MMS format to SMS format.
if "mms_body" in entry:
entry["body"] = entry["mms_body"]
entry.pop("mms_body")
message_links = check_for_links(entry["body"])
utc_timestamp = datetime.datetime.utcfromtimestamp(int(entry["date"]) / 1000)
entry["isodate"] = convert_timestamp_to_iso(utc_timestamp)
entry["isodate"] = convert_unix_to_iso(int(entry["date"]) / 1000)
entry["direction"] = ("sent" if int(entry["date_sent"]) else "received")
# If we find links in the messages or if they are empty we add them to the list.
# If we find links in the messages or if they are empty we add them to
# the list.
if message_links or entry["body"].strip() == "":
entry["links"] = message_links
res.append(entry)

View File
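For reference, a self-contained sketch of the header split performed by `parse_ab_header()` in the hunk above; the sample bytes are invented and the payload is truncated, so this demonstrates only the field parsing, not a real backup.

```python
def parse_ab_header(data: bytes) -> dict:
    """Split the "ANDROID BACKUP" header into its newline-separated fields."""
    if data.startswith(b"ANDROID BACKUP"):
        [_, version, is_compressed, encryption, _] = data.split(b"\n", 4)
        return {
            "backup": True,
            "compression": (is_compressed == b"1"),
            "encryption": encryption.decode("utf-8"),
            "version": int(version),
        }
    return {"backup": False, "compression": None,
            "encryption": None, "version": None}


sample = b"ANDROID BACKUP\n5\n1\nnone\n<tar data follows>"
print(parse_ab_header(sample))
# {'backup': True, 'compression': True, 'encryption': 'none', 'version': 5}
```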

@@ -6,7 +6,7 @@
import re
from datetime import datetime
from mvt.common.utils import convert_timestamp_to_iso
from mvt.common.utils import convert_datetime_to_iso
def parse_dumpsys_accessibility(output: str) -> list:
@@ -61,7 +61,8 @@ def parse_dumpsys_activity_resolver_table(output: str) -> dict:
break
# We detect the action name.
if line.startswith(" " * 6) and not line.startswith(" " * 8) and ":" in line:
if (line.startswith(" " * 6) and not line.startswith(" " * 8)
and ":" in line):
intent = line.strip().replace(":", "")
results[intent] = []
continue
@@ -117,7 +118,8 @@ def parse_dumpsys_battery_daily(output: str) -> list:
already_seen = False
for update in daily_updates:
if package_name == update["package_name"] and vers_nr == update["vers"]:
if (package_name == update["package_name"]
and vers_nr == update["vers"]):
already_seen = True
break
@@ -213,14 +215,14 @@ def parse_dumpsys_dbinfo(output: str) -> list:
matches = rxp_no_pid.findall(line)
if not matches:
continue
else:
match = matches[0]
results.append({
"isodate": match[0],
"action": match[1],
"sql": match[2],
"path": pool,
})
match = matches[0]
results.append({
"isodate": match[0],
"action": match[1],
"sql": match[2],
"path": pool,
})
else:
match = matches[0]
results.append({
@@ -261,7 +263,8 @@ def parse_dumpsys_receiver_resolver_table(output: str) -> dict:
break
# We detect the action name.
if line.startswith(" " * 6) and not line.startswith(" " * 8) and ":" in line:
if (line.startswith(" " * 6) and not line.startswith(" " * 8)
and ":" in line):
intent = line.strip().replace(":", "")
results[intent] = []
continue
@@ -354,7 +357,7 @@ def parse_dumpsys_appops(output: str) -> list:
entry["type"] = line[line.find("[")+1:line.find("]")]
try:
entry["timestamp"] = convert_timestamp_to_iso(
entry["timestamp"] = convert_datetime_to_iso(
datetime.strptime(
line[line.find("]")+1:line.find("(")].strip(),
"%Y-%m-%d %H:%M:%S.%f"))

View File

@@ -13,16 +13,15 @@ log = logging.getLogger(__name__)
class CmdCheckIOCS(Command):
name = "check-iocs"
modules = []
def __init__(self, target_path: str = None, results_path: str = None,
ioc_files: list = [], module_name: str = None, serial: str = None,
fast_mode: bool = False):
ioc_files: list = [], module_name: str = None,
serial: str = None, fast_mode: bool = False):
super().__init__(target_path=target_path, results_path=results_path,
ioc_files=ioc_files, module_name=module_name,
serial=serial, fast_mode=fast_mode, log=log)
self.name = "check-iocs"
def run(self) -> None:
all_modules = []
for entry in self.modules:
@@ -33,7 +32,7 @@ class CmdCheckIOCS(Command):
total_detections = 0
for file_name in os.listdir(self.target_path):
name_only, ext = os.path.splitext(file_name)
name_only, _ = os.path.splitext(file_name)
file_path = os.path.join(self.target_path, file_name)
for iocs_module in all_modules:

View File

@@ -13,17 +13,18 @@ from typing import Callable
from mvt.common.indicators import Indicators
from mvt.common.module import run_module, save_timeline
from mvt.common.utils import convert_timestamp_to_iso
from mvt.common.utils import convert_datetime_to_iso
from mvt.common.version import MVT_VERSION
class Command(object):
class Command:
def __init__(self, target_path: str = None, results_path: str = None,
ioc_files: list = [], module_name: str = None, serial: str = None,
fast_mode: bool = False,
ioc_files: list = [], module_name: str = None,
serial: str = None, fast_mode: bool = False,
log: logging.Logger = logging.getLogger(__name__)):
self.name = ""
self.modules = []
self.target_path = target_path
self.results_path = results_path
@@ -36,6 +37,10 @@ class Command(object):
self.iocs = Indicators(log=log)
self.iocs.load_indicators_files(ioc_files)
# This list will contain all executed modules.
# We can use this to reference e.g. self.executed[0].results.
self.executed = []
self.timeline = []
self.timeline_detected = []
@@ -43,20 +48,22 @@ class Command(object):
if self.results_path and not os.path.exists(self.results_path):
try:
os.makedirs(self.results_path)
except Exception as e:
except Exception as exc:
self.log.critical("Unable to create output folder %s: %s",
self.results_path, e)
self.results_path, exc)
sys.exit(1)
def _add_log_file_handler(self, logger: logging.Logger) -> None:
if not self.results_path:
return
fh = logging.FileHandler(os.path.join(self.results_path, "command.log"))
formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
fh.setLevel(logging.DEBUG)
fh.setFormatter(formatter)
logger.addHandler(fh)
file_handler = logging.FileHandler(os.path.join(self.results_path,
"command.log"))
formatter = logging.Formatter("%(asctime)s - %(name)s - "
"%(levelname)s - %(message)s")
file_handler.setLevel(logging.DEBUG)
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)
def _store_timeline(self) -> None:
if not self.results_path:
@@ -68,7 +75,8 @@ class Command(object):
if len(self.timeline_detected) > 0:
save_timeline(self.timeline_detected,
os.path.join(self.results_path, "timeline_detected.csv"))
os.path.join(self.results_path,
"timeline_detected.csv"))
def _store_info(self) -> None:
if not self.results_path:
@@ -81,52 +89,58 @@ class Command(object):
info = {
"target_path": target_path,
"mvt_version": MVT_VERSION,
"date": convert_timestamp_to_iso(datetime.now()),
"date": convert_datetime_to_iso(datetime.now()),
"ioc_files": [],
"hashes": [],
}
for coll in self.iocs.ioc_collections:
info["ioc_files"].append(coll.get("stix2_file_path", ""))
ioc_file_path = coll.get("stix2_file_path", "")
if ioc_file_path and ioc_file_path not in info["ioc_files"]:
info["ioc_files"].append(ioc_file_path)
# TODO: Revisit if setting this from environment variable is good
# enough.
if self.target_path and os.environ.get("MVT_HASH_FILES"):
if os.path.isfile(self.target_path):
h = hashlib.sha256()
sha256 = hashlib.sha256()
with open(self.target_path, "rb") as handle:
h.update(handle.read())
sha256.update(handle.read())
info["hashes"].append({
"file_path": self.target_path,
"sha256": h.hexdigest(),
"sha256": sha256.hexdigest(),
})
elif os.path.isdir(self.target_path):
for (root, dirs, files) in os.walk(self.target_path):
for (root, _, files) in os.walk(self.target_path):
for file in files:
file_path = os.path.join(root, file)
h = hashlib.sha256()
sha256 = hashlib.sha256()
try:
with open(file_path, "rb") as handle:
h.update(handle.read())
sha256.update(handle.read())
except FileNotFoundError:
self.log.error("Failed to hash the file %s: might be a symlink", file_path)
self.log.error("Failed to hash the file %s: might "
"be a symlink", file_path)
continue
except PermissionError:
self.log.error("Failed to hash the file %s: permission denied", file_path)
self.log.error("Failed to hash the file %s: "
"permission denied", file_path)
continue
info["hashes"].append({
"file_path": file_path,
"sha256": h.hexdigest(),
"sha256": sha256.hexdigest(),
})
with open(os.path.join(self.results_path, "info.json"), "w+") as handle:
info_path = os.path.join(self.results_path, "info.json")
with open(info_path, "w+", encoding="utf-8") as handle:
json.dump(info, handle, indent=4)
def list_modules(self) -> None:
self.log.info("Following is the list of available %s modules:", self.name)
self.log.info("Following is the list of available %s modules:",
self.name)
for module in self.modules:
self.log.info(" - %s", module.__name__)
@@ -174,6 +188,8 @@ class Command(object):
run_module(m)
self.executed.append(m)
self.timeline.extend(m.timeline)
self.timeline_detected.extend(m.timeline_detected)

View File
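The hashing added to `_store_info()` only runs when the `MVT_HASH_FILES` environment variable is set. Below is a stand-alone sketch of the same per-file SHA-256 walk, kept outside the `Command` class purely for illustration; the error handling mirrors the symlink and permission cases above.

```python
import hashlib
import os


def hash_directory(target_path: str) -> list:
    """Compute SHA-256 hashes for every file found under target_path."""
    hashes = []
    for root, _, files in os.walk(target_path):
        for file in files:
            file_path = os.path.join(root, file)
            sha256 = hashlib.sha256()
            try:
                with open(file_path, "rb") as handle:
                    sha256.update(handle.read())
            except (FileNotFoundError, PermissionError):
                # Broken symlinks and unreadable files are skipped, as in
                # the module above.
                continue
            hashes.append({"file_path": file_path,
                           "sha256": sha256.hexdigest()})
    return hashes


if os.environ.get("MVT_HASH_FILES"):
    print(hash_directory("."))
```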

@@ -6,6 +6,7 @@
import json
import logging
import os
from typing import Union
from appdirs import user_data_dir
@@ -31,7 +32,8 @@ class Indicators:
for ioc_file_name in os.listdir(MVT_INDICATORS_FOLDER):
if ioc_file_name.lower().endswith(".stix2"):
self.parse_stix2(os.path.join(MVT_INDICATORS_FOLDER, ioc_file_name))
self.parse_stix2(os.path.join(MVT_INDICATORS_FOLDER,
ioc_file_name))
def _check_stix2_env_variable(self) -> None:
"""
@@ -45,11 +47,12 @@ class Indicators:
if os.path.isfile(path):
self.parse_stix2(path)
else:
self.log.error("Path specified with env MVT_STIX2 is not a valid file: %s",
path)
self.log.error("Path specified with env MVT_STIX2 is not "
"a valid file: %s", path)
def _new_collection(self, cid: str = "", name: str = "", description: str = "",
file_name: str = "", file_path: str = "") -> dict:
def _new_collection(self, cid: str = "", name: str = "",
description: str = "", file_name: str = "",
file_path: str = "") -> dict:
return {
"id": cid,
"name": name,
@@ -67,13 +70,52 @@ class Indicators:
"count": 0,
}
def _add_indicator(self, ioc: str, ioc_coll: dict, ioc_coll_list: list) -> None:
def _add_indicator(self, ioc: str, ioc_coll: dict,
ioc_coll_list: list) -> None:
ioc = ioc.strip("'")
if ioc not in ioc_coll_list:
ioc_coll_list.append(ioc)
ioc_coll["count"] += 1
self.total_ioc_count += 1
def _process_indicator(self, indicator: dict, collection: dict) -> None:
key, value = indicator.get("pattern", "").strip("[]").split("=")
if key == "domain-name:value":
# We force domain names to lower case.
self._add_indicator(ioc=value.lower(),
ioc_coll=collection,
ioc_coll_list=collection["domains"])
elif key == "process:name":
self._add_indicator(ioc=value,
ioc_coll=collection,
ioc_coll_list=collection["processes"])
elif key == "email-addr:value":
# We force email addresses to lower case.
self._add_indicator(ioc=value.lower(),
ioc_coll=collection,
ioc_coll_list=collection["emails"])
elif key == "file:name":
self._add_indicator(ioc=value,
ioc_coll=collection,
ioc_coll_list=collection["file_names"])
elif key == "file:path":
self._add_indicator(ioc=value,
ioc_coll=collection,
ioc_coll_list=collection["file_paths"])
elif key == "file:hashes.sha256":
self._add_indicator(ioc=value,
ioc_coll=collection,
ioc_coll_list=collection["files_sha256"])
elif key == "app:id":
self._add_indicator(ioc=value,
ioc_coll=collection,
ioc_coll_list=collection["app_ids"])
elif key == "configuration-profile:id":
self._add_indicator(ioc=value,
ioc_coll=collection,
ioc_coll_list=collection["ios_profile_ids"])
def parse_stix2(self, file_path: str) -> None:
"""Extract indicators from a STIX2 file.
@@ -88,7 +130,8 @@ class Indicators:
data = json.load(handle)
except json.decoder.JSONDecodeError:
self.log.critical("Unable to parse STIX2 indicator file. "
"The file is corrupted or in the wrong format!")
"The file is corrupted or in the wrong "
"format!")
return
malware = {}
@@ -132,47 +175,9 @@ class Indicators:
# Now we look for the correct collection matching the malware ID we
# got from the relationship.
for collection in collections:
if collection["id"] != malware_id:
continue
key, value = indicator.get("pattern", "").strip("[]").split("=")
if key == "domain-name:value":
# We force domain names to lower case.
self._add_indicator(ioc=value.lower(),
ioc_coll=collection,
ioc_coll_list=collection["domains"])
elif key == "process:name":
self._add_indicator(ioc=value,
ioc_coll=collection,
ioc_coll_list=collection["processes"])
elif key == "email-addr:value":
# We force email addresses to lower case.
self._add_indicator(ioc=value.lower(),
ioc_coll=collection,
ioc_coll_list=collection["emails"])
elif key == "file:name":
self._add_indicator(ioc=value,
ioc_coll=collection,
ioc_coll_list=collection["file_names"])
elif key == "file:path":
self._add_indicator(ioc=value,
ioc_coll=collection,
ioc_coll_list=collection["file_paths"])
elif key == "file:hashes.sha256":
self._add_indicator(ioc=value,
ioc_coll=collection,
ioc_coll_list=collection["files_sha256"])
elif key == "app:id":
self._add_indicator(ioc=value,
ioc_coll=collection,
ioc_coll_list=collection["app_ids"])
elif key == "configuration-profile:id":
self._add_indicator(ioc=value,
ioc_coll=collection,
ioc_coll_list=collection["ios_profile_ids"])
break
if collection["id"] == malware_id:
self._process_indicator(indicator, collection)
break
for coll in collections:
self.log.info("Extracted %d indicators for collection with name \"%s\"",
@@ -180,7 +185,8 @@ class Indicators:
self.ioc_collections.extend(collections)
def load_indicators_files(self, files: list, load_default: bool = True) -> None:
def load_indicators_files(self, files: list,
load_default: bool = True) -> None:
"""
Load a list of indicators files.
"""
@@ -196,9 +202,10 @@ class Indicators:
self._load_downloaded_indicators()
self._check_stix2_env_variable()
self.log.info("Loaded a total of %d unique indicators", self.total_ioc_count)
self.log.info("Loaded a total of %d unique indicators",
self.total_ioc_count)
def get_iocs(self, ioc_type: str) -> dict:
def get_iocs(self, ioc_type: str) -> Union[dict, None]:
for ioc_collection in self.ioc_collections:
for ioc in ioc_collection.get(ioc_type, []):
yield {
@@ -208,7 +215,7 @@ class Indicators:
"stix2_file_name": ioc_collection["stix2_file_name"],
}
def check_domain(self, url: str) -> dict:
def check_domain(self, url: str) -> Union[dict, None]:
"""Check if a given URL matches any of the provided domain indicators.
:param url: URL to match against domain indicators
@@ -236,7 +243,8 @@ class Indicators:
# Now we check for any nested URL shorteners.
dest_url = URL(unshortened)
if dest_url.check_if_shortened():
# self.log.info("Original URL %s appears to shorten another shortened URL %s ... checking!",
# self.log.info("Original URL %s appears to shorten another "
# "shortened URL %s ... checking!",
# orig_url.url, dest_url.url)
return self.check_domain(dest_url.url)
@@ -249,22 +257,27 @@ class Indicators:
# match.
for ioc in self.get_iocs("domains"):
if ioc["value"].lower() in url:
self.log.warning("Maybe found a known suspicious domain %s matching indicators from \"%s\"",
self.log.warning("Maybe found a known suspicious domain %s "
"matching indicators from \"%s\"",
url, ioc["name"])
return ioc
# If nothing matched, we can quit here.
return None
# If all parsing worked, we start walking through available domain indicators.
# If all parsing worked, we start walking through available domain
# indicators.
for ioc in self.get_iocs("domains"):
# First we check the full domain.
if final_url.domain.lower() == ioc["value"]:
if orig_url.is_shortened and orig_url.url != final_url.url:
self.log.warning("Found a known suspicious domain %s shortened as %s matching indicators from \"%s\"",
final_url.url, orig_url.url, ioc["name"])
self.log.warning("Found a known suspicious domain %s "
"shortened as %s matching indicators "
"from \"%s\"", final_url.url, orig_url.url,
ioc["name"])
else:
self.log.warning("Found a known suspicious domain %s matching indicators from \"%s\"",
self.log.warning("Found a known suspicious domain %s "
"matching indicators from \"%s\"",
final_url.url, ioc["name"])
return ioc
@@ -272,15 +285,20 @@ class Indicators:
# Then we just check the top level domain.
if final_url.top_level.lower() == ioc["value"]:
if orig_url.is_shortened and orig_url.url != final_url.url:
self.log.warning("Found a sub-domain with suspicious top level %s shortened as %s matching indicators from \"%s\"",
final_url.url, orig_url.url, ioc["name"])
self.log.warning("Found a sub-domain with suspicious top "
"level %s shortened as %s matching "
"indicators from \"%s\"", final_url.url,
orig_url.url, ioc["name"])
else:
self.log.warning("Found a sub-domain with a suspicious top level %s matching indicators from \"%s\"",
self.log.warning("Found a sub-domain with a suspicious top "
"level %s matching indicators from \"%s\"",
final_url.url, ioc["name"])
return ioc
def check_domains(self, urls: list) -> dict:
return None
def check_domains(self, urls: list) -> Union[dict, None]:
"""Check a list of URLs against the provided list of domain indicators.
:param urls: List of URLs to check against domain indicators
@@ -296,7 +314,9 @@ class Indicators:
if check:
return check
def check_process(self, process: str) -> dict:
return None
def check_process(self, process: str) -> Union[dict, None]:
"""Check the provided process name against the list of process
indicators.
@@ -311,17 +331,21 @@ class Indicators:
proc_name = os.path.basename(process)
for ioc in self.get_iocs("processes"):
if proc_name == ioc["value"]:
self.log.warning("Found a known suspicious process name \"%s\" matching indicators from \"%s\"",
self.log.warning("Found a known suspicious process name \"%s\" "
"matching indicators from \"%s\"",
process, ioc["name"])
return ioc
if len(proc_name) == 16:
if ioc["value"].startswith(proc_name):
self.log.warning("Found a truncated known suspicious process name \"%s\" matching indicators from \"%s\"",
process, ioc["name"])
self.log.warning("Found a truncated known suspicious "
"process name \"%s\" matching indicators "
"from \"%s\"", process, ioc["name"])
return ioc
def check_processes(self, processes: list) -> dict:
return None
def check_processes(self, processes: list) -> Union[dict, None]:
"""Check the provided list of processes against the list of
process indicators.
@@ -338,7 +362,9 @@ class Indicators:
if check:
return check
def check_email(self, email: str) -> dict:
return None
def check_email(self, email: str) -> Union[dict, None]:
"""Check the provided email against the list of email indicators.
:param email: Email address to check against email indicators
@@ -351,11 +377,14 @@ class Indicators:
for ioc in self.get_iocs("emails"):
if email.lower() == ioc["value"].lower():
self.log.warning("Found a known suspicious email address \"%s\" matching indicators from \"%s\"",
self.log.warning("Found a known suspicious email address \"%s\""
" matching indicators from \"%s\"",
email, ioc["name"])
return ioc
def check_file_name(self, file_name: str) -> dict:
return None
def check_file_name(self, file_name: str) -> Union[dict, None]:
"""Check the provided file name against the list of file indicators.
:param file_name: File name to check against file
@@ -369,12 +398,16 @@ class Indicators:
for ioc in self.get_iocs("file_names"):
if ioc["value"] == file_name:
self.log.warning("Found a known suspicious file name \"%s\" matching indicators from \"%s\"",
self.log.warning("Found a known suspicious file name \"%s\" "
"matching indicators from \"%s\"",
file_name, ioc["name"])
return ioc
def check_file_path(self, file_path: str) -> dict:
"""Check the provided file path against the list of file indicators (both path and name).
return None
def check_file_path(self, file_path: str) -> Union[dict, None]:
"""Check the provided file path against the list of file indicators
(both path and name).
:param file_path: File path or file name to check against file
indicators
@@ -390,16 +423,22 @@ class Indicators:
return ioc
for ioc in self.get_iocs("file_paths"):
# Strip any trailing slash from indicator paths to match directories.
# Strip any trailing slash from indicator paths to match
# directories.
if file_path.startswith(ioc["value"].rstrip("/")):
self.log.warning("Found a known suspicious file path \"%s\" matching indicators form \"%s\"",
self.log.warning("Found a known suspicious file path \"%s\" "
"matching indicators form \"%s\"",
file_path, ioc["name"])
return ioc
def check_profile(self, profile_uuid: str) -> dict:
"""Check the provided configuration profile UUID against the list of indicators.
return None
:param profile_uuid: Profile UUID to check against configuration profile indicators
def check_profile(self, profile_uuid: str) -> Union[dict, None]:
"""Check the provided configuration profile UUID against the list of
indicators.
:param profile_uuid: Profile UUID to check against configuration profile
indicators
:type profile_uuid: str
:returns: Indicator details if matched, otherwise None
@@ -409,11 +448,14 @@ class Indicators:
for ioc in self.get_iocs("ios_profile_ids"):
if profile_uuid in ioc["value"]:
self.log.warning("Found a known suspicious profile ID \"%s\" matching indicators from \"%s\"",
self.log.warning("Found a known suspicious profile ID \"%s\" "
"matching indicators from \"%s\"",
profile_uuid, ioc["name"])
return ioc
def check_file_hash(self, file_hash: str) -> dict:
return None
def check_file_hash(self, file_hash: str) -> Union[dict, None]:
"""Check the provided SHA256 file hash against the list of indicators.
:param file_hash: SHA256 hash to check
@@ -426,11 +468,14 @@ class Indicators:
for ioc in self.get_iocs("files_sha256"):
if file_hash.lower() == ioc["value"].lower():
self.log.warning("Found a known suspicious file with hash \"%s\" matching indicators from \"%s\"",
self.log.warning("Found a known suspicious file with hash "
"\"%s\" matching indicators from \"%s\"",
file_hash, ioc["name"])
return ioc
def check_app_id(self, app_id: str) -> dict:
return None
def check_app_id(self, app_id: str) -> Union[dict, None]:
"""Check the provided app identifier (typically an Android package name)
against the list of indicators.
@@ -444,6 +489,9 @@ class Indicators:
for ioc in self.get_iocs("app_ids"):
if app_id.lower() == ioc["value"].lower():
self.log.warning("Found a known suspicious app with ID \"%s\" matching indicators from \"%s\"",
app_id, ioc["name"])
self.log.warning("Found a known suspicious app with ID \"%s\" "
"matching indicators from \"%s\"", app_id,
ioc["name"])
return ioc
return None

View File
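The refactored `_process_indicator()` receives one STIX2 indicator at a time and routes its value into the matching collection list. A minimal sketch of that dispatch, using a made-up pattern rather than a real indicator:

```python
# Made-up STIX2 pattern; real patterns come from the loaded .stix2 files.
pattern = "[domain-name:value='malicious.example.org']"

key, value = pattern.strip("[]").split("=")
value = value.strip("'")

collection = {"domains": [], "processes": [], "emails": [], "count": 0}

if key == "domain-name:value":
    # Domain names are forced to lower case before being stored.
    collection["domains"].append(value.lower())
    collection["count"] += 1

print(collection)
# {'domains': ['malicious.example.org'], 'processes': [], 'emails': [], 'count': 1}
```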

@@ -18,7 +18,8 @@ def check_updates() -> None:
pass
else:
if latest_version:
print(f"\t\t[bold]Version {latest_version} is available! Upgrade mvt![/bold]")
print(f"\t\t[bold]Version {latest_version} is available! "
"Upgrade mvt![/bold]")
# Then we check for indicators files updates.
ioc_updates = IndicatorsUpdates()
@@ -26,7 +27,8 @@ def check_updates() -> None:
# Before proceeding, we check if we have downloaded an indicators index.
# If not, there's no point in proceeding with the updates check.
if ioc_updates.get_latest_update() == 0:
print("\t\t[bold]You have not yet downloaded any indicators, check the `download-iocs` command![/bold]")
print("\t\t[bold]You have not yet downloaded any indicators, check "
"the `download-iocs` command![/bold]")
return
# We only perform this check at a fixed frequency, in order to not
@@ -34,7 +36,8 @@ def check_updates() -> None:
# multiple times.
should_check, hours = ioc_updates.should_check()
if not should_check:
print(f"\t\tIndicators updates checked recently, next automatic check in {int(hours)} hours")
print(f"\t\tIndicators updates checked recently, next automatic check "
f"in {int(hours)} hours")
return
try:
@@ -43,7 +46,8 @@ def check_updates() -> None:
pass
else:
if ioc_to_update:
print("\t\t[bold]There are updates to your indicators files! Run the `download-iocs` command to update![/bold]")
print("\t\t[bold]There are updates to your indicators files! "
"Run the `download-iocs` command to update![/bold]")
else:
print("\t\tYour indicators files seem to be up to date.")

View File

@@ -7,7 +7,7 @@ import csv
import logging
import os
import re
from typing import Callable
from typing import Callable, Union
import simplejson as json
@@ -24,7 +24,7 @@ class InsufficientPrivileges(Exception):
pass
class MVTModule(object):
class MVTModule:
"""This class provides a base for all extraction modules."""
enabled = True
@@ -37,7 +37,8 @@ class MVTModule(object):
:param file_path: Path to the module's database file, if there is any
:type file_path: str
:param target_path: Path to the target folder (backup or filesystem dump)
:param target_path: Path to the target folder (backup or filesystem
dump)
:type file_path: str
:param results_path: Folder where results will be stored
:type results_path: str
@@ -92,21 +93,24 @@ class MVTModule(object):
if self.results:
results_file_name = f"{name}.json"
results_json_path = os.path.join(self.results_path, results_file_name)
results_json_path = os.path.join(self.results_path,
results_file_name)
with open(results_json_path, "w", encoding="utf-8") as handle:
try:
json.dump(self.results, handle, indent=4, default=str)
except Exception as e:
self.log.error("Unable to store results of module %s to file %s: %s",
self.__class__.__name__, results_file_name, e)
except Exception as exc:
self.log.error("Unable to store results of module %s "
"to file %s: %s", self.__class__.__name__,
results_file_name, exc)
if self.detected:
detected_file_name = f"{name}_detected.json"
detected_json_path = os.path.join(self.results_path, detected_file_name)
detected_json_path = os.path.join(self.results_path,
detected_file_name)
with open(detected_json_path, "w", encoding="utf-8") as handle:
json.dump(self.detected, handle, indent=4, default=str)
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
raise NotImplementedError
@staticmethod
@@ -126,7 +130,7 @@ class MVTModule(object):
for result in self.results:
record = self.serialize(result)
if record:
if type(record) == list:
if isinstance(record, list):
self.timeline.extend(record)
else:
self.timeline.append(record)
@@ -134,7 +138,7 @@ class MVTModule(object):
for detected in self.detected:
record = self.serialize(detected)
if record:
if type(record) == list:
if isinstance(record, list):
self.timeline_detected.extend(record)
else:
self.timeline_detected.append(record)
@@ -154,26 +158,26 @@ def run_module(module: Callable) -> None:
try:
module.run()
except NotImplementedError:
module.log.exception("The run() procedure of module %s was not implemented yet!",
module.__class__.__name__)
except InsufficientPrivileges as e:
module.log.info("Insufficient privileges for module %s: %s", module.__class__.__name__, e)
except DatabaseNotFoundError as e:
module.log.exception("The run() procedure of module %s was not "
"implemented yet!", module.__class__.__name__)
except InsufficientPrivileges as exc:
module.log.info("Insufficient privileges for module %s: %s",
module.__class__.__name__, exc)
except DatabaseNotFoundError as exc:
module.log.info("There might be no data to extract by module %s: %s",
module.__class__.__name__, e)
except DatabaseCorruptedError as e:
module.__class__.__name__, exc)
except DatabaseCorruptedError as exc:
module.log.error("The %s module database seems to be corrupted: %s",
module.__class__.__name__, e)
except Exception as e:
module.__class__.__name__, exc)
except Exception as exc:
module.log.exception("Error in running extraction from module %s: %s",
module.__class__.__name__, e)
module.__class__.__name__, exc)
else:
try:
module.check_indicators()
except NotImplementedError:
module.log.info("The %s module does not support checking for indicators",
module.__class__.__name__)
pass
module.log.info("The %s module does not support checking for "
"indicators", module.__class__.__name__)
else:
if module.indicators and not module.detected:
module.log.info("The %s module produced no detections!",
@@ -198,7 +202,9 @@ def save_timeline(timeline: list, timeline_path: str) -> None:
csvoutput = csv.writer(handle, delimiter=",", quotechar="\"",
quoting=csv.QUOTE_ALL)
csvoutput.writerow(["UTC Timestamp", "Plugin", "Event", "Description"])
for event in sorted(timeline, key=lambda x: x["timestamp"] if x["timestamp"] is not None else ""):
for event in sorted(timeline, key=lambda x: x["timestamp"]
if x["timestamp"] is not None else ""):
csvoutput.writerow([
event.get("timestamp"),
event.get("module"),

View File
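The `Union[dict, list]` hint on `serialize()` exists because a single result can expand into several timeline events, which is why `to_timeline()` now checks with `isinstance()` before extending or appending. A toy example of the pattern (not an actual MVT module):

```python
from typing import Union


def serialize(record: dict) -> Union[dict, list]:
    """Turn one result into multiple timeline records."""
    return [
        {"timestamp": record["first_install_time"], "module": "Example",
         "event": "package_first_install", "data": record["package_name"]},
        {"timestamp": record["last_update_time"], "module": "Example",
         "event": "package_last_update", "data": record["package_name"]},
    ]


timeline = []
record = serialize({"package_name": "com.example.app",
                    "first_install_time": "2022-01-01 10:00:00.000000",
                    "last_update_time": "2022-06-01 12:00:00.000000"})
if isinstance(record, list):
    timeline.extend(record)
else:
    timeline.append(record)

print(len(timeline))  # 2
```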

@@ -13,27 +13,21 @@ class MutuallyExclusiveOption(Option):
def __init__(self, *args, **kwargs):
self.mutually_exclusive = set(kwargs.pop("mutually_exclusive", []))
help = kwargs.get("help", "")
help_msg = kwargs.get("help", "")
if self.mutually_exclusive:
ex_str = ", ".join(self.mutually_exclusive)
kwargs["help"] = help + (
kwargs["help"] = help_msg + (
" NOTE: This argument is mutually exclusive with "
"arguments: [" + ex_str + "]."
)
super(MutuallyExclusiveOption, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
def handle_parse_result(self, ctx, opts, args):
if self.mutually_exclusive.intersection(opts) and self.name in opts:
raise UsageError(
"Illegal usage: `{}` is mutually exclusive with "
"arguments `{}`.".format(
self.name,
", ".join(self.mutually_exclusive)
)
f"Illegal usage: `{self.name}` is mutually exclusive with "
f"arguments `{', '.join(self.mutually_exclusive)}`."
)
return super(MutuallyExclusiveOption, self).handle_parse_result(
ctx,
opts,
args
)
return super().handle_parse_result(ctx, opts, args)

View File
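A hedged usage sketch of `MutuallyExclusiveOption`: a toy click command where `--password` and `--key-file` cannot be combined, matching how the real `decrypt-backup` command uses the class later in this diff. The import path is assumed to be `mvt.common.options`.

```python
import click

from mvt.common.options import MutuallyExclusiveOption


@click.command()
@click.option("--password", "-p", cls=MutuallyExclusiveOption,
              mutually_exclusive=["key_file"])
@click.option("--key-file", "-k", cls=MutuallyExclusiveOption,
              mutually_exclusive=["password"])
def demo(password, key_file):
    click.echo(f"password={password} key_file={key_file}")


if __name__ == "__main__":
    # Running `demo -p secret -k key.bin` aborts with:
    # "Illegal usage: `password` is mutually exclusive with arguments `key_file`."
    demo()
```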

@@ -55,7 +55,7 @@ class IndicatorsUpdates:
if not os.path.exists(self.latest_check_path):
return 0
with open(self.latest_check_path, "r") as handle:
with open(self.latest_check_path, "r", encoding="utf-8") as handle:
data = handle.read().strip()
if data:
return int(data)
@@ -64,14 +64,14 @@ class IndicatorsUpdates:
def set_latest_check(self) -> None:
timestamp = int(datetime.utcnow().timestamp())
with open(self.latest_check_path, "w") as handle:
with open(self.latest_check_path, "w", encoding="utf-8") as handle:
handle.write(str(timestamp))
def get_latest_update(self) -> int:
if not os.path.exists(self.latest_update_path):
return 0
with open(self.latest_update_path, "r") as handle:
with open(self.latest_update_path, "r", encoding="utf-8") as handle:
data = handle.read().strip()
if data:
return int(data)
@@ -80,7 +80,7 @@ class IndicatorsUpdates:
def set_latest_update(self) -> None:
timestamp = int(datetime.utcnow().timestamp())
with open(self.latest_update_path, "w") as handle:
with open(self.latest_update_path, "w", encoding="utf-8") as handle:
handle.write(str(timestamp))
def get_remote_index(self) -> dict:
@@ -88,8 +88,8 @@ class IndicatorsUpdates:
self.index_branch, self.index_path)
res = requests.get(url)
if res.status_code != 200:
log.error("Failed to retrieve indicators index located at %s (error %d)",
url, res.status_code)
log.error("Failed to retrieve indicators index located at %s "
"(error %d)", url, res.status_code)
return None
return yaml.safe_load(res.content)
@@ -131,8 +131,8 @@ class IndicatorsUpdates:
ioc_url = ioc.get("download_url", "")
if not ioc_url:
log.error("Could not find a way to download indicator file for %s",
ioc.get("name"))
log.error("Could not find a way to download indicator file "
"for %s", ioc.get("name"))
continue
ioc_local_path = self.download_remote_ioc(ioc_url)
@@ -145,25 +145,29 @@ class IndicatorsUpdates:
self.set_latest_update()
def _get_remote_file_latest_commit(self, owner: str, repo: str,
branch: str, path: str) -> bool:
file_commit_url = f"https://api.github.com/repos/{self.index_owner}/{self.index_repo}/commits?path={self.index_path}"
branch: str, path: str) -> int:
# TODO: The branch is currently not taken into consideration.
# How do we specify which branch to look up to the API?
file_commit_url = f"https://api.github.com/repos/{owner}/{repo}/commits?path={path}"
res = requests.get(file_commit_url)
if res.status_code != 200:
log.error("Failed to get details about file %s (error %d)",
file_commit_url, res.status_code)
return False
return -1
details = res.json()
if len(details) == 0:
return False
return -1
latest_commit = details[0]
latest_commit_date = latest_commit.get("commit", {}).get("author", {}).get("date", None)
if not latest_commit_date:
log.error("Failed to retrieve date of latest update to indicators index file")
return False
log.error("Failed to retrieve date of latest update to indicators "
"index file")
return -1
latest_commit_dt = datetime.strptime(latest_commit_date, '%Y-%m-%dT%H:%M:%SZ')
latest_commit_dt = datetime.strptime(latest_commit_date,
'%Y-%m-%dT%H:%M:%SZ')
latest_commit_ts = int(latest_commit_dt.timestamp())
return latest_commit_ts

View File
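A condensed sketch of the GitHub commits API query performed by `_get_remote_file_latest_commit()` above, returning -1 on failure as the new code does; the owner, repository and path in the final call are placeholders, not necessarily the defaults MVT uses.

```python
from datetime import datetime

import requests


def latest_commit_timestamp(owner: str, repo: str, path: str) -> int:
    """Return the Unix timestamp of the latest commit touching a file."""
    url = f"https://api.github.com/repos/{owner}/{repo}/commits?path={path}"
    res = requests.get(url)
    if res.status_code != 200:
        return -1
    details = res.json()
    if not details:
        return -1
    date = details[0].get("commit", {}).get("author", {}).get("date")
    if not date:
        return -1
    return int(datetime.strptime(date, "%Y-%m-%dT%H:%M:%SZ").timestamp())


# Placeholder repository details for illustration only.
print(latest_commit_timestamp("example-owner", "example-repo", "index.yaml"))
```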

@@ -256,7 +256,7 @@ SHORTENER_DOMAINS = [
class URL:
def __init__(self, url: str) -> None:
if type(url) == bytes:
if isinstance(url, bytes):
url = url.decode()
self.url = url
@@ -315,3 +315,5 @@ class URL:
res = requests.head(self.url)
if str(res.status_code).startswith("30"):
return res.headers["Location"]
return ""

View File

@@ -6,16 +6,72 @@
import datetime
import hashlib
import re
from typing import Union
def convert_mactime_to_unix(timestamp, from_2001: bool = True):
"""Converts Mac Standard Time to a Unix timestamp.
def convert_chrometime_to_datetime(timestamp: int) -> datetime.datetime:
"""Converts Chrome timestamp to a datetime.
:param timestamp: Chrome timestamp as int.
:type timestamp: int
:returns: datetime.
"""
epoch_start = datetime.datetime(1601, 1, 1)
delta = datetime.timedelta(microseconds=timestamp)
return epoch_start + delta
def convert_datetime_to_iso(datetime: datetime.datetime) -> str:
"""Converts datetime to ISO string.
:param datetime: datetime.
:type datetime: datetime.datetime
:returns: ISO datetime string in YYYY-mm-dd HH:MM:SS.ms format.
:rtype: str
"""
try:
return datetime.strftime("%Y-%m-%d %H:%M:%S.%f")
except Exception:
return ""
def convert_unix_to_utc_datetime(timestamp: Union[int, float, str]) -> datetime.datetime:
"""Converts a unix epoch timestamp to UTC datetime.
:param timestamp: Epoch timestamp to convert.
:type timestamp: int
:returns: datetime.
"""
return datetime.datetime.utcfromtimestamp(float(timestamp))
def convert_unix_to_iso(timestamp: int) -> str:
"""Converts a unix epoch to ISO string.
:param timestamp: Epoch timestamp to convert.
:type timestamp: int
:returns: ISO datetime string in YYYY-mm-dd HH:MM:SS.ms format.
:rtype: str
"""
try:
return convert_datetime_to_iso(convert_unix_to_utc_datetime(timestamp))
except Exception:
return ""
def convert_mactime_to_datetime(timestamp: Union[int, float],
from_2001: bool = True):
"""Converts Mac Standard Time to a datetime.
:param timestamp: MacTime timestamp (either int or float).
:type timestamp: int
:param from_2001: bool: Whether to (Default value = True)
:param from_2001: Whether the timestamp is counted from 2001-01-01 (default: True)
:returns: Unix epoch timestamp.
:returns: datetime.
"""
if not timestamp:
@@ -23,7 +79,7 @@ def convert_mactime_to_unix(timestamp, from_2001: bool = True):
# This is to fix formats in case of, for example, SMS messages database
# timestamp format.
if type(timestamp) == int and len(str(timestamp)) == 18:
if isinstance(timestamp, int) and len(str(timestamp)) == 18:
timestamp = int(str(timestamp)[:9])
# MacTime counts from 2001-01-01.
@@ -32,37 +88,25 @@ def convert_mactime_to_unix(timestamp, from_2001: bool = True):
# TODO: This is rather ugly. Happens sometimes with invalid timestamps.
try:
return datetime.datetime.utcfromtimestamp(timestamp)
return convert_unix_to_utc_datetime(timestamp)
except Exception:
return None
def convert_chrometime_to_unix(timestamp: int) -> int:
"""Converts Chrome timestamp to a Unix timestamp.
def convert_mactime_to_iso(timestamp: int, from_2001: bool = True):
"""Wraps two conversions from mactime to iso date.
:param timestamp: Chrome timestamp as int.
:type timestamp: int
:returns: Unix epoch timestamp.
"""
epoch_start = datetime.datetime(1601, 1, 1)
delta = datetime.timedelta(microseconds=timestamp)
return epoch_start + delta
def convert_timestamp_to_iso(timestamp: str) -> str:
"""Converts Unix timestamp to ISO string.
:param timestamp: Unix timestamp.
:param timestamp: MacTime timestamp (either int or float).
:type timestamp: int
:param from_2001: bool: Whether to (Default value = True)
:param from_2001: Whether the timestamp is counted from 2001-01-01 (default: True)
:returns: ISO timestamp string in YYYY-mm-dd HH:MM:SS.ms format.
:rtype: str
"""
try:
return timestamp.strftime("%Y-%m-%d %H:%M:%S.%f")
except Exception:
return None
return convert_datetime_to_iso(convert_mactime_to_datetime(timestamp,
from_2001))
def check_for_links(text: str) -> list:
@@ -106,8 +150,8 @@ def keys_bytes_to_string(obj) -> str:
if isinstance(obj, (tuple, list, set)):
value = [keys_bytes_to_string(x) for x in obj]
return value
else:
return obj
return obj
for key, value in obj.items():
if isinstance(key, bytes):
View File
@@ -3,4 +3,4 @@
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/
MVT_VERSION = "2.1.3"
MVT_VERSION = "2.1.4"
View File
@@ -23,23 +23,28 @@ class VTQuotaExceeded(Exception):
def virustotal_lookup(file_hash: str):
if MVT_VT_API_KEY not in os.environ:
raise VTNoKey("No VirusTotal API key provided: to use VirusTotal lookups please provide your API key with `export MVT_VT_API_KEY=<key>`")
raise VTNoKey("No VirusTotal API key provided: to use VirusTotal "
"lookups please provide your API key with "
"`export MVT_VT_API_KEY=<key>`")
headers = {
"User-Agent": "VirusTotal",
"Content-Type": "application/json",
"x-apikey": os.environ[MVT_VT_API_KEY],
}
res = requests.get(f"https://www.virustotal.com/api/v3/files/{file_hash}", headers=headers)
res = requests.get(f"https://www.virustotal.com/api/v3/files/{file_hash}",
headers=headers)
if res.status_code == 200:
report = res.json()
return report["data"]
elif res.status_code == 404:
if res.status_code == 404:
log.info("Could not find results for file with hash %s", file_hash)
elif res.status_code == 429:
raise VTQuotaExceeded("You have exceeded the quota for your VirusTotal API key")
raise VTQuotaExceeded("You have exceeded the quota for your "
"VirusTotal API key")
else:
raise Exception("Unexpected response from VirusTotal: %s", res.status_code)
raise Exception(f"Unexpected response from VirusTotal: {res.status_code}")
return None
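For reference, a minimal sketch of how this helper is meant to be driven, assuming it lives in mvt.common.virustotal (the compare view does not show file names) and using a placeholder hash:

import os

# Assumed import path, for illustration only.
from mvt.common.virustotal import (VTNoKey, VTQuotaExceeded,
                                   virustotal_lookup)

os.environ["MVT_VT_API_KEY"] = "<your API key>"  # normally exported in the shell

# Placeholder hash, for illustration only.
file_hash = "44d88612fea8a8f36de82e1278abb02f"
try:
    data = virustotal_lookup(file_hash)
    if data:
        # VirusTotal v3 file objects carry the scan summary here.
        print(data["attributes"]["last_analysis_stats"])
except VTNoKey as exc:
    print(exc)
except VTQuotaExceeded as exc:
    print(exc)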
View File
@@ -58,11 +58,13 @@ def version():
@click.option("--destination", "-d", required=True,
help="Path to the folder where to store the decrypted backup")
@click.option("--password", "-p", cls=MutuallyExclusiveOption,
help=f"Password to use to decrypt the backup (or, set {MVT_IOS_BACKUP_PASSWORD} environment variable)",
help="Password to use to decrypt the backup (or, set "
f"{MVT_IOS_BACKUP_PASSWORD} environment variable)",
mutually_exclusive=["key_file"])
@click.option("--key-file", "-k", cls=MutuallyExclusiveOption,
type=click.Path(exists=True),
help="File containing raw encryption key to use to decrypt the backup",
help="File containing raw encryption key to use to decrypt "
"the backup",
mutually_exclusive=["password"])
@click.argument("BACKUP_PATH", type=click.Path(exists=True))
@click.pass_context
@@ -71,20 +73,22 @@ def decrypt_backup(ctx, destination, password, key_file, backup_path):
if key_file:
if MVT_IOS_BACKUP_PASSWORD in os.environ:
log.info("Ignoring environment variable, using --key-file '%s' instead",
MVT_IOS_BACKUP_PASSWORD, key_file)
log.info("Ignoring %s environment variable, using --key-file"
"'%s' instead", MVT_IOS_BACKUP_PASSWORD, key_file)
backup.decrypt_with_key_file(key_file)
elif password:
log.info("Your password may be visible in the process table because it was supplied on the command line!")
log.info("Your password may be visible in the process table because it "
"was supplied on the command line!")
if MVT_IOS_BACKUP_PASSWORD in os.environ:
log.info("Ignoring %s environment variable, using --password argument instead",
MVT_IOS_BACKUP_PASSWORD)
log.info("Ignoring %s environment variable, using --password"
"argument instead", MVT_IOS_BACKUP_PASSWORD)
backup.decrypt_with_password(password)
elif MVT_IOS_BACKUP_PASSWORD in os.environ:
log.info("Using password from %s environment variable", MVT_IOS_BACKUP_PASSWORD)
log.info("Using password from %s environment variable",
MVT_IOS_BACKUP_PASSWORD)
backup.decrypt_with_password(os.environ[MVT_IOS_BACKUP_PASSWORD])
else:
sekrit = Prompt.ask("Enter backup password", password=True)
@@ -101,23 +105,27 @@ def decrypt_backup(ctx, destination, password, key_file, backup_path):
#==============================================================================
@cli.command("extract-key", help="Extract decryption key from an iTunes backup")
@click.option("--password", "-p",
help=f"Password to use to decrypt the backup (or, set {MVT_IOS_BACKUP_PASSWORD} environment variable)")
help="Password to use to decrypt the backup (or, set "
f"{MVT_IOS_BACKUP_PASSWORD} environment variable)")
@click.option("--key-file", "-k",
help="Key file to be written (if unset, will print to STDOUT)",
required=False,
type=click.Path(exists=False, file_okay=True, dir_okay=False, writable=True))
type=click.Path(exists=False, file_okay=True, dir_okay=False,
writable=True))
@click.argument("BACKUP_PATH", type=click.Path(exists=True))
def extract_key(password, key_file, backup_path):
backup = DecryptBackup(backup_path)
if password:
log.info("Your password may be visible in the process table because it was supplied on the command line!")
log.info("Your password may be visible in the process table because it "
"was supplied on the command line!")
if MVT_IOS_BACKUP_PASSWORD in os.environ:
log.info("Ignoring %s environment variable, using --password argument instead",
MVT_IOS_BACKUP_PASSWORD)
log.info("Ignoring %s environment variable, using --password "
"argument instead", MVT_IOS_BACKUP_PASSWORD)
elif MVT_IOS_BACKUP_PASSWORD in os.environ:
log.info("Using password from %s environment variable", MVT_IOS_BACKUP_PASSWORD)
log.info("Using password from %s environment variable",
MVT_IOS_BACKUP_PASSWORD)
password = os.environ[MVT_IOS_BACKUP_PASSWORD]
else:
password = Prompt.ask("Enter backup password", password=True)
@@ -135,7 +143,8 @@ def extract_key(password, key_file, backup_path):
@cli.command("check-backup", help="Extract artifacts from an iTunes backup")
@click.option("--iocs", "-i", type=click.Path(exists=True), multiple=True,
default=[], help=HELP_MSG_IOC)
@click.option("--output", "-o", type=click.Path(exists=False), help=HELP_MSG_OUTPUT)
@click.option("--output", "-o", type=click.Path(exists=False),
help=HELP_MSG_OUTPUT)
@click.option("--fast", "-f", is_flag=True, help=HELP_MSG_FAST)
@click.option("--list-modules", "-l", is_flag=True, help=HELP_MSG_LIST_MODULES)
@click.option("--module", "-m", help=HELP_MSG_MODULE)
@@ -164,7 +173,8 @@ def check_backup(ctx, iocs, output, fast, list_modules, module, backup_path):
@cli.command("check-fs", help="Extract artifacts from a full filesystem dump")
@click.option("--iocs", "-i", type=click.Path(exists=True), multiple=True,
default=[], help=HELP_MSG_IOC)
@click.option("--output", "-o", type=click.Path(exists=False), help=HELP_MSG_OUTPUT)
@click.option("--output", "-o", type=click.Path(exists=False),
help=HELP_MSG_OUTPUT)
@click.option("--fast", "-f", is_flag=True, help=HELP_MSG_FAST)
@click.option("--list-modules", "-l", is_flag=True, help=HELP_MSG_LIST_MODULES)
@click.option("--module", "-m", help=HELP_MSG_MODULE)
View File
@@ -15,15 +15,15 @@ log = logging.getLogger(__name__)
class CmdIOSCheckBackup(Command):
name = "check-backup"
modules = BACKUP_MODULES + MIXED_MODULES
def __init__(self, target_path: str = None, results_path: str = None,
ioc_files: list = [], module_name: str = None, serial: str = None,
fast_mode: bool = False):
ioc_files: list = [], module_name: str = None,
serial: str = None, fast_mode: bool = False):
super().__init__(target_path=target_path, results_path=results_path,
ioc_files=ioc_files, module_name=module_name,
serial=serial, fast_mode=fast_mode, log=log)
self.name = "check-backup"
self.modules = BACKUP_MODULES + MIXED_MODULES
def module_init(self, module):
module.is_backup = True
View File
@@ -15,15 +15,15 @@ log = logging.getLogger(__name__)
class CmdIOSCheckFS(Command):
name = "check-fs"
modules = FS_MODULES + MIXED_MODULES
def __init__(self, target_path: str = None, results_path: str = None,
ioc_files: list = [], module_name: str = None, serial: str = None,
fast_mode: bool = False):
ioc_files: list = [], module_name: str = None,
serial: str = None, fast_mode: bool = False):
super().__init__(target_path=target_path, results_path=results_path,
ioc_files=ioc_files, module_name=module_name,
serial=serial, fast_mode=fast_mode, log=log)
self.name = "check-fs"
self.modules = FS_MODULES + MIXED_MODULES
def module_init(self, module):
module.is_fs_dump = True
View File
@@ -59,7 +59,8 @@ class DecryptBackup:
self._backup.getFileDecryptedCopy(manifestEntry=item,
targetName=file_id,
targetFolder=item_folder)
log.info("Decrypted file %s [%s] to %s/%s", relative_path, domain, item_folder, file_id)
log.info("Decrypted file %s [%s] to %s/%s", relative_path, domain,
item_folder, file_id)
def process_backup(self) -> None:
if not os.path.exists(self.dest_path):
@@ -79,8 +80,10 @@ class DecryptBackup:
relative_path = item["relativePath"]
domain = item["domain"]
# This may be a partial backup. Skip files from the manifest which do not exist locally.
source_file_path = os.path.join(self.backup_path, file_id[0:2], file_id)
# This may be a partial backup. Skip files from the manifest
# which do not exist locally.
source_file_path = os.path.join(self.backup_path, file_id[0:2],
file_id)
if not os.path.exists(source_file_path):
log.debug("Skipping file %s. File not found in encrypted backup directory.",
source_file_path)
@@ -99,8 +102,8 @@ class DecryptBackup:
domain, item,
file_id,
item_folder))
except Exception as e:
log.error("Failed to decrypt file %s: %s", relative_path, e)
except Exception as exc:
log.error("Failed to decrypt file %s: %s", relative_path, exc)
pool.close()
pool.join()
@@ -128,7 +131,8 @@ class DecryptBackup:
self.backup_path, newpath)
self.backup_path = newpath
elif len(possible) > 1:
log.critical("No Manifest.plist in %s, and %d Manifest.plist files in subdirs. Please choose one!",
log.critical("No Manifest.plist in %s, and %d Manifest.plist "
"files in subdirs. Please choose one!",
self.backup_path, len(possible))
return
@@ -140,15 +144,19 @@ class DecryptBackup:
self._backup = iOSbackup(udid=os.path.basename(self.backup_path),
cleartextpassword=password,
backuproot=os.path.dirname(self.backup_path))
except Exception as e:
if isinstance(e, KeyError) and len(e.args) > 0 and e.args[0] == b"KEY":
except Exception as exc:
if isinstance(exc, KeyError) and len(exc.args) > 0 and exc.args[0] == b"KEY":
log.critical("Failed to decrypt backup. Password is probably wrong.")
elif isinstance(e, FileNotFoundError) and os.path.basename(e.filename) == "Manifest.plist":
log.critical("Failed to find a valid backup at %s. Did you point to the right backup path?",
elif (isinstance(exc, FileNotFoundError)
and os.path.basename(exc.filename) == "Manifest.plist"):
log.critical("Failed to find a valid backup at %s. "
"Did you point to the right backup path?",
self.backup_path)
else:
log.exception(e)
log.critical("Failed to decrypt backup. Did you provide the correct password? Did you point to the right backup path?")
log.exception(exc)
log.critical("Failed to decrypt backup. Did you provide the "
"correct password? Did you point to the right "
"backup path?")
def decrypt_with_key_file(self, key_file: str) -> None:
"""Decrypts an encrypted iOS backup using a key file.
@@ -168,7 +176,8 @@ class DecryptBackup:
# Key should be 64 hex encoded characters (32 raw bytes)
if len(key_bytes) != 64:
log.critical("Invalid key from key file. Did you provide the correct key file?")
log.critical("Invalid key from key file. Did you provide the "
"correct key file?")
return
try:
@@ -176,9 +185,10 @@ class DecryptBackup:
self._backup = iOSbackup(udid=os.path.basename(self.backup_path),
derivedkey=key_bytes_raw,
backuproot=os.path.dirname(self.backup_path))
except Exception as e:
log.exception(e)
log.critical("Failed to decrypt backup. Did you provide the correct key file?")
except Exception as exc:
log.exception(exc)
log.critical("Failed to decrypt backup. Did you provide the "
"correct key file?")
def get_key(self) -> None:
"""Retrieve and prints the encryption key."""
@@ -192,7 +202,8 @@ class DecryptBackup:
def write_key(self, key_path: str) -> None:
"""Save extracted key to file.
:param key_path: Path to the file where to write the derived decryption key.
:param key_path: Path to the file where to write the derived decryption
key.
"""
if not self._decryption_key:
@@ -201,10 +212,11 @@ class DecryptBackup:
try:
with open(key_path, 'w', encoding="utf-8") as handle:
handle.write(self._decryption_key)
except Exception as e:
log.exception(e)
except Exception as exc:
log.exception(exc)
log.critical("Failed to write key to file: %s", key_path)
return
else:
log.info("Wrote decryption key to file: %s. This file is equivalent to a plaintext password. Keep it safe!",
log.info("Wrote decryption key to file: %s. This file is "
"equivalent to a plaintext password. Keep it safe!",
key_path)
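Outside the command line, the same class can be driven directly. A minimal sketch based only on the methods shown above, with the import path, paths and password being illustrative assumptions:

# Assumed import path; the compare view does not show file names.
from mvt.ios.decrypt import DecryptBackup

# Placeholder backup path and password, for illustration only.
backup = DecryptBackup("/path/to/itunes/backup")
backup.decrypt_with_password("hunter2")

# Print the derived decryption key and persist it for later use with --key-file.
backup.get_key()
backup.write_key("/path/to/backup.key")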
View File
@@ -8,7 +8,7 @@ import os
import plistlib
from mvt.common.module import DatabaseNotFoundError
from mvt.ios.versions import latest_ios_version
from mvt.ios.versions import get_device_desc_from_id, latest_ios_version
from ..base import IOSExtraction
@@ -18,7 +18,8 @@ class BackupInfo(IOSExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
@@ -28,7 +29,8 @@ class BackupInfo(IOSExtraction):
def run(self) -> None:
info_path = os.path.join(self.target_path, "Info.plist")
if not os.path.exists(info_path):
raise DatabaseNotFoundError("No Info.plist at backup path, unable to extract device information")
raise DatabaseNotFoundError("No Info.plist at backup path, unable "
"to extract device information")
with open(info_path, "rb") as handle:
info = plistlib.load(handle)
@@ -42,11 +44,20 @@ class BackupInfo(IOSExtraction):
for field in fields:
value = info.get(field, None)
self.log.info("%s: %s", field, value)
# Convert the product type into a human-readable product name
if field == "Product Type" and value:
product_name = get_device_desc_from_id(value)
if product_name:
self.log.info("%s: %s (%s)", field, value, product_name)
else:
self.log.info("%s: %s", field, value)
else:
self.log.info("%s: %s", field, value)
self.results[field] = value
if "Product Version" in info:
latest = latest_ios_version()
if info["Product Version"] != latest["version"]:
self.log.warning("This phone is running an outdated iOS version: %s (latest is %s)",
self.log.warning("This phone is running an outdated iOS "
"version: %s (latest is %s)",
info["Product Version"], latest['version'])
View File
@@ -7,8 +7,9 @@ import logging
import os
import plistlib
from base64 import b64encode
from typing import Union
from mvt.common.utils import convert_timestamp_to_iso
from mvt.common.utils import convert_datetime_to_iso
from ..base import IOSExtraction
@@ -20,14 +21,15 @@ class ConfigurationProfiles(IOSExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
if not record["install_date"]:
return
return {}
payload_name = record['plist'].get('PayloadDisplayName')
payload_description = record['plist'].get('PayloadDescription')
@@ -35,7 +37,9 @@ class ConfigurationProfiles(IOSExtraction):
"timestamp": record["install_date"],
"module": self.__class__.__name__,
"event": "configuration_profile_install",
"data": f"{record['plist']['PayloadType']} installed: {record['plist']['PayloadUUID']} - {payload_name}: {payload_description}"
"data": f"{record['plist']['PayloadType']} installed: "
f"{record['plist']['PayloadUUID']} - "
f"{payload_name}: {payload_description}"
}
def check_indicators(self) -> None:
@@ -46,24 +50,32 @@ class ConfigurationProfiles(IOSExtraction):
if result["plist"].get("PayloadUUID"):
payload_content = result["plist"]["PayloadContent"][0]
# Alert on any known malicious configuration profiles in the indicator list.
# Alert on any known malicious configuration profiles in the
# indicator list.
ioc = self.indicators.check_profile(result["plist"]["PayloadUUID"])
if ioc:
self.log.warning(f"Found a known malicious configuration profile \"{result['plist']['PayloadDisplayName']}\" with UUID '{result['plist']['PayloadUUID']}'.")
self.log.warning(f"Found a known malicious configuration profile "
f"\"{result['plist']['PayloadDisplayName']}\" "
f"with UUID '{result['plist']['PayloadUUID']}'.")
result["matched_indicator"] = ioc
self.detected.append(result)
continue
# Highlight suspicious configuration profiles which may be used to hide notifications.
# Highlight suspicious configuration profiles which may be used
# to hide notifications.
if payload_content["PayloadType"] in ["com.apple.notificationsettings"]:
self.log.warning(f"Found a potentially suspicious configuration profile \"{result['plist']['PayloadDisplayName']}\" with payload type '{payload_content['PayloadType']}'.")
self.log.warning(f"Found a potentially suspicious configuration profile "
f"\"{result['plist']['PayloadDisplayName']}\" with "
f"payload type '{payload_content['PayloadType']}'.")
self.detected.append(result)
continue
def run(self) -> None:
for conf_file in self._get_backup_files_from_manifest(domain=CONF_PROFILES_DOMAIN):
conf_rel_path = conf_file["relative_path"]
# Filter out all configuration files that are not configuration profiles.
# Filter out all configuration files that are not configuration
# profiles.
if not conf_rel_path or not os.path.basename(conf_rel_path).startswith("profile-"):
continue
@@ -76,31 +88,38 @@ class ConfigurationProfiles(IOSExtraction):
conf_plist = plistlib.load(handle)
except Exception:
conf_plist = {}
if "SignerCerts" in conf_plist:
conf_plist["SignerCerts"] = [b64encode(x) for x in conf_plist["SignerCerts"]]
if "OTAProfileStub" in conf_plist:
if "SignerCerts" in conf_plist["OTAProfileStub"]:
conf_plist["OTAProfileStub"]["SignerCerts"] = [b64encode(x) for x in conf_plist["OTAProfileStub"]["SignerCerts"]]
if "PayloadContent" in conf_plist["OTAProfileStub"]:
if "EnrollmentIdentityPersistentID" in conf_plist["OTAProfileStub"]["PayloadContent"]:
conf_plist["OTAProfileStub"]["PayloadContent"]["EnrollmentIdentityPersistentID"] = b64encode(conf_plist["OTAProfileStub"]["PayloadContent"]["EnrollmentIdentityPersistentID"])
if "PushTokenDataSentToServerKey" in conf_plist:
conf_plist["PushTokenDataSentToServerKey"] = b64encode(conf_plist["PushTokenDataSentToServerKey"])
if "LastPushTokenHash" in conf_plist:
conf_plist["LastPushTokenHash"] = b64encode(conf_plist["LastPushTokenHash"])
if "PayloadContent" in conf_plist:
for x in range(len(conf_plist["PayloadContent"])):
if "PERSISTENT_REF" in conf_plist["PayloadContent"][x]:
conf_plist["PayloadContent"][x]["PERSISTENT_REF"] = b64encode(conf_plist["PayloadContent"][x]["PERSISTENT_REF"])
if "IdentityPersistentRef" in conf_plist["PayloadContent"][x]:
conf_plist["PayloadContent"][x]["IdentityPersistentRef"] = b64encode(conf_plist["PayloadContent"][x]["IdentityPersistentRef"])
for content_entry in range(len(conf_plist["PayloadContent"])):
if "PERSISTENT_REF" in conf_plist["PayloadContent"][content_entry]:
conf_plist["PayloadContent"][content_entry]["PERSISTENT_REF"] = b64encode(conf_plist["PayloadContent"][content_entry]["PERSISTENT_REF"])
if "IdentityPersistentRef" in conf_plist["PayloadContent"][content_entry]:
conf_plist["PayloadContent"][content_entry]["IdentityPersistentRef"] = b64encode(conf_plist["PayloadContent"][content_entry]["IdentityPersistentRef"])
self.results.append({
"file_id": conf_file["file_id"],
"relative_path": conf_file["relative_path"],
"domain": conf_file["domain"],
"plist": conf_plist,
"install_date": convert_timestamp_to_iso(conf_plist.get("InstallDate")),
"install_date": convert_datetime_to_iso(conf_plist.get("InstallDate")),
})
self.log.info("Extracted details about %d configuration profiles", len(self.results))
View File
@@ -11,7 +11,7 @@ import plistlib
import sqlite3
from mvt.common.module import DatabaseNotFoundError
from mvt.common.utils import convert_timestamp_to_iso
from mvt.common.utils import convert_datetime_to_iso, convert_unix_to_iso
from ..base import IOSExtraction
@@ -21,7 +21,8 @@ class Manifest(IOSExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
@@ -34,7 +35,8 @@ class Manifest(IOSExtraction):
:param key: Key to look up (tried both as UTF-8 bytes and as str).
"""
return dictionary.get(key.encode("utf-8"), None) or dictionary.get(key, None)
return (dictionary.get(key.encode("utf-8"), None)
or dictionary.get(key, None))
@staticmethod
def _convert_timestamp(timestamp_or_unix_time_int):
@@ -44,24 +46,25 @@ class Manifest(IOSExtraction):
"""
if isinstance(timestamp_or_unix_time_int, datetime.datetime):
return convert_timestamp_to_iso(timestamp_or_unix_time_int)
else:
timestamp = datetime.datetime.utcfromtimestamp(timestamp_or_unix_time_int)
return convert_timestamp_to_iso(timestamp)
return convert_datetime_to_iso(timestamp_or_unix_time_int)
def serialize(self, record: dict) -> None:
return convert_unix_to_iso(timestamp_or_unix_time_int)
def serialize(self, record: dict) -> list:
records = []
if "modified" not in record or "status_changed" not in record:
return
for ts in set([record["created"], record["modified"], record["status_changed"]]):
return records
for timestamp in set([record["created"], record["modified"],
record["status_changed"]]):
macb = ""
macb += "M" if ts == record["modified"] else "-"
macb += "M" if timestamp == record["modified"] else "-"
macb += "-"
macb += "C" if ts == record["status_changed"] else "-"
macb += "B" if ts == record["created"] else "-"
macb += "C" if timestamp == record["status_changed"] else "-"
macb += "B" if timestamp == record["created"] else "-"
records.append({
"timestamp": ts,
"timestamp": timestamp,
"module": self.__class__.__name__,
"event": macb,
"data": f"{record['relative_path']} - {record['domain']}"
@@ -78,8 +81,11 @@ class Manifest(IOSExtraction):
continue
if result["domain"]:
if os.path.basename(result["relative_path"]) == "com.apple.CrashReporter.plist" and result["domain"] == "RootDomain":
self.log.warning("Found a potentially suspicious \"com.apple.CrashReporter.plist\" file created in RootDomain")
if (os.path.basename(result["relative_path"]) == "com.apple.CrashReporter.plist"
and result["domain"] == "RootDomain"):
self.log.warning("Found a potentially suspicious "
"\"com.apple.CrashReporter.plist\" "
"file created in RootDomain")
self.detected.append(result)
continue
@@ -90,7 +96,8 @@ class Manifest(IOSExtraction):
rel_path = result["relative_path"].lower()
for ioc in self.indicators.get_iocs("domains"):
if ioc["value"].lower() in rel_path:
self.log.warning("Found mention of domain \"%s\" in a backup file with path: %s",
self.log.warning("Found mention of domain \"%s\" in a "
"backup file with path: %s",
ioc["value"], rel_path)
self.detected.append(result)
@@ -99,7 +106,8 @@ class Manifest(IOSExtraction):
if not os.path.isfile(manifest_db_path):
raise DatabaseNotFoundError("unable to find backup's Manifest.db")
self.log.info("Found Manifest.db database at path: %s", manifest_db_path)
self.log.info("Found Manifest.db database at path: %s",
manifest_db_path)
conn = sqlite3.connect(manifest_db_path)
cur = conn.cursor()
@@ -126,20 +134,24 @@ class Manifest(IOSExtraction):
file_metadata = self._get_key(file_plist, "$objects")[1]
cleaned_metadata.update({
"created": self._convert_timestamp(self._get_key(file_metadata, "Birth")),
"modified": self._convert_timestamp(self._get_key(file_metadata, "LastModified")),
"status_changed": self._convert_timestamp(self._get_key(file_metadata, "LastStatusChange")),
"modified": self._convert_timestamp(self._get_key(file_metadata,
"LastModified")),
"status_changed": self._convert_timestamp(self._get_key(file_metadata,
"LastStatusChange")),
"mode": oct(self._get_key(file_metadata, "Mode")),
"owner": self._get_key(file_metadata, "UserID"),
"size": self._get_key(file_metadata, "Size"),
})
except Exception:
self.log.exception("Error reading manifest file metadata for file with ID %s and relative path %s",
file_data["fileID"], file_data["relativePath"])
pass
self.log.exception("Error reading manifest file metadata "
"for file with ID %s and relative path %s",
file_data["fileID"],
file_data["relativePath"])
self.results.append(cleaned_metadata)
cur.close()
conn.close()
self.log.info("Extracted a total of %d file metadata items", len(self.results))
self.log.info("Extracted a total of %d file metadata items",
len(self.results))
View File
@@ -5,8 +5,9 @@
import logging
import plistlib
from typing import Union
from mvt.common.utils import convert_timestamp_to_iso
from mvt.common.utils import convert_datetime_to_iso
from ..base import IOSExtraction
@@ -21,18 +22,20 @@ class ProfileEvents(IOSExtraction):
"""
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
return {
"timestamp": record.get("timestamp"),
"module": self.__class__.__name__,
"event": "profile_operation",
"data": f"Process {record.get('process')} started operation "
f"{record.get('operation')} of profile {record.get('profile_id')}"
f"{record.get('operation')} of profile "
f"{record.get('profile_id')}"
}
def check_indicators(self) -> None:
@@ -73,7 +76,7 @@ class ProfileEvents(IOSExtraction):
for key, value in event[key].items():
key = key.lower()
if key == "timestamp":
result["timestamp"] = str(convert_timestamp_to_iso(value))
result["timestamp"] = str(convert_datetime_to_iso(value))
else:
result[key] = value
@@ -87,13 +90,15 @@ class ProfileEvents(IOSExtraction):
if not events_file_path:
continue
self.log.info("Found MCProfileEvents.plist file at %s", events_file_path)
self.log.info("Found MCProfileEvents.plist file at %s",
events_file_path)
with open(events_file_path, "rb") as handle:
self.results.extend(self.parse_profile_events(handle.read()))
for result in self.results:
self.log.info("On %s process \"%s\" started operation \"%s\" of profile \"%s\"",
self.log.info("On %s process \"%s\" started operation \"%s\" "
"of profile \"%s\"",
result.get("timestamp"), result.get("process"),
result.get("operation"), result.get("profile_id"))
View File
@@ -15,11 +15,13 @@ from mvt.common.module import (DatabaseCorruptedError, DatabaseNotFoundError,
class IOSExtraction(MVTModule):
"""This class provides a base for all iOS filesystem/backup extraction modules."""
"""This class provides a base for all iOS filesystem/backup extraction
modules."""
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
@@ -42,8 +44,8 @@ class IOSExtraction(MVTModule):
try:
recover = False
cur.execute("SELECT name FROM sqlite_master WHERE type='table';")
except sqlite3.DatabaseError as e:
if "database disk image is malformed" in str(e):
except sqlite3.DatabaseError as exc:
if "database disk image is malformed" in str(exc):
recover = True
finally:
conn.close()
@@ -51,12 +53,17 @@ class IOSExtraction(MVTModule):
if not recover:
return
self.log.info("Database at path %s is malformed. Trying to recover...", file_path)
self.log.info("Database at path %s is malformed. Trying to recover...",
file_path)
if not shutil.which("sqlite3"):
raise DatabaseCorruptedError("failed to recover without sqlite3 binary: please install sqlite3!")
raise DatabaseCorruptedError("failed to recover without sqlite3 "
"binary: please install sqlite3!")
if '"' in file_path:
raise DatabaseCorruptedError(f"database at path '{file_path}' is corrupted. unable to recover because it has a quotation mark (\") in its name")
raise DatabaseCorruptedError(f"database at path '{file_path}' is "
"corrupted. unable to recover because "
"it has a quotation mark (\") in its "
"name")
bak_path = f"{file_path}.bak"
shutil.move(file_path, bak_path)
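The recovery step that follows the move to the .bak copy sits outside this hunk; a minimal sketch of one way such a step can work, assuming the sqlite3 CLI's .recover command is used to rebuild the database at the original path (an illustration, not necessarily MVT's exact invocation):

import subprocess

# Dump whatever .recover can salvage from the damaged copy and replay that
# SQL into a fresh database at the original path.
recovered = subprocess.run(["sqlite3", bak_path, ".recover"],
                           capture_output=True, check=True)
subprocess.run(["sqlite3", file_path], input=recovered.stdout, check=True)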
@@ -71,8 +78,10 @@ class IOSExtraction(MVTModule):
def _get_backup_files_from_manifest(self, relative_path=None, domain=None):
"""Locate files from Manifest.db.
:param relative_path: Relative path to use as filter from Manifest.db. (Default value = None)
:param domain: Domain to use as filter from Manifest.db. (Default value = None)
:param relative_path: Relative path to use as filter from Manifest.db.
(Default value = None)
:param domain: Domain to use as filter from Manifest.db.
(Default value = None)
"""
manifest_db_path = os.path.join(self.target_path, "Manifest.db")
@@ -89,11 +98,12 @@ class IOSExtraction(MVTModule):
(relative_path, domain))
else:
if relative_path:
cur.execute(f"{base_sql} relativePath = ?;", (relative_path,))
cur.execute(f"{base_sql} relativePath = ?;",
(relative_path,))
elif domain:
cur.execute(f"{base_sql} domain = ?;", (domain,))
except Exception as e:
raise DatabaseCorruptedError("failed to query Manifest.db: %s", e)
except Exception as exc:
raise DatabaseCorruptedError(f"failed to query Manifest.db: {exc}") from exc
for row in cur:
yield {
@@ -111,7 +121,8 @@ class IOSExtraction(MVTModule):
def _get_fs_files_from_patterns(self, root_paths):
for root_path in root_paths:
for found_path in glob.glob(os.path.join(self.target_path, root_path)):
for found_path in glob.glob(os.path.join(self.target_path,
root_path)):
if not os.path.exists(found_path):
continue
@@ -125,7 +136,8 @@ class IOSExtraction(MVTModule):
you should use the helper functions above.
:param backup_id: iTunes backup database file's ID (or hash).
:param root_paths: Glob patterns for files to seek in filesystem dump. (Default value = [])
:param root_paths: Glob patterns for files to seek in filesystem dump.
(Default value = [])
:param backup_ids: Default value = None)
"""
@@ -154,6 +166,7 @@ class IOSExtraction(MVTModule):
if file_path:
self.file_path = file_path
else:
raise DatabaseNotFoundError("unable to find the module's database file")
raise DatabaseNotFoundError("unable to find the module's "
"database file")
self._recover_sqlite_db_if_needed(self.file_path)
View File
@@ -6,8 +6,9 @@
import logging
import plistlib
import sqlite3
from typing import Union
from mvt.common.utils import convert_mactime_to_unix, convert_timestamp_to_iso
from mvt.common.utils import convert_mactime_to_iso
from ..base import IOSExtraction
@@ -17,16 +18,18 @@ ANALYTICS_DB_PATH = [
class Analytics(IOSExtraction):
"""This module extracts information from the private/var/Keychains/Analytics/*.db files."""
"""This module extracts information from the
private/var/Keychains/Analytics/*.db files."""
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
return {
"timestamp": record["isodate"],
"module": self.__class__.__name__,
@@ -45,16 +48,20 @@ class Analytics(IOSExtraction):
ioc = self.indicators.check_process(value)
if ioc:
self.log.warning("Found mention of a malicious process \"%s\" in %s file at %s",
value, result["artifact"], result["timestamp"])
self.log.warning("Found mention of a malicious process "
"\"%s\" in %s file at %s",
value, result["artifact"],
result["timestamp"])
result["matched_indicator"] = ioc
self.detected.append(result)
continue
ioc = self.indicators.check_domain(value)
if ioc:
self.log.warning("Found mention of a malicious domain \"%s\" in %s file at %s",
value, result["artifact"], result["timestamp"])
self.log.warning("Found mention of a malicious domain "
"\"%s\" in %s file at %s",
value, result["artifact"],
result["timestamp"])
result["matched_indicator"] = ioc
self.detected.append(result)
@@ -96,11 +103,11 @@ class Analytics(IOSExtraction):
for row in cur:
if row[0] and row[1]:
isodate = convert_timestamp_to_iso(convert_mactime_to_unix(row[0], False))
isodate = convert_mactime_to_iso(row[0], False)
data = plistlib.loads(row[1])
data["isodate"] = isodate
elif row[0]:
isodate = convert_timestamp_to_iso(convert_mactime_to_unix(row[0], False))
isodate = convert_mactime_to_iso(row[0], False)
data = {}
data["isodate"] = isodate
elif row[1]:
@@ -118,7 +125,8 @@ class Analytics(IOSExtraction):
def process_analytics_dbs(self):
for file_path in self._get_fs_files_from_patterns(ANALYTICS_DB_PATH):
self.file_path = file_path
self.log.info("Found Analytics database file at path: %s", file_path)
self.log.info("Found Analytics database file at path: %s",
file_path)
self._extract_analytics_data()
def run(self) -> None:
View File
@@ -5,6 +5,7 @@
import logging
from datetime import datetime
from typing import Union
from mvt.ios.versions import find_version_by_build
@@ -19,12 +20,13 @@ class AnalyticsIOSVersions(IOSExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
return {
"timestamp": record["isodate"],
"module": self.__class__.__name__,
@@ -44,25 +46,25 @@ class AnalyticsIOSVersions(IOSExtraction):
if not build:
continue
ts = result.get("isodate", None)
if not ts:
isodate = result.get("isodate", None)
if not isodate:
continue
if build not in builds.keys():
builds[build] = ts
builds[build] = isodate
continue
result_dt = datetime.strptime(ts, dt_format)
result_dt = datetime.strptime(isodate, dt_format)
cur_dt = datetime.strptime(builds[build], dt_format)
if result_dt < cur_dt:
builds[build] = ts
builds[build] = isodate
for build, ts in builds.items():
for build, isodate in builds.items():
version = find_version_by_build(build)
self.results.append({
"isodate": ts,
"isodate": isodate,
"build": build,
"version": version,
})
View File
@@ -6,6 +6,7 @@
import logging
import os
import sqlite3
from typing import Union
from ..base import IOSExtraction
@@ -14,12 +15,13 @@ class CacheFiles(IOSExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
records = []
for item in self.results[record]:
records.append({
@@ -73,7 +75,7 @@ class CacheFiles(IOSExtraction):
def run(self) -> None:
self.results = {}
for root, dirs, files in os.walk(self.target_path):
for root, _, files in os.walk(self.target_path):
for file_name in files:
if file_name != "Cache.db":
continue
View File
@@ -3,11 +3,11 @@
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/
import datetime
import logging
import os
from typing import Union
from mvt.common.utils import convert_timestamp_to_iso
from mvt.common.utils import convert_unix_to_iso
from ..base import IOSExtraction
@@ -21,12 +21,13 @@ class Filesystem(IOSExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
return {
"timestamp": record["modified"],
"module": self.__class__.__name__,
@@ -54,7 +55,9 @@ class Filesystem(IOSExtraction):
for ioc in self.indicators.get_iocs("processes"):
parts = result["path"].split("/")
if ioc["value"] in parts:
self.log.warning("Found known suspicious process name mentioned in file at path \"%s\" matching indicators from \"%s\"",
self.log.warning("Found known suspicious process name "
"mentioned in file at path \"%s\" "
"matching indicators from \"%s\"",
result["path"], ioc["name"])
result["matched_indicator"] = ioc
self.detected.append(result)
@@ -66,7 +69,7 @@ class Filesystem(IOSExtraction):
dir_path = os.path.join(root, dir_name)
result = {
"path": os.path.relpath(dir_path, self.target_path),
"modified": convert_timestamp_to_iso(datetime.datetime.utcfromtimestamp(os.stat(dir_path).st_mtime)),
"modified": convert_unix_to_iso(os.stat(dir_path).st_mtime),
}
except Exception:
continue
@@ -78,7 +81,7 @@ class Filesystem(IOSExtraction):
file_path = os.path.join(root, file_name)
result = {
"path": os.path.relpath(file_path, self.target_path),
"modified": convert_timestamp_to_iso(datetime.datetime.utcfromtimestamp(os.stat(file_path).st_mtime)),
"modified": convert_unix_to_iso(os.stat(file_path).st_mtime),
}
except Exception:
continue
View File
@@ -23,7 +23,8 @@ class Netusage(NetBase):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
@@ -34,8 +35,9 @@ class Netusage(NetBase):
self.log.info("Found NetUsage database at path: %s", self.file_path)
try:
self._extract_net_data()
except sqlite3.OperationalError as e:
self.log.info("Skipping this NetUsage database because it seems empty or malformed: %s", e)
except sqlite3.OperationalError as exc:
self.log.info("Skipping this NetUsage database because "
"it seems empty or malformed: %s", exc)
continue
self._find_suspicious_processes()
View File
@@ -5,8 +5,9 @@
import logging
import sqlite3
from typing import Union
from mvt.common.utils import convert_mactime_to_unix, convert_timestamp_to_iso
from mvt.common.utils import convert_mactime_to_iso
from ..base import IOSExtraction
@@ -21,17 +22,19 @@ class SafariFavicon(IOSExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
return {
"timestamp": record["isodate"],
"module": self.__class__.__name__,
"event": "safari_favicon",
"data": f"Safari favicon from {record['url']} with icon URL {record['icon_url']} ({record['type']})",
"data": f"Safari favicon from {record['url']} with icon URL "
f"{record['icon_url']} ({record['type']})",
}
def check_indicators(self) -> None:
@@ -67,7 +70,7 @@ class SafariFavicon(IOSExtraction):
"url": row[0],
"icon_url": row[1],
"timestamp": row[2],
"isodate": convert_timestamp_to_iso(convert_mactime_to_unix(row[2])),
"isodate": convert_mactime_to_iso(row[2]),
"type": "valid",
"safari_favicon_db_path": file_path,
})
@@ -86,7 +89,7 @@ class SafariFavicon(IOSExtraction):
"url": row[0],
"icon_url": row[1],
"timestamp": row[2],
"isodate": convert_timestamp_to_iso(convert_mactime_to_unix(row[2])),
"isodate": convert_mactime_to_iso(row[2]),
"type": "rejected",
"safari_favicon_db_path": file_path,
})
@@ -96,8 +99,10 @@ class SafariFavicon(IOSExtraction):
def run(self) -> None:
for file_path in self._get_fs_files_from_patterns(SAFARI_FAVICON_ROOT_PATHS):
self.log.info("Found Safari favicon cache database at path: %s", file_path)
self.log.info("Found Safari favicon cache database at path: %s",
file_path)
self._process_favicon_db(file_path)
self.log.info("Extracted a total of %d favicon records", len(self.results))
self.log.info("Extracted a total of %d favicon records",
len(self.results))
self.results = sorted(self.results, key=lambda x: x["isodate"])
View File
@@ -4,8 +4,9 @@
# https://license.mvt.re/1.1/
import logging
from typing import Union
from mvt.common.utils import convert_mactime_to_unix, convert_timestamp_to_iso
from mvt.common.utils import convert_mactime_to_iso
from ..base import IOSExtraction
@@ -19,17 +20,19 @@ class ShutdownLog(IOSExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
return {
"timestamp": record["isodate"],
"module": self.__class__.__name__,
"event": "shutdown",
"data": f"Client {record['client']} with PID {record['pid']} was running when the device was shut down",
"data": f"Client {record['client']} with PID {record['pid']} "
"was running when the device was shut down",
}
def check_indicators(self) -> None:
@@ -46,7 +49,8 @@ class ShutdownLog(IOSExtraction):
for ioc in self.indicators.get_iocs("processes"):
parts = result["client"].split("/")
if ioc in parts:
self.log.warning("Found mention of a known malicious process \"%s\" in shutdown.log",
self.log.warning("Found mention of a known malicious "
"process \"%s\" in shutdown.log",
ioc)
result["matched_indicator"] = ioc
self.detected.append(result)
@@ -72,8 +76,7 @@ class ShutdownLog(IOSExtraction):
except Exception:
mac_timestamp = 0
timestamp = convert_mactime_to_unix(mac_timestamp, from_2001=False)
isodate = convert_timestamp_to_iso(timestamp)
isodate = convert_mactime_to_iso(mac_timestamp, from_2001=False)
for current_process in current_processes:
self.results.append({
View File
@@ -6,8 +6,9 @@
import datetime
import json
import logging
from typing import Union
from mvt.common.utils import convert_timestamp_to_iso
from mvt.common.utils import convert_datetime_to_iso
from ..base import IOSExtraction
@@ -21,12 +22,13 @@ class IOSVersionHistory(IOSExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
return {
"timestamp": record["isodate"],
"module": self.__class__.__name__,
@@ -43,7 +45,7 @@ class IOSVersionHistory(IOSExtraction):
"%Y-%m-%d %H:%M:%S.%f %z")
timestamp_utc = timestamp.astimezone(datetime.timezone.utc)
self.results.append({
"isodate": convert_timestamp_to_iso(timestamp_utc),
"isodate": convert_datetime_to_iso(timestamp_utc),
"os_version": log_line["os_version"],
})
View File
@@ -3,10 +3,9 @@
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/
import datetime
import os
from mvt.common.utils import convert_timestamp_to_iso
from mvt.common.utils import convert_unix_to_iso
from ..base import IOSExtraction
@@ -39,5 +38,5 @@ class WebkitBase(IOSExtraction):
self.results.append({
"folder": key,
"url": url,
"isodate": convert_timestamp_to_iso(datetime.datetime.utcfromtimestamp(os.stat(found_path).st_mtime)),
"isodate": convert_unix_to_iso(os.stat(found_path).st_mtime),
})
View File
@@ -4,6 +4,7 @@
# https://license.mvt.re/1.1/
import logging
from typing import Union
from .webkit_base import WebkitBase
@@ -23,17 +24,19 @@ class WebkitIndexedDB(WebkitBase):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
return {
"timestamp": record["isodate"],
"module": self.__class__.__name__,
"event": "webkit_indexeddb",
"data": f"IndexedDB folder {record['folder']} containing file for URL {record['url']}",
"data": f"IndexedDB folder {record['folder']} containing "
f"file for URL {record['url']}",
}
def run(self) -> None:

View File
# https://license.mvt.re/1.1/
import logging
from typing import Union
from .webkit_base import WebkitBase
@@ -21,20 +22,23 @@ class WebkitLocalStorage(WebkitBase):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
return {
"timestamp": record["isodate"],
"module": self.__class__.__name__,
"event": "webkit_local_storage",
"data": f"WebKit Local Storage folder {record['folder']} containing file for URL {record['url']}",
"data": f"WebKit Local Storage folder {record['folder']} "
f"containing file for URL {record['url']}",
}
def run(self) -> None:
self._process_webkit_folder(WEBKIT_LOCALSTORAGE_ROOT_PATHS)
self.log.info("Extracted a total of %d records from WebKit Local Storages",
self.log.info("Extracted a total of %d records from WebKit "
"Local Storages",
len(self.results))
View File
@@ -21,12 +21,14 @@ class WebkitSafariViewService(WebkitBase):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def run(self) -> None:
self._process_webkit_folder(WEBKIT_SAFARIVIEWSERVICE_ROOT_PATHS)
self.log.info("Extracted a total of %d records from WebKit SafariViewService WebsiteData",
self.log.info("Extracted a total of %d records from WebKit "
"SafariViewService WebsiteData",
len(self.results))
View File
@@ -5,8 +5,9 @@
import logging
import sqlite3
from typing import Union
from mvt.common.utils import convert_mactime_to_unix, convert_timestamp_to_iso
from mvt.common.utils import convert_mactime_to_iso
from ..base import IOSExtraction
@@ -23,17 +24,19 @@ class Calls(IOSExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
return {
"timestamp": record["isodate"],
"module": self.__class__.__name__,
"event": "call",
"data": f"From {record['number']} using {record['provider']} during {record['duration']} seconds"
"data": f"From {record['number']} using {record['provider']} "
f"during {record['duration']} seconds"
}
def run(self) -> None:
@@ -52,7 +55,7 @@ class Calls(IOSExtraction):
for row in cur:
self.results.append({
"isodate": convert_timestamp_to_iso(convert_mactime_to_unix(row[0])),
"isodate": convert_mactime_to_iso(row[0]),
"duration": row[1],
"location": row[2],
"number": row[3].decode("utf-8") if row[3] and row[3] is bytes else row[3],
View File
@@ -5,9 +5,10 @@
import logging
import sqlite3
from typing import Union
from mvt.common.utils import (convert_chrometime_to_unix,
convert_timestamp_to_iso)
from mvt.common.utils import (convert_chrometime_to_datetime,
convert_datetime_to_iso)
from ..base import IOSExtraction
@@ -26,12 +27,13 @@ class ChromeFavicon(IOSExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
return {
"timestamp": record["isodate"],
"module": self.__class__.__name__,
@@ -55,7 +57,8 @@ class ChromeFavicon(IOSExtraction):
def run(self) -> None:
self._find_ios_database(backup_ids=CHROME_FAVICON_BACKUP_IDS,
root_paths=CHROME_FAVICON_ROOT_PATHS)
self.log.info("Found Chrome favicon cache database at path: %s", self.file_path)
self.log.info("Found Chrome favicon cache database at path: %s",
self.file_path)
conn = sqlite3.connect(self.file_path)
@@ -80,7 +83,7 @@ class ChromeFavicon(IOSExtraction):
"url": row[0],
"icon_url": row[1],
"timestamp": last_timestamp,
"isodate": convert_timestamp_to_iso(convert_chrometime_to_unix(last_timestamp)),
"isodate": convert_datetime_to_iso(convert_chrometime_to_datetime(last_timestamp)),
})
cur.close()

View File
import logging
import sqlite3
from typing import Union
from mvt.common.utils import (convert_chrometime_to_unix,
convert_timestamp_to_iso)
from mvt.common.utils import (convert_chrometime_to_datetime,
convert_datetime_to_iso)
from ..base import IOSExtraction
@@ -25,17 +26,20 @@ class ChromeHistory(IOSExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
return {
"timestamp": record["isodate"],
"module": self.__class__.__name__,
"event": "visit",
"data": f"{record['id']} - {record['url']} (visit ID: {record['visit_id']}, redirect source: {record['redirect_source']})"
"data": f"{record['id']} - {record['url']} "
f"(visit ID: {record['visit_id']}, "
f"redirect source: {record['redirect_source']})"
}
def check_indicators(self) -> None:
@@ -51,7 +55,8 @@ class ChromeHistory(IOSExtraction):
def run(self) -> None:
self._find_ios_database(backup_ids=CHROME_HISTORY_BACKUP_IDS,
root_paths=CHROME_HISTORY_ROOT_PATHS)
self.log.info("Found Chrome history database at path: %s", self.file_path)
self.log.info("Found Chrome history database at path: %s",
self.file_path)
conn = sqlite3.connect(self.file_path)
cur = conn.cursor()
@@ -73,11 +78,12 @@ class ChromeHistory(IOSExtraction):
"url": item[1],
"visit_id": item[2],
"timestamp": item[3],
"isodate": convert_timestamp_to_iso(convert_chrometime_to_unix(item[3])),
"isodate": convert_datetime_to_iso(convert_chrometime_to_datetime(item[3])),
"redirect_source": item[4],
})
cur.close()
conn.close()
self.log.info("Extracted a total of %d history items", len(self.results))
self.log.info("Extracted a total of %d history items",
len(self.results))
View File
@@ -21,13 +21,15 @@ class Contacts(IOSExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def run(self) -> None:
self._find_ios_database(backup_ids=CONTACTS_BACKUP_IDS, root_paths=CONTACTS_ROOT_PATHS)
self._find_ios_database(backup_ids=CONTACTS_BACKUP_IDS,
root_paths=CONTACTS_ROOT_PATHS)
self.log.info("Found Contacts database at path: %s", self.file_path)
conn = sqlite3.connect(self.file_path)
View File
@@ -5,9 +5,9 @@
import logging
import sqlite3
from datetime import datetime
from typing import Union
from mvt.common.utils import convert_timestamp_to_iso
from mvt.common.utils import convert_unix_to_iso
from ..base import IOSExtraction
@@ -24,17 +24,19 @@ class FirefoxFavicon(IOSExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
return {
"timestamp": record["isodate"],
"module": self.__class__.__name__,
"event": "firefox_history",
"data": f"Firefox favicon {record['url']} when visiting {record['history_url']}",
"data": f"Firefox favicon {record['url']} "
f"when visiting {record['history_url']}",
}
def check_indicators(self) -> None:
@@ -53,7 +55,8 @@ class FirefoxFavicon(IOSExtraction):
def run(self) -> None:
self._find_ios_database(backup_ids=FIREFOX_HISTORY_BACKUP_IDS,
root_paths=FIREFOX_HISTORY_ROOT_PATHS)
self.log.info("Found Firefox favicon database at path: %s", self.file_path)
self.log.info("Found Firefox favicon database at path: %s",
self.file_path)
conn = sqlite3.connect(self.file_path)
cur = conn.cursor()
@@ -79,7 +82,7 @@ class FirefoxFavicon(IOSExtraction):
"width": item[2],
"height": item[3],
"type": item[4],
"isodate": convert_timestamp_to_iso(datetime.utcfromtimestamp(item[5])),
"isodate": convert_unix_to_iso(item[5]),
"history_id": item[6],
"history_url": item[7]
})
@@ -87,4 +90,5 @@ class FirefoxFavicon(IOSExtraction):
cur.close()
conn.close()
self.log.info("Extracted a total of %d history items", len(self.results))
self.log.info("Extracted a total of %d history items",
len(self.results))
View File
@@ -5,9 +5,9 @@
import logging
import sqlite3
from datetime import datetime
from typing import Union
from mvt.common.utils import convert_timestamp_to_iso
from mvt.common.utils import convert_unix_to_iso
from ..base import IOSExtraction
@@ -28,17 +28,19 @@ class FirefoxHistory(IOSExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
return {
"timestamp": record["isodate"],
"module": self.__class__.__name__,
"event": "firefox_history",
"data": f"Firefox visit with ID {record['id']} to URL: {record['url']}",
"data": f"Firefox visit with ID {record['id']} "
f"to URL: {record['url']}",
}
def check_indicators(self) -> None:
@@ -52,8 +54,10 @@ class FirefoxHistory(IOSExtraction):
self.detected.append(result)
def run(self) -> None:
self._find_ios_database(backup_ids=FIREFOX_HISTORY_BACKUP_IDS, root_paths=FIREFOX_HISTORY_ROOT_PATHS)
self.log.info("Found Firefox history database at path: %s", self.file_path)
self._find_ios_database(backup_ids=FIREFOX_HISTORY_BACKUP_IDS,
root_paths=FIREFOX_HISTORY_ROOT_PATHS)
self.log.info("Found Firefox history database at path: %s",
self.file_path)
conn = sqlite3.connect(self.file_path)
cur = conn.cursor()
@@ -72,7 +76,7 @@ class FirefoxHistory(IOSExtraction):
for row in cur:
self.results.append({
"id": row[0],
"isodate": convert_timestamp_to_iso(datetime.utcfromtimestamp(row[1])),
"isodate": convert_unix_to_iso(row[1]),
"url": row[2],
"title": row[3],
"i1000000s_local": row[4],
@@ -82,4 +86,5 @@ class FirefoxHistory(IOSExtraction):
cur.close()
conn.close()
self.log.info("Extracted a total of %d history items", len(self.results))
self.log.info("Extracted a total of %d history items",
len(self.results))


@@ -6,8 +6,9 @@
import collections
import logging
import plistlib
from typing import Union
from mvt.common.utils import convert_mactime_to_unix, convert_timestamp_to_iso
from mvt.common.utils import convert_mactime_to_iso
from ..base import IOSExtraction
@@ -25,17 +26,19 @@ class IDStatusCache(IOSExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
return {
"timestamp": record["isodate"],
"module": self.__class__.__name__,
"event": "lookup",
"data": f"Lookup of {record['user']} within {record['package']} (Status {record['idstatus']})"
"data": f"Lookup of {record['user']} within {record['package']} "
f"(Status {record['idstatus']})"
}
def check_indicators(self) -> None:
@@ -52,7 +55,8 @@ class IDStatusCache(IOSExtraction):
continue
if "\\x00\\x00" in result.get("user", ""):
self.log.warning("Found an ID Status Cache entry with suspicious patterns: %s",
self.log.warning("Found an ID Status Cache entry with "
"suspicious patterns: %s",
result.get("user"))
self.detected.append(result)
@@ -75,7 +79,7 @@ class IDStatusCache(IOSExtraction):
id_status_cache_entries.append({
"package": app,
"user": entry.replace("\x00", "\\x00"),
"isodate": convert_timestamp_to_iso(convert_mactime_to_unix(lookup_date)),
"isodate": convert_mactime_to_iso(lookup_date),
"idstatus": id_status,
})
@@ -89,12 +93,15 @@ class IDStatusCache(IOSExtraction):
if self.is_backup:
self._find_ios_database(backup_ids=IDSTATUSCACHE_BACKUP_IDS)
self.log.info("Found IDStatusCache plist at path: %s", self.file_path)
self.log.info("Found IDStatusCache plist at path: %s",
self.file_path)
self._extract_idstatuscache_entries(self.file_path)
elif self.is_fs_dump:
for idstatuscache_path in self._get_fs_files_from_patterns(IDSTATUSCACHE_ROOT_PATHS):
self.file_path = idstatuscache_path
self.log.info("Found IDStatusCache plist at path: %s", self.file_path)
self.log.info("Found IDStatusCache plist at path: %s",
self.file_path)
self._extract_idstatuscache_entries(self.file_path)
self.log.info("Extracted a total of %d ID Status Cache entries", len(self.results))
self.log.info("Extracted a total of %d ID Status Cache entries",
len(self.results))
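The `serialize()` signatures in these hunks now declare a return type of `Union[dict, list]` instead of `None`. `typing.Union` is used because the equivalent `dict | list` syntax only works as a runtime annotation on Python 3.10+; a minimal illustration (not any module's real `serialize()`):

```python
from typing import Union


def serialize(record: dict) -> Union[dict, list]:
    # Union[dict, list] is valid on Python 3.7+, whereas "dict | list"
    # as a runtime annotation requires Python 3.10 or newer.
    return {"event": "example", "data": record.get("data", "")}
```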


@@ -5,8 +5,9 @@
import logging
import sqlite3
from typing import Union
from mvt.common.utils import convert_mactime_to_unix, convert_timestamp_to_iso
from mvt.common.utils import convert_mactime_to_iso
from ..base import IOSExtraction
@@ -23,7 +24,8 @@ class InteractionC(IOSExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
@@ -41,27 +43,30 @@ class InteractionC(IOSExtraction):
"last_outgoing_recipient_date",
]
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
records = []
processed = []
for ts in self.timestamps:
for timestamp in self.timestamps:
# Check if the record has the current timestamp.
if ts not in record or not record[ts]:
if timestamp not in record or not record[timestamp]:
continue
# Check if the timestamp was already processed.
if record[ts] in processed:
if record[timestamp] in processed:
continue
records.append({
"timestamp": record[ts],
"timestamp": record[timestamp],
"module": self.__class__.__name__,
"event": ts,
"data": f"[{record['bundle_id']}] {record['account']} - from {record['sender_display_name']} "
f"({record['sender_identifier']}) to {record['recipient_display_name']} "
f"({record['recipient_identifier']}): {record['content']}"
"event": timestamp,
"data": f"[{record['bundle_id']}] {record['account']} - "
f"from {record['sender_display_name']} "
f"({record['sender_identifier']}) "
f"to {record['recipient_display_name']} "
f"({record['recipient_identifier']}): "
f"{record['content']}"
})
processed.append(record[ts])
processed.append(record[timestamp])
return records
@@ -131,8 +136,8 @@ class InteractionC(IOSExtraction):
for row in cur:
self.results.append({
"start_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[0])),
"end_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[1])),
"start_date": convert_mactime_to_iso(row[0]),
"end_date": convert_mactime_to_iso(row[1]),
"bundle_id": row[2],
"account": row[3],
"target_bundle_id": row[4],
@@ -156,14 +161,14 @@ class InteractionC(IOSExtraction):
"incoming_recipient_count": row[22],
"incoming_sender_count": row[23],
"outgoing_recipient_count": row[24],
"interactions_creation_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[25])) if row[25] else None,
"contacts_creation_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[26])) if row[26] else None,
"first_incoming_recipient_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[27])) if row[27] else None,
"first_incoming_sender_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[28])) if row[28] else None,
"first_outgoing_recipient_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[29])) if row[29] else None,
"last_incoming_sender_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[30])) if row[30] else None,
"last_incoming_recipient_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[31])) if row[31] else None,
"last_outgoing_recipient_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[32])) if row[32] else None,
"interactions_creation_date": convert_mactime_to_iso(row[25]) if row[25] else None,
"contacts_creation_date": convert_mactime_to_iso(row[26]) if row[26] else None,
"first_incoming_recipient_date": convert_mactime_to_iso(row[27]) if row[27] else None,
"first_incoming_sender_date": convert_mactime_to_iso(row[28]) if row[28] else None,
"first_outgoing_recipient_date": convert_mactime_to_iso(row[29]) if row[29] else None,
"last_incoming_sender_date": convert_mactime_to_iso(row[30]) if row[30] else None,
"last_incoming_recipient_date": convert_mactime_to_iso(row[31]) if row[31] else None,
"last_outgoing_recipient_date": convert_mactime_to_iso(row[32]) if row[32] else None,
"custom_id": row[33],
"location_uuid": row[35],
"group_name": row[36],
@@ -174,4 +179,5 @@ class InteractionC(IOSExtraction):
cur.close()
conn.close()
self.log.info("Extracted a total of %d InteractionC events", len(self.results))
self.log.info("Extracted a total of %d InteractionC events",
len(self.results))


@@ -5,8 +5,9 @@
import logging
import plistlib
from typing import Union
from mvt.common.utils import convert_mactime_to_unix, convert_timestamp_to_iso
from mvt.common.utils import convert_mactime_to_iso
from ..base import IOSExtraction
@@ -24,7 +25,8 @@ class LocationdClients(IOSExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
@@ -41,7 +43,7 @@ class LocationdClients(IOSExtraction):
"BeaconRegionTimeStopped",
]
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
records = []
for timestamp in self.timestamps:
if timestamp in record.keys():
@@ -64,8 +66,8 @@ class LocationdClients(IOSExtraction):
ioc = self.indicators.check_process(proc_name)
if ioc:
self.log.warning("Found a suspicious process name in LocationD entry %s",
result["package"])
self.log.warning("Found a suspicious process name in "
"LocationD entry %s", result["package"])
result["matched_indicator"] = ioc
self.detected.append(result)
continue
@@ -73,8 +75,8 @@ class LocationdClients(IOSExtraction):
if "BundlePath" in result:
ioc = self.indicators.check_file_path(result["BundlePath"])
if ioc:
self.log.warning("Found a suspicious file path in Location D: %s",
result["BundlePath"])
self.log.warning("Found a suspicious file path in "
"Location D: %s", result["BundlePath"])
result["matched_indicator"] = ioc
self.detected.append(result)
continue
@@ -82,8 +84,8 @@ class LocationdClients(IOSExtraction):
if "Executable" in result:
ioc = self.indicators.check_file_path(result["Executable"])
if ioc:
self.log.warning("Found a suspicious file path in Location D: %s",
result["Executable"])
self.log.warning("Found a suspicious file path in "
"Location D: %s", result["Executable"])
result["matched_indicator"] = ioc
self.detected.append(result)
continue
@@ -91,8 +93,8 @@ class LocationdClients(IOSExtraction):
if "Registered" in result:
ioc = self.indicators.check_file_path(result["Registered"])
if ioc:
self.log.warning("Found a suspicious file path in Location D: %s",
result["Registered"])
self.log.warning("Found a suspicious file path in "
"Location D: %s", result["Registered"])
result["matched_indicator"] = ioc
self.detected.append(result)
continue
@@ -101,24 +103,27 @@ class LocationdClients(IOSExtraction):
with open(file_path, "rb") as handle:
file_plist = plistlib.load(handle)
for key, values in file_plist.items():
for key, _ in file_plist.items():
result = file_plist[key]
result["package"] = key
for ts in self.timestamps:
if ts in result.keys():
result[ts] = convert_timestamp_to_iso(convert_mactime_to_unix(result[ts]))
for timestamp in self.timestamps:
if timestamp in result.keys():
result[timestamp] = convert_mactime_to_iso(result[timestamp])
self.results.append(result)
def run(self) -> None:
if self.is_backup:
self._find_ios_database(backup_ids=LOCATIOND_BACKUP_IDS)
self.log.info("Found Locationd Clients plist at path: %s", self.file_path)
self.log.info("Found Locationd Clients plist at path: %s",
self.file_path)
self._extract_locationd_entries(self.file_path)
elif self.is_fs_dump:
for locationd_path in self._get_fs_files_from_patterns(LOCATIOND_ROOT_PATHS):
self.file_path = locationd_path
self.log.info("Found Locationd Clients plist at path: %s", self.file_path)
self.log.info("Found Locationd Clients plist at path: %s",
self.file_path)
self._extract_locationd_entries(self.file_path)
self.log.info("Extracted a total of %d Locationd Clients entries", len(self.results))
self.log.info("Extracted a total of %d Locationd Clients entries",
len(self.results))


@@ -24,7 +24,8 @@ class Datausage(NetBase):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)


@@ -5,8 +5,9 @@
import logging
import plistlib
from typing import Union
from mvt.common.utils import convert_timestamp_to_iso
from mvt.common.utils import convert_datetime_to_iso
from ..base import IOSExtraction
@@ -19,23 +20,26 @@ OSANALYTICS_ADDAILY_ROOT_PATHS = [
class OSAnalyticsADDaily(IOSExtraction):
"""Extract network usage information by process, from com.apple.osanalytics.addaily.plist"""
"""Extract network usage information by process,
from com.apple.osanalytics.addaily.plist"""
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
record_data = f"{record['package']} WIFI IN: {record['wifi_in']}, WIFI OUT: {record['wifi_out']} - " \
f"WWAN IN: {record['wwan_in']}, WWAN OUT: {record['wwan_out']}"
def serialize(self, record: dict) -> Union[dict, list]:
return {
"timestamp": record["ts"],
"module": self.__class__.__name__,
"event": "osanalytics_addaily",
"data": record_data,
"data": f"{record['package']} WIFI IN: {record['wifi_in']}, "
f"WIFI OUT: {record['wifi_out']} - "
f"WWAN IN: {record['wwan_in']}, "
f"WWAN OUT: {record['wwan_out']}",
}
def check_indicators(self) -> None:
@@ -51,7 +55,8 @@ class OSAnalyticsADDaily(IOSExtraction):
def run(self) -> None:
self._find_ios_database(backup_ids=OSANALYTICS_ADDAILY_BACKUP_IDS,
root_paths=OSANALYTICS_ADDAILY_ROOT_PATHS)
self.log.info("Found com.apple.osanalytics.addaily plist at path: %s", self.file_path)
self.log.info("Found com.apple.osanalytics.addaily plist at path: %s",
self.file_path)
with open(self.file_path, "rb") as handle:
file_plist = plistlib.load(handle)
@@ -59,11 +64,12 @@ class OSAnalyticsADDaily(IOSExtraction):
for app, values in file_plist.get("netUsageBaseline", {}).items():
self.results.append({
"package": app,
"ts": convert_timestamp_to_iso(values[0]),
"ts": convert_datetime_to_iso(values[0]),
"wifi_in": values[1],
"wifi_out": values[2],
"wwan_in": values[3],
"wwan_out": values[4],
})
self.log.info("Extracted a total of %d com.apple.osanalytics.addaily entries", len(self.results))
self.log.info("Extracted a total of %d com.apple.osanalytics.addaily "
"entries", len(self.results))


@@ -8,9 +8,9 @@ import logging
import os
import plistlib
import sqlite3
from typing import Union
from mvt.common.utils import (convert_mactime_to_unix,
convert_timestamp_to_iso, keys_bytes_to_string)
from mvt.common.utils import convert_mactime_to_iso, keys_bytes_to_string
from ..base import IOSExtraction
@@ -26,14 +26,15 @@ class SafariBrowserState(IOSExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
self._session_history_count = 0
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
return {
"timestamp": record["last_viewed_timestamp"],
"module": self.__class__.__name__,
@@ -115,22 +116,28 @@ class SafariBrowserState(IOSExtraction):
"tab_title": row[0],
"tab_url": row[1],
"tab_visible_url": row[2],
"last_viewed_timestamp": convert_timestamp_to_iso(convert_mactime_to_unix(row[3])),
"last_viewed_timestamp": convert_mactime_to_iso(row[3]),
"session_data": session_entries,
"safari_browser_state_db": os.path.relpath(db_path, self.target_path),
"safari_browser_state_db": os.path.relpath(db_path,
self.target_path),
})
def run(self) -> None:
if self.is_backup:
for backup_file in self._get_backup_files_from_manifest(relative_path=SAFARI_BROWSER_STATE_BACKUP_RELPATH):
self.file_path = self._get_backup_file_from_id(backup_file["file_id"])
self.log.info("Found Safari browser state database at path: %s", self.file_path)
self._process_browser_state_db(self.file_path)
elif self.is_fs_dump:
for safari_browserstate_path in self._get_fs_files_from_patterns(SAFARI_BROWSER_STATE_ROOT_PATHS):
self.file_path = safari_browserstate_path
self.log.info("Found Safari browser state database at path: %s", self.file_path)
self._process_browser_state_db(self.file_path)
browserstate_path = self._get_backup_file_from_id(backup_file["file_id"])
if not browserstate_path:
continue
self.log.info("Extracted a total of %d tab records and %d session history entries",
len(self.results), self._session_history_count)
self.log.info("Found Safari browser state database at path: %s",
browserstate_path)
self._process_browser_state_db(browserstate_path)
elif self.is_fs_dump:
for browserstate_path in self._get_fs_files_from_patterns(SAFARI_BROWSER_STATE_ROOT_PATHS):
self.log.info("Found Safari browser state database at path: %s",
browserstate_path)
self._process_browser_state_db(browserstate_path)
self.log.info("Extracted a total of %d tab records and %d session "
"history entries", len(self.results),
self._session_history_count)


@@ -6,9 +6,11 @@
import logging
import os
import sqlite3
from typing import Union
from mvt.common.url import URL
from mvt.common.utils import convert_mactime_to_unix, convert_timestamp_to_iso
from mvt.common.utils import (convert_mactime_to_datetime,
convert_mactime_to_iso)
from ..base import IOSExtraction
@@ -28,12 +30,13 @@ class SafariHistory(IOSExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
return {
"timestamp": record["isodate"],
"module": self.__class__.__name__,
@@ -67,8 +70,8 @@ class SafariHistory(IOSExtraction):
self.log.info("Found HTTP redirect to different domain: \"%s\" -> \"%s\"",
origin_domain, redirect_domain)
redirect_time = convert_mactime_to_unix(redirect["timestamp"])
origin_time = convert_mactime_to_unix(result["timestamp"])
redirect_time = convert_mactime_to_datetime(redirect["timestamp"])
origin_time = convert_mactime_to_datetime(result["timestamp"])
elapsed_time = redirect_time - origin_time
elapsed_ms = elapsed_time.microseconds / 1000
@@ -110,7 +113,7 @@ class SafariHistory(IOSExtraction):
"url": row[1],
"visit_id": row[2],
"timestamp": row[3],
"isodate": convert_timestamp_to_iso(convert_mactime_to_unix(row[3])),
"isodate": convert_mactime_to_iso(row[3]),
"redirect_source": row[4],
"redirect_destination": row[5],
"safari_history_db": os.path.relpath(history_path, self.target_path),


@@ -8,9 +8,9 @@ import itertools
import logging
import plistlib
import sqlite3
from typing import Union
from mvt.common.utils import (check_for_links, convert_mactime_to_unix,
convert_timestamp_to_iso)
from mvt.common.utils import check_for_links, convert_mactime_to_iso
from ..base import IOSExtraction
@@ -27,18 +27,20 @@ class Shortcuts(IOSExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
found_urls = ""
if record["action_urls"]:
found_urls = "- URLs in actions: {}".format(", ".join(record["action_urls"]))
found_urls = f"- URLs in actions: {', '.join(record['action_urls'])}"
desc = ""
if record["description"]:
desc = record["description"].decode('utf-8', errors='ignore')
desc = record["description"].decode("utf-8", errors="ignore")
return [{
"timestamp": record["isodate"],
@@ -111,8 +113,8 @@ class Shortcuts(IOSExtraction):
action["urls"] = [url.rstrip("',") for url in extracted_urls]
actions.append(action)
shortcut["isodate"] = convert_timestamp_to_iso(convert_mactime_to_unix(shortcut.pop("created_date")))
shortcut["modified_date"] = convert_timestamp_to_iso(convert_mactime_to_unix(shortcut["modified_date"]))
shortcut["isodate"] = convert_mactime_to_iso(shortcut.pop("created_date"))
shortcut["modified_date"] = convert_mactime_to_iso(shortcut["modified_date"])
shortcut["parsed_actions"] = len(actions)
shortcut["action_urls"] = list(itertools.chain(*[action["urls"] for action in actions]))
self.results.append(shortcut)


@@ -6,9 +6,9 @@
import logging
import sqlite3
from base64 import b64encode
from typing import Union
from mvt.common.utils import (check_for_links, convert_mactime_to_unix,
convert_timestamp_to_iso)
from mvt.common.utils import check_for_links, convert_mactime_to_iso
from ..base import IOSExtraction
@@ -25,18 +25,20 @@ class SMS(IOSExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
text = record["text"].replace("\n", "\\n")
return {
"timestamp": record["isodate"],
"module": self.__class__.__name__,
"event": "sms_received",
"data": f"{record['service']}: {record['guid']} \"{text}\" from {record['phone_number']} ({record['account']})"
"data": f"{record['service']}: {record['guid']} \"{text}\" "
f"from {record['phone_number']} ({record['account']})"
}
def check_indicators(self) -> None:
@@ -67,9 +69,9 @@ class SMS(IOSExtraction):
""")
# Force the query early to catch database issues
items = list(cur)
except sqlite3.DatabaseError as e:
except sqlite3.DatabaseError as exc:
conn.close()
if "database disk image is malformed" in str(e):
if "database disk image is malformed" in str(exc):
self._recover_sqlite_db_if_needed(self.file_path, forced=True)
conn = sqlite3.connect(self.file_path)
cur = conn.cursor()
@@ -82,7 +84,7 @@ class SMS(IOSExtraction):
""")
items = list(cur)
else:
raise e
raise exc
names = [description[0] for description in cur.description]
for item in items:
@@ -98,25 +100,29 @@ class SMS(IOSExtraction):
message[names[index]] = value
# We convert Mac's ridiculous timestamp format.
message["isodate"] = convert_timestamp_to_iso(convert_mactime_to_unix(message["date"]))
message["direction"] = ("sent" if message.get("is_from_me", 0) == 1 else "received")
message["isodate"] = convert_mactime_to_iso(message["date"])
message["direction"] = ("sent" if message.get("is_from_me", 0) == 1
else "received")
# Sometimes "text" is None instead of empty string.
if not message.get("text", None):
message["text"] = ""
if message.get("text", "").startswith("ALERT: State-sponsored attackers may be targeting your iPhone"):
self.log.warn("Apple warning about state-sponsored attack received on the %s", message["isodate"])
self.log.warn("Apple warning about state-sponsored attack "
"received on the %s", message["isodate"])
self.results.append(message)
else:
# Extract links from the SMS message.
message_links = check_for_links(message.get("text", ""))
# If we find links in the messages or if they are empty we add them to the list.
# If we find links in the messages or if they are empty we add
# them to the list.
if message_links or message.get("text", "").strip() == "":
self.results.append(message)
cur.close()
conn.close()
self.log.info("Extracted a total of %d SMS messages containing links", len(self.results))
self.log.info("Extracted a total of %d SMS messages containing links",
len(self.results))
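`check_for_links()` is what this module (and the Shortcuts and WhatsApp modules elsewhere in this diff) uses to pull URLs out of message text. Its implementation is not shown here; an assumed approximation, likely a simple regex scan, would be:

```python
import re

# Assumed approximation of mvt.common.utils.check_for_links; the real helper
# may use a different pattern, but the idea is plain URL extraction from text.
URL_REGEX = re.compile(r"https?://[^\s\"'<>]+")


def check_for_links(text: str) -> list:
    """Return all http(s) URLs found in the given text."""
    return URL_REGEX.findall(text or "")


links = check_for_links("Check https://example.org/page and http://example.com")
assert links == ["https://example.org/page", "http://example.com"]
```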


@@ -6,8 +6,9 @@
import logging
import sqlite3
from base64 import b64encode
from typing import Union
from mvt.common.utils import convert_mactime_to_unix, convert_timestamp_to_iso
from mvt.common.utils import convert_mactime_to_iso
from ..base import IOSExtraction
@@ -24,18 +25,23 @@ class SMSAttachments(IOSExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
return {
"timestamp": record["isodate"],
"module": self.__class__.__name__,
"event": "sms_attachment",
"data": f"{record['service']}: Attachment '{record['transfer_name']}' {record['direction']} from {record['phone_number']} "
f"with {record['total_bytes']} bytes (is_sticker: {record['is_sticker']}, has_user_info: {record['has_user_info']})"
"data": f"{record['service']}: Attachment "
f"'{record['transfer_name']}' {record['direction']} "
f"from {record['phone_number']} "
f"with {record['total_bytes']} bytes "
f"(is_sticker: {record['is_sticker']}, "
f"has_user_info: {record['has_user_info']})"
}
def run(self) -> None:
@@ -68,16 +74,18 @@ class SMSAttachments(IOSExtraction):
value = b64encode(value).decode()
attachment[names[index]] = value
attachment["isodate"] = convert_timestamp_to_iso(convert_mactime_to_unix(attachment["created_date"]))
attachment["start_date"] = convert_timestamp_to_iso(convert_mactime_to_unix(attachment["start_date"]))
attachment["isodate"] = convert_mactime_to_iso(attachment["created_date"])
attachment["start_date"] = convert_mactime_to_iso(attachment["start_date"])
attachment["direction"] = ("sent" if attachment["is_outgoing"] == 1 else "received")
attachment["has_user_info"] = attachment["user_info"] is not None
attachment["service"] = attachment["service"] or "Unknown"
attachment["filename"] = attachment["filename"] or "NULL"
if (attachment["filename"].startswith("/var/tmp/") and attachment["filename"].endswith("-1")
if (attachment["filename"].startswith("/var/tmp/")
and attachment["filename"].endswith("-1")
and attachment["direction"] == "received"):
self.log.warn(f"Suspicious iMessage attachment '{attachment['filename']}' on {attachment['isodate']}")
self.log.warn("Suspicious iMessage attachment %s on %s",
attachment['filename'], attachment['isodate'])
self.detected.append(attachment)
self.results.append(attachment)


@@ -5,9 +5,9 @@
import logging
import sqlite3
from datetime import datetime
from typing import Union
from mvt.common.utils import convert_timestamp_to_iso
from mvt.common.utils import convert_unix_to_iso
from ..base import IOSExtraction
@@ -22,7 +22,6 @@ AUTH_VALUE_OLD = {
0: "denied",
1: "allowed"
}
AUTH_VALUES = {
0: "denied",
1: "unknown",
@@ -50,17 +49,21 @@ class TCC(IOSExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
if "last_modified" in record:
if "allowed_value" in record:
msg = f"Access to {record['service']} by {record['client']} {record['allowed_value']}"
msg = (f"Access to {record['service']} by {record['client']} "
f"{record['allowed_value']}")
else:
msg = f"Access to {record['service']} by {record['client']} {record['auth_value']}"
msg = (f"Access to {record['service']} by {record['client']} "
f"{record['auth_value']}")
return {
"timestamp": record["last_modified"],
"module": self.__class__.__name__,
@@ -68,6 +71,8 @@ class TCC(IOSExtraction):
"data": msg
}
return {}
def check_indicators(self) -> None:
if not self.indicators:
return
@@ -84,18 +89,21 @@ class TCC(IOSExtraction):
db_version = "v3"
try:
cur.execute("""SELECT
service, client, client_type, auth_value, auth_reason, last_modified
service, client, client_type, auth_value,
auth_reason, last_modified
FROM access;""")
except sqlite3.OperationalError:
# v2 version
try:
cur.execute("""SELECT
service, client, client_type, allowed, prompt_count, last_modified
service, client, client_type, allowed,
prompt_count, last_modified
FROM access;""")
db_version = "v2"
except sqlite3.OperationalError:
cur.execute("""SELECT
service, client, client_type, allowed, prompt_count
service, client, client_type, allowed,
prompt_count
FROM access;""")
db_version = "v1"
@@ -103,18 +111,20 @@ class TCC(IOSExtraction):
service = row[0]
client = row[1]
client_type = row[2]
client_type_desc = "bundle_id" if client_type == 0 else "absolute_path"
client_type_desc = ("bundle_id" if client_type == 0
else "absolute_path")
if db_version == "v3":
auth_value = row[3]
auth_value_desc = AUTH_VALUES.get(auth_value, "")
auth_reason = row[4]
auth_reason_desc = AUTH_REASONS.get(auth_reason, "unknown")
last_modified = convert_timestamp_to_iso(datetime.utcfromtimestamp((row[5])))
last_modified = convert_unix_to_iso(row[5])
if service in ["kTCCServiceMicrophone", "kTCCServiceCamera"]:
device = "microphone" if service == "kTCCServiceMicrophone" else "camera"
self.log.info("Found client \"%s\" with access %s to %s on %s by %s",
client, auth_value_desc, device, last_modified, auth_reason_desc)
self.log.info("Found client \"%s\" with access %s to %s "
"on %s by %s", client, auth_value_desc,
device, last_modified, auth_reason_desc)
self.results.append({
"service": service,
@@ -129,11 +139,13 @@ class TCC(IOSExtraction):
allowed_desc = AUTH_VALUE_OLD.get(allowed_value, "")
prompt_count = row[4]
if db_version == "v2":
last_modified = convert_timestamp_to_iso(datetime.utcfromtimestamp((row[5])))
last_modified = convert_unix_to_iso(row[5])
if service in ["kTCCServiceMicrophone", "kTCCServiceCamera"]:
device = "microphone" if service == "kTCCServiceMicrophone" else "camera"
self.log.info("Found client \"%s\" with access %s to %s at %s",
client, allowed_desc, device, last_modified)
self.log.info("Found client \"%s\" with access %s to "
"%s at %s", client, allowed_desc, device,
last_modified)
self.results.append({
"service": service,
"client": client,
@@ -147,6 +159,7 @@ class TCC(IOSExtraction):
device = "microphone" if service == "kTCCServiceMicrophone" else "camera"
self.log.info("Found client \"%s\" with access %s to %s",
client, allowed_desc, device)
self.results.append({
"service": service,
"client": client,
@@ -159,7 +172,8 @@ class TCC(IOSExtraction):
conn.close()
def run(self) -> None:
self._find_ios_database(backup_ids=TCC_BACKUP_IDS, root_paths=TCC_ROOT_PATHS)
self._find_ios_database(backup_ids=TCC_BACKUP_IDS,
root_paths=TCC_ROOT_PATHS)
self.log.info("Found TCC database at path: %s", self.file_path)
self.process_db(self.file_path)
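The nested try/except blocks above probe the TCC schema version by attempting the newest query first and falling back on `sqlite3.OperationalError`. A minimal, self-contained illustration of that probing pattern on an in-memory database (not the real TCC schema):

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE access (service TEXT, client TEXT, allowed INTEGER)")
cur = conn.cursor()

db_version = "v3"
try:
    # Newest schema would expose an auth_value column.
    cur.execute("SELECT service, client, auth_value FROM access;")
except sqlite3.OperationalError:
    # Older schema: fall back to the legacy "allowed" column.
    cur.execute("SELECT service, client, allowed FROM access;")
    db_version = "v2"

print(db_version)  # -> v2, since this toy table has no auth_value column
```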


@@ -3,12 +3,11 @@
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/
import datetime
import logging
import os
import sqlite3
from mvt.common.utils import convert_timestamp_to_iso
from mvt.common.utils import convert_unix_to_iso
from ..base import IOSExtraction
@@ -20,12 +19,14 @@ WEBKIT_RESOURCELOADSTATICS_ROOT_PATHS = [
class WebkitResourceLoadStatistics(IOSExtraction):
"""This module extracts records from WebKit ResourceLoadStatistics observations.db."""
"""This module extracts records from WebKit ResourceLoadStatistics
observations.db."""
# TODO: Add serialize().
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
@@ -48,7 +49,8 @@ class WebkitResourceLoadStatistics(IOSExtraction):
self.detected[key].append(item)
def _process_observations_db(self, db_path, key):
self.log.info("Found WebKit ResourceLoadStatistics observations.db file at path %s", db_path)
self.log.info("Found WebKit ResourceLoadStatistics observations.db "
"file at path %s", db_path)
self._recover_sqlite_db_if_needed(db_path)
@@ -69,11 +71,12 @@ class WebkitResourceLoadStatistics(IOSExtraction):
"registrable_domain": row[1],
"last_seen": row[2],
"had_user_interaction": bool(row[3]),
"last_seen_isodate": convert_timestamp_to_iso(datetime.datetime.utcfromtimestamp(int(row[2]))),
"last_seen_isodate": convert_unix_to_iso(row[2]),
})
if len(self.results[key]) > 0:
self.log.info("Extracted a total of %d records from %s", len(self.results[key]), db_path)
self.log.info("Extracted a total of %d records from %s",
len(self.results[key]), db_path)
def run(self) -> None:
if self.is_backup:
@@ -83,8 +86,9 @@ class WebkitResourceLoadStatistics(IOSExtraction):
key = f"{backup_file['domain']}/{WEBKIT_RESOURCELOADSTATICS_BACKUP_RELPATH}"
if db_path:
self._process_observations_db(db_path=db_path, key=key)
except Exception as e:
self.log.info("Unable to search for WebKit observations.db: %s", e)
except Exception as exc:
self.log.info("Unable to find WebKit observations.db: %s", exc)
elif self.is_fs_dump:
for db_path in self._get_fs_files_from_patterns(WEBKIT_RESOURCELOADSTATICS_ROOT_PATHS):
self._process_observations_db(db_path=db_path, key=os.path.relpath(db_path, self.target_path))
db_rel_path = os.path.relpath(db_path, self.target_path)
self._process_observations_db(db_path=db_path, key=db_rel_path)


@@ -7,7 +7,7 @@ import logging
import os
import plistlib
from mvt.common.utils import convert_timestamp_to_iso
from mvt.common.utils import convert_datetime_to_iso
from ..base import IOSExtraction
@@ -32,7 +32,8 @@ class WebkitSessionResourceLog(IOSExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
@@ -57,7 +58,7 @@ class WebkitSessionResourceLog(IOSExtraction):
if not self.indicators:
return
for key, entries in self.results.items():
for _, entries in self.results.items():
for entry in entries:
source_domains = self._extract_domains(entry["redirect_source"])
destination_domains = self._extract_domains(entry["redirect_destination"])
@@ -92,7 +93,8 @@ class WebkitSessionResourceLog(IOSExtraction):
redirect_path += ", ".join(destination_domains)
self.log.warning("Found HTTP redirect between suspicious domains: %s", redirect_path)
self.log.warning("Found HTTP redirect between suspicious "
"domains: %s", redirect_path)
def _extract_browsing_stats(self, log_path):
items = []
@@ -113,8 +115,8 @@ class WebkitSessionResourceLog(IOSExtraction):
"subframe_under_origin": item.get("subframeUnderTopFrameOrigins", ""),
"subresource_under_origin": item.get("subresourceUnderTopFrameOrigins", ""),
"user_interaction": item.get("hadUserInteraction"),
"most_recent_interaction": convert_timestamp_to_iso(item["mostRecentUserInteraction"]),
"last_seen": convert_timestamp_to_iso(item["lastSeen"]),
"most_recent_interaction": convert_datetime_to_iso(item["mostRecentUserInteraction"]),
"last_seen": convert_datetime_to_iso(item["lastSeen"]),
})
return items
@@ -125,13 +127,16 @@ class WebkitSessionResourceLog(IOSExtraction):
log_path = self._get_backup_file_from_id(log_file["file_id"])
if not log_path:
continue
self.log.info("Found Safari browsing session resource log at path: %s", log_path)
self.log.info("Found Safari browsing session resource log at "
"path: %s", log_path)
self.results[log_path] = self._extract_browsing_stats(log_path)
elif self.is_fs_dump:
for log_path in self._get_fs_files_from_patterns(WEBKIT_SESSION_RESOURCE_LOG_ROOT_PATHS):
self.log.info("Found Safari browsing session resource log at path: %s", log_path)
self.log.info("Found Safari browsing session resource log at "
"path: %s", log_path)
key = os.path.relpath(log_path, self.target_path)
self.results[key] = self._extract_browsing_stats(log_path)
self.log.info("Extracted records from %d Safari browsing session resource logs",
len(self.results))
self.log.info("Extracted records from %d Safari browsing session "
"resource logs", len(self.results))


@@ -5,14 +5,12 @@
import logging
import sqlite3
from typing import Union
from mvt.common.utils import (check_for_links, convert_mactime_to_unix,
convert_timestamp_to_iso)
from mvt.common.utils import check_for_links, convert_mactime_to_iso
from ..base import IOSExtraction
log = logging.getLogger(__name__)
WHATSAPP_BACKUP_IDS = [
"7c7fba66680ef796b916b067077cc246adacf01d",
]
@@ -26,12 +24,13 @@ class Whatsapp(IOSExtraction):
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record: dict) -> None:
def serialize(self, record: dict) -> Union[dict, list]:
text = record.get("ZTEXT", "").replace("\n", "\\n")
links_text = ""
if record["links"]:
@@ -62,7 +61,8 @@ class Whatsapp(IOSExtraction):
conn = sqlite3.connect(self.file_path)
cur = conn.cursor()
# Query all messages and join tables which can contain media attachments and links
# Query all messages and join tables which can contain media attachments
# and links.
cur.execute("""
SELECT
ZWAMESSAGE.*,
@@ -84,13 +84,15 @@ class Whatsapp(IOSExtraction):
for index, value in enumerate(message_row):
message[names[index]] = value
message["isodate"] = convert_timestamp_to_iso(convert_mactime_to_unix(message.get("ZMESSAGEDATE")))
message["isodate"] = convert_mactime_to_iso(message.get("ZMESSAGEDATE"))
message["ZTEXT"] = message["ZTEXT"] if message["ZTEXT"] else ""
# Extract links from the WhatsApp message. URLs can be stored in multiple fields/columns.
# Extract links from the WhatsApp message. URLs can be stored in
# multiple fields/columns.
# Check each of them!
message_links = []
fields_with_links = ["ZTEXT", "ZMATCHEDTEXT", "ZMEDIAURL", "ZCONTENT1", "ZCONTENT2"]
fields_with_links = ["ZTEXT", "ZMATCHEDTEXT", "ZMEDIAURL",
"ZCONTENT1", "ZCONTENT2"]
for field in fields_with_links:
if message.get(field):
message_links.extend(check_for_links(message.get(field, "")))
@@ -98,10 +100,12 @@ class Whatsapp(IOSExtraction):
# Remove WhatsApp internal media URLs.
filtered_links = []
for link in message_links:
if not (link.startswith("https://mmg-fna.whatsapp.net/") or link.startswith("https://mmg.whatsapp.net/")):
if not (link.startswith("https://mmg-fna.whatsapp.net/")
or link.startswith("https://mmg.whatsapp.net/")):
filtered_links.append(link)
# If we find messages with links, or if there's an empty message we add it to the results list.
# If we find messages with links, or if there's an empty message
# we add it to the results list.
if filtered_links or (message.get("ZTEXT") or "").strip() == "":
message["links"] = list(set(filtered_links))
self.results.append(message)
@@ -109,4 +113,5 @@ class Whatsapp(IOSExtraction):
cur.close()
conn.close()
self.log.info("Extracted a total of %d WhatsApp messages containing links", len(self.results))
self.log.info("Extracted a total of %d WhatsApp messages containing "
"links", len(self.results))


@@ -7,18 +7,21 @@ import logging
import operator
import sqlite3
from pathlib import Path
from typing import Union
from mvt.common.utils import convert_mactime_to_unix, convert_timestamp_to_iso
from mvt.common.utils import convert_mactime_to_iso
from .base import IOSExtraction
class NetBase(IOSExtraction):
"""This class provides a base for DataUsage and NetUsage extraction modules."""
"""This class provides a base for DataUsage and NetUsage extraction
modules."""
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
log: logging.Logger = logging.getLogger(__name__),
results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)
@@ -43,20 +46,23 @@ class NetBase(IOSExtraction):
FROM ZLIVEUSAGE
LEFT JOIN ZPROCESS ON ZLIVEUSAGE.ZHASPROCESS = ZPROCESS.Z_PK
UNION
SELECT ZFIRSTTIMESTAMP, ZTIMESTAMP, ZPROCNAME, ZBUNDLENAME, Z_PK, NULL, NULL, NULL, NULL, NULL, NULL, NULL FROM ZPROCESS WHERE Z_PK NOT IN (SELECT ZHASPROCESS FROM ZLIVEUSAGE);
SELECT ZFIRSTTIMESTAMP, ZTIMESTAMP, ZPROCNAME, ZBUNDLENAME, Z_PK,
NULL, NULL, NULL, NULL, NULL, NULL, NULL
FROM ZPROCESS WHERE Z_PK NOT IN
(SELECT ZHASPROCESS FROM ZLIVEUSAGE);
""")
for row in cur:
# ZPROCESS records can be missing after the JOIN. Handle NULL timestamps.
if row[0] and row[1]:
first_isodate = convert_timestamp_to_iso(convert_mactime_to_unix(row[0]))
isodate = convert_timestamp_to_iso(convert_mactime_to_unix(row[1]))
first_isodate = convert_mactime_to_iso(row[0])
isodate = convert_mactime_to_iso(row[1])
else:
first_isodate = row[0]
isodate = row[1]
if row[11]:
live_timestamp = convert_timestamp_to_iso(convert_mactime_to_unix(row[11]))
live_timestamp = convert_mactime_to_iso(row[11])
else:
live_timestamp = ""
@@ -80,10 +86,14 @@ class NetBase(IOSExtraction):
self.log.info("Extracted information on %d processes", len(self.results))
def serialize(self, record: dict) -> None:
record_data = f"{record['proc_name']} (Bundle ID: {record['bundle_id']}, ID: {record['proc_id']})"
record_data_usage = record_data + f" WIFI IN: {record['wifi_in']}, WIFI OUT: {record['wifi_out']} - " \
f"WWAN IN: {record['wwan_in']}, WWAN OUT: {record['wwan_out']}"
def serialize(self, record: dict) -> Union[dict, list]:
record_data = (f"{record['proc_name']} (Bundle ID: {record['bundle_id']},"
f" ID: {record['proc_id']})")
record_data_usage = (record_data + " "
f"WIFI IN: {record['wifi_in']}, "
f"WIFI OUT: {record['wifi_out']} - "
f"WWAN IN: {record['wwan_in']}, "
f"WWAN OUT: {record['wwan_out']}")
records = [{
"timestamp": record["live_isodate"],
@@ -92,8 +102,11 @@ class NetBase(IOSExtraction):
"data": record_data_usage,
}]
# Only included first_usage and current_usage records when a ZPROCESS entry exists.
if "MANIPULATED" not in record["proc_name"] and "MISSING" not in record["proc_name"] and record["live_proc_id"] is not None:
# Only included first_usage and current_usage records when a
# ZPROCESS entry exists.
if ("MANIPULATED" not in record["proc_name"]
and "MISSING" not in record["proc_name"]
and record["live_proc_id"] is not None):
records.extend([
{
"timestamp": record["first_isodate"],
@@ -120,7 +133,8 @@ class NetBase(IOSExtraction):
# If we are instructed to run fast, we skip this.
if self.fast_mode:
self.log.info("Flag --fast was enabled: skipping extended search for suspicious processes")
self.log.info("Flag --fast was enabled: skipping extended "
"search for suspicious processes")
return
self.log.info("Extended search for suspicious processes ...")
@@ -133,11 +147,12 @@ class NetBase(IOSExtraction):
except PermissionError:
continue
files.append([posix_path.name, posix_path.__str__()])
files.append([posix_path.name, str(posix_path)])
for proc in self.results:
if not proc["bundle_id"]:
self.log.debug("Found process with no Bundle ID with name: %s", proc["proc_name"])
self.log.debug("Found process with no Bundle ID with "
"name: %s", proc["proc_name"])
binary_path = None
for file in files:
@@ -148,15 +163,20 @@ class NetBase(IOSExtraction):
if binary_path:
self.log.debug("Located at %s", binary_path)
else:
msg = f"Could not find the binary associated with the process with name {proc['proc_name']}"
if (proc["proc_name"] is None):
msg = f"Found process entry with empty 'proc_name': {proc['live_proc_id']} at {proc['live_isodate']}"
msg = ("Could not find the binary associated with the "
f"process with name {proc['proc_name']}")
if not proc["proc_name"]:
msg = ("Found process entry with empty 'proc_name': "
f"{proc['live_proc_id']} at {proc['live_isodate']}")
elif len(proc["proc_name"]) == 16:
msg = msg + " (However, the process name might have been truncated in the database)"
msg += (" (However, the process name might have "
"been truncated in the database)")
self.log.warning(msg)
if not proc["live_proc_id"]:
self.log.info(f"Found process entry in ZPROCESS but not in ZLIVEUSAGE: {proc['proc_name']} at {proc['live_isodate']}")
self.log.info("Found process entry in ZPROCESS but not in "
"ZLIVEUSAGE: %s at %s",
proc['proc_name'], proc['live_isodate'])
def check_manipulated(self):
"""Check for missing or manipulate DB entries"""
@@ -169,8 +189,9 @@ class NetBase(IOSExtraction):
# Avoid duplicate warnings for same process.
if result["live_proc_id"] not in missing_process_cache:
missing_process_cache.add(result["live_proc_id"])
self.log.warning("Found manipulated process entry %s. Entry on %s",
result["live_proc_id"], result["live_isodate"])
self.log.warning("Found manipulated process entry %s. "
"Entry on %s", result["live_proc_id"],
result["live_isodate"])
# Set manipulated proc timestamp so it appears in timeline.
result["first_isodate"] = result["isodate"] = result["live_isodate"]
@@ -191,7 +212,8 @@ class NetBase(IOSExtraction):
if proc_id not in all_proc_id:
previous_proc = results_by_proc[last_proc_id]
self.log.info("Missing process %d. Previous process at \"%s\" (%s)",
proc_id, previous_proc["first_isodate"], previous_proc["proc_name"])
proc_id, previous_proc["first_isodate"],
previous_proc["proc_name"])
missing_procs[proc_id] = {
"proc_id": proc_id,
@@ -208,13 +230,14 @@ class NetBase(IOSExtraction):
# Set default DataUsage keys.
result = {key: None for key in self.results[0].keys()}
result["first_isodate"] = result["isodate"] = result["live_isodate"] = proc["prev_proc_first"]
result["proc_name"] = "MISSING [follows {}]".format(proc["prev_proc_name"])
result["proc_name"] = f"MISSING [follows {proc['prev_proc_name']}]"
result["proc_id"] = result["live_proc_id"] = proc["proc_id"]
result["bundle_id"] = None
self.results.append(result)
self.results = sorted(self.results, key=operator.itemgetter("first_isodate"))
self.results = sorted(self.results,
key=operator.itemgetter("first_isodate"))
def check_indicators(self) -> None:
# Check for manipulated process records.


@@ -247,10 +247,12 @@ IPHONE_IOS_VERSIONS = [
def get_device_desc_from_id(identifier: str,
devices_list: list = IPHONE_MODELS) -> str:
for model in IPHONE_MODELS:
for model in devices_list:
if identifier == model["identifier"]:
return model["description"]
return ""
def find_version_by_build(build: str) -> str:
build = build.upper()
@@ -258,6 +260,8 @@ def find_version_by_build(build: str) -> str:
if build == version["build"]:
return version["version"]
return ""
def latest_ios_version() -> str:
return IPHONE_IOS_VERSIONS[-1]
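The one-line fix above makes `get_device_desc_from_id()` honor its `devices_list` argument instead of always scanning the hard-coded `IPHONE_MODELS`. A self-contained sketch of the fixed behavior, with an illustrative single-entry model table (the real table is much longer):

```python
IPHONE_MODELS = [
    # Illustrative entry only.
    {"identifier": "iPhone14,2", "description": "iPhone 13 Pro"},
]


def get_device_desc_from_id(identifier: str,
                            devices_list: list = IPHONE_MODELS) -> str:
    # After the fix, the loop iterates the list it was given,
    # not the hard-coded IPHONE_MODELS global.
    for model in devices_list:
        if identifier == model["identifier"]:
            return model["description"]
    return ""


assert get_device_desc_from_id("iPhone14,2") == "iPhone 13 Pro"
assert get_device_desc_from_id("iPhone14,2", devices_list=[]) == ""
```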


@@ -1,20 +0,0 @@
[
{
"name": "NSO Group Pegasus Indicators of Compromise",
"source": "Amnesty International",
"reference": "https://www.amnesty.org/en/latest/research/2021/07/forensic-methodology-report-how-to-catch-nso-groups-pegasus/",
"stix2_url": "https://raw.githubusercontent.com/AmnestyTech/investigations/master/2021-07-18_nso/pegasus.stix2"
},
{
"name": "Cytrox Predator Spyware Indicators of Compromise",
"source": "Meta, Amnesty International, Citizen Lab",
"reference": "https://citizenlab.ca/2021/12/pegasus-vs-predator-dissidents-doubly-infected-iphone-reveals-cytrox-mercenary-spyware/",
"stix2_url": "https://raw.githubusercontent.com/AmnestyTech/investigations/master/2021-12-16_cytrox/cytrox.stix2"
},
{
"name": "RCS Lab Spyware Indicators of Compromise",
"source": "Google, Lookout",
"reference": "https://blog.google/threat-analysis-group/italian-spyware-vendor-targets-users-in-italy-and-kazakhstan/",
"stix2_url": "https://raw.githubusercontent.com/mvt-project/mvt-indicators/main/2022-06-23_rcs_lab/rcs.stix2"
}
]


@@ -43,7 +43,7 @@ console_scripts =
mvt-android = mvt.android:cli
[flake8]
max-complexit = 10
max-complexity = 10
max-line-length = 1000
ignore =
C901,
@@ -52,3 +52,34 @@ ignore =
E127,
W503,
E226
[pylint]
score = no
reports = no
output-format = colorized
max-locals = 25
max-args = 10
good-names = i,m
min-similarity-lines = 10
ignore-comments = yes
ignore-docstrings = yes
ignore-imports = yes
ignored-argument-names=args|kwargs
# https://pylint.pycqa.org/en/stable/technical_reference/features.html
disable =
too-many-instance-attributes,
broad-except,
abstract-method,
dangerous-default-value,
too-few-public-methods,
missing-docstring,
missing-module-docstring,
missing-class-docstring,
missing-function-docstring,
#duplicate-code,
#line-too-long,


@@ -4,7 +4,6 @@
# https://license.mvt.re/1.1/
import io
import logging
import os
import tarfile
@@ -19,7 +18,7 @@ class TestBackupModule:
def test_module_folder(self):
backup_path = get_android_backup_folder()
mod = SMS(target_path=backup_path, log=logging)
mod = SMS(target_path=backup_path)
files = []
for root, subdirs, subfiles in os.walk(os.path.abspath(backup_path)):
for fname in subfiles:
@@ -32,7 +31,7 @@ class TestBackupModule:
def test_module_file(self):
fpath = os.path.join(get_android_backup_folder(), "backup.ab")
mod = SMS(target_path=fpath, log=logging)
mod = SMS(target_path=fpath)
with open(fpath, "rb") as f:
data = f.read()
tardata = parse_backup_file(data)
@@ -48,7 +47,7 @@ class TestBackupModule:
def test_module_file2(self):
fpath = os.path.join(get_android_backup_folder(), "backup2.ab")
mod = SMS(target_path=fpath, log=logging)
mod = SMS(target_path=fpath)
with open(fpath, "rb") as f:
data = f.read()
tardata = parse_backup_file(data, password="123456")
@@ -64,7 +63,7 @@ class TestBackupModule:
def test_module_file3(self):
fpath = os.path.join(get_android_backup_folder(), "backup3.ab")
mod = SMS(target_path=fpath, log=logging)
mod = SMS(target_path=fpath)
with open(fpath, "rb") as f:
data = f.read()
tardata = parse_backup_file(data)


@@ -52,6 +52,7 @@ class TestBackupParsing:
m.update(ddata)
assert m.hexdigest() == "33e73df2ede9798dcb3a85c06200ee41c8f52dd2f2e50ffafcceb0407bc13e3a"
sms = parse_tar_for_sms(ddata)
print(sms)
assert isinstance(sms, list)
assert len(sms) == 1
assert len(sms[0]["links"]) == 1

Some files were not shown because too many files have changed in this diff.