Mirror of https://github.com/mvt-project/mvt.git (synced 2026-02-14 17:42:46 +00:00)
Compare commits
132 Commits
| SHA1 |
|---|
| 8f88f872df |
| 2d16218489 |
| 3215e797ec |
| e65a598903 |
| e80c02451c |
| 5df50f864c |
| 45b31bb718 |
| e10f1767e6 |
| d64277c0bf |
| 3f3261511a |
| 4cfe75e2d4 |
| cdd90332f7 |
| d9b29b3739 |
| 79bb7d1d4b |
| a653cb3cfc |
| b25cc48be0 |
| 40bd9ddc1d |
| deb95297da |
| 02014b414b |
| 7dd5fe7831 |
| 11d1a3dcee |
| 74f9db2bf2 |
| 356bddc3af |
| 512f40dcb4 |
| b3a464ba58 |
| 529df85f0f |
| 19a6da8fe7 |
| 34c997f923 |
| 02bf903411 |
| 7019375767 |
| 34dd27c5d2 |
| a4d6a08a8b |
| 635d3a392d |
| 2d78bddbba |
| c1938d2ead |
| 104b01e5cd |
| 7087e8adb2 |
| 67608ac02b |
| 6d8de5b461 |
| b0177d6104 |
| e0c9a44b10 |
| ef8c1ae895 |
| 3165801e2b |
| 1aa371a398 |
| f8e380baa1 |
| 35559b09a8 |
| daf5c1f3de |
| f601db2174 |
| 3ce9641c23 |
| 9be393e3f6 |
| 5f125974b8 |
| aa0f152ba1 |
| 169f5fbc26 |
| 5ea3460c09 |
| c38df37967 |
| 7f29b522fa |
| 40b0da9885 |
| 94a8d9dd91 |
| 963d3db51a |
| 660e208473 |
| 01e68ccc6a |
| fba0fa1f2c |
| 1cbf55e50e |
| 8fcc79ebfa |
| 423462395a |
| 1f08572a6a |
| 94e3c0ce7b |
| 904daad935 |
| eb2a8b8b41 |
| 60a17381a2 |
| ef2bb93dc4 |
| f68b7e7089 |
| a22241ec32 |
| 8ad1bc7a2b |
| c6b3509ed4 |
| 75b5b296a5 |
| 2d62e31eaa |
| 1bfc683e4b |
| 7ab09669b5 |
| 757bd8618e |
| f1d039346d |
| ccdfd92d4a |
| 032b229eb8 |
| 93936976c7 |
| f3a4e9d108 |
| 93a9735b5e |
| 7b0e2d4564 |
| 725a99bcd5 |
| 35a6f6ec9a |
| f4ba29f1ef |
| 3f9809f36c |
| 6da6595108 |
| 35dfeaccee |
| e5f2aa3c3d |
| 3236c1b390 |
| 80a670273d |
| 969b5cc506 |
| ef8622d4c3 |
| e39e9e6f92 |
| 7b32ed3179 |
| 315317863e |
| 08d35b056a |
| 3e679312d1 |
| be4f1afed6 |
| 0dea25d86e |
| 505d3c7e60 |
| 8f04c09b75 |
| 595b7e2066 |
| d3941bb5d3 |
| 194c8a0ac1 |
| bef190fe50 |
| cacf027051 |
| da97f5ca30 |
| a774577940 |
| 7252cc82a7 |
| b34d80fd11 |
| 0347dfa3c9 |
| 28647b8493 |
| c2ec26fd75 |
| 856a6fb895 |
| 62f3c535df |
| 34c64af815 |
| ea4da71277 |
| 94fe3c90e0 |
| f78332aa71 |
| 0c4eb0bb34 |
| e70054d0c2 |
| a75cf58f72 |
| c859b43220 |
| 75ee2db02e |
| f6efb3c89a |
| b27047ed27 |
@@ -38,12 +38,15 @@ RUN apt update \
# Build libimobiledevice
# ----------------------
RUN git clone https://github.com/libimobiledevice/libplist \
&& git clone https://github.com/libimobiledevice/libimobiledevice-glue \
&& git clone https://github.com/libimobiledevice/libusbmuxd \
&& git clone https://github.com/libimobiledevice/libimobiledevice \
&& git clone https://github.com/libimobiledevice/usbmuxd \

&& cd libplist && ./autogen.sh && make && make install && ldconfig \

&& cd ../libimobiledevice-glue && PKG_CONFIG_PATH=/usr/local/lib/pkgconfig ./autogen.sh --prefix=/usr && make && make install && ldconfig \

&& cd ../libusbmuxd && PKG_CONFIG_PATH=/usr/local/lib/pkgconfig ./autogen.sh && make && make install && ldconfig \

&& cd ../libimobiledevice && PKG_CONFIG_PATH=/usr/local/lib/pkgconfig ./autogen.sh --enable-debug && make && make install && ldconfig \
@@ -51,7 +54,7 @@ RUN git clone https://github.com/libimobiledevice/libplist \
&& cd ../usbmuxd && PKG_CONFIG_PATH=/usr/local/lib/pkgconfig ./autogen.sh --prefix=/usr --sysconfdir=/etc --localstatedir=/var --runstatedir=/run && make && make install \

# Clean up.
-&& cd .. && rm -rf libplist libusbmuxd libimobiledevice usbmuxd
+&& cd .. && rm -rf libplist libimobiledevice-glue libusbmuxd libimobiledevice usbmuxd

# Installing MVT
# --------------

@@ -15,15 +15,15 @@ It has been developed and released by the [Amnesty International Security Lab](h

## Installation

-MVT can be installed from sources or from [PyPi](https://pypi.org/project/mvt/) (you will need some dependencies, check the [documentation](https://docs.mvt.re/en/latest/install.html)):
+MVT can be installed from sources or from [PyPi](https://pypi.org/project/mvt/) (you will need some dependencies, check the [documentation](https://docs.mvt.re/en/latest/install/)):

```
pip3 install mvt
```

-Alternatively, you can decide to run MVT and all relevant tools through a [Docker container](https://docs.mvt.re/en/latest/docker.html).
+Alternatively, you can decide to run MVT and all relevant tools through a [Docker container](https://docs.mvt.re/en/latest/docker/).

-**Please note:** MVT is best run on Linux or Mac systems. [It does not currently support running natively on Windows.](https://docs.mvt.re/en/latest/install.html#mvt-on-windows)
+**Please note:** MVT is best run on Linux or Mac systems. [It does not currently support running natively on Windows.](https://docs.mvt.re/en/latest/install/#mvt-on-windows)

## Usage

@@ -31,4 +31,4 @@ MVT provides two commands `mvt-ios` and `mvt-android`. [Check out the documentat

## License

-The purpose of MVT is to facilitate the ***consensual forensic analysis*** of devices of those who might be targets of sophisticated mobile spyware attacks, especially members of civil society and marginalized communities. We do not want MVT to enable privacy violations of non-consenting individuals. In order to achieve this, MVT is released under its own license. [Read more here.](https://docs.mvt.re/en/latest/license.html)
+The purpose of MVT is to facilitate the ***consensual forensic analysis*** of devices of those who might be targets of sophisticated mobile spyware attacks, especially members of civil society and marginalized communities. We do not want MVT to enable privacy violations of non-consenting individuals. In order to achieve this, MVT is released under its own license. [Read more here.](https://docs.mvt.re/en/latest/license/)

@@ -22,7 +22,7 @@ adb backup -all

## Unpack the backup

-In order to reliable unpack th [Android Backup Extractor (ABE)](https://github.com/nelenkov/android-backup-extractor) to convert it to a readable file format. Make sure that java is installed on your system and use the following command:
+In order to unpack the backup, use [Android Backup Extractor (ABE)](https://github.com/nelenkov/android-backup-extractor) to convert it to a readable file format. Make sure that java is installed on your system and use the following command:

```bash
java -jar ~/path/to/abe.jar unpack backup.ab backup.tar
@@ -31,6 +31,8 @@ tar xvf backup.tar

If the backup is encrypted, ABE will prompt you to enter the password.

Alternatively, [ab-decrypt](https://github.com/joernheissler/ab-decrypt) can be used for that purpose.

## Check the backup

You can then extract SMSs containing links with MVT:

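A minimal invocation for this step might look like the following (the output and backup paths are placeholders):

```bash
mvt-android check-backup --output /path/to/results/ /path/to/backup/
```
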
@@ -8,7 +8,7 @@ However, not all is lost.

Because malware attacks over Android typically take the form of malicious or backdoored apps, the very first thing you might want to do is to extract and verify all installed Android packages and triage quickly if there are any which stand out as malicious or which might be atypical.

-While it is out of the scope of this documentation to dwell into details on how to analyze Android apps, MVT does allow to easily and automatically extract information about installed apps, download copies of them, and quickly lookup services such as [VirusTotal](https://www.virustotal.com) or [Koodous](https://www.koodous.com) which might quickly indicate known bad apps.
+While it is out of the scope of this documentation to dwell into details on how to analyze Android apps, MVT does allow to easily and automatically extract information about installed apps, download copies of them, and quickly lookup services such as [VirusTotal](https://www.virustotal.com) or [Koodous](https://koodous.com) which might quickly indicate known bad apps.

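For example, installed packages can be downloaded and looked up with something along these lines (the option names are an assumption based on the parameters of the `download-apks` command; check `mvt-android download-apks --help`):

```bash
mvt-android download-apks --output /path/to/apks --all-checks
```
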
## Check the device over Android Debug Bridge

@@ -28,9 +28,17 @@ The `--iocs` option can be invoked multiple times to let MVT import multiple STI
mvt-ios check-backup --iocs ~/iocs/malware1.stix --iocs ~/iocs/malware2.stix2 /path/to/backup
```

It is also possible to load STIX2 files automatically from the environment variable `MVT_STIX2`:

```bash
export MVT_STIX2="/home/user/IOC1.stix2:/home/user/IOC2.stix2"
```

## Known repositories of STIX2 IOCs

- The [Amnesty International investigations repository](https://github.com/AmnestyTech/investigations) contains STIX-formatted IOCs for:
    - [Pegasus](https://en.wikipedia.org/wiki/Pegasus_(spyware)) ([STIX2](https://raw.githubusercontent.com/AmnestyTech/investigations/master/2021-07-18_nso/pegasus.stix2))
    - [Predator from Cytrox](https://citizenlab.ca/2021/12/pegasus-vs-predator-dissidents-doubly-infected-iphone-reveals-cytrox-mercenary-spyware/) ([STIX2](https://github.com/AmnestyTech/investigations/tree/master/2021-12-16_cytrox/cytrox.stix2))
- [This repository](https://github.com/Te-k/stalkerware-indicators) contains IOCs for Android stalkerware including [a STIX MVT-compatible file](https://github.com/Te-k/stalkerware-indicators/blob/master/stalkerware.stix2).

Please [open an issue](https://github.com/mvt-project/mvt/issues/) to suggest new sources of STIX-formatted IOCs.

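For instance, the Pegasus STIX2 file listed above can be fetched and exported in one go (paths are illustrative):

```bash
mkdir -p ~/iocs
wget -O ~/iocs/pegasus.stix2 https://raw.githubusercontent.com/AmnestyTech/investigations/master/2021-07-18_nso/pegasus.stix2
export MVT_STIX2="$HOME/iocs/pegasus.stix2"
mvt-ios check-backup /path/to/backup
```
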
@@ -1,6 +1,6 @@
# Install libimobiledevice

-Before proceeding with doing any acquisition of iOS devices we recommend installing [libimobiledevice](https://www.libimobiledevice.org/) utilities. These utilities will become useful when extracting crash logs and generating iTunes backups. Because the utilities and its libraries are subject to frequent changes in response to new versions of iOS, you might want to consider compiling libimobiledevice utilities from sources. Otherwise, if available, you can try installing packages available in your distribution:
+Before proceeding with doing any acquisition of iOS devices we recommend installing [libimobiledevice](https://libimobiledevice.org/) utilities. These utilities will become useful when extracting crash logs and generating iTunes backups. Because the utilities and its libraries are subject to frequent changes in response to new versions of iOS, you might want to consider compiling libimobiledevice utilities from sources. Otherwise, if available, you can try installing packages available in your distribution:

```bash
sudo apt install libimobiledevice-utils

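# A sketch of a typical next step, assuming the idevicebackup2 utility shipped with libimobiledevice-utils is available:
# enable backup encryption, then take a full backup that can later be analysed with mvt-ios.
idevicebackup2 -i encryption on
idevicebackup2 backup --full /path/to/backup/
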
@@ -4,10 +4,22 @@ In this page you can find a (reasonably) up-to-date breakdown of the files creat

## Records extracted by `check-fs` or `check-backup`

### `analytics.json`

!!! info "Availability"
    Backup (if encrypted): :material-close:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `Analytics` module. The module extracts records from the plists inside the SQLite databases located at *private/var/Keychains/Analytics/\*.db*, which contain various analytics information regarding networking, certificate-pinning, TLS, etc. failures.

If indicators are provided through the command-line, processes and domains are checked against all fields of the plist. Any matches are stored in *analytics_detected.json*.

---

### `backup_info.json`

!!! info "Availabiliy"
    Backup: :material-check:
    Full filesystem dump: :material-close:

This JSON file is created by mvt-ios' `BackupInfo` module. The module extracts some details about the backup and the device, such as name, phone number, IMEI, product type and version.
@@ -17,7 +29,7 @@ This JSON file is created by mvt-ios' `BackupInfo` module. The module extracts s
### `cache_files.json`

!!! info "Availability"
    Backup: :material-close:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `CacheFiles` module. The module extracts records from all SQLite database files stored on disk with the name *Cache.db*. These databases typically contain data from iOS' [internal URL caching](https://developer.apple.com/documentation/foundation/nsurlcache). Through this module you might be able to recover records of HTTP requests and responses performed my applications as well as system services, that would otherwise be unavailable. For example, you might see HTTP requests part of an exploitation chain performed by an iOS service attempting to download a first stage malicious payload.
@@ -29,7 +41,7 @@ If indicators are provided through the command-line, they are checked against th
### `calls.json`

!!! info "Availability"
    Backup (if encrypted): :material-check:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `Calls` module. The module extracts records from a SQLite database located at */private/var/mobile/Library/CallHistoryDB/CallHistory.storedata*, which contains records of incoming and outgoing calls, including from messaging apps such as WhatsApp or Skype.
@@ -39,7 +51,7 @@ This JSON file is created by mvt-ios' `Calls` module. The module extracts record
### `chrome_favicon.json`

!!! info "Availability"
    Backup: :material-check:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `ChromeFavicon` module. The module extracts records from a SQLite database located at */private/var/mobile/Containers/Data/Application/\*/Library/Application Support/Google/Chrome/Default/Favicons*, which contains a mapping of favicons' URLs and the visited URLs which loaded them.
@@ -51,29 +63,31 @@ If indicators are provided through the command-line, they are checked against bo
### `chrome_history.json`

!!! info "Availability"
    Backup: :material-check:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `ChromeHistory` module. The module extracts records from a SQLite database located at */private/var/mobile/Containers/Data/Application/\*/Library/Application Support/Google/Chrome/Default/History*, which contains a history of URL visits.

-If indicators a provided through the command-line, they are checked against the visited URL. Any matches are stored in *chrome_history_detected.json*.
+If indicators are provided through the command-line, they are checked against the visited URL. Any matches are stored in *chrome_history_detected.json*.

---

### `configuration_profiles.json`

!!! info "Availability"
    Backup: :material-check:
    Full filesystem dump: :material-close:

This JSON file is created by mvt-ios' `ConfigurationProfiles` module. The module extracts details about iOS configuration profiles that have been installed on the device. These should include both default iOS as well as third-party profiles.

If indicators are provided through the command-line, they are checked against the configuration profile UUID to identify any known malicious profiles. Any matches are stored in *configuration_profiles_detected.json*.

---

### `contacts.json`

!!! info "Availability"
    Backup: :material-check:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `Contacts` module. The module extracts records from a SQLite database located at */private/var/mobile/Library/AddressBook/AddressBook.sqlitedb*, which contains records from the phone's address book. While this database obviously would not contain any malicious indicators per se, you might want to use it to compare records from other apps (such as iMessage, SMS, etc.) to filter those originating from unknown origins.
@@ -83,7 +97,7 @@ This JSON file is created by mvt-ios' `Contacts` module. The module extracts rec
### `firefox_favicon.json`

!!! info "Availability"
    Backup: :material-check:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `FirefoxFavicon` module. The module extracts records from a SQLite database located at */private/var/mobile/profile.profile/browser.db*, which contains a mapping of favicons' URLs and the visited URLs which loaded them.
@@ -95,19 +109,19 @@ If indicators are provided through the command-line, they are checked against bo
### `firefox_history.json`

!!! info "Availability"
    Backup: :material-check:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `FirefoxHistory` module. The module extracts records from a SQLite database located at */private/var/mobile/profile.profile/browser.db*, which contains a history of URL visits.

-If indicators a provided through the command-line, they are checked against the visited URL. Any matches are stored in *firefox_history_detected.json*.
+If indicators are provided through the command-line, they are checked against the visited URL. Any matches are stored in *firefox_history_detected.json*.

---

### `id_status_cache.json`

!!! info "Availability"
    Backup (before iOS 14.7): :material-check:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `IDStatusCache` module. The module extracts records from a plist file located at */private/var/mobile/Library/Preferences/com.apple.identityservices.idstatuscache.plist*, which contains a cache of Apple user ID authentication. This chance will indicate when apps like Facetime and iMessage first established contacts with other registered Apple IDs. This is significant because it might contain traces of malicious accounts involved in exploitation of those apps.
@@ -116,10 +130,20 @@ Starting from iOS 14.7.0, this file is empty or absent.

---

### `shortcuts.json`

!!! info "Availability"
    Backup: :material-check:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `Shortcuts` module. The module extracts records from an SQLite database located at */private/var/mobile/Library/Shortcuts/Shortcuts.sqlite*, which contains records about the Shortcuts application. Shortcuts are a built-in iOS feature which allows users to automation certain actions on their device. In some cases the legitimate Shortcuts app may be abused by spyware to maintain persistence on an infected devices.

---

### `interaction_c.json`

!!! info "Availability"
    Backup (if encrypted): :material-check:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `InteractionC` module. The module extracts records from a SQLite database located at */private/var/mobile/Library/CoreDuet/People/interactionC.db*, which contains details about user interactions with installed apps.
@@ -129,7 +153,7 @@ This JSON file is created by mvt-ios' `InteractionC` module. The module extracts
### `locationd_clients.json`

!!! info "Availability"
    Backup: :material-check:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `LocationdClients` module. The module extracts records from a plist file located at */private/var/mobile/Library/Caches/locationd/clients.plist*, which contains a cache of apps which requested access to location services.
@@ -139,7 +163,7 @@ This JSON file is created by mvt-ios' `LocationdClients` module. The module extr
### `manifest.json`

!!! info "Availability"
    Backup: :material-check:
    Full filesystem dump: :material-close:

This JSON file is created by mvt-ios' `Manifest` module. The module extracts records from the SQLite database *Manifest.db* contained in iTunes backups, and which indexes the locally backed-up files to the original paths on the iOS device.
@@ -148,10 +172,22 @@ If indicators are provided through the command-line, they are checked against th

---

### `os_analytics_ad_daily.json`

!!! info "Availability"
    Backup: :material-check:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `OSAnalyticsADDaily` module. The module extracts records from a plist located *private/var/mobile/Library/Preferences/com.apple.osanalytics.addaily.plist*, which contains a history of data usage by processes running on the system. Besides the network statistics, these records are particularly important because they might show traces of malicious process executions and the relevant timeframe.

If indicators are provided through the command-line, they are checked against the process names. Any matches are stored in *os_analytics_ad_daily_detected.json*.

---

### `datausage.json`

!!! info "Availability"
    Backup: :material-check:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `Datausage` module. The module extracts records from a SQLite database located */private/var/wireless/Library/Databases/DataUsage.sqlite*, which contains a history of data usage by processes running on the system. Besides the network statistics, these records are particularly important because they might show traces of malicious process executions and the relevant timeframe. In particular, processes which do not have a valid bundle ID might require particular attention.
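A quick way to eyeball these records outside of MVT is the sqlite3 shell; the table and column names below are an assumption about the usual DataUsage.sqlite layout, so confirm them with `.schema` first:

```bash
sqlite3 DataUsage.sqlite ".schema"
sqlite3 DataUsage.sqlite "SELECT ZPROCNAME, ZBUNDLENAME FROM ZPROCESS;"
```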
@@ -163,7 +199,7 @@ If indicators are provided through the command-line, they are checked against th
### `netusage.json`

!!! info "Availability"
    Backup: :material-close:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `Netusage` module. The module extracts records from a SQLite database located */private/var/networkd/netusage.sqlite*, which contains a history of data usage by processes running on the system. Besides the network statistics, these records are particularly important because they might show traces of malicious process executions and the relevant timeframe. In particular, processes which do not have a valid bundle ID might require particular attention.
@@ -175,7 +211,7 @@ If indicators are provided through the command-line, they are checked against th
### `profile_events.json`

!!! info "Availability"
    Backup: :material-check:
    Full filesystem dump: :material-close:

This JSON file is created by mvt-ios' `ProfileEvents` module. The module extracts a timeline of configuration profile operations. For example, it should indicate when a new profile was installed from the Settings app, or when one was removed.
@@ -185,19 +221,19 @@ This JSON file is created by mvt-ios' `ProfileEvents` module. The module extract
### `safari_browser_state.json`

!!! info "Availability"
    Backup (if encrypted): :material-check:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `SafariBrowserState` module. The module extracts records from the SQLite databases located at */private/var/mobile/Library/Safari/BrowserState.db* or */private/var/mobile/Containers/Data/Application/\*/Library/Safari/BrowserState.db*, which contain records of opened tabs.

-If indicators a provided through the command-line, they are checked against the visited URL. Any matches are stored in *safari_browser_state_detected.json*.
+If indicators are provided through the command-line, they are checked against the visited URL. Any matches are stored in *safari_browser_state_detected.json*.

---

### `safari_favicon.json`

!!! info "Availability"
    Backup: :material-close:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `SafariFavicon` module. The module extracts records from the SQLite databases located at */private/var/mobile/Library/Image Cache/Favicons/Favicons.db* or */private/var/mobile/Containers/Data/Application/\*/Library/Image Cache/Favicons/Favicons.db*, which contain mappings of favicons' URLs and the visited URLs which loaded them.
@@ -209,7 +245,7 @@ If indicators are provided through the command-line, they are checked against bo
### `safari_history.json`

!!! info "Availability"
    Backup (if encrypted): :material-check:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `SafariHistory` module. The module extracts records from the SQLite databases located at */private/var/mobile/Library/Safari/History.db* or */private/var/mobile/Containers/Data/Application/\*/Library/Safari/History.db*, which contain a history of URL visits.
@@ -218,10 +254,22 @@ If indicators are provided through the command-line, they are checked against th

---

### `shutdown_log.json`

!!! info "Availability"
    Backup (if encrypted): :material-close:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `ShutdownLog` module. The module extracts records from the shutdown log located at *private/var/db/diagnostics/shutdown.log*. When shutting down an iPhone, a SIGTERM will be sent to all processes runnning. The `shutdown.log` file will log any process (with its pid and path) that did not shut down after the SIGTERM was sent.

If indicators are provided through the command-line, they are checked against the paths. Any matches are stored in *shutdown_log_detected.json*.

---

### `sms.json`

!!! info "Availability"
    Backup: :material-check:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `SMS` module. The module extracts a list of SMS messages containing HTTP links from the SQLite database located at */private/var/mobile/Library/SMS/sms.db*.
@@ -233,17 +281,27 @@ If indicators are provided through the command-line, they are checked against th
### `sms_attachments.json`

!!! info "Availability"
    Backup: :material-check:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `SMSAttachments` module. The module extracts details about attachments sent via SMS or iMessage from the same database used by the `SMS` module. These records might be useful to indicate unique patterns that might be indicative of exploitation attempts leveraging potential vulnerabilities in file format parsers or other forms of file handling by the Messages app.

---

### `tcc.json`

!!! info "Availability"
    Backup: :material-check:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `TCC` module. The module extracts records from a SQLite database located at */private/var/mobile/Library/TCC/TCC.db*, which contains a list of which services such as microphone, camera, or location, apps have been granted or denied access to.

---

### `version_history.json`

!!! info "Availability"
    Backup: :material-close:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `IOSVersionHistory` module. The module extracts records of iOS software updates from analytics plist files located at */private/var/db/analyticsd/Analytics-Journal-\*.ips*.
@@ -253,7 +311,7 @@ This JSON file is created by mvt-ios' `IOSVersionHistory` module. The module ext
### `webkit_indexeddb.json`

!!! info "Availability"
    Backup: :material-close:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `WebkitIndexedDB` module. The module extracts a list of file and folder names located at the following path */private/var/mobile/Containers/Data/Application/\*/Library/WebKit/WebsiteData/IndexedDB*, which contains IndexedDB files created by any app installed on the device.
@@ -265,7 +323,7 @@ If indicators are provided through the command-line, they are checked against th
### `webkit_local_storage.json`

!!! info "Availability"
    Backup: :material-close:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `WebkitLocalStorage` module. The module extracts a list of file and folder names located at the following path */private/var/mobile/Containers/Data/Application/\*/Library/WebKit/WebsiteData/LocalStorage/*, which contains local storage files created by any app installed on the device.
@@ -277,7 +335,7 @@ If indicators are provided through the command-line, they are checked against th
### `webkit_resource_load_statistics.json`

!!! info "Availability"
    Backup: :material-check:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios `WebkitResourceLoadStatistics` module. The module extracts records from available WebKit ResourceLoadStatistics *observations.db* SQLite3 databases. These records should indicate domain names contacted by apps, including a timestamp.
@@ -289,7 +347,7 @@ If indicators are provided through the command-line, they are checked against th
### `webkit_safari_view_service.json`

!!! info "Availability"
    Backup: :material-close:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `WebkitSafariViewService` module. The module extracts a list of file and folder names located at the following path */private/var/mobile/Containers/Data/Application/\*/SystemData/com.apple.SafariViewService/Library/WebKit/WebsiteData/*, which contains files cached by SafariVewService.
@@ -301,7 +359,7 @@ If indicators are provided through the command-line, they are checked against th
### `webkit_session_resource_log.json`

!!! info "Availability"
    Backup: :material-check:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `WebkitSessionResourceLog` module. The module extracts records from plist files with the name *full_browsing_session_resourceLog.plist*, which contain records of resources loaded by different domains visited.
@@ -313,7 +371,7 @@ If indicators are provided through the command-line, they are checked against th
### `whatsapp.json`

!!! info "Availability"
    Backup: :material-check:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `WhatsApp` module. The module extracts a list of WhatsApp messages containing HTTP links from the SQLite database located at *private/var/mobile/Containers/Shared/AppGroup/\*/ChatStorage.sqlite*.

@@ -1,4 +1,4 @@
-mkdocs==1.2.1
+mkdocs==1.2.3
mkdocs-autorefs
mkdocs-material
mkdocs-material-extensions

@@ -9,8 +9,11 @@ import os
import click
from rich.logging import RichHandler

-from mvt.common.help import *
+from mvt.common.help import HELP_MSG_MODULE, HELP_MSG_IOC
+from mvt.common.help import HELP_MSG_FAST, HELP_MSG_OUTPUT, HELP_MSG_LIST_MODULES
+from mvt.common.help import HELP_MSG_SERIAL
from mvt.common.indicators import Indicators, IndicatorsFileBadFormat
from mvt.common.logo import logo
from mvt.common.module import run_module, save_timeline

from .download_apks import DownloadAPKs
@@ -25,11 +28,20 @@ logging.basicConfig(level="INFO", format=LOG_FORMAT, handlers=[
RichHandler(show_path=False, log_time_format="%X")])
log = logging.getLogger(__name__)


#==============================================================================
# Main
#==============================================================================
@click.group(invoke_without_command=False)
def cli():
logo()


#==============================================================================
# Command: version
#==============================================================================
@cli.command("version", help="Show the currently installed version of MVT")
def version():
return


@@ -95,10 +107,11 @@ def download_apks(ctx, all_apks, virustotal, koodous, all_checks, output, from_f
default=[], help=HELP_MSG_IOC)
@click.option("--output", "-o", type=click.Path(exists=False),
help=HELP_MSG_OUTPUT)
@click.option("--fast", "-f", is_flag=True, help=HELP_MSG_FAST)
@click.option("--list-modules", "-l", is_flag=True, help=HELP_MSG_LIST_MODULES)
@click.option("--module", "-m", help=HELP_MSG_MODULE)
@click.pass_context
-def check_adb(ctx, iocs, output, list_modules, module, serial):
+def check_adb(ctx, iocs, output, fast, list_modules, module, serial):
if list_modules:
log.info("Following is the list of available check-adb modules:")
for adb_module in ADB_MODULES:
@@ -130,7 +143,8 @@ def check_adb(ctx, iocs, output, list_modules, module, serial):
if module and adb_module.__name__ != module:
continue

-m = adb_module(output_folder=output, log=logging.getLogger(adb_module.__module__))
+m = adb_module(output_folder=output, fast_mode=fast,
+log=logging.getLogger(adb_module.__module__))
if serial:
m.serial = serial

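With this change, fast mode can be requested on the command line, for example (paths are placeholders):

```bash
mvt-android check-adb --fast --output /path/to/results --iocs ~/iocs/pegasus.stix2
```
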
@@ -182,7 +196,7 @@ def check_backup(ctx, iocs, output, backup_path, serial):
log.critical("The path you specified is a not a folder!")

if os.path.basename(backup_path) == "backup.ab":
-log.info("You can use ABE (https://github.com/nelenkov/android-backup-extractor) " \
+log.info("You can use ABE (https://github.com/nelenkov/android-backup-extractor) "
"to extract 'backup.ab' files!")
ctx.exit(1)

@@ -193,7 +207,7 @@ def check_backup(ctx, iocs, output, backup_path, serial):
if serial:
m.serial = serial

-if iocs:
+if len(indicators.ioc_count) > 0:
indicators.log = m.log
m.indicators = indicators

@@ -7,17 +7,16 @@ import json
import logging
import os

import pkg_resources
from tqdm import tqdm

from mvt.common.module import InsufficientPrivileges
from mvt.common.utils import get_sha256_from_file_path

from .modules.adb.base import AndroidExtraction
from .modules.adb.packages import Packages

log = logging.getLogger(__name__)


# TODO: Would be better to replace tqdm with rich.progress to reduce
# the number of dependencies. Need to investigate whether
# it's possible to have a similar callback system.
@@ -32,7 +31,10 @@ class PullProgress(tqdm):

class DownloadAPKs(AndroidExtraction):
"""DownloadAPKs is the main class operating the download of APKs
-from the device."""
+from the device.


+"""

def __init__(self, output_folder=None, all_apks=False, log=None,
packages=None):
@@ -51,7 +53,9 @@ class DownloadAPKs(AndroidExtraction):
@classmethod
def from_json(cls, json_path):
"""Initialize this class from an existing apks.json file.

:param json_path: Path to the apks.json file to parse.

"""
with open(json_path, "r") as handle:
packages = json.load(handle)
@@ -59,9 +63,11 @@ class DownloadAPKs(AndroidExtraction):

def pull_package_file(self, package_name, remote_path):
"""Pull files related to specific package from the device.

:param package_name: Name of the package to download
:param remote_path: Path to the file to download
:returns: Path to the local copy

"""
log.info("Downloading %s ...", remote_path)

@@ -101,6 +107,8 @@ class DownloadAPKs(AndroidExtraction):
def get_packages(self):
"""Use the Packages adb module to retrieve the list of packages.
We reuse the same extraction logic to then download the APKs.


"""
self.log.info("Retrieving list of installed packages...")

@@ -111,8 +119,7 @@ class DownloadAPKs(AndroidExtraction):
self.packages = m.results

def pull_packages(self):
-"""Download all files of all selected packages from the device.
-"""
+"""Download all files of all selected packages from the device."""
log.info("Starting extraction of installed APKs at folder %s", self.output_folder)

if not os.path.exists(self.output_folder):
@@ -131,7 +138,7 @@ class DownloadAPKs(AndroidExtraction):
packages_selection.append(package)

log.info("Selected only %d packages which are not marked as system",
len(packages_selection))

if len(packages_selection) == 0:
log.info("No packages were selected for download")
@@ -150,50 +157,27 @@ class DownloadAPKs(AndroidExtraction):
log.info("[%d/%d] Package: %s", counter, len(packages_selection),
package["package_name"])

-# Get the file path for the specific package.
-try:
-output = self._adb_command(f"pm path {package['package_name']}")
-output = output.strip().replace("package:", "")
-if not output:
-continue
-except Exception as e:
-log.exception("Failed to get path of package %s: %s",
-package["package_name"], e)
-self._adb_reconnect()
-continue

-# Sometimes the package path contains multiple lines for multiple apks.
-# We loop through each line and download each file.
-for path in output.split("\n"):
-device_path = path.strip()
-file_path = self.pull_package_file(package["package_name"],
-device_path)
-if not file_path:
+for package_file in package["files"]:
+device_path = package_file["path"]
+local_path = self.pull_package_file(package["package_name"],
+device_path)
+if not local_path:
continue

-file_info = {
-"path": device_path,
-"local_name": file_path,
-"sha256": get_sha256_from_file_path(file_path),
-}

-if "files" not in package:
-package["files"] = [file_info,]
-else:
-package["files"].append(file_info)
+package_file["local_path"] = local_path

log.info("Download of selected packages completed")

def save_json(self):
-"""Save the results to the package.json file.
-"""
+"""Save the results to the package.json file."""
json_path = os.path.join(self.output_folder, "apks.json")
with open(json_path, "w") as handle:
json.dump(self.packages, handle, indent=4)

def run(self):
-"""Run all steps of fetch-apk.
-"""
+"""Run all steps of fetch-apk."""
self.get_packages()
self._adb_connect()
self.pull_packages()

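A minimal driver for the refactored class might look like this (the import path is an assumption based on the `from .download_apks import DownloadAPKs` statement in the CLI diff above):

```python
# Sketch only: run() chains get_packages(), _adb_connect() and pull_packages() as shown above;
# save_json() then writes apks.json into the output folder.
from mvt.android.download_apks import DownloadAPKs

downloader = DownloadAPKs(output_folder="apks_out", all_apks=False)
downloader.run()
downloader.save_json()
```
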
@@ -13,6 +13,7 @@ from rich.text import Text

log = logging.getLogger(__name__)


def koodous_lookup(packages):
log.info("Looking up all extracted files on Koodous (www.koodous.com)")
log.info("This might take a while...")
@@ -27,12 +28,12 @@ def koodous_lookup(packages):
total_packages = len(packages)
for i in track(range(total_packages), description=f"Looking up {total_packages} packages..."):
package = packages[i]
-for file in package.files:
+for file in package.get("files", []):
url = f"https://api.koodous.com/apks/{file['sha256']}"
res = requests.get(url)
report = res.json()

-row = [package.name, file["local_name"]]
+row = [package["package_name"], file["path"]]

if "package_name" in report:
trusted = "no"

@@ -13,6 +13,7 @@ from rich.text import Text

log = logging.getLogger(__name__)


def get_virustotal_report(hashes):
apikey = "233f22e200ca5822bd91103043ccac138b910db79f29af5616a9afe8b6f215ad"
url = f"https://www.virustotal.com/partners/sysinternals/file-reports?apikey={apikey}"
@@ -36,18 +37,20 @@ def get_virustotal_report(hashes):
log.error("Unexpected response from VirusTotal: %s", res.status_code)
return None


def virustotal_lookup(packages):
log.info("Looking up all extracted files on VirusTotal (www.virustotal.com)")

unique_hashes = []
for package in packages:
-for file in package.files:
+for file in package.get("files", []):
if file["sha256"] not in unique_hashes:
unique_hashes.append(file["sha256"])

total_unique_hashes = len(unique_hashes)

detections = {}

def virustotal_query(batch):
report = get_virustotal_report(batch)
if not report:
@@ -74,8 +77,8 @@ def virustotal_lookup(packages):
table.add_column("Detections")

for package in packages:
-for file in package.files:
-row = [package.name, file["local_name"]]
+for file in package.get("files", []):
+row = [package["package_name"], file["path"]]

if file["sha256"] in detections:
detection = detections[file["sha256"]]

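For reference, the shape of a package entry that both lookup helpers now expect, reconstructed from the fields accessed above and from `DownloadAPKs.pull_packages()`; the concrete values are placeholders:

```python
package = {
    "package_name": "com.example.app",
    "files": [
        {
            "path": "/data/app/com.example.app-1/base.apk",  # path on the device
            "local_path": "apks/com.example.app/base.apk",   # filled in by DownloadAPKs.pull_packages()
            "sha256": "0" * 64,                              # placeholder digest
        },
    ],
}
```
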
@@ -5,8 +5,12 @@

from .chrome_history import ChromeHistory
from .dumpsys_batterystats import DumpsysBatterystats
from .dumpsys_full import DumpsysFull
from .dumpsys_packages import DumpsysPackages
from .dumpsys_procstats import DumpsysProcstats
from .dumpsys_receivers import DumpsysReceivers
from .files import Files
from .logcat import Logcat
from .packages import Packages
from .processes import Processes
from .rootbinaries import RootBinaries
@@ -15,4 +19,5 @@ from .whatsapp import Whatsapp

ADB_MODULES = [ChromeHistory, SMS, Whatsapp, Processes,
DumpsysBatterystats, DumpsysProcstats,
-DumpsysPackages, Packages, RootBinaries]
+DumpsysPackages, DumpsysReceivers, DumpsysFull,
+Packages, RootBinaries, Logcat, Files]

@@ -25,6 +25,7 @@ log = logging.getLogger(__name__)
ADB_KEY_PATH = os.path.expanduser("~/.android/adbkey")
ADB_PUB_KEY_PATH = os.path.expanduser("~/.android/adbkey.pub")


class AndroidExtraction(MVTModule):
"""This class provides a base for all Android extraction modules."""

@@ -37,9 +38,12 @@ class AndroidExtraction(MVTModule):
self.device = None
self.serial = None

-def _adb_check_keys(self):
-"""Make sure Android adb keys exist.
-"""
+@staticmethod
+def _adb_check_keys():
+"""Make sure Android adb keys exist."""
if not os.path.isdir(os.path.dirname(ADB_KEY_PATH)):
os.makedirs(os.path.dirname(ADB_KEY_PATH))

if not os.path.exists(ADB_KEY_PATH):
keygen(ADB_KEY_PATH)

@@ -47,14 +51,16 @@ class AndroidExtraction(MVTModule):
write_public_keyfile(ADB_KEY_PATH, ADB_PUB_KEY_PATH)

def _adb_connect(self):
-"""Connect to the device over adb.
-"""
+"""Connect to the device over adb."""
self._adb_check_keys()

with open(ADB_KEY_PATH, "rb") as handle:
priv_key = handle.read()

-signer = PythonRSASigner("", priv_key)
+with open(ADB_PUB_KEY_PATH, "rb") as handle:
+pub_key = handle.read()

+signer = PythonRSASigner(pub_key, priv_key)

# If no serial was specified or if the serial does not seem to be
# a HOST:PORT definition, we use the USB transport.
@@ -84,53 +90,59 @@ class AndroidExtraction(MVTModule):
except OSError as e:
if e.errno == 113 and self.serial:
log.critical("Unable to connect to the device %s: did you specify the correct IP addres?",
self.serial)
sys.exit(-1)
else:
break

def _adb_disconnect(self):
-"""Close adb connection to the device.
-"""
+"""Close adb connection to the device."""
self.device.close()

def _adb_reconnect(self):
-"""Reconnect to device using adb.
-"""
+"""Reconnect to device using adb."""
log.info("Reconnecting ...")
self._adb_disconnect()
self._adb_connect()

def _adb_command(self, command):
"""Execute an adb shell command.

:param command: Shell command to execute
:returns: Output of command

"""
return self.device.shell(command)

def _adb_check_if_root(self):
"""Check if we have a `su` binary on the Android device.


:returns: Boolean indicating whether a `su` binary is present or not

"""
return bool(self._adb_command("command -v su"))

def _adb_root_or_die(self):
-"""Check if we have a `su` binary, otherwise raise an Exception.
-"""
+"""Check if we have a `su` binary, otherwise raise an Exception."""
if not self._adb_check_if_root():
raise InsufficientPrivileges("This module is optionally available in case the device is already rooted. Do NOT root your own device!")

def _adb_command_as_root(self, command):
"""Execute an adb shell command.

:param command: Shell command to execute as root
:returns: Output of command

"""
return self._adb_command(f"su -c {command}")


def _adb_check_file_exists(self, file):
"""Verify that a file exists.

:param file: Path of the file
:returns: Boolean indicating whether the file exists or not

"""

# TODO: Need to support checking files without root privileges as well.
@@ -144,9 +156,12 @@ class AndroidExtraction(MVTModule):

def _adb_download(self, remote_path, local_path, progress_callback=None, retry_root=True):
"""Download a file form the device.

:param remote_path: Path to download from the device
:param local_path: Path to where to locally store the copy of the file
-:param progress_callback: Callback for download progress bar
+:param progress_callback: Callback for download progress bar (Default value = None)
+:param retry_root: Default value = True)

"""
try:
self.device.pull(remote_path, local_path, progress_callback)
@@ -155,7 +170,7 @@ class AndroidExtraction(MVTModule):
self._adb_download_root(remote_path, local_path, progress_callback)
else:
raise Exception(f"Unable to download file {remote_path}: {e}")


def _adb_download_root(self, remote_path, local_path, progress_callback=None):
try:
# Check if we have root, if not raise an Exception.
@@ -180,16 +195,18 @@ class AndroidExtraction(MVTModule):

# Delete the copy on /sdcard/.
self._adb_command(f"rm -rf {new_remote_path}")


except AdbCommandFailureException as e:
raise Exception(f"Unable to download file {remote_path}: {e}")

def _adb_process_file(self, remote_path, process_routine):
"""Download a local copy of a file which is only accessible as root.
This is a wrapper around process_routine.

:param remote_path: Path of the file on the device to process
:param process_routine: Function to be called on the local copy of the
downloaded file

"""
# Connect to the device over adb.
self._adb_connect()
@@ -223,6 +240,5 @@ class AndroidExtraction(MVTModule):
self._adb_disconnect()

def run(self):
-"""Run the main procedure.
-"""
+"""Run the main procedure."""
raise NotImplementedError

@@ -16,6 +16,7 @@ log = logging.getLogger(__name__)

CHROME_HISTORY_PATH = "data/data/com.android.chrome/app_chrome/Default/History"


class ChromeHistory(AndroidExtraction):
"""This module extracts records from Android's Chrome browsing history."""

@@ -33,9 +34,19 @@ class ChromeHistory(AndroidExtraction):
"data": f"{record['id']} - {record['url']} (visit ID: {record['visit_id']}, redirect source: {record['redirect_source']})"
}

def check_indicators(self):
if not self.indicators:
return

for result in self.results:
if self.indicators.check_domain(result["url"]):
self.detected.append(result)

def _parse_db(self, db_path):
"""Parse a Chrome History database file.

:param db_path: Path to the History database to process.

"""
conn = sqlite3.connect(db_path)
cur = conn.cursor()

@@ -10,6 +10,7 @@ from .base import AndroidExtraction

log = logging.getLogger(__name__)


class DumpsysBatterystats(AndroidExtraction):
"""This module extracts stats on battery consumption by processes."""

@@ -30,7 +31,7 @@ class DumpsysBatterystats(AndroidExtraction):
handle.write(stats)

log.info("Records from dumpsys batterystats stored at %s",
stats_path)

history = self._adb_command("dumpsys batterystats --history")
if self.output_folder:

mvt/android/modules/adb/dumpsys_full.py (new file, 36 lines)
@@ -0,0 +1,36 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021 The MVT Project Authors.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

import logging
import os

from .base import AndroidExtraction

log = logging.getLogger(__name__)


class DumpsysFull(AndroidExtraction):
"""This module extracts stats on battery consumption by processes."""

def __init__(self, file_path=None, base_folder=None, output_folder=None,
serial=None, fast_mode=False, log=None, results=[]):
super().__init__(file_path=file_path, base_folder=base_folder,
output_folder=output_folder, fast_mode=fast_mode,
log=log, results=results)

def run(self):
self._adb_connect()

stats = self._adb_command("dumpsys")
if self.output_folder:
stats_path = os.path.join(self.output_folder,
"dumpsys.txt")
with open(stats_path, "w") as handle:
handle.write(stats)

log.info("Full dumpsys output stored at %s",
stats_path)

self._adb_disconnect()
@@ -10,8 +10,9 @@ from .base import AndroidExtraction

log = logging.getLogger(__name__)


class DumpsysPackages(AndroidExtraction):
-"""This module extracts stats on installed packages."""
+"""This module extracts details on installed packages."""

def __init__(self, file_path=None, base_folder=None, output_folder=None,
serial=None, fast_mode=False, log=None, results=[]):
@@ -23,6 +24,7 @@ class DumpsysPackages(AndroidExtraction):
self._adb_connect()

output = self._adb_command("dumpsys package")

if self.output_folder:
packages_path = os.path.join(self.output_folder,
"dumpsys_packages.txt")

@@ -10,6 +10,7 @@ from .base import AndroidExtraction

log = logging.getLogger(__name__)


class DumpsysProcstats(AndroidExtraction):
"""This module extracts stats on memory consumption by processes."""

87
mvt/android/modules/adb/dumpsys_receivers.py
Normal file
87
mvt/android/modules/adb/dumpsys_receivers.py
Normal file
@@ -0,0 +1,87 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
|
||||
from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
ACTION_NEW_OUTGOING_SMS = "android.provider.Telephony.NEW_OUTGOING_SMS"
|
||||
ACTION_SMS_RECEIVED = "android.provider.Telephony.SMS_RECEIVED"
|
||||
ACTION_DATA_SMS_RECEIVED = "android.intent.action.DATA_SMS_RECEIVED"
|
||||
ACTION_PHONE_STATE = "android.intent.action.PHONE_STATE"
|
||||
|
||||
|
||||
class DumpsysReceivers(AndroidExtraction):
|
||||
"""This module extracts details on receivers for risky activities."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def run(self):
|
||||
self._adb_connect()
|
||||
|
||||
output = self._adb_command("dumpsys package")
|
||||
if not output:
|
||||
return
|
||||
|
||||
activity = None
|
||||
for line in output.split("\n"):
|
||||
# Find activity block markers.
|
||||
if line.strip().startswith(ACTION_NEW_OUTGOING_SMS):
|
||||
activity = ACTION_NEW_OUTGOING_SMS
|
||||
continue
|
||||
elif line.strip().startswith(ACTION_SMS_RECEIVED):
|
||||
activity = ACTION_SMS_RECEIVED
|
||||
continue
|
||||
elif line.strip().startswith(ACTION_PHONE_STATE):
|
||||
activity = ACTION_PHONE_STATE
|
||||
continue
|
||||
elif line.strip().startswith(ACTION_DATA_SMS_RECEIVED):
|
||||
activity = ACTION_DATA_SMS_RECEIVED
|
||||
continue
|
||||
|
||||
# If we are not in an activity block yet, skip.
|
||||
if not activity:
|
||||
continue
|
||||
|
||||
# If we are in a block but the line does not start with 8 spaces
|
||||
# it means the block ended a new one started, so we reset and
|
||||
# continue.
|
||||
if not line.startswith(" " * 8):
|
||||
activity = None
|
||||
continue
|
||||
|
||||
# If we got this far, we are processing receivers for the
|
||||
# activities we are interested in.
|
||||
receiver = line.strip().split(" ")[1]
|
||||
package_name = receiver.split("/")[0]
|
||||
if package_name == "com.google.android.gms":
|
||||
continue
|
||||
|
||||
if activity == ACTION_NEW_OUTGOING_SMS:
|
||||
self.log.info("Found a receiver to intercept outgoing SMS messages: \"%s\"",
|
||||
receiver)
|
||||
elif activity == ACTION_SMS_RECEIVED:
|
||||
self.log.info("Found a receiver to intercept incoming SMS messages: \"%s\"",
|
||||
receiver)
|
||||
elif activity == ACTION_DATA_SMS_RECEIVED:
|
||||
self.log.info("Found a receiver to intercept incoming data SMS message: \"%s\"",
|
||||
receiver)
|
||||
elif activity == ACTION_PHONE_STATE:
|
||||
self.log.info("Found a receiver monitoring telephony state: \"%s\"",
|
||||
receiver)
|
||||
|
||||
self.results.append({
|
||||
"activity": activity,
|
||||
"package_name": package_name,
|
||||
"receiver": receiver,
|
||||
})
|
||||
|
||||
self._adb_disconnect()
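For context, the parser above walks the receiver resolver section of the dumpsys package output, where each action header is followed by 8-space-indented receiver lines. A minimal, self-contained sketch of the same block parsing, run against a made-up excerpt (real output varies by Android version and device):

# Illustrative only: sample dumpsys excerpt and the parsing approach used above.
ACTION_SMS_RECEIVED = "android.provider.Telephony.SMS_RECEIVED"
sample = (
    "      android.provider.Telephony.SMS_RECEIVED:\n"
    "        43a18c8 com.example.app/.SmsReceiver filter 9f3c2d1\n"
)
activity = None
for line in sample.split("\n"):
    if line.strip().startswith(ACTION_SMS_RECEIVED):
        activity = ACTION_SMS_RECEIVED
        continue
    if not activity or not line.startswith(" " * 8):
        continue
    receiver = line.strip().split(" ")[1]
    print(activity, receiver.split("/")[0], receiver)
    # -> android.provider.Telephony.SMS_RECEIVED com.example.app com.example.app/.SmsReceiver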
|
||||
124
mvt/android/modules/adb/files.py
Normal file
@@ -0,0 +1,124 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
import os
|
||||
import stat
|
||||
import datetime
|
||||
|
||||
from mvt.common.utils import check_for_links, convert_timestamp_to_iso
|
||||
|
||||
from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Files(AndroidExtraction):
|
||||
"""This module extracts the list of files on the device."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
self.full_find = None
|
||||
|
||||
def find_path(self, file_path):
|
||||
"""Checks if Android system supports full find command output"""
|
||||
# Check find command params on first run
|
||||
# Run find command with correct args and parse results.
|
||||
|
||||
# Check that full file printf options are suppported on first run.
|
||||
if self.full_find == None:
|
||||
output = self._adb_command(f"find '/' -maxdepth 1 -printf '%T@ %m %s %u %g %p\n' 2> /dev/null")
|
||||
if not (output or output.strip().splitlines()):
|
||||
# Full find command failed to generate output, fallback to basic file arguments
|
||||
self.full_find = False
|
||||
else:
|
||||
self.full_find = True
|
||||
|
||||
found_files = []
|
||||
if self.full_find:
    # Run the full find command and collect additional file information.
|
||||
output = self._adb_command(f"find '{file_path}' -printf '%T@ %m %s %u %g %p\n' 2> /dev/null")
|
||||
for file_line in output.splitlines():
|
||||
[unix_timestamp, mode, size, owner, group, full_path] = file_line.rstrip().split(" ", 5)
|
||||
mod_time = convert_timestamp_to_iso(datetime.datetime.utcfromtimestamp(int(float(unix_timestamp))))
|
||||
found_files.append({
|
||||
"path": full_path,
|
||||
"modified_time": mod_time,
|
||||
"mode": mode,
|
||||
"is_suid": (int(mode, 8) & stat.S_ISUID) == 2048,
|
||||
"is_sgid": (int(mode, 8) & stat.S_ISGID) == 1024,
|
||||
"size": size,
|
||||
"owner": owner,
|
||||
"group": group,
|
||||
})
|
||||
else:
|
||||
# Run a basic listing of file paths.
|
||||
output = self._adb_command(f"find '{file_path}' 2> /dev/null")
|
||||
for file_line in output.splitlines():
|
||||
found_files.append({
|
||||
"path": file_line.rstrip()
|
||||
})
|
||||
|
||||
return found_files
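As a quick illustration of the parsing above (the sample line is made up), each line of the '%T@ %m %s %u %g %p' output splits into six fields, and the octal mode is checked for the SUID/SGID bits:

import datetime
import stat

line = "1626345600.0000000000 4755 12288 root shell /data/local/tmp/su"  # hypothetical
unix_timestamp, mode, size, owner, group, full_path = line.rstrip().split(" ", 5)
mod_time = datetime.datetime.utcfromtimestamp(int(float(unix_timestamp)))
print(full_path, mod_time.isoformat())
print("is_suid:", (int(mode, 8) & stat.S_ISUID) == 2048)   # True for mode 4755
print("is_sgid:", (int(mode, 8) & stat.S_ISGID) == 1024)   # False for mode 4755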
|
||||
|
||||
def serialize(self, record):
|
||||
if "modified_time" in record:
|
||||
return {
|
||||
"timestamp": record["modified_time"],
|
||||
"module": self.__class__.__name__,
|
||||
"event": "file_modified",
|
||||
"data": record["path"],
|
||||
}
|
||||
|
||||
def check_suspicious(self):
|
||||
"""Check for files with suspicious permissions"""
|
||||
for result in sorted(self.results, key=lambda item: item["path"]):
|
||||
if result.get("is_suid"):
|
||||
self.log.warning("Found an SUID file in a non-standard directory \"%s\".",
|
||||
result["path"])
|
||||
self.detected.append(result)
|
||||
|
||||
def check_indicators(self):
|
||||
"""Check file list for known suspicious files or suspicious properties"""
|
||||
self.check_suspicious()
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
if self.indicators.check_filename(result["path"]):
|
||||
self.log.warning("Found a known suspicous filename at path: \"%s\"", result["path"])
|
||||
self.detected.append(result)
|
||||
|
||||
if self.indicators.check_file_path(result["path"]):
|
||||
self.log.warning("Found a known suspicous file at path: \"%s\"", result["path"])
|
||||
self.detected.append(result)
|
||||
|
||||
def run(self):
|
||||
self._adb_connect()
|
||||
found_file_paths = []
|
||||
|
||||
DATA_PATHS = ["/data/local/tmp/", "/sdcard/", "/tmp/"]
|
||||
for path in DATA_PATHS:
|
||||
file_info = self.find_path(path)
|
||||
found_file_paths.extend(file_info)
|
||||
|
||||
# Store results
|
||||
self.results.extend(found_file_paths)
|
||||
self.log.info("Found %s files in primary Android data directories.", len(found_file_paths))
|
||||
|
||||
if self.fast_mode:
|
||||
self.log.info("Flag --fast was enabled: skipping full file listing")
|
||||
else:
|
||||
self.log.info("Flag --fast was not enabled: processing full file listing. "
|
||||
"This may take a while...")
|
||||
output = self.find_path("/")
|
||||
if output and self.output_folder:
|
||||
self.results.extend(output)
|
||||
log.info("List of visible files stored in files.json")
|
||||
|
||||
self._adb_disconnect()
|
||||
48
mvt/android/modules/adb/logcat.py
Normal file
@@ -0,0 +1,48 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Logcat(AndroidExtraction):
|
||||
"""This module extracts details on installed packages."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def run(self):
|
||||
self._adb_connect()
|
||||
|
||||
# Get the current logcat.
|
||||
output = self._adb_command("logcat -d")
|
||||
# Get the logcat prior to the last reboot.
|
||||
last_output = self._adb_command("logcat -L")
|
||||
|
||||
if self.output_folder:
|
||||
logcat_path = os.path.join(self.output_folder,
|
||||
"logcat.txt")
|
||||
with open(logcat_path, "w") as handle:
|
||||
handle.write(output)
|
||||
|
||||
log.info("Current logcat logs stored at %s",
|
||||
logcat_path)
|
||||
|
||||
logcat_last_path = os.path.join(self.output_folder,
|
||||
"logcat_last.txt")
|
||||
with open(logcat_last_path, "w") as handle:
|
||||
handle.write(last_output)
|
||||
|
||||
log.info("Logcat logs prior to last reboot stored at %s",
|
||||
logcat_last_path)
|
||||
|
||||
self._adb_disconnect()
|
||||
@@ -12,6 +12,7 @@ from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Packages(AndroidExtraction):
|
||||
"""This module extracts the list of installed packages."""
|
||||
|
||||
@@ -41,19 +42,54 @@ class Packages(AndroidExtraction):
|
||||
return records
|
||||
|
||||
def check_indicators(self):
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
root_packages_path = os.path.join("..", "..", "data", "root_packages.txt")
|
||||
root_packages_string = pkg_resources.resource_string(__name__, root_packages_path)
|
||||
root_packages = root_packages_string.decode("utf-8").split("\n")
|
||||
root_packages = [rp.strip() for rp in root_packages]
|
||||
|
||||
for root_package in root_packages:
|
||||
root_package = root_package.strip()
|
||||
if not root_package:
|
||||
continue
|
||||
|
||||
if root_package in self.results:
|
||||
for result in self.results:
|
||||
if result["package_name"] in root_packages:
|
||||
self.log.warning("Found an installed package related to rooting/jailbreaking: \"%s\"",
|
||||
root_package)
|
||||
self.detected.append(root_package)
|
||||
result["package_name"])
|
||||
self.detected.append(result)
|
||||
if result["package_name"] in self.indicators.ioc_app_ids:
|
||||
self.log.warning("Found a malicious package name: \"%s\"",
|
||||
result["package_name"])
|
||||
self.detected.append(result)
|
||||
for file in result["files"]:
|
||||
if file["sha256"] in self.indicators.ioc_files_sha256:
|
||||
self.log.warning("Found a malicious APK: \"%s\" %s",
|
||||
result["package_name"],
|
||||
file["sha256"])
|
||||
self.detected.append(result)
|
||||
|
||||
def _get_files_for_package(self, package_name):
|
||||
output = self._adb_command(f"pm path {package_name}")
|
||||
output = output.strip().replace("package:", "")
|
||||
if not output:
|
||||
return []
|
||||
|
||||
package_files = []
|
||||
for file_path in output.split("\n"):
|
||||
file_path = file_path.strip()
|
||||
|
||||
md5 = self._adb_command(f"md5sum {file_path}").split(" ")[0]
|
||||
sha1 = self._adb_command(f"sha1sum {file_path}").split(" ")[0]
|
||||
sha256 = self._adb_command(f"sha256sum {file_path}").split(" ")[0]
|
||||
sha512 = self._adb_command(f"sha512sum {file_path}").split(" ")[0]
|
||||
|
||||
package_files.append({
|
||||
"path": file_path,
|
||||
"md5": md5,
|
||||
"sha1": sha1,
|
||||
"sha256": sha256,
|
||||
"sha512": sha512,
|
||||
})
|
||||
|
||||
return package_files
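For reference, pm path prints one "package:<apk path>" line per APK (split APKs produce several lines), which is why the prefix is stripped before hashing each file. A small sketch with made-up paths:

# Hypothetical output of `pm path com.example.app`.
output = (
    "package:/data/app/com.example.app-1/base.apk\n"
    "package:/data/app/com.example.app-1/split_config.arm64_v8a.apk"
)
output = output.strip().replace("package:", "")
for file_path in output.split("\n"):
    print(file_path.strip())   # each path would then be hashed with md5sum/sha1sum/etc.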
|
||||
|
||||
def run(self):
|
||||
self._adb_connect()
|
||||
@@ -85,6 +121,8 @@ class Packages(AndroidExtraction):
|
||||
first_install = dumpsys[1].split("=")[1].strip()
|
||||
last_update = dumpsys[2].split("=")[1].strip()
|
||||
|
||||
package_files = self._get_files_for_package(package_name)
|
||||
|
||||
self.results.append({
|
||||
"package_name": package_name,
|
||||
"file_name": file_name,
|
||||
@@ -96,6 +134,7 @@ class Packages(AndroidExtraction):
|
||||
"disabled": False,
|
||||
"system": False,
|
||||
"third_party": False,
|
||||
"files": package_files,
|
||||
})
|
||||
|
||||
cmds = [
|
||||
|
||||
@@ -9,6 +9,7 @@ from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Processes(AndroidExtraction):
|
||||
"""This module extracts details on running processes."""
|
||||
|
||||
|
||||
@@ -12,6 +12,7 @@ from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class RootBinaries(AndroidExtraction):
|
||||
"""This module extracts the list of installed packages."""
|
||||
|
||||
|
||||
@@ -15,12 +15,12 @@ log = logging.getLogger(__name__)
|
||||
|
||||
SMS_BUGLE_PATH = "data/data/com.google.android.apps.messaging/databases/bugle_db"
|
||||
SMS_BUGLE_QUERY = """
|
||||
SELECT
|
||||
SELECT
|
||||
ppl.normalized_destination AS number,
|
||||
p.timestamp AS timestamp,
|
||||
CASE WHEN m.sender_id IN
|
||||
CASE WHEN m.sender_id IN
|
||||
(SELECT _id FROM participants WHERE contact_id=-1)
|
||||
THEN 2 ELSE 1 END incoming, p.text AS text
|
||||
THEN 2 ELSE 1 END incoming, p.text AS text
|
||||
FROM messages m, conversations c, parts p,
|
||||
participants ppl, conversation_participants cp
|
||||
WHERE (m.conversation_id = c._id)
|
||||
@@ -31,14 +31,15 @@ WHERE (m.conversation_id = c._id)
|
||||
|
||||
SMS_MMSSMS_PATH = "data/data/com.android.providers.telephony/databases/mmssms.db"
|
||||
SMS_MMSMS_QUERY = """
|
||||
SELECT
|
||||
SELECT
|
||||
address AS number,
|
||||
date_sent AS timestamp,
|
||||
type as incoming,
|
||||
body AS text
|
||||
body AS text
|
||||
FROM sms;
|
||||
"""
|
||||
|
||||
|
||||
class SMS(AndroidExtraction):
|
||||
"""This module extracts all SMS messages containing links."""
|
||||
|
||||
@@ -62,7 +63,7 @@ class SMS(AndroidExtraction):
|
||||
return
|
||||
|
||||
for message in self.results:
|
||||
if not "text" in message:
|
||||
if "text" not in message:
|
||||
continue
|
||||
|
||||
message_links = check_for_links(message["text"])
|
||||
@@ -71,11 +72,13 @@ class SMS(AndroidExtraction):
|
||||
|
||||
def _parse_db(self, db_path):
|
||||
"""Parse an Android bugle_db SMS database file.
|
||||
|
||||
:param db_path: Path to the Android SMS database file to process
|
||||
|
||||
"""
|
||||
conn = sqlite3.connect(db_path)
|
||||
cur = conn.cursor()
|
||||
|
||||
|
||||
if (self.SMS_DB_TYPE == 1):
|
||||
cur.execute(SMS_BUGLE_QUERY)
|
||||
elif (self.SMS_DB_TYPE == 2):
|
||||
|
||||
@@ -16,6 +16,7 @@ log = logging.getLogger(__name__)
|
||||
|
||||
WHATSAPP_PATH = "data/data/com.whatsapp/databases/msgstore.db"
|
||||
|
||||
|
||||
class Whatsapp(AndroidExtraction):
|
||||
"""This module extracts all WhatsApp messages containing links."""
|
||||
|
||||
@@ -39,7 +40,7 @@ class Whatsapp(AndroidExtraction):
|
||||
return
|
||||
|
||||
for message in self.results:
|
||||
if not "data" in message:
|
||||
if "data" not in message:
|
||||
continue
|
||||
|
||||
message_links = check_for_links(message["data"])
|
||||
@@ -48,7 +49,9 @@ class Whatsapp(AndroidExtraction):
|
||||
|
||||
def _parse_db(self, db_path):
|
||||
"""Parse an Android msgstore.db WhatsApp database file.
|
||||
|
||||
:param db_path: Path to the Android WhatsApp database file to process
|
||||
|
||||
"""
|
||||
conn = sqlite3.connect(db_path)
|
||||
cur = conn.cursor()
|
||||
|
||||
@@ -5,4 +5,4 @@
|
||||
|
||||
from .sms import SMS
|
||||
|
||||
BACKUP_MODULES = [SMS,]
|
||||
BACKUP_MODULES = [SMS]
|
||||
|
||||
@@ -24,7 +24,7 @@ class SMS(MVTModule):
|
||||
return
|
||||
|
||||
for message in self.results:
|
||||
if not "body" in message:
|
||||
if "body" not in message:
|
||||
continue
|
||||
|
||||
message_links = check_for_links(message["body"])
|
||||
|
||||
@@ -12,9 +12,12 @@ from .url import URL
|
||||
class IndicatorsFileBadFormat(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class Indicators:
|
||||
"""This class is used to parse indicators from a STIX2 file and provide
|
||||
functions to compare extracted artifacts to the indicators.
|
||||
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, log=None):
|
||||
@@ -23,15 +26,35 @@ class Indicators:
|
||||
self.ioc_processes = []
|
||||
self.ioc_emails = []
|
||||
self.ioc_files = []
|
||||
self.ioc_files_sha256 = []
|
||||
self.ioc_app_ids = []
|
||||
self.ios_profile_ids = []
|
||||
self.ioc_count = 0
|
||||
self._check_env_variable()
|
||||
|
||||
def _add_indicator(self, ioc, iocs_list):
|
||||
if ioc not in iocs_list:
|
||||
iocs_list.append(ioc)
|
||||
self.ioc_count += 1
|
||||
|
||||
def _check_env_variable(self):
|
||||
"""
|
||||
Checks if a variable MVT_STIX2 contains path to STIX Files
|
||||
"""
|
||||
if "MVT_STIX2" in os.environ:
|
||||
paths = os.environ["MVT_STIX2"].split(":")
|
||||
for path in paths:
|
||||
if os.path.isfile(path):
|
||||
self.parse_stix2(path)
|
||||
else:
|
||||
self.log.info("Invalid STIX2 path %s in MVT_STIX2 environment variable", path)
|
||||
|
||||
def parse_stix2(self, file_path):
|
||||
"""Extract indicators from a STIX2 file.
|
||||
|
||||
:param file_path: Path to the STIX2 file to parse
|
||||
:type file_path: str
|
||||
|
||||
"""
|
||||
self.log.info("Parsing STIX2 indicators file at path %s",
|
||||
file_path)
|
||||
@@ -63,10 +86,29 @@ class Indicators:
|
||||
elif key == "file:name":
|
||||
self._add_indicator(ioc=value,
|
||||
iocs_list=self.ioc_files)
|
||||
elif key == "app:id":
|
||||
self._add_indicator(ioc=value,
|
||||
iocs_list=self.ioc_app_ids)
|
||||
elif key == "configuration-profile:id":
|
||||
self._add_indicator(ioc=value,
|
||||
iocs_list=self.ios_profile_ids)
|
||||
elif key == "file:hashes.sha256":
|
||||
self._add_indicator(ioc=value,
|
||||
iocs_list=self.ioc_files_sha256)
|
||||
|
||||
def check_domain(self, url):
|
||||
def check_domain(self, url) -> bool:
|
||||
"""Check if a given URL matches any of the provided domain indicators.
|
||||
|
||||
:param url: URL to match against domain indicators
|
||||
:type url: str
|
||||
:returns: True if the URL matched an indicator, otherwise False
|
||||
:rtype: bool
|
||||
|
||||
"""
|
||||
# TODO: If the IOC domain contains a subdomain, it is not currently
|
||||
# being matched.
|
||||
if not url:
|
||||
return False
|
||||
|
||||
try:
|
||||
# First we use the provided URL.
|
||||
@@ -91,7 +133,7 @@ class Indicators:
|
||||
else:
|
||||
# If it's not shortened, we just use the original URL object.
|
||||
final_url = orig_url
|
||||
except Exception as e:
|
||||
except Exception:
|
||||
# If URL parsing failed, we just try to do a simple substring
|
||||
# match.
|
||||
for ioc in self.ioc_domains:
|
||||
@@ -124,18 +166,35 @@ class Indicators:
|
||||
|
||||
return True
|
||||
|
||||
def check_domains(self, urls):
|
||||
"""Check the provided list of (suspicious) domains against a list of URLs.
|
||||
:param urls: List of URLs to check
|
||||
return False
|
||||
|
||||
def check_domains(self, urls) -> bool:
|
||||
"""Check a list of URLs against the provided list of domain indicators.
|
||||
|
||||
:param urls: List of URLs to check against domain indicators
|
||||
:type urls: list
|
||||
:returns: True if any URL matched an indicator, otherwise False
|
||||
:rtype: bool
|
||||
|
||||
"""
|
||||
if not urls:
|
||||
return False
|
||||
|
||||
for url in urls:
|
||||
if self.check_domain(url):
|
||||
return True
|
||||
|
||||
def check_process(self, process):
|
||||
return False
|
||||
|
||||
def check_process(self, process) -> bool:
|
||||
"""Check the provided process name against the list of process
|
||||
indicators.
|
||||
:param process: Process name to check
|
||||
|
||||
:param process: Process name to check against process indicators
|
||||
:type process: str
|
||||
:returns: True if process matched an indicator, otherwise False
|
||||
:rtype: bool
|
||||
|
||||
"""
|
||||
if not process:
|
||||
return False
|
||||
@@ -151,18 +210,35 @@ class Indicators:
|
||||
self.log.warning("Found a truncated known suspicious process name \"%s\"", process)
|
||||
return True
|
||||
|
||||
def check_processes(self, processes):
|
||||
return False
|
||||
|
||||
def check_processes(self, processes) -> bool:
|
||||
"""Check the provided list of processes against the list of
|
||||
process indicators.
|
||||
:param processes: List of processes to check
|
||||
|
||||
:param processes: List of processes to check against process indicators
|
||||
:type processes: list
|
||||
:returns: True if process matched an indicator, otherwise False
|
||||
:rtype: bool
|
||||
|
||||
"""
|
||||
if not processes:
|
||||
return False
|
||||
|
||||
for process in processes:
|
||||
if self.check_process(process):
|
||||
return True
|
||||
|
||||
def check_email(self, email):
|
||||
return False
|
||||
|
||||
def check_email(self, email) -> bool:
|
||||
"""Check the provided email against the list of email indicators.
|
||||
:param email: Suspicious email to check
|
||||
|
||||
:param email: Email address to check against email indicators
|
||||
:type email: str
|
||||
:returns: True if email address matched an indicator, otherwise False
|
||||
:rtype: bool
|
||||
|
||||
"""
|
||||
if not email:
|
||||
return False
|
||||
@@ -171,14 +247,56 @@ class Indicators:
|
||||
self.log.warning("Found a known suspicious email address: \"%s\"", email)
|
||||
return True
|
||||
|
||||
def check_file(self, file_path):
|
||||
return False
|
||||
|
||||
def check_filename(self, file_path) -> bool:
|
||||
"""Check the provided file path against the list of file indicators.
|
||||
:param file_path: Path or name of the file to check
|
||||
|
||||
:param file_path: File path or file name to check against file
|
||||
indicators
|
||||
:type file_path: str
|
||||
:returns: True if the file path matched an indicator, otherwise False
|
||||
:rtype: bool
|
||||
|
||||
"""
|
||||
if not file_path:
|
||||
return False
|
||||
|
||||
file_name = os.path.basename(file_path)
|
||||
if file_name in self.ioc_files:
|
||||
self.log.warning("Found a known suspicious file: \"%s\"", file_path)
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def check_file_path(self, file_path) -> bool:
|
||||
"""Check the provided file path against the list of file indicators.
|
||||
|
||||
:param file_path: File path or file name to check against file
|
||||
indicators
|
||||
:type file_path: str
|
||||
:returns: True if the file path matched an indicator, otherwise False
|
||||
:rtype: bool
|
||||
|
||||
"""
|
||||
if not file_path:
|
||||
return False
|
||||
|
||||
for ioc_file in self.ioc_files:
|
||||
# Strip any trailing slash from indicator paths to match directories.
|
||||
if file_path.startswith(ioc_file.rstrip("/")):
|
||||
return True
|
||||
return False
|
||||
|
||||
def check_profile(self, profile_uuid) -> bool:
|
||||
"""Check the provided configuration profile UUID against the list of indicators.
|
||||
|
||||
:param profile_uuid: Profile UUID to check against configuration profile indicators
|
||||
:type profile_uuid: str
|
||||
:returns: True if the UUID in indicator list, otherwise False
|
||||
:rtype: bool
|
||||
|
||||
"""
|
||||
if profile_uuid in self.ios_profile_ids:
|
||||
return True
|
||||
|
||||
return False
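A minimal usage sketch of the class (the indicator file path and the URL are hypothetical):

import logging

ind = Indicators(log=logging.getLogger(__name__))
ind.parse_stix2("/path/to/indicators.stix2")                 # hypothetical STIX2 file
print(ind.check_domain("https://malicious.example.com/a"))   # True only if it matches a loaded indicator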
|
||||
25
mvt/common/logo.py
Normal file
@@ -0,0 +1,25 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
from rich import print
|
||||
|
||||
from .version import MVT_VERSION, check_for_updates
|
||||
|
||||
|
||||
def logo():
|
||||
print("\n")
|
||||
print("\t[bold]MVT[/bold] - Mobile Verification Toolkit")
|
||||
print("\t\thttps://mvt.re")
|
||||
print(f"\t\tVersion: {MVT_VERSION}")
|
||||
|
||||
try:
|
||||
latest_version = check_for_updates()
|
||||
except Exception:
|
||||
pass
|
||||
else:
|
||||
if latest_version:
|
||||
print(f"\t\t[bold]Version {latest_version} is available! Upgrade mvt![/bold]")
|
||||
|
||||
print("\n")
|
||||
@@ -10,18 +10,19 @@ import re
|
||||
|
||||
import simplejson as json
|
||||
|
||||
from .indicators import Indicators
|
||||
|
||||
|
||||
class DatabaseNotFoundError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class DatabaseCorruptedError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class InsufficientPrivileges(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class MVTModule(object):
|
||||
"""This class provides a base for all extraction modules."""
|
||||
|
||||
@@ -31,12 +32,18 @@ class MVTModule(object):
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
"""Initialize module.
|
||||
:param file_path: Path to the module's database file, if there is any.
|
||||
|
||||
:param file_path: Path to the module's database file, if there is any
|
||||
:type file_path: str
|
||||
:param base_folder: Path to the base folder (backup or filesystem dump)
|
||||
:type file_path: str
|
||||
:param output_folder: Folder where results will be stored
|
||||
:type output_folder: str
|
||||
:param fast_mode: Flag to enable or disable slow modules
|
||||
:type fast_mode: bool
|
||||
:param log: Handle to logger
|
||||
:param results: Provided list of results entries
|
||||
:type results: list
|
||||
"""
|
||||
self.file_path = file_path
|
||||
self.base_folder = base_folder
|
||||
@@ -59,23 +66,23 @@ class MVTModule(object):
|
||||
return cls(results=results, log=log)
|
||||
|
||||
def get_slug(self):
|
||||
"""Use the module's class name to retrieve a slug"""
|
||||
if self.slug:
|
||||
return self.slug
|
||||
|
||||
sub = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", self.__class__.__name__)
|
||||
return re.sub("([a-z0-9])([A-Z])", r"\1_\2", sub).lower()
|
||||
|
||||
def load_indicators(self, file_path):
|
||||
self.indicators = Indicators(file_path, self.log)
|
||||
|
||||
def check_indicators(self):
|
||||
"""Check the results of this module against a provided list of
|
||||
indicators."""
|
||||
indicators.
|
||||
|
||||
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def save_to_json(self):
|
||||
"""Save the collected results to a json file.
|
||||
"""
|
||||
"""Save the collected results to a json file."""
|
||||
if not self.output_folder:
|
||||
return
|
||||
|
||||
@@ -100,9 +107,20 @@ class MVTModule(object):
|
||||
def serialize(self, record):
|
||||
raise NotImplementedError
|
||||
|
||||
def to_timeline(self):
|
||||
"""Convert results into a timeline.
|
||||
@staticmethod
|
||||
def _deduplicate_timeline(timeline):
|
||||
"""Serialize entry as JSON to deduplicate repeated entries
|
||||
|
||||
:param timeline: List of entries from timeline to deduplicate
|
||||
|
||||
"""
|
||||
timeline_set = set()
|
||||
for record in timeline:
|
||||
timeline_set.add(json.dumps(record, sort_keys=True))
|
||||
return [json.loads(record) for record in timeline_set]
|
||||
|
||||
def to_timeline(self):
|
||||
"""Convert results into a timeline."""
|
||||
for result in self.results:
|
||||
record = self.serialize(result)
|
||||
if record:
|
||||
@@ -120,19 +138,11 @@ class MVTModule(object):
|
||||
self.timeline_detected.append(record)
|
||||
|
||||
# De-duplicate timeline entries.
|
||||
self.timeline = self.timeline_deduplicate(self.timeline)
|
||||
self.timeline_detected = self.timeline_deduplicate(self.timeline_detected)
|
||||
|
||||
def timeline_deduplicate(self, timeline):
|
||||
"""Serialize entry as JSON to deduplicate repeated entries"""
|
||||
timeline_set = set()
|
||||
for record in timeline:
|
||||
timeline_set.add(json.dumps(record, sort_keys=True))
|
||||
return [json.loads(record) for record in timeline_set]
|
||||
self.timeline = self._deduplicate_timeline(self.timeline)
|
||||
self.timeline_detected = self._deduplicate_timeline(self.timeline_detected)
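A minimal illustration of the JSON round-trip used for de-duplication above (the records are made up):

import simplejson as json

timeline = [
    {"timestamp": "2021-07-01 00:00:00.000000", "event": "file_modified", "data": "/tmp/x"},
    {"timestamp": "2021-07-01 00:00:00.000000", "event": "file_modified", "data": "/tmp/x"},
]
unique = {json.dumps(record, sort_keys=True) for record in timeline}
print([json.loads(record) for record in unique])   # only one entry remains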
|
||||
|
||||
def run(self):
|
||||
"""Run the main module procedure.
|
||||
"""
|
||||
"""Run the main module procedure."""
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
@@ -150,7 +160,7 @@ def run_module(module):
|
||||
module.log.info("There might be no data to extract by module %s: %s",
|
||||
module.__class__.__name__, e)
|
||||
except DatabaseCorruptedError as e:
|
||||
module.log.error("The %s module database seems to be corrupted and recovery failed: %s",
|
||||
module.log.error("The %s module database seems to be corrupted: %s",
|
||||
module.__class__.__name__, e)
|
||||
except Exception as e:
|
||||
module.log.exception("Error in running extraction from module %s: %s",
|
||||
@@ -177,8 +187,10 @@ def run_module(module):
|
||||
|
||||
def save_timeline(timeline, timeline_path):
|
||||
"""Save the timeline in a csv file.
|
||||
:param timeline: List of records to order and store.
|
||||
:param timeline_path: Path to the csv file to store the timeline to.
|
||||
|
||||
:param timeline: List of records to order and store
|
||||
:param timeline_path: Path to the csv file to store the timeline to
|
||||
|
||||
"""
|
||||
with io.open(timeline_path, "a+", encoding="utf-8") as handle:
|
||||
csvoutput = csv.writer(handle, delimiter=",", quotechar="\"")
|
||||
|
||||
@@ -9,8 +9,7 @@ from click import Option, UsageError
|
||||
|
||||
|
||||
class MutuallyExclusiveOption(Option):
|
||||
"""This class extends click to support mutually exclusive options.
|
||||
"""
|
||||
"""This class extends click to support mutually exclusive options."""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.mutually_exclusive = set(kwargs.pop("mutually_exclusive", []))
|
||||
|
||||
@@ -7,6 +7,7 @@ import requests
|
||||
from tld import get_tld
|
||||
|
||||
SHORTENER_DOMAINS = [
|
||||
"1drv.ms",
|
||||
"1link.in",
|
||||
"1url.com",
|
||||
"2big.at",
|
||||
@@ -15,29 +16,29 @@ SHORTENER_DOMAINS = [
|
||||
"2ya.com",
|
||||
"4url.cc",
|
||||
"6url.com",
|
||||
"a.gg",
|
||||
"a.nf",
|
||||
"a2a.me",
|
||||
"abbrr.com",
|
||||
"adf.ly",
|
||||
"adjix.com",
|
||||
"a.gg",
|
||||
"alturl.com",
|
||||
"a.nf",
|
||||
"atu.ca",
|
||||
"b23.ru",
|
||||
"bacn.me",
|
||||
"bit.ly",
|
||||
"bit.do",
|
||||
"bit.ly",
|
||||
"bkite.com",
|
||||
"bloat.me",
|
||||
"budurl.com",
|
||||
"buff.ly",
|
||||
"buk.me",
|
||||
"burnurl.com",
|
||||
"c-o.in",
|
||||
"chilp.it",
|
||||
"clck.ru",
|
||||
"clickmeter.com",
|
||||
"cli.gs",
|
||||
"c-o.in",
|
||||
"clickmeter.com",
|
||||
"cort.as",
|
||||
"cut.ly",
|
||||
"cuturl.com",
|
||||
@@ -55,19 +56,20 @@ SHORTENER_DOMAINS = [
|
||||
"esyurl.com",
|
||||
"ewerl.com",
|
||||
"fa.b",
|
||||
"fff.to",
|
||||
"ff.im",
|
||||
"fff.to",
|
||||
"fhurl.com",
|
||||
"fire.to",
|
||||
"firsturl.de",
|
||||
"flic.kr",
|
||||
"fly2.ws",
|
||||
"fon.gs",
|
||||
"forms.gle",
|
||||
"fwd4.me",
|
||||
"gl.am",
|
||||
"go2cut.com",
|
||||
"go2.me",
|
||||
"go.9nl.com",
|
||||
"go2.me",
|
||||
"go2cut.com",
|
||||
"goo.gl",
|
||||
"goshrink.com",
|
||||
"gowat.ch",
|
||||
@@ -77,6 +79,7 @@ SHORTENER_DOMAINS = [
|
||||
"hex.io",
|
||||
"hover.com",
|
||||
"href.in",
|
||||
"ht.ly",
|
||||
"htxt.it",
|
||||
"hugeurl.com",
|
||||
"hurl.it",
|
||||
@@ -85,8 +88,8 @@ SHORTENER_DOMAINS = [
|
||||
"icanhaz.com",
|
||||
"idek.net",
|
||||
"inreply.to",
|
||||
"iscool.net",
|
||||
"is.gd",
|
||||
"iscool.net",
|
||||
"iterasi.net",
|
||||
"jijr.com",
|
||||
"jmp2.net",
|
||||
@@ -102,10 +105,11 @@ SHORTENER_DOMAINS = [
|
||||
"linkbee.com",
|
||||
"linkbun.ch",
|
||||
"liurl.cn",
|
||||
"lnk.gd",
|
||||
"lnk.in",
|
||||
"ln-s.net",
|
||||
"ln-s.ru",
|
||||
"lnk.gd",
|
||||
"lnk.in",
|
||||
"lnkd.in",
|
||||
"loopt.us",
|
||||
"lru.jp",
|
||||
"lt.tl",
|
||||
@@ -122,44 +126,44 @@ SHORTENER_DOMAINS = [
|
||||
"nn.nf",
|
||||
"notlong.com",
|
||||
"nsfw.in",
|
||||
"o-x.fr",
|
||||
"om.ly",
|
||||
"ow.ly",
|
||||
"o-x.fr",
|
||||
"pd.am",
|
||||
"pic.gd",
|
||||
"ping.fm",
|
||||
"piurl.com",
|
||||
"pnt.me",
|
||||
"poprl.com",
|
||||
"posted.at",
|
||||
"post.ly",
|
||||
"posted.at",
|
||||
"profile.to",
|
||||
"qicute.com",
|
||||
"qlnk.net",
|
||||
"quip-art.com",
|
||||
"rb6.me",
|
||||
"redirx.com",
|
||||
"rickroll.it",
|
||||
"ri.ms",
|
||||
"rickroll.it",
|
||||
"riz.gd",
|
||||
"rsmonkey.com",
|
||||
"rubyurl.com",
|
||||
"ru.ly",
|
||||
"rubyurl.com",
|
||||
"s7y.us",
|
||||
"safe.mn",
|
||||
"sharein.com",
|
||||
"sharetabs.com",
|
||||
"shorl.com",
|
||||
"short.ie",
|
||||
"short.to",
|
||||
"shortlinks.co.uk",
|
||||
"shortna.me",
|
||||
"short.to",
|
||||
"shorturl.com",
|
||||
"shoturl.us",
|
||||
"shrinkify.com",
|
||||
"shrinkster.com",
|
||||
"shrten.com",
|
||||
"shrt.st",
|
||||
"shrten.com",
|
||||
"shrunkin.com",
|
||||
"shw.me",
|
||||
"simurl.com",
|
||||
@@ -177,20 +181,20 @@ SHORTENER_DOMAINS = [
|
||||
"tcrn.ch",
|
||||
"thrdl.es",
|
||||
"tighturl.com",
|
||||
"tiny123.com",
|
||||
"tinyarro.ws",
|
||||
"tiny.cc",
|
||||
"tiny.pl",
|
||||
"tiny123.com",
|
||||
"tinyarro.ws",
|
||||
"tinytw.it",
|
||||
"tinyuri.ca",
|
||||
"tinyurl.com",
|
||||
"tinyvid.io",
|
||||
"tnij.org",
|
||||
"togoto.us",
|
||||
"to.ly",
|
||||
"traceurl.com",
|
||||
"togoto.us",
|
||||
"tr.im",
|
||||
"tr.my",
|
||||
"traceurl.com",
|
||||
"turo.us",
|
||||
"tweetburner.com",
|
||||
"twirl.at",
|
||||
@@ -200,21 +204,21 @@ SHORTENER_DOMAINS = [
|
||||
"twiturl.de",
|
||||
"twurl.cc",
|
||||
"twurl.nl",
|
||||
"u6e.de",
|
||||
"ub0.cc",
|
||||
"u.mavrev.com",
|
||||
"u.nu",
|
||||
"u6e.de",
|
||||
"ub0.cc",
|
||||
"updating.me",
|
||||
"ur1.ca",
|
||||
"url.co.uk",
|
||||
"url.ie",
|
||||
"url4.eu",
|
||||
"urlao.com",
|
||||
"urlbrief.com",
|
||||
"url.co.uk",
|
||||
"urlcover.com",
|
||||
"urlcut.com",
|
||||
"urlenco.de",
|
||||
"urlhawk.com",
|
||||
"url.ie",
|
||||
"urlkiss.com",
|
||||
"urlot.com",
|
||||
"urlpire.com",
|
||||
@@ -227,29 +231,26 @@ SHORTENER_DOMAINS = [
|
||||
"wapurl.co.uk",
|
||||
"wipi.es",
|
||||
"wp.me",
|
||||
"xaddr.com",
|
||||
"x.co",
|
||||
"x.se",
|
||||
"xaddr.com",
|
||||
"xeeurl.com",
|
||||
"xr.com",
|
||||
"xrl.in",
|
||||
"xrl.us",
|
||||
"x.se",
|
||||
"xurl.jp",
|
||||
"xzb.cc",
|
||||
"yep.it",
|
||||
"yfrog.com",
|
||||
"ymlp.com",
|
||||
"yweb.com",
|
||||
"zi.ma",
|
||||
"zi.pe",
|
||||
"zipmyurl.com",
|
||||
"zz.gd",
|
||||
"ymlp.com",
|
||||
"forms.gle",
|
||||
"ht.ly",
|
||||
"lnkd.in",
|
||||
"1drv.ms",
|
||||
]
|
||||
|
||||
|
||||
class URL:
|
||||
|
||||
def __init__(self, url):
|
||||
@@ -263,33 +264,50 @@ class URL:
|
||||
|
||||
def get_domain(self):
|
||||
"""Get the domain from a URL.
|
||||
|
||||
:param url: URL to parse
|
||||
:returns: Just the domain name extracted from the URL
|
||||
:type url: str
|
||||
:returns: Domain name extracted from URL
|
||||
:rtype: str
|
||||
|
||||
"""
|
||||
# TODO: Properly handle exception.
|
||||
try:
|
||||
return get_tld(self.url, as_object=True, fix_protocol=True).parsed_url.netloc.lower().lstrip("www.")
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def get_top_level(self):
|
||||
"""Get only the top level domain from a URL.
|
||||
"""Get only the top-level domain from a URL.
|
||||
|
||||
:param url: URL to parse
|
||||
:returns: The top level domain extracted from the URL
|
||||
:type url: str
|
||||
:returns: Top-level domain name extracted from URL
|
||||
:rtype: str
|
||||
|
||||
"""
|
||||
# TODO: Properly handle exception.
|
||||
try:
|
||||
return get_tld(self.url, as_object=True, fix_protocol=True).fld.lower()
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def check_if_shortened(self):
|
||||
def check_if_shortened(self) -> bool:
|
||||
"""Check if the URL is among list of shortener services.
|
||||
|
||||
|
||||
:returns: True if the URL is shortened, otherwise False
|
||||
|
||||
:rtype: bool
|
||||
|
||||
"""
|
||||
if self.domain.lower() in SHORTENER_DOMAINS:
|
||||
self.is_shortened = True
|
||||
|
||||
return self.is_shortened
|
||||
|
||||
def unshorten(self):
|
||||
"""Unshorten the URL by requesting an HTTP HEAD response."""
|
||||
res = requests.head(self.url)
|
||||
if str(res.status_code).startswith("30"):
|
||||
return res.headers["Location"]
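A hedged usage sketch of the URL helper (the shortened address below is made up and may not resolve):

url = URL("https://bit.ly/3abcdef")
if url.check_if_shortened():
    final = url.unshorten()   # issues an HTTP HEAD request and follows the Location header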
|
||||
|
||||
@@ -10,8 +10,13 @@ import re
|
||||
|
||||
def convert_mactime_to_unix(timestamp, from_2001=True):
|
||||
"""Converts Mac Standard Time to a Unix timestamp.
|
||||
:param timestamp: MacTime timestamp (either int or float)
|
||||
:returns: Unix epoch timestamp
|
||||
|
||||
:param timestamp: MacTime timestamp (either int or float).
|
||||
:type timestamp: int
|
||||
:param from_2001: bool: Whether to (Default value = True)
|
||||
:param from_2001: Whether the MacTime epoch starts on 2001-01-01 (Default value = True)
|
||||
:returns: Unix epoch timestamp.
|
||||
|
||||
"""
|
||||
if not timestamp:
|
||||
return None
|
||||
@@ -34,35 +39,49 @@ def convert_mactime_to_unix(timestamp, from_2001=True):
|
||||
|
||||
def convert_chrometime_to_unix(timestamp):
|
||||
"""Converts Chrome timestamp to a Unix timestamp.
|
||||
:param timestamp: Chrome timestamp as int
|
||||
:returns: Unix epoch timestamp
|
||||
|
||||
:param timestamp: Chrome timestamp as int.
|
||||
:type timestamp: int
|
||||
:returns: datetime object converted from the Chrome timestamp.
|
||||
|
||||
"""
|
||||
epoch_start = datetime.datetime(1601, 1 , 1)
|
||||
epoch_start = datetime.datetime(1601, 1, 1)
|
||||
delta = datetime.timedelta(microseconds=timestamp)
|
||||
return epoch_start + delta
|
||||
|
||||
|
||||
def convert_timestamp_to_iso(timestamp):
|
||||
"""Converts Unix timestamp to ISO string.
|
||||
:param timestamp: Unix timestamp
|
||||
:returns: ISO timestamp string in YYYY-mm-dd HH:MM:SS.ms format
|
||||
|
||||
:param timestamp: Unix timestamp.
|
||||
:type timestamp: int
|
||||
:returns: ISO timestamp string in YYYY-mm-dd HH:MM:SS.ms format.
|
||||
:rtype: str
|
||||
|
||||
"""
|
||||
try:
|
||||
return timestamp.strftime("%Y-%m-%d %H:%M:%S.%f")
|
||||
except Exception:
|
||||
return None
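As a worked example of the conversions above (the Chrome timestamp is made up), Chrome/WebKit timestamps count microseconds since 1601-01-01, so the conversion is just an offset from that epoch:

import datetime

chrome_ts = 13270000000000000   # hypothetical value, microseconds since 1601-01-01
as_datetime = datetime.datetime(1601, 1, 1) + datetime.timedelta(microseconds=chrome_ts)
print(as_datetime.strftime("%Y-%m-%d %H:%M:%S.%f"))   # a date in 2021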
|
||||
|
||||
|
||||
def check_for_links(text):
|
||||
"""Checks if a given text contains HTTP links.
|
||||
:param text: Any provided text
|
||||
:returns: Search results
|
||||
|
||||
:param text: Any provided text.
|
||||
:type text: str
|
||||
:returns: Search results.
|
||||
|
||||
"""
|
||||
return re.findall("(?P<url>https?://[^\s]+)", text, re.IGNORECASE)
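For example, the regular expression above returns every HTTP or HTTPS link found in a text (the sample text is illustrative):

import re

text = "Update required: https://example.com/login now"
print(re.findall(r"(?P<url>https?://[^\s]+)", text, re.IGNORECASE))   # ['https://example.com/login']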
|
||||
|
||||
|
||||
def get_sha256_from_file_path(file_path):
|
||||
"""Calculate the SHA256 hash of a file from a file path.
|
||||
|
||||
:param file_path: Path to the file to hash
|
||||
:returns: The SHA256 hash string
|
||||
|
||||
"""
|
||||
sha256_hash = hashlib.sha256()
|
||||
with open(file_path, "rb") as handle:
|
||||
@@ -71,12 +90,16 @@ def get_sha256_from_file_path(file_path):
|
||||
|
||||
return sha256_hash.hexdigest()
|
||||
|
||||
|
||||
# Note: taken from here:
|
||||
# https://stackoverflow.com/questions/57014259/json-dumps-on-dictionary-with-bytes-for-keys
|
||||
def keys_bytes_to_string(obj):
|
||||
"""Convert object keys from bytes to string.
|
||||
|
||||
:param obj: Object to convert from bytes to string.
|
||||
:returns: Converted object.
|
||||
:returns: Object converted to string.
|
||||
:rtype: str
|
||||
|
||||
"""
|
||||
new_obj = {}
|
||||
if not isinstance(obj, dict):
|
||||
|
||||
20
mvt/common/version.py
Normal file
@@ -0,0 +1,20 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import requests
|
||||
from packaging import version
|
||||
|
||||
MVT_VERSION = "1.4.0"
|
||||
|
||||
|
||||
def check_for_updates():
|
||||
res = requests.get("https://pypi.org/pypi/mvt/json")
|
||||
data = res.json()
|
||||
latest_version = data.get("info", {}).get("version", "")
|
||||
|
||||
if version.parse(latest_version) > version.parse(MVT_VERSION):
|
||||
return latest_version
|
||||
|
||||
return None
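The comparison relies on packaging's version parsing, which handles plain release strings as expected; for example:

from packaging import version

print(version.parse("1.4.1") > version.parse("1.4.0"))   # True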
|
||||
@@ -10,8 +10,11 @@ import click
|
||||
from rich.logging import RichHandler
|
||||
from rich.prompt import Prompt
|
||||
|
||||
from mvt.common.help import *
|
||||
from mvt.common.help import HELP_MSG_MODULE, HELP_MSG_IOC
|
||||
from mvt.common.help import HELP_MSG_FAST, HELP_MSG_OUTPUT
|
||||
from mvt.common.help import HELP_MSG_LIST_MODULES
|
||||
from mvt.common.indicators import Indicators, IndicatorsFileBadFormat
|
||||
from mvt.common.logo import logo
|
||||
from mvt.common.module import run_module, save_timeline
|
||||
from mvt.common.options import MutuallyExclusiveOption
|
||||
|
||||
@@ -29,11 +32,20 @@ log = logging.getLogger(__name__)
|
||||
# Set this environment variable to a password if needed.
|
||||
PASSWD_ENV = "MVT_IOS_BACKUP_PASSWORD"
|
||||
|
||||
|
||||
#==============================================================================
|
||||
# Main
|
||||
#==============================================================================
|
||||
@click.group(invoke_without_command=False)
|
||||
def cli():
|
||||
logo()
|
||||
|
||||
|
||||
#==============================================================================
|
||||
# Command: version
|
||||
#==============================================================================
|
||||
@cli.command("version", help="Show the currently installed version of MVT")
|
||||
def version():
|
||||
return
|
||||
|
||||
|
||||
@@ -163,7 +175,7 @@ def check_backup(ctx, iocs, output, fast, backup_path, list_modules, module):
|
||||
log=logging.getLogger(backup_module.__module__))
|
||||
m.is_backup = True
|
||||
|
||||
if iocs:
|
||||
if indicators.ioc_count > 0:
|
||||
m.indicators = indicators
|
||||
m.indicators.log = m.log
|
||||
|
||||
|
||||
@@ -14,9 +14,12 @@ from iOSbackup import iOSbackup
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DecryptBackup:
|
||||
"""This class provides functions to decrypt an encrypted iTunes backup
|
||||
using either a password or a key file.
|
||||
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, backup_path, dest_path=None):
|
||||
@@ -32,9 +35,12 @@ class DecryptBackup:
|
||||
def can_process(self) -> bool:
|
||||
return self._backup is not None
|
||||
|
||||
def is_encrypted(self, backup_path) -> bool:
|
||||
@staticmethod
|
||||
def is_encrypted(backup_path) -> bool:
|
||||
"""Query Manifest.db file to see if it's encrypted or not.
|
||||
|
||||
:param backup_path: Path to the backup to decrypt
|
||||
|
||||
"""
|
||||
conn = sqlite3.connect(os.path.join(backup_path, "Manifest.db"))
|
||||
cur = conn.cursor()
|
||||
@@ -94,7 +100,9 @@ class DecryptBackup:
|
||||
|
||||
def decrypt_with_password(self, password):
|
||||
"""Decrypts an encrypted iOS backup.
|
||||
|
||||
:param password: Password to use to decrypt the original backup
|
||||
|
||||
"""
|
||||
log.info("Decrypting iOS backup at path %s with password", self.backup_path)
|
||||
|
||||
@@ -130,7 +138,9 @@ class DecryptBackup:
|
||||
|
||||
def decrypt_with_key_file(self, key_file):
|
||||
"""Decrypts an encrypted iOS backup using a key file.
|
||||
|
||||
:param key_file: File to read the key bytes to decrypt the backup
|
||||
|
||||
"""
|
||||
log.info("Decrypting iOS backup at path %s with key file %s",
|
||||
self.backup_path, key_file)
|
||||
@@ -157,8 +167,7 @@ class DecryptBackup:
|
||||
log.critical("Failed to decrypt backup. Did you provide the correct key file?")
|
||||
|
||||
def get_key(self):
|
||||
"""Retrieve and prints the encryption key.
|
||||
"""
|
||||
"""Retrieve and prints the encryption key."""
|
||||
if not self._backup:
|
||||
return
|
||||
|
||||
@@ -168,7 +177,9 @@ class DecryptBackup:
|
||||
|
||||
def write_key(self, key_path):
|
||||
"""Save extracted key to file.
|
||||
|
||||
:param key_path: Path to the file where to write the derived decryption key.
|
||||
|
||||
"""
|
||||
if not self._decryption_key:
|
||||
return
|
||||
|
||||
@@ -30,9 +30,9 @@ class BackupInfo(IOSExtraction):
|
||||
with open(info_path, "rb") as handle:
|
||||
info = plistlib.load(handle)
|
||||
|
||||
fields = ["Build Version", "Device Name", "Display Name", "GUID",
|
||||
fields = ["Build Version", "Device Name", "Display Name",
|
||||
"GUID", "ICCID", "IMEI", "MEID", "Installed Applications",
|
||||
"Last Backup Data", "Phone Number", "Product Name",
|
||||
"Last Backup Date", "Phone Number", "Product Name",
|
||||
"Product Type", "Product Version", "Serial Number",
|
||||
"Target Identifier", "Target Type", "Unique Identifier",
|
||||
"iTunes Version"]
|
||||
|
||||
@@ -1,18 +1,21 @@
|
||||
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import os
|
||||
import plistlib
|
||||
from base64 import b64encode
|
||||
from mvt.common.utils import convert_timestamp_to_iso
|
||||
|
||||
from ..base import IOSExtraction
|
||||
|
||||
CONF_PROFILES_DOMAIN = "SysSharedContainerDomain-systemgroup.com.apple.configurationprofiles"
|
||||
|
||||
|
||||
class ConfigurationProfiles(IOSExtraction):
|
||||
"""This module extracts the full plist data from configuration profiles.
|
||||
"""
|
||||
"""This module extracts the full plist data from configuration profiles."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
@@ -20,23 +23,73 @@ class ConfigurationProfiles(IOSExtraction):
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def serialize(self, record):
|
||||
if not record["install_date"]:
|
||||
return
|
||||
|
||||
payload_name = record['plist'].get('PayloadDisplayName')
|
||||
payload_description = record['plist'].get('PayloadDescription')
|
||||
return {
|
||||
"timestamp": record["install_date"],
|
||||
"module": self.__class__.__name__,
|
||||
"event": "configuration_profile_install",
|
||||
"data": f"{record['plist']['PayloadType']} installed: {record['plist']['PayloadUUID']} - {payload_name}: {payload_description}"
|
||||
}
|
||||
|
||||
def check_indicators(self):
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
if result["plist"].get("PayloadUUID"):
|
||||
payload_content = result["plist"]["PayloadContent"][0]
|
||||
|
||||
# Alert on any known malicious configuration profiles in the indicator list.
|
||||
if self.indicators.check_profile(result["plist"]["PayloadUUID"]):
|
||||
self.log.warning(f"Found a known malicious configuration profile \"{result['plist']['PayloadDisplayName']}\" with UUID '{result['plist']['PayloadUUID']}'.")
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
# Highlight suspicious configuration profiles which may be used to hide notifications.
|
||||
if payload_content["PayloadType"] in ["com.apple.notificationsettings"]:
|
||||
self.log.warning(f"Found a potentially suspicious configuration profile \"{result['plist']['PayloadDisplayName']}\" with payload type '{payload_content['PayloadType']}'.")
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
def run(self):
|
||||
for conf_file in self._get_backup_files_from_manifest(domain=CONF_PROFILES_DOMAIN):
|
||||
conf_rel_path = conf_file["relative_path"]
|
||||
# Filter out all configuration files that are not configuration profiles.
|
||||
if not conf_rel_path or not os.path.basename(conf_rel_path).startswith("profile-"):
|
||||
continue
|
||||
|
||||
conf_file_path = self._get_backup_file_from_id(conf_file["file_id"])
|
||||
if not conf_file_path:
|
||||
continue
|
||||
|
||||
with open(conf_file_path, "rb") as handle:
|
||||
conf_plist = plistlib.load(handle)
|
||||
try:
|
||||
conf_plist = plistlib.load(handle)
|
||||
except Exception:
|
||||
conf_plist = {}
|
||||
|
||||
if "SignerCerts" in conf_plist:
|
||||
conf_plist["SignerCerts"] = [b64encode(x) for x in conf_plist["SignerCerts"]]
|
||||
if "PushTokenDataSentToServerKey" in conf_plist:
|
||||
conf_plist["PushTokenDataSentToServerKey"] = b64encode(conf_plist["PushTokenDataSentToServerKey"])
|
||||
if "LastPushTokenHash" in conf_plist:
|
||||
conf_plist["LastPushTokenHash"] = b64encode(conf_plist["LastPushTokenHash"])
|
||||
if "PayloadContent" in conf_plist:
|
||||
for x in range(len(conf_plist["PayloadContent"])):
|
||||
if "PERSISTENT_REF" in conf_plist["PayloadContent"][x]:
|
||||
conf_plist["PayloadContent"][x]["PERSISTENT_REF"] = b64encode(conf_plist["PayloadContent"][x]["PERSISTENT_REF"])
|
||||
|
||||
self.results.append({
|
||||
"file_id": conf_file["file_id"],
|
||||
"relative_path": conf_file["relative_path"],
|
||||
"domain": conf_file["domain"],
|
||||
"plist": conf_plist,
|
||||
"install_date": convert_timestamp_to_iso(conf_plist.get("InstallDate")),
|
||||
})
|
||||
|
||||
self.log.info("Extracted details about %d configuration profiles", len(self.results))
|
||||
|
||||
@@ -27,11 +27,19 @@ class Manifest(IOSExtraction):
|
||||
def _get_key(self, dictionary, key):
|
||||
"""Unserialized plist objects can have keys which are str or byte types
|
||||
This is a helper that tries to fetch a key as either a bytes or a str type.
|
||||
|
||||
:param dictionary:
|
||||
:param key:
|
||||
|
||||
"""
|
||||
return dictionary.get(key.encode("utf-8"), None) or dictionary.get(key, None)
|
||||
|
||||
def _convert_timestamp(self, timestamp_or_unix_time_int):
|
||||
@staticmethod
|
||||
def _convert_timestamp(timestamp_or_unix_time_int):
|
||||
"""Older iOS versions stored the manifest times as unix timestamps.
|
||||
|
||||
:param timestamp_or_unix_time_int:
|
||||
|
||||
"""
|
||||
if isinstance(timestamp_or_unix_time_int, datetime.datetime):
|
||||
return convert_timestamp_to_iso(timestamp_or_unix_time_int)
|
||||
@@ -64,7 +72,7 @@ class Manifest(IOSExtraction):
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
if not "relative_path" in result:
|
||||
if "relative_path" not in result:
|
||||
continue
|
||||
if not result["relative_path"]:
|
||||
continue
|
||||
@@ -75,7 +83,7 @@ class Manifest(IOSExtraction):
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
if self.indicators.check_file(result["relative_path"]):
|
||||
if self.indicators.check_filename(result["relative_path"]):
|
||||
self.log.warning("Found a known malicious file at path: %s", result["relative_path"])
|
||||
self.detected.append(result)
|
||||
continue
|
||||
@@ -90,7 +98,7 @@ class Manifest(IOSExtraction):
|
||||
def run(self):
|
||||
manifest_db_path = os.path.join(self.base_folder, "Manifest.db")
|
||||
if not os.path.isfile(manifest_db_path):
|
||||
raise DatabaseNotFoundError("Impossible to find the module's database file")
|
||||
raise DatabaseNotFoundError("unable to find backup's Manifest.db")
|
||||
|
||||
self.log.info("Found Manifest.db database at path: %s", manifest_db_path)
|
||||
|
||||
@@ -125,7 +133,7 @@ class Manifest(IOSExtraction):
|
||||
"owner": self._get_key(file_metadata, "UserID"),
|
||||
"size": self._get_key(file_metadata, "Size"),
|
||||
})
|
||||
except:
|
||||
except Exception:
|
||||
self.log.exception("Error reading manifest file metadata for file with ID %s and relative path %s",
|
||||
file_data["fileID"], file_data["relativePath"])
|
||||
pass
|
||||
|
||||
@@ -11,9 +11,12 @@ from ..base import IOSExtraction
|
||||
|
||||
CONF_PROFILES_EVENTS_RELPATH = "Library/ConfigurationProfiles/MCProfileEvents.plist"
|
||||
|
||||
|
||||
class ProfileEvents(IOSExtraction):
|
||||
"""This module extracts events related to the installation of configuration
|
||||
profiles.
|
||||
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
|
||||
@@ -26,32 +26,35 @@ class IOSExtraction(MVTModule):
|
||||
self.is_fs_dump = False
|
||||
self.is_sysdiagnose = False
|
||||
|
||||
def _recover_sqlite_db_if_needed(self, file_path):
|
||||
def _recover_sqlite_db_if_needed(self, file_path, forced=False):
|
||||
"""Tries to recover a malformed database by running a .clone command.
|
||||
|
||||
:param file_path: Path to the malformed database file.
|
||||
|
||||
"""
|
||||
# TODO: Find a better solution.
|
||||
conn = sqlite3.connect(file_path)
|
||||
cur = conn.cursor()
|
||||
if not forced:
|
||||
conn = sqlite3.connect(file_path)
|
||||
cur = conn.cursor()
|
||||
|
||||
try:
|
||||
recover = False
|
||||
cur.execute("SELECT name FROM sqlite_master WHERE type='table';")
|
||||
except sqlite3.DatabaseError as e:
|
||||
if "database disk image is malformed" in str(e):
|
||||
recover = True
|
||||
finally:
|
||||
conn.close()
|
||||
try:
|
||||
recover = False
|
||||
cur.execute("SELECT name FROM sqlite_master WHERE type='table';")
|
||||
except sqlite3.DatabaseError as e:
|
||||
if "database disk image is malformed" in str(e):
|
||||
recover = True
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
if not recover:
|
||||
return
|
||||
if not recover:
|
||||
return
|
||||
|
||||
self.log.info("Database at path %s is malformed. Trying to recover...", file_path)
|
||||
|
||||
if not shutil.which("sqlite3"):
|
||||
raise DatabaseCorruptedError("Unable to recover without sqlite3 binary. Please install sqlite3!")
|
||||
raise DatabaseCorruptedError("failed to recover without sqlite3 binary: please install sqlite3!")
|
||||
if '"' in file_path:
|
||||
raise DatabaseCorruptedError(f"Database at path '{file_path}' is corrupted. unable to recover because it has a quotation mark (\") in its name.")
|
||||
raise DatabaseCorruptedError(f"database at path '{file_path}' is corrupted. unable to recover because it has a quotation mark (\") in its name")
|
||||
|
||||
bak_path = f"{file_path}.bak"
|
||||
shutil.move(file_path, bak_path)
|
||||
@@ -59,18 +62,20 @@ class IOSExtraction(MVTModule):
|
||||
ret = subprocess.call(["sqlite3", bak_path, f".clone \"{file_path}\""],
|
||||
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
if ret != 0:
|
||||
raise DatabaseCorruptedError("Recovery of database failed")
|
||||
raise DatabaseCorruptedError("failed to recover database")
|
||||
|
||||
self.log.info("Database at path %s recovered successfully!", file_path)
|
||||
|
||||
def _get_backup_files_from_manifest(self, relative_path=None, domain=None):
|
||||
"""Locate files from Manifest.db.
|
||||
:param relative_path: Relative path to use as filter from Manifest.db.
|
||||
:param domain: Domain to use as filter from Manifest.db.
|
||||
|
||||
:param relative_path: Relative path to use as filter from Manifest.db. (Default value = None)
|
||||
:param domain: Domain to use as filter from Manifest.db. (Default value = None)
|
||||
|
||||
"""
|
||||
manifest_db_path = os.path.join(self.base_folder, "Manifest.db")
|
||||
if not os.path.exists(manifest_db_path):
|
||||
raise Exception("Unable to find backup's Manifest.db")
|
||||
raise DatabaseNotFoundError("unable to find backup's Manifest.db")
|
||||
|
||||
base_sql = "SELECT fileID, domain, relativePath FROM Files WHERE "
|
||||
|
||||
@@ -86,7 +91,7 @@ class IOSExtraction(MVTModule):
|
||||
elif domain:
|
||||
cur.execute(f"{base_sql} domain = ?;", (domain,))
|
||||
except Exception as e:
|
||||
raise Exception("Query to Manifest.db failed: %s", e)
|
||||
raise DatabaseCorruptedError("failed to query Manifest.db: %s", e)
|
||||
|
||||
for row in cur:
|
||||
yield {
|
||||
@@ -116,8 +121,11 @@ class IOSExtraction(MVTModule):
|
||||
modules that expect to work with a single SQLite database.
|
||||
If a module requires to process multiple databases or files,
|
||||
you should use the helper functions above.
|
||||
|
||||
:param backup_id: iTunes backup database file's ID (or hash).
|
||||
:param root_paths: Glob patterns for files to seek in filesystem dump.
|
||||
:param root_paths: Glob patterns for files to seek in filesystem dump. (Default value = [])
|
||||
:param backup_ids: Default value = None)
|
||||
|
||||
"""
|
||||
file_path = None
|
||||
# First we check if there was an explicit file path specified.
|
||||
@@ -144,6 +152,6 @@ class IOSExtraction(MVTModule):
|
||||
if file_path:
|
||||
self.file_path = file_path
|
||||
else:
|
||||
raise DatabaseNotFoundError("Unable to find the module's database file")
|
||||
raise DatabaseNotFoundError("unable to find the module's database file")
|
||||
|
||||
self._recover_sqlite_db_if_needed(self.file_path)
|
||||
|
||||
@@ -3,14 +3,17 @@
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
from .analytics import Analytics
|
||||
from .cache_files import CacheFiles
|
||||
from .filesystem import Filesystem
|
||||
from .net_netusage import Netusage
|
||||
from .safari_favicon import SafariFavicon
|
||||
from .shutdownlog import ShutdownLog
|
||||
from .version_history import IOSVersionHistory
|
||||
from .webkit_indexeddb import WebkitIndexedDB
|
||||
from .webkit_localstorage import WebkitLocalStorage
|
||||
from .webkit_safariviewservice import WebkitSafariViewService
|
||||
|
||||
FS_MODULES = [CacheFiles, Filesystem, Netusage, SafariFavicon, IOSVersionHistory,
|
||||
WebkitIndexedDB, WebkitLocalStorage, WebkitSafariViewService,]
|
||||
FS_MODULES = [CacheFiles, Filesystem, Netusage, Analytics, SafariFavicon, ShutdownLog,
|
||||
IOSVersionHistory, WebkitIndexedDB, WebkitLocalStorage,
|
||||
WebkitSafariViewService]
|
||||
|
||||
119
mvt/ios/modules/fs/analytics.py
Normal file
@@ -0,0 +1,119 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import plistlib
|
||||
import sqlite3
|
||||
|
||||
from mvt.common.utils import convert_mactime_to_unix, convert_timestamp_to_iso
|
||||
|
||||
from ..base import IOSExtraction
|
||||
|
||||
ANALYTICS_DB_PATH = [
|
||||
"private/var/Keychains/Analytics/*.db",
|
||||
]
|
||||
|
||||
|
||||
class Analytics(IOSExtraction):
|
||||
"""This module extracts information from the private/var/Keychains/Analytics/*.db files."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def serialize(self, record):
|
||||
return {
|
||||
"timestamp": record["timestamp"],
|
||||
"module": self.__class__.__name__,
|
||||
"event": record["artifact"],
|
||||
"data": f"{record}",
|
||||
}
|
||||
|
||||
def check_indicators(self):
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
for ioc in self.indicators.ioc_processes:
|
||||
for key in result.keys():
|
||||
if ioc == result[key]:
|
||||
self.log.warning("Found mention of a malicious process \"%s\" in %s file at %s",
|
||||
ioc, result["artifact"], result["timestamp"])
|
||||
self.detected.append(result)
|
||||
break
|
||||
for ioc in self.indicators.ioc_domains:
|
||||
for key in result.keys():
|
||||
if ioc in str(result[key]):
|
||||
self.log.warning("Found mention of a malicious domain \"%s\" in %s file at %s",
|
||||
ioc, result["artifact"], result["timestamp"])
|
||||
self.detected.append(result)
|
||||
break
|
||||
|
||||
def _extract_analytics_data(self):
|
||||
artifact = self.file_path.split("/")[-1]
|
||||
|
||||
conn = sqlite3.connect(self.file_path)
|
||||
cur = conn.cursor()
|
||||
|
||||
try:
|
||||
cur.execute("""
|
||||
SELECT
|
||||
timestamp,
|
||||
data
|
||||
FROM hard_failures
|
||||
UNION
|
||||
SELECT
|
||||
timestamp,
|
||||
data
|
||||
FROM soft_failures
|
||||
UNION
|
||||
SELECT
|
||||
timestamp,
|
||||
data
|
||||
FROM all_events;
|
||||
""")
|
||||
except sqlite3.OperationalError:
|
||||
cur.execute("""
|
||||
SELECT
|
||||
timestamp,
|
||||
data
|
||||
FROM hard_failures
|
||||
UNION
|
||||
SELECT
|
||||
timestamp,
|
||||
data
|
||||
FROM soft_failures;
|
||||
""")
|
||||
|
||||
for row in cur:
|
||||
if row[0] and row[1]:
|
||||
timestamp = convert_timestamp_to_iso(convert_mactime_to_unix(row[0], False))
|
||||
data = plistlib.loads(row[1])
|
||||
data["timestamp"] = timestamp
|
||||
elif row[0]:
|
||||
timestamp = convert_timestamp_to_iso(convert_mactime_to_unix(row[0], False))
|
||||
data = {}
|
||||
data["timestamp"] = timestamp
|
||||
elif row[1]:
|
||||
timestamp = ""
|
||||
data = plistlib.loads(row[1])
|
||||
data["timestamp"] = timestamp
|
||||
data["artifact"] = artifact
|
||||
|
||||
self.results.append(data)
|
||||
|
||||
self.results = sorted(self.results, key=lambda entry: entry["timestamp"])
|
||||
|
||||
cur.close()
|
||||
conn.close()
|
||||
|
||||
self.log.info("Extracted information on %d analytics data from %s", len(self.results), artifact)
|
||||
|
||||
def run(self):
|
||||
for file_path in self._get_fs_files_from_patterns(ANALYTICS_DB_PATH):
|
||||
self.file_path = file_path
|
||||
self.log.info("Found Analytics database file at path: %s", file_path)
|
||||
self._extract_analytics_data()
|
||||
@@ -38,7 +38,7 @@ class CacheFiles(IOSExtraction):
|
||||
for item in items:
|
||||
if self.indicators.check_domain(item["url"]):
|
||||
if key not in self.detected:
|
||||
self.detected[key] = [item,]
|
||||
self.detected[key] = [item, ]
|
||||
else:
|
||||
self.detected[key].append(item)
|
||||
|
||||
@@ -54,7 +54,7 @@ class CacheFiles(IOSExtraction):
|
||||
return
|
||||
|
||||
key_name = os.path.relpath(file_path, self.base_folder)
|
||||
if not key_name in self.results:
|
||||
if key_name not in self.results:
|
||||
self.results[key_name] = []
|
||||
|
||||
for row in cur:
|
||||
|
||||
@@ -13,7 +13,10 @@ from ..base import IOSExtraction
|
||||
|
||||
class Filesystem(IOSExtraction):
|
||||
"""This module extracts creation and modification date of files from a
|
||||
full file-system dump."""
|
||||
full file-system dump.
|
||||
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
@@ -25,8 +28,8 @@ class Filesystem(IOSExtraction):
|
||||
return {
|
||||
"timestamp": record["modified"],
|
||||
"module": self.__class__.__name__,
|
||||
"event": "file_modified",
|
||||
"data": record["file_path"],
|
||||
"event": "entry_modified",
|
||||
"data": record["path"],
|
||||
}
|
||||
|
||||
def check_indicators(self):
|
||||
@@ -34,19 +37,46 @@ class Filesystem(IOSExtraction):
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
if self.indicators.check_file(result["file_path"]):
|
||||
if self.indicators.check_file(result["path"]):
|
||||
self.log.warning("Found a known malicious file name at path: %s", result["path"])
|
||||
self.detected.append(result)
|
||||
|
||||
if self.indicators.check_file_path(result["path"]):
|
||||
self.log.warning("Found a known malicious file path at path: %s", result["path"])
|
||||
self.detected.append(result)
|
||||
|
||||
# If we are instructed to run fast, we skip this.
|
||||
if self.fast_mode:
|
||||
self.log.info("Flag --fast was enabled: skipping extended search for suspicious files/processes")
|
||||
else:
|
||||
for ioc in self.indicators.ioc_processes:
|
||||
parts = result["path"].split("/")
|
||||
if ioc in parts:
|
||||
self.log.warning("Found a known malicious file/process at path: %s", result["path"])
|
||||
self.detected.append(result)
|
||||
|
||||
def run(self):
|
||||
for root, dirs, files in os.walk(self.base_folder):
|
||||
for dir_name in dirs:
|
||||
try:
|
||||
dir_path = os.path.join(root, dir_name)
|
||||
result = {
|
||||
"path": os.path.relpath(dir_path, self.base_folder),
|
||||
"modified": convert_timestamp_to_iso(datetime.datetime.utcfromtimestamp(os.stat(dir_path).st_mtime)),
|
||||
}
|
||||
except Exception:
|
||||
continue
|
||||
else:
|
||||
self.results.append(result)
|
||||
|
||||
for file_name in files:
|
||||
try:
|
||||
file_path = os.path.join(root, file_name)
|
||||
result = {
|
||||
"file_path": os.path.relpath(file_path, self.base_folder),
|
||||
"path": os.path.relpath(file_path, self.base_folder),
|
||||
"modified": convert_timestamp_to_iso(datetime.datetime.utcfromtimestamp(os.stat(file_path).st_mtime)),
|
||||
}
|
||||
except:
|
||||
except Exception:
|
||||
continue
|
||||
else:
|
||||
self.results.append(result)
|
||||
|
||||
@@ -12,9 +12,13 @@ NETUSAGE_ROOT_PATHS = [
|
||||
"private/var/networkd/db/netusage.sqlite"
|
||||
]
|
||||
|
||||
|
||||
class Netusage(NetBase):
|
||||
"""This class extracts data from netusage.sqlite and attempts to identify
|
||||
any suspicious processes if running on a full filesystem dump."""
|
||||
any suspicious processes if running on a full filesystem dump.
|
||||
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
|
||||
@@ -14,6 +14,7 @@ SAFARI_FAVICON_ROOT_PATHS = [
|
||||
"private/var/mobile/Containers/Data/Application/*/Library/Image Cache/Favicons/Favicons.db",
|
||||
]
|
||||
|
||||
|
||||
class SafariFavicon(IOSExtraction):
|
||||
"""This module extracts all Safari favicon records."""
|
||||
|
||||
|
||||
82
mvt/ios/modules/fs/shutdownlog.py
Normal file
82
mvt/ios/modules/fs/shutdownlog.py
Normal file
@@ -0,0 +1,82 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
from mvt.common.utils import convert_mactime_to_unix, convert_timestamp_to_iso
|
||||
|
||||
from ..base import IOSExtraction
|
||||
|
||||
SHUTDOWN_LOG_PATH = [
|
||||
"private/var/db/diagnostics/shutdown.log",
|
||||
]
|
||||
|
||||
|
||||
class ShutdownLog(IOSExtraction):
|
||||
"""This module extracts processes information from the shutdown log file."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def serialize(self, record):
|
||||
return {
|
||||
"timestamp": record["isodate"],
|
||||
"module": self.__class__.__name__,
|
||||
"event": "shutdown",
|
||||
"data": f"Client {record['client']} with PID {record['pid']} was running when the device was shut down",
|
||||
}
|
||||
|
||||
def check_indicators(self):
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
for ioc in self.indicators.ioc_processes:
|
||||
parts = result["client"].split("/")
|
||||
if ioc in parts:
|
||||
self.log.warning("Found mention of a known malicious process \"%s\" in shutdown.log",
|
||||
ioc)
|
||||
self.detected.append(result)
|
||||
|
||||
def process_shutdownlog(self, content):
|
||||
current_processes = []
|
||||
for line in content.split("\n"):
|
||||
line = line.strip()
|
||||
|
||||
if line.startswith("remaining client pid:"):
|
||||
current_processes.append({
|
||||
"pid": line[line.find("pid: ")+5:line.find(" (")],
|
||||
"client": line[line.find("(")+1:line.find(")")],
|
||||
})
|
||||
elif line.startswith("SIGTERM: "):
|
||||
try:
|
||||
mac_timestamp = int(line[line.find("[")+1:line.find("]")])
|
||||
except ValueError:
|
||||
try:
|
||||
start = line.find(" @")+2
|
||||
mac_timestamp = int(line[start:start+10])
|
||||
except Exception:
|
||||
mac_timestamp = 0
|
||||
|
||||
timestamp = convert_mactime_to_unix(mac_timestamp, from_2001=False)
|
||||
isodate = convert_timestamp_to_iso(timestamp)
|
||||
|
||||
for current_process in current_processes:
|
||||
self.results.append({
|
||||
"isodate": isodate,
|
||||
"pid": current_process["pid"],
|
||||
"client": current_process["client"],
|
||||
})
|
||||
|
||||
current_processes = []
|
||||
|
||||
self.results = sorted(self.results, key=lambda entry: entry["isodate"])
|
||||
|
||||
def run(self):
|
||||
self._find_ios_database(root_paths=SHUTDOWN_LOG_PATH)
|
||||
self.log.info("Found shutdown log at path: %s", self.file_path)
|
||||
with open(self.file_path, "r") as handle:
|
||||
self.process_shutdownlog(handle.read())
|
||||
@@ -14,6 +14,7 @@ IOS_ANALYTICS_JOURNAL_PATHS = [
|
||||
"private/var/db/analyticsd/Analytics-Journal-*.ips",
|
||||
]
|
||||
|
||||
|
||||
class IOSVersionHistory(IOSExtraction):
|
||||
"""This module extracts iOS update history from Analytics Journal log files."""
|
||||
|
||||
|
||||
@@ -9,9 +9,13 @@ WEBKIT_INDEXEDDB_ROOT_PATHS = [
|
||||
"private/var/mobile/Containers/Data/Application/*/Library/WebKit/WebsiteData/IndexedDB",
|
||||
]
|
||||
|
||||
|
||||
class WebkitIndexedDB(WebkitBase):
|
||||
"""This module looks extracts records from WebKit IndexedDB folders,
|
||||
and checks them against any provided list of suspicious domains."""
|
||||
and checks them against any provided list of suspicious domains.
|
||||
|
||||
|
||||
"""
|
||||
|
||||
slug = "webkit_indexeddb"
|
||||
|
||||
|
||||
@@ -9,9 +9,13 @@ WEBKIT_LOCALSTORAGE_ROOT_PATHS = [
|
||||
"private/var/mobile/Containers/Data/Application/*/Library/WebKit/WebsiteData/LocalStorage/",
|
||||
]
|
||||
|
||||
|
||||
class WebkitLocalStorage(WebkitBase):
|
||||
"""This module looks extracts records from WebKit LocalStorage folders,
|
||||
and checks them against any provided list of suspicious domains."""
|
||||
and checks them against any provided list of suspicious domains.
|
||||
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
|
||||
@@ -9,9 +9,13 @@ WEBKIT_SAFARIVIEWSERVICE_ROOT_PATHS = [
|
||||
"private/var/mobile/Containers/Data/Application/*/SystemData/com.apple.SafariViewService/Library/WebKit/WebsiteData/",
|
||||
]
|
||||
|
||||
|
||||
class WebkitSafariViewService(WebkitBase):
|
||||
"""This module looks extracts records from WebKit LocalStorage folders,
|
||||
and checks them against any provided list of suspicious domains."""
|
||||
and checks them against any provided list of suspicious domains.
|
||||
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
|
||||
@@ -13,15 +13,19 @@ from .idstatuscache import IDStatusCache
|
||||
from .interactionc import InteractionC
|
||||
from .locationd import LocationdClients
|
||||
from .net_datausage import Datausage
|
||||
from .osanalytics_addaily import OSAnalyticsADDaily
|
||||
from .safari_browserstate import SafariBrowserState
|
||||
from .safari_history import SafariHistory
|
||||
from .sms import SMS
|
||||
from .sms_attachments import SMSAttachments
|
||||
from .tcc import TCC
|
||||
from .webkit_resource_load_statistics import WebkitResourceLoadStatistics
|
||||
from .webkit_session_resource_log import WebkitSessionResourceLog
|
||||
from .whatsapp import Whatsapp
|
||||
from .shortcuts import Shortcuts
|
||||
|
||||
MIXED_MODULES = [Calls, ChromeFavicon, ChromeHistory, Contacts, FirefoxFavicon,
|
||||
FirefoxHistory, IDStatusCache, InteractionC, LocationdClients,
|
||||
Datausage, SafariBrowserState, SafariHistory, SMS, SMSAttachments,
|
||||
WebkitResourceLoadStatistics, WebkitSessionResourceLog, Whatsapp,]
|
||||
OSAnalyticsADDaily, Datausage, SafariBrowserState, SafariHistory,
|
||||
TCC, SMS, SMSAttachments, WebkitResourceLoadStatistics,
|
||||
WebkitSessionResourceLog, Whatsapp, Shortcuts]
|
||||
|
||||
@@ -16,6 +16,7 @@ CALLS_ROOT_PATHS = [
|
||||
"private/var/mobile/Library/CallHistoryDB/CallHistory.storedata"
|
||||
]
|
||||
|
||||
|
||||
class Calls(IOSExtraction):
|
||||
"""This module extracts phone calls details"""
|
||||
|
||||
@@ -45,7 +46,7 @@ class Calls(IOSExtraction):
|
||||
ZDATE, ZDURATION, ZLOCATION, ZADDRESS, ZSERVICE_PROVIDER
|
||||
FROM ZCALLRECORD;
|
||||
""")
|
||||
names = [description[0] for description in cur.description]
|
||||
# names = [description[0] for description in cur.description]
|
||||
|
||||
for row in cur:
|
||||
self.results.append({
|
||||
|
||||
@@ -19,6 +19,7 @@ CHROME_FAVICON_ROOT_PATHS = [
|
||||
"private/var/mobile/Containers/Data/Application/*/Library/Application Support/Google/Chrome/Default/Favicons",
|
||||
]
|
||||
|
||||
|
||||
class ChromeFavicon(IOSExtraction):
|
||||
"""This module extracts all Chrome favicon records."""
|
||||
|
||||
|
||||
@@ -13,12 +13,12 @@ from ..base import IOSExtraction
|
||||
CHROME_HISTORY_BACKUP_IDS = [
|
||||
"faf971ce92c3ac508c018dce1bef2a8b8e9838f1",
|
||||
]
|
||||
|
||||
# TODO: Confirm Chrome database path.
|
||||
CHROME_HISTORY_ROOT_PATHS = [
|
||||
"private/var/mobile/Containers/Data/Application/*/Library/Application Support/Google/Chrome/Default/History",
|
||||
]
|
||||
|
||||
|
||||
class ChromeHistory(IOSExtraction):
|
||||
"""This module extracts all Chome visits."""
|
||||
|
||||
|
||||
@@ -14,6 +14,7 @@ CONTACTS_ROOT_PATHS = [
|
||||
"private/var/mobile/Library/AddressBook/AddressBook.sqlitedb",
|
||||
]
|
||||
|
||||
|
||||
class Contacts(IOSExtraction):
|
||||
"""This module extracts all contact details from the phone's address book."""
|
||||
|
||||
|
||||
@@ -17,6 +17,7 @@ FIREFOX_HISTORY_ROOT_PATHS = [
|
||||
"private/var/mobile/profile.profile/browser.db",
|
||||
]
|
||||
|
||||
|
||||
class FirefoxFavicon(IOSExtraction):
|
||||
"""This module extracts all Firefox favicon"""
|
||||
|
||||
@@ -39,8 +40,8 @@ class FirefoxFavicon(IOSExtraction):
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
if (self.indicators.check_domain(result.get("url", "")) or
|
||||
self.indicators.check_domain(result.get("history_url", ""))):
|
||||
if (self.indicators.check_domain(result.get("url", "")) or
|
||||
self.indicators.check_domain(result.get("history_url", ""))):
|
||||
self.detected.append(result)
|
||||
|
||||
def run(self):
|
||||
|
||||
@@ -17,9 +17,13 @@ FIREFOX_HISTORY_ROOT_PATHS = [
|
||||
"private/var/mobile/profile.profile/browser.db",
|
||||
]
|
||||
|
||||
|
||||
class FirefoxHistory(IOSExtraction):
|
||||
"""This module extracts all Firefox visits and tries to detect potential
|
||||
network injection attacks."""
|
||||
network injection attacks.
|
||||
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
|
||||
@@ -15,8 +15,10 @@ IDSTATUSCACHE_BACKUP_IDS = [
|
||||
]
|
||||
IDSTATUSCACHE_ROOT_PATHS = [
|
||||
"private/var/mobile/Library/Preferences/com.apple.identityservices.idstatuscache.plist",
|
||||
"private/var/mobile/Library/IdentityServices/idstatuscache.plist",
|
||||
]
|
||||
|
||||
|
||||
class IDStatusCache(IOSExtraction):
|
||||
"""Extracts Apple Authentication information from idstatuscache.plist"""
|
||||
|
||||
@@ -50,12 +52,8 @@ class IDStatusCache(IOSExtraction):
|
||||
result.get("user"))
|
||||
self.detected.append(result)
|
||||
|
||||
def run(self):
|
||||
self._find_ios_database(backup_ids=IDSTATUSCACHE_BACKUP_IDS,
|
||||
root_paths=IDSTATUSCACHE_ROOT_PATHS)
|
||||
self.log.info("Found IDStatusCache plist at path: %s", self.file_path)
|
||||
|
||||
with open(self.file_path, "rb") as handle:
|
||||
def _extract_idstatuscache_entries(self, file_path):
|
||||
with open(file_path, "rb") as handle:
|
||||
file_plist = plistlib.load(handle)
|
||||
|
||||
id_status_cache_entries = []
|
||||
@@ -83,4 +81,16 @@ class IDStatusCache(IOSExtraction):
|
||||
entry["occurrences"] = entry_counter[entry["user"]]
|
||||
self.results.append(entry)
|
||||
|
||||
def run(self):
|
||||
|
||||
if self.is_backup:
|
||||
self._find_ios_database(backup_ids=IDSTATUSCACHE_BACKUP_IDS)
|
||||
self.log.info("Found IDStatusCache plist at path: %s", self.file_path)
|
||||
self._extract_idstatuscache_entries(self.file_path)
|
||||
elif self.is_fs_dump:
|
||||
for idstatuscache_path in self._get_fs_files_from_patterns(IDSTATUSCACHE_ROOT_PATHS):
|
||||
self.file_path = idstatuscache_path
|
||||
self.log.info("Found IDStatusCache plist at path: %s", self.file_path)
|
||||
self._extract_idstatuscache_entries(self.file_path)
|
||||
|
||||
self.log.info("Extracted a total of %d ID Status Cache entries", len(self.results))
|
||||
|
||||
@@ -16,6 +16,7 @@ INTERACTIONC_ROOT_PATHS = [
|
||||
"private/var/mobile/Library/CoreDuet/People/interactionC.db",
|
||||
]
|
||||
|
||||
|
||||
class InteractionC(IOSExtraction):
|
||||
"""This module extracts data from InteractionC db."""
|
||||
|
||||
@@ -54,8 +55,8 @@ class InteractionC(IOSExtraction):
|
||||
"timestamp": record[ts],
|
||||
"module": self.__class__.__name__,
|
||||
"event": ts,
|
||||
"data": f"[{record['bundle_id']}] {record['account']} - from {record['sender_display_name']} " \
|
||||
f"({record['sender_identifier']}) to {record['recipient_display_name']} " \
|
||||
"data": f"[{record['bundle_id']}] {record['account']} - from {record['sender_display_name']} "
|
||||
f"({record['sender_identifier']}) to {record['recipient_display_name']} "
|
||||
f"({record['recipient_identifier']}): {record['content']}"
|
||||
})
|
||||
processed.append(record[ts])
|
||||
@@ -123,8 +124,7 @@ class InteractionC(IOSExtraction):
|
||||
LEFT JOIN Z_2INTERACTIONRECIPIENT ON ZINTERACTIONS.Z_PK== Z_2INTERACTIONRECIPIENT.Z_3INTERACTIONRECIPIENT
|
||||
LEFT JOIN ZCONTACTS RECEIPIENTCONACT ON Z_2INTERACTIONRECIPIENT.Z_2RECIPIENTS== RECEIPIENTCONACT.Z_PK;
|
||||
""")
|
||||
|
||||
names = [description[0] for description in cur.description]
|
||||
# names = [description[0] for description in cur.description]
|
||||
|
||||
for row in cur:
|
||||
self.results.append({
|
||||
|
||||
@@ -14,10 +14,12 @@ LOCATIOND_BACKUP_IDS = [
|
||||
]
|
||||
LOCATIOND_ROOT_PATHS = [
|
||||
"private/var/mobile/Library/Caches/locationd/clients.plist",
|
||||
"private/var/root/Library/Caches/locationd/clients.plist"
|
||||
]
|
||||
|
||||
|
||||
class LocationdClients(IOSExtraction):
|
||||
"""Extract information from apps who used geolocation"""
|
||||
"""Extract information from apps who used geolocation."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
@@ -50,22 +52,40 @@ class LocationdClients(IOSExtraction):
|
||||
|
||||
return records
|
||||
|
||||
def run(self):
|
||||
self._find_ios_database(backup_ids=LOCATIOND_BACKUP_IDS,
|
||||
root_paths=LOCATIOND_ROOT_PATHS)
|
||||
self.log.info("Found Locationd Clients plist at path: %s", self.file_path)
|
||||
def check_indicators(self):
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
with open(self.file_path, "rb") as handle:
|
||||
for result in self.results:
|
||||
parts = result["package"].split("/")
|
||||
proc_name = parts[len(parts)-1]
|
||||
|
||||
if self.indicators.check_process(proc_name):
|
||||
self.detected.append(result)
|
||||
|
||||
def _extract_locationd_entries(self, file_path):
|
||||
with open(file_path, "rb") as handle:
|
||||
file_plist = plistlib.load(handle)
|
||||
|
||||
for app in file_plist:
|
||||
if file_plist[app] is dict:
|
||||
result = file_plist[app]
|
||||
result["package"] = app
|
||||
for ts in self.timestamps:
|
||||
if ts in result.keys():
|
||||
result[ts] = convert_timestamp_to_iso(convert_mactime_to_unix(result[ts]))
|
||||
for key, values in file_plist.items():
|
||||
result = file_plist[key]
|
||||
result["package"] = key
|
||||
for ts in self.timestamps:
|
||||
if ts in result.keys():
|
||||
result[ts] = convert_timestamp_to_iso(convert_mactime_to_unix(result[ts]))
|
||||
|
||||
self.results.append(result)
|
||||
self.results.append(result)
|
||||
|
||||
def run(self):
|
||||
|
||||
if self.is_backup:
|
||||
self._find_ios_database(backup_ids=LOCATIOND_BACKUP_IDS)
|
||||
self.log.info("Found Locationd Clients plist at path: %s", self.file_path)
|
||||
self._extract_locationd_entries(self.file_path)
|
||||
elif self.is_fs_dump:
|
||||
for locationd_path in self._get_fs_files_from_patterns(LOCATIOND_ROOT_PATHS):
|
||||
self.file_path = locationd_path
|
||||
self.log.info("Found Locationd Clients plist at path: %s", self.file_path)
|
||||
self._extract_locationd_entries(self.file_path)
|
||||
|
||||
self.log.info("Extracted a total of %d Locationd Clients entries", len(self.results))
|
||||
|
||||
@@ -12,9 +12,13 @@ DATAUSAGE_ROOT_PATHS = [
|
||||
"private/var/wireless/Library/Databases/DataUsage.sqlite",
|
||||
]
|
||||
|
||||
|
||||
class Datausage(NetBase):
|
||||
"""This class extracts data from DataUsage.sqlite and attempts to identify
|
||||
any suspicious processes if running on a full filesystem dump."""
|
||||
any suspicious processes if running on a full filesystem dump.
|
||||
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
|
||||
65
mvt/ios/modules/mixed/osanalytics_addaily.py
Normal file
65
mvt/ios/modules/mixed/osanalytics_addaily.py
Normal file
@@ -0,0 +1,65 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import plistlib
|
||||
|
||||
from mvt.common.utils import convert_timestamp_to_iso
|
||||
|
||||
from ..base import IOSExtraction
|
||||
|
||||
OSANALYTICS_ADDAILY_BACKUP_IDS = [
|
||||
"f65b5fafc69bbd3c60be019c6e938e146825fa83",
|
||||
]
|
||||
OSANALYTICS_ADDAILY_ROOT_PATHS = [
|
||||
"private/var/mobile/Library/Preferences/com.apple.osanalytics.addaily.plist",
|
||||
]
|
||||
|
||||
|
||||
class OSAnalyticsADDaily(IOSExtraction):
|
||||
"""Extract network usage information by process, from com.apple.osanalytics.addaily.plist"""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def serialize(self, record):
|
||||
record_data = f"{record['package']} WIFI IN: {record['wifi_in']}, WIFI OUT: {record['wifi_out']} - " \
|
||||
f"WWAN IN: {record['wwan_in']}, WWAN OUT: {record['wwan_out']}"
|
||||
return {
|
||||
"timestamp": record["ts"],
|
||||
"module": self.__class__.__name__,
|
||||
"event": "osanalytics_addaily",
|
||||
"data": record_data,
|
||||
}
|
||||
|
||||
def check_indicators(self):
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
if self.indicators.check_process(result["package"]):
|
||||
self.detected.append(result)
|
||||
|
||||
def run(self):
|
||||
self._find_ios_database(backup_ids=OSANALYTICS_ADDAILY_BACKUP_IDS,
|
||||
root_paths=OSANALYTICS_ADDAILY_ROOT_PATHS)
|
||||
self.log.info("Found com.apple.osanalytics.addaily plist at path: %s", self.file_path)
|
||||
|
||||
with open(self.file_path, "rb") as handle:
|
||||
file_plist = plistlib.load(handle)
|
||||
|
||||
for app, values in file_plist.get("netUsageBaseline", {}).items():
|
||||
self.results.append({
|
||||
"package": app,
|
||||
"ts": convert_timestamp_to_iso(values[0]),
|
||||
"wifi_in": values[1],
|
||||
"wifi_out": values[2],
|
||||
"wwan_in": values[3],
|
||||
"wwan_out": values[4],
|
||||
})
|
||||
|
||||
self.log.info("Extracted a total of %d com.apple.osanalytics.addaily entries", len(self.results))
|
||||
@@ -13,15 +13,13 @@ from mvt.common.utils import (convert_mactime_to_unix,
|
||||
|
||||
from ..base import IOSExtraction
|
||||
|
||||
SAFARI_BROWSER_STATE_BACKUP_IDS = [
|
||||
"3a47b0981ed7c10f3e2800aa66bac96a3b5db28e",
|
||||
]
|
||||
SAFARI_BROWSER_STATE_BACKUP_RELPATH = "Library/Safari/BrowserState.db"
|
||||
SAFARI_BROWSER_STATE_ROOT_PATHS = [
|
||||
"private/var/mobile/Library/Safari/BrowserState.db",
|
||||
"private/var/mobile/Containers/Data/Application/*/Library/Safari/BrowserState.db",
|
||||
]
|
||||
|
||||
|
||||
class SafariBrowserState(IOSExtraction):
|
||||
"""This module extracts all Safari browser state records."""
|
||||
|
||||
@@ -50,7 +48,7 @@ class SafariBrowserState(IOSExtraction):
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
if not "session_data" in result:
|
||||
if "session_data" not in result:
|
||||
continue
|
||||
|
||||
for session_entry in result["session_data"]:
|
||||
@@ -61,17 +59,26 @@ class SafariBrowserState(IOSExtraction):
|
||||
conn = sqlite3.connect(db_path)
|
||||
|
||||
cur = conn.cursor()
|
||||
cur.execute("""
|
||||
SELECT
|
||||
tabs.title,
|
||||
tabs.url,
|
||||
tabs.user_visible_url,
|
||||
tabs.last_viewed_time,
|
||||
tab_sessions.session_data
|
||||
FROM tabs
|
||||
JOIN tab_sessions ON tabs.uuid = tab_sessions.tab_uuid
|
||||
ORDER BY tabs.last_viewed_time;
|
||||
""")
|
||||
try:
|
||||
cur.execute("""
|
||||
SELECT
|
||||
tabs.title,
|
||||
tabs.url,
|
||||
tabs.user_visible_url,
|
||||
tabs.last_viewed_time,
|
||||
tab_sessions.session_data
|
||||
FROM tabs
|
||||
JOIN tab_sessions ON tabs.uuid = tab_sessions.tab_uuid
|
||||
ORDER BY tabs.last_viewed_time;
|
||||
""")
|
||||
except sqlite3.OperationalError:
|
||||
# Old version iOS <12 likely
|
||||
cur.execute("""
|
||||
SELECT
|
||||
title, url, user_visible_url, last_viewed_time, session_data
|
||||
FROM tabs
|
||||
ORDER BY last_viewed_time;
|
||||
""")
|
||||
|
||||
for row in cur:
|
||||
session_entries = []
|
||||
@@ -101,12 +108,17 @@ class SafariBrowserState(IOSExtraction):
|
||||
})
|
||||
|
||||
def run(self):
|
||||
# TODO: Is there really only one BrowserState.db in a device?
|
||||
self._find_ios_database(backup_ids=SAFARI_BROWSER_STATE_BACKUP_IDS,
|
||||
root_paths=SAFARI_BROWSER_STATE_ROOT_PATHS)
|
||||
self.log.info("Found Safari browser state database at path: %s", self.file_path)
|
||||
|
||||
self._process_browser_state_db(self.file_path)
|
||||
if self.is_backup:
|
||||
for backup_file in self._get_backup_files_from_manifest(relative_path=SAFARI_BROWSER_STATE_BACKUP_RELPATH):
|
||||
self.file_path = self._get_backup_file_from_id(backup_file["file_id"])
|
||||
self.log.info("Found Safari browser state database at path: %s", self.file_path)
|
||||
self._process_browser_state_db(self.file_path)
|
||||
elif self.is_fs_dump:
|
||||
for safari_browserstate_path in self._get_fs_files_from_patterns(SAFARI_BROWSER_STATE_ROOT_PATHS):
|
||||
self.file_path = safari_browserstate_path
|
||||
self.log.info("Found Safari browser state database at path: %s", self.file_path)
|
||||
self._process_browser_state_db(self.file_path)
|
||||
|
||||
self.log.info("Extracted a total of %d tab records and %d session history entries",
|
||||
len(self.results), self._session_history_count)
|
||||
|
||||
@@ -17,9 +17,13 @@ SAFARI_HISTORY_ROOT_PATHS = [
|
||||
"private/var/mobile/Containers/Data/Application/*/Library/Safari/History.db",
|
||||
]
|
||||
|
||||
|
||||
class SafariHistory(IOSExtraction):
|
||||
"""This module extracts all Safari visits and tries to detect potential
|
||||
network injection attacks."""
|
||||
network injection attacks.
|
||||
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
@@ -59,7 +63,7 @@ class SafariHistory(IOSExtraction):
|
||||
continue
|
||||
|
||||
self.log.info("Found HTTP redirect to different domain: \"%s\" -> \"%s\"",
|
||||
origin_domain, redirect_domain)
|
||||
origin_domain, redirect_domain)
|
||||
|
||||
redirect_time = convert_mactime_to_unix(redirect["timestamp"])
|
||||
origin_time = convert_mactime_to_unix(result["timestamp"])
|
||||
@@ -80,6 +84,7 @@ class SafariHistory(IOSExtraction):
|
||||
self.detected.append(result)
|
||||
|
||||
def _process_history_db(self, history_path):
|
||||
self._recover_sqlite_db_if_needed(history_path)
|
||||
conn = sqlite3.connect(history_path)
|
||||
cur = conn.cursor()
|
||||
cur.execute("""
|
||||
|
||||
103
mvt/ios/modules/mixed/shortcuts.py
Normal file
103
mvt/ios/modules/mixed/shortcuts.py
Normal file
@@ -0,0 +1,103 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import sqlite3
|
||||
import io
|
||||
import plistlib
|
||||
import itertools
|
||||
|
||||
from mvt.common.utils import check_for_links, convert_mactime_to_unix, convert_timestamp_to_iso
|
||||
|
||||
from ..base import IOSExtraction
|
||||
|
||||
SHORTCUT_BACKUP_IDS = [
|
||||
"5b4d0b44b5990f62b9f4d34ad8dc382bf0b01094",
|
||||
]
|
||||
SHORTCUT_ROOT_PATHS = [
|
||||
"private/var/mobile/Library/Shortcuts/Shortcuts.sqlite",
|
||||
]
|
||||
|
||||
|
||||
class Shortcuts(IOSExtraction):
|
||||
"""This module extracts all info about SMS/iMessage attachments."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def serialize(self, record):
|
||||
found_urls = ""
|
||||
if record["action_urls"]:
|
||||
found_urls = "- URLs in actions: {}".format(", ".join(record["action_urls"]))
|
||||
|
||||
return {
|
||||
"timestamp": record["isodate"],
|
||||
"module": self.__class__.__name__,
|
||||
"event": "shortcut",
|
||||
"data": f"iOS Shortcut '{record['shortcut_name']}': {record['description']} {found_urls}"
|
||||
}
|
||||
|
||||
def check_indicators(self):
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for action in self.results:
|
||||
if self.indicators.check_domains(action["action_urls"]):
|
||||
self.detected.append(action)
|
||||
|
||||
def run(self):
|
||||
self._find_ios_database(backup_ids=SHORTCUT_BACKUP_IDS,
|
||||
root_paths=SHORTCUT_ROOT_PATHS)
|
||||
self.log.info("Found Shortcuts database at path: %s", self.file_path)
|
||||
|
||||
conn = sqlite3.connect(self.file_path)
|
||||
conn.text_factory = bytes
|
||||
cur = conn.cursor()
|
||||
cur.execute("""
|
||||
SELECT
|
||||
ZSHORTCUT.Z_PK as "shortcut_id",
|
||||
ZSHORTCUT.ZNAME as "shortcut_name",
|
||||
ZSHORTCUT.ZCREATIONDATE as "created_date",
|
||||
ZSHORTCUT.ZMODIFICATIONDATE as "modified_date",
|
||||
ZSHORTCUT.ZACTIONSDESCRIPTION as "description",
|
||||
ZSHORTCUTACTIONS.ZDATA as "action_data"
|
||||
FROM ZSHORTCUT
|
||||
LEFT JOIN ZSHORTCUTACTIONS ON ZSHORTCUTACTIONS.ZSHORTCUT == ZSHORTCUT.Z_PK;
|
||||
""")
|
||||
names = [description[0] for description in cur.description]
|
||||
|
||||
for item in cur:
|
||||
shortcut = {}
|
||||
# We store the value of each column under the proper key.
|
||||
for index, value in enumerate(item):
|
||||
shortcut[names[index]] = value
|
||||
|
||||
action_data = plistlib.load(io.BytesIO(shortcut.pop("action_data", [])))
|
||||
actions = []
|
||||
for action_entry in action_data:
|
||||
action = {}
|
||||
action["identifier"] = action_entry["WFWorkflowActionIdentifier"]
|
||||
action["parameters"] = action_entry["WFWorkflowActionParameters"]
|
||||
|
||||
# URLs might be in multiple fields, do a simple regex search across the parameters
|
||||
extracted_urls = check_for_links(str(action["parameters"]))
|
||||
|
||||
# Remove quoting characters that may have been captured by the regex
|
||||
action["urls"] = [url.rstrip("',") for url in extracted_urls]
|
||||
actions.append(action)
|
||||
|
||||
# pprint.pprint(actions)
|
||||
shortcut["isodate"] = convert_timestamp_to_iso(convert_mactime_to_unix(shortcut.pop("created_date")))
|
||||
shortcut["modified_date"] = convert_timestamp_to_iso(convert_mactime_to_unix(shortcut["modified_date"]))
|
||||
shortcut["parsed_actions"] = len(actions)
|
||||
shortcut["action_urls"] = list(itertools.chain(*[action["urls"] for action in actions]))
|
||||
self.results.append(shortcut)
|
||||
|
||||
cur.close()
|
||||
conn.close()
|
||||
|
||||
self.log.info("Extracted a total of %d Shortcuts", len(self.results))
|
||||
@@ -18,6 +18,7 @@ SMS_ROOT_PATHS = [
|
||||
"private/var/mobile/Library/SMS/sms.db",
|
||||
]
|
||||
|
||||
|
||||
class SMS(IOSExtraction):
|
||||
"""This module extracts all SMS messages containing links."""
|
||||
|
||||
@@ -50,25 +51,44 @@ class SMS(IOSExtraction):
|
||||
root_paths=SMS_ROOT_PATHS)
|
||||
self.log.info("Found SMS database at path: %s", self.file_path)
|
||||
|
||||
conn = sqlite3.connect(self.file_path)
|
||||
cur = conn.cursor()
|
||||
cur.execute("""
|
||||
SELECT
|
||||
message.*,
|
||||
handle.id as "phone_number"
|
||||
FROM message, handle
|
||||
WHERE handle.rowid = message.handle_id;
|
||||
""")
|
||||
try:
|
||||
conn = sqlite3.connect(self.file_path)
|
||||
cur = conn.cursor()
|
||||
cur.execute("""
|
||||
SELECT
|
||||
message.*,
|
||||
handle.id as "phone_number"
|
||||
FROM message, handle
|
||||
WHERE handle.rowid = message.handle_id;
|
||||
""")
|
||||
# Force the query early to catch database issues
|
||||
items = list(cur)
|
||||
except sqlite3.DatabaseError as e:
|
||||
conn.close()
|
||||
if "database disk image is malformed" in str(e):
|
||||
self._recover_sqlite_db_if_needed(self.file_path, forced=True)
|
||||
conn = sqlite3.connect(self.file_path)
|
||||
cur = conn.cursor()
|
||||
cur.execute("""
|
||||
SELECT
|
||||
message.*,
|
||||
handle.id as "phone_number"
|
||||
FROM message, handle
|
||||
WHERE handle.rowid = message.handle_id;
|
||||
""")
|
||||
items = list(cur)
|
||||
else:
|
||||
raise e
|
||||
names = [description[0] for description in cur.description]
|
||||
|
||||
for item in cur:
|
||||
for item in items:
|
||||
message = {}
|
||||
for index, value in enumerate(item):
|
||||
# We base64 escape some of the attributes that could contain
|
||||
# binary data.
|
||||
if (names[index] == "attributedBody" or
|
||||
names[index] == "payload_data" or
|
||||
names[index] == "message_summary_info") and value:
|
||||
names[index] == "payload_data" or
|
||||
names[index] == "message_summary_info") and value:
|
||||
value = b64encode(value).decode()
|
||||
|
||||
# We store the value of each column under the proper key.
|
||||
@@ -82,12 +102,16 @@ class SMS(IOSExtraction):
|
||||
if not message.get("text", None):
|
||||
message["text"] = ""
|
||||
|
||||
# Extract links from the SMS message.
|
||||
message_links = check_for_links(message.get("text", ""))
|
||||
|
||||
# If we find links in the messages or if they are empty we add them to the list.
|
||||
if message_links or message.get("text", "").strip() == "":
|
||||
if message.get("text", "").startswith("ALERT: State-sponsored attackers may be targeting your iPhone"):
|
||||
self.log.warn("Apple warning about state-sponsored attack received on the %s", message["isodate"])
|
||||
self.results.append(message)
|
||||
else:
|
||||
# Extract links from the SMS message.
|
||||
message_links = check_for_links(message.get("text", ""))
|
||||
|
||||
# If we find links in the messages or if they are empty we add them to the list.
|
||||
if message_links or message.get("text", "").strip() == "":
|
||||
self.results.append(message)
|
||||
|
||||
cur.close()
|
||||
conn.close()
|
||||
|
||||
@@ -17,6 +17,7 @@ SMS_ROOT_PATHS = [
|
||||
"private/var/mobile/Library/SMS/sms.db",
|
||||
]
|
||||
|
||||
|
||||
class SMSAttachments(IOSExtraction):
|
||||
"""This module extracts all info about SMS/iMessage attachments."""
|
||||
|
||||
@@ -45,7 +46,7 @@ class SMSAttachments(IOSExtraction):
|
||||
cur.execute("""
|
||||
SELECT
|
||||
attachment.ROWID as "attachment_id",
|
||||
attachment.*,
|
||||
attachment.*,
|
||||
message.service as "service",
|
||||
handle.id as "phone_number"
|
||||
FROM attachment
|
||||
@@ -73,7 +74,7 @@ class SMSAttachments(IOSExtraction):
|
||||
attachment["filename"] = attachment["filename"] or "NULL"
|
||||
|
||||
if (attachment["filename"].startswith("/var/tmp/") and attachment["filename"].endswith("-1") and
|
||||
attachment["direction"] == "received"):
|
||||
attachment["direction"] == "received"):
|
||||
self.log.warn(f"Suspicious iMessage attachment '{attachment['filename']}' on {attachment['isodate']}")
|
||||
self.detected.append(attachment)
|
||||
|
||||
|
||||
155
mvt/ios/modules/mixed/tcc.py
Normal file
155
mvt/ios/modules/mixed/tcc.py
Normal file
@@ -0,0 +1,155 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import sqlite3
|
||||
from datetime import datetime
|
||||
|
||||
from mvt.common.utils import convert_timestamp_to_iso
|
||||
|
||||
from ..base import IOSExtraction
|
||||
|
||||
TCC_BACKUP_IDS = [
|
||||
"64d0019cb3d46bfc8cce545a8ba54b93e7ea9347",
|
||||
]
|
||||
TCC_ROOT_PATHS = [
|
||||
"private/var/mobile/Library/TCC/TCC.db",
|
||||
]
|
||||
|
||||
AUTH_VALUE_OLD = {
|
||||
0: "denied",
|
||||
1: "allowed"
|
||||
}
|
||||
|
||||
AUTH_VALUES = {
|
||||
0: "denied",
|
||||
1: "unknown",
|
||||
2: "allowed",
|
||||
3: "limited",
|
||||
}
|
||||
AUTH_REASONS = {
|
||||
1: "error",
|
||||
2: "user_consent",
|
||||
3: "user_set",
|
||||
4: "system_set",
|
||||
5: "service_policy",
|
||||
6: "mdm_policy",
|
||||
7: "override_policy",
|
||||
8: "missing_usage_string",
|
||||
9: "prompt_timeout",
|
||||
10: "preflight_unknown",
|
||||
11: "entitled",
|
||||
12: "app_type_policy",
|
||||
}
|
||||
|
||||
|
||||
class TCC(IOSExtraction):
|
||||
"""This module extracts records from the TCC.db SQLite database."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def serialize(self, record):
|
||||
if "last_modified" in record:
|
||||
if "allowed_value" in record:
|
||||
msg = f"Access to {record['service']} by {record['client']} {record['allowed_value']}"
|
||||
else:
|
||||
msg = f"Access to {record['service']} by {record['client']} {record['auth_value']}"
|
||||
return {
|
||||
"timestamp": record["last_modified"],
|
||||
"module": self.__class__.__name__,
|
||||
"event": "AccessRequest",
|
||||
"data": msg
|
||||
}
|
||||
|
||||
def process_db(self, file_path):
|
||||
conn = sqlite3.connect(file_path)
|
||||
cur = conn.cursor()
|
||||
db_version = "v3"
|
||||
try:
|
||||
cur.execute("""SELECT
|
||||
service, client, client_type, auth_value, auth_reason, last_modified
|
||||
FROM access;""")
|
||||
except sqlite3.OperationalError:
|
||||
# v2 version
|
||||
try:
|
||||
cur.execute("""SELECT
|
||||
service, client, client_type, allowed, prompt_count, last_modified
|
||||
FROM access;""")
|
||||
db_version = "v2"
|
||||
except sqlite3.OperationalError:
|
||||
cur.execute("""SELECT
|
||||
service, client, client_type, allowed, prompt_count
|
||||
FROM access;""")
|
||||
db_version = "v1"
|
||||
|
||||
|
||||
for row in cur:
|
||||
service = row[0]
|
||||
client = row[1]
|
||||
client_type = row[2]
|
||||
client_type_desc = "bundle_id" if client_type == 0 else "absolute_path"
|
||||
if db_version == "v3":
|
||||
auth_value = row[3]
|
||||
auth_value_desc = AUTH_VALUES.get(auth_value, "")
|
||||
auth_reason = row[4]
|
||||
auth_reason_desc = AUTH_REASONS.get(auth_reason, "unknown")
|
||||
last_modified = convert_timestamp_to_iso(datetime.utcfromtimestamp((row[5])))
|
||||
|
||||
if service in ["kTCCServiceMicrophone", "kTCCServiceCamera"]:
|
||||
device = "microphone" if service == "kTCCServiceMicrophone" else "camera"
|
||||
self.log.info("Found client \"%s\" with access %s to %s on %s by %s",
|
||||
client, auth_value_desc, device, last_modified, auth_reason_desc)
|
||||
|
||||
self.results.append({
|
||||
"service": service,
|
||||
"client": client,
|
||||
"client_type": client_type_desc,
|
||||
"auth_value": auth_value_desc,
|
||||
"auth_reason_desc": auth_reason_desc,
|
||||
"last_modified": last_modified,
|
||||
})
|
||||
else:
|
||||
allowed_value = row[3]
|
||||
allowed_desc = AUTH_VALUE_OLD.get(allowed_value, "")
|
||||
prompt_count = row[4]
|
||||
if db_version == "v2":
|
||||
last_modified = convert_timestamp_to_iso(datetime.utcfromtimestamp((row[5])))
|
||||
if service in ["kTCCServiceMicrophone", "kTCCServiceCamera"]:
|
||||
device = "microphone" if service == "kTCCServiceMicrophone" else "camera"
|
||||
self.log.info("Found client \"%s\" with access %s to %s at %s",
|
||||
client, allowed_desc, device, last_modified)
|
||||
self.results.append({
|
||||
"service": service,
|
||||
"client": client,
|
||||
"client_type": client_type_desc,
|
||||
"allowed_value": allowed_desc,
|
||||
"prompt_count": prompt_count,
|
||||
"last_modified": last_modified
|
||||
})
|
||||
else:
|
||||
if service in ["kTCCServiceMicrophone", "kTCCServiceCamera"]:
|
||||
device = "microphone" if service == "kTCCServiceMicrophone" else "camera"
|
||||
self.log.info("Found client \"%s\" with access %s to %s",
|
||||
client, allowed_desc, device)
|
||||
self.results.append({
|
||||
"service": service,
|
||||
"client": client,
|
||||
"client_type": client_type_desc,
|
||||
"allowed_value": allowed_desc,
|
||||
"prompt_count": prompt_count
|
||||
})
|
||||
|
||||
cur.close()
|
||||
conn.close()
|
||||
|
||||
def run(self):
|
||||
self._find_ios_database(backup_ids=TCC_BACKUP_IDS, root_paths=TCC_ROOT_PATHS)
|
||||
self.log.info("Found TCC database at path: %s", self.file_path)
|
||||
self.process_db(self.file_path)
|
||||
|
||||
self.log.info("Extracted a total of %d TCC items", len(self.results))
|
||||
@@ -17,9 +17,9 @@ WEBKIT_RESOURCELOADSTATICS_ROOT_PATHS = [
|
||||
"private/var/mobile/Containers/Data/Application/*/SystemData/com.apple.SafariViewService/Library/WebKit/WebsiteData/observations.db",
|
||||
]
|
||||
|
||||
|
||||
class WebkitResourceLoadStatistics(IOSExtraction):
|
||||
"""This module extracts records from WebKit ResourceLoadStatistics observations.db.
|
||||
"""
|
||||
"""This module extracts records from WebKit ResourceLoadStatistics observations.db."""
|
||||
# TODO: Add serialize().
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
@@ -39,7 +39,7 @@ class WebkitResourceLoadStatistics(IOSExtraction):
|
||||
for item in items:
|
||||
if self.indicators.check_domain(item["registrable_domain"]):
|
||||
if key not in self.detected:
|
||||
self.detected[key] = [item,]
|
||||
self.detected[key] = [item, ]
|
||||
else:
|
||||
self.detected[key].append(item)
|
||||
|
||||
@@ -56,7 +56,7 @@ class WebkitResourceLoadStatistics(IOSExtraction):
|
||||
except sqlite3.OperationalError:
|
||||
return
|
||||
|
||||
if not key in self.results:
|
||||
if key not in self.results:
|
||||
self.results[key] = []
|
||||
|
||||
for row in cur:
|
||||
@@ -75,7 +75,7 @@ class WebkitResourceLoadStatistics(IOSExtraction):
|
||||
if self.is_backup:
|
||||
try:
|
||||
for backup_file in self._get_backup_files_from_manifest(relative_path=WEBKIT_RESOURCELOADSTATICS_BACKUP_RELPATH):
|
||||
db_path = os.path.join(self.base_folder, backup_file["file_id"][0:2], backup_file["file_id"])
|
||||
db_path = self._get_backup_file_from_id(backup_file["file_id"])
|
||||
key = f"{backup_file['domain']}/{WEBKIT_RESOURCELOADSTATICS_BACKUP_RELPATH}"
|
||||
self._process_observations_db(db_path=db_path, key=key)
|
||||
except Exception as e:
|
||||
|
||||
@@ -20,10 +20,14 @@ WEBKIT_SESSION_RESOURCE_LOG_ROOT_PATHS = [
|
||||
"private/var/mobile/Library/WebClips/*/Storage/full_browsing_session_resourceLog.plist",
|
||||
]
|
||||
|
||||
|
||||
class WebkitSessionResourceLog(IOSExtraction):
|
||||
"""This module extracts records from WebKit browsing session
|
||||
resource logs, and checks them against any provided list of
|
||||
suspicious domains."""
|
||||
suspicious domains.
|
||||
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
@@ -113,7 +117,10 @@ class WebkitSessionResourceLog(IOSExtraction):
|
||||
|
||||
def run(self):
|
||||
if self.is_backup:
|
||||
for log_path in self._get_backup_files_from_manifest(relative_path=WEBKIT_SESSION_RESOURCE_LOG_BACKUP_RELPATH):
|
||||
for log_file in self._get_backup_files_from_manifest(relative_path=WEBKIT_SESSION_RESOURCE_LOG_BACKUP_RELPATH):
|
||||
log_path = self._get_backup_file_from_id(log_file["file_id"])
|
||||
if not log_path:
|
||||
continue
|
||||
self.log.info("Found Safari browsing session resource log at path: %s", log_path)
|
||||
self.results[log_path] = self._extract_browsing_stats(log_path)
|
||||
elif self.is_fs_dump:
|
||||
|
||||
@@ -20,6 +20,7 @@ WHATSAPP_ROOT_PATHS = [
|
||||
"private/var/mobile/Containers/Shared/AppGroup/*/ChatStorage.sqlite",
|
||||
]
|
||||
|
||||
|
||||
class Whatsapp(IOSExtraction):
|
||||
"""This module extracts all WhatsApp messages containing links."""
|
||||
|
||||
@@ -31,11 +32,14 @@ class Whatsapp(IOSExtraction):
|
||||
|
||||
def serialize(self, record):
|
||||
text = record.get("ZTEXT", "").replace("\n", "\\n")
|
||||
links_text = ""
|
||||
if record["links"]:
|
||||
links_text = " - Embedded links: " + ", ".join(record["links"])
|
||||
return {
|
||||
"timestamp": record.get("isodate"),
|
||||
"module": self.__class__.__name__,
|
||||
"event": "message",
|
||||
"data": f"{text} from {record.get('ZFROMJID', 'Unknown')}",
|
||||
"data": f"\'{text}\' from {record.get('ZFROMJID', 'Unknown')}{links_text}",
|
||||
}
|
||||
|
||||
def check_indicators(self):
|
||||
@@ -43,8 +47,7 @@ class Whatsapp(IOSExtraction):
|
||||
return
|
||||
|
||||
for message in self.results:
|
||||
message_links = check_for_links(message.get("ZTEXT", ""))
|
||||
if self.indicators.check_domains(message_links):
|
||||
if self.indicators.check_domains(message["links"]):
|
||||
self.detected.append(message)
|
||||
|
||||
def run(self):
|
||||
@@ -54,26 +57,49 @@ class Whatsapp(IOSExtraction):
|
||||
|
||||
conn = sqlite3.connect(self.file_path)
|
||||
cur = conn.cursor()
|
||||
cur.execute("SELECT * FROM ZWAMESSAGE;")
|
||||
|
||||
# Query all messages and join tables which can contain media attachments and links
|
||||
cur.execute("""
|
||||
SELECT
|
||||
ZWAMESSAGE.*,
|
||||
ZWAMEDIAITEM.ZAUTHORNAME,
|
||||
ZWAMEDIAITEM.ZMEDIAURL,
|
||||
ZWAMESSAGEDATAITEM.ZCONTENT1,
|
||||
ZWAMESSAGEDATAITEM.ZCONTENT2,
|
||||
ZWAMESSAGEDATAITEM.ZMATCHEDTEXT,
|
||||
ZWAMESSAGEDATAITEM.ZSUMMARY,
|
||||
ZWAMESSAGEDATAITEM.ZTITLE
|
||||
FROM ZWAMESSAGE
|
||||
LEFT JOIN ZWAMEDIAITEM ON ZWAMEDIAITEM.ZMESSAGE = ZWAMESSAGE.Z_PK
|
||||
LEFT JOIN ZWAMESSAGEDATAITEM ON ZWAMESSAGEDATAITEM.ZMESSAGE = ZWAMESSAGE.Z_PK;
|
||||
""")
|
||||
names = [description[0] for description in cur.description]
|
||||
|
||||
for message in cur:
|
||||
new_message = {}
|
||||
for index, value in enumerate(message):
|
||||
new_message[names[index]] = value
|
||||
for message_row in cur:
|
||||
message = {}
|
||||
for index, value in enumerate(message_row):
|
||||
message[names[index]] = value
|
||||
|
||||
if not new_message.get("ZTEXT", None):
|
||||
continue
|
||||
message["isodate"] = convert_timestamp_to_iso(convert_mactime_to_unix(message.get("ZMESSAGEDATE")))
|
||||
message["ZTEXT"] = message["ZTEXT"] if message["ZTEXT"] else ""
|
||||
|
||||
# We convert Mac's silly timestamp again.
|
||||
new_message["isodate"] = convert_timestamp_to_iso(convert_mactime_to_unix(new_message.get("ZMESSAGEDATE")))
|
||||
# Extract links from the WhatsApp message. URLs can be stored in multiple fields/columns. Check each of them!
|
||||
message_links = []
|
||||
fields_with_links = ["ZTEXT", "ZMATCHEDTEXT", "ZMEDIAURL", "ZCONTENT1", "ZCONTENT2"]
|
||||
for field in fields_with_links:
|
||||
if message.get(field):
|
||||
message_links.extend(check_for_links(message.get(field, "")))
|
||||
|
||||
# Extract links from the WhatsApp message.
|
||||
message_links = check_for_links(new_message["ZTEXT"])
|
||||
# Remove WhatsApp internal media URLs
|
||||
filtered_links = []
|
||||
for link in message_links:
|
||||
if not (link.startswith("https://mmg-fna.whatsapp.net/") or link.startswith("https://mmg.whatsapp.net/")):
|
||||
filtered_links.append(link)
|
||||
|
||||
# If we find messages, or if there's an empty message we add it to the list.
|
||||
if new_message["ZTEXT"] and (message_links or new_message["ZTEXT"].strip() == ""):
|
||||
self.results.append(new_message)
|
||||
# If we find messages with links, or if there's an empty message we add it to the results list.
|
||||
if filtered_links or (message.get("ZTEXT") or "").strip() == "":
|
||||
message["links"] = list(set(filtered_links))
|
||||
self.results.append(message)
|
||||
|
||||
cur.close()
|
||||
conn.close()
|
||||
|
||||
@@ -39,7 +39,9 @@ class NetBase(IOSExtraction):
|
||||
ZLIVEUSAGE.ZHASPROCESS,
|
||||
ZLIVEUSAGE.ZTIMESTAMP
|
||||
FROM ZLIVEUSAGE
|
||||
LEFT JOIN ZPROCESS ON ZLIVEUSAGE.ZHASPROCESS = ZPROCESS.Z_PK;
|
||||
LEFT JOIN ZPROCESS ON ZLIVEUSAGE.ZHASPROCESS = ZPROCESS.Z_PK
|
||||
UNION
|
||||
SELECT ZFIRSTTIMESTAMP, ZTIMESTAMP, ZPROCNAME, ZBUNDLENAME, Z_PK, NULL, NULL, NULL, NULL, NULL, NULL, NULL FROM ZPROCESS WHERE Z_PK NOT IN (SELECT ZHASPROCESS FROM ZLIVEUSAGE);
|
||||
""")
|
||||
|
||||
for row in cur:
|
||||
@@ -68,7 +70,7 @@ class NetBase(IOSExtraction):
|
||||
"wwan_out": row[8],
|
||||
"live_id": row[9],
|
||||
"live_proc_id": row[10],
|
||||
"live_isodate": live_timestamp,
|
||||
"live_isodate": live_timestamp if row[10] else first_isodate,
|
||||
})
|
||||
|
||||
cur.close()
|
||||
@@ -79,7 +81,7 @@ class NetBase(IOSExtraction):
|
||||
def serialize(self, record):
|
||||
record_data = f"{record['proc_name']} (Bundle ID: {record['bundle_id']}, ID: {record['proc_id']})"
|
||||
record_data_usage = record_data + f" WIFI IN: {record['wifi_in']}, WIFI OUT: {record['wifi_out']} - " \
|
||||
f"WWAN IN: {record['wwan_in']}, WWAN OUT: {record['wwan_out']}"
|
||||
f"WWAN IN: {record['wwan_in']}, WWAN OUT: {record['wwan_out']}"
|
||||
|
||||
records = [{
|
||||
"timestamp": record["live_isodate"],
|
||||
@@ -89,7 +91,7 @@ class NetBase(IOSExtraction):
|
||||
}]
|
||||
|
||||
# Only included first_usage and current_usage records when a ZPROCESS entry exists.
|
||||
if "MANIPULATED" not in record["proc_name"] and "MISSING" not in record["proc_name"]:
|
||||
if "MANIPULATED" not in record["proc_name"] and "MISSING" not in record["proc_name"] and record["live_proc_id"] is not None:
|
||||
records.extend([
|
||||
{
|
||||
"timestamp": record["first_isodate"],
|
||||
@@ -145,14 +147,17 @@ class NetBase(IOSExtraction):
|
||||
self.log.debug("Located at %s", binary_path)
|
||||
else:
|
||||
msg = f"Could not find the binary associated with the process with name {proc['proc_name']}"
|
||||
if len(proc["proc_name"]) == 16:
|
||||
if (proc["proc_name"] is None):
|
||||
msg = f"Found process entry with empty 'proc_name' : {proc['live_proc_id']} at {proc['live_isodate']}"
|
||||
elif len(proc["proc_name"]) == 16:
|
||||
msg = msg + " (However, the process name might have been truncated in the database)"
|
||||
|
||||
self.log.warning(msg)
|
||||
if not proc["live_proc_id"]:
|
||||
self.log.info(f"Found process entry in ZPROCESS but not in ZLIVEUSAGE : {proc['proc_name']} at {proc['live_isodate']}")
|
||||
|
||||
def check_manipulated(self):
|
||||
"""Check for missing or manipulate DB entries
|
||||
"""
|
||||
"""Check for missing or manipulate DB entries"""
|
||||
# Don't show duplicates for each missing process.
|
||||
missing_process_cache = set()
|
||||
for result in sorted(self.results, key=operator.itemgetter("live_isodate")):
|
||||
|
||||
@@ -4,40 +4,44 @@
|
||||
# https://license.mvt.re/1.1/
|
||||
IPHONE_MODELS = [
    {"description": "iPhone 4S", "identifier": "iPhone4,1"},
    {"description": "iPhone 5", "identifier": "iPhone5,1"},
    {"description": "iPhone 5", "identifier": "iPhone5,2"},
    {"description": "iPhone 5c", "identifier": "iPhone5,3"},
    {"description": "iPhone 5c", "identifier": "iPhone5,4"},
    {"description": "iPhone 5s", "identifier": "iPhone6,1"},
    {"description": "iPhone 5s", "identifier": "iPhone6,2"},
    {"description": "iPhone 6 Plus", "identifier": "iPhone7,1"},
    {"description": "iPhone 6", "identifier": "iPhone7,2"},
    {"description": "iPhone 6s", "identifier": "iPhone8,1"},
    {"description": "iPhone 6s Plus", "identifier": "iPhone8,2"},
    {"description": "iPhone SE (1st generation)", "identifier": "iPhone8,4"},
    {"description": "iPhone 7", "identifier": "iPhone9,1"},
    {"description": "iPhone 7 Plus", "identifier": "iPhone9,2"},
    {"description": "iPhone 7", "identifier": "iPhone9,3"},
    {"description": "iPhone 7 Plus", "identifier": "iPhone9,4"},
    {"description": "iPhone 8", "identifier": "iPhone10,1"},
    {"description": "iPhone 8 Plus", "identifier": "iPhone10,2"},
    {"description": "iPhone X", "identifier": "iPhone10,3"},
    {"description": "iPhone 8", "identifier": "iPhone10,4"},
    {"description": "iPhone 8 Plus", "identifier": "iPhone10,5"},
    {"description": "iPhone X", "identifier": "iPhone10,6"},
    {"description": "iPhone XS", "identifier": "iPhone11,2"},
    {"description": "iPhone XS Max", "identifier": "iPhone11,4"},
    {"description": "iPhone XS Max", "identifier": "iPhone11,6"},
    {"description": "iPhone XR", "identifier": "iPhone11,8"},
    {"description": "iPhone 11", "identifier": "iPhone12,1"},
    {"description": "iPhone 11 Pro", "identifier": "iPhone12,3"},
    {"description": "iPhone 11 Pro Max", "identifier": "iPhone12,5"},
    {"description": "iPhone SE (2nd generation)", "identifier": "iPhone12,8"},
    {"description": "iPhone 12 mini", "identifier": "iPhone13,1"},
    {"description": "iPhone 12", "identifier": "iPhone13,2"},
    {"description": "iPhone 12 Pro", "identifier": "iPhone13,3"},
    {"description": "iPhone 12 Pro Max", "identifier": "iPhone13,4"},
    {"identifier": "iPhone4,1", "description": "iPhone 4S"},
    {"identifier": "iPhone5,1", "description": "iPhone 5"},
    {"identifier": "iPhone5,2", "description": "iPhone 5"},
    {"identifier": "iPhone5,3", "description": "iPhone 5c"},
    {"identifier": "iPhone5,4", "description": "iPhone 5c"},
    {"identifier": "iPhone6,1", "description": "iPhone 5s"},
    {"identifier": "iPhone6,2", "description": "iPhone 5s"},
    {"identifier": "iPhone7,1", "description": "iPhone 6 Plus"},
    {"identifier": "iPhone7,2", "description": "iPhone 6"},
    {"identifier": "iPhone8,1", "description": "iPhone 6s"},
    {"identifier": "iPhone8,2", "description": "iPhone 6s Plus"},
    {"identifier": "iPhone8,4", "description": "iPhone SE (1st generation)"},
    {"identifier": "iPhone9,1", "description": "iPhone 7"},
    {"identifier": "iPhone9,2", "description": "iPhone 7 Plus"},
    {"identifier": "iPhone9,3", "description": "iPhone 7"},
    {"identifier": "iPhone9,4", "description": "iPhone 7 Plus"},
    {"identifier": "iPhone10,1", "description": "iPhone 8"},
    {"identifier": "iPhone10,2", "description": "iPhone 8 Plus"},
    {"identifier": "iPhone10,3", "description": "iPhone X"},
    {"identifier": "iPhone10,4", "description": "iPhone 8"},
    {"identifier": "iPhone10,5", "description": "iPhone 8 Plus"},
    {"identifier": "iPhone10,6", "description": "iPhone X"},
    {"identifier": "iPhone11,2", "description": "iPhone XS"},
    {"identifier": "iPhone11,4", "description": "iPhone XS Max"},
    {"identifier": "iPhone11,6", "description": "iPhone XS Max"},
    {"identifier": "iPhone11,8", "description": "iPhone XR"},
    {"identifier": "iPhone12,1", "description": "iPhone 11"},
    {"identifier": "iPhone12,3", "description": "iPhone 11 Pro"},
    {"identifier": "iPhone12,5", "description": "iPhone 11 Pro Max"},
    {"identifier": "iPhone12,8", "description": "iPhone SE (2nd generation)"},
    {"identifier": "iPhone13,1", "description": "iPhone 12 mini"},
    {"identifier": "iPhone13,2", "description": "iPhone 12"},
    {"identifier": "iPhone13,3", "description": "iPhone 12 Pro"},
    {"identifier": "iPhone13,4", "description": "iPhone 12 Pro Max"},
    {"identifier": "iPhone14,4", "description": "iPhone 13 Mini"},
    {"identifier": "iPhone14,5", "description": "iPhone 13"},
    {"identifier": "iPhone14,2", "description": "iPhone 13 Pro"},
    {"identifier": "iPhone14,3", "description": "iPhone 13 Pro Max"},
]
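The hunk above both reorders each entry's keys (identifier first) and appends the iPhone 13 family. As a small aside that is not part of the commit: the key order inside a Python dict literal is purely cosmetic and does not affect lookups or equality, so the reordering cannot change behaviour. A minimal check:

# Illustrative only -- key order in a dict literal does not affect lookups.
old_style = {"description": "iPhone 13", "identifier": "iPhone14,5"}
new_style = {"identifier": "iPhone14,5", "description": "iPhone 13"}
assert old_style == new_style
assert new_style["identifier"] == "iPhone14,5"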
IPHONE_IOS_VERSIONS = [

@@ -222,13 +226,23 @@ IPHONE_IOS_VERSIONS = [
    {"build": "18F72", "version": "14.6"},
    {"build": "18G69", "version": "14.7"},
    {"build": "18G82", "version": "14.7.1"},
    {"build": "18H17", "version": "14.8"},
    {"build": "18H107", "version": "14.8.1"},
    {"build": "19A341", "version": "15.0"},
    {"build": "19A346", "version": "15.0"},
    {"build": "19A348", "version": "15.0.1"},
    {"build": "19A404", "version": "15.0.2"},
    {"build": "19B74", "version": "15.1"},
    {"build": "19B81", "version": "15.1.1"},
]

def get_device_desc_from_id(identifier, devices_list=IPHONE_MODELS):
    # Look up the identifier in the supplied devices list.
    for model in devices_list:
        if identifier == model["identifier"]:
            return model["description"]


def find_version_by_build(build):
    build = build.upper()
    for version in IPHONE_IOS_VERSIONS:
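The rendered hunk stops inside find_version_by_build, so for context here is a small, self-contained sketch of how these two helpers can be exercised. The trimmed-down tables and the equality check that completes the truncated loop are assumptions for illustration, not lines from this commit.

# Sketch only: two-entry stand-ins for the full IPHONE_MODELS and
# IPHONE_IOS_VERSIONS tables shown above.
IPHONE_MODELS = [
    {"identifier": "iPhone13,2", "description": "iPhone 12"},
    {"identifier": "iPhone14,5", "description": "iPhone 13"},
]
IPHONE_IOS_VERSIONS = [
    {"build": "19A341", "version": "15.0"},
    {"build": "19B74", "version": "15.1"},
]


def get_device_desc_from_id(identifier, devices_list=IPHONE_MODELS):
    # Return the marketing name for a hardware identifier, if known.
    for model in devices_list:
        if identifier == model["identifier"]:
            return model["description"]


def find_version_by_build(build):
    build = build.upper()
    for version in IPHONE_IOS_VERSIONS:
        # Assumed completion of the loop truncated in the hunk above:
        # match the normalised build string against the lookup table.
        if build == version["build"]:
            return version["version"]


print(get_device_desc_from_id("iPhone14,5"))  # "iPhone 13"
print(find_version_by_build("19b74"))         # "15.1"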
setup.py (27 lines changed)
@@ -7,9 +7,7 @@ import os

from setuptools import find_packages, setup

__package_name__ = "mvt"
__version__ = "1.2.2"
__description__ = "Mobile Verification Toolkit"
from mvt.common.version import MVT_VERSION

this_directory = os.path.abspath(os.path.dirname(__file__))
readme_path = os.path.join(this_directory, "README.md")
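This hunk drops the version string hard-coded in setup.py in favour of an import from mvt.common.version, so the package metadata and the code share a single source of truth. A minimal sketch of what such a module looks like; the concrete version value below is a placeholder and is not part of this diff.

# mvt/common/version.py (sketch) -- the one place where the version is defined.
# The value is a placeholder; the real number is not shown in this diff.
MVT_VERSION = "0.0.0"

With that in place, setup.py only needs the import shown above, and a release bump touches one file instead of two.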
@@ -18,19 +16,21 @@ with open(readme_path, encoding="utf-8") as handle:

requires = (
    # Base dependencies:
    "click>=8.0.1",
    "rich>=10.6.0",
    "click>=8.0.3",
    "rich>=10.12.0",
    "tld>=0.12.6",
    "tqdm>=4.61.2",
    "tqdm>=4.62.3",
    "requests>=2.26.0",
    "simplejson>=3.17.3",
    "simplejson>=3.17.5",
    "packaging>=21.0",
    # iOS dependencies:
    "iOSbackup>=0.9.912",
    "iOSbackup>=0.9.921",
    # Android dependencies:
    "adb-shell>=0.4.0",
    "libusb1>=1.9.3",
    "adb-shell>=0.4.2",
    "libusb1>=2.0.1",
)


def get_package_data(package):
    walk = [(dirpath.replace(package + os.sep, "", 1), filenames)
            for dirpath, dirnames, filenames in os.walk(package)
@@ -42,10 +42,11 @@ def get_package_data(package):
                          for filename in filenames])
    return {package: filepaths}


setup(
    name=__package_name__,
    version=__version__,
    description=__description__,
    name="mvt",
    version=MVT_VERSION,
    description="Mobile Verification Toolkit",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/mvt-project/mvt",
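The get_package_data helper is split across the two hunks above, so for readability here is a self-contained sketch of what it computes: a mapping from the package name to the relative paths of files found under it, suitable for a setuptools package_data argument. The filepaths accumulation falls between the rendered hunks and is an assumed reconstruction (the real helper may additionally filter which directories are included), and the usage line at the end is hypothetical.

import os


def get_package_data(package):
    # Walk the package directory and record each (relative dir, files) pair.
    walk = [(dirpath.replace(package + os.sep, "", 1), filenames)
            for dirpath, dirnames, filenames in os.walk(package)]
    filepaths = []
    for base, filenames in walk:
        # Assumed reconstruction of the lines not shown in either hunk above:
        # join each file name onto its directory relative to the package root.
        filepaths.extend([os.path.join(base, filename)
                          for filename in filenames])
    return {package: filepaths}


# Hypothetical usage inside setup(); keyword arguments beyond those shown in
# the hunk above are not part of this diff.
# setup(..., package_data=get_package_data("mvt"))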